/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_mips.h"

#include "arch/mips/instruction_set_features_mips.h"
#include "art_method.h"
#include "code_generator_mips.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "heap_poisoning.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string.h"
#include "scoped_thread_state_change-inl.h"
#include "thread.h"
#include "utils/mips/assembler_mips.h"
#include "utils/mips/constants_mips.h"

namespace art {

namespace mips {

IntrinsicLocationsBuilderMIPS::IntrinsicLocationsBuilderMIPS(CodeGeneratorMIPS* codegen)
    : codegen_(codegen), allocator_(codegen->GetGraph()->GetAllocator()) {
}

MipsAssembler* IntrinsicCodeGeneratorMIPS::GetAssembler() {
  return reinterpret_cast<MipsAssembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorMIPS::GetAllocator() {
  return codegen_->GetGraph()->GetAllocator();
}

inline bool IntrinsicCodeGeneratorMIPS::IsR2OrNewer() const {
  return codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();
}

inline bool IntrinsicCodeGeneratorMIPS::IsR6() const {
  return codegen_->GetInstructionSetFeatures().IsR6();
}

inline bool IntrinsicCodeGeneratorMIPS::Is32BitFPU() const {
  return codegen_->GetInstructionSetFeatures().Is32BitFloatingPoint();
}

inline bool IntrinsicCodeGeneratorMIPS::HasMsa() const {
  return codegen_->GetInstructionSetFeatures().HasMsa();
}

#define __ codegen->GetAssembler()->

static void MoveFromReturnRegister(Location trg,
                                   DataType::Type type,
                                   CodeGeneratorMIPS* codegen) {
  if (!trg.IsValid()) {
    DCHECK_EQ(type, DataType::Type::kVoid);
    return;
  }

  DCHECK_NE(type, DataType::Type::kVoid);

  if (DataType::IsIntegralType(type) || type == DataType::Type::kReference) {
    Register trg_reg = trg.AsRegister<Register>();
    if (trg_reg != V0) {
      __ Move(V0, trg_reg);
    }
  } else {
    FRegister trg_reg = trg.AsFpuRegister<FRegister>();
    if (trg_reg != F0) {
      if (type == DataType::Type::kFloat32) {
        __ MovS(F0, trg_reg);
      } else {
        __ MovD(F0, trg_reg);
      }
    }
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS* codegen) {
  InvokeDexCallingConventionVisitorMIPS calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the
// intrinsic) in an intrinsified call. This will copy the arguments
// into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations
//       given by the invoke's location summary. If an intrinsic
//       modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit IntrinsicSlowPathMIPS(HInvoke* invoke) : SlowPathCodeMIPS(invoke), invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) override {
    CodeGeneratorMIPS* codegen = down_cast<CodeGeneratorMIPS*>(codegen_in);

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(
          invoke_->AsInvokeStaticOrDirect(), Location::RegisterLocation(A0), this);
    } else {
      codegen->GenerateVirtualCall(
          invoke_->AsInvokeVirtual(), Location::RegisterLocation(A0), this);
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const override { return "IntrinsicSlowPathMIPS"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS);
};

#undef __

bool IntrinsicLocationsBuilderMIPS::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
  FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();

  if (is64bit) {
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    __ Mfc1(out_lo, in);
    __ MoveFromFpuHigh(out_hi, in);
  } else {
    Register out = locations->Out().AsRegister<Register>();

    __ Mfc1(out, in);
  }
}

// long java.lang.Double.doubleToRawLongBits(double)
void IntrinsicLocationsBuilderMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());
}

// int java.lang.Float.floatToRawIntBits(float)
void IntrinsicLocationsBuilderMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());
}

static void CreateIntToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
  FRegister out = locations->Out().AsFpuRegister<FRegister>();

  if (is64bit) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();

    __ Mtc1(in_lo, out);
    __ MoveToFpuHigh(in_hi, out);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();

    __ Mtc1(in, out);
  }
}

// double java.lang.Double.longBitsToDouble(long)
void IntrinsicLocationsBuilderMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());
}

// float java.lang.Float.intBitsToFloat(int)
void IntrinsicLocationsBuilderMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* allocator,
                                    HInvoke* invoke,
                                    Location::OutputOverlap overlaps = Location::kNoOutputOverlap) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), overlaps);
}

static void GenReverse(LocationSummary* locations,
                       DataType::Type type,
                       bool isR2OrNewer,
                       bool isR6,
                       bool reverseBits,
                       MipsAssembler* assembler) {
  DCHECK(type == DataType::Type::kInt16 ||
         type == DataType::Type::kInt32 ||
         type == DataType::Type::kInt64);
  DCHECK(type != DataType::Type::kInt16 || !reverseBits);

  if (type == DataType::Type::kInt16) {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    if (isR2OrNewer) {
      __ Wsbh(out, in);
      __ Seh(out, out);
    } else {
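      // Reference only: without WSBH/SEH the sequence below byte-swaps the
      // low halfword and sign-extends it, roughly this C sketch:
      //   out = (int32_t)(int16_t)(((in & 0xFF) << 8) | ((in >> 8) & 0xFF));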
      __ Sll(TMP, in, 24);
      __ Sra(TMP, TMP, 16);
      __ Sll(out, in, 16);
      __ Srl(out, out, 24);
      __ Or(out, out, TMP);
    }
  } else if (type == DataType::Type::kInt32) {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    if (isR2OrNewer) {
      __ Rotr(out, in, 16);
      __ Wsbh(out, out);
    } else {
      // MIPS32r1
      // __ Rotr(out, in, 16);
      __ Sll(TMP, in, 16);
      __ Srl(out, in, 16);
      __ Or(out, out, TMP);
      // __ Wsbh(out, out);
      __ LoadConst32(AT, 0x00FF00FF);
      __ And(TMP, out, AT);
      __ Sll(TMP, TMP, 8);
      __ Srl(out, out, 8);
      __ And(out, out, AT);
      __ Or(out, out, TMP);
    }
    if (reverseBits) {
      if (isR6) {
        __ Bitswap(out, out);
      } else {
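        // Classic mask-and-shift bit reversal (C sketch, illustration only;
        // the bytes have already been reversed at this point):
        //   v = ((v & 0x0F0F0F0F) << 4) | ((v >> 4) & 0x0F0F0F0F);
        //   v = ((v & 0x33333333) << 2) | ((v >> 2) & 0x33333333);
        //   v = ((v & 0x55555555) << 1) | ((v >> 1) & 0x55555555);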
        __ LoadConst32(AT, 0x0F0F0F0F);
        __ And(TMP, out, AT);
        __ Sll(TMP, TMP, 4);
        __ Srl(out, out, 4);
        __ And(out, out, AT);
        __ Or(out, TMP, out);
        __ LoadConst32(AT, 0x33333333);
        __ And(TMP, out, AT);
        __ Sll(TMP, TMP, 2);
        __ Srl(out, out, 2);
        __ And(out, out, AT);
        __ Or(out, TMP, out);
        __ LoadConst32(AT, 0x55555555);
        __ And(TMP, out, AT);
        __ Sll(TMP, TMP, 1);
        __ Srl(out, out, 1);
        __ And(out, out, AT);
        __ Or(out, TMP, out);
      }
    }
  } else if (type == DataType::Type::kInt64) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    if (isR2OrNewer) {
      __ Rotr(AT, in_hi, 16);
      __ Rotr(TMP, in_lo, 16);
      __ Wsbh(out_lo, AT);
      __ Wsbh(out_hi, TMP);
    } else {
      // When calling CreateIntToIntLocations() we promised that the
      // use of the out_lo/out_hi wouldn't overlap with the use of
      // in_lo/in_hi. Be very careful not to write to out_lo/out_hi
      // until we're completely done reading from in_lo/in_hi.
      // __ Rotr(TMP, in_lo, 16);
      __ Sll(TMP, in_lo, 16);
      __ Srl(AT, in_lo, 16);
      __ Or(TMP, TMP, AT);        // Hold in TMP until it's safe
                                  // to write to out_hi.
      // __ Rotr(out_lo, in_hi, 16);
      __ Sll(AT, in_hi, 16);
      __ Srl(out_lo, in_hi, 16);  // Here we are finally done reading
                                  // from in_lo/in_hi so it's okay to
                                  // write to out_lo/out_hi.
      __ Or(out_lo, out_lo, AT);
      // __ Wsbh(out_hi, out_hi);
      __ LoadConst32(AT, 0x00FF00FF);
      __ And(out_hi, TMP, AT);
      __ Sll(out_hi, out_hi, 8);
      __ Srl(TMP, TMP, 8);
      __ And(TMP, TMP, AT);
      __ Or(out_hi, out_hi, TMP);
      // __ Wsbh(out_lo, out_lo);
      __ And(TMP, out_lo, AT);  // AT already holds the correct mask value.
      __ Sll(TMP, TMP, 8);
      __ Srl(out_lo, out_lo, 8);
      __ And(out_lo, out_lo, AT);
      __ Or(out_lo, out_lo, TMP);
    }
    if (reverseBits) {
      if (isR6) {
        __ Bitswap(out_hi, out_hi);
        __ Bitswap(out_lo, out_lo);
      } else {
        __ LoadConst32(AT, 0x0F0F0F0F);
        __ And(TMP, out_hi, AT);
        __ Sll(TMP, TMP, 4);
        __ Srl(out_hi, out_hi, 4);
        __ And(out_hi, out_hi, AT);
        __ Or(out_hi, TMP, out_hi);
        __ And(TMP, out_lo, AT);
        __ Sll(TMP, TMP, 4);
        __ Srl(out_lo, out_lo, 4);
        __ And(out_lo, out_lo, AT);
        __ Or(out_lo, TMP, out_lo);
        __ LoadConst32(AT, 0x33333333);
        __ And(TMP, out_hi, AT);
        __ Sll(TMP, TMP, 2);
        __ Srl(out_hi, out_hi, 2);
        __ And(out_hi, out_hi, AT);
        __ Or(out_hi, TMP, out_hi);
        __ And(TMP, out_lo, AT);
        __ Sll(TMP, TMP, 2);
        __ Srl(out_lo, out_lo, 2);
        __ And(out_lo, out_lo, AT);
        __ Or(out_lo, TMP, out_lo);
        __ LoadConst32(AT, 0x55555555);
        __ And(TMP, out_hi, AT);
        __ Sll(TMP, TMP, 1);
        __ Srl(out_hi, out_hi, 1);
        __ And(out_hi, out_hi, AT);
        __ Or(out_hi, TMP, out_hi);
        __ And(TMP, out_lo, AT);
        __ Sll(TMP, TMP, 1);
        __ Srl(out_lo, out_lo, 1);
        __ And(out_lo, out_lo, AT);
        __ Or(out_lo, TMP, out_lo);
      }
    }
  }
}

// int java.lang.Integer.reverseBytes(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             DataType::Type::kInt32,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits= */ false,
             GetAssembler());
}

// long java.lang.Long.reverseBytes(long)
void IntrinsicLocationsBuilderMIPS::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             DataType::Type::kInt64,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits= */ false,
             GetAssembler());
}

// short java.lang.Short.reverseBytes(short)
void IntrinsicLocationsBuilderMIPS::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             DataType::Type::kInt16,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits= */ false,
             GetAssembler());
}

static void GenNumberOfLeadingZeroes(LocationSummary* locations,
                                     bool is64bit,
                                     bool isR6,
                                     MipsAssembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();
  if (is64bit) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();

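    // out = clz(in_hi) + (in_hi != 0 ? 0 : clz(in_lo)), i.e. the leading
    // zeroes of in_lo only contribute when the whole high word is zero.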
    if (isR6) {
      __ ClzR6(AT, in_hi);
      __ ClzR6(TMP, in_lo);
      __ Seleqz(TMP, TMP, in_hi);
    } else {
      __ ClzR2(AT, in_hi);
      __ ClzR2(TMP, in_lo);
      __ Movn(TMP, ZERO, in_hi);
    }
    __ Addu(out, AT, TMP);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();

    if (isR6) {
      __ ClzR6(out, in);
    } else {
      __ ClzR2(out, in);
    }
  }
}

// int java.lang.Integer.numberOfLeadingZeros(int i)
void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit= */ false, IsR6(), GetAssembler());
}

// int java.lang.Long.numberOfLeadingZeros(long i)
void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit= */ true, IsR6(), GetAssembler());
}

static void GenNumberOfTrailingZeroes(LocationSummary* locations,
                                      bool is64bit,
                                      bool isR6,
                                      MipsAssembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();
  Register in_lo;
  Register in;

  if (is64bit) {
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();

    in_lo = locations->InAt(0).AsRegisterPairLow<Register>();

    // If in_lo is zero then count the number of trailing zeroes in in_hi;
    // otherwise count the number of trailing zeroes in in_lo.
    // out = in_lo ? in_lo : in_hi;
    if (isR6) {
      __ Seleqz(out, in_hi, in_lo);
      __ Selnez(TMP, in_lo, in_lo);
      __ Or(out, out, TMP);
    } else {
      __ Movz(out, in_hi, in_lo);
      __ Movn(out, in_lo, in_lo);
    }

    in = out;
  } else {
    in = locations->InAt(0).AsRegister<Register>();
    // Give in_lo a dummy value to keep the compiler from complaining.
    // Since we only get here in the 32-bit case, this value will never
    // be used.
    in_lo = in;
  }

  if (isR6) {
    // We don't have an instruction to count the number of trailing zeroes.
    // Start by flipping the bits end-for-end so we can count the number of
    // leading zeroes instead.
    __ Rotr(out, in, 16);
    __ Wsbh(out, out);
    __ Bitswap(out, out);
    __ ClzR6(out, out);
  } else {
    // Convert trailing zeroes to trailing ones, and bits to their left
    // to zeroes.
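    // In C terms (illustrative sketch only), for a 32-bit value v this is
    //   mask = (v - 1) & ~v;   // ones exactly where v had trailing zeroes
    //   out  = 32 - clz(mask);
    // computed below via the equivalent mask ((v - 1) ^ v) & (v - 1).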
    __ Addiu(TMP, in, -1);
    __ Xor(out, TMP, in);
    __ And(out, out, TMP);
    // Count number of leading zeroes.
    __ ClzR2(out, out);
    // Subtract number of leading zeroes from 32 to get number of trailing ones.
    // Remember that the trailing ones were formerly trailing zeroes.
    __ LoadConst32(TMP, 32);
    __ Subu(out, TMP, out);
  }

  if (is64bit) {
    // If in_lo is zero, then we counted the number of trailing zeroes in in_hi so we must add the
    // number of trailing zeroes in in_lo (32) to get the correct final count.
    __ LoadConst32(TMP, 32);
    if (isR6) {
      __ Seleqz(TMP, TMP, in_lo);
    } else {
      __ Movn(TMP, ZERO, in_lo);
    }
    __ Addu(out, out, TMP);
  }
}

// int java.lang.Integer.numberOfTrailingZeros(int i)
void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit= */ false, IsR6(), GetAssembler());
}

// int java.lang.Long.numberOfTrailingZeros(long i)
void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit= */ true, IsR6(), GetAssembler());
}

// int java.lang.Integer.reverse(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             DataType::Type::kInt32,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits= */ true,
             GetAssembler());
}

// long java.lang.Long.reverse(long)
void IntrinsicLocationsBuilderMIPS::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             DataType::Type::kInt64,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits= */ true,
             GetAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void GenBitCount(LocationSummary* locations,
                        DataType::Type type,
                        bool isR6,
                        bool hasMsa,
                        MipsAssembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();

  // https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel
  //
  // A generalization of the best bit counting method to integers of
  // bit-widths up to 128 (parameterized by type T) is this:
  //
  // v = v - ((v >> 1) & (T)~(T)0/3);                           // temp
  // v = (v & (T)~(T)0/15*3) + ((v >> 2) & (T)~(T)0/15*3);      // temp
  // v = (v + (v >> 4)) & (T)~(T)0/255*15;                      // temp
  // c = (T)(v * ((T)~(T)0/255)) >> (sizeof(T) - 1) * BITS_PER_BYTE; // count
  //
  // For comparison, for 32-bit quantities, this algorithm can be executed
  // using 20 MIPS instructions (the calls to LoadConst32() generate two
  // machine instructions each for the values being used in this algorithm).
  // A(n unrolled) loop-based algorithm required 25 instructions.
  //
  // For 64-bit quantities, this algorithm gets executed twice, (once
  // for in_lo, and again for in_hi), but saves a few instructions
  // because the mask values only have to be loaded once. Using this
  // algorithm the count for a 64-bit operand can be performed in 29
  // instructions compared to a loop-based algorithm which required 47
  // instructions.
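  //
  // Instantiated for 32-bit unsigned values (illustration only; the code
  // below emits the same steps with the mask constants spelled out):
  //
  //   v = v - ((v >> 1) & 0x55555555);
  //   v = (v & 0x33333333) + ((v >> 2) & 0x33333333);
  //   v = (v + (v >> 4)) & 0x0F0F0F0F;
  //   c = (v * 0x01010101) >> 24;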

  if (hasMsa) {
    if (type == DataType::Type::kInt32) {
      Register in = locations->InAt(0).AsRegister<Register>();
      __ Mtc1(in, FTMP);
      __ PcntW(static_cast<VectorRegister>(FTMP), static_cast<VectorRegister>(FTMP));
      __ Mfc1(out, FTMP);
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
      Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
      __ Mtc1(in_lo, FTMP);
      __ Mthc1(in_hi, FTMP);
      __ PcntD(static_cast<VectorRegister>(FTMP), static_cast<VectorRegister>(FTMP));
      __ Mfc1(out, FTMP);
    }
  } else {
    if (type == DataType::Type::kInt32) {
      Register in = locations->InAt(0).AsRegister<Register>();

      __ Srl(TMP, in, 1);
      __ LoadConst32(AT, 0x55555555);
      __ And(TMP, TMP, AT);
      __ Subu(TMP, in, TMP);
      __ LoadConst32(AT, 0x33333333);
      __ And(out, TMP, AT);
      __ Srl(TMP, TMP, 2);
      __ And(TMP, TMP, AT);
      __ Addu(TMP, out, TMP);
      __ Srl(out, TMP, 4);
      __ Addu(out, out, TMP);
      __ LoadConst32(AT, 0x0F0F0F0F);
      __ And(out, out, AT);
      __ LoadConst32(TMP, 0x01010101);
      if (isR6) {
        __ MulR6(out, out, TMP);
      } else {
        __ MulR2(out, out, TMP);
      }
      __ Srl(out, out, 24);
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
      Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register tmp_hi = locations->GetTemp(0).AsRegister<Register>();
      Register out_hi = locations->GetTemp(1).AsRegister<Register>();
      Register tmp_lo = TMP;
      Register out_lo = out;

      __ Srl(tmp_lo, in_lo, 1);
      __ Srl(tmp_hi, in_hi, 1);

      __ LoadConst32(AT, 0x55555555);

      __ And(tmp_lo, tmp_lo, AT);
      __ Subu(tmp_lo, in_lo, tmp_lo);

      __ And(tmp_hi, tmp_hi, AT);
      __ Subu(tmp_hi, in_hi, tmp_hi);

      __ LoadConst32(AT, 0x33333333);

      __ And(out_lo, tmp_lo, AT);
      __ Srl(tmp_lo, tmp_lo, 2);
      __ And(tmp_lo, tmp_lo, AT);
      __ Addu(tmp_lo, out_lo, tmp_lo);

      __ And(out_hi, tmp_hi, AT);
      __ Srl(tmp_hi, tmp_hi, 2);
      __ And(tmp_hi, tmp_hi, AT);
      __ Addu(tmp_hi, out_hi, tmp_hi);

      // Here we deviate from the original algorithm a bit. We've reached
      // the stage where the bitfields holding the subtotals are large
      // enough to hold the combined subtotals for both the low word, and
      // the high word. This means that we can add the subtotals for the
      // high, and low words into a single word, and compute the final
      // result for both the high, and low words using fewer instructions.
      __ LoadConst32(AT, 0x0F0F0F0F);

      __ Addu(TMP, tmp_hi, tmp_lo);

      __ Srl(out, TMP, 4);
      __ And(out, out, AT);
      __ And(TMP, TMP, AT);
      __ Addu(out, out, TMP);

      __ LoadConst32(AT, 0x01010101);

      if (isR6) {
        __ MulR6(out, out, AT);
      } else {
        __ MulR2(out, out, AT);
      }

      __ Srl(out, out, 24);
    }
  }
}

// int java.lang.Integer.bitCount(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), DataType::Type::kInt32, IsR6(), HasMsa(), GetAssembler());
}

// int java.lang.Long.bitCount(long)
void IntrinsicLocationsBuilderMIPS::VisitLongBitCount(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), DataType::Type::kInt64, IsR6(), HasMsa(), GetAssembler());
}

// double java.lang.Math.sqrt(double)
void IntrinsicLocationsBuilderMIPS::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  MipsAssembler* assembler = GetAssembler();
  FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister out = locations->Out().AsFpuRegister<FRegister>();

  __ SqrtD(out, in);
}

// byte libcore.io.Memory.peekByte(long address)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPeekByte(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();

  __ Lb(out, adr, 0);
}

// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPeekShortNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();

  if (IsR6()) {
    __ Lh(out, adr, 0);
  } else if (IsR2OrNewer()) {
    // Unlike for words, there are no lhl/lhr instructions to load
    // unaligned halfwords so the code loads individual bytes, in case
    // the address isn't halfword-aligned, and assembles them into a
    // signed halfword.
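    // Net effect (little-endian sketch, for illustration only):
    //   out = ((int32_t)(int8_t)mem[adr + 1] << 8) | (uint8_t)mem[adr];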
    __ Lb(AT, adr, 1);   // This byte must be sign-extended.
    __ Lb(out, adr, 0);  // This byte can be either sign-extended, or
                         // zero-extended because the following
                         // instruction overwrites the sign bits.
    __ Ins(out, AT, 8, 24);
  } else {
    __ Lbu(AT, adr, 0);  // This byte must be zero-extended. If it's not
                         // the "or" instruction below will destroy the upper
                         // 24 bits of the final result.
    __ Lb(out, adr, 1);  // This byte must be sign-extended.
    __ Sll(out, out, 8);
    __ Or(out, out, AT);
  }
}

// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPeekIntNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();

  if (IsR6()) {
    __ Lw(out, adr, 0);
  } else {
    __ Lwr(out, adr, 0);
    __ Lwl(out, adr, 3);
  }
}

// long libcore.io.Memory.peekLong(long address)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(allocator_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPeekLongNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register out_lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register out_hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();

  if (IsR6()) {
    __ Lw(out_lo, adr, 0);
    __ Lw(out_hi, adr, 4);
  } else {
    __ Lwr(out_lo, adr, 0);
    __ Lwl(out_lo, adr, 3);
    __ Lwr(out_hi, adr, 4);
    __ Lwl(out_hi, adr, 7);
  }
}

static void CreateIntIntToVoidLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

// void libcore.io.Memory.pokeByte(long address, byte value)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPokeByte(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register val = invoke->GetLocations()->InAt(1).AsRegister<Register>();

  __ Sb(val, adr, 0);
}

// void libcore.io.Memory.pokeShort(long address, short value)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPokeShortNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register val = invoke->GetLocations()->InAt(1).AsRegister<Register>();

  if (IsR6()) {
    __ Sh(val, adr, 0);
  } else {
    // Unlike for words, there are no shl/shr instructions to store
    // unaligned halfwords so the code stores individual bytes, in case
    // the address isn't halfword-aligned.
    __ Sb(val, adr, 0);
    __ Srl(AT, val, 8);
    __ Sb(AT, adr, 1);
  }
}

// void libcore.io.Memory.pokeInt(long address, int value)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPokeIntNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register val = invoke->GetLocations()->InAt(1).AsRegister<Register>();

  if (IsR6()) {
    __ Sw(val, adr, 0);
  } else {
    __ Swr(val, adr, 0);
    __ Swl(val, adr, 3);
  }
}

// void libcore.io.Memory.pokeLong(long address, long value)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPokeLongNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register val_lo = invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>();
  Register val_hi = invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>();

  if (IsR6()) {
    __ Sw(val_lo, adr, 0);
    __ Sw(val_hi, adr, 4);
  } else {
    __ Swr(val_lo, adr, 0);
    __ Swl(val_lo, adr, 3);
    __ Swr(val_hi, adr, 4);
    __ Swl(val_hi, adr, 7);
  }
}

// Thread java.lang.Thread.currentThread()
void IntrinsicLocationsBuilderMIPS::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS::VisitThreadCurrentThread(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();

  __ LoadFromOffset(kLoadWord,
                    out,
                    TR,
                    Thread::PeerOffset<kMipsPointerSize>().Int32Value());
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
                                          HInvoke* invoke,
                                          DataType::Type type) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke,
                                      can_call
                                          ? LocationSummary::kCallOnSlowPath
                                          : LocationSummary::kNoCall,
                                      kIntrinsified);
  if (can_call && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(),
                    (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (type == DataType::Type::kReference && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in InstructionCodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}

// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
static void GenUnsafeGet(HInvoke* invoke,
                         DataType::Type type,
                         bool is_volatile,
                         bool is_R6,
                         CodeGeneratorMIPS* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == DataType::Type::kInt32) ||
         (type == DataType::Type::kInt64) ||
         (type == DataType::Type::kReference)) << type;
  MipsAssembler* assembler = codegen->GetAssembler();
  // Target register.
  Location trg_loc = locations->Out();
  // Object pointer.
  Location base_loc = locations->InAt(1);
  Register base = base_loc.AsRegister<Register>();
  // The "offset" argument is passed as a "long". Since this code is for
  // a 32-bit processor, we can only use 32-bit addresses, so we only
  // need the low 32-bits of offset.
  Location offset_loc = locations->InAt(2);
  Register offset_lo = offset_loc.AsRegisterPairLow<Register>();

  if (!(kEmitCompilerReadBarrier && kUseBakerReadBarrier && (type == DataType::Type::kReference))) {
    __ Addu(TMP, base, offset_lo);
  }

  switch (type) {
    case DataType::Type::kInt64: {
      Register trg_lo = trg_loc.AsRegisterPairLow<Register>();
      Register trg_hi = trg_loc.AsRegisterPairHigh<Register>();
      CHECK(!is_volatile);  // TODO: support atomic 8-byte volatile loads.
      if (is_R6) {
        __ Lw(trg_lo, TMP, 0);
        __ Lw(trg_hi, TMP, 4);
      } else {
        __ Lwr(trg_lo, TMP, 0);
        __ Lwl(trg_lo, TMP, 3);
        __ Lwr(trg_hi, TMP, 4);
        __ Lwl(trg_hi, TMP, 7);
      }
      break;
    }

    case DataType::Type::kInt32: {
      Register trg = trg_loc.AsRegister<Register>();
      if (is_R6) {
        __ Lw(trg, TMP, 0);
      } else {
        __ Lwr(trg, TMP, 0);
        __ Lwl(trg, TMP, 3);
      }
      if (is_volatile) {
        __ Sync(0);
      }
      break;
    }

    case DataType::Type::kReference: {
      Register trg = trg_loc.AsRegister<Register>();
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          Location temp = locations->GetTemp(0);
          codegen->GenerateReferenceLoadWithBakerReadBarrier(invoke,
                                                             trg_loc,
                                                             base,
                                                             /* offset= */ 0U,
                                                             /* index= */ offset_loc,
                                                             TIMES_1,
                                                             temp,
                                                             /* needs_null_check= */ false);
          if (is_volatile) {
            __ Sync(0);
          }
        } else {
          if (is_R6) {
            __ Lw(trg, TMP, 0);
          } else {
            __ Lwr(trg, TMP, 0);
            __ Lwl(trg, TMP, 3);
          }
          if (is_volatile) {
            __ Sync(0);
          }
          codegen->GenerateReadBarrierSlow(invoke,
                                           trg_loc,
                                           trg_loc,
                                           base_loc,
                                           /* offset= */ 0U,
                                           /* index= */ offset_loc);
        }
      } else {
        if (is_R6) {
          __ Lw(trg, TMP, 0);
        } else {
          __ Lwr(trg, TMP, 0);
          __ Lwl(trg, TMP, 3);
        }
        if (is_volatile) {
          __ Sync(0);
        }
        __ MaybeUnpoisonHeapReference(trg);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type " << type;
      UNREACHABLE();
  }
}

// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile= */ false, IsR6(), codegen_);
}

// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, DataType::Type::kInt32, /* is_volatile= */ true, IsR6(), codegen_);
}

// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt64);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, DataType::Type::kInt64, /* is_volatile= */ false, IsR6(), codegen_);
}

// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile= */ false, IsR6(), codegen_);
}

// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(allocator_, invoke, DataType::Type::kReference);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, DataType::Type::kReference, /* is_volatile= */ true, IsR6(), codegen_);
}

static void CreateIntIntIntIntToVoidLocations(ArenaAllocator* allocator, HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
static void GenUnsafePut(LocationSummary* locations,
                         DataType::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         bool is_R6,
                         CodeGeneratorMIPS* codegen) {
  DCHECK((type == DataType::Type::kInt32) ||
         (type == DataType::Type::kInt64) ||
         (type == DataType::Type::kReference)) << type;
  MipsAssembler* assembler = codegen->GetAssembler();
  // Object pointer.
  Register base = locations->InAt(1).AsRegister<Register>();
  // The "offset" argument is passed as a "long", i.e., it's 64-bits in
  // size. Since this code is for a 32-bit processor, we can only use
  // 32-bit addresses, so we only need the low 32-bits of offset.
  Register offset_lo = locations->InAt(2).AsRegisterPairLow<Register>();

  __ Addu(TMP, base, offset_lo);
  if (is_volatile || is_ordered) {
    __ Sync(0);
  }
  if ((type == DataType::Type::kInt32) || (type == DataType::Type::kReference)) {
    Register value = locations->InAt(3).AsRegister<Register>();

    if (kPoisonHeapReferences && type == DataType::Type::kReference) {
      __ PoisonHeapReference(AT, value);
      value = AT;
    }

    if (is_R6) {
      __ Sw(value, TMP, 0);
    } else {
      __ Swr(value, TMP, 0);
      __ Swl(value, TMP, 3);
    }
  } else {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();
    CHECK(!is_volatile);  // TODO: support atomic 8-byte volatile stores.
    if (is_R6) {
      __ Sw(value_lo, TMP, 0);
      __ Sw(value_hi, TMP, 4);
    } else {
      __ Swr(value_lo, TMP, 0);
      __ Swl(value_lo, TMP, 3);
      __ Swr(value_hi, TMP, 4);
      __ Swl(value_hi, TMP, 7);
    }
  }

  if (is_volatile) {
    __ Sync(0);
  }

  if (type == DataType::Type::kReference) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, locations->InAt(3).AsRegister<Register>(), value_can_be_null);
  }
}

// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile= */ false,
               /* is_ordered= */ false,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile= */ false,
               /* is_ordered= */ true,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt32,
               /* is_volatile= */ true,
               /* is_ordered= */ false,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile= */ false,
               /* is_ordered= */ false,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile= */ false,
               /* is_ordered= */ true,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kReference,
               /* is_volatile= */ true,
               /* is_ordered= */ false,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               DataType::Type::kInt64,
               /* is_volatile= */ false,
               /* is_ordered= */ false,
               IsR6(),
               codegen_);
}

// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidLocations(allocator_, invoke);
Chris Larsen4fdc6d92015-12-14 13:26:14 -08001321}
1322
1323void IntrinsicCodeGeneratorMIPS::VisitUnsafePutLongOrdered(HInvoke* invoke) {
1324 GenUnsafePut(invoke->GetLocations(),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001325 DataType::Type::kInt64,
Andreas Gampe3db70682018-12-26 15:12:03 -08001326 /* is_volatile= */ false,
1327 /* is_ordered= */ true,
Chris Larsen4fdc6d92015-12-14 13:26:14 -08001328 IsR6(),
1329 codegen_);
1330}
1331
Vladimir Markoca6fff82017-10-03 14:49:14 +01001332static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* allocator, HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001333 bool can_call = kEmitCompilerReadBarrier &&
1334 kUseBakerReadBarrier &&
1335 (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
Vladimir Markoca6fff82017-10-03 14:49:14 +01001336 LocationSummary* locations =
1337 new (allocator) LocationSummary(invoke,
1338 can_call
1339 ? LocationSummary::kCallOnSlowPath
1340 : LocationSummary::kNoCall,
1341 kIntrinsified);
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001342 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1343 locations->SetInAt(1, Location::RequiresRegister());
1344 locations->SetInAt(2, Location::RequiresRegister());
1345 locations->SetInAt(3, Location::RequiresRegister());
1346 locations->SetInAt(4, Location::RequiresRegister());
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001347 locations->SetOut(Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08001348
1349 // Temporary register used in CAS by (Baker) read barrier.
1350 if (can_call) {
1351 locations->AddTemp(Location::RequiresRegister());
1352 }
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001353}
1354
Alexey Frunze15958152017-02-09 19:08:30 -08001355// Note that the caller must supply a properly aligned memory address.
1356// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001357static void GenCas(HInvoke* invoke, DataType::Type type, CodeGeneratorMIPS* codegen) {
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001358 MipsAssembler* assembler = codegen->GetAssembler();
Alexey Frunze15958152017-02-09 19:08:30 -08001359 LocationSummary* locations = invoke->GetLocations();
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001360 bool isR6 = codegen->GetInstructionSetFeatures().IsR6();
1361 Register base = locations->InAt(1).AsRegister<Register>();
Alexey Frunze15958152017-02-09 19:08:30 -08001362 Location offset_loc = locations->InAt(2);
1363 Register offset_lo = offset_loc.AsRegisterPairLow<Register>();
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001364 Register expected = locations->InAt(3).AsRegister<Register>();
1365 Register value = locations->InAt(4).AsRegister<Register>();
Alexey Frunze15958152017-02-09 19:08:30 -08001366 Location out_loc = locations->Out();
1367 Register out = out_loc.AsRegister<Register>();
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001368
1369 DCHECK_NE(base, out);
1370 DCHECK_NE(offset_lo, out);
1371 DCHECK_NE(expected, out);
1372
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001373 if (type == DataType::Type::kReference) {
Alexey Frunze15958152017-02-09 19:08:30 -08001374 // The only read barrier implementation supporting the
1375 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1376 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
1377
1378 // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
1379 // object and scan the receiver at the next GC for nothing.
Goran Jakovljevice114da22016-12-26 14:21:43 +01001380 bool value_can_be_null = true; // TODO: Worth finding out this information?
1381 codegen->MarkGCCard(base, value, value_can_be_null);
Alexey Frunze15958152017-02-09 19:08:30 -08001382
1383 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1384 Location temp = locations->GetTemp(0);
1385 // Need to make sure the reference stored in the field is a to-space
1386 // one before attempting the CAS or the CAS could fail incorrectly.
1387 codegen->GenerateReferenceLoadWithBakerReadBarrier(
1388 invoke,
1389 out_loc, // Unused, used only as a "temporary" within the read barrier.
1390 base,
Andreas Gampe3db70682018-12-26 15:12:03 -08001391 /* offset= */ 0u,
1392 /* index= */ offset_loc,
Alexey Frunze15958152017-02-09 19:08:30 -08001393 ScaleFactor::TIMES_1,
1394 temp,
Andreas Gampe3db70682018-12-26 15:12:03 -08001395 /* needs_null_check= */ false,
1396 /* always_update_field= */ true);
Alexey Frunze15958152017-02-09 19:08:30 -08001397 }
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001398 }
1399
Alexey Frunzec061de12017-02-14 13:27:23 -08001400 MipsLabel loop_head, exit_loop;
1401 __ Addu(TMP, base, offset_lo);
1402
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001403 if (kPoisonHeapReferences && type == DataType::Type::kReference) {
Alexey Frunzec061de12017-02-14 13:27:23 -08001404 __ PoisonHeapReference(expected);
1405 // Do not poison `value`, if it is the same register as
1406 // `expected`, which has just been poisoned.
1407 if (value != expected) {
1408 __ PoisonHeapReference(value);
1409 }
1410 }
1411
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001412 // do {
1413 // tmp_value = [tmp_ptr] - expected;
1414 // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
 1415   // result = (tmp_value == 0);
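  // Concretely, the loop below uses load-linked/store-conditional: LL reads [tmp_ptr] and
  // links the location; if the value equals 'expected', SC attempts the store and writes 1
  // to its register on success, or 0 if the location was disturbed in between, in which
  // case the code branches back to re-read and retry.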
1416
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001417 __ Sync(0);
1418 __ Bind(&loop_head);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001419 if ((type == DataType::Type::kInt32) || (type == DataType::Type::kReference)) {
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001420 if (isR6) {
1421 __ LlR6(out, TMP);
1422 } else {
1423 __ LlR2(out, TMP);
1424 }
1425 } else {
Alexey Frunzec061de12017-02-14 13:27:23 -08001426 LOG(FATAL) << "Unsupported op size " << type;
1427 UNREACHABLE();
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001428 }
1429 __ Subu(out, out, expected); // If we didn't get the 'expected'
1430 __ Sltiu(out, out, 1); // value, set 'out' to false, and
1431 __ Beqz(out, &exit_loop); // return.
1432 __ Move(out, value); // Use 'out' for the 'store conditional' instruction.
1433 // If we use 'value' directly, we would lose 'value'
1434 // in the case that the store fails. Whether the
 1435                                   // store succeeds or fails, it will write the
Roland Levillain5e8d5f02016-10-18 18:03:43 +01001436 // correct Boolean value into the 'out' register.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001437                                   // This test isn't really necessary. We only support DataType::Type::kInt32,
1438 // DataType::Type::kReference, and we already verified that we're working on one
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001439 // of those two types. It's left here in case the code needs to support
1440 // other types in the future.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001441 if ((type == DataType::Type::kInt32) || (type == DataType::Type::kReference)) {
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001442 if (isR6) {
1443 __ ScR6(out, TMP);
1444 } else {
1445 __ ScR2(out, TMP);
1446 }
1447 }
1448 __ Beqz(out, &loop_head); // If we couldn't do the read-modify-write
1449 // cycle atomically then retry.
1450 __ Bind(&exit_loop);
1451 __ Sync(0);
Alexey Frunzec061de12017-02-14 13:27:23 -08001452
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001453 if (kPoisonHeapReferences && type == DataType::Type::kReference) {
Alexey Frunzec061de12017-02-14 13:27:23 -08001454 __ UnpoisonHeapReference(expected);
1455 // Do not unpoison `value`, if it is the same register as
1456 // `expected`, which has just been unpoisoned.
1457 if (value != expected) {
1458 __ UnpoisonHeapReference(value);
1459 }
1460 }
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001461}
1462
1463// boolean sun.misc.Unsafe.compareAndSwapInt(Object o, long offset, int expected, int x)
1464void IntrinsicLocationsBuilderMIPS::VisitUnsafeCASInt(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001465 CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001466}
1467
1468void IntrinsicCodeGeneratorMIPS::VisitUnsafeCASInt(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001469 GenCas(invoke, DataType::Type::kInt32, codegen_);
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001470}
1471
1472// boolean sun.misc.Unsafe.compareAndSwapObject(Object o, long offset, Object expected, Object x)
1473void IntrinsicLocationsBuilderMIPS::VisitUnsafeCASObject(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001474 // The only read barrier implementation supporting the
1475 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1476 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
1477 return;
1478 }
1479
Vladimir Markoca6fff82017-10-03 14:49:14 +01001480 CreateIntIntIntIntIntToIntPlusTemps(allocator_, invoke);
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001481}
1482
1483void IntrinsicCodeGeneratorMIPS::VisitUnsafeCASObject(HInvoke* invoke) {
Alexey Frunze15958152017-02-09 19:08:30 -08001484 // The only read barrier implementation supporting the
1485 // UnsafeCASObject intrinsic is the Baker-style read barriers.
1486 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
1487
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001488 GenCas(invoke, DataType::Type::kReference, codegen_);
Alexey Frunze51aff3a2016-03-17 17:21:45 -07001489}
1490
Chris Larsencf283da2016-01-19 16:45:35 -08001491// int java.lang.String.compareTo(String anotherString)
1492void IntrinsicLocationsBuilderMIPS::VisitStringCompareTo(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001493 LocationSummary* locations = new (allocator_) LocationSummary(
1494 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsencf283da2016-01-19 16:45:35 -08001495 InvokeRuntimeCallingConvention calling_convention;
1496 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1497 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001498 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsencf283da2016-01-19 16:45:35 -08001499 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
1500}
1501
1502void IntrinsicCodeGeneratorMIPS::VisitStringCompareTo(HInvoke* invoke) {
1503 MipsAssembler* assembler = GetAssembler();
1504 LocationSummary* locations = invoke->GetLocations();
1505
1506 // Note that the null check must have been done earlier.
1507 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1508
1509 Register argument = locations->InAt(1).AsRegister<Register>();
Vladimir Marko174b2e22017-10-12 13:34:49 +01001510 SlowPathCodeMIPS* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS(invoke);
Chris Larsencf283da2016-01-19 16:45:35 -08001511 codegen_->AddSlowPath(slow_path);
1512 __ Beqz(argument, slow_path->GetEntryLabel());
Serban Constantinescufca16662016-07-14 09:21:59 +01001513 codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
Chris Larsencf283da2016-01-19 16:45:35 -08001514 __ Bind(slow_path->GetExitLabel());
1515}
1516
Chris Larsen16ba2b42015-11-02 10:58:31 -08001517// boolean java.lang.String.equals(Object anObject)
1518void IntrinsicLocationsBuilderMIPS::VisitStringEquals(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001519 LocationSummary* locations =
1520 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen16ba2b42015-11-02 10:58:31 -08001521 locations->SetInAt(0, Location::RequiresRegister());
1522 locations->SetInAt(1, Location::RequiresRegister());
1523 locations->SetOut(Location::RequiresRegister());
1524
1525 // Temporary registers to store lengths of strings and for calculations.
1526 locations->AddTemp(Location::RequiresRegister());
1527 locations->AddTemp(Location::RequiresRegister());
1528 locations->AddTemp(Location::RequiresRegister());
1529}
1530
1531void IntrinsicCodeGeneratorMIPS::VisitStringEquals(HInvoke* invoke) {
1532 MipsAssembler* assembler = GetAssembler();
1533 LocationSummary* locations = invoke->GetLocations();
1534
1535 Register str = locations->InAt(0).AsRegister<Register>();
1536 Register arg = locations->InAt(1).AsRegister<Register>();
1537 Register out = locations->Out().AsRegister<Register>();
1538
1539 Register temp1 = locations->GetTemp(0).AsRegister<Register>();
1540 Register temp2 = locations->GetTemp(1).AsRegister<Register>();
1541 Register temp3 = locations->GetTemp(2).AsRegister<Register>();
1542
1543 MipsLabel loop;
1544 MipsLabel end;
1545 MipsLabel return_true;
1546 MipsLabel return_false;
1547
1548 // Get offsets of count, value, and class fields within a string object.
1549 const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
1550 const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
1551 const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();
1552
1553 // Note that the null check must have been done earlier.
1554 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1555
 1556     // If the register containing the pointer to "this" and the register
 1557     // containing the pointer to "anObject" are the same register, then
 1558     // "this" and "anObject" are the same object, and we can
 1559     // short-circuit the logic to a true result.
1560 if (str == arg) {
1561 __ LoadConst32(out, 1);
1562 return;
1563 }
Goran Jakovljevic64fa84f2017-02-27 13:14:57 +01001564 StringEqualsOptimizations optimizations(invoke);
1565 if (!optimizations.GetArgumentNotNull()) {
1566 // Check if input is null, return false if it is.
1567 __ Beqz(arg, &return_false);
1568 }
Chris Larsen16ba2b42015-11-02 10:58:31 -08001569
1570 // Reference equality check, return true if same reference.
1571 __ Beq(str, arg, &return_true);
1572
Goran Jakovljevic64fa84f2017-02-27 13:14:57 +01001573 if (!optimizations.GetArgumentIsString()) {
1574 // Instanceof check for the argument by comparing class fields.
1575 // All string objects must have the same type since String cannot be subclassed.
1576 // Receiver must be a string object, so its class field is equal to all strings' class fields.
1577 // If the argument is a string object, its class field must be equal to receiver's class field.
Roland Levillain1d775d22018-09-07 13:56:57 +01001578 //
1579 // As the String class is expected to be non-movable, we can read the class
1580 // field from String.equals' arguments without read barriers.
1581 AssertNonMovableStringClass();
1582 // /* HeapReference<Class> */ temp1 = str->klass_
Goran Jakovljevic64fa84f2017-02-27 13:14:57 +01001583 __ Lw(temp1, str, class_offset);
Roland Levillain1d775d22018-09-07 13:56:57 +01001584 // /* HeapReference<Class> */ temp2 = arg->klass_
Goran Jakovljevic64fa84f2017-02-27 13:14:57 +01001585 __ Lw(temp2, arg, class_offset);
Roland Levillain1d775d22018-09-07 13:56:57 +01001586 // Also, because we use the previously loaded class references only in the
1587 // following comparison, we don't need to unpoison them.
Goran Jakovljevic64fa84f2017-02-27 13:14:57 +01001588 __ Bne(temp1, temp2, &return_false);
1589 }
Chris Larsen16ba2b42015-11-02 10:58:31 -08001590
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01001591 // Load `count` fields of this and argument strings.
Chris Larsen16ba2b42015-11-02 10:58:31 -08001592 __ Lw(temp1, str, count_offset);
1593 __ Lw(temp2, arg, count_offset);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01001594 // Check if `count` fields are equal, return false if they're not.
1595 // Also compares the compression style, if differs return false.
Chris Larsen16ba2b42015-11-02 10:58:31 -08001596 __ Bne(temp1, temp2, &return_false);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01001597 // Return true if both strings are empty. Even with string compression `count == 0` means empty.
1598 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
1599 "Expecting 0=compressed, 1=uncompressed");
Chris Larsen16ba2b42015-11-02 10:58:31 -08001600 __ Beqz(temp1, &return_true);
1601
1602 // Don't overwrite input registers
1603 __ Move(TMP, str);
1604 __ Move(temp3, arg);
1605
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01001606 // Assertions that must hold in order to compare strings 4 bytes at a time.
Chris Larsen16ba2b42015-11-02 10:58:31 -08001607 DCHECK_ALIGNED(value_offset, 4);
1608 static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");
1609
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01001610 // For string compression, calculate the number of bytes to compare (not chars).
1611 if (mirror::kUseStringCompression) {
1612 // Extract compression flag.
1613 if (IsR2OrNewer()) {
1614 __ Ext(temp2, temp1, 0, 1);
1615 } else {
1616 __ Sll(temp2, temp1, 31);
1617 __ Srl(temp2, temp2, 31);
1618 }
1619 __ Srl(temp1, temp1, 1); // Extract length.
1620 __ Sllv(temp1, temp1, temp2); // Double the byte count if uncompressed.
1621 }
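  // Worked example of the encoding handled above: a count field of 7 (0b111) denotes an
  // uncompressed 3-char string, so temp1 becomes 3 << 1 = 6 bytes to compare; a count of
  // 6 (0b110) denotes a compressed 3-char string and temp1 stays at 3 bytes.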
1622
1623 // Loop to compare strings 4 bytes at a time starting at the beginning of the string.
1624 // Ok to do this because strings are zero-padded to kObjectAlignment.
Chris Larsen16ba2b42015-11-02 10:58:31 -08001625 __ Bind(&loop);
1626 __ Lw(out, TMP, value_offset);
1627 __ Lw(temp2, temp3, value_offset);
1628 __ Bne(out, temp2, &return_false);
1629 __ Addiu(TMP, TMP, 4);
1630 __ Addiu(temp3, temp3, 4);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01001631 // With string compression, we have compared 4 bytes, otherwise 2 chars.
1632 __ Addiu(temp1, temp1, mirror::kUseStringCompression ? -4 : -2);
Chris Larsen16ba2b42015-11-02 10:58:31 -08001633 __ Bgtz(temp1, &loop);
1634
1635 // Return true and exit the function.
1636 // If loop does not result in returning false, we return true.
1637 __ Bind(&return_true);
1638 __ LoadConst32(out, 1);
1639 __ B(&end);
1640
1641 // Return false and exit the function.
1642 __ Bind(&return_false);
1643 __ LoadConst32(out, 0);
1644 __ Bind(&end);
1645}
1646
Chris Larsencf283da2016-01-19 16:45:35 -08001647static void GenerateStringIndexOf(HInvoke* invoke,
1648 bool start_at_zero,
1649 MipsAssembler* assembler,
Vladimir Marko174b2e22017-10-12 13:34:49 +01001650 CodeGeneratorMIPS* codegen) {
Chris Larsencf283da2016-01-19 16:45:35 -08001651 LocationSummary* locations = invoke->GetLocations();
1652 Register tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<Register>() : TMP;
1653
1654 // Note that the null check must have been done earlier.
1655 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1656
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001657 // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
1658 // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
Chris Larsencf283da2016-01-19 16:45:35 -08001659 SlowPathCodeMIPS* slow_path = nullptr;
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001660 HInstruction* code_point = invoke->InputAt(1);
1661 if (code_point->IsIntConstant()) {
Vladimir Markoda051082016-05-17 16:10:20 +01001662 if (!IsUint<16>(code_point->AsIntConstant()->GetValue())) {
Chris Larsencf283da2016-01-19 16:45:35 -08001663 // Always needs the slow-path. We could directly dispatch to it,
1664 // but this case should be rare, so for simplicity just put the
1665 // full slow-path down and branch unconditionally.
Vladimir Marko174b2e22017-10-12 13:34:49 +01001666 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathMIPS(invoke);
Chris Larsencf283da2016-01-19 16:45:35 -08001667 codegen->AddSlowPath(slow_path);
1668 __ B(slow_path->GetEntryLabel());
1669 __ Bind(slow_path->GetExitLabel());
1670 return;
1671 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001672 } else if (code_point->GetType() != DataType::Type::kUint16) {
Chris Larsencf283da2016-01-19 16:45:35 -08001673 Register char_reg = locations->InAt(1).AsRegister<Register>();
1674 // The "bltu" conditional branch tests to see if the character value
1675 // fits in a valid 16-bit (MIPS halfword) value. If it doesn't then
1676 // the character being searched for, if it exists in the string, is
1677 // encoded using UTF-16 and stored in the string as two (16-bit)
1678 // halfwords. Currently the assembly code used to implement this
1679 // intrinsic doesn't support searching for a character stored as
1680 // two halfwords so we fallback to using the generic implementation
1681 // of indexOf().
1682 __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
Vladimir Marko174b2e22017-10-12 13:34:49 +01001683 slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathMIPS(invoke);
Chris Larsencf283da2016-01-19 16:45:35 -08001684 codegen->AddSlowPath(slow_path);
1685 __ Bltu(tmp_reg, char_reg, slow_path->GetEntryLabel());
1686 }
1687
1688 if (start_at_zero) {
1689 DCHECK_EQ(tmp_reg, A2);
1690 // Start-index = 0.
1691 __ Clear(tmp_reg);
1692 }
1693
Serban Constantinescufca16662016-07-14 09:21:59 +01001694 codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
Chris Larsencf283da2016-01-19 16:45:35 -08001695 if (slow_path != nullptr) {
1696 __ Bind(slow_path->GetExitLabel());
1697 }
1698}
1699
1700// int java.lang.String.indexOf(int ch)
1701void IntrinsicLocationsBuilderMIPS::VisitStringIndexOf(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001702 LocationSummary* locations = new (allocator_) LocationSummary(
1703 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsencf283da2016-01-19 16:45:35 -08001704 // We have a hand-crafted assembly stub that follows the runtime
1705 // calling convention. So it's best to align the inputs accordingly.
1706 InvokeRuntimeCallingConvention calling_convention;
1707 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1708 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001709 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsencf283da2016-01-19 16:45:35 -08001710 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
1711
1712 // Need a temp for slow-path codepoint compare, and need to send start-index=0.
1713 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1714}
1715
1716void IntrinsicCodeGeneratorMIPS::VisitStringIndexOf(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001717 GenerateStringIndexOf(invoke, /* start_at_zero= */ true, GetAssembler(), codegen_);
Chris Larsencf283da2016-01-19 16:45:35 -08001718}
1719
1720// int java.lang.String.indexOf(int ch, int fromIndex)
1721void IntrinsicLocationsBuilderMIPS::VisitStringIndexOfAfter(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001722 LocationSummary* locations = new (allocator_) LocationSummary(
1723 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsencf283da2016-01-19 16:45:35 -08001724 // We have a hand-crafted assembly stub that follows the runtime
1725 // calling convention. So it's best to align the inputs accordingly.
1726 InvokeRuntimeCallingConvention calling_convention;
1727 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1728 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1729 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001730 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsencf283da2016-01-19 16:45:35 -08001731 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
1732
1733 // Need a temp for slow-path codepoint compare.
1734 locations->AddTemp(Location::RequiresRegister());
1735}
1736
1737void IntrinsicCodeGeneratorMIPS::VisitStringIndexOfAfter(HInvoke* invoke) {
Andreas Gampe3db70682018-12-26 15:12:03 -08001738 GenerateStringIndexOf(invoke, /* start_at_zero= */ false, GetAssembler(), codegen_);
Chris Larsencf283da2016-01-19 16:45:35 -08001739}
1740
1741// java.lang.StringFactory.newStringFromBytes(byte[] data, int high, int offset, int byteCount)
1742void IntrinsicLocationsBuilderMIPS::VisitStringNewStringFromBytes(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001743 LocationSummary* locations = new (allocator_) LocationSummary(
1744 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsencf283da2016-01-19 16:45:35 -08001745 InvokeRuntimeCallingConvention calling_convention;
1746 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1747 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1748 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1749 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001750 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsencf283da2016-01-19 16:45:35 -08001751 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
1752}
1753
1754void IntrinsicCodeGeneratorMIPS::VisitStringNewStringFromBytes(HInvoke* invoke) {
1755 MipsAssembler* assembler = GetAssembler();
1756 LocationSummary* locations = invoke->GetLocations();
1757
1758 Register byte_array = locations->InAt(0).AsRegister<Register>();
Vladimir Marko174b2e22017-10-12 13:34:49 +01001759 SlowPathCodeMIPS* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS(invoke);
Chris Larsencf283da2016-01-19 16:45:35 -08001760 codegen_->AddSlowPath(slow_path);
1761 __ Beqz(byte_array, slow_path->GetEntryLabel());
Serban Constantinescufca16662016-07-14 09:21:59 +01001762 codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
Chris Larsencf283da2016-01-19 16:45:35 -08001763 __ Bind(slow_path->GetExitLabel());
1764}
1765
1766// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
1767void IntrinsicLocationsBuilderMIPS::VisitStringNewStringFromChars(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001768 LocationSummary* locations =
1769 new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
Chris Larsencf283da2016-01-19 16:45:35 -08001770 InvokeRuntimeCallingConvention calling_convention;
1771 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1772 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1773 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001774 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsencf283da2016-01-19 16:45:35 -08001775 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
1776}
1777
1778void IntrinsicCodeGeneratorMIPS::VisitStringNewStringFromChars(HInvoke* invoke) {
Chris Larsencf283da2016-01-19 16:45:35 -08001779 // No need to emit code checking whether `locations->InAt(2)` is a null
1780 // pointer, as callers of the native method
1781 //
1782 // java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
1783 //
1784 // all include a null check on `data` before calling that method.
Serban Constantinescufca16662016-07-14 09:21:59 +01001785 codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
Chris Larsencf283da2016-01-19 16:45:35 -08001786}
1787
1788// java.lang.StringFactory.newStringFromString(String toCopy)
1789void IntrinsicLocationsBuilderMIPS::VisitStringNewStringFromString(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001790 LocationSummary* locations = new (allocator_) LocationSummary(
1791 invoke, LocationSummary::kCallOnMainAndSlowPath, kIntrinsified);
Chris Larsencf283da2016-01-19 16:45:35 -08001792 InvokeRuntimeCallingConvention calling_convention;
1793 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001794 Location outLocation = calling_convention.GetReturnLocation(DataType::Type::kInt32);
Chris Larsencf283da2016-01-19 16:45:35 -08001795 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
1796}
1797
1798void IntrinsicCodeGeneratorMIPS::VisitStringNewStringFromString(HInvoke* invoke) {
1799 MipsAssembler* assembler = GetAssembler();
1800 LocationSummary* locations = invoke->GetLocations();
1801
1802 Register string_to_copy = locations->InAt(0).AsRegister<Register>();
Vladimir Marko174b2e22017-10-12 13:34:49 +01001803 SlowPathCodeMIPS* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS(invoke);
Chris Larsencf283da2016-01-19 16:45:35 -08001804 codegen_->AddSlowPath(slow_path);
1805 __ Beqz(string_to_copy, slow_path->GetEntryLabel());
Serban Constantinescufca16662016-07-14 09:21:59 +01001806 codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc());
Chris Larsencf283da2016-01-19 16:45:35 -08001807 __ Bind(slow_path->GetExitLabel());
1808}
1809
Chris Larsen2714fe62016-02-11 14:23:53 -08001810static void GenIsInfinite(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001811 const DataType::Type type,
Chris Larsen2714fe62016-02-11 14:23:53 -08001812 const bool isR6,
1813 MipsAssembler* assembler) {
1814 FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
1815 Register out = locations->Out().AsRegister<Register>();
1816
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001817 DCHECK(type == DataType::Type::kFloat32 || type == DataType::Type::kFloat64);
Chris Larsen2714fe62016-02-11 14:23:53 -08001818
1819 if (isR6) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001820 if (type == DataType::Type::kFloat64) {
Chris Larsen2714fe62016-02-11 14:23:53 -08001821 __ ClassD(FTMP, in);
1822 } else {
1823 __ ClassS(FTMP, in);
1824 }
1825 __ Mfc1(out, FTMP);
1826 __ Andi(out, out, kPositiveInfinity | kNegativeInfinity);
1827 __ Sltu(out, ZERO, out);
1828 } else {
1829 // If one, or more, of the exponent bits is zero, then the number can't be infinite.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001830 if (type == DataType::Type::kFloat64) {
Chris Larsen2714fe62016-02-11 14:23:53 -08001831 __ MoveFromFpuHigh(TMP, in);
Anton Kirilova3ffea22016-04-07 17:02:37 +01001832 __ LoadConst32(AT, High32Bits(kPositiveInfinityDouble));
Chris Larsen2714fe62016-02-11 14:23:53 -08001833 } else {
1834 __ Mfc1(TMP, in);
Anton Kirilova3ffea22016-04-07 17:02:37 +01001835 __ LoadConst32(AT, kPositiveInfinityFloat);
Chris Larsen2714fe62016-02-11 14:23:53 -08001836 }
1837 __ Xor(TMP, TMP, AT);
1838
1839 __ Sll(TMP, TMP, 1);
1840
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001841 if (type == DataType::Type::kFloat64) {
Chris Larsen2714fe62016-02-11 14:23:53 -08001842 __ Mfc1(AT, in);
1843 __ Or(TMP, TMP, AT);
1844 }
1845 // If any of the significand bits are one, then the number is not infinite.
1846 __ Sltiu(out, TMP, 1);
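    // Worked example for the float path: +Inf is 0x7F800000 and -Inf is 0xFF800000; XOR with
    // kPositiveInfinityFloat leaves 0 or 0x80000000, and the shift left by one discards that
    // sign bit, so TMP is zero exactly for the two infinities and 'out' is set to 1.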
1847 }
1848}
1849
1850// boolean java.lang.Float.isInfinite(float)
1851void IntrinsicLocationsBuilderMIPS::VisitFloatIsInfinite(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001852 CreateFPToIntLocations(allocator_, invoke);
Chris Larsen2714fe62016-02-11 14:23:53 -08001853}
1854
1855void IntrinsicCodeGeneratorMIPS::VisitFloatIsInfinite(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001856 GenIsInfinite(invoke->GetLocations(), DataType::Type::kFloat32, IsR6(), GetAssembler());
Chris Larsen2714fe62016-02-11 14:23:53 -08001857}
1858
1859// boolean java.lang.Double.isInfinite(double)
1860void IntrinsicLocationsBuilderMIPS::VisitDoubleIsInfinite(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001861 CreateFPToIntLocations(allocator_, invoke);
Chris Larsen2714fe62016-02-11 14:23:53 -08001862}
1863
1864void IntrinsicCodeGeneratorMIPS::VisitDoubleIsInfinite(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001865 GenIsInfinite(invoke->GetLocations(), DataType::Type::kFloat64, IsR6(), GetAssembler());
Chris Larsen2714fe62016-02-11 14:23:53 -08001866}
1867
Chris Larsen97759342016-02-16 17:10:40 -08001868static void GenHighestOneBit(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001869 const DataType::Type type,
Chris Larsen97759342016-02-16 17:10:40 -08001870 bool isR6,
1871 MipsAssembler* assembler) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001872 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Chris Larsen97759342016-02-16 17:10:40 -08001873
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001874 if (type == DataType::Type::kInt64) {
Chris Larsen97759342016-02-16 17:10:40 -08001875 Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
1876 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
1877 Register out_lo = locations->Out().AsRegisterPairLow<Register>();
1878 Register out_hi = locations->Out().AsRegisterPairHigh<Register>();
1879
1880 if (isR6) {
1881 __ ClzR6(TMP, in_hi);
1882 } else {
1883 __ ClzR2(TMP, in_hi);
1884 }
1885 __ LoadConst32(AT, 0x80000000);
1886 __ Srlv(out_hi, AT, TMP);
1887 __ And(out_hi, out_hi, in_hi);
1888 if (isR6) {
1889 __ ClzR6(TMP, in_lo);
1890 } else {
1891 __ ClzR2(TMP, in_lo);
1892 }
1893 __ Srlv(out_lo, AT, TMP);
1894 __ And(out_lo, out_lo, in_lo);
1895 if (isR6) {
1896 __ Seleqz(out_lo, out_lo, out_hi);
1897 } else {
1898 __ Movn(out_lo, ZERO, out_hi);
1899 }
1900 } else {
1901 Register in = locations->InAt(0).AsRegister<Register>();
1902 Register out = locations->Out().AsRegister<Register>();
1903
1904 if (isR6) {
1905 __ ClzR6(TMP, in);
1906 } else {
1907 __ ClzR2(TMP, in);
1908 }
1909 __ LoadConst32(AT, 0x80000000);
1910 __ Srlv(AT, AT, TMP); // Srlv shifts in the range of [0;31] bits (lower 5 bits of arg).
1911 __ And(out, AT, in); // So this is required for 0 (=shift by 32).
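    // Worked example: in = 0x00012345 has 15 leading zeros, AT becomes 0x80000000 >> 15 =
    // 0x00010000, and the AND keeps exactly the highest set bit. For in = 0 the shift amount
    // is 32 & 0x1F = 0, so AT stays 0x80000000 and the AND still yields 0.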
1912 }
1913}
1914
1915// int java.lang.Integer.highestOneBit(int)
1916void IntrinsicLocationsBuilderMIPS::VisitIntegerHighestOneBit(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001917 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen97759342016-02-16 17:10:40 -08001918}
1919
1920void IntrinsicCodeGeneratorMIPS::VisitIntegerHighestOneBit(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001921 GenHighestOneBit(invoke->GetLocations(), DataType::Type::kInt32, IsR6(), GetAssembler());
Chris Larsen97759342016-02-16 17:10:40 -08001922}
1923
1924// long java.lang.Long.highestOneBit(long)
1925void IntrinsicLocationsBuilderMIPS::VisitLongHighestOneBit(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001926 CreateIntToIntLocations(allocator_, invoke, Location::kOutputOverlap);
Chris Larsen97759342016-02-16 17:10:40 -08001927}
1928
1929void IntrinsicCodeGeneratorMIPS::VisitLongHighestOneBit(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001930 GenHighestOneBit(invoke->GetLocations(), DataType::Type::kInt64, IsR6(), GetAssembler());
Chris Larsen97759342016-02-16 17:10:40 -08001931}
1932
1933static void GenLowestOneBit(LocationSummary* locations,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001934 const DataType::Type type,
Chris Larsen97759342016-02-16 17:10:40 -08001935 bool isR6,
1936 MipsAssembler* assembler) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001937 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64);
Chris Larsen97759342016-02-16 17:10:40 -08001938
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001939 if (type == DataType::Type::kInt64) {
Chris Larsen97759342016-02-16 17:10:40 -08001940 Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
1941 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
1942 Register out_lo = locations->Out().AsRegisterPairLow<Register>();
1943 Register out_hi = locations->Out().AsRegisterPairHigh<Register>();
1944
1945 __ Subu(TMP, ZERO, in_lo);
1946 __ And(out_lo, TMP, in_lo);
1947 __ Subu(TMP, ZERO, in_hi);
1948 __ And(out_hi, TMP, in_hi);
1949 if (isR6) {
1950 __ Seleqz(out_hi, out_hi, out_lo);
1951 } else {
1952 __ Movn(out_hi, ZERO, out_lo);
1953 }
1954 } else {
1955 Register in = locations->InAt(0).AsRegister<Register>();
1956 Register out = locations->Out().AsRegister<Register>();
1957
1958 __ Subu(TMP, ZERO, in);
1959 __ And(out, TMP, in);
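    // Worked example: in = 6 (0b0110) gives TMP = -6 (...11111010), and TMP & in = 2 (0b0010),
    // the lowest set bit; for in = 0 both operands are zero and so is the result.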
1960 }
1961}
1962
1963// int java.lang.Integer.lowestOneBit(int)
1964void IntrinsicLocationsBuilderMIPS::VisitIntegerLowestOneBit(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001965 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen97759342016-02-16 17:10:40 -08001966}
1967
1968void IntrinsicCodeGeneratorMIPS::VisitIntegerLowestOneBit(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001969 GenLowestOneBit(invoke->GetLocations(), DataType::Type::kInt32, IsR6(), GetAssembler());
Chris Larsen97759342016-02-16 17:10:40 -08001970}
1971
1972// long java.lang.Long.lowestOneBit(long)
1973void IntrinsicLocationsBuilderMIPS::VisitLongLowestOneBit(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001974 CreateIntToIntLocations(allocator_, invoke);
Chris Larsen97759342016-02-16 17:10:40 -08001975}
1976
1977void IntrinsicCodeGeneratorMIPS::VisitLongLowestOneBit(HInvoke* invoke) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001978 GenLowestOneBit(invoke->GetLocations(), DataType::Type::kInt64, IsR6(), GetAssembler());
Chris Larsen97759342016-02-16 17:10:40 -08001979}
1980
Chris Larsenf09d5322016-04-22 12:06:34 -07001981// int java.lang.Math.round(float)
1982void IntrinsicLocationsBuilderMIPS::VisitMathRoundFloat(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01001983 LocationSummary* locations =
1984 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsenf09d5322016-04-22 12:06:34 -07001985 locations->SetInAt(0, Location::RequiresFpuRegister());
1986 locations->AddTemp(Location::RequiresFpuRegister());
1987 locations->SetOut(Location::RequiresRegister());
1988}
1989
1990void IntrinsicCodeGeneratorMIPS::VisitMathRoundFloat(HInvoke* invoke) {
1991 LocationSummary* locations = invoke->GetLocations();
1992 MipsAssembler* assembler = GetAssembler();
1993 FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
1994 FRegister half = locations->GetTemp(0).AsFpuRegister<FRegister>();
1995 Register out = locations->Out().AsRegister<Register>();
1996
1997 MipsLabel done;
Chris Larsenf09d5322016-04-22 12:06:34 -07001998
Chris Larsenf09d5322016-04-22 12:06:34 -07001999 if (IsR6()) {
Lena Djokicf4e23a82017-05-09 15:43:45 +02002000 // out = floor(in);
2001 //
2002 // if (out != MAX_VALUE && out != MIN_VALUE) {
2003 // TMP = ((in - out) >= 0.5) ? 1 : 0;
2004 // return out += TMP;
2005 // }
2006 // return out;
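    //
    // For example, in = 2.5f floors to 2 and (in - out) = 0.5f, so out becomes 3; in = -2.5f
    // floors to -3 and (in - out) = 0.5f, so out becomes -2, matching Math.round's
    // round-half-up (toward positive infinity) behavior.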
Chris Larsenf09d5322016-04-22 12:06:34 -07002007
Lena Djokicf4e23a82017-05-09 15:43:45 +02002008 // out = floor(in);
2009 __ FloorWS(FTMP, in);
2010 __ Mfc1(out, FTMP);
Chris Larsenf09d5322016-04-22 12:06:34 -07002011
Lena Djokicf4e23a82017-05-09 15:43:45 +02002012 // if (out != MAX_VALUE && out != MIN_VALUE)
2013 __ Addiu(TMP, out, 1);
2014 __ Aui(TMP, TMP, 0x8000); // TMP = out + 0x8000 0001
2015 // or out - 0x7FFF FFFF.
2016 // IOW, TMP = 1 if out = Int.MIN_VALUE
2017 // or TMP = 0 if out = Int.MAX_VALUE.
2018 __ Srl(TMP, TMP, 1); // TMP = 0 if out = Int.MIN_VALUE
2019 // or out = Int.MAX_VALUE.
2020 __ Beqz(TMP, &done);
Chris Larsenf09d5322016-04-22 12:06:34 -07002021
Lena Djokicf4e23a82017-05-09 15:43:45 +02002022 // TMP = (0.5f <= (in - out)) ? -1 : 0;
2023 __ Cvtsw(FTMP, FTMP); // Convert output of floor.w.s back to "float".
2024 __ LoadConst32(AT, bit_cast<int32_t, float>(0.5f));
2025 __ SubS(FTMP, in, FTMP);
2026 __ Mtc1(AT, half);
Chris Larsenf09d5322016-04-22 12:06:34 -07002027
Chris Larsenf09d5322016-04-22 12:06:34 -07002028 __ CmpLeS(FTMP, half, FTMP);
Chris Larsen07f712f2016-06-10 16:06:02 -07002029 __ Mfc1(TMP, FTMP);
Lena Djokicf4e23a82017-05-09 15:43:45 +02002030
2031 // Return out -= TMP.
2032 __ Subu(out, out, TMP);
Chris Larsenf09d5322016-04-22 12:06:34 -07002033 } else {
Lena Djokicf4e23a82017-05-09 15:43:45 +02002034 // if (in.isNaN) {
2035 // return 0;
2036 // }
2037 //
2038 // out = floor.w.s(in);
2039 //
2040 // /*
2041 // * This "if" statement is only needed for the pre-R6 version of floor.w.s
2042 // * which outputs Integer.MAX_VALUE for negative numbers with magnitudes
2043 // * too large to fit in a 32-bit integer.
2044 // */
2045 // if (out == Integer.MAX_VALUE) {
2046 // TMP = (in < 0.0f) ? 1 : 0;
2047 // /*
2048 // * If TMP is 1, then adding it to out will wrap its value from
2049 // * Integer.MAX_VALUE to Integer.MIN_VALUE.
2050 // */
2051 // return out += TMP;
2052 // }
2053 //
2054 // /*
2055 // * For negative values not handled by the previous "if" statement the
2056 // * test here will correctly set the value of TMP.
2057 // */
2058 // TMP = ((in - out) >= 0.5f) ? 1 : 0;
2059 // return out += TMP;
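    //
    // For example, in = -1.0e20f: floor.w.s yields Integer.MAX_VALUE here, the in < 0.0f test
    // keeps TMP at -1, and out - TMP wraps MAX_VALUE around to Integer.MIN_VALUE, which is the
    // result Math.round specifies for inputs below the int range.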
2060
2061 MipsLabel finite;
2062 MipsLabel add;
2063
2064 // Test for NaN.
2065 __ CunS(in, in);
2066
2067 // Return zero for NaN.
2068 __ Move(out, ZERO);
2069 __ Bc1t(&done);
2070
2071 // out = floor(in);
2072 __ FloorWS(FTMP, in);
2073 __ Mfc1(out, FTMP);
2074
2075 __ LoadConst32(TMP, -1);
2076
 2077     // TMP = (out == java.lang.Integer.MAX_VALUE) ? -1 : 0;
2078 __ LoadConst32(AT, std::numeric_limits<int32_t>::max());
2079 __ Bne(AT, out, &finite);
2080
2081 __ Mtc1(ZERO, FTMP);
2082 __ ColtS(in, FTMP);
2083
2084 __ B(&add);
2085
2086 __ Bind(&finite);
2087
2088 // TMP = (0.5f <= (in - out)) ? -1 : 0;
2089 __ Cvtsw(FTMP, FTMP); // Convert output of floor.w.s back to "float".
2090 __ LoadConst32(AT, bit_cast<int32_t, float>(0.5f));
2091 __ SubS(FTMP, in, FTMP);
2092 __ Mtc1(AT, half);
Chris Larsenf09d5322016-04-22 12:06:34 -07002093 __ ColeS(half, FTMP);
Chris Larsenf09d5322016-04-22 12:06:34 -07002094
Lena Djokicf4e23a82017-05-09 15:43:45 +02002095 __ Bind(&add);
Chris Larsenf09d5322016-04-22 12:06:34 -07002096
Chris Larsenf09d5322016-04-22 12:06:34 -07002097 __ Movf(TMP, ZERO);
Lena Djokicf4e23a82017-05-09 15:43:45 +02002098
2099 // Return out -= TMP.
2100 __ Subu(out, out, TMP);
Chris Larsenf09d5322016-04-22 12:06:34 -07002101 }
Chris Larsenf09d5322016-04-22 12:06:34 -07002102 __ Bind(&done);
2103}
2104
Chris Larsen692235e2016-11-21 16:04:53 -08002105// void java.lang.String.getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin)
2106void IntrinsicLocationsBuilderMIPS::VisitStringGetCharsNoCheck(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002107 LocationSummary* locations =
2108 new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
Chris Larsen692235e2016-11-21 16:04:53 -08002109 locations->SetInAt(0, Location::RequiresRegister());
2110 locations->SetInAt(1, Location::RequiresRegister());
2111 locations->SetInAt(2, Location::RequiresRegister());
2112 locations->SetInAt(3, Location::RequiresRegister());
2113 locations->SetInAt(4, Location::RequiresRegister());
2114
Chris Larsenfe4ff442017-03-23 11:25:12 -07002115 locations->AddTemp(Location::RequiresRegister());
2116 locations->AddTemp(Location::RequiresRegister());
2117 locations->AddTemp(Location::RequiresRegister());
Chris Larsen692235e2016-11-21 16:04:53 -08002118}
2119
2120void IntrinsicCodeGeneratorMIPS::VisitStringGetCharsNoCheck(HInvoke* invoke) {
2121 MipsAssembler* assembler = GetAssembler();
2122 LocationSummary* locations = invoke->GetLocations();
2123
2124 // Check assumption that sizeof(Char) is 2 (used in scaling below).
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002125 const size_t char_size = DataType::Size(DataType::Type::kUint16);
Chris Larsen692235e2016-11-21 16:04:53 -08002126 DCHECK_EQ(char_size, 2u);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002127 const size_t char_shift = DataType::SizeShift(DataType::Type::kUint16);
Chris Larsen692235e2016-11-21 16:04:53 -08002128
2129 Register srcObj = locations->InAt(0).AsRegister<Register>();
2130 Register srcBegin = locations->InAt(1).AsRegister<Register>();
2131 Register srcEnd = locations->InAt(2).AsRegister<Register>();
2132 Register dstObj = locations->InAt(3).AsRegister<Register>();
2133 Register dstBegin = locations->InAt(4).AsRegister<Register>();
2134
2135 Register dstPtr = locations->GetTemp(0).AsRegister<Register>();
Chris Larsen692235e2016-11-21 16:04:53 -08002136 Register srcPtr = locations->GetTemp(1).AsRegister<Register>();
Chris Larsen692235e2016-11-21 16:04:53 -08002137 Register numChrs = locations->GetTemp(2).AsRegister<Register>();
Chris Larsen692235e2016-11-21 16:04:53 -08002138
2139 MipsLabel done;
Chris Larsenfe4ff442017-03-23 11:25:12 -07002140 MipsLabel loop;
Chris Larsen692235e2016-11-21 16:04:53 -08002141
2142 // Location of data in char array buffer.
2143 const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();
2144
2145 // Get offset of value field within a string object.
2146 const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
2147
2148 __ Beq(srcEnd, srcBegin, &done); // No characters to move.
2149
2150 // Calculate number of characters to be copied.
2151 __ Subu(numChrs, srcEnd, srcBegin);
2152
2153 // Calculate destination address.
2154 __ Addiu(dstPtr, dstObj, data_offset);
Chris Larsencd0295d2017-03-31 15:26:54 -07002155 __ ShiftAndAdd(dstPtr, dstBegin, dstPtr, char_shift);
Chris Larsen692235e2016-11-21 16:04:53 -08002156
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002157 if (mirror::kUseStringCompression) {
2158 MipsLabel uncompressed_copy, compressed_loop;
2159 const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2160 // Load count field and extract compression flag.
2161 __ LoadFromOffset(kLoadWord, TMP, srcObj, count_offset);
2162 __ Sll(TMP, TMP, 31);
2163
Chris Larsenfe4ff442017-03-23 11:25:12 -07002164 // If string is uncompressed, use uncompressed path.
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002165 __ Bnez(TMP, &uncompressed_copy);
2166
2167 // Copy loop for compressed src, copying 1 character (8-bit) to (16-bit) at a time.
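    // For example, a compressed source holding the Latin-1 bytes for "abc" is widened here,
    // one byte per iteration, into the UTF-16 code units 0x0061, 0x0062, 0x0063 in the
    // destination char[].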
2168 __ Addu(srcPtr, srcObj, srcBegin);
2169 __ Bind(&compressed_loop);
2170 __ LoadFromOffset(kLoadUnsignedByte, TMP, srcPtr, value_offset);
2171 __ StoreToOffset(kStoreHalfword, TMP, dstPtr, 0);
2172 __ Addiu(numChrs, numChrs, -1);
2173 __ Addiu(srcPtr, srcPtr, 1);
2174 __ Addiu(dstPtr, dstPtr, 2);
2175 __ Bnez(numChrs, &compressed_loop);
2176
2177 __ B(&done);
2178 __ Bind(&uncompressed_copy);
2179 }
2180
Chris Larsen692235e2016-11-21 16:04:53 -08002181 // Calculate source address.
2182 __ Addiu(srcPtr, srcObj, value_offset);
Chris Larsencd0295d2017-03-31 15:26:54 -07002183 __ ShiftAndAdd(srcPtr, srcBegin, srcPtr, char_shift);
Chris Larsen692235e2016-11-21 16:04:53 -08002184
Chris Larsenfe4ff442017-03-23 11:25:12 -07002185 __ Bind(&loop);
2186 __ Lh(AT, srcPtr, 0);
2187 __ Addiu(numChrs, numChrs, -1);
2188 __ Addiu(srcPtr, srcPtr, char_size);
2189 __ Sh(AT, dstPtr, 0);
2190 __ Addiu(dstPtr, dstPtr, char_size);
2191 __ Bnez(numChrs, &loop);
Chris Larsen692235e2016-11-21 16:04:53 -08002192
2193 __ Bind(&done);
2194}
2195
Vladimir Markoca6fff82017-10-03 14:49:14 +01002196static void CreateFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
2197 LocationSummary* locations =
2198 new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
Chris Larsenb9005fa2017-03-24 12:11:54 -07002199 InvokeRuntimeCallingConvention calling_convention;
2200
2201 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002202 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kFloat64));
Chris Larsenb9005fa2017-03-24 12:11:54 -07002203}
2204
Vladimir Markoca6fff82017-10-03 14:49:14 +01002205static void CreateFPFPToFPCallLocations(ArenaAllocator* allocator, HInvoke* invoke) {
2206 LocationSummary* locations =
2207 new (allocator) LocationSummary(invoke, LocationSummary::kCallOnMainOnly, kIntrinsified);
Chris Larsenb9005fa2017-03-24 12:11:54 -07002208 InvokeRuntimeCallingConvention calling_convention;
2209
2210 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2211 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002212 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kFloat64));
Chris Larsenb9005fa2017-03-24 12:11:54 -07002213}
2214
2215static void GenFPToFPCall(HInvoke* invoke, CodeGeneratorMIPS* codegen, QuickEntrypointEnum entry) {
2216 LocationSummary* locations = invoke->GetLocations();
2217 FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
2218 DCHECK_EQ(in, F12);
2219 FRegister out = locations->Out().AsFpuRegister<FRegister>();
2220 DCHECK_EQ(out, F0);
2221
2222 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
2223}
2224
2225static void GenFPFPToFPCall(HInvoke* invoke,
2226 CodeGeneratorMIPS* codegen,
2227 QuickEntrypointEnum entry) {
2228 LocationSummary* locations = invoke->GetLocations();
2229 FRegister in0 = locations->InAt(0).AsFpuRegister<FRegister>();
2230 DCHECK_EQ(in0, F12);
2231 FRegister in1 = locations->InAt(1).AsFpuRegister<FRegister>();
2232 DCHECK_EQ(in1, F14);
2233 FRegister out = locations->Out().AsFpuRegister<FRegister>();
2234 DCHECK_EQ(out, F0);
2235
2236 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
2237}
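// The DCHECKs in the two helpers above reflect the O32 hard-float convention used by the quick
// entrypoints: the first two floating-point arguments arrive in F12 and F14, and the result is
// returned in F0.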
2238
2239// static double java.lang.Math.cos(double a)
2240void IntrinsicLocationsBuilderMIPS::VisitMathCos(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002241 CreateFPToFPCallLocations(allocator_, invoke);
Chris Larsenb9005fa2017-03-24 12:11:54 -07002242}
2243
2244void IntrinsicCodeGeneratorMIPS::VisitMathCos(HInvoke* invoke) {
2245 GenFPToFPCall(invoke, codegen_, kQuickCos);
2246}
2247
2248// static double java.lang.Math.sin(double a)
2249void IntrinsicLocationsBuilderMIPS::VisitMathSin(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002250 CreateFPToFPCallLocations(allocator_, invoke);
Chris Larsenb9005fa2017-03-24 12:11:54 -07002251}
2252
2253void IntrinsicCodeGeneratorMIPS::VisitMathSin(HInvoke* invoke) {
2254 GenFPToFPCall(invoke, codegen_, kQuickSin);
2255}
2256
2257// static double java.lang.Math.acos(double a)
2258void IntrinsicLocationsBuilderMIPS::VisitMathAcos(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002259 CreateFPToFPCallLocations(allocator_, invoke);
Chris Larsenb9005fa2017-03-24 12:11:54 -07002260}
2261
2262void IntrinsicCodeGeneratorMIPS::VisitMathAcos(HInvoke* invoke) {
2263 GenFPToFPCall(invoke, codegen_, kQuickAcos);
2264}
2265
2266// static double java.lang.Math.asin(double a)
2267void IntrinsicLocationsBuilderMIPS::VisitMathAsin(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002268 CreateFPToFPCallLocations(allocator_, invoke);
Chris Larsenb9005fa2017-03-24 12:11:54 -07002269}
2270
2271void IntrinsicCodeGeneratorMIPS::VisitMathAsin(HInvoke* invoke) {
2272 GenFPToFPCall(invoke, codegen_, kQuickAsin);
2273}
2274
2275// static double java.lang.Math.atan(double a)
2276void IntrinsicLocationsBuilderMIPS::VisitMathAtan(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002277 CreateFPToFPCallLocations(allocator_, invoke);
Chris Larsenb9005fa2017-03-24 12:11:54 -07002278}
2279
2280void IntrinsicCodeGeneratorMIPS::VisitMathAtan(HInvoke* invoke) {
2281 GenFPToFPCall(invoke, codegen_, kQuickAtan);
2282}
2283
2284// static double java.lang.Math.atan2(double y, double x)
2285void IntrinsicLocationsBuilderMIPS::VisitMathAtan2(HInvoke* invoke) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002286 CreateFPFPToFPCallLocations(allocator_, invoke);
Chris Larsenb9005fa2017-03-24 12:11:54 -07002287}
2288
2289void IntrinsicCodeGeneratorMIPS::VisitMathAtan2(HInvoke* invoke) {
2290 GenFPFPToFPCall(invoke, codegen_, kQuickAtan2);
2291}
2292
Vladimir Marko4d179872018-01-19 14:50:10 +00002293// static double java.lang.Math.pow(double a, double b)
2294void IntrinsicLocationsBuilderMIPS::VisitMathPow(HInvoke* invoke) {
2295 CreateFPFPToFPCallLocations(allocator_, invoke);
2296}
2297
2298void IntrinsicCodeGeneratorMIPS::VisitMathPow(HInvoke* invoke) {
2299 GenFPFPToFPCall(invoke, codegen_, kQuickPow);
2300}
2301
// static double java.lang.Math.cbrt(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

// static double java.lang.Math.cosh(double x)
void IntrinsicLocationsBuilderMIPS::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

// static double java.lang.Math.exp(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

// static double java.lang.Math.expm1(double x)
void IntrinsicLocationsBuilderMIPS::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

// static double java.lang.Math.hypot(double x, double y)
void IntrinsicLocationsBuilderMIPS::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickHypot);
}

// static double java.lang.Math.log(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

// static double java.lang.Math.log10(double x)
void IntrinsicLocationsBuilderMIPS::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

// static double java.lang.Math.nextAfter(double start, double direction)
void IntrinsicLocationsBuilderMIPS::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

// static double java.lang.Math.sinh(double x)
void IntrinsicLocationsBuilderMIPS::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

// static double java.lang.Math.tan(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

// static double java.lang.Math.tanh(double x)
void IntrinsicLocationsBuilderMIPS::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(allocator_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}

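// Note: the SystemArrayCopyChar intrinsic below only inlines the simple char[] copy case.
// Anything it cannot prove safe up front (same or null arrays, negative positions or length,
// out-of-range copies) branches to the slow path, which falls back to the regular call of
// the original method.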
// static void java.lang.System.arraycopy(Object src, int srcPos,
//                                        Object dest, int destPos,
//                                        int length)
void IntrinsicLocationsBuilderMIPS::VisitSystemArrayCopyChar(HInvoke* invoke) {
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  // As long as we are checking, we might as well check to see if the src and dest
  // positions are >= 0.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyway.
    return;
  }

  // And since we are already checking, check the length too.
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0) {
      // Just call as normal.
      return;
    }
  }

  // Okay, it is safe to generate inline code.
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
  locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

// Utility routine to verify that "length(input) - pos >= length".
static void EnoughItems(MipsAssembler* assembler,
                        Register length_input_minus_pos,
                        Location length,
                        SlowPathCodeMIPS* slow_path) {
  if (length.IsConstant()) {
    int32_t length_constant = length.GetConstant()->AsIntConstant()->GetValue();

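    // Note: slti only takes a 16-bit sign-extended immediate, so the constant length can be
    // compared directly only when it fits; otherwise it is materialized into TMP first.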
    if (IsInt<16>(length_constant)) {
      __ Slti(TMP, length_input_minus_pos, length_constant);
      __ Bnez(TMP, slow_path->GetEntryLabel());
    } else {
      __ LoadConst32(TMP, length_constant);
      __ Blt(length_input_minus_pos, TMP, slow_path->GetEntryLabel());
    }
  } else {
    __ Blt(length_input_minus_pos, length.AsRegister<Register>(), slow_path->GetEntryLabel());
  }
}

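// Emits the bounds checks for one side of the copy: verifies that pos is non-negative and
// that "length(input) - pos >= length", branching to the slow path otherwise. When
// length_is_input_length is true, the copy is known to cover the whole input, so the only
// position that can succeed is zero.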
static void CheckPosition(MipsAssembler* assembler,
                          Location pos,
                          Register input,
                          Location length,
                          SlowPathCodeMIPS* slow_path,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  // Calculate length(input) - pos.
  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, AT, input, length_offset);
        EnoughItems(assembler, AT, length, slow_path);
      }
    } else {
      // Check that (length(input) - pos) >= zero.
      __ LoadFromOffset(kLoadWord, AT, input, length_offset);
      DCHECK_GT(pos_const, 0);
      __ Addiu32(AT, AT, -pos_const, TMP);
      __ Bltz(AT, slow_path->GetEntryLabel());

      // Verify that (length(input) - pos) >= length.
      EnoughItems(assembler, AT, length, slow_path);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    Register pos_reg = pos.AsRegister<Register>();
    __ Bnez(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Verify that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ Bltz(pos_reg, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= zero.
    __ LoadFromOffset(kLoadWord, AT, input, length_offset);
    __ Subu(AT, AT, pos_reg);
    __ Bltz(AT, slow_path->GetEntryLabel());

    // Verify that (length(input) - pos) >= length.
    EnoughItems(assembler, AT, length, slow_path);
  }
}

void IntrinsicCodeGeneratorMIPS::VisitSystemArrayCopyChar(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location src_pos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  MipsLabel loop;

  Register dest_base = locations->GetTemp(0).AsRegister<Register>();
  Register src_base = locations->GetTemp(1).AsRegister<Register>();
  Register count = locations->GetTemp(2).AsRegister<Register>();

  SlowPathCodeMIPS* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathMIPS(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ Beq(src, dest, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ Beqz(src, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ Beqz(dest, slow_path->GetEntryLabel());

  // Load length into register for count.
  if (length.IsConstant()) {
    __ LoadConst32(count, length.GetConstant()->AsIntConstant()->GetValue());
  } else {
    // If the length is negative, bail out.
    // We have already checked in the LocationsBuilder for the constant case.
    __ Bltz(length.AsRegister<Register>(), slow_path->GetEntryLabel());

    __ Move(count, length.AsRegister<Register>());
  }

  // Validity checks: source.
  CheckPosition(assembler, src_pos, src, Location::RegisterLocation(count), slow_path);

  // Validity checks: dest.
  CheckPosition(assembler, dest_pos, dest, Location::RegisterLocation(count), slow_path);

  // If count is zero, we're done.
  __ Beqz(count, slow_path->GetExitLabel());

  // Okay, everything checks out. Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = DataType::Size(DataType::Type::kUint16);
  DCHECK_EQ(char_size, 2u);

  const size_t char_shift = DataType::SizeShift(DataType::Type::kUint16);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Calculate source and destination addresses.
  if (src_pos.IsConstant()) {
    int32_t src_pos_const = src_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Addiu32(src_base, src, data_offset + char_size * src_pos_const, TMP);
  } else {
    __ Addiu32(src_base, src, data_offset, TMP);
    __ ShiftAndAdd(src_base, src_pos.AsRegister<Register>(), src_base, char_shift);
  }
  if (dest_pos.IsConstant()) {
    int32_t dest_pos_const = dest_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Addiu32(dest_base, dest, data_offset + char_size * dest_pos_const, TMP);
  } else {
    __ Addiu32(dest_base, dest, data_offset, TMP);
    __ ShiftAndAdd(dest_base, dest_pos.AsRegister<Register>(), dest_base, char_shift);
  }

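  // Copy loop: move one 16-bit char per iteration (load a halfword from src_base, store it to
  // dest_base), bumping both base pointers by the element size and decrementing the count.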
  __ Bind(&loop);
  __ Lh(TMP, src_base, 0);
  __ Addiu(src_base, src_base, char_size);
  __ Addiu(count, count, -1);
  __ Sh(TMP, dest_base, 0);
  __ Addiu(dest_base, dest_base, char_size);
  __ Bnez(count, &loop);

  __ Bind(slow_path->GetExitLabel());
}

// static Integer java.lang.Integer.valueOf(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerValueOf(HInvoke* invoke) {
  InvokeRuntimeCallingConvention calling_convention;
  IntrinsicVisitor::ComputeIntegerValueOfLocations(
      invoke,
      codegen_,
      calling_convention.GetReturnLocation(DataType::Type::kReference),
      Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerValueOf(HInvoke* invoke) {
  IntrinsicVisitor::IntegerValueOfInfo info =
      IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke, codegen_->GetCompilerOptions());
  LocationSummary* locations = invoke->GetLocations();
  MipsAssembler* assembler = GetAssembler();
  InstructionCodeGeneratorMIPS* icodegen =
      down_cast<InstructionCodeGeneratorMIPS*>(codegen_->GetInstructionVisitor());

  Register out = locations->Out().AsRegister<Register>();
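  // This mirrors Integer.valueOf()'s IntegerCache: boxed values in [info.low, info.low +
  // info.length) are loaded from the boot image cache array, everything else gets a newly
  // allocated j.l.Integer. Roughly:
  //   return (in - low) < length (unsigned) ? boot_image_cache[in - low] : new Integer(in);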
  if (invoke->InputAt(0)->IsConstant()) {
    int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
    if (static_cast<uint32_t>(value - info.low) < info.length) {
      // Just embed the j.l.Integer in the code.
      DCHECK_NE(info.value_boot_image_reference, IntegerValueOfInfo::kInvalidReference);
      codegen_->LoadBootImageAddress(out, info.value_boot_image_reference);
    } else {
      DCHECK(locations->CanCall());
      // Allocate and initialize a new j.l.Integer.
      // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
      // JIT object table.
      codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(),
                                             info.integer_boot_image_offset);
      __ StoreConstToOffset(kStoreWord, value, out, info.value_offset, TMP);
      // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
      // one.
      icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    }
  } else {
    DCHECK(locations->CanCall());
    Register in = locations->InAt(0).AsRegister<Register>();
    MipsLabel allocate, done;

    __ Addiu32(out, in, -info.low);
    // As unsigned quantities, is out < info.length?
    if (IsUint<15>(info.length)) {
      __ Sltiu(AT, out, info.length);
    } else {
      __ LoadConst32(AT, info.length);
      __ Sltu(AT, out, AT);
    }
    // Branch if out >= info.length. This means that "in" is outside the valid range.
    __ Beqz(AT, &allocate);

    // If the value is within the bounds, load the j.l.Integer directly from the array.
    codegen_->LoadBootImageAddress(TMP, info.array_data_boot_image_reference);
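    // Scale the index by the size of a heap reference (4 bytes) and load the cached Integer.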
    __ ShiftAndAdd(out, out, TMP, TIMES_4);
    __ Lw(out, out, 0);
    __ MaybeUnpoisonHeapReference(out);
    __ B(&done);

    __ Bind(&allocate);
    // Otherwise allocate and initialize a new j.l.Integer.
    codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(),
                                           info.integer_boot_image_offset);
    __ StoreToOffset(kStoreWord, in, out, info.value_offset);
    // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
    // one.
    icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    __ Bind(&done);
  }
}

// static boolean java.lang.Thread.interrupted()
void IntrinsicLocationsBuilderMIPS::VisitThreadInterrupted(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS::VisitThreadInterrupted(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();
  int32_t offset = Thread::InterruptedOffset<kMipsPointerSize>().Int32Value();
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  MipsLabel done;
  __ Beqz(out, &done);
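  // The interrupted flag was set; clear it. The sync barriers keep the clearing store ordered
  // with the surrounding memory accesses so the reset is visible to other threads in order.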
  __ Sync(0);
  __ StoreToOffset(kStoreWord, ZERO, TR, offset);
  __ Sync(0);
  __ Bind(&done);
}

void IntrinsicLocationsBuilderMIPS::VisitReachabilityFence(HInvoke* invoke) {
  LocationSummary* locations =
      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
  locations->SetInAt(0, Location::Any());
}

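// No code is needed for a reachability fence; constraining the argument to Location::Any()
// above is enough to keep the referenced object alive up to this point.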
void IntrinsicCodeGeneratorMIPS::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }

// Unimplemented intrinsics.

UNIMPLEMENTED_INTRINSIC(MIPS, MathCeil)
UNIMPLEMENTED_INTRINSIC(MIPS, MathFloor)
UNIMPLEMENTED_INTRINSIC(MIPS, MathRint)
UNIMPLEMENTED_INTRINSIC(MIPS, MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetLongVolatile)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafePutLongVolatile)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeCASLong)

UNIMPLEMENTED_INTRINSIC(MIPS, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(MIPS, SystemArrayCopy)

UNIMPLEMENTED_INTRINSIC(MIPS, CRC32Update)
UNIMPLEMENTED_INTRINSIC(MIPS, CRC32UpdateBytes)
UNIMPLEMENTED_INTRINSIC(MIPS, CRC32UpdateByteBuffer)

UNIMPLEMENTED_INTRINSIC(MIPS, StringStringIndexOf)
UNIMPLEMENTED_INTRINSIC(MIPS, StringStringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBufferAppend)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBufferLength)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBufferToString)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderAppendObject)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderAppendString)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderAppendCharSequence)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderAppendCharArray)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderAppendBoolean)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderAppendChar)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderAppendInt)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderAppendLong)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderAppendFloat)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderAppendDouble)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderLength)
UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderToString)

// 1.8.
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(MIPS)

#undef __

}  // namespace mips
}  // namespace art