blob: 4cea6dfdfb46c04a112c5b1db0939f49ee8011fc [file] [log] [blame]
Chris Larsen701566a2015-10-27 15:29:13 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "intrinsics_mips.h"
18
19#include "arch/mips/instruction_set_features_mips.h"
20#include "art_method.h"
21#include "code_generator_mips.h"
22#include "entrypoints/quick/quick_entrypoints.h"
23#include "intrinsics.h"
24#include "mirror/array-inl.h"
25#include "mirror/string.h"
Andreas Gampe508fdf32017-06-05 16:42:13 -070026#include "scoped_thread_state_change-inl.h"
Chris Larsen701566a2015-10-27 15:29:13 -070027#include "thread.h"
28#include "utils/mips/assembler_mips.h"
29#include "utils/mips/constants_mips.h"
30
31namespace art {
32
33namespace mips {
34
35IntrinsicLocationsBuilderMIPS::IntrinsicLocationsBuilderMIPS(CodeGeneratorMIPS* codegen)
Chris Larsen5633ce72017-04-10 15:47:40 -070036 : codegen_(codegen), arena_(codegen->GetGraph()->GetArena()) {
Chris Larsen701566a2015-10-27 15:29:13 -070037}
38
39MipsAssembler* IntrinsicCodeGeneratorMIPS::GetAssembler() {
40 return reinterpret_cast<MipsAssembler*>(codegen_->GetAssembler());
41}
42
43ArenaAllocator* IntrinsicCodeGeneratorMIPS::GetAllocator() {
44 return codegen_->GetGraph()->GetArena();
45}
46
// True if the target ISA revision is MIPS32r2 or newer (enables wsbh, rotr,
// ins/ext, seh, and the movz/movn conditional moves used below).
inline bool IntrinsicCodeGeneratorMIPS::IsR2OrNewer() const {
  return codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();
}

// True if the target ISA is MIPS32r6 (R6-only encodings such as seleqz/selnez,
// bitswap, clz (R6 form), and NAN2008 floating-point semantics).
inline bool IntrinsicCodeGeneratorMIPS::IsR6() const {
  return codegen_->GetInstructionSetFeatures().IsR6();
}

// True if the target FPU exposes only 32-bit floating-point registers.
inline bool IntrinsicCodeGeneratorMIPS::Is32BitFPU() const {
  return codegen_->GetInstructionSetFeatures().Is32BitFloatingPoint();
}
58
Chris Larsen701566a2015-10-27 15:29:13 -070059#define __ codegen->GetAssembler()->
60
61static void MoveFromReturnRegister(Location trg,
62 Primitive::Type type,
63 CodeGeneratorMIPS* codegen) {
64 if (!trg.IsValid()) {
65 DCHECK_EQ(type, Primitive::kPrimVoid);
66 return;
67 }
68
69 DCHECK_NE(type, Primitive::kPrimVoid);
70
71 if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
72 Register trg_reg = trg.AsRegister<Register>();
73 if (trg_reg != V0) {
74 __ Move(V0, trg_reg);
75 }
76 } else {
77 FRegister trg_reg = trg.AsFpuRegister<FRegister>();
78 if (trg_reg != F0) {
79 if (type == Primitive::kPrimFloat) {
80 __ MovS(F0, trg_reg);
81 } else {
82 __ MovD(F0, trg_reg);
83 }
84 }
85 }
86}
87
88static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS* codegen) {
89 InvokeDexCallingConventionVisitorMIPS calling_convention_visitor;
90 IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
91}
92
93// Slow-path for fallback (calling the managed code to handle the
94// intrinsic) in an intrinsified call. This will copy the arguments
95// into the positions for a regular call.
96//
97// Note: The actual parameters are required to be in the locations
98// given by the invoke's location summary. If an intrinsic
99// modifies those locations before a slowpath call, they must be
100// restored!
class IntrinsicSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit IntrinsicSlowPathMIPS(HInvoke* invoke) : SlowPathCodeMIPS(invoke), invoke_(invoke) { }

  // Emits the out-of-line code: save live registers, marshal the arguments,
  // call the managed implementation, copy the result to the expected output
  // location, restore registers, and branch back to the fast path.
  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS* codegen = down_cast<CodeGeneratorMIPS*>(codegen_in);

    __ Bind(GetEntryLabel());

    // Preserve every live register across the managed call.
    SaveLiveRegisters(codegen, invoke_->GetLocations());

    // Put the intrinsic's operands into calling-convention positions.
    MoveArguments(invoke_, codegen);

    // Both dispatch paths use A0 as the temporary for the called method.
    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(
          invoke_->AsInvokeStaticOrDirect(), Location::RegisterLocation(A0), this);
    } else {
      codegen->GenerateVirtualCall(
          invoke_->AsInvokeVirtual(), Location::RegisterLocation(A0), this);
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      // The output register must not have been saved/restored above, or the
      // restore would clobber the result we are about to produce.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS);
};
142
143#undef __
144
145bool IntrinsicLocationsBuilderMIPS::TryDispatch(HInvoke* invoke) {
146 Dispatch(invoke);
147 LocationSummary* res = invoke->GetLocations();
148 return res != nullptr && res->Intrinsified();
149}
150
151#define __ assembler->
152
Chris Larsen3f8bf652015-10-28 10:08:56 -0700153static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
154 LocationSummary* locations = new (arena) LocationSummary(invoke,
155 LocationSummary::kNoCall,
156 kIntrinsified);
157 locations->SetInAt(0, Location::RequiresFpuRegister());
158 locations->SetOut(Location::RequiresRegister());
159}
160
161static void MoveFPToInt(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
162 FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
163
164 if (is64bit) {
165 Register out_lo = locations->Out().AsRegisterPairLow<Register>();
166 Register out_hi = locations->Out().AsRegisterPairHigh<Register>();
167
168 __ Mfc1(out_lo, in);
Alexey Frunzebb9863a2016-01-11 15:51:16 -0800169 __ MoveFromFpuHigh(out_hi, in);
Chris Larsen3f8bf652015-10-28 10:08:56 -0700170 } else {
171 Register out = locations->Out().AsRegister<Register>();
172
173 __ Mfc1(out, in);
174 }
175}
176
// Raw FP-to-integer bit moves; no numeric conversion is performed.

// long java.lang.Double.doubleToRawLongBits(double)
void IntrinsicLocationsBuilderMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// int java.lang.Float.floatToRawIntBits(float)
void IntrinsicLocationsBuilderMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
194
195static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
196 LocationSummary* locations = new (arena) LocationSummary(invoke,
197 LocationSummary::kNoCall,
198 kIntrinsified);
199 locations->SetInAt(0, Location::RequiresRegister());
200 locations->SetOut(Location::RequiresFpuRegister());
201}
202
203static void MoveIntToFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
204 FRegister out = locations->Out().AsFpuRegister<FRegister>();
205
206 if (is64bit) {
207 Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
208 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
209
210 __ Mtc1(in_lo, out);
Alexey Frunzebb9863a2016-01-11 15:51:16 -0800211 __ MoveToFpuHigh(in_hi, out);
Chris Larsen3f8bf652015-10-28 10:08:56 -0700212 } else {
213 Register in = locations->InAt(0).AsRegister<Register>();
214
215 __ Mtc1(in, out);
216 }
217}
218
// Raw integer-to-FP bit moves; no numeric conversion is performed.

// double java.lang.Double.longBitsToDouble(long)
void IntrinsicLocationsBuilderMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

// float java.lang.Float.intBitsToFloat(int)
void IntrinsicLocationsBuilderMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
236
Chris Larsen86829602015-11-18 12:27:52 -0800237static void CreateIntToIntLocations(ArenaAllocator* arena,
238 HInvoke* invoke,
239 Location::OutputOverlap overlaps = Location::kNoOutputOverlap) {
Chris Larsen3f8bf652015-10-28 10:08:56 -0700240 LocationSummary* locations = new (arena) LocationSummary(invoke,
241 LocationSummary::kNoCall,
242 kIntrinsified);
243 locations->SetInAt(0, Location::RequiresRegister());
Chris Larsen86829602015-11-18 12:27:52 -0800244 locations->SetOut(Location::RequiresRegister(), overlaps);
Chris Larsen3f8bf652015-10-28 10:08:56 -0700245}
246
// Emits byte reversal (and optionally full bit reversal) for short, int or
// long operands. On R2+ this uses wsbh/rotr/seh (and bitswap on R6); on R1
// the same effect is synthesized from shifts, masks and ors. The scratch
// registers TMP and AT are clobbered. Register reuse is carefully ordered;
// see the in-line notes in the 64-bit R1 path.
static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       bool isR2OrNewer,
                       bool isR6,
                       bool reverseBits,
                       MipsAssembler* assembler) {
  DCHECK(type == Primitive::kPrimShort ||
         type == Primitive::kPrimInt ||
         type == Primitive::kPrimLong);
  // Bit reversal is only requested for int/long (Integer/Long.reverse()).
  DCHECK(type != Primitive::kPrimShort || !reverseBits);

  if (type == Primitive::kPrimShort) {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    if (isR2OrNewer) {
      // Swap the two bytes, then sign-extend the 16-bit result.
      __ Wsbh(out, in);
      __ Seh(out, out);
    } else {
      // R1: build the swapped, sign-extended halfword from shifts.
      __ Sll(TMP, in, 24);
      __ Sra(TMP, TMP, 16);
      __ Sll(out, in, 16);
      __ Srl(out, out, 24);
      __ Or(out, out, TMP);
    }
  } else if (type == Primitive::kPrimInt) {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    if (isR2OrNewer) {
      __ Rotr(out, in, 16);
      __ Wsbh(out, out);
    } else {
      // MIPS32r1
      // __ Rotr(out, in, 16);
      __ Sll(TMP, in, 16);
      __ Srl(out, in, 16);
      __ Or(out, out, TMP);
      // __ Wsbh(out, out);
      __ LoadConst32(AT, 0x00FF00FF);
      __ And(TMP, out, AT);
      __ Sll(TMP, TMP, 8);
      __ Srl(out, out, 8);
      __ And(out, out, AT);
      __ Or(out, out, TMP);
    }
    if (reverseBits) {
      if (isR6) {
        __ Bitswap(out, out);
      } else {
        // Reverse the bits within each byte by swapping progressively
        // smaller bit groups: nibbles, then bit pairs, then single bits.
        __ LoadConst32(AT, 0x0F0F0F0F);
        __ And(TMP, out, AT);
        __ Sll(TMP, TMP, 4);
        __ Srl(out, out, 4);
        __ And(out, out, AT);
        __ Or(out, TMP, out);
        __ LoadConst32(AT, 0x33333333);
        __ And(TMP, out, AT);
        __ Sll(TMP, TMP, 2);
        __ Srl(out, out, 2);
        __ And(out, out, AT);
        __ Or(out, TMP, out);
        __ LoadConst32(AT, 0x55555555);
        __ And(TMP, out, AT);
        __ Sll(TMP, TMP, 1);
        __ Srl(out, out, 1);
        __ And(out, out, AT);
        __ Or(out, TMP, out);
      }
    }
  } else if (type == Primitive::kPrimLong) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    if (isR2OrNewer) {
      // Swap words (lo<->hi) while byte-swapping each word.
      __ Rotr(AT, in_hi, 16);
      __ Rotr(TMP, in_lo, 16);
      __ Wsbh(out_lo, AT);
      __ Wsbh(out_hi, TMP);
    } else {
      // When calling CreateIntToIntLocations() we promised that the
      // use of the out_lo/out_hi wouldn't overlap with the use of
      // in_lo/in_hi. Be very careful not to write to out_lo/out_hi
      // until we're completely done reading from in_lo/in_hi.
      // __ Rotr(TMP, in_lo, 16);
      __ Sll(TMP, in_lo, 16);
      __ Srl(AT, in_lo, 16);
      __ Or(TMP, TMP, AT);             // Hold in TMP until it's safe
                                       // to write to out_hi.
      // __ Rotr(out_lo, in_hi, 16);
      __ Sll(AT, in_hi, 16);
      __ Srl(out_lo, in_hi, 16);       // Here we are finally done reading
                                       // from in_lo/in_hi so it's okay to
                                       // write to out_lo/out_hi.
      __ Or(out_lo, out_lo, AT);
      // __ Wsbh(out_hi, out_hi);
      __ LoadConst32(AT, 0x00FF00FF);
      __ And(out_hi, TMP, AT);
      __ Sll(out_hi, out_hi, 8);
      __ Srl(TMP, TMP, 8);
      __ And(TMP, TMP, AT);
      __ Or(out_hi, out_hi, TMP);
      // __ Wsbh(out_lo, out_lo);
      __ And(TMP, out_lo, AT);  // AT already holds the correct mask value
      __ Sll(TMP, TMP, 8);
      __ Srl(out_lo, out_lo, 8);
      __ And(out_lo, out_lo, AT);
      __ Or(out_lo, out_lo, TMP);
    }
    if (reverseBits) {
      if (isR6) {
        __ Bitswap(out_hi, out_hi);
        __ Bitswap(out_lo, out_lo);
      } else {
        // Same nibble/pair/bit swapping as the 32-bit case, applied to
        // both halves of the already byte-reversed value.
        __ LoadConst32(AT, 0x0F0F0F0F);
        __ And(TMP, out_hi, AT);
        __ Sll(TMP, TMP, 4);
        __ Srl(out_hi, out_hi, 4);
        __ And(out_hi, out_hi, AT);
        __ Or(out_hi, TMP, out_hi);
        __ And(TMP, out_lo, AT);
        __ Sll(TMP, TMP, 4);
        __ Srl(out_lo, out_lo, 4);
        __ And(out_lo, out_lo, AT);
        __ Or(out_lo, TMP, out_lo);
        __ LoadConst32(AT, 0x33333333);
        __ And(TMP, out_hi, AT);
        __ Sll(TMP, TMP, 2);
        __ Srl(out_hi, out_hi, 2);
        __ And(out_hi, out_hi, AT);
        __ Or(out_hi, TMP, out_hi);
        __ And(TMP, out_lo, AT);
        __ Sll(TMP, TMP, 2);
        __ Srl(out_lo, out_lo, 2);
        __ And(out_lo, out_lo, AT);
        __ Or(out_lo, TMP, out_lo);
        __ LoadConst32(AT, 0x55555555);
        __ And(TMP, out_hi, AT);
        __ Sll(TMP, TMP, 1);
        __ Srl(out_hi, out_hi, 1);
        __ And(out_hi, out_hi, AT);
        __ Or(out_hi, TMP, out_hi);
        __ And(TMP, out_lo, AT);
        __ Sll(TMP, TMP, 1);
        __ Srl(out_lo, out_lo, 1);
        __ And(out_lo, out_lo, AT);
        __ Or(out_lo, TMP, out_lo);
      }
    }
  }
}
400
// int java.lang.Integer.reverseBytes(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimInt,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ false,
             GetAssembler());
}

// long java.lang.Long.reverseBytes(long)
void IntrinsicLocationsBuilderMIPS::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimLong,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ false,
             GetAssembler());
}

// short java.lang.Short.reverseBytes(short)
void IntrinsicLocationsBuilderMIPS::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitShortReverseBytes(HInvoke* invoke) {
  // Byte swap only (reverseBits is false); GenReverse sign-extends the
  // 16-bit result as required for a Java short.
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimShort,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ false,
             GetAssembler());
}
442
Chris Larsene3845472015-11-18 12:27:15 -0800443static void GenNumberOfLeadingZeroes(LocationSummary* locations,
444 bool is64bit,
445 bool isR6,
446 MipsAssembler* assembler) {
447 Register out = locations->Out().AsRegister<Register>();
448 if (is64bit) {
449 Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
450 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
451
452 if (isR6) {
453 __ ClzR6(AT, in_hi);
454 __ ClzR6(TMP, in_lo);
455 __ Seleqz(TMP, TMP, in_hi);
456 } else {
457 __ ClzR2(AT, in_hi);
458 __ ClzR2(TMP, in_lo);
459 __ Movn(TMP, ZERO, in_hi);
460 }
461 __ Addu(out, AT, TMP);
462 } else {
463 Register in = locations->InAt(0).AsRegister<Register>();
464
465 if (isR6) {
466 __ ClzR6(out, in);
467 } else {
468 __ ClzR2(out, in);
469 }
470 }
471}
472
// int java.lang.Integer.numberOfLeadingZeros(int i)
void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ false, IsR6(), GetAssembler());
}

// int java.lang.Long.numberOfLeadingZeros(long i)
void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(), /* is64bit */ true, IsR6(), GetAssembler());
}
490
// Emits a trailing-zero count for int or long operands. The 64-bit case
// first selects the word whose trailing zeroes matter (lo if non-zero,
// else hi), counts within that word, then adds 32 when the low word was
// zero. Clobbers TMP (and requires out not to alias the input pair, see
// the kOutputOverlap request in the visitors below).
static void GenNumberOfTrailingZeroes(LocationSummary* locations,
                                      bool is64bit,
                                      bool isR6,
                                      MipsAssembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();
  Register in_lo;
  Register in;

  if (is64bit) {
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();

    in_lo = locations->InAt(0).AsRegisterPairLow<Register>();

    // If in_lo is zero then count the number of trailing zeroes in in_hi;
    // otherwise count the number of trailing zeroes in in_lo.
    // out = in_lo ? in_lo : in_hi;
    if (isR6) {
      __ Seleqz(out, in_hi, in_lo);
      __ Selnez(TMP, in_lo, in_lo);
      __ Or(out, out, TMP);
    } else {
      __ Movz(out, in_hi, in_lo);
      __ Movn(out, in_lo, in_lo);
    }

    in = out;
  } else {
    in = locations->InAt(0).AsRegister<Register>();
    // Give in_lo a dummy value to keep the compiler from complaining.
    // Since we only get here in the 32-bit case, this value will never
    // be used.
    in_lo = in;
  }

  if (isR6) {
    // We don't have an instruction to count the number of trailing zeroes.
    // Start by flipping the bits end-for-end so we can count the number of
    // leading zeroes instead.
    __ Rotr(out, in, 16);
    __ Wsbh(out, out);
    __ Bitswap(out, out);
    __ ClzR6(out, out);
  } else {
    // Convert trailing zeroes to trailing ones, and bits to their left
    // to zeroes.
    __ Addiu(TMP, in, -1);
    __ Xor(out, TMP, in);
    __ And(out, out, TMP);
    // Count number of leading zeroes.
    __ ClzR2(out, out);
    // Subtract number of leading zeroes from 32 to get number of trailing ones.
    // Remember that the trailing ones were formerly trailing zeroes.
    __ LoadConst32(TMP, 32);
    __ Subu(out, TMP, out);
  }

  if (is64bit) {
    // If in_lo is zero, then we counted the number of trailing zeroes in in_hi so we must add the
    // number of trailing zeroes in in_lo (32) to get the correct final count
    __ LoadConst32(TMP, 32);
    if (isR6) {
      __ Seleqz(TMP, TMP, in_lo);
    } else {
      __ Movn(TMP, ZERO, in_lo);
    }
    __ Addu(out, out, TMP);
  }
}
559
// int java.lang.Integer.numberOfTrailingZeros(int i)
void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  // GenNumberOfTrailingZeroes() writes `out` while the input may still be
  // needed, so the output must not share registers with the input.
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ false, IsR6(), GetAssembler());
}

// int java.lang.Long.numberOfTrailingZeros(long i)
void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  // GenNumberOfTrailingZeroes() writes `out` before it is done reading
  // in_lo, so the output must not share registers with the input pair.
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(), /* is64bit */ true, IsR6(), GetAssembler());
}
577
// Full bit reversal = byte reversal followed by in-byte bit reversal,
// both handled by GenReverse() with reverseBits == true.

// int java.lang.Integer.reverse(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimInt,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ true,
             GetAssembler());
}

// long java.lang.Long.reverse(long)
void IntrinsicLocationsBuilderMIPS::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimLong,
             IsR2OrNewer(),
             IsR6(),
             /* reverseBits */ true,
             GetAssembler());
}
605
Chris Larsenb74353a2015-11-20 09:07:09 -0800606static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
607 LocationSummary* locations = new (arena) LocationSummary(invoke,
608 LocationSummary::kNoCall,
609 kIntrinsified);
610 locations->SetInAt(0, Location::RequiresFpuRegister());
611 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
612}
613
// Emits a branch-free population count for int or long operands using the
// parallel ("SWAR") bit-counting algorithm. Clobbers AT and TMP; the long
// variant additionally uses the two temporaries requested by
// VisitLongBitCount().
static void GenBitCount(LocationSummary* locations,
                        Primitive::Type type,
                        bool isR6,
                        MipsAssembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();

  // https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel
  //
  // A generalization of the best bit counting method to integers of
  // bit-widths up to 128 (parameterized by type T) is this:
  //
  // v = v - ((v >> 1) & (T)~(T)0/3);                           // temp
  // v = (v & (T)~(T)0/15*3) + ((v >> 2) & (T)~(T)0/15*3);      // temp
  // v = (v + (v >> 4)) & (T)~(T)0/255*15;                      // temp
  // c = (T)(v * ((T)~(T)0/255)) >> (sizeof(T) - 1) * BITS_PER_BYTE; // count
  //
  // For comparison, for 32-bit quantities, this algorithm can be executed
  // using 20 MIPS instructions (the calls to LoadConst32() generate two
  // machine instructions each for the values being used in this algorithm).
  // A(n unrolled) loop-based algorithm required 25 instructions.
  //
  // For 64-bit quantities, this algorithm gets executed twice, (once
  // for in_lo, and again for in_hi), but saves a few instructions
  // because the mask values only have to be loaded once. Using this
  // algorithm the count for a 64-bit operand can be performed in 29
  // instructions compared to a loop-based algorithm which required 47
  // instructions.

  if (type == Primitive::kPrimInt) {
    Register in = locations->InAt(0).AsRegister<Register>();

    __ Srl(TMP, in, 1);
    __ LoadConst32(AT, 0x55555555);
    __ And(TMP, TMP, AT);
    __ Subu(TMP, in, TMP);
    __ LoadConst32(AT, 0x33333333);
    __ And(out, TMP, AT);
    __ Srl(TMP, TMP, 2);
    __ And(TMP, TMP, AT);
    __ Addu(TMP, out, TMP);
    __ Srl(out, TMP, 4);
    __ Addu(out, out, TMP);
    __ LoadConst32(AT, 0x0F0F0F0F);
    __ And(out, out, AT);
    __ LoadConst32(TMP, 0x01010101);
    if (isR6) {
      __ MulR6(out, out, TMP);
    } else {
      __ MulR2(out, out, TMP);
    }
    // The total count accumulates in the top byte of the product.
    __ Srl(out, out, 24);
  } else {
    DCHECK_EQ(type, Primitive::kPrimLong);
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register tmp_hi = locations->GetTemp(0).AsRegister<Register>();
    Register out_hi = locations->GetTemp(1).AsRegister<Register>();
    Register tmp_lo = TMP;
    Register out_lo = out;

    __ Srl(tmp_lo, in_lo, 1);
    __ Srl(tmp_hi, in_hi, 1);

    __ LoadConst32(AT, 0x55555555);

    __ And(tmp_lo, tmp_lo, AT);
    __ Subu(tmp_lo, in_lo, tmp_lo);

    __ And(tmp_hi, tmp_hi, AT);
    __ Subu(tmp_hi, in_hi, tmp_hi);

    __ LoadConst32(AT, 0x33333333);

    __ And(out_lo, tmp_lo, AT);
    __ Srl(tmp_lo, tmp_lo, 2);
    __ And(tmp_lo, tmp_lo, AT);
    __ Addu(tmp_lo, out_lo, tmp_lo);

    __ And(out_hi, tmp_hi, AT);
    __ Srl(tmp_hi, tmp_hi, 2);
    __ And(tmp_hi, tmp_hi, AT);
    __ Addu(tmp_hi, out_hi, tmp_hi);

    // Here we deviate from the original algorithm a bit. We've reached
    // the stage where the bitfields holding the subtotals are large
    // enough to hold the combined subtotals for both the low word, and
    // the high word. This means that we can add the subtotals for the
    // the high, and low words into a single word, and compute the final
    // result for both the high, and low words using fewer instructions.
    __ LoadConst32(AT, 0x0F0F0F0F);

    __ Addu(TMP, tmp_hi, tmp_lo);

    __ Srl(out, TMP, 4);
    __ And(out, out, AT);
    __ And(TMP, TMP, AT);
    __ Addu(out, out, TMP);

    __ LoadConst32(AT, 0x01010101);

    if (isR6) {
      __ MulR6(out, out, AT);
    } else {
      __ MulR2(out, out, AT);
    }

    // The total count accumulates in the top byte of the product.
    __ Srl(out, out, 24);
  }
}
723
// int java.lang.Integer.bitCount(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), Primitive::kPrimInt, IsR6(), GetAssembler());
}

// int java.lang.Long.bitCount(long)
void IntrinsicLocationsBuilderMIPS::VisitLongBitCount(HInvoke* invoke) {
  // The 64-bit path of GenBitCount() needs two extra core registers
  // (tmp_hi and out_hi) on top of the input pair and the 32-bit output.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke->GetLocations(), Primitive::kPrimLong, IsR6(), GetAssembler());
}
747
// Emits Math.abs() for float/double operands. On R6 a single abs.s/abs.d
// suffices; on pre-R6 targets the sign bit is cleared through the integer
// pipeline (see the NAN2008 discussion below). Clobbers TMP on pre-R6.
static void MathAbsFP(LocationSummary* locations,
                      bool is64bit,
                      bool isR2OrNewer,
                      bool isR6,
                      MipsAssembler* assembler) {
  FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister out = locations->Out().AsFpuRegister<FRegister>();

  // Note, as a "quality of implementation", rather than pure "spec compliance", we require that
  // Math.abs() clears the sign bit (but changes nothing else) for all numbers, including NaN
  // (signaling NaN may become quiet though).
  //
  // The ABS.fmt instructions (abs.s and abs.d) do exactly that when NAN2008=1 (R6). For this case,
  // both regular floating point numbers and NAN values are treated alike, only the sign bit is
  // affected by this instruction.
  // But when NAN2008=0 (R2 and before), the ABS.fmt instructions can't be used. For this case, any
  // NaN operand signals invalid operation. This means that other bits (not just sign bit) might be
  // changed when doing abs(NaN). Because of that, we clear sign bit in a different way.
  if (isR6) {
    if (is64bit) {
      __ AbsD(out, in);
    } else {
      __ AbsS(out, in);
    }
  } else {
    if (is64bit) {
      // Copy the mantissa/low word as-is, then clear the sign bit in the
      // high word via the integer pipeline.
      if (in != out) {
        __ MovD(out, in);
      }
      __ MoveFromFpuHigh(TMP, in);
      // ins instruction is not available for R1.
      if (isR2OrNewer) {
        __ Ins(TMP, ZERO, 31, 1);
      } else {
        __ Sll(TMP, TMP, 1);
        __ Srl(TMP, TMP, 1);
      }
      __ MoveToFpuHigh(TMP, out);
    } else {
      __ Mfc1(TMP, in);
      // ins instruction is not available for R1.
      if (isR2OrNewer) {
        __ Ins(TMP, ZERO, 31, 1);
      } else {
        __ Sll(TMP, TMP, 1);
        __ Srl(TMP, TMP, 1);
      }
      __ Mtc1(TMP, out);
    }
  }
}
799
// double java.lang.Math.abs(double)
void IntrinsicLocationsBuilderMIPS::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, IsR2OrNewer(), IsR6(), GetAssembler());
}

// float java.lang.Math.abs(float)
void IntrinsicLocationsBuilderMIPS::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, IsR2OrNewer(), IsR6(), GetAssembler());
}
817
// Emits branch-free code for Math.abs(int) / Math.abs(long):
//   mask = in >> 31 (arithmetic)  -- all ones if negative, zero otherwise
//   out  = (in ^ mask) - mask     -- conditional two's-complement negate
// Note: like the Java intrinsic, abs(MIN_VALUE) wraps to MIN_VALUE.
static void GenAbsInteger(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
  if (is64bit) {
    // A 64-bit value occupies a core register pair on MIPS32.
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    // The comments in this section show the analogous operations which would
    // be performed if we had 64-bit registers "in", and "out".
    // __ Dsra32(AT, in, 31);
    __ Sra(AT, in_hi, 31);
    // __ Xor(out, in, AT);
    __ Xor(TMP, in_lo, AT);
    __ Xor(out_hi, in_hi, AT);
    // __ Dsubu(out, out, AT);
    __ Subu(out_lo, TMP, AT);
    // Propagate the borrow from the low-word subtraction into the high word:
    // TMP (the pre-subtraction low word) < out_lo would mean no borrow;
    // out_lo < TMP means a borrow occurred, so add its complement effect.
    __ Sltu(TMP, out_lo, TMP);
    __ Addu(out_hi, out_hi, TMP);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    __ Sra(AT, in, 31);
    __ Xor(out, in, AT);
    __ Subu(out, out, AT);
  }
}
845
// int java.lang.Math.abs(int)
void IntrinsicLocationsBuilderMIPS::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAbsInt(HInvoke* invoke) {
  // Shared generator handles both widths; false selects the 32-bit path.
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

// long java.lang.Math.abs(long)
void IntrinsicLocationsBuilderMIPS::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAbsLong(HInvoke* invoke) {
  // true selects the register-pair (64-bit) path in GenAbsInteger.
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
863
// Emits code for Math.min/Math.max on float/double with Java semantics:
// a NaN input wins over a number, and for equal magnitudes -0.0 < +0.0.
// Two code shapes are produced: R6 uses CMP.cond.fmt/SEL/MIN/MAX, while
// pre-R6 uses the FP condition code (c.cond.fmt) with MOVT/MOVF moves.
static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        Primitive::Type type,
                        bool is_R6,
                        MipsAssembler* assembler) {
  FRegister out = locations->Out().AsFpuRegister<FRegister>();
  FRegister a = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister b = locations->InAt(1).AsFpuRegister<FRegister>();

  if (is_R6) {
    MipsLabel noNaNs;
    MipsLabel done;
    // Scratch for the NaN-select; use `out` directly when it doesn't alias
    // an input, otherwise fall back to FTMP and copy at the end.
    FRegister ftmp = ((out != a) && (out != b)) ? out : FTMP;

    // When Java computes min/max it prefers a NaN to a number; the
    // behavior of MIPSR6 is to prefer numbers to NaNs, i.e., if one of
    // the inputs is a NaN and the other is a valid number, the MIPS
    // instruction will return the number; Java wants the NaN value
    // returned. This is why there is extra logic preceding the use of
    // the MIPS min.fmt/max.fmt instructions. If either a, or b holds a
    // NaN, return the NaN, otherwise return the min/max.
    if (type == Primitive::kPrimDouble) {
      __ CmpUnD(FTMP, a, b);
      __ Bc1eqz(FTMP, &noNaNs);

      // One of the inputs is a NaN
      __ CmpEqD(ftmp, a, a);
      // If a == a then b is the NaN, otherwise a is the NaN.
      __ SelD(ftmp, a, b);

      if (ftmp != out) {
        __ MovD(out, ftmp);
      }

      __ B(&done);

      __ Bind(&noNaNs);

      if (is_min) {
        __ MinD(out, a, b);
      } else {
        __ MaxD(out, a, b);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimFloat);
      __ CmpUnS(FTMP, a, b);
      __ Bc1eqz(FTMP, &noNaNs);

      // One of the inputs is a NaN
      __ CmpEqS(ftmp, a, a);
      // If a == a then b is the NaN, otherwise a is the NaN.
      __ SelS(ftmp, a, b);

      if (ftmp != out) {
        __ MovS(out, ftmp);
      }

      __ B(&done);

      __ Bind(&noNaNs);

      if (is_min) {
        __ MinS(out, a, b);
      } else {
        __ MaxS(out, a, b);
      }
    }

    __ Bind(&done);
  } else {
    MipsLabel ordered;
    MipsLabel compare;
    MipsLabel select;
    MipsLabel done;

    // Unordered (i.e. at least one NaN) check; Bc1f falls through to the
    // NaN-handling code when the c.un condition is true.
    if (type == Primitive::kPrimDouble) {
      __ CunD(a, b);
    } else {
      DCHECK_EQ(type, Primitive::kPrimFloat);
      __ CunS(a, b);
    }
    __ Bc1f(&ordered);

    // a or b (or both) is a NaN. Return one, which is a NaN.
    // cc = (b == b); at `select` this picks a (the NaN) if b is a number,
    // otherwise picks b (which is then the/a NaN).
    if (type == Primitive::kPrimDouble) {
      __ CeqD(b, b);
    } else {
      __ CeqS(b, b);
    }
    __ B(&select);

    __ Bind(&ordered);

    // Neither is a NaN.
    // a == b? (-0.0 compares equal with +0.0)
    // If equal, handle zeroes, else compare further.
    if (type == Primitive::kPrimDouble) {
      __ CeqD(a, b);
    } else {
      __ CeqS(a, b);
    }
    __ Bc1f(&compare);

    // a == b either bit for bit or one is -0.0 and the other is +0.0.
    if (type == Primitive::kPrimDouble) {
      __ MoveFromFpuHigh(TMP, a);
      __ MoveFromFpuHigh(AT, b);
    } else {
      __ Mfc1(TMP, a);
      __ Mfc1(AT, b);
    }

    // Combine the sign bits: OR keeps a set sign bit (min favors -0.0),
    // AND keeps it only if both are set (max favors +0.0).
    if (is_min) {
      // -0.0 prevails over +0.0.
      __ Or(TMP, TMP, AT);
    } else {
      // +0.0 prevails over -0.0.
      __ And(TMP, TMP, AT);
    }

    if (type == Primitive::kPrimDouble) {
      __ Mfc1(AT, a);
      __ Mtc1(AT, out);
      __ MoveToFpuHigh(TMP, out);
    } else {
      __ Mtc1(TMP, out);
    }
    __ B(&done);

    __ Bind(&compare);

    // cc = "keep a"; MOVT/MOVF below select based on it.
    if (type == Primitive::kPrimDouble) {
      if (is_min) {
        // return (a <= b) ? a : b;
        __ ColeD(a, b);
      } else {
        // return (a >= b) ? a : b;
        __ ColeD(b, a);  // b <= a
      }
    } else {
      if (is_min) {
        // return (a <= b) ? a : b;
        __ ColeS(a, b);
      } else {
        // return (a >= b) ? a : b;
        __ ColeS(b, a);  // b <= a
      }
    }

    __ Bind(&select);

    // Exactly one of the two conditional moves takes effect.
    if (type == Primitive::kPrimDouble) {
      __ MovtD(out, a);
      __ MovfD(out, b);
    } else {
      __ MovtS(out, a);
      __ MovfS(out, b);
    }

    __ Bind(&done);
  }
}
1026
1027static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
1028 LocationSummary* locations = new (arena) LocationSummary(invoke,
1029 LocationSummary::kNoCall,
1030 kIntrinsified);
1031 locations->SetInAt(0, Location::RequiresFpuRegister());
1032 locations->SetInAt(1, Location::RequiresFpuRegister());
1033 locations->SetOut(Location::RequiresFpuRegister(), Location::kOutputOverlap);
1034}
1035
// The four FP min/max intrinsics below all delegate to GenMinMaxFP,
// parameterized only by direction (is_min) and element type.

// double java.lang.Math.min(double, double)
void IntrinsicLocationsBuilderMIPS::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ true,
              Primitive::kPrimDouble,
              IsR6(),
              GetAssembler());
}

// float java.lang.Math.min(float, float)
void IntrinsicLocationsBuilderMIPS::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ true,
              Primitive::kPrimFloat,
              IsR6(),
              GetAssembler());
}

// double java.lang.Math.max(double, double)
void IntrinsicLocationsBuilderMIPS::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ false,
              Primitive::kPrimDouble,
              IsR6(),
              GetAssembler());
}

// float java.lang.Math.max(float, float)
void IntrinsicLocationsBuilderMIPS::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ false,
              Primitive::kPrimFloat,
              IsR6(),
              GetAssembler());
}
1087
1088static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
1089 LocationSummary* locations = new (arena) LocationSummary(invoke,
1090 LocationSummary::kNoCall,
1091 kIntrinsified);
1092 locations->SetInAt(0, Location::RequiresRegister());
1093 locations->SetInAt(1, Location::RequiresRegister());
1094 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1095}
1096
// Emits code for Math.min/Math.max on int/long. On R6 the result is built
// with SELEQZ/SELNEZ pairs ORed together; pre-R6 uses MOVN/MOVZ conditional
// moves. Long values live in register pairs, so the comparison first decides
// on the high words and falls back to an unsigned low-word compare when the
// high words are equal.
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      Primitive::Type type,
                      bool is_R6,
                      MipsAssembler* assembler) {
  if (is_R6) {
    // Some architectures, such as ARM and MIPS (prior to r6), have a
    // conditional move instruction which only changes the target
    // (output) register if the condition is true (MIPS prior to r6 had
    // MOVF, MOVT, MOVN, and MOVZ). The SELEQZ and SELNEZ instructions
    // always change the target (output) register. If the condition is
    // true the output register gets the contents of the "rs" register;
    // otherwise, the output register is set to zero. One consequence
    // of this is that to implement something like "rd = c==0 ? rs : rt"
    // MIPS64r6 needs to use a pair of SELEQZ/SELNEZ instructions.
    // After executing this pair of instructions one of the output
    // registers from the pair will necessarily contain zero. Then the
    // code ORs the output registers from the SELEQZ/SELNEZ instructions
    // to get the final result.
    //
    // The initial test to see if the output register is same as the
    // first input register is needed to make sure that value in the
    // first input register isn't clobbered before we've finished
    // computing the output value. The logic in the corresponding else
    // clause performs the same task but makes sure the second input
    // register isn't clobbered in the event that it's the same register
    // as the output register; the else clause also handles the case
    // where the output register is distinct from both the first, and the
    // second input registers.
    if (type == Primitive::kPrimLong) {
      Register a_lo = locations->InAt(0).AsRegisterPairLow<Register>();
      Register a_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register b_lo = locations->InAt(1).AsRegisterPairLow<Register>();
      Register b_hi = locations->InAt(1).AsRegisterPairHigh<Register>();
      Register out_lo = locations->Out().AsRegisterPairLow<Register>();
      Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

      MipsLabel compare_done;

      if (a_lo == b_lo) {
        // Same operand on both sides: min/max is trivially that operand.
        if (out_lo != a_lo) {
          __ Move(out_lo, a_lo);
          __ Move(out_hi, a_hi);
        }
      } else {
        // After this sequence TMP = (b < a), signed 64-bit.
        __ Slt(TMP, b_hi, a_hi);
        __ Bne(b_hi, a_hi, &compare_done);

        __ Sltu(TMP, b_lo, a_lo);

        __ Bind(&compare_done);

        if (is_min) {
          __ Seleqz(AT, a_lo, TMP);
          __ Selnez(out_lo, b_lo, TMP);  // Safe even if out_lo == a_lo/b_lo
                                         // because at this point we're
                                         // done using a_lo/b_lo.
        } else {
          __ Selnez(AT, a_lo, TMP);
          __ Seleqz(out_lo, b_lo, TMP);  // ditto
        }
        __ Or(out_lo, out_lo, AT);
        if (is_min) {
          __ Seleqz(AT, a_hi, TMP);
          __ Selnez(out_hi, b_hi, TMP);  // ditto but for out_hi & a_hi/b_hi
        } else {
          __ Selnez(AT, a_hi, TMP);
          __ Seleqz(out_hi, b_hi, TMP);  // ditto but for out_hi & a_hi/b_hi
        }
        __ Or(out_hi, out_hi, AT);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimInt);
      Register a = locations->InAt(0).AsRegister<Register>();
      Register b = locations->InAt(1).AsRegister<Register>();
      Register out = locations->Out().AsRegister<Register>();

      if (a == b) {
        if (out != a) {
          __ Move(out, a);
        }
      } else {
        // AT = (b < a); select the appropriate operand into TMP/AT and OR.
        __ Slt(AT, b, a);
        if (is_min) {
          __ Seleqz(TMP, a, AT);
          __ Selnez(AT, b, AT);
        } else {
          __ Selnez(TMP, a, AT);
          __ Seleqz(AT, b, AT);
        }
        __ Or(out, TMP, AT);
      }
    }
  } else {
    if (type == Primitive::kPrimLong) {
      Register a_lo = locations->InAt(0).AsRegisterPairLow<Register>();
      Register a_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register b_lo = locations->InAt(1).AsRegisterPairLow<Register>();
      Register b_hi = locations->InAt(1).AsRegisterPairHigh<Register>();
      Register out_lo = locations->Out().AsRegisterPairLow<Register>();
      Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

      MipsLabel compare_done;

      if (a_lo == b_lo) {
        if (out_lo != a_lo) {
          __ Move(out_lo, a_lo);
          __ Move(out_hi, a_hi);
        }
      } else {
        // After this sequence TMP = (a < b), signed 64-bit.
        __ Slt(TMP, a_hi, b_hi);
        __ Bne(a_hi, b_hi, &compare_done);

        __ Sltu(TMP, a_lo, b_lo);

        __ Bind(&compare_done);

        // The out != a / out != b guards skip the redundant conditional move
        // when the output pair already aliases that input pair.
        if (is_min) {
          if (out_lo != a_lo) {
            __ Movn(out_hi, a_hi, TMP);
            __ Movn(out_lo, a_lo, TMP);
          }
          if (out_lo != b_lo) {
            __ Movz(out_hi, b_hi, TMP);
            __ Movz(out_lo, b_lo, TMP);
          }
        } else {
          if (out_lo != a_lo) {
            __ Movz(out_hi, a_hi, TMP);
            __ Movz(out_lo, a_lo, TMP);
          }
          if (out_lo != b_lo) {
            __ Movn(out_hi, b_hi, TMP);
            __ Movn(out_lo, b_lo, TMP);
          }
        }
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimInt);
      Register a = locations->InAt(0).AsRegister<Register>();
      Register b = locations->InAt(1).AsRegister<Register>();
      Register out = locations->Out().AsRegister<Register>();

      if (a == b) {
        if (out != a) {
          __ Move(out, a);
        }
      } else {
        // AT = (a < b).
        __ Slt(AT, a, b);
        if (is_min) {
          if (out != a) {
            __ Movn(out, a, AT);
          }
          if (out != b) {
            __ Movz(out, b, AT);
          }
        } else {
          if (out != a) {
            __ Movz(out, a, AT);
          }
          if (out != b) {
            __ Movn(out, b, AT);
          }
        }
      }
    }
  }
}
1265
// The four integer min/max intrinsics below all delegate to GenMinMax,
// parameterized only by direction (is_min) and element type.

// int java.lang.Math.min(int, int)
void IntrinsicLocationsBuilderMIPS::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(),
            /* is_min */ true,
            Primitive::kPrimInt,
            IsR6(),
            GetAssembler());
}

// long java.lang.Math.min(long, long)
void IntrinsicLocationsBuilderMIPS::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(),
            /* is_min */ true,
            Primitive::kPrimLong,
            IsR6(),
            GetAssembler());
}

// int java.lang.Math.max(int, int)
void IntrinsicLocationsBuilderMIPS::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(),
            /* is_min */ false,
            Primitive::kPrimInt,
            IsR6(),
            GetAssembler());
}

// long java.lang.Math.max(long, long)
void IntrinsicLocationsBuilderMIPS::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(),
            /* is_min */ false,
            Primitive::kPrimLong,
            IsR6(),
            GetAssembler());
}
1317
// double java.lang.Math.sqrt(double)
void IntrinsicLocationsBuilderMIPS::VisitMathSqrt(HInvoke* invoke) {
  // One FP input, one FP output; same location shape as the abs intrinsics.
  CreateFPToFPLocations(arena_, invoke);
}
1322
1323void IntrinsicCodeGeneratorMIPS::VisitMathSqrt(HInvoke* invoke) {
1324 LocationSummary* locations = invoke->GetLocations();
1325 MipsAssembler* assembler = GetAssembler();
1326 FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
1327 FRegister out = locations->Out().AsFpuRegister<FRegister>();
1328
1329 __ SqrtD(out, in);
1330}
1331
// byte libcore.io.Memory.peekByte(long address)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPeekByte(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  // The address is a Java long in a register pair; only the low 32 bits are
  // usable on a 32-bit processor.
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();

  // Byte accesses have no alignment constraints, so one lb works on all
  // ISA revisions.
  __ Lb(out, adr, 0);
}
1344
// short libcore.io.Memory.peekShort(long address)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPeekShortNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  // Only the low 32 bits of the 64-bit address are used (32-bit CPU).
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();

  if (IsR6()) {
    // R6 handles unaligned halfword loads in hardware.
    __ Lh(out, adr, 0);
  } else if (IsR2OrNewer()) {
    // Unlike for words, there are no lhl/lhr instructions to load
    // unaligned halfwords so the code loads individual bytes, in case
    // the address isn't halfword-aligned, and assembles them into a
    // signed halfword.
    __ Lb(AT, adr, 1);   // This byte must be sign-extended.
    __ Lb(out, adr, 0);  // This byte can be either sign-extended, or
                         // zero-extended because the following
                         // instruction overwrites the sign bits.
    __ Ins(out, AT, 8, 24);
  } else {
    // R1: no ins instruction either, so combine with shift + or instead.
    __ Lbu(AT, adr, 0);  // This byte must be zero-extended. If it's not
                         // the "or" instruction below will destroy the upper
                         // 24 bits of the final result.
    __ Lb(out, adr, 1);  // This byte must be sign-extended.
    __ Sll(out, out, 8);
    __ Or(out, out, AT);
  }
}
1376
// int libcore.io.Memory.peekInt(long address)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPeekIntNative(HInvoke* invoke) {
  // kOutputOverlap: the pre-R6 lwr/lwl pair below reads the address register
  // after partially writing the output, so they must not share a register.
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPeekIntNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  // Only the low 32 bits of the 64-bit address are used (32-bit CPU).
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();

  if (IsR6()) {
    // R6 handles unaligned word loads in hardware.
    __ Lw(out, adr, 0);
  } else {
    // Pre-R6: assemble a possibly unaligned word from two partial loads.
    __ Lwr(out, adr, 0);
    __ Lwl(out, adr, 3);
  }
}
1394
// long libcore.io.Memory.peekLong(long address)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPeekLongNative(HInvoke* invoke) {
  // kOutputOverlap: the output pair must not alias the address register,
  // which is still needed between the partial loads on pre-R6.
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPeekLongNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  // Only the low 32 bits of the 64-bit address are used (32-bit CPU).
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register out_lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register out_hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();

  if (IsR6()) {
    // Load the value as two words; the low word comes from the lower address.
    __ Lw(out_lo, adr, 0);
    __ Lw(out_hi, adr, 4);
  } else {
    // Pre-R6: lwr/lwl pairs handle potentially unaligned addresses.
    __ Lwr(out_lo, adr, 0);
    __ Lwl(out_lo, adr, 3);
    __ Lwr(out_hi, adr, 4);
    __ Lwl(out_hi, adr, 7);
  }
}
1416
1417static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
1418 LocationSummary* locations = new (arena) LocationSummary(invoke,
1419 LocationSummary::kNoCall,
1420 kIntrinsified);
1421 locations->SetInAt(0, Location::RequiresRegister());
1422 locations->SetInAt(1, Location::RequiresRegister());
1423}
1424
// void libcore.io.Memory.pokeByte(long address, byte value)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPokeByte(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  // Only the low 32 bits of the 64-bit address are used (32-bit CPU).
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register val = invoke->GetLocations()->InAt(1).AsRegister<Register>();

  // Byte stores have no alignment constraints; one sb works everywhere.
  __ Sb(val, adr, 0);
}
1437
// void libcore.io.Memory.pokeShort(long address, short value)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPokeShortNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  // Only the low 32 bits of the 64-bit address are used (32-bit CPU).
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register val = invoke->GetLocations()->InAt(1).AsRegister<Register>();

  if (IsR6()) {
    // R6 handles unaligned halfword stores in hardware.
    __ Sh(val, adr, 0);
  } else {
    // Unlike for words, there are no shl/shr instructions to store
    // unaligned halfwords so the code stores individual bytes, in case
    // the address isn't halfword-aligned.
    __ Sb(val, adr, 0);
    __ Srl(AT, val, 8);
    __ Sb(AT, adr, 1);
  }
}
1459
// void libcore.io.Memory.pokeInt(long address, int value)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPokeIntNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  // Only the low 32 bits of the 64-bit address are used (32-bit CPU).
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register val = invoke->GetLocations()->InAt(1).AsRegister<Register>();

  if (IsR6()) {
    // R6 handles unaligned word stores in hardware.
    __ Sw(val, adr, 0);
  } else {
    // Pre-R6: swr/swl store a possibly unaligned word in two parts.
    __ Swr(val, adr, 0);
    __ Swl(val, adr, 3);
  }
}
1477
// void libcore.io.Memory.pokeLong(long address, long value)
void IntrinsicLocationsBuilderMIPS::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMemoryPokeLongNative(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  // Only the low 32 bits of the 64-bit address are used (32-bit CPU);
  // the 64-bit value occupies a register pair.
  Register adr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  Register val_lo = invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>();
  Register val_hi = invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>();

  if (IsR6()) {
    // Store as two words; the low word goes to the lower address.
    __ Sw(val_lo, adr, 0);
    __ Sw(val_hi, adr, 4);
  } else {
    // Pre-R6: swr/swl pairs handle potentially unaligned addresses.
    __ Swr(val_lo, adr, 0);
    __ Swl(val_lo, adr, 3);
    __ Swr(val_hi, adr, 4);
    __ Swl(val_hi, adr, 7);
  }
}
1499
// Thread java.lang.Thread.currentThread()
void IntrinsicLocationsBuilderMIPS::VisitThreadCurrentThread(HInvoke* invoke) {
  // No inputs; the result only needs a core register.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorMIPS::VisitThreadCurrentThread(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();

  // Load the managed peer (the java.lang.Thread object) from the current
  // Thread, which the TR register points to.
  __ LoadFromOffset(kLoadWord,
                    out,
                    TR,
                    Thread::PeerOffset<kMipsPointerSize>().Int32Value());
}
1517
// Builds locations for Unsafe.getX(Object, long): unused receiver, base
// object and 64-bit offset in registers, result in a register. Reference
// getters may invoke the read barrier, which changes the call kind, the
// output-overlap requirement and (for Baker) adds a temp register.
static void CreateIntIntIntToIntLocations(ArenaAllocator* arena,
                                          HInvoke* invoke,
                                          Primitive::Type type) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           (can_call
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall),
                                                           kIntrinsified);
  if (can_call && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(),
                    (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in InstructionCodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}
1543
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Emits code for Unsafe.getX(Object, long): loads an int/long/reference from
// base + offset. Reference loads go through the configured read barrier;
// volatile loads are followed by a sync (memory barrier). 64-bit volatile
// loads are not supported (checked below).
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_R6,
                         CodeGeneratorMIPS* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot)) << type;
  MipsAssembler* assembler = codegen->GetAssembler();
  // Target register.
  Location trg_loc = locations->Out();
  // Object pointer.
  Location base_loc = locations->InAt(1);
  Register base = base_loc.AsRegister<Register>();
  // The "offset" argument is passed as a "long". Since this code is for
  // a 32-bit processor, we can only use 32-bit addresses, so we only
  // need the low 32-bits of offset.
  Location offset_loc = locations->InAt(2);
  Register offset_lo = offset_loc.AsRegisterPairLow<Register>();

  // The Baker read barrier path computes the address itself from base and
  // offset_loc, so TMP is only needed for the other paths.
  if (!(kEmitCompilerReadBarrier && kUseBakerReadBarrier && (type == Primitive::kPrimNot))) {
    __ Addu(TMP, base, offset_lo);
  }

  switch (type) {
    case Primitive::kPrimLong: {
      Register trg_lo = trg_loc.AsRegisterPairLow<Register>();
      Register trg_hi = trg_loc.AsRegisterPairHigh<Register>();
      CHECK(!is_volatile);  // TODO: support atomic 8-byte volatile loads.
      if (is_R6) {
        __ Lw(trg_lo, TMP, 0);
        __ Lw(trg_hi, TMP, 4);
      } else {
        __ Lwr(trg_lo, TMP, 0);
        __ Lwl(trg_lo, TMP, 3);
        __ Lwr(trg_hi, TMP, 4);
        __ Lwl(trg_hi, TMP, 7);
      }
      break;
    }

    case Primitive::kPrimInt: {
      Register trg = trg_loc.AsRegister<Register>();
      if (is_R6) {
        __ Lw(trg, TMP, 0);
      } else {
        __ Lwr(trg, TMP, 0);
        __ Lwl(trg, TMP, 3);
      }
      if (is_volatile) {
        __ Sync(0);
      }
      break;
    }

    case Primitive::kPrimNot: {
      Register trg = trg_loc.AsRegister<Register>();
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          // Fast-path Baker read barrier: the load and mark check are
          // emitted by the codegen helper.
          Location temp = locations->GetTemp(0);
          codegen->GenerateReferenceLoadWithBakerReadBarrier(invoke,
                                                             trg_loc,
                                                             base,
                                                             /* offset */ 0U,
                                                             /* index */ offset_loc,
                                                             TIMES_1,
                                                             temp,
                                                             /* needs_null_check */ false);
          if (is_volatile) {
            __ Sync(0);
          }
        } else {
          // Non-Baker read barrier: plain load followed by the slow-path
          // barrier on the loaded reference.
          if (is_R6) {
            __ Lw(trg, TMP, 0);
          } else {
            __ Lwr(trg, TMP, 0);
            __ Lwl(trg, TMP, 3);
          }
          if (is_volatile) {
            __ Sync(0);
          }
          codegen->GenerateReadBarrierSlow(invoke,
                                           trg_loc,
                                           trg_loc,
                                           base_loc,
                                           /* offset */ 0U,
                                           /* index */ offset_loc);
        }
      } else {
        // No read barrier: plain load, then unpoison if heap poisoning is on.
        if (is_R6) {
          __ Lw(trg, TMP, 0);
        } else {
          __ Lwr(trg, TMP, 0);
          __ Lwl(trg, TMP, 3);
        }
        if (is_volatile) {
          __ Sync(0);
        }
        __ MaybeUnpoisonHeapReference(trg);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type " << type;
      UNREACHABLE();
  }
}
1655
// The Unsafe getters below all delegate to GenUnsafeGet, parameterized by
// element type and volatility.

// int sun.misc.Unsafe.getInt(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, IsR6(), codegen_);
}

// int sun.misc.Unsafe.getIntVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, IsR6(), codegen_);
}

// long sun.misc.Unsafe.getLong(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, IsR6(), codegen_);
}

// Object sun.misc.Unsafe.getObject(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, IsR6(), codegen_);
}

// Object sun.misc.Unsafe.getObjectVolatile(Object o, long offset)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorMIPS::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, IsR6(), codegen_);
}
1700
1701static void CreateIntIntIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
1702 LocationSummary* locations = new (arena) LocationSummary(invoke,
1703 LocationSummary::kNoCall,
1704 kIntrinsified);
1705 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1706 locations->SetInAt(1, Location::RequiresRegister());
1707 locations->SetInAt(2, Location::RequiresRegister());
1708 locations->SetInAt(3, Location::RequiresRegister());
1709}
1710
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Emits the store for all Unsafe.put* flavors (int/long/object, plain,
// ordered or volatile). The effective address is base + low-32-bits(offset),
// computed into TMP. Barriers (Sync(0)) are emitted before the store for
// ordered/volatile puts and after it for volatile puts.
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         bool is_R6,
                         CodeGeneratorMIPS* codegen) {
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot)) << type;
  MipsAssembler* assembler = codegen->GetAssembler();
  // Object pointer.
  Register base = locations->InAt(1).AsRegister<Register>();
  // The "offset" argument is passed as a "long", i.e., it's 64-bits in
  // size. Since this code is for a 32-bit processor, we can only use
  // 32-bit addresses, so we only need the low 32-bits of offset.
  Register offset_lo = locations->InAt(2).AsRegisterPairLow<Register>();

  __ Addu(TMP, base, offset_lo);
  if (is_volatile || is_ordered) {
    // Pre-store barrier: orders prior accesses before this store.
    __ Sync(0);
  }
  if ((type == Primitive::kPrimInt) || (type == Primitive::kPrimNot)) {
    Register value = locations->InAt(3).AsRegister<Register>();

    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      // Poison into scratch register AT so the caller's value register is untouched.
      __ PoisonHeapReference(AT, value);
      value = AT;
    }

    if (is_R6) {
      __ Sw(value, TMP, 0);
    } else {
      // Pre-R6: use the unaligned store pair (store-word-right/left) since the
      // address is not statically known to be word-aligned.
      __ Swr(value, TMP, 0);
      __ Swl(value, TMP, 3);
    }
  } else {
    // 64-bit store as two 32-bit halves (register pair).
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();
    CHECK(!is_volatile);  // TODO: support atomic 8-byte volatile stores.
    if (is_R6) {
      __ Sw(value_lo, TMP, 0);
      __ Sw(value_hi, TMP, 4);
    } else {
      __ Swr(value_lo, TMP, 0);
      __ Swl(value_lo, TMP, 3);
      __ Swr(value_hi, TMP, 4);
      __ Swl(value_hi, TMP, 7);
    }
  }

  if (is_volatile) {
    // Post-store barrier for volatile semantics.
    __ Sync(0);
  }

  if (type == Primitive::kPrimNot) {
    // Reference store: dirty the GC card for the holding object.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, locations->InAt(3).AsRegister<Register>(), value_can_be_null);
  }
}
1772
// void sun.misc.Unsafe.putInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePut(HInvoke* invoke) {
  // Three register inputs (object, offset, value); no output.
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}
1777
1778void IntrinsicCodeGeneratorMIPS::VisitUnsafePut(HInvoke* invoke) {
1779 GenUnsafePut(invoke->GetLocations(),
1780 Primitive::kPrimInt,
1781 /* is_volatile */ false,
1782 /* is_ordered */ false,
1783 IsR6(),
1784 codegen_);
1785}
1786
// void sun.misc.Unsafe.putOrderedInt(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutOrdered(HInvoke* invoke) {
  // Same location layout as the plain put; ordering is a codegen concern.
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}
1791
1792void IntrinsicCodeGeneratorMIPS::VisitUnsafePutOrdered(HInvoke* invoke) {
1793 GenUnsafePut(invoke->GetLocations(),
1794 Primitive::kPrimInt,
1795 /* is_volatile */ false,
1796 /* is_ordered */ true,
1797 IsR6(),
1798 codegen_);
1799}
1800
// void sun.misc.Unsafe.putIntVolatile(Object o, long offset, int x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutVolatile(HInvoke* invoke) {
  // Same location layout as the plain put; volatility is a codegen concern.
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}
1805
1806void IntrinsicCodeGeneratorMIPS::VisitUnsafePutVolatile(HInvoke* invoke) {
1807 GenUnsafePut(invoke->GetLocations(),
1808 Primitive::kPrimInt,
1809 /* is_volatile */ true,
1810 /* is_ordered */ false,
1811 IsR6(),
1812 codegen_);
1813}
1814
// void sun.misc.Unsafe.putObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutObject(HInvoke* invoke) {
  // Reference puts share the generic put layout; GC card marking happens at codegen.
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}
1819
1820void IntrinsicCodeGeneratorMIPS::VisitUnsafePutObject(HInvoke* invoke) {
1821 GenUnsafePut(invoke->GetLocations(),
1822 Primitive::kPrimNot,
1823 /* is_volatile */ false,
1824 /* is_ordered */ false,
1825 IsR6(),
1826 codegen_);
1827}
1828
// void sun.misc.Unsafe.putOrderedObject(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  // Same location layout as the plain put.
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}
1833
1834void IntrinsicCodeGeneratorMIPS::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
1835 GenUnsafePut(invoke->GetLocations(),
1836 Primitive::kPrimNot,
1837 /* is_volatile */ false,
1838 /* is_ordered */ true,
1839 IsR6(),
1840 codegen_);
1841}
1842
// void sun.misc.Unsafe.putObjectVolatile(Object o, long offset, Object x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  // Same location layout as the plain put.
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}
1847
1848void IntrinsicCodeGeneratorMIPS::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
1849 GenUnsafePut(invoke->GetLocations(),
1850 Primitive::kPrimNot,
1851 /* is_volatile */ true,
1852 /* is_ordered */ false,
1853 IsR6(),
1854 codegen_);
1855}
1856
// void sun.misc.Unsafe.putLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutLong(HInvoke* invoke) {
  // The 64-bit value occupies a register pair; layout is otherwise identical.
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}
1861
1862void IntrinsicCodeGeneratorMIPS::VisitUnsafePutLong(HInvoke* invoke) {
1863 GenUnsafePut(invoke->GetLocations(),
1864 Primitive::kPrimLong,
1865 /* is_volatile */ false,
1866 /* is_ordered */ false,
1867 IsR6(),
1868 codegen_);
1869}
1870
// void sun.misc.Unsafe.putOrderedLong(Object o, long offset, long x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  // Same location layout as the plain put.
  CreateIntIntIntIntToVoidLocations(arena_, invoke);
}
1875
1876void IntrinsicCodeGeneratorMIPS::VisitUnsafePutLongOrdered(HInvoke* invoke) {
1877 GenUnsafePut(invoke->GetLocations(),
1878 Primitive::kPrimLong,
1879 /* is_volatile */ false,
1880 /* is_ordered */ true,
1881 IsR6(),
1882 codegen_);
1883}
1884
// Builds the LocationSummary for the compareAndSwap intrinsics: unused
// receiver plus four register inputs (object, offset, expected, new value)
// and a register result. When Baker read barriers are active and the target
// is UnsafeCASObject, the read-barrier code can call a slow path and needs
// one extra temp.
static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena, HInvoke* invoke) {
  bool can_call = kEmitCompilerReadBarrier &&
      kUseBakerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           (can_call
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall),
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());

  // Temporary register used in CAS by (Baker) read barrier.
  if (can_call) {
    locations->AddTemp(Location::RequiresRegister());
  }
}
1906
// Note that the caller must supply a properly aligned memory address.
// If they do not, the behavior is undefined (atomicity not guaranteed, exception may occur).
//
// Emits a compare-and-swap on [base + low-32-bits(offset)] using an LL/SC
// (load-linked / store-conditional) retry loop, bracketed by Sync(0)
// barriers. `out` receives the boolean result: 1 if the swap happened.
// Only 32-bit types (int and reference) are supported.
static void GenCas(HInvoke* invoke, Primitive::Type type, CodeGeneratorMIPS* codegen) {
  MipsAssembler* assembler = codegen->GetAssembler();
  LocationSummary* locations = invoke->GetLocations();
  bool isR6 = codegen->GetInstructionSetFeatures().IsR6();
  Register base = locations->InAt(1).AsRegister<Register>();
  Location offset_loc = locations->InAt(2);
  Register offset_lo = offset_loc.AsRegisterPairLow<Register>();
  Register expected = locations->InAt(3).AsRegister<Register>();
  Register value = locations->InAt(4).AsRegister<Register>();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();

  // `out` is clobbered inside the loop; it must not alias the inputs still
  // needed after the first load-linked.
  DCHECK_NE(base, out);
  DCHECK_NE(offset_lo, out);
  DCHECK_NE(expected, out);

  if (type == Primitive::kPrimNot) {
    // The only read barrier implementation supporting the
    // UnsafeCASObject intrinsic is the Baker-style read barriers.
    DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);

    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      Location temp = locations->GetTemp(0);
      // Need to make sure the reference stored in the field is a to-space
      // one before attempting the CAS or the CAS could fail incorrectly.
      codegen->GenerateReferenceLoadWithBakerReadBarrier(
          invoke,
          out_loc,  // Unused, used only as a "temporary" within the read barrier.
          base,
          /* offset */ 0u,
          /* index */ offset_loc,
          ScaleFactor::TIMES_1,
          temp,
          /* needs_null_check */ false,
          /* always_update_field */ true);
    }
  }

  MipsLabel loop_head, exit_loop;
  __ Addu(TMP, base, offset_lo);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    // The value in memory is poisoned, so poison the comparands to match.
    __ PoisonHeapReference(expected);
    // Do not poison `value`, if it is the same register as
    // `expected`, which has just been poisoned.
    if (value != expected) {
      __ PoisonHeapReference(value);
    }
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  __ Sync(0);
  __ Bind(&loop_head);
  if ((type == Primitive::kPrimInt) || (type == Primitive::kPrimNot)) {
    if (isR6) {
      __ LlR6(out, TMP);
    } else {
      __ LlR2(out, TMP);
    }
  } else {
    LOG(FATAL) << "Unsupported op size " << type;
    UNREACHABLE();
  }
  __ Subu(out, out, expected);          // If we didn't get the 'expected'
  __ Sltiu(out, out, 1);                // value, set 'out' to false, and
  __ Beqz(out, &exit_loop);             // return.
  __ Move(out, value);  // Use 'out' for the 'store conditional' instruction.
                        // If we use 'value' directly, we would lose 'value'
                        // in the case that the store fails.  Whether the
                        // store succeeds, or fails, it will load the
                        // correct Boolean value into the 'out' register.
  // This test isn't really necessary. We only support Primitive::kPrimInt,
  // Primitive::kPrimNot, and we already verified that we're working on one
  // of those two types. It's left here in case the code needs to support
  // other types in the future.
  if ((type == Primitive::kPrimInt) || (type == Primitive::kPrimNot)) {
    if (isR6) {
      __ ScR6(out, TMP);
    } else {
      __ ScR2(out, TMP);
    }
  }
  __ Beqz(out, &loop_head);     // If we couldn't do the read-modify-write
                                // cycle atomically then retry.
  __ Bind(&exit_loop);
  __ Sync(0);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    // Restore the caller-visible (unpoisoned) form of the input registers.
    __ UnpoisonHeapReference(expected);
    // Do not unpoison `value`, if it is the same register as
    // `expected`, which has just been unpoisoned.
    if (value != expected) {
      __ UnpoisonHeapReference(value);
    }
  }
}
2014
// boolean sun.misc.Unsafe.compareAndSwapInt(Object o, long offset, int expected, int x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeCASInt(HInvoke* invoke) {
  // Four register inputs plus a register result; no read-barrier temp for the int variant.
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
2019
void IntrinsicCodeGeneratorMIPS::VisitUnsafeCASInt(HInvoke* invoke) {
  // 32-bit integer CAS via the shared LL/SC loop.
  GenCas(invoke, Primitive::kPrimInt, codegen_);
}
2023
// boolean sun.misc.Unsafe.compareAndSwapObject(Object o, long offset, Object expected, Object x)
void IntrinsicLocationsBuilderMIPS::VisitUnsafeCASObject(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // UnsafeCASObject intrinsic is the Baker-style read barriers.
  // Returning early without building a LocationSummary leaves the invoke
  // un-intrinsified (presumably falling back to the regular call path —
  // NOTE(review): confirm against the intrinsic framework).
  if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
    return;
  }

  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
2034
void IntrinsicCodeGeneratorMIPS::VisitUnsafeCASObject(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // UnsafeCASObject intrinsic is the Baker-style read barriers.
  DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

  // Reference CAS via the shared LL/SC loop (includes GC card marking).
  GenCas(invoke, Primitive::kPrimNot, codegen_);
}
2042
Chris Larsencf283da2016-01-19 16:45:35 -08002043// int java.lang.String.compareTo(String anotherString)
2044void IntrinsicLocationsBuilderMIPS::VisitStringCompareTo(HInvoke* invoke) {
2045 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescufca16662016-07-14 09:21:59 +01002046 LocationSummary::kCallOnMainAndSlowPath,
Chris Larsencf283da2016-01-19 16:45:35 -08002047 kIntrinsified);
2048 InvokeRuntimeCallingConvention calling_convention;
2049 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2050 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2051 Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
2052 locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
2053}
2054
void IntrinsicCodeGeneratorMIPS::VisitStringCompareTo(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // A null argument takes the slow path (the runtime stub is reached either way;
  // the slow path handles the exceptional case).
  Register argument = locations->InAt(1).AsRegister<Register>();
  SlowPathCodeMIPS* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqz(argument, slow_path->GetEntryLabel());
  codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
  __ Bind(slow_path->GetExitLabel());
}
2069
Chris Larsen16ba2b42015-11-02 10:58:31 -08002070// boolean java.lang.String.equals(Object anObject)
2071void IntrinsicLocationsBuilderMIPS::VisitStringEquals(HInvoke* invoke) {
2072 LocationSummary* locations = new (arena_) LocationSummary(invoke,
2073 LocationSummary::kNoCall,
2074 kIntrinsified);
2075 locations->SetInAt(0, Location::RequiresRegister());
2076 locations->SetInAt(1, Location::RequiresRegister());
2077 locations->SetOut(Location::RequiresRegister());
2078
2079 // Temporary registers to store lengths of strings and for calculations.
2080 locations->AddTemp(Location::RequiresRegister());
2081 locations->AddTemp(Location::RequiresRegister());
2082 locations->AddTemp(Location::RequiresRegister());
2083}
2084
// Emits an inline String.equals: short-circuits on identity and null, rejects
// non-String arguments and length/compression mismatches, then compares the
// character data one word (4 bytes) at a time.
void IntrinsicCodeGeneratorMIPS::VisitStringEquals(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp1 = locations->GetTemp(0).AsRegister<Register>();
  Register temp2 = locations->GetTemp(1).AsRegister<Register>();
  Register temp3 = locations->GetTemp(2).AsRegister<Register>();

  MipsLabel loop;
  MipsLabel end;
  MipsLabel return_true;
  MipsLabel return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this", and the register
  // containing the pointer to "anObject" are the same register then
  // "this", and "anObject" are the same object and we can
  // short-circuit the logic to a true result.
  if (str == arg) {
    __ LoadConst32(out, 1);
    return;
  }
  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ Beqz(arg, &return_false);
  }

  // Reference equality check, return true if same reference.
  __ Beq(str, arg, &return_true);

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to receiver's class field.
    __ Lw(temp1, str, class_offset);
    __ Lw(temp2, arg, class_offset);
    __ Bne(temp1, temp2, &return_false);
  }

  // Load `count` fields of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if `count` fields are equal, return false if they're not.
  // Also compares the compression style, if differs return false.
  __ Bne(temp1, temp2, &return_false);
  // Return true if both strings are empty. Even with string compression `count == 0` means empty.
  static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                "Expecting 0=compressed, 1=uncompressed");
  __ Beqz(temp1, &return_true);

  // Don't overwrite input registers
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 4 bytes at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  // For string compression, calculate the number of bytes to compare (not chars).
  if (mirror::kUseStringCompression) {
    // Extract compression flag.
    if (IsR2OrNewer()) {
      __ Ext(temp2, temp1, 0, 1);
    } else {
      // Pre-R2 has no bit-extract instruction; isolate bit 0 with a shift pair.
      __ Sll(temp2, temp1, 31);
      __ Srl(temp2, temp2, 31);
    }
    __ Srl(temp1, temp1, 1);             // Extract length.
    __ Sllv(temp1, temp1, temp2);        // Double the byte count if uncompressed.
  }

  // Loop to compare strings 4 bytes at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to kObjectAlignment.
  __ Bind(&loop);
  __ Lw(out, TMP, value_offset);
  __ Lw(temp2, temp3, value_offset);
  __ Bne(out, temp2, &return_false);
  __ Addiu(TMP, TMP, 4);
  __ Addiu(temp3, temp3, 4);
  // With string compression, we have compared 4 bytes, otherwise 2 chars.
  __ Addiu(temp1, temp1, mirror::kUseStringCompression ? -4 : -2);
  __ Bgtz(temp1, &loop);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadConst32(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst32(out, 0);
  __ Bind(&end);
}
2192
// Common helper for String.indexOf(ch) and String.indexOf(ch, fromIndex).
// Dispatches to the kQuickIndexOf runtime stub; code points that don't fit in
// 16 bits (supplementary characters) are routed to a generic slow path since
// the stub only handles single-halfword (UTF-16 code unit) searches. When
// `start_at_zero` is set, the start-index argument register is zeroed here.
static void GenerateStringIndexOf(HInvoke* invoke,
                                  bool start_at_zero,
                                  MipsAssembler* assembler,
                                  CodeGeneratorMIPS* codegen,
                                  ArenaAllocator* allocator) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = start_at_zero ? locations->GetTemp(0).AsRegister<Register>() : TMP;

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
  SlowPathCodeMIPS* slow_path = nullptr;
  HInstruction* code_point = invoke->InputAt(1);
  if (code_point->IsIntConstant()) {
    if (!IsUint<16>(code_point->AsIntConstant()->GetValue())) {
      // Always needs the slow-path. We could directly dispatch to it,
      // but this case should be rare, so for simplicity just put the
      // full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathMIPS(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else if (code_point->GetType() != Primitive::kPrimChar) {
    Register char_reg = locations->InAt(1).AsRegister<Register>();
    // The "bltu" conditional branch tests to see if the character value
    // fits in a valid 16-bit (MIPS halfword) value. If it doesn't then
    // the character being searched for, if it exists in the string, is
    // encoded using UTF-16 and stored in the string as two (16-bit)
    // halfwords. Currently the assembly code used to implement this
    // intrinsic doesn't support searching for a character stored as
    // two halfwords so we fallback to using the generic implementation
    // of indexOf().
    __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());
    slow_path = new (allocator) IntrinsicSlowPathMIPS(invoke);
    codegen->AddSlowPath(slow_path);
    __ Bltu(tmp_reg, char_reg, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    // For indexOf(ch) the temp was pinned to the third argument register (A2).
    DCHECK_EQ(tmp_reg, A2);
    // Start-index = 0.
    __ Clear(tmp_reg);
  }

  codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
2246
// int java.lang.String.indexOf(int ch)
void IntrinsicLocationsBuilderMIPS::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime
  // calling convention. So it's best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));

  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
  // Pinned to the third argument register so GenerateStringIndexOf can zero it in place.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}
2263
void IntrinsicCodeGeneratorMIPS::VisitStringIndexOf(HInvoke* invoke) {
  // Search starts at index 0; the helper zeroes the start-index argument.
  GenerateStringIndexOf(invoke,
                        /* start_at_zero */ true,
                        GetAssembler(),
                        codegen_,
                        GetAllocator());
}
2271
// int java.lang.String.indexOf(int ch, int fromIndex)
void IntrinsicLocationsBuilderMIPS::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime
  // calling convention. So it's best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}
2289
void IntrinsicCodeGeneratorMIPS::VisitStringIndexOfAfter(HInvoke* invoke) {
  // Caller supplies the start index in the third argument register.
  GenerateStringIndexOf(invoke,
                        /* start_at_zero */ false,
                        GetAssembler(),
                        codegen_,
                        GetAllocator());
}
2297
// java.lang.StringFactory.newStringFromBytes(byte[] data, int high, int offset, int byteCount)
void IntrinsicLocationsBuilderMIPS::VisitStringNewStringFromBytes(HInvoke* invoke) {
  // Delegates entirely to the runtime, so all four inputs are pinned to the
  // runtime argument registers and the result to its return register.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
}
2311
void IntrinsicCodeGeneratorMIPS::VisitStringNewStringFromBytes(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // A null byte array takes the slow path (which handles the exceptional case).
  Register byte_array = locations->InAt(0).AsRegister<Register>();
  SlowPathCodeMIPS* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqz(byte_array, slow_path->GetEntryLabel());
  codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
  __ Bind(slow_path->GetExitLabel());
}
2323
// java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
void IntrinsicLocationsBuilderMIPS::VisitStringNewStringFromChars(HInvoke* invoke) {
  // Main-call only: no slow path is needed because the char[] argument is
  // known non-null at this point (see the codegen visitor below).
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainOnly,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
}
2336
void IntrinsicCodeGeneratorMIPS::VisitStringNewStringFromChars(HInvoke* invoke) {
  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
}
2346
// java.lang.StringFactory.newStringFromString(String toCopy)
void IntrinsicLocationsBuilderMIPS::VisitStringNewStringFromString(HInvoke* invoke) {
  // Single input pinned to the first runtime argument register; the result
  // comes back in the runtime return register.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  Location outLocation = calling_convention.GetReturnLocation(Primitive::kPrimInt);
  locations->SetOut(Location::RegisterLocation(outLocation.AsRegister<Register>()));
}
2357
void IntrinsicCodeGeneratorMIPS::VisitStringNewStringFromString(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // A null source string takes the slow path (which handles the exceptional case).
  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  SlowPathCodeMIPS* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS(invoke);
  codegen_->AddSlowPath(slow_path);
  __ Beqz(string_to_copy, slow_path->GetEntryLabel());
  codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}
2369
// Emits Float.isInfinite / Double.isInfinite. On R6, the FP CLASS instruction
// reports the operand's class directly; `out` is set to 1 iff the
// positive- or negative-infinity class bit is set. Pre-R6, infinity is
// detected bitwise: XOR against the +infinity bit pattern, drop the sign bit
// with a left shift, and (for doubles) OR in the low word — the result is
// zero only for +/-infinity.
static void GenIsInfinite(LocationSummary* locations,
                          const Primitive::Type type,
                          const bool isR6,
                          MipsAssembler* assembler) {
  FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
  Register out = locations->Out().AsRegister<Register>();

  DCHECK(type == Primitive::kPrimFloat || type == Primitive::kPrimDouble);

  if (isR6) {
    if (type == Primitive::kPrimDouble) {
      __ ClassD(FTMP, in);
    } else {
      __ ClassS(FTMP, in);
    }
    __ Mfc1(out, FTMP);
    // Keep only the two infinity class bits, then convert non-zero to 1.
    __ Andi(out, out, kPositiveInfinity | kNegativeInfinity);
    __ Sltu(out, ZERO, out);
  } else {
    // If one, or more, of the exponent bits is zero, then the number can't be infinite.
    if (type == Primitive::kPrimDouble) {
      __ MoveFromFpuHigh(TMP, in);
      __ LoadConst32(AT, High32Bits(kPositiveInfinityDouble));
    } else {
      __ Mfc1(TMP, in);
      __ LoadConst32(AT, kPositiveInfinityFloat);
    }
    // XOR leaves zero (in the checked bits) exactly when the word matches +infinity.
    __ Xor(TMP, TMP, AT);

    // Shift out the sign bit so -infinity matches as well.
    __ Sll(TMP, TMP, 1);

    if (type == Primitive::kPrimDouble) {
      // The low 32 bits of a double must also be zero for infinity.
      __ Mfc1(AT, in);
      __ Or(TMP, TMP, AT);
    }
    // If any of the significand bits are one, then the number is not infinite.
    __ Sltiu(out, TMP, 1);
  }
}
2409
// boolean java.lang.Float.isInfinite(float)
void IntrinsicLocationsBuilderMIPS::VisitFloatIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), Primitive::kPrimFloat, IsR6(), GetAssembler());
}

// boolean java.lang.Double.isInfinite(double)
void IntrinsicLocationsBuilderMIPS::VisitDoubleIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

// Same expansion as the float flavor, parameterized on the 64-bit type.
void IntrinsicCodeGeneratorMIPS::VisitDoubleIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), Primitive::kPrimDouble, IsR6(), GetAssembler());
}
2427
// Emits code computing Integer/Long.highestOneBit(): isolates the most
// significant set bit of the input (result is 0 for a 0 input). The 32-bit
// pattern is 0x80000000 >> clz(in), masked with the input to handle in == 0.
static void GenHighestOneBit(LocationSummary* locations,
                             const Primitive::Type type,
                             bool isR6,
                             MipsAssembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  if (type == Primitive::kPrimLong) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    // Compute each half independently, then zero the low half if the high
    // half produced a nonzero result.
    if (isR6) {
      __ ClzR6(TMP, in_hi);
    } else {
      __ ClzR2(TMP, in_hi);
    }
    __ LoadConst32(AT, 0x80000000);
    __ Srlv(out_hi, AT, TMP);
    __ And(out_hi, out_hi, in_hi);  // Handles in_hi == 0 (clz == 32 wraps the shift).
    if (isR6) {
      __ ClzR6(TMP, in_lo);
    } else {
      __ ClzR2(TMP, in_lo);
    }
    __ Srlv(out_lo, AT, TMP);  // AT still holds 0x80000000.
    __ And(out_lo, out_lo, in_lo);
    // The high half wins: discard the low result when out_hi != 0.
    if (isR6) {
      __ Seleqz(out_lo, out_lo, out_hi);
    } else {
      __ Movn(out_lo, ZERO, out_hi);
    }
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    if (isR6) {
      __ ClzR6(TMP, in);
    } else {
      __ ClzR2(TMP, in);
    }
    __ LoadConst32(AT, 0x80000000);
    __ Srlv(AT, AT, TMP);  // Srlv shifts in the range of [0;31] bits (lower 5 bits of arg).
    __ And(out, AT, in);   // So this is required for 0 (=shift by 32).
  }
}
2474
// int java.lang.Integer.highestOneBit(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerHighestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerHighestOneBit(HInvoke* invoke) {
  GenHighestOneBit(invoke->GetLocations(), Primitive::kPrimInt, IsR6(), GetAssembler());
}

// long java.lang.Long.highestOneBit(long)
void IntrinsicLocationsBuilderMIPS::VisitLongHighestOneBit(HInvoke* invoke) {
  // kOutputOverlap: GenHighestOneBit writes out_hi before it reads in_lo, so
  // the output pair must not share registers with the input pair.
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitLongHighestOneBit(HInvoke* invoke) {
  GenHighestOneBit(invoke->GetLocations(), Primitive::kPrimLong, IsR6(), GetAssembler());
}
2492
// Emits code computing Integer/Long.lowestOneBit(): isolates the least
// significant set bit via the classic (-in) & in identity (result is 0 for a
// 0 input).
static void GenLowestOneBit(LocationSummary* locations,
                            const Primitive::Type type,
                            bool isR6,
                            MipsAssembler* assembler) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  if (type == Primitive::kPrimLong) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    // Each half is (-half) & half; the low half wins, so the high result is
    // discarded whenever the low result is nonzero.
    __ Subu(TMP, ZERO, in_lo);
    __ And(out_lo, TMP, in_lo);
    __ Subu(TMP, ZERO, in_hi);
    __ And(out_hi, TMP, in_hi);
    if (isR6) {
      __ Seleqz(out_hi, out_hi, out_lo);
    } else {
      __ Movn(out_hi, ZERO, out_lo);
    }
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    __ Subu(TMP, ZERO, in);  // TMP = -in (two's complement negate).
    __ And(out, TMP, in);
  }
}
2522
// int java.lang.Integer.lowestOneBit(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerLowestOneBit(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(invoke->GetLocations(), Primitive::kPrimInt, IsR6(), GetAssembler());
}

// long java.lang.Long.lowestOneBit(long)
void IntrinsicLocationsBuilderMIPS::VisitLongLowestOneBit(HInvoke* invoke) {
  // NOTE(review): unlike VisitLongHighestOneBit, no Location::kOutputOverlap
  // is requested here even though GenLowestOneBit writes out_lo before
  // reading in_hi. This looks safe only if the allocator can never assign
  // out_lo to in_hi's register (e.g. pairs overlap in order) — confirm.
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongLowestOneBit(HInvoke* invoke) {
  GenLowestOneBit(invoke->GetLocations(), Primitive::kPrimLong, IsR6(), GetAssembler());
}
2540
Chris Larsenf09d5322016-04-22 12:06:34 -07002541// int java.lang.Math.round(float)
2542void IntrinsicLocationsBuilderMIPS::VisitMathRoundFloat(HInvoke* invoke) {
2543 LocationSummary* locations = new (arena_) LocationSummary(invoke,
2544 LocationSummary::kNoCall,
2545 kIntrinsified);
2546 locations->SetInAt(0, Location::RequiresFpuRegister());
2547 locations->AddTemp(Location::RequiresFpuRegister());
2548 locations->SetOut(Location::RequiresRegister());
2549}
2550
// Code generation for Math.round(float): round-half-up via floor plus a
// 0.5-threshold correction, with special handling for NaN and values whose
// floor saturates to Integer.MIN_VALUE/MAX_VALUE.
void IntrinsicCodeGeneratorMIPS::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  MipsAssembler* assembler = GetAssembler();
  FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister half = locations->GetTemp(0).AsFpuRegister<FRegister>();
  Register out = locations->Out().AsRegister<Register>();

  MipsLabel done;

  if (IsR6()) {
    // out = floor(in);
    //
    // if (out != MAX_VALUE && out != MIN_VALUE) {
    //   TMP = ((in - out) >= 0.5) ? 1 : 0;
    //   return out += TMP;
    // }
    // return out;

    // out = floor(in);
    __ FloorWS(FTMP, in);
    __ Mfc1(out, FTMP);

    // if (out != MAX_VALUE && out != MIN_VALUE)
    __ Addiu(TMP, out, 1);
    __ Aui(TMP, TMP, 0x8000);  // TMP = out + 0x8000 0001
                               // or out - 0x7FFF FFFF.
                               // IOW, TMP = 1 if out = Int.MIN_VALUE
                               // or TMP = 0 if out = Int.MAX_VALUE.
    __ Srl(TMP, TMP, 1);       // TMP = 0 if out = Int.MIN_VALUE
                               // or out = Int.MAX_VALUE.
    __ Beqz(TMP, &done);

    // TMP = (0.5f <= (in - out)) ? -1 : 0;
    __ Cvtsw(FTMP, FTMP);  // Convert output of floor.w.s back to "float".
    __ LoadConst32(AT, bit_cast<int32_t, float>(0.5f));
    __ SubS(FTMP, in, FTMP);
    __ Mtc1(AT, half);

    // CMP.LE.S writes all-ones (-1) into FTMP when half <= (in - floor(in)).
    __ CmpLeS(FTMP, half, FTMP);
    __ Mfc1(TMP, FTMP);

    // Return out -= TMP.
    __ Subu(out, out, TMP);  // Subtracting -1 increments out by one.
  } else {
    // if (in.isNaN) {
    //   return 0;
    // }
    //
    // out = floor.w.s(in);
    //
    // /*
    //  * This "if" statement is only needed for the pre-R6 version of floor.w.s
    //  * which outputs Integer.MAX_VALUE for negative numbers with magnitudes
    //  * too large to fit in a 32-bit integer.
    //  */
    // if (out == Integer.MAX_VALUE) {
    //   TMP = (in < 0.0f) ? 1 : 0;
    //   /*
    //    * If TMP is 1, then adding it to out will wrap its value from
    //    * Integer.MAX_VALUE to Integer.MIN_VALUE.
    //    */
    //   return out += TMP;
    // }
    //
    // /*
    //  * For negative values not handled by the previous "if" statement the
    //  * test here will correctly set the value of TMP.
    //  */
    // TMP = ((in - out) >= 0.5f) ? 1 : 0;
    // return out += TMP;

    MipsLabel finite;
    MipsLabel add;

    // Test for NaN.
    __ CunS(in, in);

    // Return zero for NaN.
    __ Move(out, ZERO);
    __ Bc1t(&done);

    // out = floor(in);
    __ FloorWS(FTMP, in);
    __ Mfc1(out, FTMP);

    // Preload -1; the condition-flag move below clears it when the
    // correction should not be applied.
    __ LoadConst32(TMP, -1);

    // TMP = (out = java.lang.Integer.MAX_VALUE) ? -1 : 0;
    __ LoadConst32(AT, std::numeric_limits<int32_t>::max());
    __ Bne(AT, out, &finite);

    // Saturated case: set the FP condition flag iff in < 0.0f.
    __ Mtc1(ZERO, FTMP);
    __ ColtS(in, FTMP);

    __ B(&add);

    __ Bind(&finite);

    // TMP = (0.5f <= (in - out)) ? -1 : 0;
    __ Cvtsw(FTMP, FTMP);  // Convert output of floor.w.s back to "float".
    __ LoadConst32(AT, bit_cast<int32_t, float>(0.5f));
    __ SubS(FTMP, in, FTMP);
    __ Mtc1(AT, half);
    __ ColeS(half, FTMP);

    __ Bind(&add);

    // Clear the pending -1 in TMP when the FP condition flag is false.
    __ Movf(TMP, ZERO);

    // Return out -= TMP.
    __ Subu(out, out, TMP);
  }
  __ Bind(&done);
}
2665
Chris Larsen692235e2016-11-21 16:04:53 -08002666// void java.lang.String.getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin)
2667void IntrinsicLocationsBuilderMIPS::VisitStringGetCharsNoCheck(HInvoke* invoke) {
2668 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Chris Larsenfe4ff442017-03-23 11:25:12 -07002669 LocationSummary::kNoCall,
Chris Larsen692235e2016-11-21 16:04:53 -08002670 kIntrinsified);
2671 locations->SetInAt(0, Location::RequiresRegister());
2672 locations->SetInAt(1, Location::RequiresRegister());
2673 locations->SetInAt(2, Location::RequiresRegister());
2674 locations->SetInAt(3, Location::RequiresRegister());
2675 locations->SetInAt(4, Location::RequiresRegister());
2676
Chris Larsenfe4ff442017-03-23 11:25:12 -07002677 locations->AddTemp(Location::RequiresRegister());
2678 locations->AddTemp(Location::RequiresRegister());
2679 locations->AddTemp(Location::RequiresRegister());
Chris Larsen692235e2016-11-21 16:04:53 -08002680}
2681
// Code generation for String.getChars() without bounds checks: copies
// [srcBegin, srcEnd) from the String's value storage into dst starting at
// dstBegin, with a byte-to-halfword inflating loop for compressed strings.
void IntrinsicCodeGeneratorMIPS::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);
  const size_t char_shift = Primitive::ComponentSizeShift(Primitive::kPrimChar);

  Register srcObj = locations->InAt(0).AsRegister<Register>();
  Register srcBegin = locations->InAt(1).AsRegister<Register>();
  Register srcEnd = locations->InAt(2).AsRegister<Register>();
  Register dstObj = locations->InAt(3).AsRegister<Register>();
  Register dstBegin = locations->InAt(4).AsRegister<Register>();

  Register dstPtr = locations->GetTemp(0).AsRegister<Register>();
  Register srcPtr = locations->GetTemp(1).AsRegister<Register>();
  Register numChrs = locations->GetTemp(2).AsRegister<Register>();

  MipsLabel done;
  MipsLabel loop;

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Get offset of value field within a string object.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  __ Beq(srcEnd, srcBegin, &done);  // No characters to move.

  // Calculate number of characters to be copied.
  __ Subu(numChrs, srcEnd, srcBegin);

  // Calculate destination address.
  __ Addiu(dstPtr, dstObj, data_offset);
  __ ShiftAndAdd(dstPtr, dstBegin, dstPtr, char_shift);

  if (mirror::kUseStringCompression) {
    MipsLabel uncompressed_copy, compressed_loop;
    const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
    // Load count field and extract compression flag.
    __ LoadFromOffset(kLoadWord, TMP, srcObj, count_offset);
    __ Sll(TMP, TMP, 31);  // Compression flag is the low bit of `count`.

    // If string is uncompressed, use uncompressed path.
    __ Bnez(TMP, &uncompressed_copy);

    // Copy loop for compressed src, copying 1 character (8-bit) to (16-bit) at a time.
    __ Addu(srcPtr, srcObj, srcBegin);  // Byte-sized elements: no shift needed.
    __ Bind(&compressed_loop);
    __ LoadFromOffset(kLoadUnsignedByte, TMP, srcPtr, value_offset);
    __ StoreToOffset(kStoreHalfword, TMP, dstPtr, 0);
    __ Addiu(numChrs, numChrs, -1);
    __ Addiu(srcPtr, srcPtr, 1);
    __ Addiu(dstPtr, dstPtr, 2);
    __ Bnez(numChrs, &compressed_loop);

    __ B(&done);
    __ Bind(&uncompressed_copy);
  }

  // Calculate source address.
  __ Addiu(srcPtr, srcObj, value_offset);
  __ ShiftAndAdd(srcPtr, srcBegin, srcPtr, char_shift);

  // Halfword-at-a-time copy loop for uncompressed strings.
  __ Bind(&loop);
  __ Lh(AT, srcPtr, 0);
  __ Addiu(numChrs, numChrs, -1);
  __ Addiu(srcPtr, srcPtr, char_size);
  __ Sh(AT, dstPtr, 0);
  __ Addiu(dstPtr, dstPtr, char_size);
  __ Bnez(numChrs, &loop);

  __ Bind(&done);
}
2757
Chris Larsenb9005fa2017-03-24 12:11:54 -07002758static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
2759 LocationSummary* locations = new (arena) LocationSummary(invoke,
2760 LocationSummary::kCallOnMainOnly,
2761 kIntrinsified);
2762 InvokeRuntimeCallingConvention calling_convention;
2763
2764 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2765 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimDouble));
2766}
2767
2768static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
2769 LocationSummary* locations = new (arena) LocationSummary(invoke,
2770 LocationSummary::kCallOnMainOnly,
2771 kIntrinsified);
2772 InvokeRuntimeCallingConvention calling_convention;
2773
2774 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
2775 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
2776 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimDouble));
2777}
2778
2779static void GenFPToFPCall(HInvoke* invoke, CodeGeneratorMIPS* codegen, QuickEntrypointEnum entry) {
2780 LocationSummary* locations = invoke->GetLocations();
2781 FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
2782 DCHECK_EQ(in, F12);
2783 FRegister out = locations->Out().AsFpuRegister<FRegister>();
2784 DCHECK_EQ(out, F0);
2785
2786 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
2787}
2788
2789static void GenFPFPToFPCall(HInvoke* invoke,
2790 CodeGeneratorMIPS* codegen,
2791 QuickEntrypointEnum entry) {
2792 LocationSummary* locations = invoke->GetLocations();
2793 FRegister in0 = locations->InAt(0).AsFpuRegister<FRegister>();
2794 DCHECK_EQ(in0, F12);
2795 FRegister in1 = locations->InAt(1).AsFpuRegister<FRegister>();
2796 DCHECK_EQ(in1, F14);
2797 FRegister out = locations->Out().AsFpuRegister<FRegister>();
2798 DCHECK_EQ(out, F0);
2799
2800 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
2801}
2802
// The following java.lang.Math intrinsics are not expanded inline; each is
// lowered to a call into the corresponding quick runtime entrypoint via
// GenFPToFPCall/GenFPFPToFPCall above.

// static double java.lang.Math.cos(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}

// static double java.lang.Math.sin(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}

// static double java.lang.Math.acos(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}

// static double java.lang.Math.asin(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}

// static double java.lang.Math.atan(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}

// static double java.lang.Math.atan2(double y, double x)
void IntrinsicLocationsBuilderMIPS::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathAtan2(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickAtan2);
}

// static double java.lang.Math.cbrt(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

// static double java.lang.Math.cosh(double x)
void IntrinsicLocationsBuilderMIPS::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

// static double java.lang.Math.exp(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

// static double java.lang.Math.expm1(double x)
void IntrinsicLocationsBuilderMIPS::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

// static double java.lang.Math.hypot(double x, double y)
void IntrinsicLocationsBuilderMIPS::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickHypot);
}

// static double java.lang.Math.log(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

// static double java.lang.Math.log10(double x)
void IntrinsicLocationsBuilderMIPS::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

// static double java.lang.Math.nextAfter(double start, double direction)
void IntrinsicLocationsBuilderMIPS::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

// static double java.lang.Math.sinh(double x)
void IntrinsicLocationsBuilderMIPS::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

// static double java.lang.Math.tan(double a)
void IntrinsicLocationsBuilderMIPS::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

// static double java.lang.Math.tanh(double x)
void IntrinsicLocationsBuilderMIPS::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}
2955
Chris Larsen2f6ad9d2017-03-23 15:37:03 -07002956// static void java.lang.System.arraycopy(Object src, int srcPos,
2957// Object dest, int destPos,
2958// int length)
2959void IntrinsicLocationsBuilderMIPS::VisitSystemArrayCopyChar(HInvoke* invoke) {
2960 HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
2961 HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
2962 HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
2963
2964 // As long as we are checking, we might as well check to see if the src and dest
2965 // positions are >= 0.
2966 if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
2967 (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
2968 // We will have to fail anyways.
2969 return;
2970 }
2971
2972 // And since we are already checking, check the length too.
2973 if (length != nullptr) {
2974 int32_t len = length->GetValue();
2975 if (len < 0) {
2976 // Just call as normal.
2977 return;
2978 }
2979 }
2980
2981 // Okay, it is safe to generate inline code.
2982 LocationSummary* locations =
2983 new (arena_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
2984 // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
2985 locations->SetInAt(0, Location::RequiresRegister());
2986 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
2987 locations->SetInAt(2, Location::RequiresRegister());
2988 locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
2989 locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));
2990
2991 locations->AddTemp(Location::RequiresRegister());
2992 locations->AddTemp(Location::RequiresRegister());
2993 locations->AddTemp(Location::RequiresRegister());
2994}
2995
// Utility routine to verify that "length(input) - pos >= length"
// `length_input_minus_pos` holds the precomputed difference; `length` may be
// a constant or a register. Branches to `slow_path` when the copy would run
// off the end of the array.
static void EnoughItems(MipsAssembler* assembler,
                        Register length_input_minus_pos,
                        Location length,
                        SlowPathCodeMIPS* slow_path) {
  if (length.IsConstant()) {
    int32_t length_constant = length.GetConstant()->AsIntConstant()->GetValue();

    if (IsInt<16>(length_constant)) {
      // Constant fits in Slti's 16-bit signed immediate.
      __ Slti(TMP, length_input_minus_pos, length_constant);
      __ Bnez(TMP, slow_path->GetEntryLabel());
    } else {
      // Otherwise materialize the constant and use a register compare.
      __ LoadConst32(TMP, length_constant);
      __ Blt(length_input_minus_pos, TMP, slow_path->GetEntryLabel());
    }
  } else {
    __ Blt(length_input_minus_pos, length.AsRegister<Register>(), slow_path->GetEntryLabel());
  }
}
3015
// Emits range checks for one side (source or destination) of an arraycopy:
// verifies pos >= 0 and length(input) - pos >= length, branching to
// `slow_path` on failure. When `length_is_input_length` is true, the caller
// asserts length == length(input), so only pos == 0 can succeed.
static void CheckPosition(MipsAssembler* assembler,
                          Location pos,
                          Register input,
                          Location length,
                          SlowPathCodeMIPS* slow_path,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  // Calculate length(input) - pos.
  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, AT, input, length_offset);
        EnoughItems(assembler, AT, length, slow_path);
      }
    } else {
      // Check that (length(input) - pos) >= zero.
      __ LoadFromOffset(kLoadWord, AT, input, length_offset);
      DCHECK_GT(pos_const, 0);  // Negative constants were rejected by the builder.
      __ Addiu32(AT, AT, -pos_const, TMP);
      __ Bltz(AT, slow_path->GetEntryLabel());

      // Verify that (length(input) - pos) >= length.
      EnoughItems(assembler, AT, length, slow_path);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    Register pos_reg = pos.AsRegister<Register>();
    __ Bnez(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Verify that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ Bltz(pos_reg, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= zero.
    __ LoadFromOffset(kLoadWord, AT, input, length_offset);
    __ Subu(AT, AT, pos_reg);
    __ Bltz(AT, slow_path->GetEntryLabel());

    // Verify that (length(input) - pos) >= length.
    EnoughItems(assembler, AT, length, slow_path);
  }
}
3062
// Code generation for System.arraycopy() on char arrays: inline bounds
// checks (via CheckPosition) followed by a simple halfword copy loop. All
// conditions the inline code cannot handle (null arrays, overlap via
// src == dest, negative length, out-of-range positions) branch to the
// intrinsic slow path, which performs the regular call.
void IntrinsicCodeGeneratorMIPS::VisitSystemArrayCopyChar(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location src_pos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  MipsLabel loop;

  Register dest_base = locations->GetTemp(0).AsRegister<Register>();
  Register src_base = locations->GetTemp(1).AsRegister<Register>();
  Register count = locations->GetTemp(2).AsRegister<Register>();

  SlowPathCodeMIPS* slow_path = new (GetAllocator()) IntrinsicSlowPathMIPS(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ Beq(src, dest, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ Beqz(src, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ Beqz(dest, slow_path->GetEntryLabel());

  // Load length into register for count.
  if (length.IsConstant()) {
    __ LoadConst32(count, length.GetConstant()->AsIntConstant()->GetValue());
  } else {
    // If the length is negative, bail out.
    // We have already checked in the LocationsBuilder for the constant case.
    __ Bltz(length.AsRegister<Register>(), slow_path->GetEntryLabel());

    __ Move(count, length.AsRegister<Register>());
  }

  // Validity checks: source.
  CheckPosition(assembler, src_pos, src, Location::RegisterLocation(count), slow_path);

  // Validity checks: dest.
  CheckPosition(assembler, dest_pos, dest, Location::RegisterLocation(count), slow_path);

  // If count is zero, we're done.
  __ Beqz(count, slow_path->GetExitLabel());

  // Okay, everything checks out.  Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  const size_t char_shift = Primitive::ComponentSizeShift(Primitive::kPrimChar);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Calculate source and destination addresses.
  if (src_pos.IsConstant()) {
    int32_t src_pos_const = src_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Addiu32(src_base, src, data_offset + char_size * src_pos_const, TMP);
  } else {
    __ Addiu32(src_base, src, data_offset, TMP);
    __ ShiftAndAdd(src_base, src_pos.AsRegister<Register>(), src_base, char_shift);
  }
  if (dest_pos.IsConstant()) {
    int32_t dest_pos_const = dest_pos.GetConstant()->AsIntConstant()->GetValue();

    __ Addiu32(dest_base, dest, data_offset + char_size * dest_pos_const, TMP);
  } else {
    __ Addiu32(dest_base, dest, data_offset, TMP);
    __ ShiftAndAdd(dest_base, dest_pos.AsRegister<Register>(), dest_base, char_shift);
  }

  // Halfword-at-a-time copy loop.
  __ Bind(&loop);
  __ Lh(TMP, src_base, 0);
  __ Addiu(src_base, src_base, char_size);
  __ Addiu(count, count, -1);
  __ Sh(TMP, dest_base, 0);
  __ Addiu(dest_base, dest_base, char_size);
  __ Bnez(count, &loop);

  __ Bind(slow_path->GetExitLabel());
}
3148
// static java.lang.Integer java.lang.Integer.valueOf(int)
// (The previous comment said "long ... valueOf(long)", which does not match
// this intrinsic: the codegen below boxes a 32-bit int into an Integer.)
void IntrinsicLocationsBuilderMIPS::VisitIntegerValueOf(HInvoke* invoke) {
  InvokeRuntimeCallingConvention calling_convention;
  // Shared helper decides between inline cache lookup and a runtime
  // allocation call, and assigns locations accordingly.
  IntrinsicVisitor::ComputeIntegerValueOfLocations(
      invoke,
      codegen_,
      calling_convention.GetReturnLocation(Primitive::kPrimNot),
      Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
3158
// Code generation for the Integer.valueOf(int) intrinsic.
//
// If the argument is within the boot-image IntegerCache bounds
// [info.low, info.high], the cached java.lang.Integer is loaded (or, for a
// constant in range, its address is embedded directly in the code).
// Otherwise a new j.l.Integer is allocated via the runtime and its `value`
// field is stored explicitly.
void IntrinsicCodeGeneratorMIPS::VisitIntegerValueOf(HInvoke* invoke) {
  IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo();
  LocationSummary* locations = invoke->GetLocations();
  MipsAssembler* assembler = GetAssembler();
  // Needed to emit a memory barrier after initializing the final `value` field.
  InstructionCodeGeneratorMIPS* icodegen =
      down_cast<InstructionCodeGeneratorMIPS*>(codegen_->GetInstructionVisitor());

  Register out = locations->Out().AsRegister<Register>();
  InvokeRuntimeCallingConvention calling_convention;
  if (invoke->InputAt(0)->IsConstant()) {
    int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
    if (value >= info.low && value <= info.high) {
      // Just embed the j.l.Integer in the code.
      // The cache lives in the boot image, so the object's address is a
      // link-time constant that can be loaded as an immediate.
      ScopedObjectAccess soa(Thread::Current());
      mirror::Object* boxed = info.cache->Get(value + (-info.low));
      DCHECK(boxed != nullptr && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boxed));
      uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(boxed));
      __ LoadConst32(out, address);
    } else {
      // Allocate and initialize a new j.l.Integer.
      // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
      // JIT object table.
      // Pass the j.l.Integer class (info.integer) as the allocation argument.
      uint32_t address =
          dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
      __ LoadConst32(calling_convention.GetRegisterAt(0), address);
      // NOTE(review): the entrypoint invoked (kQuickAllocObjectInitialized)
      // differs from the one named in CheckEntrypointTypes below
      // (kQuickAllocObjectWithChecks) — confirm this pairing is intentional.
      codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
      CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
      __ StoreConstToOffset(kStoreWord, value, out, info.value_offset, TMP);
      // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
      // one.
      icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    }
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    MipsLabel allocate, done;
    int32_t count = static_cast<uint32_t>(info.high) - info.low + 1;

    // Is (info.low <= in) && (in <= info.high)?
    // Rebase `in` so the range check becomes a single unsigned compare:
    // (in - low) < count covers both bounds at once.
    __ Addiu32(out, in, -info.low);
    // As unsigned quantities is out < (info.high - info.low + 1)?
    if (IsInt<16>(count)) {
      // `count` fits in the 16-bit immediate of SLTIU.
      __ Sltiu(AT, out, count);
    } else {
      __ LoadConst32(AT, count);
      __ Sltu(AT, out, AT);
    }
    // Branch if out >= (info.high - info.low + 1).
    // This means that "in" is outside of the range [info.low, info.high].
    __ Beqz(AT, &allocate);

    // If the value is within the bounds, load the j.l.Integer directly from the array.
    // `out` already holds the zero-based cache index (in - info.low);
    // scale it by the reference size (TIMES_4) and add the array base.
    uint32_t data_offset = mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
    uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.cache));
    __ LoadConst32(TMP, data_offset + address);
    __ ShiftAndAdd(out, out, TMP, TIMES_4);
    __ Lw(out, out, 0);
    __ MaybeUnpoisonHeapReference(out);
    __ B(&done);

    __ Bind(&allocate);
    // Otherwise allocate and initialize a new j.l.Integer.
    // Pass the j.l.Integer class as the allocation argument, then store
    // the unboxed value into the new object's `value` field.
    address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
    __ LoadConst32(calling_convention.GetRegisterAt(0), address);
    // NOTE(review): entrypoint/CheckEntrypointTypes mismatch as in the
    // constant path above — confirm intentional.
    codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
    __ StoreToOffset(kStoreWord, in, out, info.value_offset);
    // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation
    // one.
    icodegen->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    __ Bind(&done);
  }
}
3231
Chris Larsen2714fe62016-02-11 14:23:53 -08003232// Unimplemented intrinsics.
3233
Aart Bik2f9fcc92016-03-01 15:16:54 -08003234UNIMPLEMENTED_INTRINSIC(MIPS, MathCeil)
3235UNIMPLEMENTED_INTRINSIC(MIPS, MathFloor)
3236UNIMPLEMENTED_INTRINSIC(MIPS, MathRint)
3237UNIMPLEMENTED_INTRINSIC(MIPS, MathRoundDouble)
Alexey Frunze15958152017-02-09 19:08:30 -08003238UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetLongVolatile);
3239UNIMPLEMENTED_INTRINSIC(MIPS, UnsafePutLongVolatile);
Aart Bik2f9fcc92016-03-01 15:16:54 -08003240UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeCASLong)
Chris Larsen701566a2015-10-27 15:29:13 -07003241
Aart Bik2f9fcc92016-03-01 15:16:54 -08003242UNIMPLEMENTED_INTRINSIC(MIPS, ReferenceGetReferent)
Aart Bik2f9fcc92016-03-01 15:16:54 -08003243UNIMPLEMENTED_INTRINSIC(MIPS, SystemArrayCopy)
Aart Bik3f67e692016-01-15 14:35:12 -08003244
Aart Bikff7d89c2016-11-07 08:49:28 -08003245UNIMPLEMENTED_INTRINSIC(MIPS, StringStringIndexOf);
3246UNIMPLEMENTED_INTRINSIC(MIPS, StringStringIndexOfAfter);
Aart Bik71bf7b42016-11-16 10:17:46 -08003247UNIMPLEMENTED_INTRINSIC(MIPS, StringBufferAppend);
3248UNIMPLEMENTED_INTRINSIC(MIPS, StringBufferLength);
3249UNIMPLEMENTED_INTRINSIC(MIPS, StringBufferToString);
3250UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderAppend);
3251UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderLength);
3252UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderToString);
Aart Bikff7d89c2016-11-07 08:49:28 -08003253
Aart Bik0e54c012016-03-04 12:08:31 -08003254// 1.8.
3255UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndAddInt)
3256UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndAddLong)
3257UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndSetInt)
3258UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndSetLong)
3259UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndSetObject)
Chris Larsen701566a2015-10-27 15:29:13 -07003260
Nicolas Geoffray365719c2017-03-08 13:11:50 +00003261UNIMPLEMENTED_INTRINSIC(MIPS, ThreadInterrupted)
3262
Aart Bik0e54c012016-03-04 12:08:31 -08003263UNREACHABLE_INTRINSICS(MIPS)
Chris Larsen2714fe62016-02-11 14:23:53 -08003264
Chris Larsen701566a2015-10-27 15:29:13 -07003265#undef __
3266
3267} // namespace mips
3268} // namespace art