blob: 8c16b39d115454b71625543b995cc1c08bda0c26 [file] [log] [blame]
Chris Larsen701566a2015-10-27 15:29:13 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "intrinsics_mips.h"
18
19#include "arch/mips/instruction_set_features_mips.h"
20#include "art_method.h"
21#include "code_generator_mips.h"
22#include "entrypoints/quick/quick_entrypoints.h"
23#include "intrinsics.h"
24#include "mirror/array-inl.h"
25#include "mirror/string.h"
26#include "thread.h"
27#include "utils/mips/assembler_mips.h"
28#include "utils/mips/constants_mips.h"
29
30namespace art {
31
32namespace mips {
33
// The locations builder allocates all LocationSummary objects out of the
// graph's arena, so cache that allocator at construction time.
IntrinsicLocationsBuilderMIPS::IntrinsicLocationsBuilderMIPS(CodeGeneratorMIPS* codegen)
    : arena_(codegen->GetGraph()->GetArena()) {
}
37
// Returns the code generator's assembler, downcast to the MIPS32 assembler.
// The cast is safe because this code generator only ever owns a MipsAssembler.
MipsAssembler* IntrinsicCodeGeneratorMIPS::GetAssembler() {
  return reinterpret_cast<MipsAssembler*>(codegen_->GetAssembler());
}
41
// Convenience accessor for the graph's arena allocator.
ArenaAllocator* IntrinsicCodeGeneratorMIPS::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}
45
46#define __ codegen->GetAssembler()->
47
48static void MoveFromReturnRegister(Location trg,
49 Primitive::Type type,
50 CodeGeneratorMIPS* codegen) {
51 if (!trg.IsValid()) {
52 DCHECK_EQ(type, Primitive::kPrimVoid);
53 return;
54 }
55
56 DCHECK_NE(type, Primitive::kPrimVoid);
57
58 if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
59 Register trg_reg = trg.AsRegister<Register>();
60 if (trg_reg != V0) {
61 __ Move(V0, trg_reg);
62 }
63 } else {
64 FRegister trg_reg = trg.AsFpuRegister<FRegister>();
65 if (trg_reg != F0) {
66 if (type == Primitive::kPrimFloat) {
67 __ MovS(F0, trg_reg);
68 } else {
69 __ MovD(F0, trg_reg);
70 }
71 }
72 }
73}
74
75static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS* codegen) {
76 InvokeDexCallingConventionVisitorMIPS calling_convention_visitor;
77 IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
78}
79
// Slow-path for fallback (calling the managed code to handle the
// intrinsic) in an intrinsified call. This will copy the arguments
// into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations
//       given by the invoke's location summary. If an intrinsic
//       modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit IntrinsicSlowPathMIPS(HInvoke* invoke) : invoke_(invoke) { }

  // Emits the out-of-line call sequence: save live registers, marshal the
  // arguments, perform the regular (non-intrinsic) call, move the result back
  // to the expected output location, restore registers, and branch back.
  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS* codegen = down_cast<CodeGeneratorMIPS*>(codegen_in);

    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      // A0 carries the ArtMethod* for the direct call.
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(A0));
      codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);
    } else {
      // Virtual/interface fallbacks are not supported on MIPS32 yet.
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS);
};
130
131#undef __
132
133bool IntrinsicLocationsBuilderMIPS::TryDispatch(HInvoke* invoke) {
134 Dispatch(invoke);
135 LocationSummary* res = invoke->GetLocations();
136 return res != nullptr && res->Intrinsified();
137}
138
139#define __ assembler->
140
Chris Larsen3f8bf652015-10-28 10:08:56 -0700141static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
142 LocationSummary* locations = new (arena) LocationSummary(invoke,
143 LocationSummary::kNoCall,
144 kIntrinsified);
145 locations->SetInAt(0, Location::RequiresFpuRegister());
146 locations->SetOut(Location::RequiresRegister());
147}
148
149static void MoveFPToInt(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
150 FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
151
152 if (is64bit) {
153 Register out_lo = locations->Out().AsRegisterPairLow<Register>();
154 Register out_hi = locations->Out().AsRegisterPairHigh<Register>();
155
156 __ Mfc1(out_lo, in);
157 __ Mfhc1(out_hi, in);
158 } else {
159 Register out = locations->Out().AsRegister<Register>();
160
161 __ Mfc1(out, in);
162 }
163}
164
// long java.lang.Double.doubleToRawLongBits(double)
void IntrinsicLocationsBuilderMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  // 64-bit: the double's raw bits land in a core register pair.
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
}

// int java.lang.Float.floatToRawIntBits(float)
void IntrinsicLocationsBuilderMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  // 32-bit: a single MFC1 suffices.
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
}
182
183static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
184 LocationSummary* locations = new (arena) LocationSummary(invoke,
185 LocationSummary::kNoCall,
186 kIntrinsified);
187 locations->SetInAt(0, Location::RequiresRegister());
188 locations->SetOut(Location::RequiresFpuRegister());
189}
190
191static void MoveIntToFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
192 FRegister out = locations->Out().AsFpuRegister<FRegister>();
193
194 if (is64bit) {
195 Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
196 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
197
198 __ Mtc1(in_lo, out);
199 __ Mthc1(in_hi, out);
200 } else {
201 Register in = locations->InAt(0).AsRegister<Register>();
202
203 __ Mtc1(in, out);
204 }
205}
206
// double java.lang.Double.longBitsToDouble(long)
void IntrinsicLocationsBuilderMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  // 64-bit: both halves of the register pair go into the FPU register.
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
}

// float java.lang.Float.intBitsToFloat(int)
void IntrinsicLocationsBuilderMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  // 32-bit: a single MTC1 suffices.
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
}
224
Chris Larsen86829602015-11-18 12:27:52 -0800225static void CreateIntToIntLocations(ArenaAllocator* arena,
226 HInvoke* invoke,
227 Location::OutputOverlap overlaps = Location::kNoOutputOverlap) {
Chris Larsen3f8bf652015-10-28 10:08:56 -0700228 LocationSummary* locations = new (arena) LocationSummary(invoke,
229 LocationSummary::kNoCall,
230 kIntrinsified);
231 locations->SetInAt(0, Location::RequiresRegister());
Chris Larsen86829602015-11-18 12:27:52 -0800232 locations->SetOut(Location::RequiresRegister(), overlaps);
Chris Larsen3f8bf652015-10-28 10:08:56 -0700233}
234
Chris Larsen70014c82015-11-18 12:26:08 -0800235static void GenReverse(LocationSummary* locations,
236 Primitive::Type type,
237 bool isR2OrNewer,
238 bool isR6,
239 bool reverseBits,
240 MipsAssembler* assembler) {
Chris Larsen3f8bf652015-10-28 10:08:56 -0700241 DCHECK(type == Primitive::kPrimShort ||
242 type == Primitive::kPrimInt ||
243 type == Primitive::kPrimLong);
Chris Larsen70014c82015-11-18 12:26:08 -0800244 DCHECK(type != Primitive::kPrimShort || !reverseBits);
Chris Larsen3f8bf652015-10-28 10:08:56 -0700245
246 if (type == Primitive::kPrimShort) {
247 Register in = locations->InAt(0).AsRegister<Register>();
248 Register out = locations->Out().AsRegister<Register>();
249
250 if (isR2OrNewer) {
251 __ Wsbh(out, in);
252 __ Seh(out, out);
253 } else {
254 __ Sll(TMP, in, 24);
255 __ Sra(TMP, TMP, 16);
256 __ Sll(out, in, 16);
257 __ Srl(out, out, 24);
258 __ Or(out, out, TMP);
259 }
260 } else if (type == Primitive::kPrimInt) {
261 Register in = locations->InAt(0).AsRegister<Register>();
262 Register out = locations->Out().AsRegister<Register>();
263
264 if (isR2OrNewer) {
265 __ Rotr(out, in, 16);
266 __ Wsbh(out, out);
267 } else {
268 // MIPS32r1
269 // __ Rotr(out, in, 16);
270 __ Sll(TMP, in, 16);
271 __ Srl(out, in, 16);
272 __ Or(out, out, TMP);
273 // __ Wsbh(out, out);
274 __ LoadConst32(AT, 0x00FF00FF);
275 __ And(TMP, out, AT);
276 __ Sll(TMP, TMP, 8);
277 __ Srl(out, out, 8);
278 __ And(out, out, AT);
279 __ Or(out, out, TMP);
280 }
Chris Larsen70014c82015-11-18 12:26:08 -0800281 if (reverseBits) {
282 if (isR6) {
283 __ Bitswap(out, out);
284 } else {
285 __ LoadConst32(AT, 0x0F0F0F0F);
286 __ And(TMP, out, AT);
287 __ Sll(TMP, TMP, 4);
288 __ Srl(out, out, 4);
289 __ And(out, out, AT);
290 __ Or(out, TMP, out);
291 __ LoadConst32(AT, 0x33333333);
292 __ And(TMP, out, AT);
293 __ Sll(TMP, TMP, 2);
294 __ Srl(out, out, 2);
295 __ And(out, out, AT);
296 __ Or(out, TMP, out);
297 __ LoadConst32(AT, 0x55555555);
298 __ And(TMP, out, AT);
299 __ Sll(TMP, TMP, 1);
300 __ Srl(out, out, 1);
301 __ And(out, out, AT);
302 __ Or(out, TMP, out);
303 }
304 }
Chris Larsen3f8bf652015-10-28 10:08:56 -0700305 } else if (type == Primitive::kPrimLong) {
306 Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
307 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
308 Register out_lo = locations->Out().AsRegisterPairLow<Register>();
309 Register out_hi = locations->Out().AsRegisterPairHigh<Register>();
310
311 if (isR2OrNewer) {
312 __ Rotr(AT, in_hi, 16);
313 __ Rotr(TMP, in_lo, 16);
314 __ Wsbh(out_lo, AT);
315 __ Wsbh(out_hi, TMP);
316 } else {
317 // When calling CreateIntToIntLocations() we promised that the
318 // use of the out_lo/out_hi wouldn't overlap with the use of
319 // in_lo/in_hi. Be very careful not to write to out_lo/out_hi
320 // until we're completely done reading from in_lo/in_hi.
321 // __ Rotr(TMP, in_lo, 16);
322 __ Sll(TMP, in_lo, 16);
323 __ Srl(AT, in_lo, 16);
324 __ Or(TMP, TMP, AT); // Hold in TMP until it's safe
325 // to write to out_hi.
326 // __ Rotr(out_lo, in_hi, 16);
327 __ Sll(AT, in_hi, 16);
328 __ Srl(out_lo, in_hi, 16); // Here we are finally done reading
329 // from in_lo/in_hi so it's okay to
330 // write to out_lo/out_hi.
331 __ Or(out_lo, out_lo, AT);
332 // __ Wsbh(out_hi, out_hi);
333 __ LoadConst32(AT, 0x00FF00FF);
334 __ And(out_hi, TMP, AT);
335 __ Sll(out_hi, out_hi, 8);
336 __ Srl(TMP, TMP, 8);
337 __ And(TMP, TMP, AT);
338 __ Or(out_hi, out_hi, TMP);
339 // __ Wsbh(out_lo, out_lo);
340 __ And(TMP, out_lo, AT); // AT already holds the correct mask value
341 __ Sll(TMP, TMP, 8);
342 __ Srl(out_lo, out_lo, 8);
343 __ And(out_lo, out_lo, AT);
344 __ Or(out_lo, out_lo, TMP);
345 }
Chris Larsen70014c82015-11-18 12:26:08 -0800346 if (reverseBits) {
347 if (isR6) {
348 __ Bitswap(out_hi, out_hi);
349 __ Bitswap(out_lo, out_lo);
350 } else {
351 __ LoadConst32(AT, 0x0F0F0F0F);
352 __ And(TMP, out_hi, AT);
353 __ Sll(TMP, TMP, 4);
354 __ Srl(out_hi, out_hi, 4);
355 __ And(out_hi, out_hi, AT);
356 __ Or(out_hi, TMP, out_hi);
357 __ And(TMP, out_lo, AT);
358 __ Sll(TMP, TMP, 4);
359 __ Srl(out_lo, out_lo, 4);
360 __ And(out_lo, out_lo, AT);
361 __ Or(out_lo, TMP, out_lo);
362 __ LoadConst32(AT, 0x33333333);
363 __ And(TMP, out_hi, AT);
364 __ Sll(TMP, TMP, 2);
365 __ Srl(out_hi, out_hi, 2);
366 __ And(out_hi, out_hi, AT);
367 __ Or(out_hi, TMP, out_hi);
368 __ And(TMP, out_lo, AT);
369 __ Sll(TMP, TMP, 2);
370 __ Srl(out_lo, out_lo, 2);
371 __ And(out_lo, out_lo, AT);
372 __ Or(out_lo, TMP, out_lo);
373 __ LoadConst32(AT, 0x55555555);
374 __ And(TMP, out_hi, AT);
375 __ Sll(TMP, TMP, 1);
376 __ Srl(out_hi, out_hi, 1);
377 __ And(out_hi, out_hi, AT);
378 __ Or(out_hi, TMP, out_hi);
379 __ And(TMP, out_lo, AT);
380 __ Sll(TMP, TMP, 1);
381 __ Srl(out_lo, out_lo, 1);
382 __ And(out_lo, out_lo, AT);
383 __ Or(out_lo, TMP, out_lo);
384 }
385 }
Chris Larsen3f8bf652015-10-28 10:08:56 -0700386 }
387}
388
// int java.lang.Integer.reverseBytes(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerReverseBytes(HInvoke* invoke) {
  // Byte reversal only (reverseBits == false).
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimInt,
             codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2(),
             codegen_->GetInstructionSetFeatures().IsR6(),
             false,
             GetAssembler());
}

// long java.lang.Long.reverseBytes(long)
void IntrinsicLocationsBuilderMIPS::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongReverseBytes(HInvoke* invoke) {
  // Byte reversal only (reverseBits == false).
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimLong,
             codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2(),
             codegen_->GetInstructionSetFeatures().IsR6(),
             false,
             GetAssembler());
}

// short java.lang.Short.reverseBytes(short)
void IntrinsicLocationsBuilderMIPS::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitShortReverseBytes(HInvoke* invoke) {
  // Byte reversal only (bit reversal is disallowed for shorts in GenReverse).
  GenReverse(invoke->GetLocations(),
             Primitive::kPrimShort,
             codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2(),
             codegen_->GetInstructionSetFeatures().IsR6(),
             false,
             GetAssembler());
}
430
Chris Larsene3845472015-11-18 12:27:15 -0800431static void GenNumberOfLeadingZeroes(LocationSummary* locations,
432 bool is64bit,
433 bool isR6,
434 MipsAssembler* assembler) {
435 Register out = locations->Out().AsRegister<Register>();
436 if (is64bit) {
437 Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
438 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
439
440 if (isR6) {
441 __ ClzR6(AT, in_hi);
442 __ ClzR6(TMP, in_lo);
443 __ Seleqz(TMP, TMP, in_hi);
444 } else {
445 __ ClzR2(AT, in_hi);
446 __ ClzR2(TMP, in_lo);
447 __ Movn(TMP, ZERO, in_hi);
448 }
449 __ Addu(out, AT, TMP);
450 } else {
451 Register in = locations->InAt(0).AsRegister<Register>();
452
453 if (isR6) {
454 __ ClzR6(out, in);
455 } else {
456 __ ClzR2(out, in);
457 }
458 }
459}
460
// int java.lang.Integer.numberOfLeadingZeros(int i)
void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(),
                           false,
                           codegen_->GetInstructionSetFeatures().IsR6(),
                           GetAssembler());
}

// int java.lang.Long.numberOfLeadingZeros(long i)
void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeroes(invoke->GetLocations(),
                           true,
                           codegen_->GetInstructionSetFeatures().IsR6(),
                           GetAssembler());
}
484
Chris Larsen86829602015-11-18 12:27:52 -0800485static void GenNumberOfTrailingZeroes(LocationSummary* locations,
486 bool is64bit,
487 bool isR6,
488 bool isR2OrNewer,
489 MipsAssembler* assembler) {
490 Register out = locations->Out().AsRegister<Register>();
491 Register in_lo;
492 Register in;
493
494 if (is64bit) {
495 MipsLabel done;
496 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
497
498 in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
499
500 // If in_lo is zero then count the number of trailing zeroes in in_hi;
501 // otherwise count the number of trailing zeroes in in_lo.
502 // AT = in_lo ? in_lo : in_hi;
503 if (isR6) {
504 __ Seleqz(out, in_hi, in_lo);
505 __ Selnez(TMP, in_lo, in_lo);
506 __ Or(out, out, TMP);
507 } else {
508 __ Movz(out, in_hi, in_lo);
509 __ Movn(out, in_lo, in_lo);
510 }
511
512 in = out;
513 } else {
514 in = locations->InAt(0).AsRegister<Register>();
515 // Give in_lo a dummy value to keep the compiler from complaining.
516 // Since we only get here in the 32-bit case, this value will never
517 // be used.
518 in_lo = in;
519 }
520
521 // We don't have an instruction to count the number of trailing zeroes.
522 // Start by flipping the bits end-for-end so we can count the number of
523 // leading zeroes instead.
524 if (isR2OrNewer) {
525 __ Rotr(out, in, 16);
526 __ Wsbh(out, out);
527 } else {
528 // MIPS32r1
529 // __ Rotr(out, in, 16);
530 __ Sll(TMP, in, 16);
531 __ Srl(out, in, 16);
532 __ Or(out, out, TMP);
533 // __ Wsbh(out, out);
534 __ LoadConst32(AT, 0x00FF00FF);
535 __ And(TMP, out, AT);
536 __ Sll(TMP, TMP, 8);
537 __ Srl(out, out, 8);
538 __ And(out, out, AT);
539 __ Or(out, out, TMP);
540 }
541
542 if (isR6) {
543 __ Bitswap(out, out);
544 __ ClzR6(out, out);
545 } else {
546 __ LoadConst32(AT, 0x0F0F0F0F);
547 __ And(TMP, out, AT);
548 __ Sll(TMP, TMP, 4);
549 __ Srl(out, out, 4);
550 __ And(out, out, AT);
551 __ Or(out, TMP, out);
552 __ LoadConst32(AT, 0x33333333);
553 __ And(TMP, out, AT);
554 __ Sll(TMP, TMP, 2);
555 __ Srl(out, out, 2);
556 __ And(out, out, AT);
557 __ Or(out, TMP, out);
558 __ LoadConst32(AT, 0x55555555);
559 __ And(TMP, out, AT);
560 __ Sll(TMP, TMP, 1);
561 __ Srl(out, out, 1);
562 __ And(out, out, AT);
563 __ Or(out, TMP, out);
564 __ ClzR2(out, out);
565 }
566
567 if (is64bit) {
568 // If in_lo is zero, then we counted the number of trailing zeroes in in_hi so we must add the
569 // number of trailing zeroes in in_lo (32) to get the correct final count
570 __ LoadConst32(TMP, 32);
571 if (isR6) {
572 __ Seleqz(TMP, TMP, in_lo);
573 } else {
574 __ Movn(TMP, ZERO, in_lo);
575 }
576 __ Addu(out, out, TMP);
577 }
578}
579
// int java.lang.Integer.numberOfTrailingZeros(int i)
void IntrinsicLocationsBuilderMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  // kOutputOverlap: GenNumberOfTrailingZeroes reads the input after writing
  // the output, so they must not share registers.
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(),
                            false,
                            codegen_->GetInstructionSetFeatures().IsR6(),
                            codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2(),
                            GetAssembler());
}

// int java.lang.Long.numberOfTrailingZeros(long i)
void IntrinsicLocationsBuilderMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  // kOutputOverlap: see the integer variant above.
  CreateIntToIntLocations(arena_, invoke, Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorMIPS::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeroes(invoke->GetLocations(),
                            true,
                            codegen_->GetInstructionSetFeatures().IsR6(),
                            codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2(),
                            GetAssembler());
}
605
Chris Larsen70014c82015-11-18 12:26:08 -0800606// int java.lang.Integer.reverse(int)
607void IntrinsicLocationsBuilderMIPS::VisitIntegerReverse(HInvoke* invoke) {
608 CreateIntToIntLocations(arena_, invoke);
609}
610
611void IntrinsicCodeGeneratorMIPS::VisitIntegerReverse(HInvoke* invoke) {
612 GenReverse(invoke->GetLocations(),
613 Primitive::kPrimInt,
614 codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2(),
615 codegen_->GetInstructionSetFeatures().IsR6(),
616 true,
617 GetAssembler());
618}
619
620// long java.lang.Long.reverse(long)
621void IntrinsicLocationsBuilderMIPS::VisitLongReverse(HInvoke* invoke) {
622 CreateIntToIntLocations(arena_, invoke);
623}
624
625void IntrinsicCodeGeneratorMIPS::VisitLongReverse(HInvoke* invoke) {
626 GenReverse(invoke->GetLocations(),
627 Primitive::kPrimLong,
628 codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2(),
629 codegen_->GetInstructionSetFeatures().IsR6(),
630 true,
631 GetAssembler());
Chris Larsen3f8bf652015-10-28 10:08:56 -0700632}
633
Chris Larsen16ba2b42015-11-02 10:58:31 -0800634// boolean java.lang.String.equals(Object anObject)
635void IntrinsicLocationsBuilderMIPS::VisitStringEquals(HInvoke* invoke) {
636 LocationSummary* locations = new (arena_) LocationSummary(invoke,
637 LocationSummary::kNoCall,
638 kIntrinsified);
639 locations->SetInAt(0, Location::RequiresRegister());
640 locations->SetInAt(1, Location::RequiresRegister());
641 locations->SetOut(Location::RequiresRegister());
642
643 // Temporary registers to store lengths of strings and for calculations.
644 locations->AddTemp(Location::RequiresRegister());
645 locations->AddTemp(Location::RequiresRegister());
646 locations->AddTemp(Location::RequiresRegister());
647}
648
// Emits the fast path for String.equals: reference/null/class/length checks,
// then a word-at-a-time (two chars per load) comparison loop.
void IntrinsicCodeGeneratorMIPS::VisitStringEquals(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp1 = locations->GetTemp(0).AsRegister<Register>();
  Register temp2 = locations->GetTemp(1).AsRegister<Register>();
  Register temp3 = locations->GetTemp(2).AsRegister<Register>();

  MipsLabel loop;
  MipsLabel end;
  MipsLabel return_true;
  MipsLabel return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this", and the register
  // containing the pointer to "anObject" are the same register then
  // "this", and "anObject" are the same object and we can
  // short-circuit the logic to a true result.
  if (str == arg) {
    __ LoadConst32(out, 1);
    return;
  }

  // Check if input is null, return false if it is.
  __ Beqz(arg, &return_false);

  // Reference equality check, return true if same reference.
  __ Beq(str, arg, &return_true);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ Lw(temp1, str, class_offset);
  __ Lw(temp2, arg, class_offset);
  __ Bne(temp1, temp2, &return_false);

  // Load lengths of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if lengths are equal, return false if they're not.
  __ Bne(temp1, temp2, &return_false);
  // Return true if both strings are empty.
  __ Beqz(temp1, &return_true);

  // Don't overwrite input registers.
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  // Loop to compare strings 2 characters at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded. Each iteration loads one
  // word (two 16-bit chars) from each string, advances both cursors by 4
  // bytes, and decrements the remaining char count (temp1) by 2.
  __ Bind(&loop);
  __ Lw(out, TMP, value_offset);
  __ Lw(temp2, temp3, value_offset);
  __ Bne(out, temp2, &return_false);
  __ Addiu(TMP, TMP, 4);
  __ Addiu(temp3, temp3, 4);
  __ Addiu(temp1, temp1, -2);
  __ Bgtz(temp1, &loop);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadConst32(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst32(out, 0);
  __ Bind(&end);
}
735
Chris Larsen701566a2015-10-27 15:29:13 -0700736// Unimplemented intrinsics.
737
738#define UNIMPLEMENTED_INTRINSIC(Name) \
739void IntrinsicLocationsBuilderMIPS::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
740} \
741void IntrinsicCodeGeneratorMIPS::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
742}
743
Chris Larsen701566a2015-10-27 15:29:13 -0700744UNIMPLEMENTED_INTRINSIC(MathAbsDouble)
745UNIMPLEMENTED_INTRINSIC(MathAbsFloat)
746UNIMPLEMENTED_INTRINSIC(MathAbsInt)
747UNIMPLEMENTED_INTRINSIC(MathAbsLong)
748UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
749UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
750UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble)
751UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat)
752UNIMPLEMENTED_INTRINSIC(MathMinIntInt)
753UNIMPLEMENTED_INTRINSIC(MathMinLongLong)
754UNIMPLEMENTED_INTRINSIC(MathMaxIntInt)
755UNIMPLEMENTED_INTRINSIC(MathMaxLongLong)
756UNIMPLEMENTED_INTRINSIC(MathSqrt)
757UNIMPLEMENTED_INTRINSIC(MathCeil)
758UNIMPLEMENTED_INTRINSIC(MathFloor)
759UNIMPLEMENTED_INTRINSIC(MathRint)
760UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
761UNIMPLEMENTED_INTRINSIC(MathRoundFloat)
762UNIMPLEMENTED_INTRINSIC(MemoryPeekByte)
763UNIMPLEMENTED_INTRINSIC(MemoryPeekIntNative)
764UNIMPLEMENTED_INTRINSIC(MemoryPeekLongNative)
765UNIMPLEMENTED_INTRINSIC(MemoryPeekShortNative)
766UNIMPLEMENTED_INTRINSIC(MemoryPokeByte)
767UNIMPLEMENTED_INTRINSIC(MemoryPokeIntNative)
768UNIMPLEMENTED_INTRINSIC(MemoryPokeLongNative)
769UNIMPLEMENTED_INTRINSIC(MemoryPokeShortNative)
770UNIMPLEMENTED_INTRINSIC(ThreadCurrentThread)
771UNIMPLEMENTED_INTRINSIC(UnsafeGet)
772UNIMPLEMENTED_INTRINSIC(UnsafeGetVolatile)
773UNIMPLEMENTED_INTRINSIC(UnsafeGetLong)
774UNIMPLEMENTED_INTRINSIC(UnsafeGetLongVolatile)
775UNIMPLEMENTED_INTRINSIC(UnsafeGetObject)
776UNIMPLEMENTED_INTRINSIC(UnsafeGetObjectVolatile)
777UNIMPLEMENTED_INTRINSIC(UnsafePut)
778UNIMPLEMENTED_INTRINSIC(UnsafePutOrdered)
779UNIMPLEMENTED_INTRINSIC(UnsafePutVolatile)
780UNIMPLEMENTED_INTRINSIC(UnsafePutObject)
781UNIMPLEMENTED_INTRINSIC(UnsafePutObjectOrdered)
782UNIMPLEMENTED_INTRINSIC(UnsafePutObjectVolatile)
783UNIMPLEMENTED_INTRINSIC(UnsafePutLong)
784UNIMPLEMENTED_INTRINSIC(UnsafePutLongOrdered)
785UNIMPLEMENTED_INTRINSIC(UnsafePutLongVolatile)
786UNIMPLEMENTED_INTRINSIC(UnsafeCASInt)
787UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)
788UNIMPLEMENTED_INTRINSIC(UnsafeCASObject)
789UNIMPLEMENTED_INTRINSIC(StringCharAt)
790UNIMPLEMENTED_INTRINSIC(StringCompareTo)
Chris Larsen701566a2015-10-27 15:29:13 -0700791UNIMPLEMENTED_INTRINSIC(StringIndexOf)
792UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
793UNIMPLEMENTED_INTRINSIC(StringNewStringFromBytes)
794UNIMPLEMENTED_INTRINSIC(StringNewStringFromChars)
795UNIMPLEMENTED_INTRINSIC(StringNewStringFromString)
796UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
797UNIMPLEMENTED_INTRINSIC(LongRotateRight)
Chris Larsen701566a2015-10-27 15:29:13 -0700798UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
799UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
Chris Larsen701566a2015-10-27 15:29:13 -0700800
801UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
802UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
803UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
804UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)
805
806#undef UNIMPLEMENTED_INTRINSIC
807
808#undef __
809
810} // namespace mips
811} // namespace art