blob: 326844526e85bee5579aacb4789f2319e20f19fe [file] [log] [blame]
Chris Larsen701566a2015-10-27 15:29:13 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "intrinsics_mips.h"
18
19#include "arch/mips/instruction_set_features_mips.h"
20#include "art_method.h"
21#include "code_generator_mips.h"
22#include "entrypoints/quick/quick_entrypoints.h"
23#include "intrinsics.h"
24#include "mirror/array-inl.h"
25#include "mirror/string.h"
26#include "thread.h"
27#include "utils/mips/assembler_mips.h"
28#include "utils/mips/constants_mips.h"
29
30namespace art {
31
32namespace mips {
33
// Caches the compilation graph's arena so the location summaries built by
// this visitor are allocated from the same pool as the rest of the graph.
IntrinsicLocationsBuilderMIPS::IntrinsicLocationsBuilderMIPS(CodeGeneratorMIPS* codegen)
    : arena_(codegen->GetGraph()->GetArena()) {
}
37
// Returns the code generator's assembler, cast to the MIPS32 assembler type
// so callers can emit MIPS-specific instructions.
MipsAssembler* IntrinsicCodeGeneratorMIPS::GetAssembler() {
  return reinterpret_cast<MipsAssembler*>(codegen_->GetAssembler());
}
41
// Returns the arena allocator of the graph currently being compiled.
ArenaAllocator* IntrinsicCodeGeneratorMIPS::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}
45
46#define __ codegen->GetAssembler()->
47
48static void MoveFromReturnRegister(Location trg,
49 Primitive::Type type,
50 CodeGeneratorMIPS* codegen) {
51 if (!trg.IsValid()) {
52 DCHECK_EQ(type, Primitive::kPrimVoid);
53 return;
54 }
55
56 DCHECK_NE(type, Primitive::kPrimVoid);
57
58 if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
59 Register trg_reg = trg.AsRegister<Register>();
60 if (trg_reg != V0) {
61 __ Move(V0, trg_reg);
62 }
63 } else {
64 FRegister trg_reg = trg.AsFpuRegister<FRegister>();
65 if (trg_reg != F0) {
66 if (type == Primitive::kPrimFloat) {
67 __ MovS(F0, trg_reg);
68 } else {
69 __ MovD(F0, trg_reg);
70 }
71 }
72 }
73}
74
// Moves the intrinsic invoke's actual arguments into the positions required
// by the MIPS managed-code calling convention, so the slow path can fall back
// to calling the original (non-intrinsified) method.
static void MoveArguments(HInvoke* invoke, CodeGeneratorMIPS* codegen) {
  InvokeDexCallingConventionVisitorMIPS calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}
79
80// Slow-path for fallback (calling the managed code to handle the
81// intrinsic) in an intrinsified call. This will copy the arguments
82// into the positions for a regular call.
83//
84// Note: The actual parameters are required to be in the locations
85// given by the invoke's location summary. If an intrinsic
86// modifies those locations before a slowpath call, they must be
87// restored!
class IntrinsicSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit IntrinsicSlowPathMIPS(HInvoke* invoke) : invoke_(invoke) { }

  // Emits the fallback call to the managed implementation of the intrinsic.
  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorMIPS* codegen = down_cast<CodeGeneratorMIPS*>(codegen_in);

    __ Bind(GetEntryLabel());

    // The call below may clobber any live register, so spill them first.
    SaveLiveRegisters(codegen, invoke_->GetLocations());

    // Shuffle the operands into the standard argument positions.
    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(A0));
      codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);
    } else {
      // Only static/direct invokes are intrinsified on MIPS32 so far.
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      // The output register must not have been spilled above, or the restore
      // below would overwrite the result we just moved into it.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathMIPS"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathMIPS);
};
130
131#undef __
132
133bool IntrinsicLocationsBuilderMIPS::TryDispatch(HInvoke* invoke) {
134 Dispatch(invoke);
135 LocationSummary* res = invoke->GetLocations();
136 return res != nullptr && res->Intrinsified();
137}
138
139#define __ assembler->
140
Chris Larsen3f8bf652015-10-28 10:08:56 -0700141static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
142 LocationSummary* locations = new (arena) LocationSummary(invoke,
143 LocationSummary::kNoCall,
144 kIntrinsified);
145 locations->SetInAt(0, Location::RequiresFpuRegister());
146 locations->SetOut(Location::RequiresRegister());
147}
148
149static void MoveFPToInt(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
150 FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
151
152 if (is64bit) {
153 Register out_lo = locations->Out().AsRegisterPairLow<Register>();
154 Register out_hi = locations->Out().AsRegisterPairHigh<Register>();
155
156 __ Mfc1(out_lo, in);
157 __ Mfhc1(out_hi, in);
158 } else {
159 Register out = locations->Out().AsRegister<Register>();
160
161 __ Mfc1(out, in);
162 }
163}
164
// long java.lang.Double.doubleToRawLongBits(double)
void IntrinsicLocationsBuilderMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
173
// int java.lang.Float.floatToRawIntBits(float)
void IntrinsicLocationsBuilderMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
182
183static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
184 LocationSummary* locations = new (arena) LocationSummary(invoke,
185 LocationSummary::kNoCall,
186 kIntrinsified);
187 locations->SetInAt(0, Location::RequiresRegister());
188 locations->SetOut(Location::RequiresFpuRegister());
189}
190
191static void MoveIntToFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {
192 FRegister out = locations->Out().AsFpuRegister<FRegister>();
193
194 if (is64bit) {
195 Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
196 Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
197
198 __ Mtc1(in_lo, out);
199 __ Mthc1(in_hi, out);
200 } else {
201 Register in = locations->InAt(0).AsRegister<Register>();
202
203 __ Mtc1(in, out);
204 }
205}
206
// double java.lang.Double.longBitsToDouble(long)
void IntrinsicLocationsBuilderMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
215
// float java.lang.Float.intBitsToFloat(int)
void IntrinsicLocationsBuilderMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
224
225static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
226 LocationSummary* locations = new (arena) LocationSummary(invoke,
227 LocationSummary::kNoCall,
228 kIntrinsified);
229 locations->SetInAt(0, Location::RequiresRegister());
230 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
231}
232
// Emits code that reverses the byte order of a 16-, 32- or 64-bit value.
// On MIPS32r2 and newer the dedicated WSBH/SEH/ROTR instructions are used;
// on MIPS32r1 the same result is synthesized from shifts, masks and ORs
// using only the AT and TMP scratch registers. Because the locations were
// created with kNoOutputOverlap, the output may alias the input, so the r1
// sequences are careful to finish reading inputs before writing outputs.
static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            MipsAssembler* assembler,
                            bool isR2OrNewer) {
  DCHECK(type == Primitive::kPrimShort ||
         type == Primitive::kPrimInt ||
         type == Primitive::kPrimLong);

  if (type == Primitive::kPrimShort) {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    if (isR2OrNewer) {
      // Swap bytes within each halfword, then sign-extend the low halfword.
      __ Wsbh(out, in);
      __ Seh(out, out);
    } else {
      // Build the sign-extended, byte-swapped halfword from shifts.
      __ Sll(TMP, in, 24);
      __ Sra(TMP, TMP, 16);
      __ Sll(out, in, 16);
      __ Srl(out, out, 24);
      __ Or(out, out, TMP);
    }
  } else if (type == Primitive::kPrimInt) {
    Register in = locations->InAt(0).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();

    if (isR2OrNewer) {
      // Rotate halfwords, then swap bytes within each halfword.
      __ Rotr(out, in, 16);
      __ Wsbh(out, out);
    } else {
      // MIPS32r1
      // __ Rotr(out, in, 16);
      __ Sll(TMP, in, 16);
      __ Srl(out, in, 16);
      __ Or(out, out, TMP);
      // __ Wsbh(out, out);
      __ LoadConst32(AT, 0x00FF00FF);
      __ And(TMP, out, AT);
      __ Sll(TMP, TMP, 8);
      __ Srl(out, out, 8);
      __ And(out, out, AT);
      __ Or(out, out, TMP);
    }
  } else if (type == Primitive::kPrimLong) {
    Register in_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Register out_lo = locations->Out().AsRegisterPairLow<Register>();
    Register out_hi = locations->Out().AsRegisterPairHigh<Register>();

    if (isR2OrNewer) {
      // The halves swap places: the byte-reversed high input word becomes
      // the low output word and vice versa.
      __ Rotr(AT, in_hi, 16);
      __ Rotr(TMP, in_lo, 16);
      __ Wsbh(out_lo, AT);
      __ Wsbh(out_hi, TMP);
    } else {
      // When calling CreateIntToIntLocations() we promised that the
      // use of the out_lo/out_hi wouldn't overlap with the use of
      // in_lo/in_hi. Be very careful not to write to out_lo/out_hi
      // until we're completely done reading from in_lo/in_hi.
      // __ Rotr(TMP, in_lo, 16);
      __ Sll(TMP, in_lo, 16);
      __ Srl(AT, in_lo, 16);
      __ Or(TMP, TMP, AT);             // Hold in TMP until it's safe
                                       // to write to out_hi.
      // __ Rotr(out_lo, in_hi, 16);
      __ Sll(AT, in_hi, 16);
      __ Srl(out_lo, in_hi, 16);       // Here we are finally done reading
                                       // from in_lo/in_hi so it's okay to
                                       // write to out_lo/out_hi.
      __ Or(out_lo, out_lo, AT);
      // __ Wsbh(out_hi, out_hi);
      __ LoadConst32(AT, 0x00FF00FF);
      __ And(out_hi, TMP, AT);
      __ Sll(out_hi, out_hi, 8);
      __ Srl(TMP, TMP, 8);
      __ And(TMP, TMP, AT);
      __ Or(out_hi, out_hi, TMP);
      // __ Wsbh(out_lo, out_lo);
      __ And(TMP, out_lo, AT);  // AT already holds the correct mask value
      __ Sll(TMP, TMP, 8);
      __ Srl(out_lo, out_lo, 8);
      __ And(out_lo, out_lo, AT);
      __ Or(out_lo, out_lo, TMP);
    }
  }
}
319
// int java.lang.Integer.reverseBytes(int)
void IntrinsicLocationsBuilderMIPS::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitIntegerReverseBytes(HInvoke* invoke) {
  // The ISA revision selects between the WSBH/ROTR and shift-and-mask paths.
  GenReverseBytes(invoke->GetLocations(),
                  Primitive::kPrimInt,
                  GetAssembler(),
                  codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2());
}
331
// long java.lang.Long.reverseBytes(long)
void IntrinsicLocationsBuilderMIPS::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitLongReverseBytes(HInvoke* invoke) {
  // The ISA revision selects between the WSBH/ROTR and shift-and-mask paths.
  GenReverseBytes(invoke->GetLocations(),
                  Primitive::kPrimLong,
                  GetAssembler(),
                  codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2());
}
343
// short java.lang.Short.reverseBytes(short)
void IntrinsicLocationsBuilderMIPS::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorMIPS::VisitShortReverseBytes(HInvoke* invoke) {
  // The ISA revision selects between the WSBH/SEH and shift-and-mask paths.
  GenReverseBytes(invoke->GetLocations(),
                  Primitive::kPrimShort,
                  GetAssembler(),
                  codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2());
}
355
// boolean java.lang.String.equals(Object anObject)
void IntrinsicLocationsBuilderMIPS::VisitStringEquals(HInvoke* invoke) {
  // Receiver, argument, and the boolean result all live in core registers.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());

  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}
370
// Emits an inline implementation of String.equals(): reference/null/class/
// length checks first, then a word-at-a-time comparison of the character
// arrays (2 UTF-16 chars per 4-byte word).
void IntrinsicCodeGeneratorMIPS::VisitStringEquals(HInvoke* invoke) {
  MipsAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp1 = locations->GetTemp(0).AsRegister<Register>();
  Register temp2 = locations->GetTemp(1).AsRegister<Register>();
  Register temp3 = locations->GetTemp(2).AsRegister<Register>();

  MipsLabel loop;
  MipsLabel end;
  MipsLabel return_true;
  MipsLabel return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // If the register containing the pointer to "this", and the register
  // containing the pointer to "anObject" are the same register then
  // "this", and "anObject" are the same object and we can
  // short-circuit the logic to a true result.
  if (str == arg) {
    __ LoadConst32(out, 1);
    return;
  }

  // Check if input is null, return false if it is.
  __ Beqz(arg, &return_false);

  // Reference equality check, return true if same reference.
  __ Beq(str, arg, &return_true);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ Lw(temp1, str, class_offset);
  __ Lw(temp2, arg, class_offset);
  __ Bne(temp1, temp2, &return_false);

  // Load lengths of this and argument strings.
  __ Lw(temp1, str, count_offset);
  __ Lw(temp2, arg, count_offset);
  // Check if lengths are equal, return false if they're not.
  __ Bne(temp1, temp2, &return_false);
  // Return true if both strings are empty.
  __ Beqz(temp1, &return_true);

  // Don't overwrite input registers
  __ Move(TMP, str);
  __ Move(temp3, arg);

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  // Loop to compare strings 2 characters at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded.
  // Note: |out| serves as a scratch register inside the loop; it only gets
  // its final 0/1 value at the return_true/return_false labels below.
  __ Bind(&loop);
  __ Lw(out, TMP, value_offset);
  __ Lw(temp2, temp3, value_offset);
  __ Bne(out, temp2, &return_false);
  __ Addiu(TMP, TMP, 4);
  __ Addiu(temp3, temp3, 4);
  // temp1 counts remaining chars; each iteration consumes 2 of them.
  __ Addiu(temp1, temp1, -2);
  __ Bgtz(temp1, &loop);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadConst32(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadConst32(out, 0);
  __ Bind(&end);
}
457
// Unimplemented intrinsics.

// Generates empty Visit methods for both the locations builder and the code
// generator. An invoke whose builder leaves no intrinsified locations falls
// back to a regular method call.
#define UNIMPLEMENTED_INTRINSIC(Name) \
void IntrinsicLocationsBuilderMIPS::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
} \
void IntrinsicCodeGeneratorMIPS::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}

UNIMPLEMENTED_INTRINSIC(IntegerReverse)
UNIMPLEMENTED_INTRINSIC(LongReverse)
UNIMPLEMENTED_INTRINSIC(LongNumberOfLeadingZeros)
UNIMPLEMENTED_INTRINSIC(IntegerNumberOfLeadingZeros)
UNIMPLEMENTED_INTRINSIC(MathAbsDouble)
UNIMPLEMENTED_INTRINSIC(MathAbsFloat)
UNIMPLEMENTED_INTRINSIC(MathAbsInt)
UNIMPLEMENTED_INTRINSIC(MathAbsLong)
UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMinIntInt)
UNIMPLEMENTED_INTRINSIC(MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(MathMaxIntInt)
UNIMPLEMENTED_INTRINSIC(MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(MathSqrt)
UNIMPLEMENTED_INTRINSIC(MathCeil)
UNIMPLEMENTED_INTRINSIC(MathFloor)
UNIMPLEMENTED_INTRINSIC(MathRint)
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)
UNIMPLEMENTED_INTRINSIC(MemoryPeekByte)
UNIMPLEMENTED_INTRINSIC(MemoryPeekIntNative)
UNIMPLEMENTED_INTRINSIC(MemoryPeekLongNative)
UNIMPLEMENTED_INTRINSIC(MemoryPeekShortNative)
UNIMPLEMENTED_INTRINSIC(MemoryPokeByte)
UNIMPLEMENTED_INTRINSIC(MemoryPokeIntNative)
UNIMPLEMENTED_INTRINSIC(MemoryPokeLongNative)
UNIMPLEMENTED_INTRINSIC(MemoryPokeShortNative)
UNIMPLEMENTED_INTRINSIC(ThreadCurrentThread)
UNIMPLEMENTED_INTRINSIC(UnsafeGet)
UNIMPLEMENTED_INTRINSIC(UnsafeGetVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafeGetLong)
UNIMPLEMENTED_INTRINSIC(UnsafeGetLongVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafeGetObject)
UNIMPLEMENTED_INTRINSIC(UnsafeGetObjectVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafePut)
UNIMPLEMENTED_INTRINSIC(UnsafePutOrdered)
UNIMPLEMENTED_INTRINSIC(UnsafePutVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafePutObject)
UNIMPLEMENTED_INTRINSIC(UnsafePutObjectOrdered)
UNIMPLEMENTED_INTRINSIC(UnsafePutObjectVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafePutLong)
UNIMPLEMENTED_INTRINSIC(UnsafePutLongOrdered)
UNIMPLEMENTED_INTRINSIC(UnsafePutLongVolatile)
UNIMPLEMENTED_INTRINSIC(UnsafeCASInt)
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)
UNIMPLEMENTED_INTRINSIC(UnsafeCASObject)
UNIMPLEMENTED_INTRINSIC(StringCharAt)
UNIMPLEMENTED_INTRINSIC(StringCompareTo)
UNIMPLEMENTED_INTRINSIC(StringIndexOf)
UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(StringNewStringFromBytes)
UNIMPLEMENTED_INTRINSIC(StringNewStringFromChars)
UNIMPLEMENTED_INTRINSIC(StringNewStringFromString)
UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
UNIMPLEMENTED_INTRINSIC(LongRotateRight)
UNIMPLEMENTED_INTRINSIC(LongNumberOfTrailingZeros)
UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
UNIMPLEMENTED_INTRINSIC(IntegerNumberOfTrailingZeros)

UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)

#undef UNIMPLEMENTED_INTRINSIC
535
536#undef __
537
538} // namespace mips
539} // namespace art