blob: 1a895679913b190d086c8e6674b08adae143b019 [file] [log] [blame]
xueliang.zhongf7caf682017-03-01 16:07:02 +00001/*
2 * Copyright (C) 2017 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "arch/arm/instruction_set_features_arm.h"
18#include "code_generator_utils.h"
19#include "common_arm.h"
20#include "mirror/array-inl.h"
21#include "scheduler_arm.h"
22
23namespace art {
24namespace arm {
25
26using helpers::Int32ConstantFrom;
27using helpers::Uint64ConstantFrom;
28
29void SchedulingLatencyVisitorARM::HandleBinaryOperationLantencies(HBinaryOperation* instr) {
30 switch (instr->GetResultType()) {
31 case Primitive::kPrimLong:
32 // HAdd and HSub long operations translate to ADDS+ADC or SUBS+SBC pairs,
33 // so a bubble (kArmNopLatency) is added to represent the internal carry flag
34 // dependency inside these pairs.
35 last_visited_internal_latency_ = kArmIntegerOpLatency + kArmNopLatency;
36 last_visited_latency_ = kArmIntegerOpLatency;
37 break;
38 case Primitive::kPrimFloat:
39 case Primitive::kPrimDouble:
40 last_visited_latency_ = kArmFloatingPointOpLatency;
41 break;
42 default:
43 last_visited_latency_ = kArmIntegerOpLatency;
44 break;
45 }
46}
47
void SchedulingLatencyVisitorARM::VisitAdd(HAdd* instr) {
  // Adds share the ADD/SUB latency model (the "Lantencies" typo comes from the header).
  HandleBinaryOperationLantencies(instr);
}
51
void SchedulingLatencyVisitorARM::VisitSub(HSub* instr) {
  // Subs share the ADD/SUB latency model.
  HandleBinaryOperationLantencies(instr);
}
55
56void SchedulingLatencyVisitorARM::VisitMul(HMul* instr) {
57 switch (instr->GetResultType()) {
58 case Primitive::kPrimLong:
59 last_visited_internal_latency_ = 3 * kArmMulIntegerLatency;
60 last_visited_latency_ = kArmIntegerOpLatency;
61 break;
62 case Primitive::kPrimFloat:
63 case Primitive::kPrimDouble:
64 last_visited_latency_ = kArmMulFloatingPointLatency;
65 break;
66 default:
67 last_visited_latency_ = kArmMulIntegerLatency;
68 break;
69 }
70}
71
72void SchedulingLatencyVisitorARM::HandleBitwiseOperationLantencies(HBinaryOperation* instr) {
73 switch (instr->GetResultType()) {
74 case Primitive::kPrimLong:
75 last_visited_internal_latency_ = kArmIntegerOpLatency;
76 last_visited_latency_ = kArmIntegerOpLatency;
77 break;
78 case Primitive::kPrimFloat:
79 case Primitive::kPrimDouble:
80 last_visited_latency_ = kArmFloatingPointOpLatency;
81 break;
82 default:
83 last_visited_latency_ = kArmIntegerOpLatency;
84 break;
85 }
86}
87
void SchedulingLatencyVisitorARM::VisitAnd(HAnd* instr) {
  // Bitwise AND shares the AND/OR/XOR latency model.
  HandleBitwiseOperationLantencies(instr);
}
91
void SchedulingLatencyVisitorARM::VisitOr(HOr* instr) {
  // Bitwise OR shares the AND/OR/XOR latency model.
  HandleBitwiseOperationLantencies(instr);
}
95
void SchedulingLatencyVisitorARM::VisitXor(HXor* instr) {
  // Bitwise XOR shares the AND/OR/XOR latency model.
  HandleBitwiseOperationLantencies(instr);
}
99
100void SchedulingLatencyVisitorARM::VisitRor(HRor* instr) {
101 switch (instr->GetResultType()) {
102 case Primitive::kPrimInt:
103 last_visited_latency_ = kArmIntegerOpLatency;
104 break;
105 case Primitive::kPrimLong: {
106 // HandleLongRotate
107 HInstruction* rhs = instr->GetRight();
108 if (rhs->IsConstant()) {
109 uint64_t rot = Uint64ConstantFrom(rhs->AsConstant()) & kMaxLongShiftDistance;
110 if (rot != 0u) {
111 last_visited_internal_latency_ = 3 * kArmIntegerOpLatency;
112 last_visited_latency_ = kArmIntegerOpLatency;
113 } else {
114 last_visited_internal_latency_ = kArmIntegerOpLatency;
115 last_visited_latency_ = kArmIntegerOpLatency;
116 }
117 } else {
118 last_visited_internal_latency_ = 9 * kArmIntegerOpLatency + kArmBranchLatency;
119 last_visited_latency_ = kArmBranchLatency;
120 }
121 break;
122 }
123 default:
124 LOG(FATAL) << "Unexpected operation type " << instr->GetResultType();
125 UNREACHABLE();
126 }
127}
128
129void SchedulingLatencyVisitorARM::HandleShiftLatencies(HBinaryOperation* instr) {
130 Primitive::Type type = instr->GetResultType();
131 HInstruction* rhs = instr->GetRight();
132 switch (type) {
133 case Primitive::kPrimInt:
134 if (!rhs->IsConstant()) {
135 last_visited_internal_latency_ = kArmIntegerOpLatency;
136 }
137 last_visited_latency_ = kArmIntegerOpLatency;
138 break;
139 case Primitive::kPrimLong:
140 if (!rhs->IsConstant()) {
141 last_visited_internal_latency_ = 8 * kArmIntegerOpLatency;
142 } else {
143 uint32_t shift_value = Int32ConstantFrom(rhs->AsConstant()) & kMaxLongShiftDistance;
144 if (shift_value == 1 || shift_value >= 32) {
145 last_visited_internal_latency_ = kArmIntegerOpLatency;
146 } else {
147 last_visited_internal_latency_ = 2 * kArmIntegerOpLatency;
148 }
149 }
150 last_visited_latency_ = kArmIntegerOpLatency;
151 break;
152 default:
153 LOG(FATAL) << "Unexpected operation type " << type;
154 UNREACHABLE();
155 }
156}
157
void SchedulingLatencyVisitorARM::VisitShl(HShl* instr) {
  // Left shift shares the shift latency model.
  HandleShiftLatencies(instr);
}
161
void SchedulingLatencyVisitorARM::VisitShr(HShr* instr) {
  // Arithmetic right shift shares the shift latency model.
  HandleShiftLatencies(instr);
}
165
void SchedulingLatencyVisitorARM::VisitUShr(HUShr* instr) {
  // Logical right shift shares the shift latency model.
  HandleShiftLatencies(instr);
}
169
170void SchedulingLatencyVisitorARM::VisitCondition(HCondition* instr) {
171 switch (instr->GetLeft()->GetType()) {
172 case Primitive::kPrimLong:
173 last_visited_internal_latency_ = 4 * kArmIntegerOpLatency;
174 break;
175 case Primitive::kPrimFloat:
176 case Primitive::kPrimDouble:
177 last_visited_internal_latency_ = 2 * kArmFloatingPointOpLatency;
178 break;
179 default:
180 last_visited_internal_latency_ = 2 * kArmIntegerOpLatency;
181 break;
182 }
183 last_visited_latency_ = kArmIntegerOpLatency;
184}
185
186void SchedulingLatencyVisitorARM::VisitCompare(HCompare* instr) {
187 Primitive::Type type = instr->InputAt(0)->GetType();
188 switch (type) {
189 case Primitive::kPrimBoolean:
190 case Primitive::kPrimByte:
191 case Primitive::kPrimShort:
192 case Primitive::kPrimChar:
193 case Primitive::kPrimInt:
194 last_visited_internal_latency_ = 2 * kArmIntegerOpLatency;
195 break;
196 case Primitive::kPrimLong:
197 last_visited_internal_latency_ = 2 * kArmIntegerOpLatency + 3 * kArmBranchLatency;
198 break;
199 case Primitive::kPrimFloat:
200 case Primitive::kPrimDouble:
201 last_visited_internal_latency_ = kArmIntegerOpLatency + 2 * kArmFloatingPointOpLatency;
202 break;
203 default:
204 last_visited_internal_latency_ = 2 * kArmIntegerOpLatency;
205 break;
206 }
207 last_visited_latency_ = kArmIntegerOpLatency;
208}
209
210void SchedulingLatencyVisitorARM::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instruction) {
211 if (instruction->GetResultType() == Primitive::kPrimInt) {
212 last_visited_latency_ = kArmIntegerOpLatency;
213 } else {
214 last_visited_internal_latency_ = kArmIntegerOpLatency;
215 last_visited_latency_ = kArmIntegerOpLatency;
216 }
217}
218
219void SchedulingLatencyVisitorARM::HandleGenerateDataProcInstruction(bool internal_latency) {
220 if (internal_latency) {
221 last_visited_internal_latency_ += kArmIntegerOpLatency;
222 } else {
223 last_visited_latency_ = kArmDataProcWithShifterOpLatency;
224 }
225}
226
227void SchedulingLatencyVisitorARM::HandleGenerateDataProc(HDataProcWithShifterOp* instruction) {
228 const HInstruction::InstructionKind kind = instruction->GetInstrKind();
229 if (kind == HInstruction::kAdd) {
230 last_visited_internal_latency_ = kArmIntegerOpLatency;
231 last_visited_latency_ = kArmIntegerOpLatency;
232 } else if (kind == HInstruction::kSub) {
233 last_visited_internal_latency_ = kArmIntegerOpLatency;
234 last_visited_latency_ = kArmIntegerOpLatency;
235 } else {
236 HandleGenerateDataProcInstruction(/* internal_latency */ true);
237 HandleGenerateDataProcInstruction();
238 }
239}
240
// Latency model for 64-bit data-processing ops whose right operand carries a
// shift; mirrors the structure of the codegen helper of the same name.
void SchedulingLatencyVisitorARM::HandleGenerateLongDataProc(HDataProcWithShifterOp* instruction) {
  DCHECK_EQ(instruction->GetType(), Primitive::kPrimLong);
  DCHECK(HDataProcWithShifterOp::IsShiftOp(instruction->GetOpKind()));

  const uint32_t shift_value = instruction->GetShiftAmount();
  const HInstruction::InstructionKind kind = instruction->GetInstrKind();

  if (shift_value >= 32) {
    // Different shift types actually generate similar code here,
    // no need to differentiate shift types like the codegen pass does,
    // which also avoids handling shift types from different ARM backends.
    HandleGenerateDataProc(instruction);
  } else {
    // Shift amounts of 0 and 1 are simplified earlier, so only (1, 32) remains.
    DCHECK_GT(shift_value, 1U);
    DCHECK_LT(shift_value, 32U);

    if (kind == HInstruction::kOr || kind == HInstruction::kXor) {
      // ORR/EOR path: two internal data-proc ops, then the final op that
      // produces the visible result.
      HandleGenerateDataProcInstruction(/* internal_latency */ true);
      HandleGenerateDataProcInstruction(/* internal_latency */ true);
      HandleGenerateDataProcInstruction();
    } else {
      // Other kinds first materialize the shifted 64-bit operand (two integer
      // ops), then fall back to the generic data-proc latency.
      last_visited_internal_latency_ += 2 * kArmIntegerOpLatency;
      HandleGenerateDataProc(instruction);
    }
  }
}
267
268void SchedulingLatencyVisitorARM::VisitDataProcWithShifterOp(HDataProcWithShifterOp* instruction) {
269 const HDataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();
270
271 if (instruction->GetType() == Primitive::kPrimInt) {
272 DCHECK(!HDataProcWithShifterOp::IsExtensionOp(op_kind));
273 HandleGenerateDataProcInstruction();
274 } else {
275 DCHECK_EQ(instruction->GetType(), Primitive::kPrimLong);
276 if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
277 HandleGenerateDataProc(instruction);
278 } else {
279 HandleGenerateLongDataProc(instruction);
280 }
281 }
282}
283
void SchedulingLatencyVisitorARM::VisitIntermediateAddress(HIntermediateAddress* ATTRIBUTE_UNUSED) {
  // Although the code generated is a simple `add` instruction, we found through empirical results
  // that spacing it from its use in memory accesses was beneficial.
  // The extra kArmNopLatency is that deliberate spacing bubble.
  last_visited_internal_latency_ = kArmNopLatency;
  last_visited_latency_ = kArmIntegerOpLatency;
}
290
void SchedulingLatencyVisitorARM::VisitMultiplyAccumulate(HMultiplyAccumulate* ATTRIBUTE_UNUSED) {
  // Fused multiply-accumulate is modeled as a single integer multiply.
  last_visited_latency_ = kArmMulIntegerLatency;
}
294
295void SchedulingLatencyVisitorARM::VisitArrayGet(HArrayGet* instruction) {
296 Primitive::Type type = instruction->GetType();
297 const bool maybe_compressed_char_at =
298 mirror::kUseStringCompression && instruction->IsStringCharAt();
299 HInstruction* array_instr = instruction->GetArray();
300 bool has_intermediate_address = array_instr->IsIntermediateAddress();
301 HInstruction* index = instruction->InputAt(1);
302
303 switch (type) {
304 case Primitive::kPrimBoolean:
305 case Primitive::kPrimByte:
306 case Primitive::kPrimShort:
307 case Primitive::kPrimChar:
308 case Primitive::kPrimInt: {
309 if (maybe_compressed_char_at) {
310 last_visited_internal_latency_ += kArmMemoryLoadLatency;
311 }
312 if (index->IsConstant()) {
313 if (maybe_compressed_char_at) {
314 last_visited_internal_latency_ +=
315 kArmIntegerOpLatency + kArmBranchLatency + kArmMemoryLoadLatency;
316 last_visited_latency_ = kArmBranchLatency;
317 } else {
318 last_visited_latency_ += kArmMemoryLoadLatency;
319 }
320 } else {
321 if (has_intermediate_address) {
322 } else {
323 last_visited_internal_latency_ += kArmIntegerOpLatency;
324 }
325 if (maybe_compressed_char_at) {
326 last_visited_internal_latency_ +=
327 kArmIntegerOpLatency + kArmBranchLatency + kArmMemoryLoadLatency;
328 last_visited_latency_ = kArmBranchLatency;
329 } else {
330 last_visited_latency_ += kArmMemoryLoadLatency;
331 }
332 }
333 break;
334 }
335
336 case Primitive::kPrimNot: {
337 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
338 last_visited_latency_ = kArmLoadWithBakerReadBarrierLatency;
339 } else {
340 if (index->IsConstant()) {
341 last_visited_latency_ = kArmMemoryLoadLatency;
342 } else {
343 if (has_intermediate_address) {
344 } else {
345 last_visited_internal_latency_ += kArmIntegerOpLatency;
346 }
347 last_visited_internal_latency_ = kArmMemoryLoadLatency;
348 }
349 }
350 break;
351 }
352
353 case Primitive::kPrimLong: {
354 if (index->IsConstant()) {
355 last_visited_latency_ = kArmMemoryLoadLatency;
356 } else {
357 last_visited_internal_latency_ += kArmIntegerOpLatency;
358 last_visited_latency_ = kArmMemoryLoadLatency;
359 }
360 break;
361 }
362
363 case Primitive::kPrimFloat: {
364 if (index->IsConstant()) {
365 last_visited_latency_ = kArmMemoryLoadLatency;
366 } else {
367 last_visited_internal_latency_ += kArmIntegerOpLatency;
368 last_visited_latency_ = kArmMemoryLoadLatency;
369 }
370 break;
371 }
372
373 case Primitive::kPrimDouble: {
374 if (index->IsConstant()) {
375 last_visited_latency_ = kArmMemoryLoadLatency;
376 } else {
377 last_visited_internal_latency_ += kArmIntegerOpLatency;
378 last_visited_latency_ = kArmMemoryLoadLatency;
379 }
380 break;
381 }
382
383 default:
384 LOG(FATAL) << "Unreachable type " << type;
385 UNREACHABLE();
386 }
387}
388
389void SchedulingLatencyVisitorARM::VisitArrayLength(HArrayLength* instruction) {
390 last_visited_latency_ = kArmMemoryLoadLatency;
391 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
392 last_visited_internal_latency_ = kArmMemoryLoadLatency;
393 last_visited_latency_ = kArmIntegerOpLatency;
394 }
395}
396
397void SchedulingLatencyVisitorARM::VisitArraySet(HArraySet* instruction) {
398 HInstruction* index = instruction->InputAt(1);
399 Primitive::Type value_type = instruction->GetComponentType();
400 HInstruction* array_instr = instruction->GetArray();
401 bool has_intermediate_address = array_instr->IsIntermediateAddress();
402
403 switch (value_type) {
404 case Primitive::kPrimBoolean:
405 case Primitive::kPrimByte:
406 case Primitive::kPrimShort:
407 case Primitive::kPrimChar:
408 case Primitive::kPrimInt: {
409 if (index->IsConstant()) {
410 last_visited_latency_ = kArmMemoryStoreLatency;
411 } else {
412 if (has_intermediate_address) {
413 } else {
414 last_visited_internal_latency_ = kArmIntegerOpLatency;
415 }
416 last_visited_latency_ = kArmMemoryStoreLatency;
417 }
418 break;
419 }
420
421 case Primitive::kPrimNot: {
422 if (instruction->InputAt(2)->IsNullConstant()) {
423 if (index->IsConstant()) {
424 last_visited_latency_ = kArmMemoryStoreLatency;
425 } else {
426 last_visited_internal_latency_ = kArmIntegerOpLatency;
427 last_visited_latency_ = kArmMemoryStoreLatency;
428 }
429 } else {
430 // Following the exact instructions of runtime type checks is too complicated,
431 // just giving it a simple slow latency.
432 last_visited_latency_ = kArmRuntimeTypeCheckLatency;
433 }
434 break;
435 }
436
437 case Primitive::kPrimLong: {
438 if (index->IsConstant()) {
439 last_visited_latency_ = kArmMemoryLoadLatency;
440 } else {
441 last_visited_internal_latency_ = kArmIntegerOpLatency;
442 last_visited_latency_ = kArmMemoryLoadLatency;
443 }
444 break;
445 }
446
447 case Primitive::kPrimFloat: {
448 if (index->IsConstant()) {
449 last_visited_latency_ = kArmMemoryLoadLatency;
450 } else {
451 last_visited_internal_latency_ = kArmIntegerOpLatency;
452 last_visited_latency_ = kArmMemoryLoadLatency;
453 }
454 break;
455 }
456
457 case Primitive::kPrimDouble: {
458 if (index->IsConstant()) {
459 last_visited_latency_ = kArmMemoryLoadLatency;
460 } else {
461 last_visited_internal_latency_ = kArmIntegerOpLatency;
462 last_visited_latency_ = kArmMemoryLoadLatency;
463 }
464 break;
465 }
466
467 default:
468 LOG(FATAL) << "Unreachable type " << value_type;
469 UNREACHABLE();
470 }
471}
472
void SchedulingLatencyVisitorARM::VisitBoundsCheck(HBoundsCheck* ATTRIBUTE_UNUSED) {
  // Compare plus conditional branch to the slow path.
  last_visited_internal_latency_ = kArmIntegerOpLatency;
  // Users do not use any data results.
  last_visited_latency_ = 0;
}
478
479void SchedulingLatencyVisitorARM::HandleDivRemConstantIntegralLatencies(int32_t imm) {
480 if (imm == 0) {
481 last_visited_internal_latency_ = 0;
482 last_visited_latency_ = 0;
483 } else if (imm == 1 || imm == -1) {
484 last_visited_latency_ = kArmIntegerOpLatency;
485 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
486 last_visited_internal_latency_ = 3 * kArmIntegerOpLatency;
487 last_visited_latency_ = kArmIntegerOpLatency;
488 } else {
489 last_visited_internal_latency_ = kArmMulIntegerLatency + 2 * kArmIntegerOpLatency;
490 last_visited_latency_ = kArmIntegerOpLatency;
491 }
492}
493
494void SchedulingLatencyVisitorARM::VisitDiv(HDiv* instruction) {
495 Primitive::Type type = instruction->GetResultType();
496 switch (type) {
497 case Primitive::kPrimInt: {
498 HInstruction* rhs = instruction->GetRight();
499 if (rhs->IsConstant()) {
500 int32_t imm = Int32ConstantFrom(rhs->AsConstant());
501 HandleDivRemConstantIntegralLatencies(imm);
502 } else {
503 last_visited_latency_ = kArmDivIntegerLatency;
504 }
505 break;
506 }
507 case Primitive::kPrimFloat:
508 last_visited_latency_ = kArmDivFloatLatency;
509 break;
510 case Primitive::kPrimDouble:
511 last_visited_latency_ = kArmDivDoubleLatency;
512 break;
513 default:
514 last_visited_internal_latency_ = kArmCallInternalLatency;
515 last_visited_latency_ = kArmCallLatency;
516 break;
517 }
518}
519
void SchedulingLatencyVisitorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Instance and static field loads share one latency model.
  HandleFieldGetLatencies(instruction, instruction->GetFieldInfo());
}
523
void SchedulingLatencyVisitorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Instance and static field stores share one latency model.
  HandleFieldSetLatencies(instruction, instruction->GetFieldInfo());
}
527
void SchedulingLatencyVisitorARM::VisitInstanceOf(HInstanceOf* ATTRIBUTE_UNUSED) {
  // Modeled as call-like internal work producing an integer (boolean) result.
  last_visited_internal_latency_ = kArmCallInternalLatency;
  last_visited_latency_ = kArmIntegerOpLatency;
}
532
void SchedulingLatencyVisitorARM::VisitInvoke(HInvoke* ATTRIBUTE_UNUSED) {
  // All invoke flavors are modeled as a plain call.
  last_visited_internal_latency_ = kArmCallInternalLatency;
  last_visited_latency_ = kArmCallLatency;
}
537
void SchedulingLatencyVisitorARM::VisitLoadString(HLoadString* ATTRIBUTE_UNUSED) {
  // String-resolution preamble followed by the final reference load.
  last_visited_internal_latency_ = kArmLoadStringInternalLatency;
  last_visited_latency_ = kArmMemoryLoadLatency;
}
542
void SchedulingLatencyVisitorARM::VisitNewArray(HNewArray* ATTRIBUTE_UNUSED) {
  // Length setup (one integer op) plus the allocation runtime call.
  last_visited_internal_latency_ = kArmIntegerOpLatency + kArmCallInternalLatency;
  last_visited_latency_ = kArmCallLatency;
}
547
548void SchedulingLatencyVisitorARM::VisitNewInstance(HNewInstance* instruction) {
549 if (instruction->IsStringAlloc()) {
550 last_visited_internal_latency_ = 2 * kArmMemoryLoadLatency + kArmCallInternalLatency;
551 } else {
552 last_visited_internal_latency_ = kArmCallInternalLatency;
553 }
554 last_visited_latency_ = kArmCallLatency;
555}
556
557void SchedulingLatencyVisitorARM::VisitRem(HRem* instruction) {
558 Primitive::Type type = instruction->GetResultType();
559 switch (type) {
560 case Primitive::kPrimInt: {
561 HInstruction* rhs = instruction->GetRight();
562 if (rhs->IsConstant()) {
563 int32_t imm = Int32ConstantFrom(rhs->AsConstant());
564 HandleDivRemConstantIntegralLatencies(imm);
565 } else {
566 last_visited_internal_latency_ = kArmDivIntegerLatency;
567 last_visited_latency_ = kArmMulIntegerLatency;
568 }
569 break;
570 }
571 default:
572 last_visited_internal_latency_ = kArmCallInternalLatency;
573 last_visited_latency_ = kArmCallLatency;
574 break;
575 }
576}
577
// Shared latency model for instance/static field loads. Volatile accesses get
// a trailing memory-barrier latency; volatile 64-bit loads without atomic
// LDRD support need a longer internal sequence.
void SchedulingLatencyVisitorARM::HandleFieldGetLatencies(HInstruction* instruction,
                                                          const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  DCHECK(codegen_ != nullptr);
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
      last_visited_latency_ = kArmMemoryLoadLatency;
      break;

    case Primitive::kPrimNot:
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Baker read barrier: extra load + check before the reference load.
        last_visited_internal_latency_ = kArmMemoryLoadLatency + kArmIntegerOpLatency;
        last_visited_latency_ = kArmMemoryLoadLatency;
      } else {
        last_visited_latency_ = kArmMemoryLoadLatency;
      }
      break;

    case Primitive::kPrimLong:
      if (is_volatile && !atomic_ldrd_strd) {
        // Volatile 64-bit load without atomic LDRD: extra internal work.
        last_visited_internal_latency_ = kArmMemoryLoadLatency + kArmIntegerOpLatency;
        last_visited_latency_ = kArmMemoryLoadLatency;
      } else {
        last_visited_latency_ = kArmMemoryLoadLatency;
      }
      break;

    case Primitive::kPrimFloat:
      last_visited_latency_ = kArmMemoryLoadLatency;
      break;

    case Primitive::kPrimDouble:
      if (is_volatile && !atomic_ldrd_strd) {
        // Volatile double goes through core registers, ending with a
        // core-to-FP transfer (modeled as an integer op).
        last_visited_internal_latency_ =
            kArmMemoryLoadLatency + kArmIntegerOpLatency + kArmMemoryLoadLatency;
        last_visited_latency_ = kArmIntegerOpLatency;
      } else {
        last_visited_latency_ = kArmMemoryLoadLatency;
      }
      break;

    default:
      last_visited_latency_ = kArmMemoryLoadLatency;
      break;
  }

  if (is_volatile) {
    // Acquire barrier after the load.
    last_visited_internal_latency_ += kArmMemoryBarrierLatency;
  }
}
636
// Shared latency model for instance/static field stores. Volatile stores and
// heap-reference poisoning add internal work; volatile 64-bit stores without
// atomic STRD need a load-exclusive/store-exclusive style sequence.
void SchedulingLatencyVisitorARM::HandleFieldSetLatencies(HInstruction* instruction,
                                                          const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
  DCHECK(codegen_ != nullptr);
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
      if (is_volatile) {
        // Barrier before and after the narrow store.
        last_visited_internal_latency_ = kArmMemoryBarrierLatency + kArmMemoryStoreLatency;
        last_visited_latency_ = kArmMemoryBarrierLatency;
      } else {
        last_visited_latency_ = kArmMemoryStoreLatency;
      }
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Poisoning the reference before the store costs two integer ops.
        last_visited_internal_latency_ += kArmIntegerOpLatency * 2;
      }
      last_visited_latency_ = kArmMemoryStoreLatency;
      break;

    case Primitive::kPrimLong:
      if (is_volatile && !atomic_ldrd_strd) {
        // Volatile 64-bit store without atomic STRD: exclusive-access loop.
        last_visited_internal_latency_ =
            kArmIntegerOpLatency + kArmMemoryLoadLatency + kArmMemoryStoreLatency;
        last_visited_latency_ = kArmIntegerOpLatency;
      } else {
        last_visited_latency_ = kArmMemoryStoreLatency;
      }
      break;

    case Primitive::kPrimFloat:
      last_visited_latency_ = kArmMemoryStoreLatency;
      break;

    case Primitive::kPrimDouble:
      if (is_volatile && !atomic_ldrd_strd) {
        // Volatile double: FP-to-core transfer plus the exclusive-access loop.
        last_visited_internal_latency_ = kArmIntegerOpLatency +
            kArmIntegerOpLatency + kArmMemoryLoadLatency + kArmMemoryStoreLatency;
        last_visited_latency_ = kArmIntegerOpLatency;
      } else {
        last_visited_latency_ = kArmMemoryStoreLatency;
      }
      break;

    default:
      last_visited_latency_ = kArmMemoryStoreLatency;
      break;
  }
}
697
void SchedulingLatencyVisitorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static field loads share the instance-field latency model.
  HandleFieldGetLatencies(instruction, instruction->GetFieldInfo());
}
701
void SchedulingLatencyVisitorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Static field stores share the instance-field latency model.
  HandleFieldSetLatencies(instruction, instruction->GetFieldInfo());
}
705
void SchedulingLatencyVisitorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  // Suspend checks only occur in loops or as the entry block's first
  // instruction (immediately followed by a goto).
  DCHECK((block->GetLoopInformation() != nullptr) ||
         (block->IsEntryBlock() && instruction->GetNext()->IsGoto()));
  // Users do not use any data results.
  last_visited_latency_ = 0;
}
713
// Latency model for primitive type conversions, keyed on the
// (result_type, input_type) pair, mirroring the codegen lowering choices.
void SchedulingLatencyVisitorARM::VisitTypeConversion(HTypeConversion* instr) {
  Primitive::Type result_type = instr->GetResultType();
  Primitive::Type input_type = instr->GetInputType();

  switch (result_type) {
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      last_visited_latency_ = kArmIntegerOpLatency;  // SBFX or UBFX
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          last_visited_latency_ = kArmIntegerOpLatency;  // MOV
          break;
        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          // FP-to-int conversion plus the transfer back to a core register.
          last_visited_internal_latency_ = kArmTypeConversionFloatingPointIntegerLatency;
          last_visited_latency_ = kArmFloatingPointOpLatency;
          break;
        default:
          last_visited_latency_ = kArmIntegerOpLatency;
          break;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // MOV and extension
          last_visited_internal_latency_ = kArmIntegerOpLatency;
          last_visited_latency_ = kArmIntegerOpLatency;
          break;
        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          // invokes runtime
          last_visited_internal_latency_ = kArmCallInternalLatency;
          break;
        default:
          last_visited_internal_latency_ = kArmIntegerOpLatency;
          last_visited_latency_ = kArmIntegerOpLatency;
          break;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Core-to-FP transfer plus the conversion.
          last_visited_internal_latency_ = kArmTypeConversionFloatingPointIntegerLatency;
          last_visited_latency_ = kArmFloatingPointOpLatency;
          break;
        case Primitive::kPrimLong:
          // invokes runtime
          last_visited_internal_latency_ = kArmCallInternalLatency;
          break;
        case Primitive::kPrimDouble:
          last_visited_latency_ = kArmFloatingPointOpLatency;
          break;
        default:
          last_visited_latency_ = kArmFloatingPointOpLatency;
          break;
      }
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          last_visited_internal_latency_ = kArmTypeConversionFloatingPointIntegerLatency;
          last_visited_latency_ = kArmFloatingPointOpLatency;
          break;
        case Primitive::kPrimLong:
          // long-to-double is an inline multi-step FP sequence (no runtime call).
          last_visited_internal_latency_ = 5 * kArmFloatingPointOpLatency;
          last_visited_latency_ = kArmFloatingPointOpLatency;
          break;
        case Primitive::kPrimFloat:
          last_visited_latency_ = kArmFloatingPointOpLatency;
          break;
        default:
          last_visited_latency_ = kArmFloatingPointOpLatency;
          break;
      }
      break;

    default:
      last_visited_latency_ = kArmTypeConversionFloatingPointIntegerLatency;
      break;
  }
}
815
void SchedulingLatencyVisitorARM::VisitArmDexCacheArraysBase(art::HArmDexCacheArraysBase*) {
  // PC-relative base materialization: two integer ops (e.g. MOVW/MOVT pair —
  // presumed from the two-op cost; confirm against the codegen lowering).
  last_visited_internal_latency_ = kArmIntegerOpLatency;
  last_visited_latency_ = kArmIntegerOpLatency;
}
820
821} // namespace arm
822} // namespace art