/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "arch/arm/instruction_set_features_arm.h"
#include "code_generator_utils.h"
#include "common_arm.h"
#include "mirror/array-inl.h"
#include "scheduler_arm.h"

namespace art {
namespace arm {

using helpers::Int32ConstantFrom;
using helpers::Uint64ConstantFrom;

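// The visitor methods below estimate, for each HIR instruction, the cost the ARM
// instruction scheduler should assume for the code the ARM backends emit for it:
//   - last_visited_internal_latency_ models the instructions internal to the expansion
//     (their results are only consumed within the same expansion), and
//   - last_visited_latency_ models the latency of the final, result-producing instruction.
// The kArm*Latency constants are coarse heuristics rather than exact cycle counts for
// any particular core.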
void SchedulingLatencyVisitorARM::HandleBinaryOperationLantencies(HBinaryOperation* instr) {
  switch (instr->GetResultType()) {
    case Primitive::kPrimLong:
      // HAdd and HSub long operations translate to ADDS+ADC or SUBS+SBC pairs,
      // so a bubble (kArmNopLatency) is added to represent the internal carry flag
      // dependency inside these pairs.
      last_visited_internal_latency_ = kArmIntegerOpLatency + kArmNopLatency;
      last_visited_latency_ = kArmIntegerOpLatency;
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      last_visited_latency_ = kArmFloatingPointOpLatency;
      break;
    default:
      last_visited_latency_ = kArmIntegerOpLatency;
      break;
  }
}

void SchedulingLatencyVisitorARM::VisitAdd(HAdd* instr) {
  HandleBinaryOperationLantencies(instr);
}

void SchedulingLatencyVisitorARM::VisitSub(HSub* instr) {
  HandleBinaryOperationLantencies(instr);
}

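// A 64-bit multiply has no single instruction on ARM32; it is lowered to a short sequence
// of 32-bit multiplies plus an add (roughly UMULL/MLA-style), modelled here as three
// multiply latencies of internal work followed by one ordinary integer operation.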
void SchedulingLatencyVisitorARM::VisitMul(HMul* instr) {
  switch (instr->GetResultType()) {
    case Primitive::kPrimLong:
      last_visited_internal_latency_ = 3 * kArmMulIntegerLatency;
      last_visited_latency_ = kArmIntegerOpLatency;
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      last_visited_latency_ = kArmMulFloatingPointLatency;
      break;
    default:
      last_visited_latency_ = kArmMulIntegerLatency;
      break;
  }
}

void SchedulingLatencyVisitorARM::HandleBitwiseOperationLantencies(HBinaryOperation* instr) {
  switch (instr->GetResultType()) {
    case Primitive::kPrimLong:
      last_visited_internal_latency_ = kArmIntegerOpLatency;
      last_visited_latency_ = kArmIntegerOpLatency;
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      last_visited_latency_ = kArmFloatingPointOpLatency;
      break;
    default:
      last_visited_latency_ = kArmIntegerOpLatency;
      break;
  }
}

void SchedulingLatencyVisitorARM::VisitAnd(HAnd* instr) {
  HandleBitwiseOperationLantencies(instr);
}

void SchedulingLatencyVisitorARM::VisitOr(HOr* instr) {
  HandleBitwiseOperationLantencies(instr);
}

void SchedulingLatencyVisitorARM::VisitXor(HXor* instr) {
  HandleBitwiseOperationLantencies(instr);
}

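// For a 64-bit rotate the cost depends on the rotate amount: rotating by a non-zero
// constant is a handful of shifts and ORRs, rotating by zero degenerates to a register
// move pair, and rotating by a register-held amount needs a longer, branchy sequence.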
void SchedulingLatencyVisitorARM::VisitRor(HRor* instr) {
  switch (instr->GetResultType()) {
    case Primitive::kPrimInt:
      last_visited_latency_ = kArmIntegerOpLatency;
      break;
    case Primitive::kPrimLong: {
      // Mirrors HandleLongRotate in the code generator.
      HInstruction* rhs = instr->GetRight();
      if (rhs->IsConstant()) {
        uint64_t rot = Uint64ConstantFrom(rhs->AsConstant()) & kMaxLongShiftDistance;
        if (rot != 0u) {
          last_visited_internal_latency_ = 3 * kArmIntegerOpLatency;
          last_visited_latency_ = kArmIntegerOpLatency;
        } else {
          last_visited_internal_latency_ = kArmIntegerOpLatency;
          last_visited_latency_ = kArmIntegerOpLatency;
        }
      } else {
        last_visited_internal_latency_ = 9 * kArmIntegerOpLatency + kArmBranchLatency;
        last_visited_latency_ = kArmBranchLatency;
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << instr->GetResultType();
      UNREACHABLE();
  }
}

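// Long shifts by a register-held amount expand to a fairly long instruction sequence,
// while shifts by a constant need one or two extra instructions depending on whether the
// two result halves can be combined cheaply (a shift by 1, or by 32 or more).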
void SchedulingLatencyVisitorARM::HandleShiftLatencies(HBinaryOperation* instr) {
  Primitive::Type type = instr->GetResultType();
  HInstruction* rhs = instr->GetRight();
  switch (type) {
    case Primitive::kPrimInt:
      if (!rhs->IsConstant()) {
        last_visited_internal_latency_ = kArmIntegerOpLatency;
      }
      last_visited_latency_ = kArmIntegerOpLatency;
      break;
    case Primitive::kPrimLong:
      if (!rhs->IsConstant()) {
        last_visited_internal_latency_ = 8 * kArmIntegerOpLatency;
      } else {
        uint32_t shift_value = Int32ConstantFrom(rhs->AsConstant()) & kMaxLongShiftDistance;
        if (shift_value == 1 || shift_value >= 32) {
          last_visited_internal_latency_ = kArmIntegerOpLatency;
        } else {
          last_visited_internal_latency_ = 2 * kArmIntegerOpLatency;
        }
      }
      last_visited_latency_ = kArmIntegerOpLatency;
      break;
    default:
      LOG(FATAL) << "Unexpected operation type " << type;
      UNREACHABLE();
  }
}

void SchedulingLatencyVisitorARM::VisitShl(HShl* instr) {
  HandleShiftLatencies(instr);
}

void SchedulingLatencyVisitorARM::VisitShr(HShr* instr) {
  HandleShiftLatencies(instr);
}

void SchedulingLatencyVisitorARM::VisitUShr(HUShr* instr) {
  HandleShiftLatencies(instr);
}

void SchedulingLatencyVisitorARM::VisitCondition(HCondition* instr) {
  switch (instr->GetLeft()->GetType()) {
    case Primitive::kPrimLong:
      last_visited_internal_latency_ = 4 * kArmIntegerOpLatency;
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      last_visited_internal_latency_ = 2 * kArmFloatingPointOpLatency;
      break;
    default:
      last_visited_internal_latency_ = 2 * kArmIntegerOpLatency;
      break;
  }
  last_visited_latency_ = kArmIntegerOpLatency;
}

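// HCompare materializes a -1/0/1 result in a register. Long and floating-point inputs
// need a longer internal sequence (pairwise comparison with branches, or a VCMP/VMRS-style
// flag transfer) before the final integer move that produces the result.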
void SchedulingLatencyVisitorARM::VisitCompare(HCompare* instr) {
  Primitive::Type type = instr->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
      last_visited_internal_latency_ = 2 * kArmIntegerOpLatency;
      break;
    case Primitive::kPrimLong:
      last_visited_internal_latency_ = 2 * kArmIntegerOpLatency + 3 * kArmBranchLatency;
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      last_visited_internal_latency_ = kArmIntegerOpLatency + 2 * kArmFloatingPointOpLatency;
      break;
    default:
      last_visited_internal_latency_ = 2 * kArmIntegerOpLatency;
      break;
  }
  last_visited_latency_ = kArmIntegerOpLatency;
}

void SchedulingLatencyVisitorARM::VisitBitwiseNegatedRight(HBitwiseNegatedRight* instruction) {
  if (instruction->GetResultType() == Primitive::kPrimInt) {
    last_visited_latency_ = kArmIntegerOpLatency;
  } else {
    last_visited_internal_latency_ = kArmIntegerOpLatency;
    last_visited_latency_ = kArmIntegerOpLatency;
  }
}

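// The helpers below mirror the corresponding data-processing helpers in the code
// generator: each call accounts for one emitted data-processing instruction, either as
// internal work (internal_latency == true) or as the final, result-producing latency.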
void SchedulingLatencyVisitorARM::HandleGenerateDataProcInstruction(bool internal_latency) {
  if (internal_latency) {
    last_visited_internal_latency_ += kArmIntegerOpLatency;
  } else {
    last_visited_latency_ = kArmDataProcWithShifterOpLatency;
  }
}

void SchedulingLatencyVisitorARM::HandleGenerateDataProc(HDataProcWithShifterOp* instruction) {
  const HInstruction::InstructionKind kind = instruction->GetInstrKind();
  if (kind == HInstruction::kAdd) {
    last_visited_internal_latency_ = kArmIntegerOpLatency;
    last_visited_latency_ = kArmIntegerOpLatency;
  } else if (kind == HInstruction::kSub) {
    last_visited_internal_latency_ = kArmIntegerOpLatency;
    last_visited_latency_ = kArmIntegerOpLatency;
  } else {
    HandleGenerateDataProcInstruction(/* internal_latency */ true);
    HandleGenerateDataProcInstruction();
  }
}

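// 64-bit data-processing operations with a shifted operand: the number of instructions
// needed depends on the shift amount and on whether the operation is an OR/XOR (where the
// two result halves can be combined directly) or an ADD/SUB-style operation that needs
// the shifted 64-bit value to be materialized first.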
void SchedulingLatencyVisitorARM::HandleGenerateLongDataProc(HDataProcWithShifterOp* instruction) {
  DCHECK_EQ(instruction->GetType(), Primitive::kPrimLong);
  DCHECK(HDataProcWithShifterOp::IsShiftOp(instruction->GetOpKind()));

  const uint32_t shift_value = instruction->GetShiftAmount();
  const HInstruction::InstructionKind kind = instruction->GetInstrKind();

  if (shift_value >= 32) {
    // Different shift types generate similar code for shift amounts of 32 or more, so
    // unlike the codegen pass there is no need to differentiate between them here; this
    // also avoids depending on the shift-type handling of the different ARM backends.
    HandleGenerateDataProc(instruction);
  } else {
    DCHECK_GT(shift_value, 1U);
    DCHECK_LT(shift_value, 32U);

    if (kind == HInstruction::kOr || kind == HInstruction::kXor) {
      HandleGenerateDataProcInstruction(/* internal_latency */ true);
      HandleGenerateDataProcInstruction(/* internal_latency */ true);
      HandleGenerateDataProcInstruction();
    } else {
      last_visited_internal_latency_ += 2 * kArmIntegerOpLatency;
      HandleGenerateDataProc(instruction);
    }
  }
}

void SchedulingLatencyVisitorARM::VisitDataProcWithShifterOp(HDataProcWithShifterOp* instruction) {
  const HDataProcWithShifterOp::OpKind op_kind = instruction->GetOpKind();

  if (instruction->GetType() == Primitive::kPrimInt) {
    DCHECK(!HDataProcWithShifterOp::IsExtensionOp(op_kind));
    HandleGenerateDataProcInstruction();
  } else {
    DCHECK_EQ(instruction->GetType(), Primitive::kPrimLong);
    if (HDataProcWithShifterOp::IsExtensionOp(op_kind)) {
      HandleGenerateDataProc(instruction);
    } else {
      HandleGenerateLongDataProc(instruction);
    }
  }
}

void SchedulingLatencyVisitorARM::VisitIntermediateAddress(HIntermediateAddress* ATTRIBUTE_UNUSED) {
  // Although the generated code is only a single `add` instruction, we found empirically
  // that scheduling it away from its uses in memory accesses is beneficial.
  last_visited_internal_latency_ = kArmNopLatency;
  last_visited_latency_ = kArmIntegerOpLatency;
}

void SchedulingLatencyVisitorARM::VisitIntermediateAddressIndex(
    HIntermediateAddressIndex* ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL) << "IntermediateAddressIndex is not implemented for ARM";
}

void SchedulingLatencyVisitorARM::VisitMultiplyAccumulate(HMultiplyAccumulate* ATTRIBUTE_UNUSED) {
  last_visited_latency_ = kArmMulIntegerLatency;
}

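// Array loads: the cost depends on the element type, on whether the index is constant
// (so it can be folded into the addressing mode) or must be added in separately, on
// whether an HIntermediateAddress already provides the data offset, and, for char loads
// from strings, on the compressed-string check. Reference loads may additionally go
// through the Baker read barrier path.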
void SchedulingLatencyVisitorARM::VisitArrayGet(HArrayGet* instruction) {
  Primitive::Type type = instruction->GetType();
  const bool maybe_compressed_char_at =
      mirror::kUseStringCompression && instruction->IsStringCharAt();
  HInstruction* array_instr = instruction->GetArray();
  bool has_intermediate_address = array_instr->IsIntermediateAddress();
  HInstruction* index = instruction->InputAt(1);

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      if (maybe_compressed_char_at) {
        last_visited_internal_latency_ += kArmMemoryLoadLatency;
      }
      if (index->IsConstant()) {
        if (maybe_compressed_char_at) {
          last_visited_internal_latency_ +=
              kArmIntegerOpLatency + kArmBranchLatency + kArmMemoryLoadLatency;
          last_visited_latency_ = kArmBranchLatency;
        } else {
          last_visited_latency_ += kArmMemoryLoadLatency;
        }
      } else {
        if (!has_intermediate_address) {
          last_visited_internal_latency_ += kArmIntegerOpLatency;
        }
        if (maybe_compressed_char_at) {
          last_visited_internal_latency_ +=
              kArmIntegerOpLatency + kArmBranchLatency + kArmMemoryLoadLatency;
          last_visited_latency_ = kArmBranchLatency;
        } else {
          last_visited_latency_ += kArmMemoryLoadLatency;
        }
      }
      break;
    }

    case Primitive::kPrimNot: {
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        last_visited_latency_ = kArmLoadWithBakerReadBarrierLatency;
      } else {
        if (index->IsConstant()) {
          last_visited_latency_ = kArmMemoryLoadLatency;
        } else {
          if (!has_intermediate_address) {
            last_visited_internal_latency_ += kArmIntegerOpLatency;
          }
          last_visited_internal_latency_ = kArmMemoryLoadLatency;
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (index->IsConstant()) {
        last_visited_latency_ = kArmMemoryLoadLatency;
      } else {
        last_visited_internal_latency_ += kArmIntegerOpLatency;
        last_visited_latency_ = kArmMemoryLoadLatency;
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (index->IsConstant()) {
        last_visited_latency_ = kArmMemoryLoadLatency;
      } else {
        last_visited_internal_latency_ += kArmIntegerOpLatency;
        last_visited_latency_ = kArmMemoryLoadLatency;
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (index->IsConstant()) {
        last_visited_latency_ = kArmMemoryLoadLatency;
      } else {
        last_visited_internal_latency_ += kArmIntegerOpLatency;
        last_visited_latency_ = kArmMemoryLoadLatency;
      }
      break;
    }

    default:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
}

void SchedulingLatencyVisitorARM::VisitArrayLength(HArrayLength* instruction) {
  last_visited_latency_ = kArmMemoryLoadLatency;
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    last_visited_internal_latency_ = kArmMemoryLoadLatency;
    last_visited_latency_ = kArmIntegerOpLatency;
  }
}

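// Array stores: like array loads, the cost depends on the component type and on whether
// the index is constant. Reference stores that may need a runtime type check are given a
// single, coarse kArmRuntimeTypeCheckLatency instead of being modelled instruction by
// instruction.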
void SchedulingLatencyVisitorARM::VisitArraySet(HArraySet* instruction) {
  HInstruction* index = instruction->InputAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  HInstruction* array_instr = instruction->GetArray();
  bool has_intermediate_address = array_instr->IsIntermediateAddress();

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      if (index->IsConstant()) {
        last_visited_latency_ = kArmMemoryStoreLatency;
      } else {
        if (!has_intermediate_address) {
          last_visited_internal_latency_ = kArmIntegerOpLatency;
        }
        last_visited_latency_ = kArmMemoryStoreLatency;
      }
      break;
    }

    case Primitive::kPrimNot: {
      if (instruction->InputAt(2)->IsNullConstant()) {
        if (index->IsConstant()) {
          last_visited_latency_ = kArmMemoryStoreLatency;
        } else {
          last_visited_internal_latency_ = kArmIntegerOpLatency;
          last_visited_latency_ = kArmMemoryStoreLatency;
        }
      } else {
        // Modelling the exact instruction sequence of the runtime type check is too
        // complicated; just give it a single, conservatively slow latency.
        last_visited_latency_ = kArmRuntimeTypeCheckLatency;
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (index->IsConstant()) {
        last_visited_latency_ = kArmMemoryLoadLatency;
      } else {
        last_visited_internal_latency_ = kArmIntegerOpLatency;
        last_visited_latency_ = kArmMemoryLoadLatency;
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (index->IsConstant()) {
        last_visited_latency_ = kArmMemoryLoadLatency;
      } else {
        last_visited_internal_latency_ = kArmIntegerOpLatency;
        last_visited_latency_ = kArmMemoryLoadLatency;
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (index->IsConstant()) {
        last_visited_latency_ = kArmMemoryLoadLatency;
      } else {
        last_visited_internal_latency_ = kArmIntegerOpLatency;
        last_visited_latency_ = kArmMemoryLoadLatency;
      }
      break;
    }

    default:
      LOG(FATAL) << "Unreachable type " << value_type;
      UNREACHABLE();
  }
}

void SchedulingLatencyVisitorARM::VisitBoundsCheck(HBoundsCheck* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArmIntegerOpLatency;
  // Users do not use any data results.
  last_visited_latency_ = 0;
}

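// Integer division/remainder by a constant: a divisor of 0 contributes no latency here,
// a divisor of +/-1 is a single move/negate, a power-of-two divisor becomes a short
// shift-and-add sequence, and any other constant is modelled as a magic-number style
// multiplication followed by a couple of fix-up operations.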
void SchedulingLatencyVisitorARM::HandleDivRemConstantIntegralLatencies(int32_t imm) {
  if (imm == 0) {
    last_visited_internal_latency_ = 0;
    last_visited_latency_ = 0;
  } else if (imm == 1 || imm == -1) {
    last_visited_latency_ = kArmIntegerOpLatency;
  } else if (IsPowerOfTwo(AbsOrMin(imm))) {
    last_visited_internal_latency_ = 3 * kArmIntegerOpLatency;
    last_visited_latency_ = kArmIntegerOpLatency;
  } else {
    last_visited_internal_latency_ = kArmMulIntegerLatency + 2 * kArmIntegerOpLatency;
    last_visited_latency_ = kArmIntegerOpLatency;
  }
}

void SchedulingLatencyVisitorARM::VisitDiv(HDiv* instruction) {
  Primitive::Type type = instruction->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      HInstruction* rhs = instruction->GetRight();
      if (rhs->IsConstant()) {
        int32_t imm = Int32ConstantFrom(rhs->AsConstant());
        HandleDivRemConstantIntegralLatencies(imm);
      } else {
        last_visited_latency_ = kArmDivIntegerLatency;
      }
      break;
    }
    case Primitive::kPrimFloat:
      last_visited_latency_ = kArmDivFloatLatency;
      break;
    case Primitive::kPrimDouble:
      last_visited_latency_ = kArmDivDoubleLatency;
      break;
    default:
      last_visited_internal_latency_ = kArmCallInternalLatency;
      last_visited_latency_ = kArmCallLatency;
      break;
  }
}

void SchedulingLatencyVisitorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGetLatencies(instruction, instruction->GetFieldInfo());
}

void SchedulingLatencyVisitorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSetLatencies(instruction, instruction->GetFieldInfo());
}

void SchedulingLatencyVisitorARM::VisitInstanceOf(HInstanceOf* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArmCallInternalLatency;
  last_visited_latency_ = kArmIntegerOpLatency;
}

void SchedulingLatencyVisitorARM::VisitInvoke(HInvoke* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArmCallInternalLatency;
  last_visited_latency_ = kArmCallLatency;
}

void SchedulingLatencyVisitorARM::VisitLoadString(HLoadString* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArmLoadStringInternalLatency;
  last_visited_latency_ = kArmMemoryLoadLatency;
}

void SchedulingLatencyVisitorARM::VisitNewArray(HNewArray* ATTRIBUTE_UNUSED) {
  last_visited_internal_latency_ = kArmIntegerOpLatency + kArmCallInternalLatency;
  last_visited_latency_ = kArmCallLatency;
}

void SchedulingLatencyVisitorARM::VisitNewInstance(HNewInstance* instruction) {
  if (instruction->IsStringAlloc()) {
    last_visited_internal_latency_ = 2 * kArmMemoryLoadLatency + kArmCallInternalLatency;
  } else {
    last_visited_internal_latency_ = kArmCallInternalLatency;
  }
  last_visited_latency_ = kArmCallLatency;
}

void SchedulingLatencyVisitorARM::VisitRem(HRem* instruction) {
  Primitive::Type type = instruction->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      HInstruction* rhs = instruction->GetRight();
      if (rhs->IsConstant()) {
        int32_t imm = Int32ConstantFrom(rhs->AsConstant());
        HandleDivRemConstantIntegralLatencies(imm);
      } else {
        last_visited_internal_latency_ = kArmDivIntegerLatency;
        last_visited_latency_ = kArmMulIntegerLatency;
      }
      break;
    }
    default:
      last_visited_internal_latency_ = kArmCallInternalLatency;
      last_visited_latency_ = kArmCallLatency;
      break;
  }
}

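// Field loads: most types are a single memory load. Volatile wide (long/double) loads on
// cores without single-copy atomic LDRD, and reference loads under the Baker read
// barrier, need extra internal work, and every volatile load is followed by a memory
// barrier.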
void SchedulingLatencyVisitorARM::HandleFieldGetLatencies(HInstruction* instruction,
                                                          const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  DCHECK(codegen_ != nullptr);
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
      last_visited_latency_ = kArmMemoryLoadLatency;
      break;

    case Primitive::kPrimNot:
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        last_visited_internal_latency_ = kArmMemoryLoadLatency + kArmIntegerOpLatency;
        last_visited_latency_ = kArmMemoryLoadLatency;
      } else {
        last_visited_latency_ = kArmMemoryLoadLatency;
      }
      break;

    case Primitive::kPrimLong:
      if (is_volatile && !atomic_ldrd_strd) {
        last_visited_internal_latency_ = kArmMemoryLoadLatency + kArmIntegerOpLatency;
        last_visited_latency_ = kArmMemoryLoadLatency;
      } else {
        last_visited_latency_ = kArmMemoryLoadLatency;
      }
      break;

    case Primitive::kPrimFloat:
      last_visited_latency_ = kArmMemoryLoadLatency;
      break;

    case Primitive::kPrimDouble:
      if (is_volatile && !atomic_ldrd_strd) {
        last_visited_internal_latency_ =
            kArmMemoryLoadLatency + kArmIntegerOpLatency + kArmMemoryLoadLatency;
        last_visited_latency_ = kArmIntegerOpLatency;
      } else {
        last_visited_latency_ = kArmMemoryLoadLatency;
      }
      break;

    default:
      last_visited_latency_ = kArmMemoryLoadLatency;
      break;
  }

  if (is_volatile) {
    last_visited_internal_latency_ += kArmMemoryBarrierLatency;
  }
}

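// Field stores: the mirror image of the loads above. Volatile stores of sub-word types
// are bracketed by memory barriers, wide volatile stores without atomic STRD go through
// an exclusive-load/store (LDREXD/STREXD-style) sequence, and heap-reference stores may
// need extra instructions when heap poisoning is enabled.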
void SchedulingLatencyVisitorARM::HandleFieldSetLatencies(HInstruction* instruction,
                                                          const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
  DCHECK(codegen_ != nullptr);
  bool is_volatile = field_info.IsVolatile();
  Primitive::Type field_type = field_info.GetFieldType();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures().HasAtomicLdrdAndStrd();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
      if (is_volatile) {
        last_visited_internal_latency_ = kArmMemoryBarrierLatency + kArmMemoryStoreLatency;
        last_visited_latency_ = kArmMemoryBarrierLatency;
      } else {
        last_visited_latency_ = kArmMemoryStoreLatency;
      }
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      if (kPoisonHeapReferences && needs_write_barrier) {
        last_visited_internal_latency_ += kArmIntegerOpLatency * 2;
      }
      last_visited_latency_ = kArmMemoryStoreLatency;
      break;

    case Primitive::kPrimLong:
      if (is_volatile && !atomic_ldrd_strd) {
        last_visited_internal_latency_ =
            kArmIntegerOpLatency + kArmMemoryLoadLatency + kArmMemoryStoreLatency;
        last_visited_latency_ = kArmIntegerOpLatency;
      } else {
        last_visited_latency_ = kArmMemoryStoreLatency;
      }
      break;

    case Primitive::kPrimFloat:
      last_visited_latency_ = kArmMemoryStoreLatency;
      break;

    case Primitive::kPrimDouble:
      if (is_volatile && !atomic_ldrd_strd) {
        last_visited_internal_latency_ = kArmIntegerOpLatency +
            kArmIntegerOpLatency + kArmMemoryLoadLatency + kArmMemoryStoreLatency;
        last_visited_latency_ = kArmIntegerOpLatency;
      } else {
        last_visited_latency_ = kArmMemoryStoreLatency;
      }
      break;

    default:
      last_visited_latency_ = kArmMemoryStoreLatency;
      break;
  }
}

void SchedulingLatencyVisitorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGetLatencies(instruction, instruction->GetFieldInfo());
}

void SchedulingLatencyVisitorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSetLatencies(instruction, instruction->GetFieldInfo());
}

void SchedulingLatencyVisitorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  DCHECK((block->GetLoopInformation() != nullptr) ||
         (block->IsEntryBlock() && instruction->GetNext()->IsGoto()));
  // Users do not use any data results.
  last_visited_latency_ = 0;
}

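// Type conversions: narrowing integral conversions are a single bit-field extract,
// int<->float conversions go through the VFP conversion path, and conversions between
// long and floating-point values are modelled as runtime calls or as longer VFP
// sequences, mirroring what the code generator emits.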
void SchedulingLatencyVisitorARM::VisitTypeConversion(HTypeConversion* instr) {
  Primitive::Type result_type = instr->GetResultType();
  Primitive::Type input_type = instr->GetInputType();

  switch (result_type) {
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      last_visited_latency_ = kArmIntegerOpLatency;  // SBFX or UBFX
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          last_visited_latency_ = kArmIntegerOpLatency;  // MOV
          break;
        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          last_visited_internal_latency_ = kArmTypeConversionFloatingPointIntegerLatency;
          last_visited_latency_ = kArmFloatingPointOpLatency;
          break;
        default:
          last_visited_latency_ = kArmIntegerOpLatency;
          break;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // MOV and extension
          last_visited_internal_latency_ = kArmIntegerOpLatency;
          last_visited_latency_ = kArmIntegerOpLatency;
          break;
        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          // invokes runtime
          last_visited_internal_latency_ = kArmCallInternalLatency;
          break;
        default:
          last_visited_internal_latency_ = kArmIntegerOpLatency;
          last_visited_latency_ = kArmIntegerOpLatency;
          break;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          last_visited_internal_latency_ = kArmTypeConversionFloatingPointIntegerLatency;
          last_visited_latency_ = kArmFloatingPointOpLatency;
          break;
        case Primitive::kPrimLong:
          // invokes runtime
          last_visited_internal_latency_ = kArmCallInternalLatency;
          break;
        case Primitive::kPrimDouble:
          last_visited_latency_ = kArmFloatingPointOpLatency;
          break;
        default:
          last_visited_latency_ = kArmFloatingPointOpLatency;
          break;
      }
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          last_visited_internal_latency_ = kArmTypeConversionFloatingPointIntegerLatency;
          last_visited_latency_ = kArmFloatingPointOpLatency;
          break;
        case Primitive::kPrimLong:
          last_visited_internal_latency_ = 5 * kArmFloatingPointOpLatency;
          last_visited_latency_ = kArmFloatingPointOpLatency;
          break;
        case Primitive::kPrimFloat:
          last_visited_latency_ = kArmFloatingPointOpLatency;
          break;
        default:
          last_visited_latency_ = kArmFloatingPointOpLatency;
          break;
      }
      break;

    default:
      last_visited_latency_ = kArmTypeConversionFloatingPointIntegerLatency;
      break;
  }
}

void SchedulingLatencyVisitorARM::VisitArmDexCacheArraysBase(art::HArmDexCacheArraysBase*) {
  last_visited_internal_latency_ = kArmIntegerOpLatency;
  last_visited_latency_ = kArmIntegerOpLatency;
}

}  // namespace arm
}  // namespace art