blob: 6425e1313ff12333111c4f2aacd41f5332189e42 [file] [log] [blame]
Andreas Gampe71fb52f2014-12-29 17:43:08 -08001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_COMPILER_OPTIMIZING_INTRINSICS_H_
18#define ART_COMPILER_OPTIMIZING_INTRINSICS_H_
19
Roland Levillainec525fc2015-04-28 15:50:20 +010020#include "code_generator.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080021#include "nodes.h"
22#include "optimization.h"
Roland Levillainec525fc2015-04-28 15:50:20 +010023#include "parallel_move_resolver.h"
Andreas Gampe71fb52f2014-12-29 17:43:08 -080024
namespace art {

// Forward declarations to keep this header light.
class CompilerDriver;
class DexFile;

// Positive floating-point infinities (IEEE-754 bit patterns: all exponent
// bits set, zero mantissa).
static constexpr uint32_t kPositiveInfinityFloat = 0x7f800000U;
static constexpr uint64_t kPositiveInfinityDouble = UINT64_C(0x7ff0000000000000);

// Canonical quiet-NaN bit patterns for float and double (exponent all ones,
// top mantissa bit set).
static constexpr uint32_t kNanFloat = 0x7fc00000U;
static constexpr uint64_t kNanDouble = 0x7ff8000000000000;
36
// Recognize intrinsics from HInvoke nodes: an HOptimization pass that tags
// invokes with their Intrinsics enum value so later stages (and the
// per-architecture code generators) can treat them specially.
class IntrinsicsRecognizer : public HOptimization {
 public:
  IntrinsicsRecognizer(HGraph* graph, OptimizingCompilerStats* stats)
      : HOptimization(graph, kIntrinsicsRecognizerPassName, stats) {}

  // Runs the recognition pass over the whole graph (implementation lives in
  // the corresponding .cc file).
  void Run() OVERRIDE;

  // Pass name as reported to the pass framework / compiler statistics.
  static constexpr const char* kIntrinsicsRecognizerPassName = "intrinsics_recognition";

 private:
  DISALLOW_COPY_AND_ASSIGN(IntrinsicsRecognizer);
};
50
// Base visitor over recognized intrinsics. Dispatch() routes an HInvoke to
// the matching Visit<Name>() method; every Visit method defaults to a no-op,
// so subclasses (locations builders, code generators) override only the
// intrinsics they handle. The set of methods is generated from
// intrinsics_list.h via the OPTIMIZING_INTRINSICS X-macro.
class IntrinsicVisitor : public ValueObject {
 public:
  virtual ~IntrinsicVisitor() {}

  // Dispatch logic.

  void Dispatch(HInvoke* invoke) {
    switch (invoke->GetIntrinsic()) {
      case Intrinsics::kNone:
        return;
#define OPTIMIZING_INTRINSICS(Name, ...) \
      case Intrinsics::k ## Name: \
        Visit ## Name(invoke); \
        return;
#include "intrinsics_list.h"
INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS

      // Do not put a default case. That way the compiler will complain if we missed a case.
    }
  }

  // Define visitor methods.

#define OPTIMIZING_INTRINSICS(Name, ...) \
  virtual void Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
  }
#include "intrinsics_list.h"
INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
#undef INTRINSICS_LIST
#undef OPTIMIZING_INTRINSICS

  // Moves the invoke's arguments from their current locations into the
  // positions dictated by the calling convention, using a parallel move so
  // that overlapping source/destination locations are handled correctly.
  static void MoveArguments(HInvoke* invoke,
                            CodeGenerator* codegen,
                            InvokeDexCallingConventionVisitor* calling_convention_visitor) {
    if (kIsDebugBuild && invoke->IsInvokeStaticOrDirect()) {
      HInvokeStaticOrDirect* invoke_static_or_direct = invoke->AsInvokeStaticOrDirect();
      // Explicit clinit checks triggered by static invokes must have been
      // pruned by art::PrepareForRegisterAllocation.
      DCHECK(!invoke_static_or_direct->IsStaticWithExplicitClinitCheck());
    }

    if (invoke->GetNumberOfArguments() == 0) {
      // No argument to move.
      return;
    }

    LocationSummary* locations = invoke->GetLocations();

    // We're moving potentially two or more locations to locations that could overlap, so we need
    // a parallel move resolver.
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());

    for (size_t i = 0; i < invoke->GetNumberOfArguments(); i++) {
      HInstruction* input = invoke->InputAt(i);
      Location cc_loc = calling_convention_visitor->GetNextLocation(input->GetType());
      Location actual_loc = locations->InAt(i);

      parallel_move.AddMove(actual_loc, cc_loc, input->GetType(), nullptr);
    }

    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
  }

 protected:
  IntrinsicVisitor() {}

 private:
  DISALLOW_COPY_AND_ASSIGN(IntrinsicVisitor);
};
122
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +0100123#define GENERIC_OPTIMIZATION(name, bit) \
Nicolas Geoffray12be6622015-10-07 11:52:21 +0100124public: \
125void Set##name() { SetBit(k##name); } \
126bool Get##name() const { return IsBitSet(k##name); } \
127private: \
Roland Levillainebea3d22016-04-12 15:42:57 +0100128static constexpr size_t k##name = bit
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +0100129
130class IntrinsicOptimizations : public ValueObject {
131 public:
Roland Levillainebea3d22016-04-12 15:42:57 +0100132 explicit IntrinsicOptimizations(HInvoke* invoke)
133 : value_(invoke->GetIntrinsicOptimizations()) {}
Nicolas Geoffray12be6622015-10-07 11:52:21 +0100134 explicit IntrinsicOptimizations(const HInvoke& invoke)
135 : value_(invoke.GetIntrinsicOptimizations()) {}
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +0100136
137 static constexpr int kNumberOfGenericOptimizations = 2;
138 GENERIC_OPTIMIZATION(DoesNotNeedDexCache, 0);
139 GENERIC_OPTIMIZATION(DoesNotNeedEnvironment, 1);
140
141 protected:
142 bool IsBitSet(uint32_t bit) const {
Roland Levillainebea3d22016-04-12 15:42:57 +0100143 DCHECK_LT(bit, sizeof(uint32_t) * kBitsPerByte);
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +0100144 return (*value_ & (1 << bit)) != 0u;
145 }
146
147 void SetBit(uint32_t bit) {
Roland Levillainebea3d22016-04-12 15:42:57 +0100148 DCHECK_LT(bit, sizeof(uint32_t) * kBitsPerByte);
149 *(const_cast<uint32_t* const>(value_)) |= (1 << bit);
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +0100150 }
151
152 private:
Roland Levillainebea3d22016-04-12 15:42:57 +0100153 const uint32_t* const value_;
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +0100154
155 DISALLOW_COPY_AND_ASSIGN(IntrinsicOptimizations);
156};
157
158#undef GENERIC_OPTIMIZATION
159
// Same shape as GENERIC_OPTIMIZATION, but offsets the bit index past the
// generic flags so subclass-specific flags never collide with them.
#define INTRINSIC_OPTIMIZATION(name, bit) \
public: \
void Set##name() { SetBit(k##name); } \
bool Get##name() const { return IsBitSet(k##name); } \
private: \
static constexpr size_t k##name = (bit) + kNumberOfGenericOptimizations
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +0100166
// Optimization flags for the String.equals intrinsic. Set when analysis has
// proven a property of the argument; presumably consumed by the per-arch
// String.equals code generators to skip the corresponding runtime checks —
// confirm against intrinsics_<arch>.cc.
class StringEqualsOptimizations : public IntrinsicOptimizations {
 public:
  explicit StringEqualsOptimizations(HInvoke* invoke) : IntrinsicOptimizations(invoke) {}

  INTRINSIC_OPTIMIZATION(ArgumentNotNull, 0);
  INTRINSIC_OPTIMIZATION(ArgumentIsString, 1);

 private:
  DISALLOW_COPY_AND_ASSIGN(StringEqualsOptimizations);
};
177
// Optimization flags for the System.arraycopy intrinsic. Each flag records a
// property proven by analysis (non-nullness, length/type relations between
// source and destination); presumably used by the per-arch arraycopy code
// generators to elide checks — confirm against intrinsics_<arch>.cc.
class SystemArrayCopyOptimizations : public IntrinsicOptimizations {
 public:
  explicit SystemArrayCopyOptimizations(HInvoke* invoke) : IntrinsicOptimizations(invoke) {}

  INTRINSIC_OPTIMIZATION(SourceIsNotNull, 0);
  INTRINSIC_OPTIMIZATION(DestinationIsNotNull, 1);
  INTRINSIC_OPTIMIZATION(DestinationIsSource, 2);
  INTRINSIC_OPTIMIZATION(CountIsSourceLength, 3);
  INTRINSIC_OPTIMIZATION(CountIsDestinationLength, 4);
  INTRINSIC_OPTIMIZATION(DoesNotNeedTypeCheck, 5);
  INTRINSIC_OPTIMIZATION(DestinationIsTypedObjectArray, 6);
  INTRINSIC_OPTIMIZATION(DestinationIsNonPrimitiveArray, 7);
  INTRINSIC_OPTIMIZATION(DestinationIsPrimitiveArray, 8);
  INTRINSIC_OPTIMIZATION(SourceIsNonPrimitiveArray, 9);
  INTRINSIC_OPTIMIZATION(SourceIsPrimitiveArray, 10);

 private:
  DISALLOW_COPY_AND_ASSIGN(SystemArrayCopyOptimizations);
};
197
// Fixed typo: was "#undef INTRISIC_OPTIMIZATION" (missing 'N'), which
// undefined nothing and leaked the INTRINSIC_OPTIMIZATION macro to every
// includer of this header.
#undef INTRINSIC_OPTIMIZATION
199
//
// Macros for use in the intrinsics code generators.
//

// Defines an unimplemented intrinsic: that is, a method call that is recognized as an
// intrinsic to exploit e.g. no side-effects or exceptions, but otherwise not handled
// by this architecture-specific intrinsics code generator. Eventually it is implemented
// as a true method call. Both the locations-builder and code-generator visit
// methods are stubbed out as empty.
#define UNIMPLEMENTED_INTRINSIC(Arch, Name) \
void IntrinsicLocationsBuilder ## Arch::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
} \
void IntrinsicCodeGenerator ## Arch::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}

// Defines a list of unreached intrinsics: that is, method calls that are recognized as
// an intrinsic, and then always converted into HIR instructions before they reach any
// architecture-specific intrinsics code generator. Reaching one of these
// stubs at code-generation time is a compiler bug, hence LOG(FATAL).
#define UNREACHABLE_INTRINSIC(Arch, Name) \
void IntrinsicLocationsBuilder ## Arch::Visit ## Name(HInvoke* invoke) { \
  LOG(FATAL) << "Unreachable: intrinsic " << invoke->GetIntrinsic() \
             << " should have been converted to HIR"; \
} \
void IntrinsicCodeGenerator ## Arch::Visit ## Name(HInvoke* invoke) { \
  LOG(FATAL) << "Unreachable: intrinsic " << invoke->GetIntrinsic() \
             << " should have been converted to HIR"; \
}
// The intrinsics that every backend simplifies to HIR before code generation.
// Keeping the list in one shared macro keeps the per-architecture generators
// in sync: each backend expands UNREACHABLE_INTRINSICS(Arch) once.
#define UNREACHABLE_INTRINSICS(Arch) \
UNREACHABLE_INTRINSIC(Arch, FloatFloatToIntBits) \
UNREACHABLE_INTRINSIC(Arch, DoubleDoubleToLongBits) \
UNREACHABLE_INTRINSIC(Arch, FloatIsNaN) \
UNREACHABLE_INTRINSIC(Arch, DoubleIsNaN) \
UNREACHABLE_INTRINSIC(Arch, IntegerRotateLeft) \
UNREACHABLE_INTRINSIC(Arch, LongRotateLeft) \
UNREACHABLE_INTRINSIC(Arch, IntegerRotateRight) \
UNREACHABLE_INTRINSIC(Arch, LongRotateRight) \
UNREACHABLE_INTRINSIC(Arch, IntegerCompare) \
UNREACHABLE_INTRINSIC(Arch, LongCompare) \
UNREACHABLE_INTRINSIC(Arch, IntegerSignum) \
UNREACHABLE_INTRINSIC(Arch, LongSignum) \
UNREACHABLE_INTRINSIC(Arch, StringCharAt) \
UNREACHABLE_INTRINSIC(Arch, StringIsEmpty) \
UNREACHABLE_INTRINSIC(Arch, StringLength) \
UNREACHABLE_INTRINSIC(Arch, UnsafeLoadFence) \
UNREACHABLE_INTRINSIC(Arch, UnsafeStoreFence) \
UNREACHABLE_INTRINSIC(Arch, UnsafeFullFence)
Aart Bik2f9fcc92016-03-01 15:16:54 -0800245
Vladimir Marko68c981f2016-08-26 13:13:33 +0100246template <typename IntrinsicLocationsBuilder, typename Codegenerator>
247bool IsCallFreeIntrinsic(HInvoke* invoke, Codegenerator* codegen) {
248 if (invoke->GetIntrinsic() != Intrinsics::kNone) {
249 // This invoke may have intrinsic code generation defined. However, we must
250 // now also determine if this code generation is truly there and call-free
251 // (not unimplemented, no bail on instruction features, or call on slow path).
252 // This is done by actually calling the locations builder on the instruction
253 // and clearing out the locations once result is known. We assume this
254 // call only has creating locations as side effects!
255 // TODO: Avoid wasting Arena memory.
256 IntrinsicLocationsBuilder builder(codegen);
257 bool success = builder.TryDispatch(invoke) && !invoke->GetLocations()->CanCall();
258 invoke->SetLocations(nullptr);
259 return success;
260 }
261 return false;
262}
263
Andreas Gampe71fb52f2014-12-29 17:43:08 -0800264} // namespace art
265
266#endif // ART_COMPILER_OPTIMIZING_INTRINSICS_H_