/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <memory>
#include <vector>

#include "arch/instruction_set.h"
#include "cfi_test.h"
#include "driver/compiler_options.h"
#include "gtest/gtest.h"
#include "optimizing/code_generator.h"
#include "optimizing/optimizing_unit_test.h"
#include "read_barrier_config.h"
#include "utils/arm/assembler_arm_vixl.h"
#include "utils/assembler.h"
#include "utils/mips/assembler_mips.h"
#include "utils/mips64/assembler_mips64.h"

#include "optimizing/optimizing_cfi_test_expected.inc"

namespace vixl32 = vixl::aarch32;

using vixl32::r0;

namespace art {

// Run the tests only on host.
#ifndef ART_TARGET_ANDROID

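// Test fixture that builds a minimal compilation unit, lets the code generator
// of the Optimizing compiler emit a frame entry/exit for the requested ISA, and
// compares the resulting machine code and CFI data against the expected byte
// arrays from optimizing_cfi_test_expected.inc.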
class OptimizingCFITest : public CFITest {
 public:
  // Enable this flag to generate the expected outputs.
  static constexpr bool kGenerateExpected = false;

  OptimizingCFITest()
      : pool_and_allocator_(),
        opts_(),
        isa_features_(),
        graph_(nullptr),
        code_gen_(),
        blocks_(GetAllocator()->Adapter()) {}

  ArenaAllocator* GetAllocator() { return pool_and_allocator_.GetAllocator(); }

  void SetUpFrame(InstructionSet isa) {
    // Set up a simple context.
    std::string error;
    isa_features_ = InstructionSetFeatures::FromVariant(isa, "default", &error);
    graph_ = CreateGraph(&pool_and_allocator_);
    // Generate a simple frame with some spills.
    code_gen_ = CodeGenerator::Create(graph_, isa, *isa_features_, opts_);
    code_gen_->GetAssembler()->cfi().SetEnabled(true);
    code_gen_->InitializeCodeGenerationData();
    const int frame_size = 64;
    int core_reg = 0;
    int fp_reg = 0;
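    // Mark the first two callee-save core and FP registers as allocated, so
    // that GenerateFrameEntry() below has some registers to spill and the
    // emitted CFI describes their saved locations.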
    for (int i = 0; i < 2; i++) {  // Two registers of each kind.
      for (; core_reg < 32; core_reg++) {
        if (code_gen_->IsCoreCalleeSaveRegister(core_reg)) {
          auto location = Location::RegisterLocation(core_reg);
          code_gen_->AddAllocatedRegister(location);
          core_reg++;
          break;
        }
      }
      for (; fp_reg < 32; fp_reg++) {
        if (code_gen_->IsFloatingPointCalleeSaveRegister(fp_reg)) {
          auto location = Location::FpuRegisterLocation(fp_reg);
          code_gen_->AddAllocatedRegister(location);
          fp_reg++;
          break;
        }
      }
    }
    code_gen_->block_order_ = &blocks_;
    code_gen_->ComputeSpillMask();
    code_gen_->SetFrameSize(frame_size);
    code_gen_->GenerateFrameEntry();
  }

  void Finish() {
    code_gen_->GenerateFrameExit();
    code_gen_->Finalize(&code_allocator_);
  }

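  // Compares the generated machine code and CFI against the expected arrays,
  // or, when kGenerateExpected is set, writes newly generated expected output
  // to stdout instead of checking.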
  void Check(InstructionSet isa,
             const char* isa_str,
             const std::vector<uint8_t>& expected_asm,
             const std::vector<uint8_t>& expected_cfi) {
    // Get the outputs.
    const std::vector<uint8_t>& actual_asm = code_allocator_.GetMemory();
    Assembler* opt_asm = code_gen_->GetAssembler();
    const std::vector<uint8_t>& actual_cfi = *(opt_asm->cfi().data());

    if (kGenerateExpected) {
      GenerateExpected(stdout, isa, isa_str, actual_asm, actual_cfi);
    } else {
      EXPECT_EQ(expected_asm, actual_asm);
      EXPECT_EQ(expected_cfi, actual_cfi);
    }
  }

  void TestImpl(InstructionSet isa,
                const char* isa_str,
                const std::vector<uint8_t>& expected_asm,
                const std::vector<uint8_t>& expected_cfi) {
    SetUpFrame(isa);
    Finish();
    Check(isa, isa_str, expected_asm, expected_cfi);
  }

  CodeGenerator* GetCodeGenerator() {
    return code_gen_.get();
  }

 private:
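  // Trivial CodeAllocator that keeps the emitted code in a std::vector so the
  // test can compare it against the expected assembly.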
  class InternalCodeAllocator : public CodeAllocator {
   public:
    InternalCodeAllocator() {}

    virtual uint8_t* Allocate(size_t size) {
      memory_.resize(size);
      return memory_.data();
    }

    const std::vector<uint8_t>& GetMemory() { return memory_; }

   private:
    std::vector<uint8_t> memory_;

    DISALLOW_COPY_AND_ASSIGN(InternalCodeAllocator);
  };

  ArenaPoolAndAllocator pool_and_allocator_;
  CompilerOptions opts_;
  std::unique_ptr<const InstructionSetFeatures> isa_features_;
  HGraph* graph_;
  std::unique_ptr<CodeGenerator> code_gen_;
  ArenaVector<HBasicBlock*> blocks_;
  InternalCodeAllocator code_allocator_;
};

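// Defines one gtest per ISA, wiring the expected_asm_<isa> and
// expected_cfi_<isa> arrays from the .inc file into TestImpl().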
#define TEST_ISA(isa)                                         \
  TEST_F(OptimizingCFITest, isa) {                            \
    std::vector<uint8_t> expected_asm(                        \
        expected_asm_##isa,                                   \
        expected_asm_##isa + arraysize(expected_asm_##isa));  \
    std::vector<uint8_t> expected_cfi(                        \
        expected_cfi_##isa,                                   \
        expected_cfi_##isa + arraysize(expected_cfi_##isa));  \
    TestImpl(isa, #isa, expected_asm, expected_cfi);          \
  }

#ifdef ART_ENABLE_CODEGEN_arm
TEST_ISA(kThumb2)
#endif

#ifdef ART_ENABLE_CODEGEN_arm64
// Run the tests for ARM64 only with Baker read barriers, as the
// expected generated code saves and restores X21 and X22 (instead of
// X20 and X21), because X20 is used as the Marking Register in the
// Baker read barrier configuration, and as such is removed from the
// set of callee-save registers in the ARM64 code generator of the
// Optimizing compiler.
#if defined(USE_READ_BARRIER) && defined(USE_BAKER_READ_BARRIER)
TEST_ISA(kArm64)
#endif
#endif

#ifdef ART_ENABLE_CODEGEN_x86
TEST_ISA(kX86)
#endif

#ifdef ART_ENABLE_CODEGEN_x86_64
TEST_ISA(kX86_64)
#endif

#ifdef ART_ENABLE_CODEGEN_mips
TEST_ISA(kMips)
#endif

#ifdef ART_ENABLE_CODEGEN_mips64
TEST_ISA(kMips64)
#endif

#ifdef ART_ENABLE_CODEGEN_arm
TEST_F(OptimizingCFITest, kThumb2Adjust) {
  std::vector<uint8_t> expected_asm(
      expected_asm_kThumb2_adjust,
      expected_asm_kThumb2_adjust + arraysize(expected_asm_kThumb2_adjust));
  std::vector<uint8_t> expected_cfi(
      expected_cfi_kThumb2_adjust,
      expected_cfi_kThumb2_adjust + arraysize(expected_cfi_kThumb2_adjust));
  SetUpFrame(kThumb2);
#define __ down_cast<arm::ArmVIXLAssembler*>(GetCodeGenerator() \
    ->GetAssembler())->GetVIXLAssembler()->
  vixl32::Label target;
  __ CompareAndBranchIfZero(r0, &target);
  // Push the target out of range of CBZ.
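  // A Thumb-2 CBZ can only branch forward by at most 126 bytes, so the loads
  // below force the assembler to rewrite the branch; the kThumb2_adjust
  // expected data captures the resulting code and CFI.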
  for (size_t i = 0; i != 65; ++i) {
    __ Ldr(r0, vixl32::MemOperand(r0));
  }
  __ Bind(&target);
#undef __
  Finish();
  Check(kThumb2, "kThumb2_adjust", expected_asm, expected_cfi);
}
#endif

#ifdef ART_ENABLE_CODEGEN_mips
TEST_F(OptimizingCFITest, kMipsAdjust) {
  // One NOP in the delay slot, plus 1 << 15 NOPs whose size of 1 << 17 bytes
  // exceeds the maximum 18-bit signed branch offset.
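  // The out-of-range BEQZ below forces the assembler to emit a longer branch
  // sequence; the kMips_adjust expected data captures the resulting code and CFI.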
  static constexpr size_t kNumNops = 1u + (1u << 15);
  std::vector<uint8_t> expected_asm(
      expected_asm_kMips_adjust_head,
      expected_asm_kMips_adjust_head + arraysize(expected_asm_kMips_adjust_head));
  expected_asm.resize(expected_asm.size() + kNumNops * 4u, 0u);
  expected_asm.insert(
      expected_asm.end(),
      expected_asm_kMips_adjust_tail,
      expected_asm_kMips_adjust_tail + arraysize(expected_asm_kMips_adjust_tail));
  std::vector<uint8_t> expected_cfi(
      expected_cfi_kMips_adjust,
      expected_cfi_kMips_adjust + arraysize(expected_cfi_kMips_adjust));
  SetUpFrame(kMips);
#define __ down_cast<mips::MipsAssembler*>(GetCodeGenerator()->GetAssembler())->
  mips::MipsLabel target;
  __ Beqz(mips::A0, &target);
  // Push the target out of range of BEQZ.
  for (size_t i = 0; i != kNumNops; ++i) {
    __ Nop();
  }
  __ Bind(&target);
#undef __
  Finish();
  Check(kMips, "kMips_adjust", expected_asm, expected_cfi);
}
#endif

#ifdef ART_ENABLE_CODEGEN_mips64
TEST_F(OptimizingCFITest, kMips64Adjust) {
  // One NOP in the forbidden slot, plus 1 << 15 NOPs whose size of 1 << 17 bytes
  // exceeds the maximum 18-bit signed branch offset.
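  // Same scenario as the kMipsAdjust test above, here pushing the compact BEQC
  // branch out of range.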
  static constexpr size_t kNumNops = 1u + (1u << 15);
  std::vector<uint8_t> expected_asm(
      expected_asm_kMips64_adjust_head,
      expected_asm_kMips64_adjust_head + arraysize(expected_asm_kMips64_adjust_head));
  expected_asm.resize(expected_asm.size() + kNumNops * 4u, 0u);
  expected_asm.insert(
      expected_asm.end(),
      expected_asm_kMips64_adjust_tail,
      expected_asm_kMips64_adjust_tail + arraysize(expected_asm_kMips64_adjust_tail));
  std::vector<uint8_t> expected_cfi(
      expected_cfi_kMips64_adjust,
      expected_cfi_kMips64_adjust + arraysize(expected_cfi_kMips64_adjust));
  SetUpFrame(kMips64);
#define __ down_cast<mips64::Mips64Assembler*>(GetCodeGenerator()->GetAssembler())->
  mips64::Mips64Label target;
  __ Beqc(mips64::A1, mips64::A2, &target);
  // Push the target out of range of BEQC.
  for (size_t i = 0; i != kNumNops; ++i) {
    __ Nop();
  }
  __ Bind(&target);
#undef __
  Finish();
  Check(kMips64, "kMips64_adjust", expected_asm, expected_cfi);
}
#endif

#endif  // ART_TARGET_ANDROID

}  // namespace art