/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dex/compiler_internals.h"
#include "dex_file-inl.h"
#include "gc_map.h"
#include "mapping_table.h"
#include "mir_to_lir-inl.h"
#include "dex/quick/dex_file_method_inliner.h"
#include "dex/quick/dex_file_to_method_inliner_map.h"
#include "dex/verified_methods_data.h"
#include "verifier/dex_gc_map.h"
#include "verifier/method_verifier.h"

namespace art {

namespace {

/* Dump a mapping table */
template <typename It>
void DumpMappingTable(const char* table_name, const char* descriptor, const char* name,
                      const Signature& signature, uint32_t size, It first) {
  if (size != 0) {
    std::string line(StringPrintf("\n %s %s%s_%s_table[%u] = {", table_name,
                                  descriptor, name, signature.ToString().c_str(), size));
    std::replace(line.begin(), line.end(), ';', '_');
    LOG(INFO) << line;
    for (uint32_t i = 0; i != size; ++i) {
      line = StringPrintf(" {0x%05x, 0x%04x},", first.NativePcOffset(), first.DexPc());
      ++first;
      LOG(INFO) << line;
    }
    LOG(INFO) << " };\n\n";
  }
}

}  // anonymous namespace

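/* Report whether the constant bound to rl_src is cheap for the target to materialize, dispatching on width and FP-ness. */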
bool Mir2Lir::IsInexpensiveConstant(RegLocation rl_src) {
  bool res = false;
  if (rl_src.is_const) {
    if (rl_src.wide) {
      if (rl_src.fp) {
        res = InexpensiveConstantDouble(mir_graph_->ConstantValueWide(rl_src));
      } else {
        res = InexpensiveConstantLong(mir_graph_->ConstantValueWide(rl_src));
      }
    } else {
      if (rl_src.fp) {
        res = InexpensiveConstantFloat(mir_graph_->ConstantValue(rl_src));
      } else {
        res = InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src));
      }
    }
  }
  return res;
}

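// Emit a kPseudoSafepointPC marker after inst; setting def_mask to ENCODE_ALL keeps other code
// from being scheduled across the safepoint.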
void Mir2Lir::MarkSafepointPC(LIR* inst) {
  DCHECK(!inst->flags.use_def_invalid);
  inst->u.m.def_mask = ENCODE_ALL;
  LIR* safepoint_pc = NewLIR0(kPseudoSafepointPC);
  DCHECK_EQ(safepoint_pc->u.m.def_mask, ENCODE_ALL);
}

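// Ask the compiler driver whether this instance field access can take the fast path; on success
// it fills in field_offset and is_volatile.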
bool Mir2Lir::FastInstance(uint32_t field_idx, bool is_put, int* field_offset, bool* is_volatile) {
  return cu_->compiler_driver->ComputeInstanceFieldInfo(
      field_idx, mir_graph_->GetCurrentDexCompilationUnit(), is_put, field_offset, is_volatile);
}

/* Remove a LIR from the list. */
void Mir2Lir::UnlinkLIR(LIR* lir) {
  if (UNLIKELY(lir == first_lir_insn_)) {
    first_lir_insn_ = lir->next;
    if (lir->next != NULL) {
      lir->next->prev = NULL;
    } else {
      DCHECK(lir->next == NULL);
      DCHECK(lir == last_lir_insn_);
      last_lir_insn_ = NULL;
    }
  } else if (lir == last_lir_insn_) {
    last_lir_insn_ = lir->prev;
    lir->prev->next = NULL;
  } else if ((lir->prev != NULL) && (lir->next != NULL)) {
    lir->prev->next = lir->next;
    lir->next->prev = lir->prev;
  }
}

/* Convert an instruction to a NOP */
void Mir2Lir::NopLIR(LIR* lir) {
  lir->flags.is_nop = true;
  if (!cu_->verbose) {
    UnlinkLIR(lir);
  }
}

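// Rewrite the memory-reference resource bits in the use/def mask of a load or store LIR.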
void Mir2Lir::SetMemRefType(LIR* lir, bool is_load, int mem_type) {
  uint64_t *mask_ptr;
  uint64_t mask = ENCODE_MEM;
  DCHECK(GetTargetInstFlags(lir->opcode) & (IS_LOAD | IS_STORE));
  DCHECK(!lir->flags.use_def_invalid);
  if (is_load) {
    mask_ptr = &lir->u.m.use_mask;
  } else {
    mask_ptr = &lir->u.m.def_mask;
  }
  /* Clear out the memref flags */
  *mask_ptr &= ~mask;
  /* ..and then add back the one we need */
  switch (mem_type) {
    case kLiteral:
      DCHECK(is_load);
      *mask_ptr |= ENCODE_LITERAL;
      break;
    case kDalvikReg:
      *mask_ptr |= ENCODE_DALVIK_REG;
      break;
    case kHeapRef:
      *mask_ptr |= ENCODE_HEAP_REF;
      break;
    case kMustNotAlias:
      /* Currently only loads can be marked as kMustNotAlias */
      DCHECK(!(GetTargetInstFlags(lir->opcode) & IS_STORE));
      *mask_ptr |= ENCODE_MUST_NOT_ALIAS;
      break;
    default:
      LOG(FATAL) << "Oat: invalid memref kind - " << mem_type;
  }
}

/*
 * Mark load/store instructions that access Dalvik registers through the stack.
 */
void Mir2Lir::AnnotateDalvikRegAccess(LIR* lir, int reg_id, bool is_load,
                                      bool is64bit) {
  SetMemRefType(lir, is_load, kDalvikReg);

  /*
   * Store the Dalvik register id in alias_info. Mark the MSB if it is a 64-bit
   * access.
   */
  lir->flags.alias_info = ENCODE_ALIAS_INFO(reg_id, is64bit);
}

/*
 * Debugging macros
 */
#define DUMP_RESOURCE_MASK(X)

/* Pretty-print a LIR instruction */
void Mir2Lir::DumpLIRInsn(LIR* lir, unsigned char* base_addr) {
  int offset = lir->offset;
  int dest = lir->operands[0];
  const bool dump_nop = (cu_->enable_debug & (1 << kDebugShowNops));

  /* Handle pseudo-ops individually, and all regular insns as a group */
  switch (lir->opcode) {
    case kPseudoMethodEntry:
      LOG(INFO) << "-------- method entry "
                << PrettyMethod(cu_->method_idx, *cu_->dex_file);
      break;
    case kPseudoMethodExit:
      LOG(INFO) << "-------- Method_Exit";
      break;
    case kPseudoBarrier:
      LOG(INFO) << "-------- BARRIER";
      break;
    case kPseudoEntryBlock:
      LOG(INFO) << "-------- entry offset: 0x" << std::hex << dest;
      break;
    case kPseudoDalvikByteCodeBoundary:
      if (lir->operands[0] == 0) {
        // NOTE: only used for debug listings.
        lir->operands[0] = WrapPointer(ArenaStrdup("No instruction string"));
      }
      LOG(INFO) << "-------- dalvik offset: 0x" << std::hex
                << lir->dalvik_offset << " @ "
                << reinterpret_cast<char*>(UnwrapPointer(lir->operands[0]));
      break;
    case kPseudoExitBlock:
      LOG(INFO) << "-------- exit offset: 0x" << std::hex << dest;
      break;
    case kPseudoPseudoAlign4:
      LOG(INFO) << reinterpret_cast<uintptr_t>(base_addr) + offset << " (0x" << std::hex
                << offset << "): .align4";
      break;
    case kPseudoEHBlockLabel:
      LOG(INFO) << "Exception_Handling:";
      break;
    case kPseudoTargetLabel:
    case kPseudoNormalBlockLabel:
      LOG(INFO) << "L" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoThrowTarget:
      LOG(INFO) << "LT" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoIntrinsicRetry:
      LOG(INFO) << "IR" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSuspendTarget:
      LOG(INFO) << "LS" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSafepointPC:
      LOG(INFO) << "LsafepointPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoExportedPC:
      LOG(INFO) << "LexportedPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoCaseLabel:
      LOG(INFO) << "LC" << reinterpret_cast<void*>(lir) << ": Case target 0x"
                << std::hex << lir->operands[0] << "|" << std::dec << lir->operands[0];
      break;
    default:
      if (lir->flags.is_nop && !dump_nop) {
        break;
      } else {
        std::string op_name(BuildInsnString(GetTargetInstName(lir->opcode),
                                            lir, base_addr));
        std::string op_operands(BuildInsnString(GetTargetInstFmt(lir->opcode),
                                                lir, base_addr));
        LOG(INFO) << StringPrintf("%5p: %-9s%s%s",
                                  base_addr + offset,
                                  op_name.c_str(), op_operands.c_str(),
                                  lir->flags.is_nop ? "(nop)" : "");
      }
      break;
  }

  if (lir->u.m.use_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, lir->u.m.use_mask, "use"));
  }
  if (lir->u.m.def_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, lir->u.m.def_mask, "def"));
  }
}

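/* Dump the promotion map: where each Dalvik vreg (and compiler temp) lives, promoted register or stack slot. */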
void Mir2Lir::DumpPromotionMap() {
  int num_regs = cu_->num_dalvik_registers + cu_->num_compiler_temps + 1;
  for (int i = 0; i < num_regs; i++) {
    PromotionMap v_reg_map = promotion_map_[i];
    std::string buf;
    if (v_reg_map.fp_location == kLocPhysReg) {
      StringAppendF(&buf, " : s%d", v_reg_map.FpReg & FpRegMask());
    }

    std::string buf3;
    if (i < cu_->num_dalvik_registers) {
      StringAppendF(&buf3, "%02d", i);
    } else if (i == mir_graph_->GetMethodSReg()) {
      buf3 = "Method*";
    } else {
      StringAppendF(&buf3, "ct%d", i - cu_->num_dalvik_registers);
    }

    LOG(INFO) << StringPrintf("V[%s] -> %s%d%s", buf3.c_str(),
                              v_reg_map.core_location == kLocPhysReg ?
                              "r" : "SP+", v_reg_map.core_location == kLocPhysReg ?
                              v_reg_map.core_reg : SRegOffset(i),
                              buf.c_str());
  }
}

/* Dump instructions and constant pool contents */
void Mir2Lir::CodegenDump() {
  LOG(INFO) << "Dumping LIR insns for "
            << PrettyMethod(cu_->method_idx, *cu_->dex_file);
  LIR* lir_insn;
  int insns_size = cu_->code_item->insns_size_in_code_units_;

  LOG(INFO) << "Regs (excluding ins) : " << cu_->num_regs;
  LOG(INFO) << "Ins : " << cu_->num_ins;
  LOG(INFO) << "Outs : " << cu_->num_outs;
  LOG(INFO) << "CoreSpills : " << num_core_spills_;
  LOG(INFO) << "FPSpills : " << num_fp_spills_;
  LOG(INFO) << "CompilerTemps : " << cu_->num_compiler_temps;
  LOG(INFO) << "Frame size : " << frame_size_;
  LOG(INFO) << "code size is " << total_size_ <<
      " bytes, Dalvik size is " << insns_size * 2;
  LOG(INFO) << "expansion factor: "
            << static_cast<float>(total_size_) / static_cast<float>(insns_size * 2);
  DumpPromotionMap();
  for (lir_insn = first_lir_insn_; lir_insn != NULL; lir_insn = lir_insn->next) {
    DumpLIRInsn(lir_insn, 0);
  }
  for (lir_insn = literal_list_; lir_insn != NULL; lir_insn = lir_insn->next) {
    LOG(INFO) << StringPrintf("%x (%04x): .word (%#x)", lir_insn->offset, lir_insn->offset,
                              lir_insn->operands[0]);
  }

  const DexFile::MethodId& method_id =
      cu_->dex_file->GetMethodId(cu_->method_idx);
  const Signature signature = cu_->dex_file->GetMethodSignature(method_id);
  const char* name = cu_->dex_file->GetMethodName(method_id);
  const char* descriptor(cu_->dex_file->GetMethodDeclaringClassDescriptor(method_id));

  // Dump mapping tables
  if (!encoded_mapping_table_.empty()) {
    MappingTable table(&encoded_mapping_table_[0]);
    DumpMappingTable("PC2Dex_MappingTable", descriptor, name, signature,
                     table.PcToDexSize(), table.PcToDexBegin());
    DumpMappingTable("Dex2PC_MappingTable", descriptor, name, signature,
                     table.DexToPcSize(), table.DexToPcBegin());
  }
}

/*
 * Search the existing constants in the literal pool for an exact or close match
 * within specified delta (greater or equal to 0).
 */
LIR* Mir2Lir::ScanLiteralPool(LIR* data_target, int value, unsigned int delta) {
  while (data_target) {
    if ((static_cast<unsigned>(value - data_target->operands[0])) <= delta)
      return data_target;
    data_target = data_target->next;
  }
  return NULL;
}

/* Search the existing constants in the literal pool for an exact wide match */
LIR* Mir2Lir::ScanLiteralPoolWide(LIR* data_target, int val_lo, int val_hi) {
  bool lo_match = false;
  LIR* lo_target = NULL;
  while (data_target) {
    if (lo_match && (data_target->operands[0] == val_hi)) {
      // Record high word in case we need to expand this later.
      lo_target->operands[1] = val_hi;
      return lo_target;
    }
    lo_match = false;
    if (data_target->operands[0] == val_lo) {
      lo_match = true;
      lo_target = data_target;
    }
    data_target = data_target->next;
  }
  return NULL;
}

/*
 * The following are building blocks to insert constants into the pool or
 * instruction streams.
 */

/* Add a 32-bit constant to the constant pool */
LIR* Mir2Lir::AddWordData(LIR* *constant_list_p, int value) {
  /* Add the constant to the literal pool */
  if (constant_list_p) {
    LIR* new_value = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), ArenaAllocator::kAllocData));
    new_value->operands[0] = value;
    new_value->next = *constant_list_p;
    *constant_list_p = new_value;
    estimated_native_code_size_ += sizeof(value);
    return new_value;
  }
  return NULL;
}

/* Add a 64-bit constant to the constant pool or mixed with code */
LIR* Mir2Lir::AddWideData(LIR* *constant_list_p, int val_lo, int val_hi) {
  AddWordData(constant_list_p, val_hi);
  return AddWordData(constant_list_p, val_lo);
}

static void PushWord(std::vector<uint8_t>&buf, int data) {
  buf.push_back(data & 0xff);
  buf.push_back((data >> 8) & 0xff);
  buf.push_back((data >> 16) & 0xff);
  buf.push_back((data >> 24) & 0xff);
}

// Push 8 bytes on 64-bit systems; 4 on 32-bit systems.
static void PushPointer(std::vector<uint8_t>&buf, void const* pointer) {
  uintptr_t data = reinterpret_cast<uintptr_t>(pointer);
  if (sizeof(void*) == sizeof(uint64_t)) {
    PushWord(buf, (data >> (sizeof(void*) * 4)) & 0xFFFFFFFF);
    PushWord(buf, data & 0xFFFFFFFF);
  } else {
    PushWord(buf, data);
  }
}

static void AlignBuffer(std::vector<uint8_t>&buf, size_t offset) {
  while (buf.size() < offset) {
    buf.push_back(0);
  }
}

/* Write the literal pool to the output stream */
void Mir2Lir::InstallLiteralPools() {
  AlignBuffer(code_buffer_, data_offset_);
  LIR* data_lir = literal_list_;
  while (data_lir != NULL) {
    PushWord(code_buffer_, data_lir->operands[0]);
    data_lir = NEXT_LIR(data_lir);
  }
  // Push code and method literals, record offsets for the compiler to patch.
  data_lir = code_literal_list_;
  while (data_lir != NULL) {
    uint32_t target = data_lir->operands[0];
    cu_->compiler_driver->AddCodePatch(cu_->dex_file,
                                       cu_->class_def_idx,
                                       cu_->method_idx,
                                       cu_->invoke_type,
                                       target,
                                       static_cast<InvokeType>(data_lir->operands[1]),
                                       code_buffer_.size());
    const DexFile::MethodId& id = cu_->dex_file->GetMethodId(target);
    // unique value based on target to ensure code deduplication works
    PushPointer(code_buffer_, &id);
    data_lir = NEXT_LIR(data_lir);
  }
  data_lir = method_literal_list_;
  while (data_lir != NULL) {
    uint32_t target = data_lir->operands[0];
    cu_->compiler_driver->AddMethodPatch(cu_->dex_file,
                                         cu_->class_def_idx,
                                         cu_->method_idx,
                                         cu_->invoke_type,
                                         target,
                                         static_cast<InvokeType>(data_lir->operands[1]),
                                         code_buffer_.size());
    const DexFile::MethodId& id = cu_->dex_file->GetMethodId(target);
    // unique value based on target to ensure code deduplication works
    PushPointer(code_buffer_, &id);
    data_lir = NEXT_LIR(data_lir);
  }
  // Push class literals.
  data_lir = class_literal_list_;
  while (data_lir != NULL) {
    uint32_t target = data_lir->operands[0];
    cu_->compiler_driver->AddClassPatch(cu_->dex_file,
                                        cu_->class_def_idx,
                                        cu_->method_idx,
                                        target,
                                        code_buffer_.size());
    const DexFile::TypeId& id = cu_->dex_file->GetTypeId(target);
    // unique value based on target to ensure code deduplication works
    PushPointer(code_buffer_, &id);
    data_lir = NEXT_LIR(data_lir);
  }
}

/* Write the switch tables to the output stream */
void Mir2Lir::InstallSwitchTables() {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable* tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    AlignBuffer(code_buffer_, tab_rec->offset);
    /*
     * For Arm, our reference point is the address of the bx
     * instruction that does the launch, so we have to subtract
     * the auto pc-advance. For other targets the reference point
     * is a label, so we can use the offset as-is.
     */
    int bx_offset = INVALID_OFFSET;
    switch (cu_->instruction_set) {
      case kThumb2:
        DCHECK(tab_rec->anchor->flags.fixup != kFixupNone);
        bx_offset = tab_rec->anchor->offset + 4;
        break;
      case kX86:
        bx_offset = 0;
        break;
      case kMips:
        bx_offset = tab_rec->anchor->offset;
        break;
      default: LOG(FATAL) << "Unexpected instruction set: " << cu_->instruction_set;
    }
    if (cu_->verbose) {
      LOG(INFO) << "Switch table for offset 0x" << std::hex << bx_offset;
    }
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      const int32_t* keys = reinterpret_cast<const int32_t*>(&(tab_rec->table[2]));
      for (int elems = 0; elems < tab_rec->table[1]; elems++) {
        int disp = tab_rec->targets[elems]->offset - bx_offset;
        if (cu_->verbose) {
          LOG(INFO) << "  Case[" << elems << "] key: 0x"
                    << std::hex << keys[elems] << ", disp: 0x"
                    << std::hex << disp;
        }
        PushWord(code_buffer_, keys[elems]);
        PushWord(code_buffer_,
                 tab_rec->targets[elems]->offset - bx_offset);
      }
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      for (int elems = 0; elems < tab_rec->table[1]; elems++) {
        int disp = tab_rec->targets[elems]->offset - bx_offset;
        if (cu_->verbose) {
          LOG(INFO) << "  Case[" << elems << "] disp: 0x"
                    << std::hex << disp;
        }
        PushWord(code_buffer_, tab_rec->targets[elems]->offset - bx_offset);
      }
    }
  }
}

/* Write the fill array data to the output stream */
void Mir2Lir::InstallFillArrayData() {
  GrowableArray<FillArrayData*>::Iterator iterator(&fill_array_data_);
  while (true) {
    Mir2Lir::FillArrayData *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    AlignBuffer(code_buffer_, tab_rec->offset);
    for (int i = 0; i < (tab_rec->size + 1) / 2; i++) {
      code_buffer_.push_back(tab_rec->table[i] & 0xFF);
      code_buffer_.push_back((tab_rec->table[i] >> 8) & 0xFF);
    }
  }
}

static int AssignLiteralOffsetCommon(LIR* lir, CodeOffset offset) {
  for (; lir != NULL; lir = lir->next) {
    lir->offset = offset;
    offset += 4;
  }
  return offset;
}

static int AssignLiteralPointerOffsetCommon(LIR* lir, CodeOffset offset) {
  unsigned int element_size = sizeof(void*);
  // Align to natural pointer size.
  offset = (offset + (element_size - 1)) & ~(element_size - 1);
  for (; lir != NULL; lir = lir->next) {
    lir->offset = offset;
    offset += element_size;
  }
  return offset;
}

// Make sure we have a code address for every declared catch entry
bool Mir2Lir::VerifyCatchEntries() {
  MappingTable table(&encoded_mapping_table_[0]);
  std::vector<uint32_t> dex_pcs;
  dex_pcs.reserve(table.DexToPcSize());
  for (auto it = table.DexToPcBegin(), end = table.DexToPcEnd(); it != end; ++it) {
    dex_pcs.push_back(it.DexPc());
  }
  // Sort dex_pcs, so that we can quickly check it against the ordered mir_graph_->catches_.
  std::sort(dex_pcs.begin(), dex_pcs.end());

  bool success = true;
  auto it = dex_pcs.begin(), end = dex_pcs.end();
  for (uint32_t dex_pc : mir_graph_->catches_) {
    while (it != end && *it < dex_pc) {
      LOG(INFO) << "Unexpected catch entry @ dex pc 0x" << std::hex << *it;
      ++it;
      success = false;
    }
    if (it == end || *it > dex_pc) {
      LOG(INFO) << "Missing native PC for catch entry @ 0x" << std::hex << dex_pc;
      success = false;
    } else {
      ++it;
    }
  }
  if (!success) {
    LOG(INFO) << "Bad dex2pcMapping table in " << PrettyMethod(cu_->method_idx, *cu_->dex_file);
    LOG(INFO) << "Entries @ decode: " << mir_graph_->catches_.size() << ", Entries in table: "
              << table.DexToPcSize();
  }
  return success;
}


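/*
 * Build the LEB128-encoded PC<->dex mapping table. The first pass sizes the
 * header and both sub-tables, the second pass writes them:
 *   header:  ULEB128 total_entries, ULEB128 pc2dex_entries
 *   pc2dex:  delta-encoded (native offset, dex pc) pairs for safepoints
 *   dex2pc:  delta-encoded pairs for exported PCs (used for catch entries)
 */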
void Mir2Lir::CreateMappingTables() {
  uint32_t pc2dex_data_size = 0u;
  uint32_t pc2dex_entries = 0u;
  uint32_t pc2dex_offset = 0u;
  uint32_t pc2dex_dalvik_offset = 0u;
  uint32_t dex2pc_data_size = 0u;
  uint32_t dex2pc_entries = 0u;
  uint32_t dex2pc_offset = 0u;
  uint32_t dex2pc_dalvik_offset = 0u;
  for (LIR* tgt_lir = first_lir_insn_; tgt_lir != NULL; tgt_lir = NEXT_LIR(tgt_lir)) {
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
      pc2dex_entries += 1;
      DCHECK(pc2dex_offset <= tgt_lir->offset);
      pc2dex_data_size += UnsignedLeb128Size(tgt_lir->offset - pc2dex_offset);
      pc2dex_data_size += SignedLeb128Size(static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                           static_cast<int32_t>(pc2dex_dalvik_offset));
      pc2dex_offset = tgt_lir->offset;
      pc2dex_dalvik_offset = tgt_lir->dalvik_offset;
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
      dex2pc_entries += 1;
      DCHECK(dex2pc_offset <= tgt_lir->offset);
      dex2pc_data_size += UnsignedLeb128Size(tgt_lir->offset - dex2pc_offset);
      dex2pc_data_size += SignedLeb128Size(static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                           static_cast<int32_t>(dex2pc_dalvik_offset));
      dex2pc_offset = tgt_lir->offset;
      dex2pc_dalvik_offset = tgt_lir->dalvik_offset;
    }
  }

  uint32_t total_entries = pc2dex_entries + dex2pc_entries;
  uint32_t hdr_data_size = UnsignedLeb128Size(total_entries) + UnsignedLeb128Size(pc2dex_entries);
  uint32_t data_size = hdr_data_size + pc2dex_data_size + dex2pc_data_size;
  encoded_mapping_table_.resize(data_size);
  uint8_t* write_pos = &encoded_mapping_table_[0];
  write_pos = EncodeUnsignedLeb128(write_pos, total_entries);
  write_pos = EncodeUnsignedLeb128(write_pos, pc2dex_entries);
  DCHECK_EQ(static_cast<size_t>(write_pos - &encoded_mapping_table_[0]), hdr_data_size);
  uint8_t* write_pos2 = write_pos + pc2dex_data_size;

  pc2dex_offset = 0u;
  pc2dex_dalvik_offset = 0u;
  dex2pc_offset = 0u;
  dex2pc_dalvik_offset = 0u;
  for (LIR* tgt_lir = first_lir_insn_; tgt_lir != NULL; tgt_lir = NEXT_LIR(tgt_lir)) {
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
      DCHECK(pc2dex_offset <= tgt_lir->offset);
      write_pos = EncodeUnsignedLeb128(write_pos, tgt_lir->offset - pc2dex_offset);
      write_pos = EncodeSignedLeb128(write_pos, static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                     static_cast<int32_t>(pc2dex_dalvik_offset));
      pc2dex_offset = tgt_lir->offset;
      pc2dex_dalvik_offset = tgt_lir->dalvik_offset;
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
      DCHECK(dex2pc_offset <= tgt_lir->offset);
      write_pos2 = EncodeUnsignedLeb128(write_pos2, tgt_lir->offset - dex2pc_offset);
      write_pos2 = EncodeSignedLeb128(write_pos2, static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                      static_cast<int32_t>(dex2pc_dalvik_offset));
      dex2pc_offset = tgt_lir->offset;
      dex2pc_dalvik_offset = tgt_lir->dalvik_offset;
    }
  }
  DCHECK_EQ(static_cast<size_t>(write_pos - &encoded_mapping_table_[0]),
            hdr_data_size + pc2dex_data_size);
  DCHECK_EQ(static_cast<size_t>(write_pos2 - &encoded_mapping_table_[0]), data_size);

  if (kIsDebugBuild) {
    CHECK(VerifyCatchEntries());

    // Verify the encoded table holds the expected data.
    MappingTable table(&encoded_mapping_table_[0]);
    CHECK_EQ(table.TotalSize(), total_entries);
    CHECK_EQ(table.PcToDexSize(), pc2dex_entries);
    auto it = table.PcToDexBegin();
    auto it2 = table.DexToPcBegin();
    for (LIR* tgt_lir = first_lir_insn_; tgt_lir != NULL; tgt_lir = NEXT_LIR(tgt_lir)) {
      if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
        CHECK_EQ(tgt_lir->offset, it.NativePcOffset());
        CHECK_EQ(tgt_lir->dalvik_offset, it.DexPc());
        ++it;
      }
      if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
        CHECK_EQ(tgt_lir->offset, it2.NativePcOffset());
        CHECK_EQ(tgt_lir->dalvik_offset, it2.DexPc());
        ++it2;
      }
    }
    CHECK(it == table.PcToDexEnd());
    CHECK(it2 == table.DexToPcEnd());
  }
}

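// Builds the native-PC -> reference bitmap table consumed by the GC: an open-addressed hash
// table keyed on native offset, with a header recording the offset width, reference bitmap
// width and entry count.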
class NativePcToReferenceMapBuilder {
 public:
  NativePcToReferenceMapBuilder(std::vector<uint8_t>* table,
                                size_t entries, uint32_t max_native_offset,
                                size_t references_width) : entries_(entries),
                                references_width_(references_width), in_use_(entries),
                                table_(table) {
    // Compute width in bytes needed to hold max_native_offset.
    native_offset_width_ = 0;
    while (max_native_offset != 0) {
      native_offset_width_++;
      max_native_offset >>= 8;
    }
    // Resize table and set up header.
    table->resize((EntryWidth() * entries) + sizeof(uint32_t));
    CHECK_LT(native_offset_width_, 1U << 3);
    (*table)[0] = native_offset_width_ & 7;
    CHECK_LT(references_width_, 1U << 13);
    (*table)[0] |= (references_width_ << 3) & 0xFF;
    (*table)[1] = (references_width_ >> 5) & 0xFF;
    CHECK_LT(entries, 1U << 16);
    (*table)[2] = entries & 0xFF;
    (*table)[3] = (entries >> 8) & 0xFF;
  }

  void AddEntry(uint32_t native_offset, const uint8_t* references) {
    size_t table_index = TableIndex(native_offset);
    while (in_use_[table_index]) {
      table_index = (table_index + 1) % entries_;
    }
    in_use_[table_index] = true;
    SetCodeOffset(table_index, native_offset);
    DCHECK_EQ(native_offset, GetCodeOffset(table_index));
    SetReferences(table_index, references);
  }

 private:
  size_t TableIndex(uint32_t native_offset) {
    return NativePcOffsetToReferenceMap::Hash(native_offset) % entries_;
  }

  uint32_t GetCodeOffset(size_t table_index) {
    uint32_t native_offset = 0;
    size_t table_offset = (table_index * EntryWidth()) + sizeof(uint32_t);
    for (size_t i = 0; i < native_offset_width_; i++) {
      native_offset |= (*table_)[table_offset + i] << (i * 8);
    }
    return native_offset;
  }

  void SetCodeOffset(size_t table_index, uint32_t native_offset) {
    size_t table_offset = (table_index * EntryWidth()) + sizeof(uint32_t);
    for (size_t i = 0; i < native_offset_width_; i++) {
      (*table_)[table_offset + i] = (native_offset >> (i * 8)) & 0xFF;
    }
  }

  void SetReferences(size_t table_index, const uint8_t* references) {
    size_t table_offset = (table_index * EntryWidth()) + sizeof(uint32_t);
    memcpy(&(*table_)[table_offset + native_offset_width_], references, references_width_);
  }

  size_t EntryWidth() const {
    return native_offset_width_ + references_width_;
  }

  // Number of entries in the table.
  const size_t entries_;
  // Number of bytes used to encode the reference bitmap.
  const size_t references_width_;
  // Number of bytes used to encode a native offset.
  size_t native_offset_width_;
  // Entries that are in use.
  std::vector<bool> in_use_;
  // The table we're building.
  std::vector<uint8_t>* const table_;
};

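// Build the native GC map by pairing each safepoint's native offset in the mapping table with
// the verifier's reference bitmap for the corresponding dex pc.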
void Mir2Lir::CreateNativeGcMap() {
  DCHECK(!encoded_mapping_table_.empty());
  MappingTable mapping_table(&encoded_mapping_table_[0]);
  uint32_t max_native_offset = 0;
  for (auto it = mapping_table.PcToDexBegin(), end = mapping_table.PcToDexEnd(); it != end; ++it) {
    uint32_t native_offset = it.NativePcOffset();
    if (native_offset > max_native_offset) {
      max_native_offset = native_offset;
    }
  }
  MethodReference method_ref(cu_->dex_file, cu_->method_idx);
  const std::vector<uint8_t>* gc_map_raw =
      cu_->compiler_driver->GetVerifiedMethodsData()->GetDexGcMap(method_ref);
  verifier::DexPcToReferenceMap dex_gc_map(&(*gc_map_raw)[0]);
  DCHECK_EQ(gc_map_raw->size(), dex_gc_map.RawSize());
  // Compute native offset to references size.
  NativePcToReferenceMapBuilder native_gc_map_builder(&native_gc_map_,
                                                      mapping_table.PcToDexSize(),
                                                      max_native_offset, dex_gc_map.RegWidth());

  for (auto it = mapping_table.PcToDexBegin(), end = mapping_table.PcToDexEnd(); it != end; ++it) {
    uint32_t native_offset = it.NativePcOffset();
    uint32_t dex_pc = it.DexPc();
    const uint8_t* references = dex_gc_map.FindBitMap(dex_pc, false);
    CHECK(references != NULL) << "Missing ref for dex pc 0x" << std::hex << dex_pc;
    native_gc_map_builder.AddEntry(native_offset, references);
  }
}

/* Determine the offset of each literal field */
int Mir2Lir::AssignLiteralOffset(CodeOffset offset) {
  offset = AssignLiteralOffsetCommon(literal_list_, offset);
  offset = AssignLiteralPointerOffsetCommon(code_literal_list_, offset);
  offset = AssignLiteralPointerOffsetCommon(method_literal_list_, offset);
  offset = AssignLiteralPointerOffsetCommon(class_literal_list_, offset);
  return offset;
}

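/* Assign offsets to the switch tables and return the updated data offset. */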
int Mir2Lir::AssignSwitchTablesOffset(CodeOffset offset) {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable* tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    tab_rec->offset = offset;
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      offset += tab_rec->table[1] * (sizeof(int) * 2);
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      offset += tab_rec->table[1] * sizeof(int);
    }
  }
  return offset;
}

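/* Assign word-aligned offsets to the fill-array-data payloads and return the updated data offset. */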
int Mir2Lir::AssignFillArrayDataOffset(CodeOffset offset) {
  GrowableArray<FillArrayData*>::Iterator iterator(&fill_array_data_);
  while (true) {
    Mir2Lir::FillArrayData *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    tab_rec->offset = offset;
    offset += tab_rec->size;
    // word align
    offset = (offset + 3) & ~3;
  }
  return offset;
}

/*
 * Insert a kPseudoCaseLabel at the beginning of the Dalvik
 * offset vaddr if pretty-printing, otherwise use the standard block
 * label. The selected label will be used to fix up the case
 * branch table during the assembly phase. All resource flags
 * are set to prevent code motion. KeyVal is just there for debugging.
 */
LIR* Mir2Lir::InsertCaseLabel(DexOffset vaddr, int keyVal) {
  LIR* boundary_lir = &block_label_list_[mir_graph_->FindBlock(vaddr)->id];
  LIR* res = boundary_lir;
  if (cu_->verbose) {
    // Only pay the expense if we're pretty-printing.
    LIR* new_label = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), ArenaAllocator::kAllocLIR));
    new_label->dalvik_offset = vaddr;
    new_label->opcode = kPseudoCaseLabel;
    new_label->operands[0] = keyVal;
    new_label->flags.fixup = kFixupLabel;
    DCHECK(!new_label->flags.use_def_invalid);
    new_label->u.m.def_mask = ENCODE_ALL;
    InsertLIRAfter(boundary_lir, new_label);
    res = new_label;
  }
  return res;
}

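/* Bind each packed-switch case to a label LIR, numbering cases from low_key. */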
void Mir2Lir::MarkPackedCaseLabels(Mir2Lir::SwitchTable* tab_rec) {
  const uint16_t* table = tab_rec->table;
  DexOffset base_vaddr = tab_rec->vaddr;
  const int32_t *targets = reinterpret_cast<const int32_t*>(&table[4]);
  int entries = table[1];
  int low_key = s4FromSwitchData(&table[2]);
  for (int i = 0; i < entries; i++) {
    tab_rec->targets[i] = InsertCaseLabel(base_vaddr + targets[i], i + low_key);
  }
}

void Mir2Lir::MarkSparseCaseLabels(Mir2Lir::SwitchTable* tab_rec) {
  const uint16_t* table = tab_rec->table;
  DexOffset base_vaddr = tab_rec->vaddr;
  int entries = table[1];
  const int32_t* keys = reinterpret_cast<const int32_t*>(&table[2]);
  const int32_t* targets = &keys[entries];
  for (int i = 0; i < entries; i++) {
    tab_rec->targets[i] = InsertCaseLabel(base_vaddr + targets[i], keys[i]);
  }
}

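/* Walk the recorded switch tables and mark their case target labels. */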
void Mir2Lir::ProcessSwitchTables() {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    if (tab_rec->table[0] == Instruction::kPackedSwitchSignature) {
      MarkPackedCaseLabels(tab_rec);
    } else if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      MarkSparseCaseLabels(tab_rec);
    } else {
      LOG(FATAL) << "Invalid switch table";
    }
  }
}

void Mir2Lir::DumpSparseSwitchTable(const uint16_t* table) {
  /*
   * Sparse switch data format:
   *  ushort ident = 0x0200   magic value
   *  ushort size             number of entries in the table; > 0
   *  int keys[size]          keys, sorted low-to-high; 32-bit aligned
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (2+size*4) 16-bit code units.
   */
  uint16_t ident = table[0];
  int entries = table[1];
  const int32_t* keys = reinterpret_cast<const int32_t*>(&table[2]);
  const int32_t* targets = &keys[entries];
  LOG(INFO) << "Sparse switch table - ident:0x" << std::hex << ident
            << ", entries: " << std::dec << entries;
  for (int i = 0; i < entries; i++) {
    LOG(INFO) << "  Key[" << keys[i] << "] -> 0x" << std::hex << targets[i];
  }
}

void Mir2Lir::DumpPackedSwitchTable(const uint16_t* table) {
  /*
   * Packed switch data format:
   *  ushort ident = 0x0100   magic value
   *  ushort size             number of entries in the table
   *  int first_key           first (and lowest) switch case value
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (4+size*2) 16-bit code units.
   */
  uint16_t ident = table[0];
  const int32_t* targets = reinterpret_cast<const int32_t*>(&table[4]);
  int entries = table[1];
  int low_key = s4FromSwitchData(&table[2]);
  LOG(INFO) << "Packed switch table - ident:0x" << std::hex << ident
            << ", entries: " << std::dec << entries << ", low_key: " << low_key;
  for (int i = 0; i < entries; i++) {
    LOG(INFO) << "  Key[" << (i + low_key) << "] -> 0x" << std::hex
              << targets[i];
  }
}

/* Set up special LIR to mark a Dalvik byte-code instruction start for pretty printing */
void Mir2Lir::MarkBoundary(DexOffset offset, const char* inst_str) {
  // NOTE: only used for debug listings.
  NewLIR1(kPseudoDalvikByteCodeBoundary, WrapPointer(ArenaStrdup(inst_str)));
}

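// Statically evaluate an IF_* branch whose operands are known constants.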
bool Mir2Lir::EvaluateBranch(Instruction::Code opcode, int32_t src1, int32_t src2) {
  bool is_taken;
  switch (opcode) {
    case Instruction::IF_EQ: is_taken = (src1 == src2); break;
    case Instruction::IF_NE: is_taken = (src1 != src2); break;
    case Instruction::IF_LT: is_taken = (src1 < src2); break;
    case Instruction::IF_GE: is_taken = (src1 >= src2); break;
    case Instruction::IF_GT: is_taken = (src1 > src2); break;
    case Instruction::IF_LE: is_taken = (src1 <= src2); break;
    case Instruction::IF_EQZ: is_taken = (src1 == 0); break;
    case Instruction::IF_NEZ: is_taken = (src1 != 0); break;
    case Instruction::IF_LTZ: is_taken = (src1 < 0); break;
    case Instruction::IF_GEZ: is_taken = (src1 >= 0); break;
    case Instruction::IF_GTZ: is_taken = (src1 > 0); break;
    case Instruction::IF_LEZ: is_taken = (src1 <= 0); break;
    default:
      LOG(FATAL) << "Unexpected opcode " << opcode;
      is_taken = false;
  }
  return is_taken;
}

// Convert relation of src1/src2 to src2/src1
ConditionCode Mir2Lir::FlipComparisonOrder(ConditionCode before) {
  ConditionCode res;
  switch (before) {
    case kCondEq: res = kCondEq; break;
    case kCondNe: res = kCondNe; break;
    case kCondLt: res = kCondGt; break;
    case kCondGt: res = kCondLt; break;
    case kCondLe: res = kCondGe; break;
    case kCondGe: res = kCondLe; break;
    default:
      res = static_cast<ConditionCode>(0);
      LOG(FATAL) << "Unexpected ccode " << before;
  }
  return res;
}

// TODO: move to mir_to_lir.cc
Mir2Lir::Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena)
    : Backend(arena),
      literal_list_(NULL),
      method_literal_list_(NULL),
      class_literal_list_(NULL),
      code_literal_list_(NULL),
      first_fixup_(NULL),
      cu_(cu),
      mir_graph_(mir_graph),
      switch_tables_(arena, 4, kGrowableArraySwitchTables),
      fill_array_data_(arena, 4, kGrowableArrayFillArrayData),
      throw_launchpads_(arena, 2048, kGrowableArrayThrowLaunchPads),
      suspend_launchpads_(arena, 4, kGrowableArraySuspendLaunchPads),
      intrinsic_launchpads_(arena, 2048, kGrowableArrayMisc),
      tempreg_info_(arena, 20, kGrowableArrayMisc),
      reginfo_map_(arena, 64, kGrowableArrayMisc),
      pointer_storage_(arena, 128, kGrowableArrayMisc),
      data_offset_(0),
      total_size_(0),
      block_label_list_(NULL),
      current_dalvik_offset_(0),
      estimated_native_code_size_(0),
      reg_pool_(NULL),
      live_sreg_(0),
      num_core_spills_(0),
      num_fp_spills_(0),
      frame_size_(0),
      core_spill_mask_(0),
      fp_spill_mask_(0),
      first_lir_insn_(NULL),
      last_lir_insn_(NULL) {
  promotion_map_ = static_cast<PromotionMap*>
      (arena_->Alloc((cu_->num_dalvik_registers + cu_->num_compiler_temps + 1) *
                     sizeof(promotion_map_[0]), ArenaAllocator::kAllocRegAlloc));
  // Reserve pointer id 0 for NULL.
  size_t null_idx = WrapPointer(NULL);
  DCHECK_EQ(null_idx, 0U);
}

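/* Drive code generation: allocate registers, lower MIR to LIR (or emit a special-case body), and assemble machine code. */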
void Mir2Lir::Materialize() {
  cu_->NewTimingSplit("RegisterAllocation");
  CompilerInitializeRegAlloc();  // Needs to happen after SSA naming

  /* Allocate Registers using simple local allocation scheme */
  SimpleRegAlloc();

  /*
   * Custom codegen for special cases. If for any reason the
   * special codegen doesn't succeed, first_lir_insn_ will be
   * set to NULL.
   */
  // TODO: Clean up GenSpecial() and return true only if special implementation is emitted.
  // Currently, GenSpecial() returns IsSpecial() but doesn't check after SpecialMIR2LIR().
  DCHECK(cu_->compiler_driver->GetMethodInlinerMap() != nullptr);
  cu_->compiler_driver->GetMethodInlinerMap()->GetMethodInliner(cu_->dex_file)
      ->GenSpecial(this, cu_->method_idx);

  /* Convert MIR to LIR, etc. */
  if (first_lir_insn_ == NULL) {
    MethodMIR2LIR();
  }

  /* Method is not empty */
  if (first_lir_insn_) {
    // mark the targets of switch statement case labels
    ProcessSwitchTables();

    /* Convert LIR into machine code. */
    AssembleLIR();

    if (cu_->verbose) {
      CodegenDump();
    }
  }
}

CompiledMethod* Mir2Lir::GetCompiledMethod() {
  // Combine vmap tables - core regs, then fp regs - into vmap_table
  std::vector<uint16_t> raw_vmap_table;
  // Core regs may have been inserted out of order - sort first
  std::sort(core_vmap_table_.begin(), core_vmap_table_.end());
  for (size_t i = 0 ; i < core_vmap_table_.size(); ++i) {
    // Copy, stripping out the phys register sort key
    raw_vmap_table.push_back(~(-1 << VREG_NUM_WIDTH) & core_vmap_table_[i]);
  }
  // If we have a frame, push a marker to take place of lr
  if (frame_size_ > 0) {
    raw_vmap_table.push_back(INVALID_VREG);
  } else {
    DCHECK_EQ(__builtin_popcount(core_spill_mask_), 0);
    DCHECK_EQ(__builtin_popcount(fp_spill_mask_), 0);
  }
  // Combine vmap tables - core regs, then fp regs. fp regs already sorted
  for (uint32_t i = 0; i < fp_vmap_table_.size(); i++) {
    raw_vmap_table.push_back(fp_vmap_table_[i]);
  }
  Leb128EncodingVector vmap_encoder;
  // Prefix the encoded data with its size.
  vmap_encoder.PushBackUnsigned(raw_vmap_table.size());
  for (uint16_t cur : raw_vmap_table) {
    vmap_encoder.PushBackUnsigned(cur);
  }
  CompiledMethod* result =
      new CompiledMethod(*cu_->compiler_driver, cu_->instruction_set, code_buffer_, frame_size_,
                         core_spill_mask_, fp_spill_mask_, encoded_mapping_table_,
                         vmap_encoder.GetData(), native_gc_map_);
  return result;
}

int Mir2Lir::ComputeFrameSize() {
  /* Figure out the frame size */
  static const uint32_t kAlignMask = kStackAlignment - 1;
  uint32_t size = (num_core_spills_ + num_fp_spills_ +
                   1 /* filler word */ + cu_->num_regs + cu_->num_outs +
                   cu_->num_compiler_temps + 1 /* cur_method* */)
                   * sizeof(uint32_t);
  /* Align and set */
  return (size + kAlignMask) & ~(kAlignMask);
}

/*
 * Append an LIR instruction to the LIR list maintained by a compilation
 * unit
 */
void Mir2Lir::AppendLIR(LIR* lir) {
  if (first_lir_insn_ == NULL) {
    DCHECK(last_lir_insn_ == NULL);
    last_lir_insn_ = first_lir_insn_ = lir;
    lir->prev = lir->next = NULL;
  } else {
    last_lir_insn_->next = lir;
    lir->prev = last_lir_insn_;
    lir->next = NULL;
    last_lir_insn_ = lir;
  }
}

/*
 * Insert an LIR instruction before the current instruction, which cannot be the
 * first instruction.
 *
 * prev_lir <-> new_lir <-> current_lir
 */
void Mir2Lir::InsertLIRBefore(LIR* current_lir, LIR* new_lir) {
  DCHECK(current_lir->prev != NULL);
  LIR *prev_lir = current_lir->prev;

  prev_lir->next = new_lir;
  new_lir->prev = prev_lir;
  new_lir->next = current_lir;
  current_lir->prev = new_lir;
}

/*
 * Insert an LIR instruction after the current instruction, which cannot be the
 * first instruction.
 *
 * current_lir -> new_lir -> old_next
 */
void Mir2Lir::InsertLIRAfter(LIR* current_lir, LIR* new_lir) {
  new_lir->prev = current_lir;
  new_lir->next = current_lir->next;
  current_lir->next = new_lir;
  new_lir->next->prev = new_lir;
}

}  // namespace art