/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mir_to_lir-inl.h"

// Mac does not provide endian.h, so we'll use byte order agnostic code.
#ifndef __APPLE__
#include <endian.h>
#endif

#include "base/bit_vector-inl.h"
#include "base/stringprintf.h"
#include "dex/mir_graph.h"
#include "driver/compiler_driver.h"
#include "driver/compiler_options.h"
#include "driver/dex_compilation_unit.h"
#include "dex_file-inl.h"
#include "gc_map.h"
#include "gc_map_builder.h"
#include "mapping_table.h"
#include "dex/quick/dex_file_method_inliner.h"
#include "dex/quick/dex_file_to_method_inliner_map.h"
#include "dex/verification_results.h"
#include "dex/verified_method.h"
#include "utils/dex_cache_arrays_layout-inl.h"
#include "verifier/dex_gc_map.h"
#include "verifier/method_verifier.h"
#include "vmap_table.h"

namespace art {

namespace {

/* Dump a mapping table */
template <typename It>
void DumpMappingTable(const char* table_name, const char* descriptor, const char* name,
                      const Signature& signature, uint32_t size, It first) {
  if (size != 0) {
    std::string line(StringPrintf("\n %s %s%s_%s_table[%u] = {", table_name,
                                  descriptor, name, signature.ToString().c_str(), size));
    std::replace(line.begin(), line.end(), ';', '_');
    LOG(INFO) << line;
    for (uint32_t i = 0; i != size; ++i) {
      line = StringPrintf(" {0x%05x, 0x%04x},", first.NativePcOffset(), first.DexPc());
      ++first;
      LOG(INFO) << line;
    }
    LOG(INFO) << " };\n\n";
  }
}

}  // anonymous namespace

bool Mir2Lir::IsInexpensiveConstant(RegLocation rl_src) {
  bool res = false;
  if (rl_src.is_const) {
    if (rl_src.wide) {
      // For wide registers, check whether we're the high partner. In that case we need to switch
      // to the lower one for the correct value.
      if (rl_src.high_word) {
        rl_src.high_word = false;
        rl_src.s_reg_low--;
        rl_src.orig_sreg--;
      }
      if (rl_src.fp) {
        res = InexpensiveConstantDouble(mir_graph_->ConstantValueWide(rl_src));
      } else {
        res = InexpensiveConstantLong(mir_graph_->ConstantValueWide(rl_src));
      }
    } else {
      if (rl_src.fp) {
        res = InexpensiveConstantFloat(mir_graph_->ConstantValue(rl_src));
      } else {
        res = InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src));
      }
    }
  }
  return res;
}

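// Record a safepoint at the PC that follows |inst|: make |inst| define all resources so nothing
// is scheduled across it, then append a kPseudoSafepointPC marker paired with the current MIR
// for later GC map construction.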
void Mir2Lir::MarkSafepointPC(LIR* inst) {
  DCHECK(!inst->flags.use_def_invalid);
  inst->u.m.def_mask = &kEncodeAll;
  LIR* safepoint_pc = NewLIR0(kPseudoSafepointPC);
  DCHECK(safepoint_pc->u.m.def_mask->Equals(kEncodeAll));
  DCHECK(current_mir_ != nullptr || (current_dalvik_offset_ == 0 && safepoints_.empty()));
  safepoints_.emplace_back(safepoint_pc, current_mir_);
}

void Mir2Lir::MarkSafepointPCAfter(LIR* after) {
  DCHECK(!after->flags.use_def_invalid);
  after->u.m.def_mask = &kEncodeAll;
  // As NewLIR0 uses Append, we need to create the LIR by hand.
  LIR* safepoint_pc = RawLIR(current_dalvik_offset_, kPseudoSafepointPC);
  if (after->next == nullptr) {
    DCHECK_EQ(after, last_lir_insn_);
    AppendLIR(safepoint_pc);
  } else {
    InsertLIRAfter(after, safepoint_pc);
  }
  DCHECK(safepoint_pc->u.m.def_mask->Equals(kEncodeAll));
  DCHECK(current_mir_ != nullptr || (current_dalvik_offset_ == 0 && safepoints_.empty()));
  safepoints_.emplace_back(safepoint_pc, current_mir_);
}

/* Remove a LIR from the list. */
void Mir2Lir::UnlinkLIR(LIR* lir) {
  if (UNLIKELY(lir == first_lir_insn_)) {
    first_lir_insn_ = lir->next;
    if (lir->next != nullptr) {
      lir->next->prev = nullptr;
    } else {
      DCHECK(lir->next == nullptr);
      DCHECK(lir == last_lir_insn_);
      last_lir_insn_ = nullptr;
    }
  } else if (lir == last_lir_insn_) {
    last_lir_insn_ = lir->prev;
    lir->prev->next = nullptr;
  } else if ((lir->prev != nullptr) && (lir->next != nullptr)) {
    lir->prev->next = lir->next;
    lir->next->prev = lir->prev;
  }
}

/* Convert an instruction to a NOP */
void Mir2Lir::NopLIR(LIR* lir) {
  lir->flags.is_nop = true;
  if (!cu_->verbose) {
    UnlinkLIR(lir);
  }
}

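// Rewrite the memory portion of a load/store's use/def mask so it carries exactly one memory
// kind: literal pool, Dalvik register, heap reference, or must-not-alias.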
void Mir2Lir::SetMemRefType(LIR* lir, bool is_load, int mem_type) {
  DCHECK(GetTargetInstFlags(lir->opcode) & (IS_LOAD | IS_STORE));
  DCHECK(!lir->flags.use_def_invalid);
  // TODO: Avoid the extra Arena allocation!
  const ResourceMask** mask_ptr;
  ResourceMask mask;
  if (is_load) {
    mask_ptr = &lir->u.m.use_mask;
  } else {
    mask_ptr = &lir->u.m.def_mask;
  }
  mask = **mask_ptr;
  /* Clear out the memref flags */
  mask.ClearBits(kEncodeMem);
  /* ..and then add back the one we need */
  switch (mem_type) {
    case ResourceMask::kLiteral:
      DCHECK(is_load);
      mask.SetBit(ResourceMask::kLiteral);
      break;
    case ResourceMask::kDalvikReg:
      mask.SetBit(ResourceMask::kDalvikReg);
      break;
    case ResourceMask::kHeapRef:
      mask.SetBit(ResourceMask::kHeapRef);
      break;
    case ResourceMask::kMustNotAlias:
      /* Currently only loads can be marked as kMustNotAlias */
      DCHECK(!(GetTargetInstFlags(lir->opcode) & IS_STORE));
      mask.SetBit(ResourceMask::kMustNotAlias);
      break;
    default:
      LOG(FATAL) << "Oat: invalid memref kind - " << mem_type;
  }
  *mask_ptr = mask_cache_.GetMask(mask);
}

/*
 * Mark load/store instructions that access Dalvik registers through the stack.
 */
void Mir2Lir::AnnotateDalvikRegAccess(LIR* lir, int reg_id, bool is_load,
                                      bool is64bit) {
  DCHECK((is_load ? lir->u.m.use_mask : lir->u.m.def_mask)->Intersection(kEncodeMem).Equals(
      kEncodeDalvikReg));

  /*
   * Store the Dalvik register id in alias_info. Mark the MSB if it is a 64-bit
   * access.
   */
  lir->flags.alias_info = ENCODE_ALIAS_INFO(reg_id, is64bit);
}

/*
 * Debugging macros
 */
#define DUMP_RESOURCE_MASK(X)

/* Pretty-print a LIR instruction */
void Mir2Lir::DumpLIRInsn(LIR* lir, unsigned char* base_addr) {
  int offset = lir->offset;
  int dest = lir->operands[0];
  const bool dump_nop = (cu_->enable_debug & (1 << kDebugShowNops));

  /* Handle pseudo-ops individually, and all regular insns as a group */
  switch (lir->opcode) {
    case kPseudoPrologueBegin:
      LOG(INFO) << "-------- PrologueBegin";
      break;
    case kPseudoPrologueEnd:
      LOG(INFO) << "-------- PrologueEnd";
      break;
    case kPseudoEpilogueBegin:
      LOG(INFO) << "-------- EpilogueBegin";
      break;
    case kPseudoEpilogueEnd:
      LOG(INFO) << "-------- EpilogueEnd";
      break;
    case kPseudoBarrier:
      LOG(INFO) << "-------- BARRIER";
      break;
    case kPseudoEntryBlock:
      LOG(INFO) << "-------- entry offset: 0x" << std::hex << dest;
      break;
    case kPseudoDalvikByteCodeBoundary:
      if (lir->operands[0] == 0) {
        // NOTE: only used for debug listings.
        lir->operands[0] = WrapPointer(ArenaStrdup("No instruction string"));
      }
      LOG(INFO) << "-------- dalvik offset: 0x" << std::hex
                << lir->dalvik_offset << " @ "
                << UnwrapPointer<char>(lir->operands[0]);
      break;
    case kPseudoExitBlock:
      LOG(INFO) << "-------- exit offset: 0x" << std::hex << dest;
      break;
    case kPseudoPseudoAlign4:
      LOG(INFO) << reinterpret_cast<uintptr_t>(base_addr) + offset << " (0x" << std::hex
                << offset << "): .align4";
      break;
    case kPseudoEHBlockLabel:
      LOG(INFO) << "Exception_Handling:";
      break;
    case kPseudoTargetLabel:
    case kPseudoNormalBlockLabel:
      LOG(INFO) << "L" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoThrowTarget:
      LOG(INFO) << "LT" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoIntrinsicRetry:
      LOG(INFO) << "IR" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSuspendTarget:
      LOG(INFO) << "LS" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSafepointPC:
      LOG(INFO) << "LsafepointPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoExportedPC:
      LOG(INFO) << "LexportedPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoCaseLabel:
      LOG(INFO) << "LC" << reinterpret_cast<void*>(lir) << ": Case target 0x"
                << std::hex << lir->operands[0] << "|" << std::dec <<
                lir->operands[0];
      break;
    default:
      if (lir->flags.is_nop && !dump_nop) {
        break;
      } else {
        std::string op_name(BuildInsnString(GetTargetInstName(lir->opcode),
                                            lir, base_addr));
        std::string op_operands(BuildInsnString(GetTargetInstFmt(lir->opcode),
                                                lir, base_addr));
        LOG(INFO) << StringPrintf("%5p|0x%02x: %-9s%s%s",
                                  base_addr + offset,
                                  lir->dalvik_offset,
                                  op_name.c_str(), op_operands.c_str(),
                                  lir->flags.is_nop ? "(nop)" : "");
      }
      break;
  }

  if (lir->u.m.use_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, *lir->u.m.use_mask, "use"));
  }
  if (lir->u.m.def_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, *lir->u.m.def_mask, "def"));
  }
}

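// Dump the promotion map: for every virtual register (plus the Method* slot and compiler temps),
// show the promoted core/FP register or the stack home location.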
void Mir2Lir::DumpPromotionMap() {
  uint32_t num_regs = mir_graph_->GetNumOfCodeAndTempVRs();
  for (uint32_t i = 0; i < num_regs; i++) {
    PromotionMap v_reg_map = promotion_map_[i];
    std::string buf;
    if (v_reg_map.fp_location == kLocPhysReg) {
      StringAppendF(&buf, " : s%d", RegStorage::RegNum(v_reg_map.fp_reg));
    }

    std::string buf3;
    if (i < mir_graph_->GetNumOfCodeVRs()) {
      StringAppendF(&buf3, "%02d", i);
    } else if (i == mir_graph_->GetNumOfCodeVRs()) {
      buf3 = "Method*";
    } else {
      uint32_t diff = i - mir_graph_->GetNumOfCodeVRs();
      StringAppendF(&buf3, "ct%d", diff);
    }

    LOG(INFO) << StringPrintf("V[%s] -> %s%d%s", buf3.c_str(),
                              v_reg_map.core_location == kLocPhysReg ?
                              "r" : "SP+", v_reg_map.core_location == kLocPhysReg ?
                              v_reg_map.core_reg : SRegOffset(i),
                              buf.c_str());
  }
}

void Mir2Lir::UpdateLIROffsets() {
  // Only used for code listings.
  size_t offset = 0;
  for (LIR* lir = first_lir_insn_; lir != nullptr; lir = lir->next) {
    lir->offset = offset;
    if (!lir->flags.is_nop && !IsPseudoLirOp(lir->opcode)) {
      offset += GetInsnSize(lir);
    } else if (lir->opcode == kPseudoPseudoAlign4) {
      offset += (offset & 0x2);
    }
  }
}

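// Mark the GC card for |tgt_addr_reg| after a reference store. When the MIR flags say the stored
// value is known to be non-null, the null-check branch around the card mark is omitted.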
void Mir2Lir::MarkGCCard(int opt_flags, RegStorage val_reg, RegStorage tgt_addr_reg) {
  DCHECK(val_reg.Valid());
  DCHECK_EQ(val_reg.Is64Bit(), cu_->target64);
  if ((opt_flags & MIR_STORE_NON_NULL_VALUE) != 0) {
    UnconditionallyMarkGCCard(tgt_addr_reg);
  } else {
    LIR* branch_over = OpCmpImmBranch(kCondEq, val_reg, 0, nullptr);
    UnconditionallyMarkGCCard(tgt_addr_reg);
    LIR* target = NewLIR0(kPseudoTargetLabel);
    branch_over->target = target;
  }
}

/* Dump instructions and constant pool contents */
void Mir2Lir::CodegenDump() {
  LOG(INFO) << "Dumping LIR insns for "
            << PrettyMethod(cu_->method_idx, *cu_->dex_file);
  LIR* lir_insn;
  int insns_size = mir_graph_->GetNumDalvikInsns();

  LOG(INFO) << "Regs (excluding ins) : " << mir_graph_->GetNumOfLocalCodeVRs();
  LOG(INFO) << "Ins : " << mir_graph_->GetNumOfInVRs();
  LOG(INFO) << "Outs : " << mir_graph_->GetNumOfOutVRs();
  LOG(INFO) << "CoreSpills : " << num_core_spills_;
  LOG(INFO) << "FPSpills : " << num_fp_spills_;
  LOG(INFO) << "CompilerTemps : " << mir_graph_->GetNumUsedCompilerTemps();
  LOG(INFO) << "Frame size : " << frame_size_;
  LOG(INFO) << "code size is " << total_size_ <<
      " bytes, Dalvik size is " << insns_size * 2;
  LOG(INFO) << "expansion factor: "
            << static_cast<float>(total_size_) / static_cast<float>(insns_size * 2);
  DumpPromotionMap();
  UpdateLIROffsets();
  for (lir_insn = first_lir_insn_; lir_insn != nullptr; lir_insn = lir_insn->next) {
    DumpLIRInsn(lir_insn, 0);
  }
  for (lir_insn = literal_list_; lir_insn != nullptr; lir_insn = lir_insn->next) {
    LOG(INFO) << StringPrintf("%x (%04x): .word (%#x)", lir_insn->offset, lir_insn->offset,
                              lir_insn->operands[0]);
  }

  const DexFile::MethodId& method_id =
      cu_->dex_file->GetMethodId(cu_->method_idx);
  const Signature signature = cu_->dex_file->GetMethodSignature(method_id);
  const char* name = cu_->dex_file->GetMethodName(method_id);
  const char* descriptor(cu_->dex_file->GetMethodDeclaringClassDescriptor(method_id));

  // Dump mapping tables
  if (!encoded_mapping_table_.empty()) {
    MappingTable table(&encoded_mapping_table_[0]);
    DumpMappingTable("PC2Dex_MappingTable", descriptor, name, signature,
                     table.PcToDexSize(), table.PcToDexBegin());
    DumpMappingTable("Dex2PC_MappingTable", descriptor, name, signature,
                     table.DexToPcSize(), table.DexToPcBegin());
  }
}

/*
 * Search the existing constants in the literal pool for an exact or close match
 * within specified delta (greater or equal to 0).
 */
LIR* Mir2Lir::ScanLiteralPool(LIR* data_target, int value, unsigned int delta) {
  while (data_target) {
    if ((static_cast<unsigned>(value - data_target->operands[0])) <= delta)
      return data_target;
    data_target = data_target->next;
  }
  return nullptr;
}

/* Search the existing constants in the literal pool for an exact wide match */
LIR* Mir2Lir::ScanLiteralPoolWide(LIR* data_target, int val_lo, int val_hi) {
  bool lo_match = false;
  LIR* lo_target = nullptr;
  while (data_target) {
    if (lo_match && (data_target->operands[0] == val_hi)) {
      // Record high word in case we need to expand this later.
      lo_target->operands[1] = val_hi;
      return lo_target;
    }
    lo_match = false;
    if (data_target->operands[0] == val_lo) {
      lo_match = true;
      lo_target = data_target;
    }
    data_target = data_target->next;
  }
  return nullptr;
}

/* Search the existing constants in the literal pool for an exact method match */
LIR* Mir2Lir::ScanLiteralPoolMethod(LIR* data_target, const MethodReference& method) {
  while (data_target) {
    if (static_cast<uint32_t>(data_target->operands[0]) == method.dex_method_index &&
        UnwrapPointer<DexFile>(data_target->operands[1]) == method.dex_file) {
      return data_target;
    }
    data_target = data_target->next;
  }
  return nullptr;
}

/* Search the existing constants in the literal pool for an exact class match */
LIR* Mir2Lir::ScanLiteralPoolClass(LIR* data_target, const DexFile& dex_file, uint32_t type_idx) {
  while (data_target) {
    if (static_cast<uint32_t>(data_target->operands[0]) == type_idx &&
        UnwrapPointer<DexFile>(data_target->operands[1]) == &dex_file) {
      return data_target;
    }
    data_target = data_target->next;
  }
  return nullptr;
}

/*
 * The following are building blocks to insert constants into the pool or
 * instruction streams.
 */

/* Add a 32-bit constant to the constant pool */
LIR* Mir2Lir::AddWordData(LIR* *constant_list_p, int value) {
  /* Add the constant to the literal pool */
  if (constant_list_p) {
    LIR* new_value = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), kArenaAllocData));
    new_value->operands[0] = value;
    new_value->next = *constant_list_p;
    *constant_list_p = new_value;
    estimated_native_code_size_ += sizeof(value);
    return new_value;
  }
  return nullptr;
}

/* Add a 64-bit constant to the constant pool or mixed with code */
LIR* Mir2Lir::AddWideData(LIR* *constant_list_p, int val_lo, int val_hi) {
  AddWordData(constant_list_p, val_hi);
  return AddWordData(constant_list_p, val_lo);
}

/**
 * @brief Push a compressed reference which needs patching at link/patchoat-time.
 * @details This needs to be kept consistent with the code which actually does the patching in
 * oat_writer.cc and in the patchoat tool.
 */
static void PushUnpatchedReference(CodeBuffer* buf) {
  // Note that we can safely initialize the patches to zero. The code deduplication mechanism takes
  // the patches into account when determining whether two pieces of code are functionally
  // equivalent.
  Push32(buf, UINT32_C(0));
}

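// Pad the code buffer with zero bytes until it reaches |offset|.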
static void AlignBuffer(CodeBuffer* buf, size_t offset) {
  DCHECK_LE(buf->size(), offset);
  buf->insert(buf->end(), offset - buf->size(), 0u);
}

/* Write the literal pool to the output stream */
void Mir2Lir::InstallLiteralPools() {
  AlignBuffer(&code_buffer_, data_offset_);
  LIR* data_lir = literal_list_;
  while (data_lir != nullptr) {
    Push32(&code_buffer_, data_lir->operands[0]);
    data_lir = NEXT_LIR(data_lir);
  }
  // TODO: patches_.reserve() as needed.
  // Push code and method literals, record offsets for the compiler to patch.
  data_lir = code_literal_list_;
  while (data_lir != nullptr) {
    uint32_t target_method_idx = data_lir->operands[0];
    const DexFile* target_dex_file = UnwrapPointer<DexFile>(data_lir->operands[1]);
    patches_.push_back(LinkerPatch::CodePatch(code_buffer_.size(),
                                              target_dex_file, target_method_idx));
    PushUnpatchedReference(&code_buffer_);
    data_lir = NEXT_LIR(data_lir);
  }
  data_lir = method_literal_list_;
  while (data_lir != nullptr) {
    uint32_t target_method_idx = data_lir->operands[0];
    const DexFile* target_dex_file = UnwrapPointer<DexFile>(data_lir->operands[1]);
    patches_.push_back(LinkerPatch::MethodPatch(code_buffer_.size(),
                                                target_dex_file, target_method_idx));
    PushUnpatchedReference(&code_buffer_);
    data_lir = NEXT_LIR(data_lir);
  }
  // Push class literals.
  data_lir = class_literal_list_;
  while (data_lir != nullptr) {
    uint32_t target_type_idx = data_lir->operands[0];
    const DexFile* class_dex_file = UnwrapPointer<DexFile>(data_lir->operands[1]);
    patches_.push_back(LinkerPatch::TypePatch(code_buffer_.size(),
                                              class_dex_file, target_type_idx));
    PushUnpatchedReference(&code_buffer_);
    data_lir = NEXT_LIR(data_lir);
  }
}

/* Write the switch tables to the output stream */
void Mir2Lir::InstallSwitchTables() {
  for (Mir2Lir::SwitchTable* tab_rec : switch_tables_) {
    AlignBuffer(&code_buffer_, tab_rec->offset);
    /*
     * For Arm, our reference point is the address of the bx
     * instruction that does the launch, so we have to subtract
     * the auto pc-advance. For other targets the reference point
     * is a label, so we can use the offset as-is.
     */
    int bx_offset = INVALID_OFFSET;
    switch (cu_->instruction_set) {
      case kThumb2:
        DCHECK(tab_rec->anchor->flags.fixup != kFixupNone);
        bx_offset = tab_rec->anchor->offset + 4;
        break;
      case kX86_64:
        // RIP relative to switch table.
        bx_offset = tab_rec->offset;
        break;
      case kX86:
      case kArm64:
      case kMips:
      case kMips64:
        bx_offset = tab_rec->anchor->offset;
        break;
      default: LOG(FATAL) << "Unexpected instruction set: " << cu_->instruction_set;
    }
    if (cu_->verbose) {
      LOG(INFO) << "Switch table for offset 0x" << std::hex << bx_offset;
    }
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      DCHECK(tab_rec->switch_mir != nullptr);
      BasicBlock* bb = mir_graph_->GetBasicBlock(tab_rec->switch_mir->bb);
      DCHECK(bb != nullptr);
      int elems = 0;
      for (SuccessorBlockInfo* successor_block_info : bb->successor_blocks) {
        int key = successor_block_info->key;
        int target = successor_block_info->block;
        LIR* boundary_lir = InsertCaseLabel(target, key);
        DCHECK(boundary_lir != nullptr);
        int disp = boundary_lir->offset - bx_offset;
        Push32(&code_buffer_, key);
        Push32(&code_buffer_, disp);
        if (cu_->verbose) {
          LOG(INFO) << " Case[" << elems << "] key: 0x"
                    << std::hex << key << ", disp: 0x"
                    << std::hex << disp;
        }
        elems++;
      }
      DCHECK_EQ(elems, tab_rec->table[1]);
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      DCHECK(tab_rec->switch_mir != nullptr);
      BasicBlock* bb = mir_graph_->GetBasicBlock(tab_rec->switch_mir->bb);
      DCHECK(bb != nullptr);
      int elems = 0;
      int low_key = s4FromSwitchData(&tab_rec->table[2]);
      for (SuccessorBlockInfo* successor_block_info : bb->successor_blocks) {
        int key = successor_block_info->key;
        DCHECK_EQ(elems + low_key, key);
        int target = successor_block_info->block;
        LIR* boundary_lir = InsertCaseLabel(target, key);
        DCHECK(boundary_lir != nullptr);
        int disp = boundary_lir->offset - bx_offset;
        Push32(&code_buffer_, disp);
        if (cu_->verbose) {
          LOG(INFO) << " Case[" << elems << "] disp: 0x"
                    << std::hex << disp;
        }
        elems++;
      }
      DCHECK_EQ(elems, tab_rec->table[1]);
    }
  }
}

/* Write the fill array data to the output stream */
void Mir2Lir::InstallFillArrayData() {
  for (Mir2Lir::FillArrayData* tab_rec : fill_array_data_) {
    AlignBuffer(&code_buffer_, tab_rec->offset);
    for (int i = 0; i < (tab_rec->size + 1) / 2; i++) {
      code_buffer_.push_back(tab_rec->table[i] & 0xFF);
      code_buffer_.push_back((tab_rec->table[i] >> 8) & 0xFF);
    }
  }
}

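// Assign offsets to a list of 32-bit literals, 4 bytes apart, returning the next free offset.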
static int AssignLiteralOffsetCommon(LIR* lir, CodeOffset offset) {
  for (; lir != nullptr; lir = lir->next) {
    lir->offset = offset;
    offset += 4;
  }
  return offset;
}

static int AssignLiteralPointerOffsetCommon(LIR* lir, CodeOffset offset,
                                            unsigned int element_size) {
  // Align to natural pointer size.
  offset = RoundUp(offset, element_size);
  for (; lir != nullptr; lir = lir->next) {
    lir->offset = offset;
    offset += element_size;
  }
  return offset;
}

// Make sure we have a code address for every declared catch entry
bool Mir2Lir::VerifyCatchEntries() {
  MappingTable table(&encoded_mapping_table_[0]);
  std::vector<uint32_t> dex_pcs;
  dex_pcs.reserve(table.DexToPcSize());
  for (auto it = table.DexToPcBegin(), end = table.DexToPcEnd(); it != end; ++it) {
    dex_pcs.push_back(it.DexPc());
  }
  // Sort dex_pcs, so that we can quickly check it against the ordered mir_graph_->catches_.
  std::sort(dex_pcs.begin(), dex_pcs.end());

  bool success = true;
  auto it = dex_pcs.begin(), end = dex_pcs.end();
  for (uint32_t dex_pc : mir_graph_->catches_) {
    while (it != end && *it < dex_pc) {
      LOG(INFO) << "Unexpected catch entry @ dex pc 0x" << std::hex << *it;
      ++it;
      success = false;
    }
    if (it == end || *it > dex_pc) {
      LOG(INFO) << "Missing native PC for catch entry @ 0x" << std::hex << dex_pc;
      success = false;
    } else {
      ++it;
    }
  }
  if (!success) {
    LOG(INFO) << "Bad dex2pcMapping table in " << PrettyMethod(cu_->method_idx, *cu_->dex_file);
    LOG(INFO) << "Entries @ decode: " << mir_graph_->catches_.size() << ", Entries in table: "
              << table.DexToPcSize();
  }
  return success;
}


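// Build the PC-to-dex and dex-to-PC mapping tables. A first pass over the LIR counts entries and
// sizes the LEB128 payload; a second pass writes both tables into encoded_mapping_table_ as
// deltas against the previous entry. Safepoint PCs feed the pc2dex table, exported PCs feed the
// dex2pc table, and (optionally) a source map for debug info is collected along the way.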
void Mir2Lir::CreateMappingTables() {
  bool generate_src_map = cu_->compiler_driver->GetCompilerOptions().GetGenerateDebugInfo();

  uint32_t pc2dex_data_size = 0u;
  uint32_t pc2dex_entries = 0u;
  uint32_t pc2dex_offset = 0u;
  uint32_t pc2dex_dalvik_offset = 0u;
  uint32_t pc2dex_src_entries = 0u;
  uint32_t dex2pc_data_size = 0u;
  uint32_t dex2pc_entries = 0u;
  uint32_t dex2pc_offset = 0u;
  uint32_t dex2pc_dalvik_offset = 0u;
  for (LIR* tgt_lir = first_lir_insn_; tgt_lir != nullptr; tgt_lir = NEXT_LIR(tgt_lir)) {
    pc2dex_src_entries++;
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
      pc2dex_entries += 1;
      DCHECK(pc2dex_offset <= tgt_lir->offset);
      pc2dex_data_size += UnsignedLeb128Size(tgt_lir->offset - pc2dex_offset);
      pc2dex_data_size += SignedLeb128Size(static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                           static_cast<int32_t>(pc2dex_dalvik_offset));
      pc2dex_offset = tgt_lir->offset;
      pc2dex_dalvik_offset = tgt_lir->dalvik_offset;
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
      dex2pc_entries += 1;
      DCHECK(dex2pc_offset <= tgt_lir->offset);
      dex2pc_data_size += UnsignedLeb128Size(tgt_lir->offset - dex2pc_offset);
      dex2pc_data_size += SignedLeb128Size(static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                           static_cast<int32_t>(dex2pc_dalvik_offset));
      dex2pc_offset = tgt_lir->offset;
      dex2pc_dalvik_offset = tgt_lir->dalvik_offset;
    }
  }

  if (generate_src_map) {
    src_mapping_table_.reserve(pc2dex_src_entries);
  }

  uint32_t total_entries = pc2dex_entries + dex2pc_entries;
  uint32_t hdr_data_size = UnsignedLeb128Size(total_entries) + UnsignedLeb128Size(pc2dex_entries);
  uint32_t data_size = hdr_data_size + pc2dex_data_size + dex2pc_data_size;
  encoded_mapping_table_.resize(data_size);
  uint8_t* write_pos = &encoded_mapping_table_[0];
  write_pos = EncodeUnsignedLeb128(write_pos, total_entries);
  write_pos = EncodeUnsignedLeb128(write_pos, pc2dex_entries);
  DCHECK_EQ(static_cast<size_t>(write_pos - &encoded_mapping_table_[0]), hdr_data_size);
  uint8_t* write_pos2 = write_pos + pc2dex_data_size;

  bool is_in_prologue_or_epilogue = false;
  pc2dex_offset = 0u;
  pc2dex_dalvik_offset = 0u;
  dex2pc_offset = 0u;
  dex2pc_dalvik_offset = 0u;
  for (LIR* tgt_lir = first_lir_insn_; tgt_lir != nullptr; tgt_lir = NEXT_LIR(tgt_lir)) {
    if (generate_src_map && !tgt_lir->flags.is_nop && tgt_lir->opcode >= 0) {
      if (!is_in_prologue_or_epilogue) {
        src_mapping_table_.push_back(SrcMapElem({tgt_lir->offset,
                                                 static_cast<int32_t>(tgt_lir->dalvik_offset)}));
      }
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
      DCHECK(pc2dex_offset <= tgt_lir->offset);
      write_pos = EncodeUnsignedLeb128(write_pos, tgt_lir->offset - pc2dex_offset);
      write_pos = EncodeSignedLeb128(write_pos, static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                     static_cast<int32_t>(pc2dex_dalvik_offset));
      pc2dex_offset = tgt_lir->offset;
      pc2dex_dalvik_offset = tgt_lir->dalvik_offset;
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
      DCHECK(dex2pc_offset <= tgt_lir->offset);
      write_pos2 = EncodeUnsignedLeb128(write_pos2, tgt_lir->offset - dex2pc_offset);
      write_pos2 = EncodeSignedLeb128(write_pos2, static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                      static_cast<int32_t>(dex2pc_dalvik_offset));
      dex2pc_offset = tgt_lir->offset;
      dex2pc_dalvik_offset = tgt_lir->dalvik_offset;
    }
    if (tgt_lir->opcode == kPseudoPrologueBegin || tgt_lir->opcode == kPseudoEpilogueBegin) {
      is_in_prologue_or_epilogue = true;
    }
    if (tgt_lir->opcode == kPseudoPrologueEnd || tgt_lir->opcode == kPseudoEpilogueEnd) {
      is_in_prologue_or_epilogue = false;
    }
  }
  DCHECK_EQ(static_cast<size_t>(write_pos - &encoded_mapping_table_[0]),
            hdr_data_size + pc2dex_data_size);
  DCHECK_EQ(static_cast<size_t>(write_pos2 - &encoded_mapping_table_[0]), data_size);

  if (kIsDebugBuild) {
    CHECK(VerifyCatchEntries());

    // Verify the encoded table holds the expected data.
    MappingTable table(&encoded_mapping_table_[0]);
    CHECK_EQ(table.TotalSize(), total_entries);
    CHECK_EQ(table.PcToDexSize(), pc2dex_entries);
    auto it = table.PcToDexBegin();
    auto it2 = table.DexToPcBegin();
    for (LIR* tgt_lir = first_lir_insn_; tgt_lir != nullptr; tgt_lir = NEXT_LIR(tgt_lir)) {
      if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
        CHECK_EQ(tgt_lir->offset, it.NativePcOffset());
        CHECK_EQ(tgt_lir->dalvik_offset, it.DexPc());
        ++it;
      }
      if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
        CHECK_EQ(tgt_lir->offset, it2.NativePcOffset());
        CHECK_EQ(tgt_lir->dalvik_offset, it2.DexPc());
        ++it2;
      }
    }
    CHECK(it == table.PcToDexEnd());
    CHECK(it2 == table.DexToPcEnd());
  }
}

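// Build the native GC map from the safepoints recorded during codegen, using the compiler's own
// notion of live references; when register promotion is disabled we fall back to the
// verifier-based map instead.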
void Mir2Lir::CreateNativeGcMap() {
  if (UNLIKELY((cu_->disable_opt & (1u << kPromoteRegs)) != 0u)) {
    // If we're not promoting to physical registers, it's safe to use the verifier's notion of
    // references. (We disable register promotion when type inference finds a type conflict, and
    // in that case we defer to the verifier to avoid using the compiler's conflicting info.)
    CreateNativeGcMapWithoutRegisterPromotion();
    return;
  }

  ArenaBitVector* references = new (arena_) ArenaBitVector(arena_, mir_graph_->GetNumSSARegs(),
                                                           false);

  // Calculate max native offset and max reference vreg.
  MIR* prev_mir = nullptr;
  int max_ref_vreg = -1;
  CodeOffset max_native_offset = 0u;
  for (const auto& entry : safepoints_) {
    uint32_t native_offset = entry.first->offset;
    max_native_offset = std::max(max_native_offset, native_offset);
    MIR* mir = entry.second;
    UpdateReferenceVRegs(mir, prev_mir, references);
    max_ref_vreg = std::max(max_ref_vreg, references->GetHighestBitSet());
    prev_mir = mir;
  }

#if defined(BYTE_ORDER) && (BYTE_ORDER == LITTLE_ENDIAN)
  static constexpr bool kLittleEndian = true;
#else
  static constexpr bool kLittleEndian = false;
#endif

  // Build the GC map.
  uint32_t reg_width = static_cast<uint32_t>((max_ref_vreg + 8) / 8);
  GcMapBuilder native_gc_map_builder(&native_gc_map_,
                                     safepoints_.size(),
                                     max_native_offset, reg_width);
  if (kLittleEndian) {
    for (const auto& entry : safepoints_) {
      uint32_t native_offset = entry.first->offset;
      MIR* mir = entry.second;
      UpdateReferenceVRegs(mir, prev_mir, references);
      // For little-endian, the bytes comprising the bit vector's raw storage are what we need.
      native_gc_map_builder.AddEntry(native_offset,
                                     reinterpret_cast<const uint8_t*>(references->GetRawStorage()));
      prev_mir = mir;
    }
  } else {
    ArenaVector<uint8_t> references_buffer(arena_->Adapter());
    references_buffer.resize(reg_width);
    for (const auto& entry : safepoints_) {
      uint32_t native_offset = entry.first->offset;
      MIR* mir = entry.second;
      UpdateReferenceVRegs(mir, prev_mir, references);
      // Big-endian or unknown endianness, manually translate the bit vector data.
      const auto* raw_storage = references->GetRawStorage();
      for (size_t i = 0; i != reg_width; ++i) {
        references_buffer[i] = static_cast<uint8_t>(
            raw_storage[i / sizeof(raw_storage[0])] >> (8u * (i % sizeof(raw_storage[0]))));
      }
      native_gc_map_builder.AddEntry(native_offset, references_buffer.data());
      prev_mir = mir;
    }
  }
}

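// Build the native GC map directly from the verifier's dex-pc-to-reference bitmaps, walking the
// pc2dex mapping table to find the native offset for each safepoint.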
void Mir2Lir::CreateNativeGcMapWithoutRegisterPromotion() {
  DCHECK(!encoded_mapping_table_.empty());
  MappingTable mapping_table(&encoded_mapping_table_[0]);
  uint32_t max_native_offset = 0;
  for (auto it = mapping_table.PcToDexBegin(), end = mapping_table.PcToDexEnd(); it != end; ++it) {
    uint32_t native_offset = it.NativePcOffset();
    if (native_offset > max_native_offset) {
      max_native_offset = native_offset;
    }
  }
  MethodReference method_ref(cu_->dex_file, cu_->method_idx);
  const std::vector<uint8_t>& gc_map_raw =
      mir_graph_->GetCurrentDexCompilationUnit()->GetVerifiedMethod()->GetDexGcMap();
  verifier::DexPcToReferenceMap dex_gc_map(&(gc_map_raw)[0]);
  DCHECK_EQ(gc_map_raw.size(), dex_gc_map.RawSize());
  // Compute native offset to references size.
  GcMapBuilder native_gc_map_builder(&native_gc_map_,
                                     mapping_table.PcToDexSize(),
                                     max_native_offset, dex_gc_map.RegWidth());

  for (auto it = mapping_table.PcToDexBegin(), end = mapping_table.PcToDexEnd(); it != end; ++it) {
    uint32_t native_offset = it.NativePcOffset();
    uint32_t dex_pc = it.DexPc();
    const uint8_t* references = dex_gc_map.FindBitMap(dex_pc, false);
    CHECK(references != nullptr) << "Missing ref for dex pc 0x" << std::hex << dex_pc <<
        ": " << PrettyMethod(cu_->method_idx, *cu_->dex_file);
    native_gc_map_builder.AddEntry(native_offset, references);
  }

  // Maybe not necessary, but this could help prevent errors where we access the verified method
  // after it has been deleted.
  mir_graph_->GetCurrentDexCompilationUnit()->ClearVerifiedMethod();
}

/* Determine the offset of each literal field */
int Mir2Lir::AssignLiteralOffset(CodeOffset offset) {
  offset = AssignLiteralOffsetCommon(literal_list_, offset);
  constexpr unsigned int ptr_size = sizeof(uint32_t);
  static_assert(ptr_size >= sizeof(mirror::HeapReference<mirror::Object>),
                "Pointer size cannot hold a heap reference");
  offset = AssignLiteralPointerOffsetCommon(code_literal_list_, offset, ptr_size);
  offset = AssignLiteralPointerOffsetCommon(method_literal_list_, offset, ptr_size);
  offset = AssignLiteralPointerOffsetCommon(class_literal_list_, offset, ptr_size);
  return offset;
}

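// Lay out the switch tables after the literals: sparse tables take {key, displacement} pairs,
// packed tables take displacements only.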
int Mir2Lir::AssignSwitchTablesOffset(CodeOffset offset) {
  for (Mir2Lir::SwitchTable* tab_rec : switch_tables_) {
    tab_rec->offset = offset;
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      offset += tab_rec->table[1] * (sizeof(int) * 2);
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      offset += tab_rec->table[1] * sizeof(int);
    }
  }
  return offset;
}

int Mir2Lir::AssignFillArrayDataOffset(CodeOffset offset) {
  for (Mir2Lir::FillArrayData* tab_rec : fill_array_data_) {
    tab_rec->offset = offset;
    offset += tab_rec->size;
    // word align
    offset = RoundUp(offset, 4);
  }
  return offset;
}

/*
 * Insert a kPseudoCaseLabel at the beginning of the Dalvik
 * offset vaddr if pretty-printing, otherwise use the standard block
 * label. The selected label will be used to fix up the case
 * branch table during the assembly phase. All resource flags
 * are set to prevent code motion. KeyVal is just there for debugging.
 */
Chao-ying Fu72f53af2014-11-11 16:48:40 -0800933LIR* Mir2Lir::InsertCaseLabel(uint32_t bbid, int keyVal) {
934 LIR* boundary_lir = &block_label_list_[bbid];
buzbeeb48819d2013-09-14 16:15:25 -0700935 LIR* res = boundary_lir;
936 if (cu_->verbose) {
937 // Only pay the expense if we're pretty-printing.
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000938 LIR* new_label = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), kArenaAllocLIR));
Chao-ying Fu72f53af2014-11-11 16:48:40 -0800939 BasicBlock* bb = mir_graph_->GetBasicBlock(bbid);
940 DCHECK(bb != nullptr);
941 new_label->dalvik_offset = bb->start_offset;
buzbeeb48819d2013-09-14 16:15:25 -0700942 new_label->opcode = kPseudoCaseLabel;
943 new_label->operands[0] = keyVal;
944 new_label->flags.fixup = kFixupLabel;
945 DCHECK(!new_label->flags.use_def_invalid);
Vladimir Marko8dea81c2014-06-06 14:50:36 +0100946 new_label->u.m.def_mask = &kEncodeAll;
buzbeeb48819d2013-09-14 16:15:25 -0700947 InsertLIRAfter(boundary_lir, new_label);
buzbeeb48819d2013-09-14 16:15:25 -0700948 }
949 return res;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700950}
951
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700952void Mir2Lir::DumpSparseSwitchTable(const uint16_t* table) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700953 /*
954 * Sparse switch data format:
955 * ushort ident = 0x0200 magic value
956 * ushort size number of entries in the table; > 0
957 * int keys[size] keys, sorted low-to-high; 32-bit aligned
958 * int targets[size] branch targets, relative to switch opcode
959 *
960 * Total size is (2+size*4) 16-bit code units.
961 */
Brian Carlstrom7940e442013-07-12 13:46:57 -0700962 uint16_t ident = table[0];
963 int entries = table[1];
buzbee0d829482013-10-11 15:24:55 -0700964 const int32_t* keys = reinterpret_cast<const int32_t*>(&table[2]);
965 const int32_t* targets = &keys[entries];
Brian Carlstrom7940e442013-07-12 13:46:57 -0700966 LOG(INFO) << "Sparse switch table - ident:0x" << std::hex << ident
967 << ", entries: " << std::dec << entries;
968 for (int i = 0; i < entries; i++) {
969 LOG(INFO) << " Key[" << keys[i] << "] -> 0x" << std::hex << targets[i];
970 }
971}
972
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700973void Mir2Lir::DumpPackedSwitchTable(const uint16_t* table) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700974 /*
975 * Packed switch data format:
976 * ushort ident = 0x0100 magic value
977 * ushort size number of entries in the table
978 * int first_key first (and lowest) switch case value
979 * int targets[size] branch targets, relative to switch opcode
980 *
981 * Total size is (4+size*2) 16-bit code units.
982 */
Brian Carlstrom7940e442013-07-12 13:46:57 -0700983 uint16_t ident = table[0];
buzbee0d829482013-10-11 15:24:55 -0700984 const int32_t* targets = reinterpret_cast<const int32_t*>(&table[4]);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700985 int entries = table[1];
986 int low_key = s4FromSwitchData(&table[2]);
987 LOG(INFO) << "Packed switch table - ident:0x" << std::hex << ident
988 << ", entries: " << std::dec << entries << ", low_key: " << low_key;
989 for (int i = 0; i < entries; i++) {
990 LOG(INFO) << " Key[" << (i + low_key) << "] -> 0x" << std::hex
991 << targets[i];
992 }
993}
994
buzbee252254b2013-09-08 16:20:53 -0700995/* Set up special LIR to mark a Dalvik byte-code instruction start for pretty printing */
Roland Levillain4b8f1ec2015-08-26 18:34:03 +0100996void Mir2Lir::MarkBoundary(DexOffset offset ATTRIBUTE_UNUSED, const char* inst_str) {
buzbee0d829482013-10-11 15:24:55 -0700997 // NOTE: only used for debug listings.
998 NewLIR1(kPseudoDalvikByteCodeBoundary, WrapPointer(ArenaStrdup(inst_str)));
Brian Carlstrom7940e442013-07-12 13:46:57 -0700999}
1000
Brian Carlstrom7940e442013-07-12 13:46:57 -07001001// Convert relation of src1/src2 to src2/src1
1002ConditionCode Mir2Lir::FlipComparisonOrder(ConditionCode before) {
1003 ConditionCode res;
1004 switch (before) {
1005 case kCondEq: res = kCondEq; break;
1006 case kCondNe: res = kCondNe; break;
1007 case kCondLt: res = kCondGt; break;
1008 case kCondGt: res = kCondLt; break;
1009 case kCondLe: res = kCondGe; break;
1010 case kCondGe: res = kCondLe; break;
1011 default:
Brian Carlstrom7940e442013-07-12 13:46:57 -07001012 LOG(FATAL) << "Unexpected ccode " << before;
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001013 UNREACHABLE();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001014 }
1015 return res;
1016}
1017
Vladimir Markoa1a70742014-03-03 10:28:05 +00001018ConditionCode Mir2Lir::NegateComparison(ConditionCode before) {
1019 ConditionCode res;
1020 switch (before) {
1021 case kCondEq: res = kCondNe; break;
1022 case kCondNe: res = kCondEq; break;
1023 case kCondLt: res = kCondGe; break;
1024 case kCondGt: res = kCondLe; break;
1025 case kCondLe: res = kCondGt; break;
1026 case kCondGe: res = kCondLt; break;
1027 default:
Vladimir Markoa1a70742014-03-03 10:28:05 +00001028 LOG(FATAL) << "Unexpected ccode " << before;
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001029 UNREACHABLE();
Vladimir Markoa1a70742014-03-03 10:28:05 +00001030 }
1031 return res;
1032}

// TODO: move to mir_to_lir.cc
Mir2Lir::Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena)
    : literal_list_(nullptr),
      method_literal_list_(nullptr),
      class_literal_list_(nullptr),
      code_literal_list_(nullptr),
      first_fixup_(nullptr),
      arena_(arena),
      cu_(cu),
      mir_graph_(mir_graph),
      switch_tables_(arena->Adapter(kArenaAllocSwitchTable)),
      fill_array_data_(arena->Adapter(kArenaAllocFillArrayData)),
      tempreg_info_(arena->Adapter()),
      reginfo_map_(arena->Adapter()),
      pointer_storage_(arena->Adapter()),
      data_offset_(0),
      total_size_(0),
      block_label_list_(nullptr),
      promotion_map_(nullptr),
      current_dalvik_offset_(0),
      current_mir_(nullptr),
      estimated_native_code_size_(0),
      reg_pool_(nullptr),
      live_sreg_(0),
      code_buffer_(mir_graph->GetArena()->Adapter()),
      encoded_mapping_table_(mir_graph->GetArena()->Adapter()),
      core_vmap_table_(mir_graph->GetArena()->Adapter()),
      fp_vmap_table_(mir_graph->GetArena()->Adapter()),
      native_gc_map_(mir_graph->GetArena()->Adapter()),
      patches_(mir_graph->GetArena()->Adapter()),
      num_core_spills_(0),
      num_fp_spills_(0),
      frame_size_(0),
      core_spill_mask_(0),
      fp_spill_mask_(0),
      first_lir_insn_(nullptr),
      last_lir_insn_(nullptr),
      slow_paths_(arena->Adapter(kArenaAllocSlowPaths)),
      mem_ref_type_(ResourceMask::kHeapRef),
      mask_cache_(arena),
      safepoints_(arena->Adapter()),
      dex_cache_arrays_layout_(cu->compiler_driver->GetDexCacheArraysLayout(cu->dex_file)),
      pc_rel_temp_(nullptr),
      dex_cache_arrays_min_offset_(std::numeric_limits<uint32_t>::max()),
      cfi_(&last_lir_insn_,
           cu->compiler_driver->GetCompilerOptions().GenerateAnyDebugInfo(),
           arena),
      in_to_reg_storage_mapping_(arena) {
  switch_tables_.reserve(4);
  fill_array_data_.reserve(4);
  tempreg_info_.reserve(20);
  reginfo_map_.reserve(RegStorage::kMaxRegs);
  pointer_storage_.reserve(128);
  slow_paths_.reserve(32);
  // Reserve pointer id 0 for null.
  size_t null_idx = WrapPointer<void>(nullptr);
  DCHECK_EQ(null_idx, 0U);
}

void Mir2Lir::Materialize() {
  cu_->NewTimingSplit("RegisterAllocation");
  CompilerInitializeRegAlloc();  // Needs to happen after SSA naming

  /* Allocate Registers using simple local allocation scheme */
  SimpleRegAlloc();

  /* First try the custom light codegen for special cases. */
  DCHECK(cu_->compiler_driver->GetMethodInlinerMap() != nullptr);
  bool special_worked = cu_->compiler_driver->GetMethodInlinerMap()->GetMethodInliner(cu_->dex_file)
      ->GenSpecial(this, cu_->method_idx);

  /* Take normal path for converting MIR to LIR only if the special codegen did not succeed. */
  if (special_worked == false) {
    MethodMIR2LIR();
  }

  /* Method is not empty */
  if (first_lir_insn_) {
    /* Convert LIR into machine code. */
    AssembleLIR();

    if ((cu_->enable_debug & (1 << kDebugCodegenDump)) != 0) {
      CodegenDump();
    }
  }
}

CompiledMethod* Mir2Lir::GetCompiledMethod() {
  // Combine vmap tables - core regs, then fp regs - into vmap_table.
  Leb128EncodingVector<> vmap_encoder;
  if (frame_size_ > 0) {
    // Prefix the encoded data with its size.
    size_t size = core_vmap_table_.size() + 1 /* marker */ + fp_vmap_table_.size();
    vmap_encoder.Reserve(size + 1u);  // All values are likely to be one byte in ULEB128 (<128).
    vmap_encoder.PushBackUnsigned(size);
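    // Resulting stream (illustrative): ULEB128 size, then the adjusted core vreg entries,
    // then the kAdjustedFpMarker placeholder standing in for lr, then the adjusted fp entries.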
    // Core regs may have been inserted out of order - sort first.
    std::sort(core_vmap_table_.begin(), core_vmap_table_.end());
    for (size_t i = 0 ; i < core_vmap_table_.size(); ++i) {
      // Copy, stripping out the phys register sort key.
      vmap_encoder.PushBackUnsigned(
          ~(~0u << VREG_NUM_WIDTH) & (core_vmap_table_[i] + VmapTable::kEntryAdjustment));
    }
    // Push a marker to take place of lr.
    vmap_encoder.PushBackUnsigned(VmapTable::kAdjustedFpMarker);
    if (cu_->instruction_set == kThumb2) {
      // fp regs already sorted.
      for (uint32_t i = 0; i < fp_vmap_table_.size(); i++) {
        vmap_encoder.PushBackUnsigned(fp_vmap_table_[i] + VmapTable::kEntryAdjustment);
      }
    } else {
      // For other platforms regs may have been inserted out of order - sort first.
      std::sort(fp_vmap_table_.begin(), fp_vmap_table_.end());
      for (size_t i = 0 ; i < fp_vmap_table_.size(); ++i) {
        // Copy, stripping out the phys register sort key.
        vmap_encoder.PushBackUnsigned(
            ~(~0u << VREG_NUM_WIDTH) & (fp_vmap_table_[i] + VmapTable::kEntryAdjustment));
      }
    }
  } else {
    DCHECK_EQ(POPCOUNT(core_spill_mask_), 0);
    DCHECK_EQ(POPCOUNT(fp_spill_mask_), 0);
    DCHECK_EQ(core_vmap_table_.size(), 0u);
    DCHECK_EQ(fp_vmap_table_.size(), 0u);
    vmap_encoder.PushBackUnsigned(0u);  // Size is 0.
  }

  // Sort patches by literal offset. Required for .oat_patches encoding.
  std::sort(patches_.begin(), patches_.end(), [](const LinkerPatch& lhs, const LinkerPatch& rhs) {
    return lhs.LiteralOffset() < rhs.LiteralOffset();
  });

  return CompiledMethod::SwapAllocCompiledMethod(
      cu_->compiler_driver, cu_->instruction_set,
      ArrayRef<const uint8_t>(code_buffer_),
      frame_size_, core_spill_mask_, fp_spill_mask_,
      ArrayRef<const SrcMapElem>(src_mapping_table_),
      ArrayRef<const uint8_t>(encoded_mapping_table_),
      ArrayRef<const uint8_t>(vmap_encoder.GetData()),
      ArrayRef<const uint8_t>(native_gc_map_),
      ArrayRef<const uint8_t>(*cfi_.Patch(code_buffer_.size())),
      ArrayRef<const LinkerPatch>(patches_));
}

size_t Mir2Lir::GetMaxPossibleCompilerTemps() const {
  // Choose a reasonably small value in order to contain stack growth.
  // Backends that are smarter about the spill region can return larger values.
  const size_t max_compiler_temps = 10;
  return max_compiler_temps;
}

size_t Mir2Lir::GetNumBytesForCompilerTempSpillRegion() {
  // By default assume that the Mir2Lir will need one slot for each temporary.
  // If the backend can better determine temps that have non-overlapping ranges and
  // temps that do not need to be spilled, it can actually provide a smaller region.
  mir_graph_->CommitCompilerTemps();
  return mir_graph_->GetNumBytesForSpecialTemps() + mir_graph_->GetMaximumBytesForNonSpecialTemps();
}

int Mir2Lir::ComputeFrameSize() {
  /* Figure out the frame size */
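  // Sketch of what the total below covers (exact layout is target-specific): callee-save
  // core spills, fp spills, a 4-byte filler word, local Dalvik vreg slots, outgoing argument
  // slots, and the compiler-temp spill region, rounded up to kStackAlignment.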
  uint32_t size = num_core_spills_ * GetBytesPerGprSpillLocation(cu_->instruction_set)
                  + num_fp_spills_ * GetBytesPerFprSpillLocation(cu_->instruction_set)
                  + sizeof(uint32_t)  // Filler.
                  + mir_graph_->GetNumOfLocalCodeVRs() * sizeof(uint32_t)
                  + mir_graph_->GetNumOfOutVRs() * sizeof(uint32_t)
                  + GetNumBytesForCompilerTempSpillRegion();
  /* Align and set */
  return RoundUp(size, kStackAlignment);
}

/*
 * Append an LIR instruction to the LIR list maintained by a compilation
 * unit
 */
void Mir2Lir::AppendLIR(LIR* lir) {
  if (first_lir_insn_ == nullptr) {
    DCHECK(last_lir_insn_ == nullptr);
    last_lir_insn_ = first_lir_insn_ = lir;
    lir->prev = lir->next = nullptr;
  } else {
    last_lir_insn_->next = lir;
    lir->prev = last_lir_insn_;
    lir->next = nullptr;
    last_lir_insn_ = lir;
  }
}

/*
 * Insert an LIR instruction before the current instruction, which cannot be the
 * first instruction.
 *
 * prev_lir <-> new_lir <-> current_lir
 */
void Mir2Lir::InsertLIRBefore(LIR* current_lir, LIR* new_lir) {
  DCHECK(current_lir->prev != nullptr);
  LIR *prev_lir = current_lir->prev;

  prev_lir->next = new_lir;
  new_lir->prev = prev_lir;
  new_lir->next = current_lir;
  current_lir->prev = new_lir;
}

/*
 * Insert an LIR instruction after the current instruction, which cannot be the
 * last instruction.
 *
 * current_lir -> new_lir -> old_next
 */
void Mir2Lir::InsertLIRAfter(LIR* current_lir, LIR* new_lir) {
  new_lir->prev = current_lir;
  new_lir->next = current_lir->next;
  current_lir->next = new_lir;
  new_lir->next->prev = new_lir;
}

bool Mir2Lir::PartiallyIntersects(RegLocation rl_src, RegLocation rl_dest) {
  DCHECK(rl_src.wide);
  DCHECK(rl_dest.wide);
  return (abs(mir_graph_->SRegToVReg(rl_src.s_reg_low) - mir_graph_->SRegToVReg(rl_dest.s_reg_low)) == 1);
}

bool Mir2Lir::Intersects(RegLocation rl_src, RegLocation rl_dest) {
  DCHECK(rl_src.wide);
  DCHECK(rl_dest.wide);
  return (abs(mir_graph_->SRegToVReg(rl_src.s_reg_low) - mir_graph_->SRegToVReg(rl_dest.s_reg_low)) <= 1);
}
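
// Illustrative example for the two overlap checks above: wide values based at v4 and v5
// differ by one vreg, so both PartiallyIntersects() and Intersects() return true; wide
// values both based at v4 differ by zero, so only Intersects() returns true.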

LIR *Mir2Lir::OpCmpMemImmBranch(ConditionCode cond, RegStorage temp_reg, RegStorage base_reg,
                                int offset, int check_value, LIR* target, LIR** compare) {
  // Handle this for architectures that can't compare to memory.
  LIR* inst = Load32Disp(base_reg, offset, temp_reg);
  if (compare != nullptr) {
    *compare = inst;
  }
  LIR* branch = OpCmpImmBranch(cond, temp_reg, check_value, target);
  return branch;
}

void Mir2Lir::AddSlowPath(LIRSlowPath* slowpath) {
  slow_paths_.push_back(slowpath);
  ResetDefTracking();
}

void Mir2Lir::LoadCodeAddress(const MethodReference& target_method, InvokeType type,
                              SpecialTargetRegister symbolic_reg) {
  LIR* data_target = ScanLiteralPoolMethod(code_literal_list_, target_method);
  if (data_target == nullptr) {
    data_target = AddWordData(&code_literal_list_, target_method.dex_method_index);
    data_target->operands[1] = WrapPointer(const_cast<DexFile*>(target_method.dex_file));
    // NOTE: The invoke type doesn't contribute to the literal identity. In fact, we can have
    // the same method invoked with kVirtual, kSuper and kInterface but the class linker will
    // resolve these invokes to the same method, so we don't care which one we record here.
    data_target->operands[2] = type;
  }
  // Loads a code pointer. Code from oat file can be mapped anywhere.
  OpPcRelLoad(TargetPtrReg(symbolic_reg), data_target);
  DCHECK_NE(cu_->instruction_set, kMips) << reinterpret_cast<void*>(data_target);
  DCHECK_NE(cu_->instruction_set, kMips64) << reinterpret_cast<void*>(data_target);
}

void Mir2Lir::LoadMethodAddress(const MethodReference& target_method, InvokeType type,
                                SpecialTargetRegister symbolic_reg) {
  LIR* data_target = ScanLiteralPoolMethod(method_literal_list_, target_method);
  if (data_target == nullptr) {
    data_target = AddWordData(&method_literal_list_, target_method.dex_method_index);
    data_target->operands[1] = WrapPointer(const_cast<DexFile*>(target_method.dex_file));
    // NOTE: The invoke type doesn't contribute to the literal identity. In fact, we can have
    // the same method invoked with kVirtual, kSuper and kInterface but the class linker will
    // resolve these invokes to the same method, so we don't care which one we record here.
    data_target->operands[2] = type;
  }
  // Loads an ArtMethod pointer, which is not a reference.
  OpPcRelLoad(TargetPtrReg(symbolic_reg), data_target);
  DCHECK_NE(cu_->instruction_set, kMips) << reinterpret_cast<void*>(data_target);
  DCHECK_NE(cu_->instruction_set, kMips64) << reinterpret_cast<void*>(data_target);
}

void Mir2Lir::LoadClassType(const DexFile& dex_file, uint32_t type_idx,
                            SpecialTargetRegister symbolic_reg) {
  // Use the literal pool and a PC-relative load from a data word.
  LIR* data_target = ScanLiteralPoolClass(class_literal_list_, dex_file, type_idx);
  if (data_target == nullptr) {
    data_target = AddWordData(&class_literal_list_, type_idx);
    data_target->operands[1] = WrapPointer(const_cast<DexFile*>(&dex_file));
  }
  // Loads a Class pointer, which is a reference as it lives in the heap.
  OpPcRelLoad(TargetReg(symbolic_reg, kRef), data_target);
}
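
// Common pattern in the three Load* helpers above (sketch): find or append a literal-pool
// word keyed by the dex index, record the owning DexFile (and invoke type where relevant)
// in the LIR operands for later patching, then emit a PC-relative load of that word into
// the requested target register.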

bool Mir2Lir::CanUseOpPcRelDexCacheArrayLoad() const {
  return false;
}

void Mir2Lir::OpPcRelDexCacheArrayLoad(const DexFile* dex_file ATTRIBUTE_UNUSED,
                                       int offset ATTRIBUTE_UNUSED,
                                       RegStorage r_dest ATTRIBUTE_UNUSED,
                                       bool wide ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "No generic implementation.";
  UNREACHABLE();
}

RegLocation Mir2Lir::NarrowRegLoc(RegLocation loc) {
  if (loc.location == kLocPhysReg) {
    DCHECK(!loc.reg.Is32Bit());
    if (loc.reg.IsPair()) {
      RegisterInfo* info_lo = GetRegInfo(loc.reg.GetLow());
      RegisterInfo* info_hi = GetRegInfo(loc.reg.GetHigh());
      info_lo->SetIsWide(false);
      info_hi->SetIsWide(false);
      loc.reg = info_lo->GetReg();
    } else {
      RegisterInfo* info = GetRegInfo(loc.reg);
      RegisterInfo* info_new = info->FindMatchingView(RegisterInfo::k32SoloStorageMask);
      DCHECK(info_new != nullptr);
      if (info->IsLive() && (info->SReg() == loc.s_reg_low)) {
        info->MarkDead();
        info_new->MarkLive(loc.s_reg_low);
      }
      loc.reg = info_new->GetReg();
    }
    DCHECK(loc.reg.Valid());
  }
  loc.wide = false;
  return loc;
}
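
// Illustrative effect of NarrowRegLoc(): a wide value held in a register pair is narrowed
// to its low half and both halves drop their wide flag; a wide value in a single 64-bit
// register is re-described through the matching 32-bit view, with liveness transferred to
// that view when the value is live there.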

void Mir2Lir::GenMachineSpecificExtendedMethodMIR(BasicBlock* bb ATTRIBUTE_UNUSED,
                                                  MIR* mir ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unknown MIR opcode not supported on this architecture";
  UNREACHABLE();
}

void Mir2Lir::InitReferenceVRegs(BasicBlock* bb, BitVector* references) {
  // Mark the references coming from the first predecessor.
  DCHECK(bb != nullptr);
  DCHECK(bb->block_type == kEntryBlock || !bb->predecessors.empty());
  BasicBlock* first_bb =
      (bb->block_type == kEntryBlock) ? bb : mir_graph_->GetBasicBlock(bb->predecessors[0]);
  DCHECK(first_bb != nullptr);
  DCHECK(first_bb->data_flow_info != nullptr);
  DCHECK(first_bb->data_flow_info->vreg_to_ssa_map_exit != nullptr);
  const int32_t* first_vreg_to_ssa_map = first_bb->data_flow_info->vreg_to_ssa_map_exit;
  references->ClearAllBits();
  for (uint32_t vreg = 0,
       num_vregs = mir_graph_->GetNumOfCodeVRs() + mir_graph_->GetNumUsedCompilerTemps();
       vreg != num_vregs; ++vreg) {
    int32_t sreg = first_vreg_to_ssa_map[vreg];
    if (sreg != INVALID_SREG && mir_graph_->reg_location_[sreg].ref &&
        !mir_graph_->IsConstantNullRef(mir_graph_->reg_location_[sreg])) {
      references->SetBit(vreg);
    }
  }
  // Unmark the references that are merging with a different value.
  for (size_t i = 1u, num_pred = bb->predecessors.size(); i < num_pred; ++i) {
    BasicBlock* pred_bb = mir_graph_->GetBasicBlock(bb->predecessors[i]);
    DCHECK(pred_bb != nullptr);
    DCHECK(pred_bb->data_flow_info != nullptr);
    DCHECK(pred_bb->data_flow_info->vreg_to_ssa_map_exit != nullptr);
    const int32_t* pred_vreg_to_ssa_map = pred_bb->data_flow_info->vreg_to_ssa_map_exit;
    for (uint32_t vreg : references->Indexes()) {
      if (first_vreg_to_ssa_map[vreg] != pred_vreg_to_ssa_map[vreg]) {
        // NOTE: The BitVectorSet::IndexIterator will not check the pointed-to bit again,
        // so clearing the bit has no effect on the iterator.
        references->ClearBit(vreg);
      }
    }
  }
}
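
// Net effect (sketch): a vreg is recorded as a live non-null reference at block entry only
// if the first predecessor exits with a reference in it and every other predecessor exits
// with the same SSA value for that vreg.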

bool Mir2Lir::UpdateReferenceVRegsLocal(MIR* mir, MIR* prev_mir, BitVector* references) {
  DCHECK(mir == nullptr || mir->bb == prev_mir->bb);
  DCHECK(prev_mir != nullptr);
  while (prev_mir != nullptr) {
    if (prev_mir == mir) {
      return true;
    }
    const size_t num_defs = prev_mir->ssa_rep->num_defs;
    const int32_t* defs = prev_mir->ssa_rep->defs;
    if (num_defs == 1u && mir_graph_->reg_location_[defs[0]].ref &&
        !mir_graph_->IsConstantNullRef(mir_graph_->reg_location_[defs[0]])) {
      references->SetBit(mir_graph_->SRegToVReg(defs[0]));
    } else {
      for (size_t i = 0u; i != num_defs; ++i) {
        references->ClearBit(mir_graph_->SRegToVReg(defs[i]));
      }
    }
    prev_mir = prev_mir->next;
  }
  return false;
}

void Mir2Lir::UpdateReferenceVRegs(MIR* mir, MIR* prev_mir, BitVector* references) {
  if (mir == nullptr) {
    // Safepoint in entry sequence.
    InitReferenceVRegs(mir_graph_->GetEntryBlock(), references);
    return;
  }
  if (IsInstructionReturn(mir->dalvikInsn.opcode) ||
      mir->dalvikInsn.opcode == Instruction::RETURN_VOID_NO_BARRIER) {
    references->ClearAllBits();
    if (mir->dalvikInsn.opcode == Instruction::RETURN_OBJECT) {
      references->SetBit(mir_graph_->SRegToVReg(mir->ssa_rep->uses[0]));
    }
    return;
  }
  if (prev_mir != nullptr && mir->bb == prev_mir->bb &&
      UpdateReferenceVRegsLocal(mir, prev_mir, references)) {
    return;
  }
  BasicBlock* bb = mir_graph_->GetBasicBlock(mir->bb);
  DCHECK(bb != nullptr);
  InitReferenceVRegs(bb, references);
  bool success = UpdateReferenceVRegsLocal(mir, bb->first_mir_insn, references);
  DCHECK(success) << "MIR @0x" << std::hex << mir->offset << " not in BB#" << std::dec << mir->bb;
}
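
// Usage sketch: callers visiting safepoints in order can pass the previous safepoint's MIR
// as prev_mir; when both fall in the same basic block the update stays incremental instead
// of being recomputed from the block entry.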

}  // namespace art