/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dex/compiler_internals.h"
#include "dex_file-inl.h"
#include "gc_map.h"
#include "gc_map_builder.h"
#include "mapping_table.h"
#include "mir_to_lir-inl.h"
#include "dex/quick/dex_file_method_inliner.h"
#include "dex/quick/dex_file_to_method_inliner_map.h"
#include "dex/verification_results.h"
#include "dex/verified_method.h"
#include "verifier/dex_gc_map.h"
#include "verifier/method_verifier.h"
#include "vmap_table.h"

namespace art {

namespace {

/* Dump a mapping table */
template <typename It>
void DumpMappingTable(const char* table_name, const char* descriptor, const char* name,
                      const Signature& signature, uint32_t size, It first) {
  if (size != 0) {
    std::string line(StringPrintf("\n  %s %s%s_%s_table[%u] = {", table_name,
                                  descriptor, name, signature.ToString().c_str(), size));
    std::replace(line.begin(), line.end(), ';', '_');
    LOG(INFO) << line;
    for (uint32_t i = 0; i != size; ++i) {
      line = StringPrintf("    {0x%05x, 0x%04x},", first.NativePcOffset(), first.DexPc());
      ++first;
      LOG(INFO) << line;
    }
    LOG(INFO) << "  };\n\n";
  }
}

}  // anonymous namespace

bool Mir2Lir::IsInexpensiveConstant(RegLocation rl_src) {
  bool res = false;
  if (rl_src.is_const) {
    if (rl_src.wide) {
      if (rl_src.fp) {
        res = InexpensiveConstantDouble(mir_graph_->ConstantValueWide(rl_src));
      } else {
        res = InexpensiveConstantLong(mir_graph_->ConstantValueWide(rl_src));
      }
    } else {
      if (rl_src.fp) {
        res = InexpensiveConstantFloat(mir_graph_->ConstantValue(rl_src));
      } else {
        res = InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src));
      }
    }
  }
  return res;
}

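// The two helpers below mark a native PC as a safepoint by attaching a kPseudoSafepointPC
// pseudo-instruction whose def mask claims all resources, so optimization passes will not move
// code across it. CreateMappingTables() later emits a pc2dex entry for every such marker.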
void Mir2Lir::MarkSafepointPC(LIR* inst) {
  DCHECK(!inst->flags.use_def_invalid);
  inst->u.m.def_mask = &kEncodeAll;
  LIR* safepoint_pc = NewLIR0(kPseudoSafepointPC);
  DCHECK(safepoint_pc->u.m.def_mask->Equals(kEncodeAll));
}

void Mir2Lir::MarkSafepointPCAfter(LIR* after) {
  DCHECK(!after->flags.use_def_invalid);
  after->u.m.def_mask = &kEncodeAll;
  // As NewLIR0 uses Append, we need to create the LIR by hand.
  LIR* safepoint_pc = RawLIR(current_dalvik_offset_, kPseudoSafepointPC);
  InsertLIRAfter(after, safepoint_pc);
  DCHECK(safepoint_pc->u.m.def_mask->Equals(kEncodeAll));
}

/* Remove a LIR from the list. */
void Mir2Lir::UnlinkLIR(LIR* lir) {
  if (UNLIKELY(lir == first_lir_insn_)) {
    first_lir_insn_ = lir->next;
    if (lir->next != NULL) {
      lir->next->prev = NULL;
    } else {
      DCHECK(lir->next == NULL);
      DCHECK(lir == last_lir_insn_);
      last_lir_insn_ = NULL;
    }
  } else if (lir == last_lir_insn_) {
    last_lir_insn_ = lir->prev;
    lir->prev->next = NULL;
  } else if ((lir->prev != NULL) && (lir->next != NULL)) {
    lir->prev->next = lir->next;
    lir->next->prev = lir->prev;
  }
}

/* Convert an instruction to a NOP */
void Mir2Lir::NopLIR(LIR* lir) {
  lir->flags.is_nop = true;
  if (!cu_->verbose) {
    UnlinkLIR(lir);
  }
}

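// Narrow the memory component of a load/store's use/def mask to a single kind of memory
// reference (literal pool, Dalvik register, heap reference, or must-not-alias). The refined
// masks are consumed by the LIR local optimization passes when deciding whether two memory
// accesses may alias.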
void Mir2Lir::SetMemRefType(LIR* lir, bool is_load, int mem_type) {
  DCHECK(GetTargetInstFlags(lir->opcode) & (IS_LOAD | IS_STORE));
  DCHECK(!lir->flags.use_def_invalid);
  // TODO: Avoid the extra Arena allocation!
  const ResourceMask** mask_ptr;
  ResourceMask mask;
  if (is_load) {
    mask_ptr = &lir->u.m.use_mask;
  } else {
    mask_ptr = &lir->u.m.def_mask;
  }
  mask = **mask_ptr;
  /* Clear out the memref flags */
  mask.ClearBits(kEncodeMem);
  /* ..and then add back the one we need */
  switch (mem_type) {
    case ResourceMask::kLiteral:
      DCHECK(is_load);
      mask.SetBit(ResourceMask::kLiteral);
      break;
    case ResourceMask::kDalvikReg:
      mask.SetBit(ResourceMask::kDalvikReg);
      break;
    case ResourceMask::kHeapRef:
      mask.SetBit(ResourceMask::kHeapRef);
      break;
    case ResourceMask::kMustNotAlias:
      /* Currently only loads can be marked as kMustNotAlias */
      DCHECK(!(GetTargetInstFlags(lir->opcode) & IS_STORE));
      mask.SetBit(ResourceMask::kMustNotAlias);
      break;
    default:
      LOG(FATAL) << "Oat: invalid memref kind - " << mem_type;
  }
  *mask_ptr = mask_cache_.GetMask(mask);
}

/*
 * Mark load/store instructions that access Dalvik registers through the stack.
 */
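// For example, a 64-bit store to Dalvik vreg 5 records ENCODE_ALIAS_INFO(5, true), so later
// passes can tell which vreg slot (and width) the instruction touches.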
void Mir2Lir::AnnotateDalvikRegAccess(LIR* lir, int reg_id, bool is_load,
                                      bool is64bit) {
  DCHECK((is_load ? lir->u.m.use_mask : lir->u.m.def_mask)->Intersection(kEncodeMem).Equals(
      kEncodeDalvikReg));

  /*
   * Store the Dalvik register id in alias_info. Mark the MSB if it is a 64-bit
   * access.
   */
  lir->flags.alias_info = ENCODE_ALIAS_INFO(reg_id, is64bit);
}

/*
 * Debugging macros
 */
#define DUMP_RESOURCE_MASK(X)

/* Pretty-print a LIR instruction */
void Mir2Lir::DumpLIRInsn(LIR* lir, unsigned char* base_addr) {
  int offset = lir->offset;
  int dest = lir->operands[0];
  const bool dump_nop = (cu_->enable_debug & (1 << kDebugShowNops));

  /* Handle pseudo-ops individually, and all regular insns as a group */
  switch (lir->opcode) {
    case kPseudoMethodEntry:
      LOG(INFO) << "-------- method entry "
                << PrettyMethod(cu_->method_idx, *cu_->dex_file);
      break;
    case kPseudoMethodExit:
      LOG(INFO) << "-------- Method_Exit";
      break;
    case kPseudoBarrier:
      LOG(INFO) << "-------- BARRIER";
      break;
    case kPseudoEntryBlock:
      LOG(INFO) << "-------- entry offset: 0x" << std::hex << dest;
      break;
    case kPseudoDalvikByteCodeBoundary:
      if (lir->operands[0] == 0) {
        // NOTE: only used for debug listings.
        lir->operands[0] = WrapPointer(ArenaStrdup("No instruction string"));
      }
      LOG(INFO) << "-------- dalvik offset: 0x" << std::hex
                << lir->dalvik_offset << " @ "
                << reinterpret_cast<char*>(UnwrapPointer(lir->operands[0]));
      break;
    case kPseudoExitBlock:
      LOG(INFO) << "-------- exit offset: 0x" << std::hex << dest;
      break;
    case kPseudoPseudoAlign4:
      LOG(INFO) << reinterpret_cast<uintptr_t>(base_addr) + offset << " (0x" << std::hex
                << offset << "): .align4";
      break;
    case kPseudoEHBlockLabel:
      LOG(INFO) << "Exception_Handling:";
      break;
    case kPseudoTargetLabel:
    case kPseudoNormalBlockLabel:
      LOG(INFO) << "L" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoThrowTarget:
      LOG(INFO) << "LT" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoIntrinsicRetry:
      LOG(INFO) << "IR" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSuspendTarget:
      LOG(INFO) << "LS" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSafepointPC:
      LOG(INFO) << "LsafepointPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoExportedPC:
      LOG(INFO) << "LexportedPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoCaseLabel:
      LOG(INFO) << "LC" << reinterpret_cast<void*>(lir) << ": Case target 0x"
                << std::hex << lir->operands[0] << "|" << std::dec <<
          lir->operands[0];
      break;
    default:
      if (lir->flags.is_nop && !dump_nop) {
        break;
      } else {
        std::string op_name(BuildInsnString(GetTargetInstName(lir->opcode),
                                            lir, base_addr));
        std::string op_operands(BuildInsnString(GetTargetInstFmt(lir->opcode),
                                                lir, base_addr));
        LOG(INFO) << StringPrintf("%5p: %-9s%s%s",
                                  base_addr + offset,
                                  op_name.c_str(), op_operands.c_str(),
                                  lir->flags.is_nop ? "(nop)" : "");
      }
      break;
  }

  if (lir->u.m.use_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, *lir->u.m.use_mask, "use"));
  }
  if (lir->u.m.def_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, *lir->u.m.def_mask, "def"));
  }
}

void Mir2Lir::DumpPromotionMap() {
  int num_regs = cu_->num_dalvik_registers + mir_graph_->GetNumUsedCompilerTemps();
  for (int i = 0; i < num_regs; i++) {
    PromotionMap v_reg_map = promotion_map_[i];
    std::string buf;
    if (v_reg_map.fp_location == kLocPhysReg) {
      StringAppendF(&buf, " : s%d", RegStorage::RegNum(v_reg_map.FpReg));
    }

    std::string buf3;
    if (i < cu_->num_dalvik_registers) {
      StringAppendF(&buf3, "%02d", i);
    } else if (i == mir_graph_->GetMethodSReg()) {
      buf3 = "Method*";
    } else {
      StringAppendF(&buf3, "ct%d", i - cu_->num_dalvik_registers);
    }

    LOG(INFO) << StringPrintf("V[%s] -> %s%d%s", buf3.c_str(),
                              v_reg_map.core_location == kLocPhysReg ?
                              "r" : "SP+", v_reg_map.core_location == kLocPhysReg ?
                              v_reg_map.core_reg : SRegOffset(i),
                              buf.c_str());
  }
}

void Mir2Lir::UpdateLIROffsets() {
  // Only used for code listings.
  size_t offset = 0;
  for (LIR* lir = first_lir_insn_; lir != nullptr; lir = lir->next) {
    lir->offset = offset;
    if (!lir->flags.is_nop && !IsPseudoLirOp(lir->opcode)) {
      offset += GetInsnSize(lir);
    } else if (lir->opcode == kPseudoPseudoAlign4) {
      offset += (offset & 0x2);
    }
  }
}

/* Dump instructions and constant pool contents */
void Mir2Lir::CodegenDump() {
  LOG(INFO) << "Dumping LIR insns for "
            << PrettyMethod(cu_->method_idx, *cu_->dex_file);
  LIR* lir_insn;
  int insns_size = cu_->code_item->insns_size_in_code_units_;

  LOG(INFO) << "Regs (excluding ins) : " << cu_->num_regs;
  LOG(INFO) << "Ins : " << cu_->num_ins;
  LOG(INFO) << "Outs : " << cu_->num_outs;
  LOG(INFO) << "CoreSpills : " << num_core_spills_;
  LOG(INFO) << "FPSpills : " << num_fp_spills_;
  LOG(INFO) << "CompilerTemps : " << mir_graph_->GetNumUsedCompilerTemps();
  LOG(INFO) << "Frame size : " << frame_size_;
  LOG(INFO) << "code size is " << total_size_ <<
      " bytes, Dalvik size is " << insns_size * 2;
  LOG(INFO) << "expansion factor: "
            << static_cast<float>(total_size_) / static_cast<float>(insns_size * 2);
  DumpPromotionMap();
  UpdateLIROffsets();
  for (lir_insn = first_lir_insn_; lir_insn != NULL; lir_insn = lir_insn->next) {
    DumpLIRInsn(lir_insn, 0);
  }
  for (lir_insn = literal_list_; lir_insn != NULL; lir_insn = lir_insn->next) {
    LOG(INFO) << StringPrintf("%x (%04x): .word (%#x)", lir_insn->offset, lir_insn->offset,
                              lir_insn->operands[0]);
  }

  const DexFile::MethodId& method_id =
      cu_->dex_file->GetMethodId(cu_->method_idx);
  const Signature signature = cu_->dex_file->GetMethodSignature(method_id);
  const char* name = cu_->dex_file->GetMethodName(method_id);
  const char* descriptor(cu_->dex_file->GetMethodDeclaringClassDescriptor(method_id));

  // Dump mapping tables
  if (!encoded_mapping_table_.empty()) {
    MappingTable table(&encoded_mapping_table_[0]);
    DumpMappingTable("PC2Dex_MappingTable", descriptor, name, signature,
                     table.PcToDexSize(), table.PcToDexBegin());
    DumpMappingTable("Dex2PC_MappingTable", descriptor, name, signature,
                     table.DexToPcSize(), table.DexToPcBegin());
  }
}

/*
 * Search the existing constants in the literal pool for an exact or close match
 * within specified delta (greater or equal to 0).
 */
LIR* Mir2Lir::ScanLiteralPool(LIR* data_target, int value, unsigned int delta) {
  while (data_target) {
    if ((static_cast<unsigned>(value - data_target->operands[0])) <= delta)
      return data_target;
    data_target = data_target->next;
  }
  return NULL;
}

/* Search the existing constants in the literal pool for an exact wide match */
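// AddWideData() below adds the high word first and the low word second; because AddWordData()
// prepends to the list, a wide constant appears as its low word immediately followed by its
// high word, which is the pattern this scan looks for.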
LIR* Mir2Lir::ScanLiteralPoolWide(LIR* data_target, int val_lo, int val_hi) {
  bool lo_match = false;
  LIR* lo_target = NULL;
  while (data_target) {
    if (lo_match && (data_target->operands[0] == val_hi)) {
      // Record high word in case we need to expand this later.
      lo_target->operands[1] = val_hi;
      return lo_target;
    }
    lo_match = false;
    if (data_target->operands[0] == val_lo) {
      lo_match = true;
      lo_target = data_target;
    }
    data_target = data_target->next;
  }
  return NULL;
}

/* Search the existing constants in the literal pool for an exact method match */
LIR* Mir2Lir::ScanLiteralPoolMethod(LIR* data_target, const MethodReference& method) {
  while (data_target) {
    if (static_cast<uint32_t>(data_target->operands[0]) == method.dex_method_index &&
        UnwrapPointer(data_target->operands[1]) == method.dex_file) {
      return data_target;
    }
    data_target = data_target->next;
  }
  return nullptr;
}

/*
 * The following are building blocks to insert constants into the pool or
 * instruction streams.
 */

/* Add a 32-bit constant to the constant pool */
LIR* Mir2Lir::AddWordData(LIR* *constant_list_p, int value) {
  /* Add the constant to the literal pool */
  if (constant_list_p) {
    LIR* new_value = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), kArenaAllocData));
    new_value->operands[0] = value;
    new_value->next = *constant_list_p;
    *constant_list_p = new_value;
    estimated_native_code_size_ += sizeof(value);
    return new_value;
  }
  return NULL;
}

/* Add a 64-bit constant to the constant pool or mixed with code */
LIR* Mir2Lir::AddWideData(LIR* *constant_list_p, int val_lo, int val_hi) {
  AddWordData(constant_list_p, val_hi);
  return AddWordData(constant_list_p, val_lo);
}

static void Push32(std::vector<uint8_t>&buf, int data) {
  buf.push_back(data & 0xff);
  buf.push_back((data >> 8) & 0xff);
  buf.push_back((data >> 16) & 0xff);
  buf.push_back((data >> 24) & 0xff);
}

// Push 8 bytes on 64-bit target systems; 4 on 32-bit target systems.
static void PushPointer(std::vector<uint8_t>&buf, const void* pointer, bool target64) {
  uint64_t data = reinterpret_cast<uintptr_t>(pointer);
  if (target64) {
    Push32(buf, data & 0xFFFFFFFF);
    Push32(buf, (data >> 32) & 0xFFFFFFFF);
  } else {
    Push32(buf, static_cast<uint32_t>(data));
  }
}

static void AlignBuffer(std::vector<uint8_t>&buf, size_t offset) {
  while (buf.size() < offset) {
    buf.push_back(0);
  }
}

/* Write the literal pool to the output stream */
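// Layout: plain 32-bit data literals come first, then one pointer-sized slot per code, method
// and class literal. Each slot is filled with the address of the target MethodId/TypeId (a
// stable, target-unique value so identical code still deduplicates), and a patch record is
// registered with the compiler driver so the final addresses can be patched in later.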
void Mir2Lir::InstallLiteralPools() {
  AlignBuffer(code_buffer_, data_offset_);
  LIR* data_lir = literal_list_;
  while (data_lir != NULL) {
    Push32(code_buffer_, data_lir->operands[0]);
    data_lir = NEXT_LIR(data_lir);
  }
  // Push code and method literals, record offsets for the compiler to patch.
  data_lir = code_literal_list_;
  while (data_lir != NULL) {
    uint32_t target_method_idx = data_lir->operands[0];
    const DexFile* target_dex_file =
        reinterpret_cast<const DexFile*>(UnwrapPointer(data_lir->operands[1]));
    cu_->compiler_driver->AddCodePatch(cu_->dex_file,
                                       cu_->class_def_idx,
                                       cu_->method_idx,
                                       cu_->invoke_type,
                                       target_method_idx,
                                       target_dex_file,
                                       static_cast<InvokeType>(data_lir->operands[2]),
                                       code_buffer_.size());
    const DexFile::MethodId& target_method_id = target_dex_file->GetMethodId(target_method_idx);
    // unique value based on target to ensure code deduplication works
    PushPointer(code_buffer_, &target_method_id, cu_->target64);
    data_lir = NEXT_LIR(data_lir);
  }
  data_lir = method_literal_list_;
  while (data_lir != NULL) {
    uint32_t target_method_idx = data_lir->operands[0];
    const DexFile* target_dex_file =
        reinterpret_cast<const DexFile*>(UnwrapPointer(data_lir->operands[1]));
    cu_->compiler_driver->AddMethodPatch(cu_->dex_file,
                                         cu_->class_def_idx,
                                         cu_->method_idx,
                                         cu_->invoke_type,
                                         target_method_idx,
                                         target_dex_file,
                                         static_cast<InvokeType>(data_lir->operands[2]),
                                         code_buffer_.size());
    const DexFile::MethodId& target_method_id = target_dex_file->GetMethodId(target_method_idx);
    // unique value based on target to ensure code deduplication works
    PushPointer(code_buffer_, &target_method_id, cu_->target64);
    data_lir = NEXT_LIR(data_lir);
  }
  // Push class literals.
  data_lir = class_literal_list_;
  while (data_lir != NULL) {
    uint32_t target_method_idx = data_lir->operands[0];
    cu_->compiler_driver->AddClassPatch(cu_->dex_file,
                                        cu_->class_def_idx,
                                        cu_->method_idx,
                                        target_method_idx,
                                        code_buffer_.size());
    const DexFile::TypeId& target_method_id = cu_->dex_file->GetTypeId(target_method_idx);
    // unique value based on target to ensure code deduplication works
    PushPointer(code_buffer_, &target_method_id, cu_->target64);
    data_lir = NEXT_LIR(data_lir);
  }
}

/* Write the switch tables to the output stream */
void Mir2Lir::InstallSwitchTables() {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable* tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    AlignBuffer(code_buffer_, tab_rec->offset);
    /*
     * For Arm, our reference point is the address of the bx
     * instruction that does the launch, so we have to subtract
     * the auto pc-advance. For other targets the reference point
     * is a label, so we can use the offset as-is.
     */
    int bx_offset = INVALID_OFFSET;
    switch (cu_->instruction_set) {
      case kThumb2:
        DCHECK(tab_rec->anchor->flags.fixup != kFixupNone);
        bx_offset = tab_rec->anchor->offset + 4;
        break;
      case kX86:
      case kX86_64:
        bx_offset = 0;
        break;
      case kArm64:
      case kMips:
        bx_offset = tab_rec->anchor->offset;
        break;
      default: LOG(FATAL) << "Unexpected instruction set: " << cu_->instruction_set;
    }
    if (cu_->verbose) {
      LOG(INFO) << "Switch table for offset 0x" << std::hex << bx_offset;
    }
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      const int32_t* keys = reinterpret_cast<const int32_t*>(&(tab_rec->table[2]));
      for (int elems = 0; elems < tab_rec->table[1]; elems++) {
        int disp = tab_rec->targets[elems]->offset - bx_offset;
        if (cu_->verbose) {
          LOG(INFO) << "  Case[" << elems << "] key: 0x"
                    << std::hex << keys[elems] << ", disp: 0x"
                    << std::hex << disp;
        }
        Push32(code_buffer_, keys[elems]);
        Push32(code_buffer_,
               tab_rec->targets[elems]->offset - bx_offset);
      }
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      for (int elems = 0; elems < tab_rec->table[1]; elems++) {
        int disp = tab_rec->targets[elems]->offset - bx_offset;
        if (cu_->verbose) {
          LOG(INFO) << "  Case[" << elems << "] disp: 0x"
                    << std::hex << disp;
        }
        Push32(code_buffer_, tab_rec->targets[elems]->offset - bx_offset);
      }
    }
  }
}

/* Write the fill array data to the output stream */
void Mir2Lir::InstallFillArrayData() {
  GrowableArray<FillArrayData*>::Iterator iterator(&fill_array_data_);
  while (true) {
    Mir2Lir::FillArrayData *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    AlignBuffer(code_buffer_, tab_rec->offset);
    for (int i = 0; i < (tab_rec->size + 1) / 2; i++) {
      code_buffer_.push_back(tab_rec->table[i] & 0xFF);
      code_buffer_.push_back((tab_rec->table[i] >> 8) & 0xFF);
    }
  }
}

static int AssignLiteralOffsetCommon(LIR* lir, CodeOffset offset) {
  for (; lir != NULL; lir = lir->next) {
    lir->offset = offset;
    offset += 4;
  }
  return offset;
}

static int AssignLiteralPointerOffsetCommon(LIR* lir, CodeOffset offset,
                                            unsigned int element_size) {
  // Align to natural pointer size.
  offset = RoundUp(offset, element_size);
  for (; lir != NULL; lir = lir->next) {
    lir->offset = offset;
    offset += element_size;
  }
  return offset;
}

// Make sure we have a code address for every declared catch entry
bool Mir2Lir::VerifyCatchEntries() {
  MappingTable table(&encoded_mapping_table_[0]);
  std::vector<uint32_t> dex_pcs;
  dex_pcs.reserve(table.DexToPcSize());
  for (auto it = table.DexToPcBegin(), end = table.DexToPcEnd(); it != end; ++it) {
    dex_pcs.push_back(it.DexPc());
  }
  // Sort dex_pcs, so that we can quickly check it against the ordered mir_graph_->catches_.
  std::sort(dex_pcs.begin(), dex_pcs.end());

  bool success = true;
  auto it = dex_pcs.begin(), end = dex_pcs.end();
  for (uint32_t dex_pc : mir_graph_->catches_) {
    while (it != end && *it < dex_pc) {
      LOG(INFO) << "Unexpected catch entry @ dex pc 0x" << std::hex << *it;
      ++it;
      success = false;
    }
    if (it == end || *it > dex_pc) {
      LOG(INFO) << "Missing native PC for catch entry @ 0x" << std::hex << dex_pc;
      success = false;
    } else {
      ++it;
    }
  }
  if (!success) {
    LOG(INFO) << "Bad dex2pcMapping table in " << PrettyMethod(cu_->method_idx, *cu_->dex_file);
    LOG(INFO) << "Entries @ decode: " << mir_graph_->catches_.size() << ", Entries in table: "
              << table.DexToPcSize();
  }
  return success;
}


Vladimir Marko1e6cb632013-11-28 16:27:29 +0000631 uint32_t pc2dex_data_size = 0u;
632 uint32_t pc2dex_entries = 0u;
633 uint32_t pc2dex_offset = 0u;
634 uint32_t pc2dex_dalvik_offset = 0u;
635 uint32_t dex2pc_data_size = 0u;
636 uint32_t dex2pc_entries = 0u;
637 uint32_t dex2pc_offset = 0u;
638 uint32_t dex2pc_dalvik_offset = 0u;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700639 for (LIR* tgt_lir = first_lir_insn_; tgt_lir != NULL; tgt_lir = NEXT_LIR(tgt_lir)) {
640 if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
Vladimir Marko1e6cb632013-11-28 16:27:29 +0000641 pc2dex_entries += 1;
642 DCHECK(pc2dex_offset <= tgt_lir->offset);
643 pc2dex_data_size += UnsignedLeb128Size(tgt_lir->offset - pc2dex_offset);
644 pc2dex_data_size += SignedLeb128Size(static_cast<int32_t>(tgt_lir->dalvik_offset) -
645 static_cast<int32_t>(pc2dex_dalvik_offset));
646 pc2dex_offset = tgt_lir->offset;
647 pc2dex_dalvik_offset = tgt_lir->dalvik_offset;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700648 }
649 if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
Vladimir Marko1e6cb632013-11-28 16:27:29 +0000650 dex2pc_entries += 1;
651 DCHECK(dex2pc_offset <= tgt_lir->offset);
652 dex2pc_data_size += UnsignedLeb128Size(tgt_lir->offset - dex2pc_offset);
653 dex2pc_data_size += SignedLeb128Size(static_cast<int32_t>(tgt_lir->dalvik_offset) -
654 static_cast<int32_t>(dex2pc_dalvik_offset));
655 dex2pc_offset = tgt_lir->offset;
656 dex2pc_dalvik_offset = tgt_lir->dalvik_offset;
Brian Carlstrom7940e442013-07-12 13:46:57 -0700657 }
658 }
Vladimir Marko1e6cb632013-11-28 16:27:29 +0000659
660 uint32_t total_entries = pc2dex_entries + dex2pc_entries;
661 uint32_t hdr_data_size = UnsignedLeb128Size(total_entries) + UnsignedLeb128Size(pc2dex_entries);
662 uint32_t data_size = hdr_data_size + pc2dex_data_size + dex2pc_data_size;
Vladimir Marko06606b92013-12-02 15:31:08 +0000663 encoded_mapping_table_.resize(data_size);
664 uint8_t* write_pos = &encoded_mapping_table_[0];
665 write_pos = EncodeUnsignedLeb128(write_pos, total_entries);
666 write_pos = EncodeUnsignedLeb128(write_pos, pc2dex_entries);
667 DCHECK_EQ(static_cast<size_t>(write_pos - &encoded_mapping_table_[0]), hdr_data_size);
668 uint8_t* write_pos2 = write_pos + pc2dex_data_size;
Vladimir Marko1e6cb632013-11-28 16:27:29 +0000669
Vladimir Marko1e6cb632013-11-28 16:27:29 +0000670 pc2dex_offset = 0u;
671 pc2dex_dalvik_offset = 0u;
Vladimir Marko06606b92013-12-02 15:31:08 +0000672 dex2pc_offset = 0u;
673 dex2pc_dalvik_offset = 0u;
674 for (LIR* tgt_lir = first_lir_insn_; tgt_lir != NULL; tgt_lir = NEXT_LIR(tgt_lir)) {
675 if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
676 DCHECK(pc2dex_offset <= tgt_lir->offset);
677 write_pos = EncodeUnsignedLeb128(write_pos, tgt_lir->offset - pc2dex_offset);
678 write_pos = EncodeSignedLeb128(write_pos, static_cast<int32_t>(tgt_lir->dalvik_offset) -
679 static_cast<int32_t>(pc2dex_dalvik_offset));
680 pc2dex_offset = tgt_lir->offset;
681 pc2dex_dalvik_offset = tgt_lir->dalvik_offset;
682 }
683 if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
684 DCHECK(dex2pc_offset <= tgt_lir->offset);
685 write_pos2 = EncodeUnsignedLeb128(write_pos2, tgt_lir->offset - dex2pc_offset);
686 write_pos2 = EncodeSignedLeb128(write_pos2, static_cast<int32_t>(tgt_lir->dalvik_offset) -
687 static_cast<int32_t>(dex2pc_dalvik_offset));
688 dex2pc_offset = tgt_lir->offset;
689 dex2pc_dalvik_offset = tgt_lir->dalvik_offset;
690 }
Vladimir Marko1e6cb632013-11-28 16:27:29 +0000691 }
Vladimir Marko06606b92013-12-02 15:31:08 +0000692 DCHECK_EQ(static_cast<size_t>(write_pos - &encoded_mapping_table_[0]),
693 hdr_data_size + pc2dex_data_size);
694 DCHECK_EQ(static_cast<size_t>(write_pos2 - &encoded_mapping_table_[0]), data_size);
Vladimir Marko1e6cb632013-11-28 16:27:29 +0000695
Ian Rogers96faf5b2013-08-09 22:05:32 -0700696 if (kIsDebugBuild) {
Vladimir Marko06606b92013-12-02 15:31:08 +0000697 CHECK(VerifyCatchEntries());
698
Ian Rogers96faf5b2013-08-09 22:05:32 -0700699 // Verify the encoded table holds the expected data.
Vladimir Marko06606b92013-12-02 15:31:08 +0000700 MappingTable table(&encoded_mapping_table_[0]);
Ian Rogers96faf5b2013-08-09 22:05:32 -0700701 CHECK_EQ(table.TotalSize(), total_entries);
702 CHECK_EQ(table.PcToDexSize(), pc2dex_entries);
Vladimir Marko1e6cb632013-11-28 16:27:29 +0000703 auto it = table.PcToDexBegin();
Vladimir Marko06606b92013-12-02 15:31:08 +0000704 auto it2 = table.DexToPcBegin();
705 for (LIR* tgt_lir = first_lir_insn_; tgt_lir != NULL; tgt_lir = NEXT_LIR(tgt_lir)) {
706 if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
707 CHECK_EQ(tgt_lir->offset, it.NativePcOffset());
708 CHECK_EQ(tgt_lir->dalvik_offset, it.DexPc());
709 ++it;
710 }
711 if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
712 CHECK_EQ(tgt_lir->offset, it2.NativePcOffset());
713 CHECK_EQ(tgt_lir->dalvik_offset, it2.DexPc());
714 ++it2;
715 }
Ian Rogers96faf5b2013-08-09 22:05:32 -0700716 }
Vladimir Marko1e6cb632013-11-28 16:27:29 +0000717 CHECK(it == table.PcToDexEnd());
Vladimir Marko1e6cb632013-11-28 16:27:29 +0000718 CHECK(it2 == table.DexToPcEnd());
Ian Rogers96faf5b2013-08-09 22:05:32 -0700719 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700720}
721
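// Build the native GC map by joining two tables: the pc2dex mapping produced above and the
// verifier's per-dex-pc reference bitmaps. Every safepoint's native offset is paired with the
// reference bitmap of its dex pc, so the runtime can locate object references at suspend points.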
void Mir2Lir::CreateNativeGcMap() {
  DCHECK(!encoded_mapping_table_.empty());
  MappingTable mapping_table(&encoded_mapping_table_[0]);
  uint32_t max_native_offset = 0;
  for (auto it = mapping_table.PcToDexBegin(), end = mapping_table.PcToDexEnd(); it != end; ++it) {
    uint32_t native_offset = it.NativePcOffset();
    if (native_offset > max_native_offset) {
      max_native_offset = native_offset;
    }
  }
  MethodReference method_ref(cu_->dex_file, cu_->method_idx);
  const std::vector<uint8_t>& gc_map_raw =
      mir_graph_->GetCurrentDexCompilationUnit()->GetVerifiedMethod()->GetDexGcMap();
  verifier::DexPcToReferenceMap dex_gc_map(&(gc_map_raw)[0]);
  DCHECK_EQ(gc_map_raw.size(), dex_gc_map.RawSize());
  // Compute native offset to references size.
  GcMapBuilder native_gc_map_builder(&native_gc_map_,
                                     mapping_table.PcToDexSize(),
                                     max_native_offset, dex_gc_map.RegWidth());

  for (auto it = mapping_table.PcToDexBegin(), end = mapping_table.PcToDexEnd(); it != end; ++it) {
    uint32_t native_offset = it.NativePcOffset();
    uint32_t dex_pc = it.DexPc();
    const uint8_t* references = dex_gc_map.FindBitMap(dex_pc, false);
    CHECK(references != NULL) << "Missing ref for dex pc 0x" << std::hex << dex_pc <<
        ": " << PrettyMethod(cu_->method_idx, *cu_->dex_file);
    native_gc_map_builder.AddEntry(native_offset, references);
  }
}

/* Determine the offset of each literal field */
int Mir2Lir::AssignLiteralOffset(CodeOffset offset) {
  offset = AssignLiteralOffsetCommon(literal_list_, offset);
  unsigned int ptr_size = GetInstructionSetPointerSize(cu_->instruction_set);
  offset = AssignLiteralPointerOffsetCommon(code_literal_list_, offset, ptr_size);
  offset = AssignLiteralPointerOffsetCommon(method_literal_list_, offset, ptr_size);
  offset = AssignLiteralPointerOffsetCommon(class_literal_list_, offset, ptr_size);
  return offset;
}

int Mir2Lir::AssignSwitchTablesOffset(CodeOffset offset) {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable* tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    tab_rec->offset = offset;
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      offset += tab_rec->table[1] * (sizeof(int) * 2);
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      offset += tab_rec->table[1] * sizeof(int);
    }
  }
  return offset;
}

int Mir2Lir::AssignFillArrayDataOffset(CodeOffset offset) {
  GrowableArray<FillArrayData*>::Iterator iterator(&fill_array_data_);
  while (true) {
    Mir2Lir::FillArrayData *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    tab_rec->offset = offset;
    offset += tab_rec->size;
    // word align
    offset = RoundUp(offset, 4);
  }
  return offset;
}

/*
 * Insert a kPseudoCaseLabel at the beginning of the Dalvik
 * offset vaddr if pretty-printing, otherwise use the standard block
 * label. The selected label will be used to fix up the case
 * branch table during the assembly phase. All resource flags
 * are set to prevent code motion. KeyVal is just there for debugging.
 */
LIR* Mir2Lir::InsertCaseLabel(DexOffset vaddr, int keyVal) {
  LIR* boundary_lir = &block_label_list_[mir_graph_->FindBlock(vaddr)->id];
  LIR* res = boundary_lir;
  if (cu_->verbose) {
    // Only pay the expense if we're pretty-printing.
    LIR* new_label = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), kArenaAllocLIR));
    new_label->dalvik_offset = vaddr;
    new_label->opcode = kPseudoCaseLabel;
    new_label->operands[0] = keyVal;
    new_label->flags.fixup = kFixupLabel;
    DCHECK(!new_label->flags.use_def_invalid);
    new_label->u.m.def_mask = &kEncodeAll;
    InsertLIRAfter(boundary_lir, new_label);
    res = new_label;
  }
  return res;
}

void Mir2Lir::MarkPackedCaseLabels(Mir2Lir::SwitchTable* tab_rec) {
  const uint16_t* table = tab_rec->table;
  DexOffset base_vaddr = tab_rec->vaddr;
  const int32_t *targets = reinterpret_cast<const int32_t*>(&table[4]);
  int entries = table[1];
  int low_key = s4FromSwitchData(&table[2]);
  for (int i = 0; i < entries; i++) {
    tab_rec->targets[i] = InsertCaseLabel(base_vaddr + targets[i], i + low_key);
  }
}

void Mir2Lir::MarkSparseCaseLabels(Mir2Lir::SwitchTable* tab_rec) {
  const uint16_t* table = tab_rec->table;
  DexOffset base_vaddr = tab_rec->vaddr;
  int entries = table[1];
  const int32_t* keys = reinterpret_cast<const int32_t*>(&table[2]);
  const int32_t* targets = &keys[entries];
  for (int i = 0; i < entries; i++) {
    tab_rec->targets[i] = InsertCaseLabel(base_vaddr + targets[i], keys[i]);
  }
}

void Mir2Lir::ProcessSwitchTables() {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    if (tab_rec->table[0] == Instruction::kPackedSwitchSignature) {
      MarkPackedCaseLabels(tab_rec);
    } else if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      MarkSparseCaseLabels(tab_rec);
    } else {
      LOG(FATAL) << "Invalid switch table";
    }
  }
}

void Mir2Lir::DumpSparseSwitchTable(const uint16_t* table) {
  /*
   * Sparse switch data format:
   *  ushort ident = 0x0200   magic value
   *  ushort size             number of entries in the table; > 0
   *  int keys[size]          keys, sorted low-to-high; 32-bit aligned
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (2+size*4) 16-bit code units.
   */
  uint16_t ident = table[0];
  int entries = table[1];
  const int32_t* keys = reinterpret_cast<const int32_t*>(&table[2]);
  const int32_t* targets = &keys[entries];
  LOG(INFO) << "Sparse switch table - ident:0x" << std::hex << ident
            << ", entries: " << std::dec << entries;
  for (int i = 0; i < entries; i++) {
    LOG(INFO) << "  Key[" << keys[i] << "] -> 0x" << std::hex << targets[i];
  }
}

void Mir2Lir::DumpPackedSwitchTable(const uint16_t* table) {
  /*
   * Packed switch data format:
   *  ushort ident = 0x0100   magic value
   *  ushort size             number of entries in the table
   *  int first_key           first (and lowest) switch case value
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (4+size*2) 16-bit code units.
   */
  uint16_t ident = table[0];
  const int32_t* targets = reinterpret_cast<const int32_t*>(&table[4]);
  int entries = table[1];
  int low_key = s4FromSwitchData(&table[2]);
  LOG(INFO) << "Packed switch table - ident:0x" << std::hex << ident
            << ", entries: " << std::dec << entries << ", low_key: " << low_key;
  for (int i = 0; i < entries; i++) {
    LOG(INFO) << "  Key[" << (i + low_key) << "] -> 0x" << std::hex
              << targets[i];
  }
}

/* Set up special LIR to mark a Dalvik byte-code instruction start for pretty printing */
void Mir2Lir::MarkBoundary(DexOffset offset, const char* inst_str) {
  // NOTE: only used for debug listings.
  NewLIR1(kPseudoDalvikByteCodeBoundary, WrapPointer(ArenaStrdup(inst_str)));
}

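// Statically evaluate a compare-and-branch over known constant operands; used when constant
// folding lets the compiler decide a conditional branch at compile time.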
bool Mir2Lir::EvaluateBranch(Instruction::Code opcode, int32_t src1, int32_t src2) {
  bool is_taken;
  switch (opcode) {
    case Instruction::IF_EQ: is_taken = (src1 == src2); break;
    case Instruction::IF_NE: is_taken = (src1 != src2); break;
    case Instruction::IF_LT: is_taken = (src1 < src2); break;
    case Instruction::IF_GE: is_taken = (src1 >= src2); break;
    case Instruction::IF_GT: is_taken = (src1 > src2); break;
    case Instruction::IF_LE: is_taken = (src1 <= src2); break;
    case Instruction::IF_EQZ: is_taken = (src1 == 0); break;
    case Instruction::IF_NEZ: is_taken = (src1 != 0); break;
    case Instruction::IF_LTZ: is_taken = (src1 < 0); break;
    case Instruction::IF_GEZ: is_taken = (src1 >= 0); break;
    case Instruction::IF_GTZ: is_taken = (src1 > 0); break;
    case Instruction::IF_LEZ: is_taken = (src1 <= 0); break;
    default:
      LOG(FATAL) << "Unexpected opcode " << opcode;
      is_taken = false;
  }
  return is_taken;
}

// Convert relation of src1/src2 to src2/src1
ConditionCode Mir2Lir::FlipComparisonOrder(ConditionCode before) {
  ConditionCode res;
  switch (before) {
    case kCondEq: res = kCondEq; break;
    case kCondNe: res = kCondNe; break;
    case kCondLt: res = kCondGt; break;
    case kCondGt: res = kCondLt; break;
    case kCondLe: res = kCondGe; break;
    case kCondGe: res = kCondLe; break;
    default:
      res = static_cast<ConditionCode>(0);
      LOG(FATAL) << "Unexpected ccode " << before;
  }
  return res;
}

ConditionCode Mir2Lir::NegateComparison(ConditionCode before) {
  ConditionCode res;
  switch (before) {
    case kCondEq: res = kCondNe; break;
    case kCondNe: res = kCondEq; break;
    case kCondLt: res = kCondGe; break;
    case kCondGt: res = kCondLe; break;
    case kCondLe: res = kCondGt; break;
    case kCondGe: res = kCondLt; break;
    default:
      res = static_cast<ConditionCode>(0);
      LOG(FATAL) << "Unexpected ccode " << before;
  }
  return res;
}

// TODO: move to mir_to_lir.cc
Mir2Lir::Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena)
    : Backend(arena),
      literal_list_(NULL),
      method_literal_list_(NULL),
      class_literal_list_(NULL),
      code_literal_list_(NULL),
      first_fixup_(NULL),
      cu_(cu),
      mir_graph_(mir_graph),
      switch_tables_(arena, 4, kGrowableArraySwitchTables),
      fill_array_data_(arena, 4, kGrowableArrayFillArrayData),
      tempreg_info_(arena, 20, kGrowableArrayMisc),
      reginfo_map_(arena, RegStorage::kMaxRegs, kGrowableArrayMisc),
      pointer_storage_(arena, 128, kGrowableArrayMisc),
      data_offset_(0),
      total_size_(0),
      block_label_list_(NULL),
      promotion_map_(NULL),
      current_dalvik_offset_(0),
      estimated_native_code_size_(0),
      reg_pool_(NULL),
      live_sreg_(0),
      num_core_spills_(0),
      num_fp_spills_(0),
      frame_size_(0),
      core_spill_mask_(0),
      fp_spill_mask_(0),
      first_lir_insn_(NULL),
      last_lir_insn_(NULL),
      slow_paths_(arena, 32, kGrowableArraySlowPaths),
      mem_ref_type_(ResourceMask::kHeapRef),
      mask_cache_(arena) {
  // Reserve pointer id 0 for NULL.
  size_t null_idx = WrapPointer(NULL);
  DCHECK_EQ(null_idx, 0U);
}

void Mir2Lir::Materialize() {
  cu_->NewTimingSplit("RegisterAllocation");
  CompilerInitializeRegAlloc();  // Needs to happen after SSA naming

  /* Allocate Registers using simple local allocation scheme */
  SimpleRegAlloc();

  /* First try the custom light codegen for special cases. */
  DCHECK(cu_->compiler_driver->GetMethodInlinerMap() != nullptr);
  bool special_worked = cu_->compiler_driver->GetMethodInlinerMap()->GetMethodInliner(cu_->dex_file)
      ->GenSpecial(this, cu_->method_idx);

  /* Take normal path for converting MIR to LIR only if the special codegen did not succeed. */
  if (special_worked == false) {
    MethodMIR2LIR();
  }

  /* Method is not empty */
  if (first_lir_insn_) {
    // mark the targets of switch statement case labels
    ProcessSwitchTables();

    /* Convert LIR into machine code. */
    AssembleLIR();

    if ((cu_->enable_debug & (1 << kDebugCodegenDump)) != 0) {
      CodegenDump();
    }
  }
}

CompiledMethod* Mir2Lir::GetCompiledMethod() {
  // Combine vmap tables - core regs, then fp regs - into vmap_table.
  Leb128EncodingVector vmap_encoder;
  if (frame_size_ > 0) {
    // Prefix the encoded data with its size.
    size_t size = core_vmap_table_.size() + 1 /* marker */ + fp_vmap_table_.size();
    vmap_encoder.Reserve(size + 1u);  // All values are likely to be one byte in ULEB128 (<128).
    vmap_encoder.PushBackUnsigned(size);
    // Core regs may have been inserted out of order - sort first.
    std::sort(core_vmap_table_.begin(), core_vmap_table_.end());
    for (size_t i = 0 ; i < core_vmap_table_.size(); ++i) {
      // Copy, stripping out the phys register sort key.
      vmap_encoder.PushBackUnsigned(
          ~(-1 << VREG_NUM_WIDTH) & (core_vmap_table_[i] + VmapTable::kEntryAdjustment));
    }
    // Push a marker to take place of lr.
    vmap_encoder.PushBackUnsigned(VmapTable::kAdjustedFpMarker);
    // fp regs already sorted.
    for (uint32_t i = 0; i < fp_vmap_table_.size(); i++) {
      vmap_encoder.PushBackUnsigned(fp_vmap_table_[i] + VmapTable::kEntryAdjustment);
    }
  } else {
    DCHECK_EQ(POPCOUNT(core_spill_mask_), 0);
    DCHECK_EQ(POPCOUNT(fp_spill_mask_), 0);
    DCHECK_EQ(core_vmap_table_.size(), 0u);
    DCHECK_EQ(fp_vmap_table_.size(), 0u);
    vmap_encoder.PushBackUnsigned(0u);  // Size is 0.
  }

  std::unique_ptr<std::vector<uint8_t>> cfi_info(ReturnCallFrameInformation());
  CompiledMethod* result =
      new CompiledMethod(cu_->compiler_driver, cu_->instruction_set, code_buffer_, frame_size_,
                         core_spill_mask_, fp_spill_mask_, encoded_mapping_table_,
                         vmap_encoder.GetData(), native_gc_map_, cfi_info.get());
  return result;
}

size_t Mir2Lir::GetMaxPossibleCompilerTemps() const {
  // Choose a reasonably small value in order to contain stack growth.
  // Backends that are smarter about spill region can return larger values.
  const size_t max_compiler_temps = 10;
  return max_compiler_temps;
}

size_t Mir2Lir::GetNumBytesForCompilerTempSpillRegion() {
  // By default assume that the Mir2Lir will need one slot for each temporary.
  // If the backend can better determine temps that have non-overlapping ranges and
  // temps that do not need to be spilled, it can actually provide a smaller region.
  return (mir_graph_->GetNumUsedCompilerTemps() * sizeof(uint32_t));
}

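// Frame size = spilled core regs + spilled fp regs + a 4-byte filler + one 32-bit slot per
// local and outgoing argument + the compiler-temp spill region, rounded up to the stack
// alignment. As a rough illustration (assuming 4-byte GPR spills and 16-byte alignment):
// 2 core spills, no fp spills, 4 locals+outs and no temps give 8 + 4 + 16 + 0 = 28 -> 32.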
int Mir2Lir::ComputeFrameSize() {
  /* Figure out the frame size */
  uint32_t size = num_core_spills_ * GetBytesPerGprSpillLocation(cu_->instruction_set)
                  + num_fp_spills_ * GetBytesPerFprSpillLocation(cu_->instruction_set)
                  + sizeof(uint32_t)  // Filler.
                  + (cu_->num_regs + cu_->num_outs) * sizeof(uint32_t)
                  + GetNumBytesForCompilerTempSpillRegion();
  /* Align and set */
  return RoundUp(size, kStackAlignment);
}

/*
 * Append an LIR instruction to the LIR list maintained by a compilation
 * unit
 */
void Mir2Lir::AppendLIR(LIR* lir) {
  if (first_lir_insn_ == NULL) {
    DCHECK(last_lir_insn_ == NULL);
    last_lir_insn_ = first_lir_insn_ = lir;
    lir->prev = lir->next = NULL;
  } else {
    last_lir_insn_->next = lir;
    lir->prev = last_lir_insn_;
    lir->next = NULL;
    last_lir_insn_ = lir;
  }
}

/*
 * Insert an LIR instruction before the current instruction, which cannot be the
 * first instruction.
 *
 * prev_lir <-> new_lir <-> current_lir
 */
void Mir2Lir::InsertLIRBefore(LIR* current_lir, LIR* new_lir) {
  DCHECK(current_lir->prev != NULL);
  LIR *prev_lir = current_lir->prev;

  prev_lir->next = new_lir;
  new_lir->prev = prev_lir;
  new_lir->next = current_lir;
  current_lir->prev = new_lir;
}

/*
 * Insert an LIR instruction after the current instruction, which cannot be the
 * last instruction.
 *
 * current_lir -> new_lir -> old_next
 */
void Mir2Lir::InsertLIRAfter(LIR* current_lir, LIR* new_lir) {
  new_lir->prev = current_lir;
  new_lir->next = current_lir->next;
  current_lir->next = new_lir;
  new_lir->next->prev = new_lir;
}

bool Mir2Lir::IsPowerOfTwo(uint64_t x) {
  return (x & (x - 1)) == 0;
}

// Returns the index of the lowest set bit in 'x'.
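// Callers must pass a non-zero value; the loops below never terminate for x == 0.
// Example: LowestSetBit(0x50) == 4.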
int32_t Mir2Lir::LowestSetBit(uint64_t x) {
  int bit_posn = 0;
  while ((x & 0xf) == 0) {
    bit_posn += 4;
    x >>= 4;
  }
  while ((x & 1) == 0) {
    bit_posn++;
    x >>= 1;
  }
  return bit_posn;
}

1154 DCHECK(rl_src.wide);
1155 DCHECK(rl_dest.wide);
1156 return (abs(mir_graph_->SRegToVReg(rl_src.s_reg_low) - mir_graph_->SRegToVReg(rl_dest.s_reg_low)) == 1);
1157}
1158
buzbee2700f7e2014-03-07 09:46:20 -08001159LIR *Mir2Lir::OpCmpMemImmBranch(ConditionCode cond, RegStorage temp_reg, RegStorage base_reg,
Mark Mendell766e9292014-01-27 07:55:47 -08001160 int offset, int check_value, LIR* target) {
1161 // Handle this for architectures that can't compare to memory.
buzbee695d13a2014-04-19 13:32:20 -07001162 Load32Disp(base_reg, offset, temp_reg);
Mark Mendell766e9292014-01-27 07:55:47 -08001163 LIR* branch = OpCmpImmBranch(cond, temp_reg, check_value, target);
1164 return branch;
1165}
1166
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001167void Mir2Lir::AddSlowPath(LIRSlowPath* slowpath) {
1168 slow_paths_.Insert(slowpath);
1169}
Mark Mendell55d0eac2014-02-06 11:02:52 -08001170
void Mir2Lir::LoadCodeAddress(const MethodReference& target_method, InvokeType type,
                              SpecialTargetRegister symbolic_reg) {
  LIR* data_target = ScanLiteralPoolMethod(code_literal_list_, target_method);
  if (data_target == NULL) {
    data_target = AddWordData(&code_literal_list_, target_method.dex_method_index);
    data_target->operands[1] = WrapPointer(const_cast<DexFile*>(target_method.dex_file));
    // NOTE: The invoke type doesn't contribute to the literal identity. In fact, we can have
    // the same method invoked with kVirtual, kSuper and kInterface but the class linker will
    // resolve these invokes to the same method, so we don't care which one we record here.
    data_target->operands[2] = type;
  }
  LIR* load_pc_rel = OpPcRelLoad(TargetReg(symbolic_reg), data_target);
  AppendLIR(load_pc_rel);
  DCHECK_NE(cu_->instruction_set, kMips) << reinterpret_cast<void*>(data_target);
}

void Mir2Lir::LoadMethodAddress(const MethodReference& target_method, InvokeType type,
                                SpecialTargetRegister symbolic_reg) {
  LIR* data_target = ScanLiteralPoolMethod(method_literal_list_, target_method);
  if (data_target == NULL) {
    data_target = AddWordData(&method_literal_list_, target_method.dex_method_index);
    data_target->operands[1] = WrapPointer(const_cast<DexFile*>(target_method.dex_file));
    // NOTE: The invoke type doesn't contribute to the literal identity. In fact, we can have
    // the same method invoked with kVirtual, kSuper and kInterface but the class linker will
    // resolve these invokes to the same method, so we don't care which one we record here.
    data_target->operands[2] = type;
  }
  LIR* load_pc_rel = OpPcRelLoad(TargetReg(symbolic_reg), data_target);
  AppendLIR(load_pc_rel);
  DCHECK_NE(cu_->instruction_set, kMips) << reinterpret_cast<void*>(data_target);
}

void Mir2Lir::LoadClassType(uint32_t type_idx, SpecialTargetRegister symbolic_reg) {
  // Use the literal pool and a PC-relative load from a data word.
  LIR* data_target = ScanLiteralPool(class_literal_list_, type_idx, 0);
  if (data_target == nullptr) {
    data_target = AddWordData(&class_literal_list_, type_idx);
  }
  LIR* load_pc_rel = OpPcRelLoad(TargetReg(symbolic_reg), data_target);
  AppendLIR(load_pc_rel);
}

std::vector<uint8_t>* Mir2Lir::ReturnCallFrameInformation() {
  // Default case is to do nothing.
  return nullptr;
}

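// Narrow a wide (64-bit) location to its low 32-bit half, updating register bookkeeping:
// for a register pair the low register is kept, and for a single 64-bit register the matching
// 32-bit view is found and liveness is transferred to it.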
RegLocation Mir2Lir::NarrowRegLoc(RegLocation loc) {
  if (loc.location == kLocPhysReg) {
    DCHECK(!loc.reg.Is32Bit());
    if (loc.reg.IsPair()) {
      RegisterInfo* info_lo = GetRegInfo(loc.reg.GetLow());
      RegisterInfo* info_hi = GetRegInfo(loc.reg.GetHigh());
      info_lo->SetIsWide(false);
      info_hi->SetIsWide(false);
      loc.reg = info_lo->GetReg();
    } else {
      RegisterInfo* info = GetRegInfo(loc.reg);
      RegisterInfo* info_new = info->FindMatchingView(RegisterInfo::k32SoloStorageMask);
      DCHECK(info_new != nullptr);
      if (info->IsLive() && (info->SReg() == loc.s_reg_low)) {
        info->MarkDead();
        info_new->MarkLive(loc.s_reg_low);
      }
      loc.reg = info_new->GetReg();
    }
    DCHECK(loc.reg.Valid());
  }
  loc.wide = false;
  return loc;
}

void Mir2Lir::GenMachineSpecificExtendedMethodMIR(BasicBlock* bb, MIR* mir) {
  LOG(FATAL) << "Unknown MIR opcode not supported on this architecture";
}

}  // namespace art