/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dex/compiler_internals.h"
#include "dex_file-inl.h"
#include "gc_map.h"
#include "mapping_table.h"
#include "mir_to_lir-inl.h"
#include "verifier/dex_gc_map.h"
#include "verifier/method_verifier.h"

namespace art {

bool Mir2Lir::IsInexpensiveConstant(RegLocation rl_src) {
  bool res = false;
  if (rl_src.is_const) {
    if (rl_src.wide) {
      if (rl_src.fp) {
        res = InexpensiveConstantDouble(mir_graph_->ConstantValueWide(rl_src));
      } else {
        res = InexpensiveConstantLong(mir_graph_->ConstantValueWide(rl_src));
      }
    } else {
      if (rl_src.fp) {
        res = InexpensiveConstantFloat(mir_graph_->ConstantValue(rl_src));
      } else {
        res = InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src));
      }
    }
  }
  return res;
}

void Mir2Lir::MarkSafepointPC(LIR* inst) {
  DCHECK(!inst->flags.use_def_invalid);
  inst->u.m.def_mask = ENCODE_ALL;
  LIR* safepoint_pc = NewLIR0(kPseudoSafepointPC);
  DCHECK_EQ(safepoint_pc->u.m.def_mask, ENCODE_ALL);
}

bool Mir2Lir::FastInstance(uint32_t field_idx, bool is_put, int* field_offset, bool* is_volatile) {
  return cu_->compiler_driver->ComputeInstanceFieldInfo(
      field_idx, mir_graph_->GetCurrentDexCompilationUnit(), is_put, field_offset, is_volatile);
}

/* Remove a LIR from the list. */
void Mir2Lir::UnlinkLIR(LIR* lir) {
  if (UNLIKELY(lir == first_lir_insn_)) {
    first_lir_insn_ = lir->next;
    if (lir->next != NULL) {
      lir->next->prev = NULL;
    } else {
      DCHECK(lir->next == NULL);
      DCHECK(lir == last_lir_insn_);
      last_lir_insn_ = NULL;
    }
  } else if (lir == last_lir_insn_) {
    last_lir_insn_ = lir->prev;
    lir->prev->next = NULL;
  } else if ((lir->prev != NULL) && (lir->next != NULL)) {
    lir->prev->next = lir->next;
    lir->next->prev = lir->prev;
  }
}

/* Convert an instruction to a NOP */
void Mir2Lir::NopLIR(LIR* lir) {
  lir->flags.is_nop = true;
  if (!cu_->verbose) {
    UnlinkLIR(lir);
  }
}

void Mir2Lir::SetMemRefType(LIR* lir, bool is_load, int mem_type) {
  uint64_t *mask_ptr;
  uint64_t mask = ENCODE_MEM;
  DCHECK(GetTargetInstFlags(lir->opcode) & (IS_LOAD | IS_STORE));
  DCHECK(!lir->flags.use_def_invalid);
  if (is_load) {
    mask_ptr = &lir->u.m.use_mask;
  } else {
    mask_ptr = &lir->u.m.def_mask;
  }
  /* Clear out the memref flags */
  *mask_ptr &= ~mask;
  /* ... and then add back the one we need */
  switch (mem_type) {
    case kLiteral:
      DCHECK(is_load);
      *mask_ptr |= ENCODE_LITERAL;
      break;
    case kDalvikReg:
      *mask_ptr |= ENCODE_DALVIK_REG;
      break;
    case kHeapRef:
      *mask_ptr |= ENCODE_HEAP_REF;
      break;
    case kMustNotAlias:
      /* Currently only loads can be marked as kMustNotAlias */
      DCHECK(!(GetTargetInstFlags(lir->opcode) & IS_STORE));
      *mask_ptr |= ENCODE_MUST_NOT_ALIAS;
      break;
    default:
      LOG(FATAL) << "Oat: invalid memref kind - " << mem_type;
  }
}

/*
 * Mark load/store instructions that access Dalvik registers through the stack.
 */
void Mir2Lir::AnnotateDalvikRegAccess(LIR* lir, int reg_id, bool is_load,
                                      bool is64bit) {
  SetMemRefType(lir, is_load, kDalvikReg);

  /*
   * Store the Dalvik register id in alias_info. Mark the MSB if it is a 64-bit
   * access.
   */
  lir->flags.alias_info = ENCODE_ALIAS_INFO(reg_id, is64bit);
}

/*
 * Debugging macros
 */
#define DUMP_RESOURCE_MASK(X)

/* Pretty-print a LIR instruction */
void Mir2Lir::DumpLIRInsn(LIR* lir, unsigned char* base_addr) {
  int offset = lir->offset;
  int dest = lir->operands[0];
  const bool dump_nop = (cu_->enable_debug & (1 << kDebugShowNops));

  /* Handle pseudo-ops individually, and all regular insns as a group */
  switch (lir->opcode) {
    case kPseudoMethodEntry:
      LOG(INFO) << "-------- method entry "
                << PrettyMethod(cu_->method_idx, *cu_->dex_file);
      break;
    case kPseudoMethodExit:
      LOG(INFO) << "-------- Method_Exit";
      break;
    case kPseudoBarrier:
      LOG(INFO) << "-------- BARRIER";
      break;
    case kPseudoEntryBlock:
      LOG(INFO) << "-------- entry offset: 0x" << std::hex << dest;
      break;
    case kPseudoDalvikByteCodeBoundary:
      if (lir->operands[0] == 0) {
        // NOTE: only used for debug listings.
        lir->operands[0] = WrapPointer(ArenaStrdup("No instruction string"));
      }
      LOG(INFO) << "-------- dalvik offset: 0x" << std::hex
                << lir->dalvik_offset << " @ " << reinterpret_cast<char*>(lir->operands[0]);
      break;
    case kPseudoExitBlock:
      LOG(INFO) << "-------- exit offset: 0x" << std::hex << dest;
      break;
    case kPseudoPseudoAlign4:
      LOG(INFO) << reinterpret_cast<uintptr_t>(base_addr) + offset << " (0x" << std::hex
                << offset << "): .align4";
      break;
    case kPseudoEHBlockLabel:
      LOG(INFO) << "Exception_Handling:";
      break;
    case kPseudoTargetLabel:
    case kPseudoNormalBlockLabel:
      LOG(INFO) << "L" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoThrowTarget:
      LOG(INFO) << "LT" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoIntrinsicRetry:
      LOG(INFO) << "IR" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSuspendTarget:
      LOG(INFO) << "LS" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSafepointPC:
      LOG(INFO) << "LsafepointPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoExportedPC:
      LOG(INFO) << "LexportedPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoCaseLabel:
      LOG(INFO) << "LC" << reinterpret_cast<void*>(lir) << ": Case target 0x"
                << std::hex << lir->operands[0] << "|" << std::dec
                << lir->operands[0];
      break;
    default:
      if (lir->flags.is_nop && !dump_nop) {
        break;
      } else {
        std::string op_name(BuildInsnString(GetTargetInstName(lir->opcode),
                                            lir, base_addr));
        std::string op_operands(BuildInsnString(GetTargetInstFmt(lir->opcode),
                                                lir, base_addr));
        LOG(INFO) << StringPrintf("%05x: %-9s%s%s",
                                  reinterpret_cast<unsigned int>(base_addr + offset),
                                  op_name.c_str(), op_operands.c_str(),
                                  lir->flags.is_nop ? "(nop)" : "");
      }
      break;
  }

  if (lir->u.m.use_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, lir->u.m.use_mask, "use"));
  }
  if (lir->u.m.def_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, lir->u.m.def_mask, "def"));
  }
}

void Mir2Lir::DumpPromotionMap() {
  int num_regs = cu_->num_dalvik_registers + cu_->num_compiler_temps + 1;
  for (int i = 0; i < num_regs; i++) {
    PromotionMap v_reg_map = promotion_map_[i];
    std::string buf;
    if (v_reg_map.fp_location == kLocPhysReg) {
      StringAppendF(&buf, " : s%d", v_reg_map.FpReg & FpRegMask());
    }

    std::string buf3;
    if (i < cu_->num_dalvik_registers) {
      StringAppendF(&buf3, "%02d", i);
    } else if (i == mir_graph_->GetMethodSReg()) {
      buf3 = "Method*";
    } else {
      StringAppendF(&buf3, "ct%d", i - cu_->num_dalvik_registers);
    }

    LOG(INFO) << StringPrintf("V[%s] -> %s%d%s", buf3.c_str(),
                              v_reg_map.core_location == kLocPhysReg ?
                              "r" : "SP+", v_reg_map.core_location == kLocPhysReg ?
                              v_reg_map.core_reg : SRegOffset(i),
                              buf.c_str());
  }
}

/* Dump a mapping table */
void Mir2Lir::DumpMappingTable(const char* table_name, const char* descriptor,
                               const char* name, const Signature& signature,
                               const std::vector<uint32_t>& v) {
  if (v.size() > 0) {
    std::string line(StringPrintf("\n %s %s%s_%s_table[%zu] = {", table_name,
                                  descriptor, name, signature.ToString().c_str(), v.size()));
    std::replace(line.begin(), line.end(), ';', '_');
    LOG(INFO) << line;
    for (uint32_t i = 0; i < v.size(); i += 2) {
      line = StringPrintf(" {0x%05x, 0x%04x},", v[i], v[i+1]);
      LOG(INFO) << line;
    }
    LOG(INFO) << " };\n\n";
  }
}

/* Dump instructions and constant pool contents */
void Mir2Lir::CodegenDump() {
  LOG(INFO) << "Dumping LIR insns for "
            << PrettyMethod(cu_->method_idx, *cu_->dex_file);
  LIR* lir_insn;
  int insns_size = cu_->code_item->insns_size_in_code_units_;

  LOG(INFO) << "Regs (excluding ins) : " << cu_->num_regs;
  LOG(INFO) << "Ins : " << cu_->num_ins;
  LOG(INFO) << "Outs : " << cu_->num_outs;
  LOG(INFO) << "CoreSpills : " << num_core_spills_;
  LOG(INFO) << "FPSpills : " << num_fp_spills_;
  LOG(INFO) << "CompilerTemps : " << cu_->num_compiler_temps;
  LOG(INFO) << "Frame size : " << frame_size_;
  LOG(INFO) << "code size is " << total_size_ <<
      " bytes, Dalvik size is " << insns_size * 2;
  LOG(INFO) << "expansion factor: "
            << static_cast<float>(total_size_) / static_cast<float>(insns_size * 2);
  DumpPromotionMap();
  for (lir_insn = first_lir_insn_; lir_insn != NULL; lir_insn = lir_insn->next) {
    DumpLIRInsn(lir_insn, 0);
  }
  for (lir_insn = literal_list_; lir_insn != NULL; lir_insn = lir_insn->next) {
    LOG(INFO) << StringPrintf("%x (%04x): .word (%#x)", lir_insn->offset, lir_insn->offset,
                              lir_insn->operands[0]);
  }

  const DexFile::MethodId& method_id =
      cu_->dex_file->GetMethodId(cu_->method_idx);
  const Signature signature = cu_->dex_file->GetMethodSignature(method_id);
  const char* name = cu_->dex_file->GetMethodName(method_id);
  const char* descriptor(cu_->dex_file->GetMethodDeclaringClassDescriptor(method_id));

  // Dump mapping tables
  DumpMappingTable("PC2Dex_MappingTable", descriptor, name, signature, pc2dex_mapping_table_);
  DumpMappingTable("Dex2PC_MappingTable", descriptor, name, signature, dex2pc_mapping_table_);
}

/*
 * Search the existing constants in the literal pool for an exact or close match
 * within the specified delta (greater than or equal to 0).
 */
LIR* Mir2Lir::ScanLiteralPool(LIR* data_target, int value, unsigned int delta) {
  while (data_target) {
    if ((static_cast<unsigned>(value - data_target->operands[0])) <= delta)
      return data_target;
    data_target = data_target->next;
  }
  return NULL;
}

/* Search the existing constants in the literal pool for an exact wide match */
LIR* Mir2Lir::ScanLiteralPoolWide(LIR* data_target, int val_lo, int val_hi) {
  bool lo_match = false;
  LIR* lo_target = NULL;
  while (data_target) {
    if (lo_match && (data_target->operands[0] == val_hi)) {
      // Record high word in case we need to expand this later.
      lo_target->operands[1] = val_hi;
      return lo_target;
    }
    lo_match = false;
    if (data_target->operands[0] == val_lo) {
      lo_match = true;
      lo_target = data_target;
    }
    data_target = data_target->next;
  }
  return NULL;
}

/*
 * The following are building blocks to insert constants into the pool or
 * instruction streams.
 */

/* Add a 32-bit constant to the constant pool */
LIR* Mir2Lir::AddWordData(LIR* *constant_list_p, int value) {
  /* Add the constant to the literal pool */
  if (constant_list_p) {
    LIR* new_value = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), ArenaAllocator::kAllocData));
    new_value->operands[0] = value;
    new_value->next = *constant_list_p;
    *constant_list_p = new_value;
    estimated_native_code_size_ += sizeof(value);
    return new_value;
  }
  return NULL;
}

/* Add a 64-bit constant to the constant pool or mixed with code */
LIR* Mir2Lir::AddWideData(LIR* *constant_list_p, int val_lo, int val_hi) {
  AddWordData(constant_list_p, val_hi);
  return AddWordData(constant_list_p, val_lo);
}

static void PushWord(std::vector<uint8_t>&buf, int data) {
  buf.push_back(data & 0xff);
  buf.push_back((data >> 8) & 0xff);
  buf.push_back((data >> 16) & 0xff);
  buf.push_back((data >> 24) & 0xff);
}

// Push 8 bytes on 64-bit systems; 4 on 32-bit systems.
static void PushPointer(std::vector<uint8_t>&buf, void const* pointer) {
  uintptr_t data = reinterpret_cast<uintptr_t>(pointer);
  if (sizeof(void*) == sizeof(uint64_t)) {
    PushWord(buf, (data >> (sizeof(void*) * 4)) & 0xFFFFFFFF);
    PushWord(buf, data & 0xFFFFFFFF);
  } else {
    PushWord(buf, data);
  }
}

static void AlignBuffer(std::vector<uint8_t>&buf, size_t offset) {
  while (buf.size() < offset) {
    buf.push_back(0);
  }
}

/* Write the literal pool to the output stream */
void Mir2Lir::InstallLiteralPools() {
  AlignBuffer(code_buffer_, data_offset_);
  LIR* data_lir = literal_list_;
  while (data_lir != NULL) {
    PushWord(code_buffer_, data_lir->operands[0]);
    data_lir = NEXT_LIR(data_lir);
  }
  // Push code and method literals, record offsets for the compiler to patch.
  data_lir = code_literal_list_;
  while (data_lir != NULL) {
    uint32_t target = data_lir->operands[0];
    cu_->compiler_driver->AddCodePatch(cu_->dex_file,
                                       cu_->class_def_idx,
                                       cu_->method_idx,
                                       cu_->invoke_type,
                                       target,
                                       static_cast<InvokeType>(data_lir->operands[1]),
                                       code_buffer_.size());
    const DexFile::MethodId& id = cu_->dex_file->GetMethodId(target);
    // unique value based on target to ensure code deduplication works
    PushPointer(code_buffer_, &id);
    data_lir = NEXT_LIR(data_lir);
  }
  data_lir = method_literal_list_;
  while (data_lir != NULL) {
    uint32_t target = data_lir->operands[0];
    cu_->compiler_driver->AddMethodPatch(cu_->dex_file,
                                         cu_->class_def_idx,
                                         cu_->method_idx,
                                         cu_->invoke_type,
                                         target,
                                         static_cast<InvokeType>(data_lir->operands[1]),
                                         code_buffer_.size());
    const DexFile::MethodId& id = cu_->dex_file->GetMethodId(target);
    // unique value based on target to ensure code deduplication works
    PushPointer(code_buffer_, &id);
    data_lir = NEXT_LIR(data_lir);
  }
}

/* Write the switch tables to the output stream */
void Mir2Lir::InstallSwitchTables() {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable* tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    AlignBuffer(code_buffer_, tab_rec->offset);
    /*
     * For Arm, our reference point is the address of the bx
     * instruction that does the launch, so we have to subtract
     * the auto pc-advance. For other targets the reference point
     * is a label, so we can use the offset as-is.
     */
    int bx_offset = INVALID_OFFSET;
    switch (cu_->instruction_set) {
      case kThumb2:
        DCHECK(tab_rec->anchor->flags.fixup != kFixupNone);
        bx_offset = tab_rec->anchor->offset + 4;
        break;
      case kX86:
        bx_offset = 0;
        break;
      case kMips:
        bx_offset = tab_rec->anchor->offset;
        break;
      default: LOG(FATAL) << "Unexpected instruction set: " << cu_->instruction_set;
    }
    if (cu_->verbose) {
      LOG(INFO) << "Switch table for offset 0x" << std::hex << bx_offset;
    }
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      const int32_t* keys = reinterpret_cast<const int32_t*>(&(tab_rec->table[2]));
      for (int elems = 0; elems < tab_rec->table[1]; elems++) {
        int disp = tab_rec->targets[elems]->offset - bx_offset;
        if (cu_->verbose) {
          LOG(INFO) << " Case[" << elems << "] key: 0x"
                    << std::hex << keys[elems] << ", disp: 0x"
                    << std::hex << disp;
        }
        PushWord(code_buffer_, keys[elems]);
        PushWord(code_buffer_,
                 tab_rec->targets[elems]->offset - bx_offset);
      }
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      for (int elems = 0; elems < tab_rec->table[1]; elems++) {
        int disp = tab_rec->targets[elems]->offset - bx_offset;
        if (cu_->verbose) {
          LOG(INFO) << " Case[" << elems << "] disp: 0x"
                    << std::hex << disp;
        }
        PushWord(code_buffer_, tab_rec->targets[elems]->offset - bx_offset);
      }
    }
  }
}

489/* Write the fill array dta to the output stream */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700490void Mir2Lir::InstallFillArrayData() {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700491 GrowableArray<FillArrayData*>::Iterator iterator(&fill_array_data_);
492 while (true) {
493 Mir2Lir::FillArrayData *tab_rec = iterator.Next();
494 if (tab_rec == NULL) break;
495 AlignBuffer(code_buffer_, tab_rec->offset);
496 for (int i = 0; i < (tab_rec->size + 1) / 2; i++) {
Brian Carlstromdf629502013-07-17 22:39:56 -0700497 code_buffer_.push_back(tab_rec->table[i] & 0xFF);
498 code_buffer_.push_back((tab_rec->table[i] >> 8) & 0xFF);
Brian Carlstrom7940e442013-07-12 13:46:57 -0700499 }
500 }
501}
502
buzbee0d829482013-10-11 15:24:55 -0700503static int AssignLiteralOffsetCommon(LIR* lir, CodeOffset offset) {
Brian Carlstrom02c8cc62013-07-18 15:54:44 -0700504 for (; lir != NULL; lir = lir->next) {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700505 lir->offset = offset;
506 offset += 4;
507 }
508 return offset;
509}
510
buzbee0d829482013-10-11 15:24:55 -0700511static int AssignLiteralPointerOffsetCommon(LIR* lir, CodeOffset offset) {
512 unsigned int element_size = sizeof(void*);
513 // Align to natural pointer size.
514 offset = (offset + (element_size - 1)) & ~(element_size - 1);
515 for (; lir != NULL; lir = lir->next) {
516 lir->offset = offset;
517 offset += element_size;
518 }
519 return offset;
520}
521
Brian Carlstrom7940e442013-07-12 13:46:57 -0700522// Make sure we have a code address for every declared catch entry
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700523bool Mir2Lir::VerifyCatchEntries() {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700524 bool success = true;
525 for (std::set<uint32_t>::const_iterator it = mir_graph_->catches_.begin();
526 it != mir_graph_->catches_.end(); ++it) {
527 uint32_t dex_pc = *it;
528 bool found = false;
529 for (size_t i = 0; i < dex2pc_mapping_table_.size(); i += 2) {
530 if (dex_pc == dex2pc_mapping_table_[i+1]) {
531 found = true;
532 break;
533 }
534 }
535 if (!found) {
536 LOG(INFO) << "Missing native PC for catch entry @ 0x" << std::hex << dex_pc;
537 success = false;
538 }
539 }
540 // Now, try in the other direction
541 for (size_t i = 0; i < dex2pc_mapping_table_.size(); i += 2) {
542 uint32_t dex_pc = dex2pc_mapping_table_[i+1];
543 if (mir_graph_->catches_.find(dex_pc) == mir_graph_->catches_.end()) {
544 LOG(INFO) << "Unexpected catch entry @ dex pc 0x" << std::hex << dex_pc;
545 success = false;
546 }
547 }
548 if (!success) {
549 LOG(INFO) << "Bad dex2pcMapping table in " << PrettyMethod(cu_->method_idx, *cu_->dex_file);
550 LOG(INFO) << "Entries @ decode: " << mir_graph_->catches_.size() << ", Entries in table: "
551 << dex2pc_mapping_table_.size()/2;
552 }
553 return success;
554}
555
556
Brian Carlstrom2ce745c2013-07-17 17:44:30 -0700557void Mir2Lir::CreateMappingTables() {
Brian Carlstrom7940e442013-07-12 13:46:57 -0700558 for (LIR* tgt_lir = first_lir_insn_; tgt_lir != NULL; tgt_lir = NEXT_LIR(tgt_lir)) {
559 if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
560 pc2dex_mapping_table_.push_back(tgt_lir->offset);
561 pc2dex_mapping_table_.push_back(tgt_lir->dalvik_offset);
562 }
563 if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
564 dex2pc_mapping_table_.push_back(tgt_lir->offset);
565 dex2pc_mapping_table_.push_back(tgt_lir->dalvik_offset);
566 }
567 }
568 if (kIsDebugBuild) {
Ian Rogers96faf5b2013-08-09 22:05:32 -0700569 CHECK(VerifyCatchEntries());
Brian Carlstrom7940e442013-07-12 13:46:57 -0700570 }
Ian Rogers96faf5b2013-08-09 22:05:32 -0700571 CHECK_EQ(pc2dex_mapping_table_.size() & 1, 0U);
572 CHECK_EQ(dex2pc_mapping_table_.size() & 1, 0U);
573 uint32_t total_entries = (pc2dex_mapping_table_.size() + dex2pc_mapping_table_.size()) / 2;
574 uint32_t pc2dex_entries = pc2dex_mapping_table_.size() / 2;
575 encoded_mapping_table_.PushBack(total_entries);
576 encoded_mapping_table_.PushBack(pc2dex_entries);
577 encoded_mapping_table_.InsertBack(pc2dex_mapping_table_.begin(), pc2dex_mapping_table_.end());
578 encoded_mapping_table_.InsertBack(dex2pc_mapping_table_.begin(), dex2pc_mapping_table_.end());
579 if (kIsDebugBuild) {
580 // Verify the encoded table holds the expected data.
581 MappingTable table(&encoded_mapping_table_.GetData()[0]);
582 CHECK_EQ(table.TotalSize(), total_entries);
583 CHECK_EQ(table.PcToDexSize(), pc2dex_entries);
584 CHECK_EQ(table.DexToPcSize(), dex2pc_mapping_table_.size() / 2);
585 MappingTable::PcToDexIterator it = table.PcToDexBegin();
586 for (uint32_t i = 0; i < pc2dex_mapping_table_.size(); ++i, ++it) {
587 CHECK_EQ(pc2dex_mapping_table_.at(i), it.NativePcOffset());
588 ++i;
589 CHECK_EQ(pc2dex_mapping_table_.at(i), it.DexPc());
590 }
591 MappingTable::DexToPcIterator it2 = table.DexToPcBegin();
592 for (uint32_t i = 0; i < dex2pc_mapping_table_.size(); ++i, ++it2) {
593 CHECK_EQ(dex2pc_mapping_table_.at(i), it2.NativePcOffset());
594 ++i;
595 CHECK_EQ(dex2pc_mapping_table_.at(i), it2.DexPc());
596 }
597 }
Brian Carlstrom7940e442013-07-12 13:46:57 -0700598}
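// Sketch of the logical layout built above (the element encoding is whatever
// encoded_mapping_table_ uses internally; a ULEB128 vector, like the vmap
// encoder later in this file, is the assumption here):
//   [total_entries][pc2dex_entries]
//   [pc2dex pairs: native pc offset, dex pc] ...
//   [dex2pc pairs: native pc offset, dex pc] ...
// The dex2pc entry count is implied as total_entries - pc2dex_entries.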

class NativePcToReferenceMapBuilder {
 public:
  NativePcToReferenceMapBuilder(std::vector<uint8_t>* table,
                                size_t entries, uint32_t max_native_offset,
                                size_t references_width) : entries_(entries),
                                references_width_(references_width), in_use_(entries),
                                table_(table) {
    // Compute width in bytes needed to hold max_native_offset.
    native_offset_width_ = 0;
    while (max_native_offset != 0) {
      native_offset_width_++;
      max_native_offset >>= 8;
    }
    // Resize table and set up header.
    table->resize((EntryWidth() * entries) + sizeof(uint32_t));
    CHECK_LT(native_offset_width_, 1U << 3);
    (*table)[0] = native_offset_width_ & 7;
    CHECK_LT(references_width_, 1U << 13);
    (*table)[0] |= (references_width_ << 3) & 0xFF;
    (*table)[1] = (references_width_ >> 5) & 0xFF;
    CHECK_LT(entries, 1U << 16);
    (*table)[2] = entries & 0xFF;
    (*table)[3] = (entries >> 8) & 0xFF;
  }
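  // The 4-byte header written above packs, in order:
  //   byte 0, bits 0-2: native_offset_width_ (bytes per stored native offset)
  //   byte 0, bits 3-7: low 5 bits of references_width_
  //   byte 1:           references_width_ >> 5
  //   bytes 2-3:        entry count, little-endian
  // AddEntry() below hashes each native offset into the fixed-size table and
  // resolves collisions by linear probing; because the table has exactly as
  // many slots as entries to be added, the probe always finds a free slot.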

  void AddEntry(uint32_t native_offset, const uint8_t* references) {
    size_t table_index = TableIndex(native_offset);
    while (in_use_[table_index]) {
      table_index = (table_index + 1) % entries_;
    }
    in_use_[table_index] = true;
    SetCodeOffset(table_index, native_offset);
    DCHECK_EQ(native_offset, GetCodeOffset(table_index));
    SetReferences(table_index, references);
  }

 private:
  size_t TableIndex(uint32_t native_offset) {
    return NativePcOffsetToReferenceMap::Hash(native_offset) % entries_;
  }

  uint32_t GetCodeOffset(size_t table_index) {
    uint32_t native_offset = 0;
    size_t table_offset = (table_index * EntryWidth()) + sizeof(uint32_t);
    for (size_t i = 0; i < native_offset_width_; i++) {
      native_offset |= (*table_)[table_offset + i] << (i * 8);
    }
    return native_offset;
  }

  void SetCodeOffset(size_t table_index, uint32_t native_offset) {
    size_t table_offset = (table_index * EntryWidth()) + sizeof(uint32_t);
    for (size_t i = 0; i < native_offset_width_; i++) {
      (*table_)[table_offset + i] = (native_offset >> (i * 8)) & 0xFF;
    }
  }

  void SetReferences(size_t table_index, const uint8_t* references) {
    size_t table_offset = (table_index * EntryWidth()) + sizeof(uint32_t);
    memcpy(&(*table_)[table_offset + native_offset_width_], references, references_width_);
  }

  size_t EntryWidth() const {
    return native_offset_width_ + references_width_;
  }

  // Number of entries in the table.
  const size_t entries_;
  // Number of bytes used to encode the reference bitmap.
  const size_t references_width_;
  // Number of bytes used to encode a native offset.
  size_t native_offset_width_;
  // Entries that are in use.
  std::vector<bool> in_use_;
  // The table we're building.
  std::vector<uint8_t>* const table_;
};

void Mir2Lir::CreateNativeGcMap() {
  const std::vector<uint32_t>& mapping_table = pc2dex_mapping_table_;
  uint32_t max_native_offset = 0;
  for (size_t i = 0; i < mapping_table.size(); i += 2) {
    uint32_t native_offset = mapping_table[i + 0];
    if (native_offset > max_native_offset) {
      max_native_offset = native_offset;
    }
  }
  MethodReference method_ref(cu_->dex_file, cu_->method_idx);
  const std::vector<uint8_t>* gc_map_raw = verifier::MethodVerifier::GetDexGcMap(method_ref);
  verifier::DexPcToReferenceMap dex_gc_map(&(*gc_map_raw)[4], gc_map_raw->size() - 4);
  // Compute native offset to references size.
  NativePcToReferenceMapBuilder native_gc_map_builder(&native_gc_map_,
                                                      mapping_table.size() / 2, max_native_offset,
                                                      dex_gc_map.RegWidth());

  for (size_t i = 0; i < mapping_table.size(); i += 2) {
    uint32_t native_offset = mapping_table[i + 0];
    uint32_t dex_pc = mapping_table[i + 1];
    const uint8_t* references = dex_gc_map.FindBitMap(dex_pc, false);
    CHECK(references != NULL) << "Missing ref for dex pc 0x" << std::hex << dex_pc;
    native_gc_map_builder.AddEntry(native_offset, references);
  }
}

/* Determine the offset of each literal field */
int Mir2Lir::AssignLiteralOffset(CodeOffset offset) {
  offset = AssignLiteralOffsetCommon(literal_list_, offset);
  offset = AssignLiteralPointerOffsetCommon(code_literal_list_, offset);
  offset = AssignLiteralPointerOffsetCommon(method_literal_list_, offset);
  return offset;
}

int Mir2Lir::AssignSwitchTablesOffset(CodeOffset offset) {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable* tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    tab_rec->offset = offset;
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      offset += tab_rec->table[1] * (sizeof(int) * 2);
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      offset += tab_rec->table[1] * sizeof(int);
    }
  }
  return offset;
}

int Mir2Lir::AssignFillArrayDataOffset(CodeOffset offset) {
  GrowableArray<FillArrayData*>::Iterator iterator(&fill_array_data_);
  while (true) {
    Mir2Lir::FillArrayData *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    tab_rec->offset = offset;
    offset += tab_rec->size;
    // word align
    offset = (offset + 3) & ~3;
  }
  return offset;
}

/*
 * Insert a kPseudoCaseLabel at the beginning of the Dalvik
 * offset vaddr if pretty-printing, otherwise use the standard block
 * label. The selected label will be used to fix up the case
 * branch table during the assembly phase. All resource flags
 * are set to prevent code motion. KeyVal is just there for debugging.
 */
LIR* Mir2Lir::InsertCaseLabel(DexOffset vaddr, int keyVal) {
  LIR* boundary_lir = &block_label_list_[mir_graph_->FindBlock(vaddr)->id];
  LIR* res = boundary_lir;
  if (cu_->verbose) {
    // Only pay the expense if we're pretty-printing.
    LIR* new_label = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), ArenaAllocator::kAllocLIR));
    new_label->dalvik_offset = vaddr;
    new_label->opcode = kPseudoCaseLabel;
    new_label->operands[0] = keyVal;
    new_label->flags.fixup = kFixupLabel;
    DCHECK(!new_label->flags.use_def_invalid);
    new_label->u.m.def_mask = ENCODE_ALL;
    InsertLIRAfter(boundary_lir, new_label);
    res = new_label;
  }
  return res;
}

void Mir2Lir::MarkPackedCaseLabels(Mir2Lir::SwitchTable* tab_rec) {
  const uint16_t* table = tab_rec->table;
  DexOffset base_vaddr = tab_rec->vaddr;
  const int32_t *targets = reinterpret_cast<const int32_t*>(&table[4]);
  int entries = table[1];
  int low_key = s4FromSwitchData(&table[2]);
  for (int i = 0; i < entries; i++) {
    tab_rec->targets[i] = InsertCaseLabel(base_vaddr + targets[i], i + low_key);
  }
}

void Mir2Lir::MarkSparseCaseLabels(Mir2Lir::SwitchTable* tab_rec) {
  const uint16_t* table = tab_rec->table;
  DexOffset base_vaddr = tab_rec->vaddr;
  int entries = table[1];
  const int32_t* keys = reinterpret_cast<const int32_t*>(&table[2]);
  const int32_t* targets = &keys[entries];
  for (int i = 0; i < entries; i++) {
    tab_rec->targets[i] = InsertCaseLabel(base_vaddr + targets[i], keys[i]);
  }
}

void Mir2Lir::ProcessSwitchTables() {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    if (tab_rec->table[0] == Instruction::kPackedSwitchSignature) {
      MarkPackedCaseLabels(tab_rec);
    } else if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      MarkSparseCaseLabels(tab_rec);
    } else {
      LOG(FATAL) << "Invalid switch table";
    }
  }
}

void Mir2Lir::DumpSparseSwitchTable(const uint16_t* table) {
  /*
   * Sparse switch data format:
   *  ushort ident = 0x0200   magic value
   *  ushort size             number of entries in the table; > 0
   *  int keys[size]          keys, sorted low-to-high; 32-bit aligned
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (2+size*4) 16-bit code units.
   */
  uint16_t ident = table[0];
  int entries = table[1];
  const int32_t* keys = reinterpret_cast<const int32_t*>(&table[2]);
  const int32_t* targets = &keys[entries];
  LOG(INFO) << "Sparse switch table - ident:0x" << std::hex << ident
            << ", entries: " << std::dec << entries;
  for (int i = 0; i < entries; i++) {
    LOG(INFO) << " Key[" << keys[i] << "] -> 0x" << std::hex << targets[i];
  }
}
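// Illustrative sparse-switch payload in the format dumped above (hypothetical
// values, shown as little-endian 16-bit code units): two cases with keys
// {3, 10} and targets {+0x20, +0x30} relative to the switch opcode:
//   0x0200, 0x0002, 0x0003, 0x0000, 0x000a, 0x0000, 0x0020, 0x0000, 0x0030, 0x0000
// Total: 2 + 2*4 = 10 code units, matching the size formula in the comment.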

void Mir2Lir::DumpPackedSwitchTable(const uint16_t* table) {
  /*
   * Packed switch data format:
   *  ushort ident = 0x0100   magic value
   *  ushort size             number of entries in the table
   *  int first_key           first (and lowest) switch case value
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (4+size*2) 16-bit code units.
   */
  uint16_t ident = table[0];
  const int32_t* targets = reinterpret_cast<const int32_t*>(&table[4]);
  int entries = table[1];
  int low_key = s4FromSwitchData(&table[2]);
  LOG(INFO) << "Packed switch table - ident:0x" << std::hex << ident
            << ", entries: " << std::dec << entries << ", low_key: " << low_key;
  for (int i = 0; i < entries; i++) {
    LOG(INFO) << " Key[" << (i + low_key) << "] -> 0x" << std::hex
              << targets[i];
  }
}
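// Illustrative packed-switch payload in the format dumped above (hypothetical
// values, little-endian 16-bit code units): two cases starting at first_key 5
// with targets {+0x14, +0x28}:
//   0x0100, 0x0002, 0x0005, 0x0000, 0x0014, 0x0000, 0x0028, 0x0000
// Total: 4 + 2*2 = 8 code units; the case keys are 5 and 6.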

/* Set up special LIR to mark a Dalvik byte-code instruction start for pretty printing */
void Mir2Lir::MarkBoundary(DexOffset offset, const char* inst_str) {
  // NOTE: only used for debug listings.
  NewLIR1(kPseudoDalvikByteCodeBoundary, WrapPointer(ArenaStrdup(inst_str)));
}

bool Mir2Lir::EvaluateBranch(Instruction::Code opcode, int32_t src1, int32_t src2) {
  bool is_taken;
  switch (opcode) {
    case Instruction::IF_EQ: is_taken = (src1 == src2); break;
    case Instruction::IF_NE: is_taken = (src1 != src2); break;
    case Instruction::IF_LT: is_taken = (src1 < src2); break;
    case Instruction::IF_GE: is_taken = (src1 >= src2); break;
    case Instruction::IF_GT: is_taken = (src1 > src2); break;
    case Instruction::IF_LE: is_taken = (src1 <= src2); break;
    case Instruction::IF_EQZ: is_taken = (src1 == 0); break;
    case Instruction::IF_NEZ: is_taken = (src1 != 0); break;
    case Instruction::IF_LTZ: is_taken = (src1 < 0); break;
    case Instruction::IF_GEZ: is_taken = (src1 >= 0); break;
    case Instruction::IF_GTZ: is_taken = (src1 > 0); break;
    case Instruction::IF_LEZ: is_taken = (src1 <= 0); break;
    default:
      LOG(FATAL) << "Unexpected opcode " << opcode;
      is_taken = false;
  }
  return is_taken;
}

// Convert relation of src1/src2 to src2/src1
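// For example, when a target can only encode the comparison with its operands
// swapped, (src1 < src2) becomes (src2 > src1), so kCondLt maps to kCondGt.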
ConditionCode Mir2Lir::FlipComparisonOrder(ConditionCode before) {
  ConditionCode res;
  switch (before) {
    case kCondEq: res = kCondEq; break;
    case kCondNe: res = kCondNe; break;
    case kCondLt: res = kCondGt; break;
    case kCondGt: res = kCondLt; break;
    case kCondLe: res = kCondGe; break;
    case kCondGe: res = kCondLe; break;
    default:
      res = static_cast<ConditionCode>(0);
      LOG(FATAL) << "Unexpected ccode " << before;
  }
  return res;
}

// TODO: move to mir_to_lir.cc
Mir2Lir::Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena)
    : Backend(arena),
      literal_list_(NULL),
      method_literal_list_(NULL),
      code_literal_list_(NULL),
      first_fixup_(NULL),
      cu_(cu),
      mir_graph_(mir_graph),
      switch_tables_(arena, 4, kGrowableArraySwitchTables),
      fill_array_data_(arena, 4, kGrowableArrayFillArrayData),
      throw_launchpads_(arena, 2048, kGrowableArrayThrowLaunchPads),
      suspend_launchpads_(arena, 4, kGrowableArraySuspendLaunchPads),
      intrinsic_launchpads_(arena, 2048, kGrowableArrayMisc),
      tempreg_info_(arena, 20, kGrowableArrayMisc),
      reginfo_map_(arena, 64, kGrowableArrayMisc),
      pointer_storage_(arena, 128, kGrowableArrayMisc),
      data_offset_(0),
      total_size_(0),
      block_label_list_(NULL),
      current_dalvik_offset_(0),
      estimated_native_code_size_(0),
      reg_pool_(NULL),
      live_sreg_(0),
      num_core_spills_(0),
      num_fp_spills_(0),
      frame_size_(0),
      core_spill_mask_(0),
      fp_spill_mask_(0),
      first_lir_insn_(NULL),
      last_lir_insn_(NULL) {
  promotion_map_ = static_cast<PromotionMap*>
      (arena_->Alloc((cu_->num_dalvik_registers + cu_->num_compiler_temps + 1) *
                     sizeof(promotion_map_[0]), ArenaAllocator::kAllocRegAlloc));
  // Reserve pointer id 0 for NULL.
  size_t null_idx = WrapPointer(NULL);
  DCHECK_EQ(null_idx, 0U);
}

void Mir2Lir::Materialize() {
  cu_->NewTimingSplit("RegisterAllocation");
  CompilerInitializeRegAlloc();  // Needs to happen after SSA naming

  /* Allocate Registers using simple local allocation scheme */
  SimpleRegAlloc();

  if (mir_graph_->IsSpecialCase()) {
    /*
     * Custom codegen for special cases. If for any reason the
     * special codegen doesn't succeed, first_lir_insn_ will be
     * set to NULL.
     */
    cu_->NewTimingSplit("SpecialMIR2LIR");
    SpecialMIR2LIR(mir_graph_->GetSpecialCase());
  }

  /* Convert MIR to LIR, etc. */
  if (first_lir_insn_ == NULL) {
    MethodMIR2LIR();
  }

  /* Method is not empty */
  if (first_lir_insn_) {
    // mark the targets of switch statement case labels
    ProcessSwitchTables();

    /* Convert LIR into machine code. */
    AssembleLIR();

    if (cu_->verbose) {
      CodegenDump();
    }
  }
}

CompiledMethod* Mir2Lir::GetCompiledMethod() {
  // Combine vmap tables - core regs, then fp regs - into vmap_table
  std::vector<uint16_t> raw_vmap_table;
  // Core regs may have been inserted out of order - sort first
  std::sort(core_vmap_table_.begin(), core_vmap_table_.end());
  for (size_t i = 0; i < core_vmap_table_.size(); ++i) {
    // Copy, stripping out the phys register sort key
    raw_vmap_table.push_back(~(-1 << VREG_NUM_WIDTH) & core_vmap_table_[i]);
  }
  // If we have a frame, push a marker to take place of lr
  if (frame_size_ > 0) {
    raw_vmap_table.push_back(INVALID_VREG);
  } else {
    DCHECK_EQ(__builtin_popcount(core_spill_mask_), 0);
    DCHECK_EQ(__builtin_popcount(fp_spill_mask_), 0);
  }
  // Combine vmap tables - core regs, then fp regs. fp regs already sorted
  for (uint32_t i = 0; i < fp_vmap_table_.size(); i++) {
    raw_vmap_table.push_back(fp_vmap_table_[i]);
  }
  UnsignedLeb128EncodingVector vmap_encoder;
  // Prefix the encoded data with its size.
  vmap_encoder.PushBack(raw_vmap_table.size());
  for (uint16_t cur : raw_vmap_table) {
    vmap_encoder.PushBack(cur);
  }
  CompiledMethod* result =
      new CompiledMethod(*cu_->compiler_driver, cu_->instruction_set, code_buffer_, frame_size_,
                         core_spill_mask_, fp_spill_mask_, encoded_mapping_table_.GetData(),
                         vmap_encoder.GetData(), native_gc_map_);
  return result;
}

int Mir2Lir::ComputeFrameSize() {
  /* Figure out the frame size */
  static const uint32_t kAlignMask = kStackAlignment - 1;
  uint32_t size = (num_core_spills_ + num_fp_spills_ +
                   1 /* filler word */ + cu_->num_regs + cu_->num_outs +
                   cu_->num_compiler_temps + 1 /* cur_method* */)
                   * sizeof(uint32_t);
  /* Align and set */
  return (size + kAlignMask) & ~(kAlignMask);
}
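// Worked example with illustrative numbers (and assuming a 16-byte
// kStackAlignment): 5 core spills, 0 fp spills, 10 non-in Dalvik registers,
// 2 outs and 0 compiler temps give (5 + 0 + 1 + 10 + 2 + 0 + 1) * 4 = 76 bytes,
// which rounds up to an 80-byte frame.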

/*
 * Append an LIR instruction to the LIR list maintained by a compilation
 * unit
 */
void Mir2Lir::AppendLIR(LIR* lir) {
  if (first_lir_insn_ == NULL) {
    DCHECK(last_lir_insn_ == NULL);
    last_lir_insn_ = first_lir_insn_ = lir;
    lir->prev = lir->next = NULL;
  } else {
    last_lir_insn_->next = lir;
    lir->prev = last_lir_insn_;
    lir->next = NULL;
    last_lir_insn_ = lir;
  }
}

/*
 * Insert an LIR instruction before the current instruction, which cannot be the
 * first instruction.
 *
 * prev_lir <-> new_lir <-> current_lir
 */
void Mir2Lir::InsertLIRBefore(LIR* current_lir, LIR* new_lir) {
  DCHECK(current_lir->prev != NULL);
  LIR *prev_lir = current_lir->prev;

  prev_lir->next = new_lir;
  new_lir->prev = prev_lir;
  new_lir->next = current_lir;
  current_lir->prev = new_lir;
}

/*
 * Insert an LIR instruction after the current instruction, which cannot be the
 * last instruction.
 *
 * current_lir -> new_lir -> old_next
 */
void Mir2Lir::InsertLIRAfter(LIR* current_lir, LIR* new_lir) {
  new_lir->prev = current_lir;
  new_lir->next = current_lir->next;
  current_lir->next = new_lir;
  new_lir->next->prev = new_lir;
}

}  // namespace art