/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dex/compiler_internals.h"
#include "dex_file-inl.h"
#include "gc_map.h"
#include "mapping_table.h"
#include "mir_to_lir-inl.h"
#include "verifier/dex_gc_map.h"
#include "verifier/method_verifier.h"

namespace art {

bool Mir2Lir::IsInexpensiveConstant(RegLocation rl_src) {
  bool res = false;
  if (rl_src.is_const) {
    if (rl_src.wide) {
      if (rl_src.fp) {
        res = InexpensiveConstantDouble(mir_graph_->ConstantValueWide(rl_src));
      } else {
        res = InexpensiveConstantLong(mir_graph_->ConstantValueWide(rl_src));
      }
    } else {
      if (rl_src.fp) {
        res = InexpensiveConstantFloat(mir_graph_->ConstantValue(rl_src));
      } else {
        res = InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src));
      }
    }
  }
  return res;
}

void Mir2Lir::MarkSafepointPC(LIR* inst) {
  inst->def_mask = ENCODE_ALL;
  LIR* safepoint_pc = NewLIR0(kPseudoSafepointPC);
  DCHECK_EQ(safepoint_pc->def_mask, ENCODE_ALL);
}

bool Mir2Lir::FastInstance(uint32_t field_idx, bool is_put, int* field_offset, bool* is_volatile) {
  return cu_->compiler_driver->ComputeInstanceFieldInfo(
      field_idx, mir_graph_->GetCurrentDexCompilationUnit(), is_put, field_offset, is_volatile);
}

/* Remove a LIR from the list. */
void Mir2Lir::UnlinkLIR(LIR* lir) {
  if (UNLIKELY(lir == first_lir_insn_)) {
    first_lir_insn_ = lir->next;
    if (lir->next != NULL) {
      lir->next->prev = NULL;
    } else {
      DCHECK(lir->next == NULL);
      DCHECK(lir == last_lir_insn_);
      last_lir_insn_ = NULL;
    }
  } else if (lir == last_lir_insn_) {
    last_lir_insn_ = lir->prev;
    lir->prev->next = NULL;
  } else if ((lir->prev != NULL) && (lir->next != NULL)) {
    lir->prev->next = lir->next;
    lir->next->prev = lir->prev;
  }
}

/* Convert an instruction to a NOP */
void Mir2Lir::NopLIR(LIR* lir) {
  lir->flags.is_nop = true;
  if (!cu_->verbose) {
    UnlinkLIR(lir);
  }
}

void Mir2Lir::SetMemRefType(LIR* lir, bool is_load, int mem_type) {
  uint64_t *mask_ptr;
  uint64_t mask = ENCODE_MEM;
  DCHECK(GetTargetInstFlags(lir->opcode) & (IS_LOAD | IS_STORE));
  if (is_load) {
    mask_ptr = &lir->use_mask;
  } else {
    mask_ptr = &lir->def_mask;
  }
  /* Clear out the memref flags */
  *mask_ptr &= ~mask;
  /* ..and then add back the one we need */
  switch (mem_type) {
    case kLiteral:
      DCHECK(is_load);
      *mask_ptr |= ENCODE_LITERAL;
      break;
    case kDalvikReg:
      *mask_ptr |= ENCODE_DALVIK_REG;
      break;
    case kHeapRef:
      *mask_ptr |= ENCODE_HEAP_REF;
      break;
    case kMustNotAlias:
      /* Currently only loads can be marked as kMustNotAlias */
      DCHECK(!(GetTargetInstFlags(lir->opcode) & IS_STORE));
      *mask_ptr |= ENCODE_MUST_NOT_ALIAS;
      break;
    default:
      LOG(FATAL) << "Oat: invalid memref kind - " << mem_type;
  }
}

/*
 * Mark load/store instructions that access Dalvik registers through the stack.
 */
void Mir2Lir::AnnotateDalvikRegAccess(LIR* lir, int reg_id, bool is_load,
                                      bool is64bit) {
  SetMemRefType(lir, is_load, kDalvikReg);

  /*
   * Store the Dalvik register id in alias_info. Mark the MSB if it is a 64-bit
   * access.
   */
  lir->alias_info = ENCODE_ALIAS_INFO(reg_id, is64bit);
}

/*
 * Debugging macros
 */
#define DUMP_RESOURCE_MASK(X)

/* Pretty-print a LIR instruction */
void Mir2Lir::DumpLIRInsn(LIR* lir, unsigned char* base_addr) {
  int offset = lir->offset;
  int dest = lir->operands[0];
  const bool dump_nop = (cu_->enable_debug & (1 << kDebugShowNops));

  /* Handle pseudo-ops individually, and all regular insns as a group */
  switch (lir->opcode) {
    case kPseudoMethodEntry:
      LOG(INFO) << "-------- method entry "
                << PrettyMethod(cu_->method_idx, *cu_->dex_file);
      break;
    case kPseudoMethodExit:
      LOG(INFO) << "-------- Method_Exit";
      break;
    case kPseudoBarrier:
      LOG(INFO) << "-------- BARRIER";
      break;
    case kPseudoEntryBlock:
      LOG(INFO) << "-------- entry offset: 0x" << std::hex << dest;
      break;
    case kPseudoDalvikByteCodeBoundary:
      if (lir->operands[0] == 0) {
        lir->operands[0] = reinterpret_cast<uintptr_t>("No instruction string");
      }
      LOG(INFO) << "-------- dalvik offset: 0x" << std::hex
                << lir->dalvik_offset << " @ " << reinterpret_cast<char*>(lir->operands[0]);
      break;
    case kPseudoExitBlock:
      LOG(INFO) << "-------- exit offset: 0x" << std::hex << dest;
      break;
    case kPseudoPseudoAlign4:
      LOG(INFO) << reinterpret_cast<uintptr_t>(base_addr) + offset << " (0x" << std::hex
                << offset << "): .align4";
      break;
    case kPseudoEHBlockLabel:
      LOG(INFO) << "Exception_Handling:";
      break;
    case kPseudoTargetLabel:
    case kPseudoNormalBlockLabel:
      LOG(INFO) << "L" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoThrowTarget:
      LOG(INFO) << "LT" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoIntrinsicRetry:
      LOG(INFO) << "IR" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSuspendTarget:
      LOG(INFO) << "LS" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSafepointPC:
      LOG(INFO) << "LsafepointPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoExportedPC:
      LOG(INFO) << "LexportedPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoCaseLabel:
      LOG(INFO) << "LC" << reinterpret_cast<void*>(lir) << ": Case target 0x"
                << std::hex << lir->operands[0] << "|" << std::dec << lir->operands[0];
      break;
    default:
      if (lir->flags.is_nop && !dump_nop) {
        break;
      } else {
        std::string op_name(BuildInsnString(GetTargetInstName(lir->opcode),
                                            lir, base_addr));
        std::string op_operands(BuildInsnString(GetTargetInstFmt(lir->opcode),
                                                lir, base_addr));
        LOG(INFO) << StringPrintf("%05x: %-9s%s%s",
                                  reinterpret_cast<unsigned int>(base_addr + offset),
                                  op_name.c_str(), op_operands.c_str(),
                                  lir->flags.is_nop ? "(nop)" : "");
      }
      break;
  }

  if (lir->use_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, lir->use_mask, "use"));
  }
  if (lir->def_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, lir->def_mask, "def"));
  }
}

void Mir2Lir::DumpPromotionMap() {
  int num_regs = cu_->num_dalvik_registers + cu_->num_compiler_temps + 1;
  for (int i = 0; i < num_regs; i++) {
    PromotionMap v_reg_map = promotion_map_[i];
    std::string buf;
    if (v_reg_map.fp_location == kLocPhysReg) {
      StringAppendF(&buf, " : s%d", v_reg_map.FpReg & FpRegMask());
    }

    std::string buf3;
    if (i < cu_->num_dalvik_registers) {
      StringAppendF(&buf3, "%02d", i);
    } else if (i == mir_graph_->GetMethodSReg()) {
      buf3 = "Method*";
    } else {
      StringAppendF(&buf3, "ct%d", i - cu_->num_dalvik_registers);
    }

    LOG(INFO) << StringPrintf("V[%s] -> %s%d%s", buf3.c_str(),
                              v_reg_map.core_location == kLocPhysReg ?
                              "r" : "SP+", v_reg_map.core_location == kLocPhysReg ?
                              v_reg_map.core_reg : SRegOffset(i),
                              buf.c_str());
  }
}

/* Dump a mapping table */
void Mir2Lir::DumpMappingTable(const char* table_name, const char* descriptor,
                               const char* name, const Signature& signature,
                               const std::vector<uint32_t>& v) {
  if (v.size() > 0) {
    std::string line(StringPrintf("\n %s %s%s_%s_table[%zu] = {", table_name,
                                  descriptor, name, signature.ToString().c_str(), v.size()));
    std::replace(line.begin(), line.end(), ';', '_');
    LOG(INFO) << line;
    for (uint32_t i = 0; i < v.size(); i += 2) {
      line = StringPrintf(" {0x%05x, 0x%04x},", v[i], v[i+1]);
      LOG(INFO) << line;
    }
    LOG(INFO) << " };\n\n";
  }
}

/* Dump instructions and constant pool contents */
void Mir2Lir::CodegenDump() {
  LOG(INFO) << "Dumping LIR insns for "
            << PrettyMethod(cu_->method_idx, *cu_->dex_file);
  LIR* lir_insn;
  int insns_size = cu_->code_item->insns_size_in_code_units_;

  LOG(INFO) << "Regs (excluding ins) : " << cu_->num_regs;
  LOG(INFO) << "Ins : " << cu_->num_ins;
  LOG(INFO) << "Outs : " << cu_->num_outs;
  LOG(INFO) << "CoreSpills : " << num_core_spills_;
  LOG(INFO) << "FPSpills : " << num_fp_spills_;
  LOG(INFO) << "CompilerTemps : " << cu_->num_compiler_temps;
  LOG(INFO) << "Frame size : " << frame_size_;
  LOG(INFO) << "code size is " << total_size_ <<
            " bytes, Dalvik size is " << insns_size * 2;
  LOG(INFO) << "expansion factor: "
            << static_cast<float>(total_size_) / static_cast<float>(insns_size * 2);
  DumpPromotionMap();
  for (lir_insn = first_lir_insn_; lir_insn != NULL; lir_insn = lir_insn->next) {
    DumpLIRInsn(lir_insn, 0);
  }
  for (lir_insn = literal_list_; lir_insn != NULL; lir_insn = lir_insn->next) {
    LOG(INFO) << StringPrintf("%x (%04x): .word (%#x)", lir_insn->offset, lir_insn->offset,
                              lir_insn->operands[0]);
  }

  const DexFile::MethodId& method_id =
      cu_->dex_file->GetMethodId(cu_->method_idx);
  const Signature signature = cu_->dex_file->GetMethodSignature(method_id);
  const char* name = cu_->dex_file->GetMethodName(method_id);
  const char* descriptor(cu_->dex_file->GetMethodDeclaringClassDescriptor(method_id));

  // Dump mapping tables
  DumpMappingTable("PC2Dex_MappingTable", descriptor, name, signature, pc2dex_mapping_table_);
  DumpMappingTable("Dex2PC_MappingTable", descriptor, name, signature, dex2pc_mapping_table_);
}

/*
 * Search the existing constants in the literal pool for an exact or close match
 * within specified delta (greater or equal to 0).
 */
LIR* Mir2Lir::ScanLiteralPool(LIR* data_target, int value, unsigned int delta) {
  while (data_target) {
    if ((static_cast<unsigned>(value - data_target->operands[0])) <= delta)
      return data_target;
    data_target = data_target->next;
  }
  return NULL;
}

/* Search the existing constants in the literal pool for an exact wide match */
LIR* Mir2Lir::ScanLiteralPoolWide(LIR* data_target, int val_lo, int val_hi) {
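  // Wide constants are stored as two adjacent 32-bit entries, low word first (see AddWideData),
  // so a match requires a node holding val_lo immediately followed by one holding val_hi.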
  bool lo_match = false;
  LIR* lo_target = NULL;
  while (data_target) {
    if (lo_match && (data_target->operands[0] == val_hi)) {
      // Record high word in case we need to expand this later.
      lo_target->operands[1] = val_hi;
      return lo_target;
    }
    lo_match = false;
    if (data_target->operands[0] == val_lo) {
      lo_match = true;
      lo_target = data_target;
    }
    data_target = data_target->next;
  }
  return NULL;
}

/*
 * The following are building blocks to insert constants into the pool or
 * instruction streams.
 */

/* Add a 32-bit constant to the constant pool */
LIR* Mir2Lir::AddWordData(LIR* *constant_list_p, int value) {
  /* Add the constant to the literal pool */
  if (constant_list_p) {
    LIR* new_value = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), ArenaAllocator::kAllocData));
    new_value->operands[0] = value;
    new_value->next = *constant_list_p;
    *constant_list_p = new_value;
    return new_value;
  }
  return NULL;
}

/* Add a 64-bit constant to the constant pool or mixed with code */
LIR* Mir2Lir::AddWideData(LIR* *constant_list_p, int val_lo, int val_hi) {
  AddWordData(constant_list_p, val_hi);
  return AddWordData(constant_list_p, val_lo);
}

static void PushWord(std::vector<uint8_t>&buf, int data) {
  buf.push_back(data & 0xff);
  buf.push_back((data >> 8) & 0xff);
  buf.push_back((data >> 16) & 0xff);
  buf.push_back((data >> 24) & 0xff);
}

static void AlignBuffer(std::vector<uint8_t>&buf, size_t offset) {
  while (buf.size() < offset) {
    buf.push_back(0);
  }
}

/* Write the literal pool to the output stream */
void Mir2Lir::InstallLiteralPools() {
  AlignBuffer(code_buffer_, data_offset_);
  LIR* data_lir = literal_list_;
  while (data_lir != NULL) {
    PushWord(code_buffer_, data_lir->operands[0]);
    data_lir = NEXT_LIR(data_lir);
  }
  // Push code and method literals, record offsets for the compiler to patch.
  data_lir = code_literal_list_;
  while (data_lir != NULL) {
    uint32_t target = data_lir->operands[0];
    cu_->compiler_driver->AddCodePatch(cu_->dex_file,
                                       cu_->class_def_idx,
                                       cu_->method_idx,
                                       cu_->invoke_type,
                                       target,
                                       static_cast<InvokeType>(data_lir->operands[1]),
                                       code_buffer_.size());
    const DexFile::MethodId& id = cu_->dex_file->GetMethodId(target);
    // unique based on target to ensure code deduplication works
    uint32_t unique_patch_value = reinterpret_cast<uint32_t>(&id);
    PushWord(code_buffer_, unique_patch_value);
    data_lir = NEXT_LIR(data_lir);
  }
  data_lir = method_literal_list_;
  while (data_lir != NULL) {
    uint32_t target = data_lir->operands[0];
    cu_->compiler_driver->AddMethodPatch(cu_->dex_file,
                                         cu_->class_def_idx,
                                         cu_->method_idx,
                                         cu_->invoke_type,
                                         target,
                                         static_cast<InvokeType>(data_lir->operands[1]),
                                         code_buffer_.size());
    const DexFile::MethodId& id = cu_->dex_file->GetMethodId(target);
    // unique based on target to ensure code deduplication works
    uint32_t unique_patch_value = reinterpret_cast<uint32_t>(&id);
    PushWord(code_buffer_, unique_patch_value);
    data_lir = NEXT_LIR(data_lir);
  }
}

/* Write the switch tables to the output stream */
void Mir2Lir::InstallSwitchTables() {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable* tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    AlignBuffer(code_buffer_, tab_rec->offset);
    /*
     * For Arm, our reference point is the address of the bx
     * instruction that does the launch, so we have to subtract
     * the auto pc-advance. For other targets the reference point
     * is a label, so we can use the offset as-is.
     */
    int bx_offset = INVALID_OFFSET;
    switch (cu_->instruction_set) {
      case kThumb2:
        bx_offset = tab_rec->anchor->offset + 4;
        break;
      case kX86:
        bx_offset = 0;
        break;
      case kMips:
        bx_offset = tab_rec->anchor->offset;
        break;
      default: LOG(FATAL) << "Unexpected instruction set: " << cu_->instruction_set;
    }
    if (cu_->verbose) {
      LOG(INFO) << "Switch table for offset 0x" << std::hex << bx_offset;
    }
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      const int* keys = reinterpret_cast<const int*>(&(tab_rec->table[2]));
      for (int elems = 0; elems < tab_rec->table[1]; elems++) {
        int disp = tab_rec->targets[elems]->offset - bx_offset;
        if (cu_->verbose) {
          LOG(INFO) << " Case[" << elems << "] key: 0x"
                    << std::hex << keys[elems] << ", disp: 0x"
                    << std::hex << disp;
        }
        PushWord(code_buffer_, keys[elems]);
        PushWord(code_buffer_,
                 tab_rec->targets[elems]->offset - bx_offset);
      }
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      for (int elems = 0; elems < tab_rec->table[1]; elems++) {
        int disp = tab_rec->targets[elems]->offset - bx_offset;
        if (cu_->verbose) {
          LOG(INFO) << " Case[" << elems << "] disp: 0x"
                    << std::hex << disp;
        }
        PushWord(code_buffer_, tab_rec->targets[elems]->offset - bx_offset);
      }
    }
  }
}

/* Write the fill array data to the output stream */
void Mir2Lir::InstallFillArrayData() {
  GrowableArray<FillArrayData*>::Iterator iterator(&fill_array_data_);
  while (true) {
    Mir2Lir::FillArrayData *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    AlignBuffer(code_buffer_, tab_rec->offset);
    for (int i = 0; i < (tab_rec->size + 1) / 2; i++) {
      code_buffer_.push_back(tab_rec->table[i] & 0xFF);
      code_buffer_.push_back((tab_rec->table[i] >> 8) & 0xFF);
    }
  }
}

static int AssignLiteralOffsetCommon(LIR* lir, int offset) {
  for (; lir != NULL; lir = lir->next) {
    lir->offset = offset;
    offset += 4;
  }
  return offset;
}

// Make sure we have a code address for every declared catch entry
bool Mir2Lir::VerifyCatchEntries() {
  bool success = true;
  for (std::set<uint32_t>::const_iterator it = mir_graph_->catches_.begin();
       it != mir_graph_->catches_.end(); ++it) {
    uint32_t dex_pc = *it;
    bool found = false;
    for (size_t i = 0; i < dex2pc_mapping_table_.size(); i += 2) {
      if (dex_pc == dex2pc_mapping_table_[i+1]) {
        found = true;
        break;
      }
    }
    if (!found) {
      LOG(INFO) << "Missing native PC for catch entry @ 0x" << std::hex << dex_pc;
      success = false;
    }
  }
  // Now, try in the other direction
  for (size_t i = 0; i < dex2pc_mapping_table_.size(); i += 2) {
    uint32_t dex_pc = dex2pc_mapping_table_[i+1];
    if (mir_graph_->catches_.find(dex_pc) == mir_graph_->catches_.end()) {
      LOG(INFO) << "Unexpected catch entry @ dex pc 0x" << std::hex << dex_pc;
      success = false;
    }
  }
  if (!success) {
    LOG(INFO) << "Bad dex2pcMapping table in " << PrettyMethod(cu_->method_idx, *cu_->dex_file);
    LOG(INFO) << "Entries @ decode: " << mir_graph_->catches_.size() << ", Entries in table: "
              << dex2pc_mapping_table_.size()/2;
  }
  return success;
}


void Mir2Lir::CreateMappingTables() {
  for (LIR* tgt_lir = first_lir_insn_; tgt_lir != NULL; tgt_lir = NEXT_LIR(tgt_lir)) {
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
      pc2dex_mapping_table_.push_back(tgt_lir->offset);
      pc2dex_mapping_table_.push_back(tgt_lir->dalvik_offset);
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
      dex2pc_mapping_table_.push_back(tgt_lir->offset);
      dex2pc_mapping_table_.push_back(tgt_lir->dalvik_offset);
    }
  }
  if (kIsDebugBuild) {
    CHECK(VerifyCatchEntries());
  }
  CHECK_EQ(pc2dex_mapping_table_.size() & 1, 0U);
  CHECK_EQ(dex2pc_mapping_table_.size() & 1, 0U);
  uint32_t total_entries = (pc2dex_mapping_table_.size() + dex2pc_mapping_table_.size()) / 2;
  uint32_t pc2dex_entries = pc2dex_mapping_table_.size() / 2;
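  // The encoded table is laid out as: total entry count, pc2dex entry count, then the
  // pc2dex (native offset, dex pc) pairs, followed by the dex2pc pairs.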
  encoded_mapping_table_.PushBack(total_entries);
  encoded_mapping_table_.PushBack(pc2dex_entries);
  encoded_mapping_table_.InsertBack(pc2dex_mapping_table_.begin(), pc2dex_mapping_table_.end());
  encoded_mapping_table_.InsertBack(dex2pc_mapping_table_.begin(), dex2pc_mapping_table_.end());
  if (kIsDebugBuild) {
    // Verify the encoded table holds the expected data.
    MappingTable table(&encoded_mapping_table_.GetData()[0]);
    CHECK_EQ(table.TotalSize(), total_entries);
    CHECK_EQ(table.PcToDexSize(), pc2dex_entries);
    CHECK_EQ(table.DexToPcSize(), dex2pc_mapping_table_.size() / 2);
    MappingTable::PcToDexIterator it = table.PcToDexBegin();
    for (uint32_t i = 0; i < pc2dex_mapping_table_.size(); ++i, ++it) {
      CHECK_EQ(pc2dex_mapping_table_.at(i), it.NativePcOffset());
      ++i;
      CHECK_EQ(pc2dex_mapping_table_.at(i), it.DexPc());
    }
    MappingTable::DexToPcIterator it2 = table.DexToPcBegin();
    for (uint32_t i = 0; i < dex2pc_mapping_table_.size(); ++i, ++it2) {
      CHECK_EQ(dex2pc_mapping_table_.at(i), it2.NativePcOffset());
      ++i;
      CHECK_EQ(dex2pc_mapping_table_.at(i), it2.DexPc());
    }
  }
}

class NativePcToReferenceMapBuilder {
 public:
  NativePcToReferenceMapBuilder(std::vector<uint8_t>* table,
                                size_t entries, uint32_t max_native_offset,
                                size_t references_width) : entries_(entries),
                                references_width_(references_width), in_use_(entries),
                                table_(table) {
    // Compute width in bytes needed to hold max_native_offset.
    native_offset_width_ = 0;
    while (max_native_offset != 0) {
      native_offset_width_++;
      max_native_offset >>= 8;
    }
    // Resize table and set up header.
    table->resize((EntryWidth() * entries) + sizeof(uint32_t));
    CHECK_LT(native_offset_width_, 1U << 3);
    (*table)[0] = native_offset_width_ & 7;
    CHECK_LT(references_width_, 1U << 13);
    (*table)[0] |= (references_width_ << 3) & 0xFF;
    (*table)[1] = (references_width_ >> 5) & 0xFF;
    CHECK_LT(entries, 1U << 16);
    (*table)[2] = entries & 0xFF;
    (*table)[3] = (entries >> 8) & 0xFF;
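    // The 4-byte header written above packs: bits [0,3) of byte 0 hold native_offset_width_,
    // the 13 bits running from bit 3 of byte 0 through byte 1 hold references_width_, and
    // bytes 2-3 hold the entry count.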
  }

  void AddEntry(uint32_t native_offset, const uint8_t* references) {
    size_t table_index = TableIndex(native_offset);
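    // Entries are placed with open addressing: start at the hash bucket for native_offset and
    // probe linearly until a free slot is found.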
    while (in_use_[table_index]) {
      table_index = (table_index + 1) % entries_;
    }
    in_use_[table_index] = true;
    SetNativeOffset(table_index, native_offset);
    DCHECK_EQ(native_offset, GetNativeOffset(table_index));
    SetReferences(table_index, references);
  }

 private:
  size_t TableIndex(uint32_t native_offset) {
    return NativePcOffsetToReferenceMap::Hash(native_offset) % entries_;
  }

  uint32_t GetNativeOffset(size_t table_index) {
    uint32_t native_offset = 0;
    size_t table_offset = (table_index * EntryWidth()) + sizeof(uint32_t);
    for (size_t i = 0; i < native_offset_width_; i++) {
      native_offset |= (*table_)[table_offset + i] << (i * 8);
    }
    return native_offset;
  }

  void SetNativeOffset(size_t table_index, uint32_t native_offset) {
    size_t table_offset = (table_index * EntryWidth()) + sizeof(uint32_t);
    for (size_t i = 0; i < native_offset_width_; i++) {
      (*table_)[table_offset + i] = (native_offset >> (i * 8)) & 0xFF;
    }
  }

  void SetReferences(size_t table_index, const uint8_t* references) {
    size_t table_offset = (table_index * EntryWidth()) + sizeof(uint32_t);
    memcpy(&(*table_)[table_offset + native_offset_width_], references, references_width_);
  }

  size_t EntryWidth() const {
    return native_offset_width_ + references_width_;
  }

  // Number of entries in the table.
  const size_t entries_;
  // Number of bytes used to encode the reference bitmap.
  const size_t references_width_;
  // Number of bytes used to encode a native offset.
  size_t native_offset_width_;
  // Entries that are in use.
  std::vector<bool> in_use_;
  // The table we're building.
  std::vector<uint8_t>* const table_;
};

void Mir2Lir::CreateNativeGcMap() {
  const std::vector<uint32_t>& mapping_table = pc2dex_mapping_table_;
  uint32_t max_native_offset = 0;
  for (size_t i = 0; i < mapping_table.size(); i += 2) {
    uint32_t native_offset = mapping_table[i + 0];
    if (native_offset > max_native_offset) {
      max_native_offset = native_offset;
    }
  }
  MethodReference method_ref(cu_->dex_file, cu_->method_idx);
  const std::vector<uint8_t>* gc_map_raw = verifier::MethodVerifier::GetDexGcMap(method_ref);
  verifier::DexPcToReferenceMap dex_gc_map(&(*gc_map_raw)[4], gc_map_raw->size() - 4);
  // Compute native offset to references size.
  NativePcToReferenceMapBuilder native_gc_map_builder(&native_gc_map_,
                                                      mapping_table.size() / 2, max_native_offset,
                                                      dex_gc_map.RegWidth());

  for (size_t i = 0; i < mapping_table.size(); i += 2) {
    uint32_t native_offset = mapping_table[i + 0];
    uint32_t dex_pc = mapping_table[i + 1];
    const uint8_t* references = dex_gc_map.FindBitMap(dex_pc, false);
    CHECK(references != NULL) << "Missing ref for dex pc 0x" << std::hex << dex_pc;
    native_gc_map_builder.AddEntry(native_offset, references);
  }
}

/* Determine the offset of each literal field */
int Mir2Lir::AssignLiteralOffset(int offset) {
  offset = AssignLiteralOffsetCommon(literal_list_, offset);
  offset = AssignLiteralOffsetCommon(code_literal_list_, offset);
  offset = AssignLiteralOffsetCommon(method_literal_list_, offset);
  return offset;
}

int Mir2Lir::AssignSwitchTablesOffset(int offset) {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    tab_rec->offset = offset;
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      offset += tab_rec->table[1] * (sizeof(int) * 2);
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      offset += tab_rec->table[1] * sizeof(int);
    }
  }
  return offset;
}

int Mir2Lir::AssignFillArrayDataOffset(int offset) {
  GrowableArray<FillArrayData*>::Iterator iterator(&fill_array_data_);
  while (true) {
    Mir2Lir::FillArrayData *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    tab_rec->offset = offset;
    offset += tab_rec->size;
    // word align
    offset = (offset + 3) & ~3;
  }
  return offset;
}

// LIR offset assignment.
int Mir2Lir::AssignInsnOffsets() {
  LIR* lir;
  int offset = 0;

  for (lir = first_lir_insn_; lir != NULL; lir = NEXT_LIR(lir)) {
    lir->offset = offset;
    if (LIKELY(lir->opcode >= 0)) {
      if (!lir->flags.is_nop) {
        offset += lir->flags.size;
      }
    } else if (UNLIKELY(lir->opcode == kPseudoPseudoAlign4)) {
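      // An align-4 pseudo op pads the offset by 2 bytes only when it is not already 4-byte
      // aligned; operands[0] records whether the padding was emitted.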
      if (offset & 0x2) {
        offset += 2;
        lir->operands[0] = 1;
      } else {
        lir->operands[0] = 0;
      }
    }
    /* Pseudo opcodes don't consume space */
  }
  return offset;
}

/*
 * Walk the compilation unit and assign offsets to instructions
 * and literals and compute the total size of the compiled unit.
 */
void Mir2Lir::AssignOffsets() {
  int offset = AssignInsnOffsets();

  /* Const values have to be word aligned */
  offset = (offset + 3) & ~3;

  /* Set up offsets for literals */
  data_offset_ = offset;

  offset = AssignLiteralOffset(offset);

  offset = AssignSwitchTablesOffset(offset);

  offset = AssignFillArrayDataOffset(offset);

  total_size_ = offset;
}

/*
 * Go over each instruction in the list and calculate the offset from the top
 * before sending them off to the assembler. If an out-of-range branch distance is
 * seen, rearrange the instructions a bit to correct it.
 */
void Mir2Lir::AssembleLIR() {
  AssignOffsets();
  int assembler_retries = 0;
  /*
   * Assemble here. Note that we generate code with optimistic assumptions
   * and if found not to work, we'll have to redo the sequence and retry.
   */

  while (true) {
    AssemblerStatus res = AssembleInstructions(0);
    if (res == kSuccess) {
      break;
    } else {
      assembler_retries++;
      if (assembler_retries > MAX_ASSEMBLER_RETRIES) {
        CodegenDump();
        LOG(FATAL) << "Assembler error - too many retries";
      }
      // Redo offsets and try again
      AssignOffsets();
      code_buffer_.clear();
    }
  }

  // Install literals
  InstallLiteralPools();

  // Install switch tables
  InstallSwitchTables();

  // Install fill array data
  InstallFillArrayData();

  // Create the mapping table and native offset to reference map.
  CreateMappingTables();

  CreateNativeGcMap();
}

/*
 * Insert a kPseudoCaseLabel at the beginning of the Dalvik
 * offset vaddr. This label will be used to fix up the case
 * branch table during the assembly phase. All resource flags
 * are set to prevent code motion. KeyVal is just there for debugging.
 */
LIR* Mir2Lir::InsertCaseLabel(int vaddr, int keyVal) {
  LIR* boundary_lir = &block_label_list_[mir_graph_->FindBlock(vaddr)->id];
  LIR* new_label = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), ArenaAllocator::kAllocLIR));
  new_label->dalvik_offset = vaddr;
  new_label->opcode = kPseudoCaseLabel;
  new_label->operands[0] = keyVal;
  new_label->def_mask = ENCODE_ALL;
  InsertLIRAfter(boundary_lir, new_label);
  return new_label;
}

void Mir2Lir::MarkPackedCaseLabels(Mir2Lir::SwitchTable *tab_rec) {
  const uint16_t* table = tab_rec->table;
  int base_vaddr = tab_rec->vaddr;
  const int *targets = reinterpret_cast<const int*>(&table[4]);
  int entries = table[1];
  int low_key = s4FromSwitchData(&table[2]);
  for (int i = 0; i < entries; i++) {
    tab_rec->targets[i] = InsertCaseLabel(base_vaddr + targets[i], i + low_key);
  }
}

void Mir2Lir::MarkSparseCaseLabels(Mir2Lir::SwitchTable *tab_rec) {
  const uint16_t* table = tab_rec->table;
  int base_vaddr = tab_rec->vaddr;
  int entries = table[1];
  const int* keys = reinterpret_cast<const int*>(&table[2]);
  const int* targets = &keys[entries];
  for (int i = 0; i < entries; i++) {
    tab_rec->targets[i] = InsertCaseLabel(base_vaddr + targets[i], keys[i]);
  }
}

void Mir2Lir::ProcessSwitchTables() {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    if (tab_rec->table[0] == Instruction::kPackedSwitchSignature) {
      MarkPackedCaseLabels(tab_rec);
    } else if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      MarkSparseCaseLabels(tab_rec);
    } else {
      LOG(FATAL) << "Invalid switch table";
    }
  }
}

void Mir2Lir::DumpSparseSwitchTable(const uint16_t* table) {
  /*
   * Sparse switch data format:
   *  ushort ident = 0x0200   magic value
   *  ushort size             number of entries in the table; > 0
   *  int keys[size]          keys, sorted low-to-high; 32-bit aligned
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (2+size*4) 16-bit code units.
   */
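  // A minimal sketch of a two-entry table: 0x0200, 0x0002, key[0], key[1], target[0], target[1]
  // (2 + 2*4 = 10 code units in total).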
  uint16_t ident = table[0];
  int entries = table[1];
  const int* keys = reinterpret_cast<const int*>(&table[2]);
  const int* targets = &keys[entries];
  LOG(INFO) << "Sparse switch table - ident:0x" << std::hex << ident
            << ", entries: " << std::dec << entries;
  for (int i = 0; i < entries; i++) {
    LOG(INFO) << " Key[" << keys[i] << "] -> 0x" << std::hex << targets[i];
  }
}

void Mir2Lir::DumpPackedSwitchTable(const uint16_t* table) {
  /*
   * Packed switch data format:
   *  ushort ident = 0x0100   magic value
   *  ushort size             number of entries in the table
   *  int first_key           first (and lowest) switch case value
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (4+size*2) 16-bit code units.
   */
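  // A minimal sketch of a two-entry table: 0x0100, 0x0002, first_key (two code units),
  // target[0], target[1] (4 + 2*2 = 8 code units in total).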
  uint16_t ident = table[0];
  const int* targets = reinterpret_cast<const int*>(&table[4]);
  int entries = table[1];
  int low_key = s4FromSwitchData(&table[2]);
  LOG(INFO) << "Packed switch table - ident:0x" << std::hex << ident
            << ", entries: " << std::dec << entries << ", low_key: " << low_key;
  for (int i = 0; i < entries; i++) {
    LOG(INFO) << " Key[" << (i + low_key) << "] -> 0x" << std::hex
              << targets[i];
  }
}

/* Set up special LIR to mark a Dalvik byte-code instruction start for pretty printing */
void Mir2Lir::MarkBoundary(int offset, const char* inst_str) {
  NewLIR1(kPseudoDalvikByteCodeBoundary, reinterpret_cast<uintptr_t>(inst_str));
}

bool Mir2Lir::EvaluateBranch(Instruction::Code opcode, int32_t src1, int32_t src2) {
  bool is_taken;
  switch (opcode) {
    case Instruction::IF_EQ: is_taken = (src1 == src2); break;
    case Instruction::IF_NE: is_taken = (src1 != src2); break;
    case Instruction::IF_LT: is_taken = (src1 < src2); break;
    case Instruction::IF_GE: is_taken = (src1 >= src2); break;
    case Instruction::IF_GT: is_taken = (src1 > src2); break;
    case Instruction::IF_LE: is_taken = (src1 <= src2); break;
    case Instruction::IF_EQZ: is_taken = (src1 == 0); break;
    case Instruction::IF_NEZ: is_taken = (src1 != 0); break;
    case Instruction::IF_LTZ: is_taken = (src1 < 0); break;
    case Instruction::IF_GEZ: is_taken = (src1 >= 0); break;
    case Instruction::IF_GTZ: is_taken = (src1 > 0); break;
    case Instruction::IF_LEZ: is_taken = (src1 <= 0); break;
    default:
      LOG(FATAL) << "Unexpected opcode " << opcode;
      is_taken = false;
  }
  return is_taken;
}

// Convert relation of src1/src2 to src2/src1
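// For example, a kCondLt test on (src1, src2) becomes kCondGt when the operands are swapped.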
ConditionCode Mir2Lir::FlipComparisonOrder(ConditionCode before) {
  ConditionCode res;
  switch (before) {
    case kCondEq: res = kCondEq; break;
    case kCondNe: res = kCondNe; break;
    case kCondLt: res = kCondGt; break;
    case kCondGt: res = kCondLt; break;
    case kCondLe: res = kCondGe; break;
    case kCondGe: res = kCondLe; break;
    default:
      res = static_cast<ConditionCode>(0);
      LOG(FATAL) << "Unexpected ccode " << before;
  }
  return res;
}

// TODO: move to mir_to_lir.cc
Mir2Lir::Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena)
    : Backend(arena),
      literal_list_(NULL),
      method_literal_list_(NULL),
      code_literal_list_(NULL),
      cu_(cu),
      mir_graph_(mir_graph),
      switch_tables_(arena, 4, kGrowableArraySwitchTables),
      fill_array_data_(arena, 4, kGrowableArrayFillArrayData),
      throw_launchpads_(arena, 2048, kGrowableArrayThrowLaunchPads),
      suspend_launchpads_(arena, 4, kGrowableArraySuspendLaunchPads),
      intrinsic_launchpads_(arena, 2048, kGrowableArrayMisc),
      tempreg_info_(arena, 20, kGrowableArrayMisc),
      reginfo_map_(arena, 64, kGrowableArrayMisc),
      data_offset_(0),
      total_size_(0),
      block_label_list_(NULL),
      current_dalvik_offset_(0),
      reg_pool_(NULL),
      live_sreg_(0),
      num_core_spills_(0),
      num_fp_spills_(0),
      frame_size_(0),
      core_spill_mask_(0),
      fp_spill_mask_(0),
      first_lir_insn_(NULL),
      last_lir_insn_(NULL) {
  promotion_map_ = static_cast<PromotionMap*>
      (arena_->Alloc((cu_->num_dalvik_registers + cu_->num_compiler_temps + 1) *
                     sizeof(promotion_map_[0]), ArenaAllocator::kAllocRegAlloc));
}

void Mir2Lir::Materialize() {
  CompilerInitializeRegAlloc();  // Needs to happen after SSA naming

  /* Allocate Registers using simple local allocation scheme */
  SimpleRegAlloc();

  if (mir_graph_->IsSpecialCase()) {
    /*
     * Custom codegen for special cases. If for any reason the
     * special codegen doesn't succeed, first_lir_insn_ will be
     * left as NULL.
     */
    SpecialMIR2LIR(mir_graph_->GetSpecialCase());
  }

  /* Convert MIR to LIR, etc. */
  if (first_lir_insn_ == NULL) {
    MethodMIR2LIR();
  }

  /* Method is not empty */
  if (first_lir_insn_) {
    // mark the targets of switch statement case labels
    ProcessSwitchTables();

    /* Convert LIR into machine code. */
    AssembleLIR();

    if (cu_->verbose) {
      CodegenDump();
    }
  }
}

CompiledMethod* Mir2Lir::GetCompiledMethod() {
  // Combine vmap tables - core regs, then fp regs - into vmap_table
  std::vector<uint16_t> raw_vmap_table;
  // Core regs may have been inserted out of order - sort first
  std::sort(core_vmap_table_.begin(), core_vmap_table_.end());
  for (size_t i = 0; i < core_vmap_table_.size(); ++i) {
    // Copy, stripping out the phys register sort key
    raw_vmap_table.push_back(~(-1 << VREG_NUM_WIDTH) & core_vmap_table_[i]);
  }
  // If we have a frame, push a marker to take place of lr
  if (frame_size_ > 0) {
    raw_vmap_table.push_back(INVALID_VREG);
  } else {
    DCHECK_EQ(__builtin_popcount(core_spill_mask_), 0);
    DCHECK_EQ(__builtin_popcount(fp_spill_mask_), 0);
  }
  // Combine vmap tables - core regs, then fp regs. fp regs already sorted
  for (uint32_t i = 0; i < fp_vmap_table_.size(); i++) {
    raw_vmap_table.push_back(fp_vmap_table_[i]);
  }
  UnsignedLeb128EncodingVector vmap_encoder;
  // Prefix the encoded data with its size.
  vmap_encoder.PushBack(raw_vmap_table.size());
  for (uint16_t cur : raw_vmap_table) {
    vmap_encoder.PushBack(cur);
  }
  CompiledMethod* result =
      new CompiledMethod(*cu_->compiler_driver, cu_->instruction_set, code_buffer_, frame_size_,
                         core_spill_mask_, fp_spill_mask_, encoded_mapping_table_.GetData(),
                         vmap_encoder.GetData(), native_gc_map_);
  return result;
}

int Mir2Lir::ComputeFrameSize() {
  /* Figure out the frame size */
  static const uint32_t kAlignMask = kStackAlignment - 1;
  uint32_t size = (num_core_spills_ + num_fp_spills_ +
                   1 /* filler word */ + cu_->num_regs + cu_->num_outs +
                   cu_->num_compiler_temps + 1 /* cur_method* */)
                   * sizeof(uint32_t);
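  // E.g. (hypothetical counts) 2 core spills, 0 fp spills, 5 non-in Dalvik regs, 2 outs and no
  // compiler temps give (2 + 0 + 1 + 5 + 2 + 0 + 1) * 4 = 44 bytes, which is then rounded up
  // below to a multiple of kStackAlignment.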
  /* Align and set */
  return (size + kAlignMask) & ~(kAlignMask);
}

/*
 * Append an LIR instruction to the LIR list maintained by a compilation
 * unit
 */
void Mir2Lir::AppendLIR(LIR* lir) {
  if (first_lir_insn_ == NULL) {
    DCHECK(last_lir_insn_ == NULL);
    last_lir_insn_ = first_lir_insn_ = lir;
    lir->prev = lir->next = NULL;
  } else {
    last_lir_insn_->next = lir;
    lir->prev = last_lir_insn_;
    lir->next = NULL;
    last_lir_insn_ = lir;
  }
}

/*
 * Insert an LIR instruction before the current instruction, which cannot be the
 * first instruction.
 *
 * prev_lir <-> new_lir <-> current_lir
 */
void Mir2Lir::InsertLIRBefore(LIR* current_lir, LIR* new_lir) {
  DCHECK(current_lir->prev != NULL);
  LIR *prev_lir = current_lir->prev;

  prev_lir->next = new_lir;
  new_lir->prev = prev_lir;
  new_lir->next = current_lir;
  current_lir->prev = new_lir;
}

/*
 * Insert an LIR instruction after the current instruction, which cannot be the
 * first instruction.
 *
 * current_lir -> new_lir -> old_next
 */
void Mir2Lir::InsertLIRAfter(LIR* current_lir, LIR* new_lir) {
  new_lir->prev = current_lir;
  new_lir->next = current_lir->next;
  current_lir->next = new_lir;
  new_lir->next->prev = new_lir;
}

}  // namespace art