/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "stack_map.h"

#include <iomanip>
#include <stdint.h>

#include "art_method.h"
#include "base/indenter.h"
#include "base/stats.h"
#include "scoped_thread_state_change-inl.h"

namespace art {

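// Binary search (std::partition_point) for the lower bound of 'packed_pc' in the
// stack map table; the predicate is false for Catch stack maps, so they bound the
// searched range.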
BitTable<StackMap>::const_iterator CodeInfo::BinarySearchNativePc(uint32_t packed_pc) const {
  return std::partition_point(
      stack_maps_.begin(),
      stack_maps_.end(),
      [packed_pc](const StackMap& sm) {
        return sm.GetPackedNativePc() < packed_pc && sm.GetKind() != StackMap::Kind::Catch;
      });
}

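// Returns the Default or OSR stack map covering the given native pc offset, or a
// default-constructed (invalid) StackMap if there is none.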
StackMap CodeInfo::GetStackMapForNativePcOffset(uint32_t pc, InstructionSet isa) const {
  auto it = BinarySearchNativePc(StackMap::PackNativePc(pc, isa));
  // Start at the lower bound and iterate over all stack maps with the given native pc.
  for (; it != stack_maps_.end() && (*it).GetNativePcOffset(isa) == pc; ++it) {
    StackMap::Kind kind = static_cast<StackMap::Kind>((*it).GetKind());
    if (kind == StackMap::Kind::Default || kind == StackMap::Kind::OSR) {
      return *it;
    }
  }
  return StackMap();
}

// Scan backward to determine dex register locations at the given stack map.
// All registers for a stack map are combined - inlined registers are just appended,
// therefore 'first_dex_register' allows us to select a sub-range to decode.
void CodeInfo::DecodeDexRegisterMap(uint32_t stack_map_index,
                                    uint32_t first_dex_register,
                                    /*out*/ DexRegisterMap* map) const {
  // Count remaining work so we know when we have finished.
  uint32_t remaining_registers = map->size();

  // Keep scanning backwards and collect the most recent location of each register.
  for (int32_t s = stack_map_index; s >= 0 && remaining_registers != 0; s--) {
    StackMap stack_map = GetStackMapAt(s);
    DCHECK_LE(stack_map_index - s, kMaxDexRegisterMapSearchDistance) << "Unbounded search";

    // The mask specifies which registers were modified in this stack map.
    // NB: the mask can be shorter than expected if trailing zero bits were removed.
    uint32_t mask_index = stack_map.GetDexRegisterMaskIndex();
    if (mask_index == StackMap::kNoValue) {
      continue;  // Nothing changed at this stack map.
    }
    BitMemoryRegion mask = dex_register_masks_.GetBitMemoryRegion(mask_index);
    if (mask.size_in_bits() <= first_dex_register) {
      continue;  // Nothing changed after the first register we are interested in.
    }

    // The map stores one catalogue index for each modified register location.
    uint32_t map_index = stack_map.GetDexRegisterMapIndex();
    DCHECK_NE(map_index, StackMap::kNoValue);

    // Skip the initial registers which we are not interested in (to get to inlined registers).
    map_index += mask.PopCount(0, first_dex_register);
    mask = mask.Subregion(first_dex_register, mask.size_in_bits() - first_dex_register);

    // Update registers that we see for the first time (i.e. their most recent value).
    DexRegisterLocation* regs = map->data();
    const uint32_t end = std::min<uint32_t>(map->size(), mask.size_in_bits());
    const size_t kNumBits = BitSizeOf<uint32_t>();
    for (uint32_t reg = 0; reg < end; reg += kNumBits) {
      // Process the mask in chunks of kNumBits for performance.
      uint32_t bits = mask.LoadBits(reg, std::min<uint32_t>(end - reg, kNumBits));
      while (bits != 0) {
        uint32_t bit = CTZ(bits);
        if (regs[reg + bit].GetKind() == DexRegisterLocation::Kind::kInvalid) {
          regs[reg + bit] = GetDexRegisterCatalogEntry(dex_register_maps_.Get(map_index));
          remaining_registers--;
        }
        map_index++;
        bits ^= 1u << bit;  // Clear the bit.
      }
    }
  }

  // Set any remaining registers to None (which is the default state at the first stack map).
  if (remaining_registers != 0) {
    DexRegisterLocation* regs = map->data();
    for (uint32_t r = 0; r < map->size(); r++) {
      if (regs[r].GetKind() == DexRegisterLocation::Kind::kInvalid) {
        regs[r] = DexRegisterLocation::None();
      }
    }
  }
}

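// Record the bit size of a single bit-table (header plus each non-empty column)
// as a child of the given parent statistics node.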
template<typename Accessor>
static void AddTableSizeStats(const char* table_name,
                              const BitTable<Accessor>& table,
                              /*out*/ Stats* parent) {
  Stats* table_stats = parent->Child(table_name);
  table_stats->AddBits(table.BitSize());
  table_stats->Child("Header")->AddBits(table.HeaderBitSize());
  const char* const* column_names = GetBitTableColumnNames<Accessor>();
  for (size_t c = 0; c < table.NumColumns(); c++) {
    if (table.NumColumnBits(c) > 0) {
      Stats* column_stats = table_stats->Child(column_names[c]);
      column_stats->AddBits(table.NumRows() * table.NumColumnBits(c), table.NumRows());
    }
  }
}

void CodeInfo::AddSizeStats(/*out*/ Stats* parent) const {
  Stats* stats = parent->Child("CodeInfo");
  stats->AddBytes(size_);
  stats->Child("Header")->AddBytes(UnsignedLeb128Size(size_));
  AddTableSizeStats<StackMap>("StackMaps", stack_maps_, stats);
  AddTableSizeStats<RegisterMask>("RegisterMasks", register_masks_, stats);
  AddTableSizeStats<MaskInfo>("StackMasks", stack_masks_, stats);
  AddTableSizeStats<InvokeInfo>("InvokeInfos", invoke_infos_, stats);
  AddTableSizeStats<InlineInfo>("InlineInfos", inline_infos_, stats);
  AddTableSizeStats<MaskInfo>("DexRegisterMasks", dex_register_masks_, stats);
  AddTableSizeStats<DexRegisterMapInfo>("DexRegisterMaps", dex_register_maps_, stats);
  AddTableSizeStats<DexRegisterInfo>("DexRegisterCatalog", dex_register_catalog_, stats);
}

void DexRegisterMap::Dump(VariableIndentationOutputStream* vios) const {
  if (HasAnyLiveDexRegisters()) {
    ScopedIndentation indent1(vios);
    for (size_t i = 0; i < size(); ++i) {
      DexRegisterLocation reg = (*this)[i];
      if (reg.IsLive()) {
        vios->Stream() << "v" << i << ":" << reg << " ";
      }
    }
    vios->Stream() << "\n";
  }
}

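// Print the layout of a bit-table (row count and per-column bit widths) and, if
// verbose, every row; if 'is_mask' is set, rows are printed as bit strings.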
template<typename Accessor>
static void DumpTable(VariableIndentationOutputStream* vios,
                      const char* table_name,
                      const BitTable<Accessor>& table,
                      bool verbose,
                      bool is_mask = false) {
  if (table.NumRows() != 0) {
    vios->Stream() << table_name << " BitSize=" << table.BitSize();
    vios->Stream() << " Rows=" << table.NumRows() << " Bits={";
    const char* const* column_names = GetBitTableColumnNames<Accessor>();
    for (size_t c = 0; c < table.NumColumns(); c++) {
      vios->Stream() << (c != 0 ? " " : "");
      vios->Stream() << column_names[c] << "=" << table.NumColumnBits(c);
    }
    vios->Stream() << "}\n";
    if (verbose) {
      ScopedIndentation indent1(vios);
      for (size_t r = 0; r < table.NumRows(); r++) {
        vios->Stream() << "[" << std::right << std::setw(3) << r << "]={";
        for (size_t c = 0; c < table.NumColumns(); c++) {
          vios->Stream() << (c != 0 ? " " : "");
          if (is_mask) {
            BitMemoryRegion bits = table.GetBitMemoryRegion(r, c);
            for (size_t b = 0, e = bits.size_in_bits(); b < e; b++) {
              vios->Stream() << bits.LoadBit(e - b - 1);
            }
          } else {
            vios->Stream() << std::right << std::setw(8) << static_cast<int32_t>(table.Get(r, c));
          }
        }
        vios->Stream() << "}\n";
      }
    }
  }
}

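// Dump the whole CodeInfo: the layout of every bit-table and, if verbose, each
// stack map together with its dex register map and inline info.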
void CodeInfo::Dump(VariableIndentationOutputStream* vios,
                    uint32_t code_offset,
                    bool verbose,
                    InstructionSet instruction_set,
                    const MethodInfo& method_info) const {
  vios->Stream()
      << "CodeInfo"
      << " BitSize=" << size_ * kBitsPerByte
      << "\n";
  ScopedIndentation indent1(vios);
  DumpTable<StackMap>(vios, "StackMaps", stack_maps_, verbose);
  DumpTable<RegisterMask>(vios, "RegisterMasks", register_masks_, verbose);
  DumpTable<MaskInfo>(vios, "StackMasks", stack_masks_, verbose, true /* is_mask */);
  DumpTable<InvokeInfo>(vios, "InvokeInfos", invoke_infos_, verbose);
  DumpTable<InlineInfo>(vios, "InlineInfos", inline_infos_, verbose);
  DumpTable<MaskInfo>(vios, "DexRegisterMasks", dex_register_masks_, verbose, true /* is_mask */);
  DumpTable<DexRegisterMapInfo>(vios, "DexRegisterMaps", dex_register_maps_, verbose);
  DumpTable<DexRegisterInfo>(vios, "DexRegisterCatalog", dex_register_catalog_, verbose);

  // Display stack maps along with (live) Dex register maps.
  if (verbose) {
    for (size_t i = 0; i < GetNumberOfStackMaps(); ++i) {
      StackMap stack_map = GetStackMapAt(i);
      stack_map.Dump(vios, *this, method_info, code_offset, instruction_set);
    }
  }
}

void StackMap::Dump(VariableIndentationOutputStream* vios,
                    const CodeInfo& code_info,
                    const MethodInfo& method_info,
                    uint32_t code_offset,
                    InstructionSet instruction_set) const {
  const uint32_t pc_offset = GetNativePcOffset(instruction_set);
  vios->Stream()
      << "StackMap[" << Row() << "]"
      << std::hex
      << " (native_pc=0x" << code_offset + pc_offset
      << ", dex_pc=0x" << GetDexPc()
      << ", register_mask=0x" << code_info.GetRegisterMaskOf(*this)
      << std::dec
      << ", stack_mask=0b";
  BitMemoryRegion stack_mask = code_info.GetStackMaskOf(*this);
  for (size_t i = 0, e = stack_mask.size_in_bits(); i < e; ++i) {
    vios->Stream() << stack_mask.LoadBit(e - i - 1);
  }
  vios->Stream() << ")\n";
  code_info.GetDexRegisterMapOf(*this).Dump(vios);
  uint32_t depth = code_info.GetInlineDepthOf(*this);
  for (size_t d = 0; d < depth; d++) {
    InlineInfo inline_info = code_info.GetInlineInfoAtDepth(*this, d);
    inline_info.Dump(vios, code_info, *this, method_info);
  }
}

void InlineInfo::Dump(VariableIndentationOutputStream* vios,
                      const CodeInfo& code_info,
                      const StackMap& stack_map,
                      const MethodInfo& method_info) const {
  uint32_t depth = Row() - stack_map.GetInlineInfoIndex();
  vios->Stream()
      << "InlineInfo[" << Row() << "]"
      << " (depth=" << depth
      << std::hex
      << ", dex_pc=0x" << GetDexPc();
  if (EncodesArtMethod()) {
    ScopedObjectAccess soa(Thread::Current());
    vios->Stream() << ", method=" << GetArtMethod()->PrettyMethod();
  } else {
    vios->Stream()
        << std::dec
        << ", method_index=" << GetMethodIndex(method_info);
  }
  vios->Stream() << ")\n";
  code_info.GetDexRegisterMapAtDepth(depth, stack_map).Dump(vios);
}

}  // namespace art