blob: 7a60d4aa20433c6b387377e47d13e00b9078270f [file] [log] [blame]
Calin Juravlec416d332015-04-23 16:01:43 +01001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
Nicolas Geoffray5d37c152017-01-12 13:25:19 +000016
Calin Juravlec416d332015-04-23 16:01:43 +010017#include "stack_map_stream.h"
18
David Srbecky049d6812018-05-18 14:46:49 +010019#include <memory>
20
Andreas Gampe90b936d2017-01-31 08:58:55 -080021#include "art_method-inl.h"
David Srbecky45aa5982016-03-18 02:15:09 +000022#include "base/stl_util.h"
David Sehr9e734c72018-01-04 17:56:19 -080023#include "dex/dex_file_types.h"
Nicolas Geoffrayfbdfa6d2017-02-03 10:43:13 +000024#include "optimizing/optimizing_compiler.h"
Nicolas Geoffray5d37c152017-01-12 13:25:19 +000025#include "runtime.h"
26#include "scoped_thread_state_change-inl.h"
David Srbecky71ec1cc2018-05-18 15:57:25 +010027#include "stack_map.h"
Nicolas Geoffray5d37c152017-01-12 13:25:19 +000028
Calin Juravlec416d332015-04-23 16:01:43 +010029namespace art {
30
David Srbecky049d6812018-05-18 14:46:49 +010031constexpr static bool kVerifyStackMaps = kIsDebugBuild;
32
David Srbeckyd02b23f2018-05-29 23:27:22 +010033uint32_t StackMapStream::GetStackMapNativePcOffset(size_t i) {
34 return StackMap::UnpackNativePc(stack_maps_[i].packed_native_pc, instruction_set_);
35}
36
37void StackMapStream::SetStackMapNativePcOffset(size_t i, uint32_t native_pc_offset) {
38 stack_maps_[i].packed_native_pc = StackMap::PackNativePc(native_pc_offset, instruction_set_);
39}
40
// Begin recording a new stack map entry for the given dex PC / native PC pair.
// Dex register locations and inline frames are accumulated afterwards via
// AddDexRegisterEntry() / BeginInlineInfoEntry() until EndStackMapEntry().
void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
                                        uint32_t native_pc_offset,
                                        uint32_t register_mask,
                                        BitVector* stack_mask,
                                        uint32_t num_dex_registers,
                                        uint8_t inlining_depth) {
  DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = true;

  // Side-table indices start out as kNoValue and are filled in on demand.
  current_stack_map_ = StackMapEntry {
    .packed_native_pc = StackMap::PackNativePc(native_pc_offset, instruction_set_),
    .dex_pc = dex_pc,
    .register_mask_index = kNoValue,
    .stack_mask_index = kNoValue,
    .inline_info_index = kNoValue,
    .dex_register_mask_index = kNoValue,
    .dex_register_map_index = kNoValue,
  };
  if (register_mask != 0) {
    // Encode as (value >> shift, shift) so trailing zero bits are not stored.
    uint32_t shift = LeastSignificantBit(register_mask);
    RegisterMaskEntry entry = { register_mask >> shift, shift };
    current_stack_map_.register_mask_index = register_masks_.Dedup(&entry);
  }
  // The compiler assumes the bit vector will be read during PrepareForFillIn(),
  // and it might modify the data before that. Therefore, just store the pointer.
  // See ClearSpillSlotsFromLoopPhisInStackMap in code_generator.h.
  lazy_stack_masks_.push_back(stack_mask);
  current_inline_infos_.clear();
  current_dex_registers_.clear();
  expected_num_dex_registers_ = num_dex_registers;

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    // Create lambda method, which will be executed at the very end to verify data.
    // Parameters and local variables will be captured(stored) by the lambda "[=]".
    // NOTE: "[=]" also captures `this` (for instruction_set_ etc.); the stream
    // must outlive the dchecks_, which FillInCodeInfo() guarantees.
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      CHECK_EQ(stack_map.GetNativePcOffset(instruction_set_), native_pc_offset);
      CHECK_EQ(stack_map.GetDexPc(), dex_pc);
      CHECK_EQ(code_info.GetRegisterMaskOf(stack_map), register_mask);
      // The stored mask may be longer than the input; any extra bits must be zero.
      BitMemoryRegion seen_stack_mask = code_info.GetStackMaskOf(stack_map);
      CHECK_GE(seen_stack_mask.size_in_bits(), stack_mask ? stack_mask->GetNumberOfBits() : 0);
      for (size_t b = 0; b < seen_stack_mask.size_in_bits(); b++) {
        CHECK_EQ(seen_stack_mask.LoadBit(b), stack_mask != nullptr && stack_mask->IsBitSet(b));
      }
      CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
      if (inlining_depth != 0) {
        CHECK_EQ(code_info.GetInlineInfoOf(stack_map).GetDepth(), inlining_depth);
      }
      CHECK_EQ(stack_map.HasDexRegisterMap(), (num_dex_registers != 0));
    });
  }
}
94
// Finish the current stack map: attach the accumulated inline chain (if any)
// and commit the entry to the stack map table.
void StackMapStream::EndStackMapEntry() {
  DCHECK(in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = false;
  // AddDexRegisterEntry() must have been called exactly `expected_num_dex_registers_` times.
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());

  // Generate index into the InlineInfo table.
  if (!current_inline_infos_.empty()) {
    // Mark the final entry of the chain so decoders know where it ends.
    current_inline_infos_.back().is_last = InlineInfo::kLast;
    current_stack_map_.inline_info_index =
        inline_infos_.Dedup(current_inline_infos_.data(), current_inline_infos_.size());
  }

  stack_maps_.Add(current_stack_map_);
}
109
Nicolas Geoffrayb1d0f3f2015-05-14 12:41:51 +0100110void StackMapStream::AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t value) {
David Srbecky71ec1cc2018-05-18 15:57:25 +0100111 current_dex_registers_.push_back(DexRegisterLocation(kind, value));
Calin Juravlec416d332015-04-23 16:01:43 +0100112
David Srbecky71ec1cc2018-05-18 15:57:25 +0100113 // We have collected all the dex registers for StackMap/InlineInfo - create the map.
114 if (current_dex_registers_.size() == expected_num_dex_registers_) {
115 CreateDexRegisterMap();
Calin Juravlec416d332015-04-23 16:01:43 +0100116 }
117}
118
// Record invoke information (invoke type + dex method index) keyed by the
// current stack map's native PC.
void StackMapStream::AddInvoke(InvokeType invoke_type, uint32_t dex_method_index) {
  uint32_t packed_native_pc = current_stack_map_.packed_native_pc;
  size_t invoke_info_index = invoke_infos_.size();
  invoke_infos_.Add(InvokeInfoEntry {
    .packed_native_pc = packed_native_pc,
    .invoke_type = invoke_type,
    // The method index is deduplicated through the shared method info table.
    .method_info_index = method_infos_.Dedup(&dex_method_index),
  });

  if (kVerifyStackMaps) {
    // Deferred check: re-read the encoded InvokeInfo and compare it to the inputs.
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      InvokeInfo invoke_info = code_info.GetInvokeInfo(invoke_info_index);
      CHECK_EQ(invoke_info.GetNativePcOffset(instruction_set_),
               StackMap::UnpackNativePc(packed_native_pc, instruction_set_));
      CHECK_EQ(invoke_info.GetInvokeType(), invoke_type);
      CHECK_EQ(method_infos_[invoke_info.GetMethodIndexIdx()], dex_method_index);
    });
  }
}
138
// Begin one inlined frame within the current stack map. The callee is encoded
// either as a raw ArtMethod* split into two 32-bit halves, or as an index into
// the method info table, depending on EncodeArtMethodInInlineInfo().
void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
                                          uint32_t dex_pc,
                                          uint32_t num_dex_registers,
                                          const DexFile* outer_dex_file) {
  DCHECK(!in_inline_info_) << "Mismatched Begin/End calls";
  in_inline_info_ = true;
  // The caller frame's dex registers must be complete before nesting deeper.
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());

  InlineInfoEntry entry = {
    .is_last = InlineInfo::kMore,  // Patched to kLast for the final entry in EndStackMapEntry().
    .dex_pc = dex_pc,
    .method_info_index = kNoValue,
    .art_method_hi = kNoValue,
    .art_method_lo = kNoValue,
    .dex_register_mask_index = kNoValue,
    .dex_register_map_index = kNoValue,
  };
  if (EncodeArtMethodInInlineInfo(method)) {
    entry.art_method_hi = High32Bits(reinterpret_cast<uintptr_t>(method));
    entry.art_method_lo = Low32Bits(reinterpret_cast<uintptr_t>(method));
  } else {
    if (dex_pc != static_cast<uint32_t>(-1) && kIsDebugBuild) {
      ScopedObjectAccess soa(Thread::Current());
      // A method index is only meaningful relative to the enclosing dex file.
      DCHECK(IsSameDexFile(*outer_dex_file, *method->GetDexFile()));
    }
    uint32_t dex_method_index = method->GetDexMethodIndexUnchecked();
    entry.method_info_index = method_infos_.Dedup(&dex_method_index);
  }
  current_inline_infos_.push_back(entry);

  // Dex registers added from now on belong to this inline frame.
  current_dex_registers_.clear();
  expected_num_dex_registers_ = num_dex_registers;

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    size_t depth = current_inline_infos_.size() - 1;
    // Deferred check: verify this frame's encoding once everything is written.
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
      CHECK_EQ(inline_info.GetDexPcAtDepth(depth), dex_pc);
      bool encode_art_method = EncodeArtMethodInInlineInfo(method);
      CHECK_EQ(inline_info.EncodesArtMethodAtDepth(depth), encode_art_method);
      if (encode_art_method) {
        CHECK_EQ(inline_info.GetArtMethodAtDepth(depth), method);
      } else {
        CHECK_EQ(method_infos_[inline_info.GetMethodIndexIdxAtDepth(depth)],
                 method->GetDexMethodIndexUnchecked());
      }
      CHECK_EQ(inline_info.HasDexRegisterMapAtDepth(depth), (num_dex_registers != 0));
    });
  }
}
191
192void StackMapStream::EndInlineInfoEntry() {
David Srbecky71ec1cc2018-05-18 15:57:25 +0100193 DCHECK(in_inline_info_) << "Mismatched Begin/End calls";
194 in_inline_info_ = false;
195 DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
Calin Juravlec416d332015-04-23 16:01:43 +0100196}
197
// Create dex register map (bitmap + indices + catalogue entries)
// based on the currently accumulated list of DexRegisterLocations.
void StackMapStream::CreateDexRegisterMap() {
  // Create mask and map based on current registers.
  // The mask has one bit per register; only live registers get a map entry.
  temp_dex_register_mask_.ClearAllBits();
  temp_dex_register_map_.clear();
  for (size_t i = 0; i < current_dex_registers_.size(); i++) {
    DexRegisterLocation reg = current_dex_registers_[i];
    if (reg.IsLive()) {
      DexRegisterEntry entry = DexRegisterEntry {
        .kind = static_cast<uint32_t>(reg.GetKind()),
        .packed_value = DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue()),
      };
      temp_dex_register_mask_.SetBit(i);
      // The (kind, value) pair is deduplicated through the shared catalog; the
      // map stores only the catalog index.
      temp_dex_register_map_.push_back(dex_register_catalog_.Dedup(&entry));
    }
  }

  // Set the mask and map for the current StackMap/InlineInfo.
  uint32_t mask_index = StackMap::kNoValue;  // Represents mask with all zero bits.
  if (temp_dex_register_mask_.GetNumberOfBits() != 0) {
    mask_index = dex_register_masks_.Dedup(temp_dex_register_mask_.GetRawStorage(),
                                           temp_dex_register_mask_.GetNumberOfBits());
  }
  uint32_t map_index = dex_register_maps_.Dedup(temp_dex_register_map_.data(),
                                                temp_dex_register_map_.size());
  // Inside an inline frame, the registers belong to it, not to the outer stack map.
  if (!current_inline_infos_.empty()) {
    current_inline_infos_.back().dex_register_mask_index = mask_index;
    current_inline_infos_.back().dex_register_map_index = map_index;
  } else {
    current_stack_map_.dex_register_mask_index = mask_index;
    current_stack_map_.dex_register_map_index = map_index;
  }

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    int32_t depth = current_inline_infos_.size() - 1;  // -1 means "not inlined".
    // We need to make copy of the current registers for later (when the check is run).
    // shared_ptr keeps the copy alive inside the stored (copyable) lambda.
    auto expected_dex_registers = std::make_shared<std::vector<DexRegisterLocation>>(
        current_dex_registers_.begin(), current_dex_registers_.end());
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      size_t num_dex_registers = expected_dex_registers->size();
      DexRegisterMap map = (depth == -1)
          ? code_info.GetDexRegisterMapOf(stack_map, num_dex_registers)
          : code_info.GetDexRegisterMapAtDepth(depth,
                                               code_info.GetInlineInfoOf(stack_map),
                                               num_dex_registers);
      CHECK_EQ(map.size(), num_dex_registers);
      for (size_t r = 0; r < num_dex_registers; r++) {
        CHECK_EQ(expected_dex_registers->at(r), map.Get(r));
      }
    });
  }
}
253
Mathieu Chartiercbcedbf2017-03-12 22:24:50 -0700254void StackMapStream::FillInMethodInfo(MemoryRegion region) {
255 {
David Srbecky71ec1cc2018-05-18 15:57:25 +0100256 MethodInfo info(region.begin(), method_infos_.size());
257 for (size_t i = 0; i < method_infos_.size(); ++i) {
258 info.SetMethodIndex(i, method_infos_[i]);
Mathieu Chartiercbcedbf2017-03-12 22:24:50 -0700259 }
260 }
David Srbecky049d6812018-05-18 14:46:49 +0100261 if (kVerifyStackMaps) {
Mathieu Chartiercbcedbf2017-03-12 22:24:50 -0700262 // Check the data matches.
263 MethodInfo info(region.begin());
264 const size_t count = info.NumMethodIndices();
David Srbecky71ec1cc2018-05-18 15:57:25 +0100265 DCHECK_EQ(count, method_infos_.size());
Mathieu Chartiercbcedbf2017-03-12 22:24:50 -0700266 for (size_t i = 0; i < count; ++i) {
David Srbecky71ec1cc2018-05-18 15:57:25 +0100267 DCHECK_EQ(info.GetMethodIndex(i), method_infos_[i]);
Mathieu Chartiercbcedbf2017-03-12 22:24:50 -0700268 }
269 }
270}
271
// Encode all accumulated tables into the internal buffer (out_) and return the
// total byte size that the final CodeInfo region will require.
size_t StackMapStream::PrepareForFillIn() {
  // The in-memory entry structs must mirror the encoded table layouts exactly,
  // since tables are encoded straight from arrays of these structs.
  static_assert(sizeof(StackMapEntry) == StackMap::kCount * sizeof(uint32_t), "Layout");
  static_assert(sizeof(InvokeInfoEntry) == InvokeInfo::kCount * sizeof(uint32_t), "Layout");
  static_assert(sizeof(InlineInfoEntry) == InlineInfo::kCount * sizeof(uint32_t), "Layout");
  static_assert(sizeof(DexRegisterEntry) == DexRegisterInfo::kCount * sizeof(uint32_t), "Layout");
  DCHECK_EQ(out_.size(), 0u);

  // Read the stack masks now. The compiler might have updated them.
  for (size_t i = 0; i < lazy_stack_masks_.size(); i++) {
    BitVector* stack_mask = lazy_stack_masks_[i];
    if (stack_mask != nullptr && stack_mask->GetNumberOfBits() != 0) {
      stack_maps_[i].stack_mask_index =
          stack_masks_.Dedup(stack_mask->GetRawStorage(), stack_mask->GetNumberOfBits());
    }
  }

  // NOTE(review): the encode order below presumably defines the binary layout
  // that CodeInfo decodes — do not reorder; verify against stack_map.h.
  size_t bit_offset = 0;
  stack_maps_.Encode(&out_, &bit_offset);
  register_masks_.Encode(&out_, &bit_offset);
  stack_masks_.Encode(&out_, &bit_offset);
  invoke_infos_.Encode(&out_, &bit_offset);
  inline_infos_.Encode(&out_, &bit_offset);
  dex_register_masks_.Encode(&out_, &bit_offset);
  dex_register_maps_.Encode(&out_, &bit_offset);
  dex_register_catalog_.Encode(&out_, &bit_offset);

  // The encoded data is prefixed with its ULEB128-encoded size (see FillInCodeInfo).
  return UnsignedLeb128Size(out_.size()) + out_.size();
}
300
301void StackMapStream::FillInCodeInfo(MemoryRegion region) {
David Srbecky71ec1cc2018-05-18 15:57:25 +0100302 DCHECK(in_stack_map_ == false) << "Mismatched Begin/End calls";
303 DCHECK(in_inline_info_ == false) << "Mismatched Begin/End calls";
David Srbecky052f8ca2018-04-26 15:42:54 +0100304 DCHECK_NE(0u, out_.size()) << "PrepareForFillIn not called before FillIn";
305 DCHECK_EQ(region.size(), UnsignedLeb128Size(out_.size()) + out_.size());
306
307 uint8_t* ptr = EncodeUnsignedLeb128(region.begin(), out_.size());
308 region.CopyFromVector(ptr - region.begin(), out_);
Mathieu Chartier1a20b682017-01-31 14:25:16 -0800309
David Srbecky049d6812018-05-18 14:46:49 +0100310 // Verify all written data (usually only in debug builds).
311 if (kVerifyStackMaps) {
312 CodeInfo code_info(region);
313 CHECK_EQ(code_info.GetNumberOfStackMaps(), stack_maps_.size());
314 for (const auto& dcheck : dchecks_) {
315 dcheck(code_info);
David Srbecky1bbdfd72016-02-24 16:39:26 +0000316 }
David Srbecky71ec1cc2018-05-18 15:57:25 +0100317 }
David Srbecky1bbdfd72016-02-24 16:39:26 +0000318}
319
Mathieu Chartiercbcedbf2017-03-12 22:24:50 -0700320size_t StackMapStream::ComputeMethodInfoSize() const {
David Srbecky052f8ca2018-04-26 15:42:54 +0100321 DCHECK_NE(0u, out_.size()) << "PrepareForFillIn not called before " << __FUNCTION__;
David Srbecky71ec1cc2018-05-18 15:57:25 +0100322 return MethodInfo::ComputeSize(method_infos_.size());
Mathieu Chartiercbcedbf2017-03-12 22:24:50 -0700323}
324
Calin Juravlec416d332015-04-23 16:01:43 +0100325} // namespace art