blob: cd115499a6b72bddd8b9246214b9f30cd391920c [file] [log] [blame]
Calin Juravlec416d332015-04-23 16:01:43 +01001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
Nicolas Geoffray5d37c152017-01-12 13:25:19 +000016
Calin Juravlec416d332015-04-23 16:01:43 +010017#include "stack_map_stream.h"
18
David Srbecky049d6812018-05-18 14:46:49 +010019#include <memory>
20
Andreas Gampe90b936d2017-01-31 08:58:55 -080021#include "art_method-inl.h"
David Srbecky45aa5982016-03-18 02:15:09 +000022#include "base/stl_util.h"
David Sehr9e734c72018-01-04 17:56:19 -080023#include "dex/dex_file_types.h"
Nicolas Geoffrayfbdfa6d2017-02-03 10:43:13 +000024#include "optimizing/optimizing_compiler.h"
Nicolas Geoffray5d37c152017-01-12 13:25:19 +000025#include "runtime.h"
26#include "scoped_thread_state_change-inl.h"
David Srbecky71ec1cc2018-05-18 15:57:25 +010027#include "stack_map.h"
Nicolas Geoffray5d37c152017-01-12 13:25:19 +000028
Calin Juravlec416d332015-04-23 16:01:43 +010029namespace art {
30
// When true (debug builds), queue verification lambdas while the stream is
// built and run them against the final encoded CodeInfo in FillInCodeInfo().
constexpr static bool kVerifyStackMaps = kIsDebugBuild;
32
David Srbeckyd02b23f2018-05-29 23:27:22 +010033uint32_t StackMapStream::GetStackMapNativePcOffset(size_t i) {
34 return StackMap::UnpackNativePc(stack_maps_[i].packed_native_pc, instruction_set_);
35}
36
37void StackMapStream::SetStackMapNativePcOffset(size_t i, uint32_t native_pc_offset) {
38 stack_maps_[i].packed_native_pc = StackMap::PackNativePc(native_pc_offset, instruction_set_);
39}
40
// Starts a new stack map entry. Must be paired with EndStackMapEntry().
// Dex register locations and inline infos for this entry are accumulated via
// AddDexRegisterEntry() / Begin-EndInlineInfoEntry() before the matching End.
//
// dex_pc / native_pc_offset: the mapped program locations.
// register_mask: live core registers (deduplicated as shifted-value + shift).
// stack_mask: live stack slots; only the pointer is stored here (read lazily).
// num_dex_registers: per-method register count (0 means "unchanged").
// inlining_depth: number of inline frames this entry will receive; used only
//     by the debug-build verification lambda below.
// kind: Default/OSR/Catch/Debug, stored in the entry and verified later.
void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
                                        uint32_t native_pc_offset,
                                        uint32_t register_mask,
                                        BitVector* stack_mask,
                                        uint32_t num_dex_registers,
                                        uint8_t inlining_depth,
                                        StackMap::Kind kind) {
  DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = true;
  // num_dex_registers_ is the constant per-method number of registers.
  // However we initially don't know what the value is, so lazily initialize it.
  if (num_dex_registers_ == 0) {
    num_dex_registers_ = num_dex_registers;
  } else if (num_dex_registers > 0) {
    DCHECK_EQ(num_dex_registers_, num_dex_registers) << "Inconsistent register count";
  }

  // All table indices start as kNoValue; they are filled in as the
  // corresponding data is deduplicated into the side tables.
  current_stack_map_ = StackMapEntry {
    .kind = static_cast<uint32_t>(kind),
    .packed_native_pc = StackMap::PackNativePc(native_pc_offset, instruction_set_),
    .dex_pc = dex_pc,
    .register_mask_index = kNoValue,
    .stack_mask_index = kNoValue,
    .inline_info_index = kNoValue,
    .dex_register_mask_index = kNoValue,
    .dex_register_map_index = kNoValue,
  };
  if (register_mask != 0) {
    // Store the mask as (value >> shift, shift) so that masks differing only
    // by trailing zeros deduplicate to the same entry.
    uint32_t shift = LeastSignificantBit(register_mask);
    RegisterMaskEntry entry = { register_mask >> shift, shift };
    current_stack_map_.register_mask_index = register_masks_.Dedup(&entry);
  }
  // The compiler assumes the bit vector will be read during PrepareForFillIn(),
  // and it might modify the data before that. Therefore, just store the pointer.
  // See ClearSpillSlotsFromLoopPhisInStackMap in code_generator.h.
  lazy_stack_masks_.push_back(stack_mask);
  current_inline_infos_.clear();
  current_dex_registers_.clear();
  expected_num_dex_registers_ = num_dex_registers;

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    // Create lambda method, which will be executed at the very end to verify data.
    // Parameters and local variables will be captured(stored) by the lambda "[=]".
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      // Check that the runtime lookup paths find this exact entry.
      if (kind == StackMap::Kind::Default || kind == StackMap::Kind::OSR) {
        StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset,
                                                                    instruction_set_);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      } else if (kind == StackMap::Kind::Catch) {
        StackMap stack_map = code_info.GetCatchStackMapForDexPc(dex_pc);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      }
      // Check that every recorded field round-trips through the encoding.
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      CHECK_EQ(stack_map.GetNativePcOffset(instruction_set_), native_pc_offset);
      CHECK_EQ(stack_map.GetKind(), static_cast<uint32_t>(kind));
      CHECK_EQ(stack_map.GetDexPc(), dex_pc);
      CHECK_EQ(code_info.GetRegisterMaskOf(stack_map), register_mask);
      BitMemoryRegion seen_stack_mask = code_info.GetStackMaskOf(stack_map);
      CHECK_GE(seen_stack_mask.size_in_bits(), stack_mask ? stack_mask->GetNumberOfBits() : 0);
      for (size_t b = 0; b < seen_stack_mask.size_in_bits(); b++) {
        CHECK_EQ(seen_stack_mask.LoadBit(b), stack_mask != nullptr && stack_mask->IsBitSet(b));
      }
      CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
      CHECK_EQ(code_info.GetInlineDepthOf(stack_map), inlining_depth);
    });
  }
}
109
// Finishes the current stack map entry: attaches the accumulated inline infos
// and the delta-compressed dex register map, then commits the entry.
void StackMapStream::EndStackMapEntry() {
  DCHECK(in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = false;
  // All registers promised by Begin*Entry must have been provided.
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());

  // Generate index into the InlineInfo table.
  if (!current_inline_infos_.empty()) {
    // Mark the deepest frame so decoders know where the inline chain ends.
    current_inline_infos_.back().is_last = InlineInfo::kLast;
    current_stack_map_.inline_info_index =
        inline_infos_.Dedup(current_inline_infos_.data(), current_inline_infos_.size());
  }

  // Generate delta-compressed dex register map.
  CreateDexRegisterMap();

  stack_maps_.Add(current_stack_map_);
}
127
Nicolas Geoffrayb1d0f3f2015-05-14 12:41:51 +0100128void StackMapStream::AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t value) {
David Srbecky71ec1cc2018-05-18 15:57:25 +0100129 current_dex_registers_.push_back(DexRegisterLocation(kind, value));
Calin Juravlec416d332015-04-23 16:01:43 +0100130}
131
// Records invoke information for the current stack map's native PC, used by
// the runtime to resolve the called method. The method index is deduplicated
// into the shared method-info table.
void StackMapStream::AddInvoke(InvokeType invoke_type, uint32_t dex_method_index) {
  // Snapshot the packed PC now; current_stack_map_ is mutable until End.
  uint32_t packed_native_pc = current_stack_map_.packed_native_pc;
  size_t invoke_info_index = invoke_infos_.size();
  invoke_infos_.Add(InvokeInfoEntry {
    .packed_native_pc = packed_native_pc,
    .invoke_type = invoke_type,
    .method_info_index = method_infos_.Dedup(&dex_method_index),
  });

  if (kVerifyStackMaps) {
    // Verify the entry round-trips through the final encoding.
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      InvokeInfo invoke_info = code_info.GetInvokeInfo(invoke_info_index);
      CHECK_EQ(invoke_info.GetNativePcOffset(instruction_set_),
               StackMap::UnpackNativePc(packed_native_pc, instruction_set_));
      CHECK_EQ(invoke_info.GetInvokeType(), invoke_type);
      CHECK_EQ(method_infos_[invoke_info.GetMethodInfoIndex()], dex_method_index);
    });
  }
}
151
// Starts an inline frame within the current stack map entry. Must be paired
// with EndInlineInfoEntry(). The inlined method is recorded either as a raw
// ArtMethod pointer (split into hi/lo words) or as a dex method index,
// depending on EncodeArtMethodInInlineInfo().
//
// num_dex_registers: register count of the inlined frame; its registers are
//     appended after the outer frame's in current_dex_registers_.
// outer_dex_file: used only for a debug-build same-dex-file sanity check.
void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
                                          uint32_t dex_pc,
                                          uint32_t num_dex_registers,
                                          const DexFile* outer_dex_file) {
  DCHECK(!in_inline_info_) << "Mismatched Begin/End calls";
  in_inline_info_ = true;
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());

  // Inlined registers are appended after the caller's registers.
  expected_num_dex_registers_ += num_dex_registers;

  InlineInfoEntry entry = {
    .is_last = InlineInfo::kMore,  // EndStackMapEntry marks the real last one.
    .dex_pc = dex_pc,
    .method_info_index = kNoValue,
    .art_method_hi = kNoValue,
    .art_method_lo = kNoValue,
    // Cumulative register count up to and including this frame.
    .num_dex_registers = static_cast<uint32_t>(expected_num_dex_registers_),
  };
  if (EncodeArtMethodInInlineInfo(method)) {
    entry.art_method_hi = High32Bits(reinterpret_cast<uintptr_t>(method));
    entry.art_method_lo = Low32Bits(reinterpret_cast<uintptr_t>(method));
  } else {
    if (dex_pc != static_cast<uint32_t>(-1) && kIsDebugBuild) {
      // GetDexFile() requires mutator-lock access in debug builds.
      ScopedObjectAccess soa(Thread::Current());
      DCHECK(IsSameDexFile(*outer_dex_file, *method->GetDexFile()));
    }
    uint32_t dex_method_index = method->GetDexMethodIndexUnchecked();
    entry.method_info_index = method_infos_.Dedup(&dex_method_index);
  }
  current_inline_infos_.push_back(entry);

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    size_t depth = current_inline_infos_.size() - 1;
    // Verify this inline frame round-trips through the final encoding.
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      InlineInfo inline_info = code_info.GetInlineInfoAtDepth(stack_map, depth);
      CHECK_EQ(inline_info.GetDexPc(), dex_pc);
      bool encode_art_method = EncodeArtMethodInInlineInfo(method);
      CHECK_EQ(inline_info.EncodesArtMethod(), encode_art_method);
      if (encode_art_method) {
        CHECK_EQ(inline_info.GetArtMethod(), method);
      } else {
        CHECK_EQ(method_infos_[inline_info.GetMethodInfoIndex()],
                 method->GetDexMethodIndexUnchecked());
      }
    });
  }
}
201
// Closes the inline frame opened by BeginInlineInfoEntry(). By this point all
// of the frame's dex registers must have been supplied via AddDexRegisterEntry.
void StackMapStream::EndInlineInfoEntry() {
  DCHECK(in_inline_info_) << "Mismatched Begin/End calls";
  in_inline_info_ = false;
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
}
207
// Create delta-compressed dex register map based on the current list of DexRegisterLocations.
// All dex registers for a stack map are concatenated - inlined registers are just appended.
// Only registers that changed since the previous stack map (or whose last
// update is too far back to search) are written; the rest are implied.
void StackMapStream::CreateDexRegisterMap() {
  // These are fields rather than local variables so that we can reuse the reserved memory.
  temp_dex_register_mask_.ClearAllBits();
  temp_dex_register_map_.clear();

  // Ensure that the arrays that hold previous state are big enough to be safely indexed below.
  if (previous_dex_registers_.size() < current_dex_registers_.size()) {
    previous_dex_registers_.resize(current_dex_registers_.size(), DexRegisterLocation::None());
    dex_register_timestamp_.resize(current_dex_registers_.size(), 0u);
  }

  // Set bit in the mask for each register that has been changed since the previous stack map.
  // Modified registers are stored in the catalogue and the catalogue index added to the list.
  for (size_t i = 0; i < current_dex_registers_.size(); i++) {
    DexRegisterLocation reg = current_dex_registers_[i];
    // Distance is difference between this index and the index of last modification.
    uint32_t distance = stack_maps_.size() - dex_register_timestamp_[i];
    // Re-emit an unchanged register if its last write is beyond the decoder's
    // bounded backwards search, so decoding stays O(kMaxDexRegisterMapSearchDistance).
    if (previous_dex_registers_[i] != reg || distance > kMaxDexRegisterMapSearchDistance) {
      DexRegisterEntry entry = DexRegisterEntry{
        .kind = static_cast<uint32_t>(reg.GetKind()),
        .packed_value = DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue()),
      };
      // Dead registers get kNoValue instead of a catalog slot.
      uint32_t index = reg.IsLive() ? dex_register_catalog_.Dedup(&entry) : kNoValue;
      temp_dex_register_mask_.SetBit(i);
      temp_dex_register_map_.push_back(index);
      previous_dex_registers_[i] = reg;
      dex_register_timestamp_[i] = stack_maps_.size();
    }
  }

  // Set the mask and map for the current StackMap (which includes inlined registers).
  if (temp_dex_register_mask_.GetNumberOfBits() != 0) {
    current_stack_map_.dex_register_mask_index =
        dex_register_masks_.Dedup(temp_dex_register_mask_.GetRawStorage(),
                                  temp_dex_register_mask_.GetNumberOfBits());
  }
  if (!current_dex_registers_.empty()) {
    current_stack_map_.dex_register_map_index =
        dex_register_maps_.Dedup(temp_dex_register_map_.data(),
                                 temp_dex_register_map_.size());
  }

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    uint32_t depth = current_inline_infos_.size();
    // We need to make copy of the current registers for later (when the check is run).
    auto expected_dex_registers = std::make_shared<dchecked_vector<DexRegisterLocation>>(
        current_dex_registers_.begin(), current_dex_registers_.end());
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      // Decode the outer frame's registers, then each inline depth, and check
      // they match the concatenated list recorded above.
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      uint32_t expected_reg = 0;
      for (DexRegisterLocation reg : code_info.GetDexRegisterMapOf(stack_map)) {
        CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
      }
      for (uint32_t d = 0; d < depth; d++) {
        for (DexRegisterLocation reg : code_info.GetDexRegisterMapAtDepth(d, stack_map)) {
          CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
        }
      }
      CHECK_EQ(expected_reg, expected_dex_registers->size());
    });
  }
}
273
// Writes the deduplicated method indices into `region`, which the caller must
// have sized via ComputeMethodInfoSize(). In debug builds, re-reads the region
// to verify the written data.
void StackMapStream::FillInMethodInfo(MemoryRegion region) {
  {
    MethodInfo info(region.begin(), method_infos_.size());
    for (size_t i = 0; i < method_infos_.size(); ++i) {
      info.SetMethodIndex(i, method_infos_[i]);
    }
  }
  if (kVerifyStackMaps) {
    // Check the data matches.
    MethodInfo info(region.begin());
    const size_t count = info.NumMethodIndices();
    DCHECK_EQ(count, method_infos_.size());
    for (size_t i = 0; i < count; ++i) {
      DCHECK_EQ(info.GetMethodIndex(i), method_infos_[i]);
    }
  }
}
291
// Encodes all accumulated tables into the internal buffer `out_` and returns
// the total CodeInfo size (LEB128 length prefix + payload) the caller must
// allocate before FillInCodeInfo(). The Encode() call order below defines the
// on-disk table layout and must match the CodeInfo decoder.
size_t StackMapStream::PrepareForFillIn() {
  // The *Entry structs are written out via raw uint32_t views; keep their
  // layouts in lockstep with the corresponding accessor's column count.
  static_assert(sizeof(StackMapEntry) == StackMap::kCount * sizeof(uint32_t), "Layout");
  static_assert(sizeof(InvokeInfoEntry) == InvokeInfo::kCount * sizeof(uint32_t), "Layout");
  static_assert(sizeof(InlineInfoEntry) == InlineInfo::kCount * sizeof(uint32_t), "Layout");
  static_assert(sizeof(DexRegisterEntry) == DexRegisterInfo::kCount * sizeof(uint32_t), "Layout");
  DCHECK_EQ(out_.size(), 0u);

  // Read the stack masks now. The compiler might have updated them.
  for (size_t i = 0; i < lazy_stack_masks_.size(); i++) {
    BitVector* stack_mask = lazy_stack_masks_[i];
    if (stack_mask != nullptr && stack_mask->GetNumberOfBits() != 0) {
      stack_maps_[i].stack_mask_index =
          stack_masks_.Dedup(stack_mask->GetRawStorage(), stack_mask->GetNumberOfBits());
    }
  }

  size_t bit_offset = 0;
  stack_maps_.Encode(&out_, &bit_offset);
  register_masks_.Encode(&out_, &bit_offset);
  stack_masks_.Encode(&out_, &bit_offset);
  invoke_infos_.Encode(&out_, &bit_offset);
  inline_infos_.Encode(&out_, &bit_offset);
  dex_register_masks_.Encode(&out_, &bit_offset);
  dex_register_maps_.Encode(&out_, &bit_offset);
  dex_register_catalog_.Encode(&out_, &bit_offset);
  EncodeVarintBits(&out_, &bit_offset, num_dex_registers_);

  return UnsignedLeb128Size(out_.size()) + out_.size();
}
321
// Copies the encoded CodeInfo (LEB128 size prefix + payload prepared by
// PrepareForFillIn) into `region`, then runs all queued verification lambdas
// against the decoded result in debug builds.
void StackMapStream::FillInCodeInfo(MemoryRegion region) {
  DCHECK(in_stack_map_ == false) << "Mismatched Begin/End calls";
  DCHECK(in_inline_info_ == false) << "Mismatched Begin/End calls";
  DCHECK_NE(0u, out_.size()) << "PrepareForFillIn not called before FillIn";
  // Caller must size the region exactly as reported by PrepareForFillIn().
  DCHECK_EQ(region.size(), UnsignedLeb128Size(out_.size()) + out_.size());

  uint8_t* ptr = EncodeUnsignedLeb128(region.begin(), out_.size());
  region.CopyFromVector(ptr - region.begin(), out_);

  // Verify all written data (usually only in debug builds).
  if (kVerifyStackMaps) {
    CodeInfo code_info(region);
    CHECK_EQ(code_info.GetNumberOfStackMaps(), stack_maps_.size());
    for (const auto& dcheck : dchecks_) {
      dcheck(code_info);
    }
  }
}
340
// Returns the byte size needed for the MethodInfo region written by
// FillInMethodInfo(). Valid only after PrepareForFillIn() has run, since
// dedup of method indices must be complete.
size_t StackMapStream::ComputeMethodInfoSize() const {
  DCHECK_NE(0u, out_.size()) << "PrepareForFillIn not called before " << __FUNCTION__;
  return MethodInfo::ComputeSize(method_infos_.size());
}
345
Calin Juravlec416d332015-04-23 16:01:43 +0100346} // namespace art