blob: c0e32067036ee344485fff277fe98e2ee8a83982 [file] [log] [blame]
Calin Juravlec416d332015-04-23 16:01:43 +01001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
Nicolas Geoffray5d37c152017-01-12 13:25:19 +000016
Calin Juravlec416d332015-04-23 16:01:43 +010017#include "stack_map_stream.h"
18
David Srbecky049d6812018-05-18 14:46:49 +010019#include <memory>
20
Andreas Gampe90b936d2017-01-31 08:58:55 -080021#include "art_method-inl.h"
David Srbecky45aa5982016-03-18 02:15:09 +000022#include "base/stl_util.h"
David Sehr9e734c72018-01-04 17:56:19 -080023#include "dex/dex_file_types.h"
Nicolas Geoffrayfbdfa6d2017-02-03 10:43:13 +000024#include "optimizing/optimizing_compiler.h"
Nicolas Geoffray5d37c152017-01-12 13:25:19 +000025#include "runtime.h"
26#include "scoped_thread_state_change-inl.h"
David Srbecky71ec1cc2018-05-18 15:57:25 +010027#include "stack_map.h"
Nicolas Geoffray5d37c152017-01-12 13:25:19 +000028
Calin Juravlec416d332015-04-23 16:01:43 +010029namespace art {
30
David Srbecky049d6812018-05-18 14:46:49 +010031constexpr static bool kVerifyStackMaps = kIsDebugBuild;
32
David Srbeckyd02b23f2018-05-29 23:27:22 +010033uint32_t StackMapStream::GetStackMapNativePcOffset(size_t i) {
David Srbeckyf325e282018-06-13 15:02:32 +010034 return StackMap::UnpackNativePc(stack_maps_[i][StackMap::kPackedNativePc], instruction_set_);
David Srbeckyd02b23f2018-05-29 23:27:22 +010035}
36
37void StackMapStream::SetStackMapNativePcOffset(size_t i, uint32_t native_pc_offset) {
David Srbeckyf325e282018-06-13 15:02:32 +010038 stack_maps_[i][StackMap::kPackedNativePc] =
39 StackMap::PackNativePc(native_pc_offset, instruction_set_);
David Srbeckyd02b23f2018-05-29 23:27:22 +010040}
41
David Srbeckyf6ba5b32018-06-23 22:05:49 +010042void StackMapStream::BeginMethod(size_t frame_size_in_bytes,
43 size_t core_spill_mask,
44 size_t fp_spill_mask,
45 uint32_t num_dex_registers) {
46 DCHECK(!in_method_) << "Mismatched Begin/End calls";
47 in_method_ = true;
David Srbecky3aaaa212018-07-30 16:46:53 +010048 DCHECK_EQ(packed_frame_size_, 0u) << "BeginMethod was already called";
David Srbeckyf6ba5b32018-06-23 22:05:49 +010049
David Srbecky3aaaa212018-07-30 16:46:53 +010050 DCHECK_ALIGNED(frame_size_in_bytes, kStackAlignment);
51 packed_frame_size_ = frame_size_in_bytes / kStackAlignment;
David Srbeckyf6ba5b32018-06-23 22:05:49 +010052 core_spill_mask_ = core_spill_mask;
53 fp_spill_mask_ = fp_spill_mask;
54 num_dex_registers_ = num_dex_registers;
55}
56
David Srbeckye1412da2019-02-13 17:27:17 +000057void StackMapStream::EndMethod(size_t code_size) {
David Srbeckyf6ba5b32018-06-23 22:05:49 +010058 DCHECK(in_method_) << "Mismatched Begin/End calls";
59 in_method_ = false;
David Srbeckye35ac042019-02-25 18:11:53 +000060 code_size_ = code_size;
David Srbeckye7a91942018-08-01 17:23:53 +010061
62 // Read the stack masks now. The compiler might have updated them.
63 for (size_t i = 0; i < lazy_stack_masks_.size(); i++) {
64 BitVector* stack_mask = lazy_stack_masks_[i];
65 if (stack_mask != nullptr && stack_mask->GetNumberOfBits() != 0) {
66 stack_maps_[i][StackMap::kStackMaskIndex] =
67 stack_masks_.Dedup(stack_mask->GetRawStorage(), stack_mask->GetNumberOfBits());
68 }
69 }
David Srbeckye1412da2019-02-13 17:27:17 +000070
David Srbeckye35ac042019-02-25 18:11:53 +000071 uint32_t packed_code_size = StackMap::PackNativePc(code_size, instruction_set_);
David Srbeckye1412da2019-02-13 17:27:17 +000072 for (size_t i = 0; i < stack_maps_.size(); i++) {
David Srbeckye35ac042019-02-25 18:11:53 +000073 DCHECK_LE(stack_maps_[i][StackMap::kPackedNativePc], packed_code_size);
David Srbeckye1412da2019-02-13 17:27:17 +000074 }
David Srbeckyf6ba5b32018-06-23 22:05:49 +010075}
76
// Begin a new stack map entry for the program point identified by `dex_pc`
// and `native_pc_offset`. Dex register locations and inline infos for this
// entry are recorded between this call and the matching EndStackMapEntry().
// `register_mask` is the set of live registers at this point (presumably the
// GC-visible ones -- see stack_map.h to confirm); `stack_mask` marks live
// stack slots and may be null; `kind` distinguishes default/OSR/catch maps.
void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
                                        uint32_t native_pc_offset,
                                        uint32_t register_mask,
                                        BitVector* stack_mask,
                                        StackMap::Kind kind) {
  DCHECK(in_method_) << "Call BeginMethod first";
  DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = true;

  // Start from a fresh table row and fill in the directly known columns.
  current_stack_map_ = BitTableBuilder<StackMap>::Entry();
  current_stack_map_[StackMap::kKind] = static_cast<uint32_t>(kind);
  current_stack_map_[StackMap::kPackedNativePc] =
      StackMap::PackNativePc(native_pc_offset, instruction_set_);
  current_stack_map_[StackMap::kDexPc] = dex_pc;
  if (stack_maps_.size() > 0) {
    // Check that non-catch stack maps are sorted by pc.
    // Catch stack maps are at the end and may be unordered.
    if (stack_maps_.back()[StackMap::kKind] == StackMap::Kind::Catch) {
      DCHECK(current_stack_map_[StackMap::kKind] == StackMap::Kind::Catch);
    } else if (current_stack_map_[StackMap::kKind] != StackMap::Kind::Catch) {
      DCHECK_LE(stack_maps_.back()[StackMap::kPackedNativePc],
                current_stack_map_[StackMap::kPackedNativePc]);
    }
  }
  if (register_mask != 0) {
    // The mask is stored split into (value >> shift, shift) and deduplicated.
    uint32_t shift = LeastSignificantBit(register_mask);
    BitTableBuilder<RegisterMask>::Entry entry;
    entry[RegisterMask::kValue] = register_mask >> shift;
    entry[RegisterMask::kShift] = shift;
    current_stack_map_[StackMap::kRegisterMaskIndex] = register_masks_.Dedup(&entry);
  }
  // The compiler assumes the bit vector will be read during PrepareForFillIn(),
  // and it might modify the data before that. Therefore, just store the pointer.
  // See ClearSpillSlotsFromLoopPhisInStackMap in code_generator.h.
  lazy_stack_masks_.push_back(stack_mask);
  current_inline_infos_.clear();
  current_dex_registers_.clear();
  expected_num_dex_registers_ = num_dex_registers_;

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    // Create lambda method, which will be executed at the very end to verify data.
    // Parameters and local variables will be captured(stored) by the lambda "[=]".
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      // First, check that this entry can be found through the pc-based lookups
      // (catch entries are looked up by dex pc instead).
      if (kind == StackMap::Kind::Default || kind == StackMap::Kind::OSR) {
        StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset,
                                                                    instruction_set_);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      } else if (kind == StackMap::Kind::Catch) {
        StackMap stack_map = code_info.GetCatchStackMapForDexPc(dex_pc);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      }
      // Then check that every recorded field round-trips through the encoding.
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      CHECK_EQ(stack_map.GetNativePcOffset(instruction_set_), native_pc_offset);
      CHECK_EQ(stack_map.GetKind(), static_cast<uint32_t>(kind));
      CHECK_EQ(stack_map.GetDexPc(), dex_pc);
      CHECK_EQ(code_info.GetRegisterMaskOf(stack_map), register_mask);
      // The encoded stack mask may be wider than the input; extra bits must be 0.
      BitMemoryRegion seen_stack_mask = code_info.GetStackMaskOf(stack_map);
      CHECK_GE(seen_stack_mask.size_in_bits(), stack_mask ? stack_mask->GetNumberOfBits() : 0);
      for (size_t b = 0; b < seen_stack_mask.size_in_bits(); b++) {
        CHECK_EQ(seen_stack_mask.LoadBit(b), stack_mask != nullptr && stack_mask->IsBitSet(b));
      }
    });
  }
}
142
Calin Juravle4f46ac52015-04-23 18:47:21 +0100143void StackMapStream::EndStackMapEntry() {
David Srbecky71ec1cc2018-05-18 15:57:25 +0100144 DCHECK(in_stack_map_) << "Mismatched Begin/End calls";
145 in_stack_map_ = false;
David Srbecky71ec1cc2018-05-18 15:57:25 +0100146
David Srbecky6eb4d5e2018-06-03 12:00:20 +0100147 // Generate index into the InlineInfo table.
David Srbeckyf6ba5b32018-06-23 22:05:49 +0100148 size_t inlining_depth = current_inline_infos_.size();
David Srbecky6eb4d5e2018-06-03 12:00:20 +0100149 if (!current_inline_infos_.empty()) {
David Srbeckyf325e282018-06-13 15:02:32 +0100150 current_inline_infos_.back()[InlineInfo::kIsLast] = InlineInfo::kLast;
151 current_stack_map_[StackMap::kInlineInfoIndex] =
David Srbecky6eb4d5e2018-06-03 12:00:20 +0100152 inline_infos_.Dedup(current_inline_infos_.data(), current_inline_infos_.size());
David Srbecky71ec1cc2018-05-18 15:57:25 +0100153 }
154
David Srbecky6de88332018-06-03 12:00:11 +0100155 // Generate delta-compressed dex register map.
David Srbeckyf6ba5b32018-06-23 22:05:49 +0100156 size_t num_dex_registers = current_dex_registers_.size();
157 if (!current_dex_registers_.empty()) {
158 DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
159 CreateDexRegisterMap();
160 }
David Srbecky6de88332018-06-03 12:00:11 +0100161
David Srbecky71ec1cc2018-05-18 15:57:25 +0100162 stack_maps_.Add(current_stack_map_);
David Srbeckyf6ba5b32018-06-23 22:05:49 +0100163
164 if (kVerifyStackMaps) {
165 size_t stack_map_index = stack_maps_.size() - 1;
166 dchecks_.emplace_back([=](const CodeInfo& code_info) {
167 StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
168 CHECK_EQ(stack_map.HasDexRegisterMap(), (num_dex_registers != 0));
169 CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
David Srbecky93bd3612018-07-02 19:30:18 +0100170 CHECK_EQ(code_info.GetInlineInfosOf(stack_map).size(), inlining_depth);
David Srbeckyf6ba5b32018-06-23 22:05:49 +0100171 });
172 }
Calin Juravle4f46ac52015-04-23 18:47:21 +0100173}
174
// Begin an inline info entry for `method` inlined at `dex_pc` inside the
// current stack map. Must be called between BeginStackMapEntry() and
// EndStackMapEntry(), after all dex registers of the enclosing scopes have
// been reported; `num_dex_registers` further registers are then expected
// for this inlined frame. `outer_dex_file` is used only for a debug check.
void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
                                          uint32_t dex_pc,
                                          uint32_t num_dex_registers,
                                          const DexFile* outer_dex_file) {
  DCHECK(in_stack_map_) << "Call BeginStackMapEntry first";
  DCHECK(!in_inline_info_) << "Mismatched Begin/End calls";
  in_inline_info_ = true;
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());

  // Dex registers of inlined frames are appended after the callers' ones.
  expected_num_dex_registers_ += num_dex_registers;

  BitTableBuilder<InlineInfo>::Entry entry;
  // Tentatively not the last inline info; EndStackMapEntry() marks the last one.
  entry[InlineInfo::kIsLast] = InlineInfo::kMore;
  entry[InlineInfo::kDexPc] = dex_pc;
  entry[InlineInfo::kNumberOfDexRegisters] = static_cast<uint32_t>(expected_num_dex_registers_);
  if (EncodeArtMethodInInlineInfo(method)) {
    // Store the ArtMethod pointer directly, split into two 32-bit halves.
    entry[InlineInfo::kArtMethodHi] = High32Bits(reinterpret_cast<uintptr_t>(method));
    entry[InlineInfo::kArtMethodLo] = Low32Bits(reinterpret_cast<uintptr_t>(method));
  } else {
    if (dex_pc != static_cast<uint32_t>(-1) && kIsDebugBuild) {
      ScopedObjectAccess soa(Thread::Current());
      DCHECK(IsSameDexFile(*outer_dex_file, *method->GetDexFile()));
    }
    // Otherwise reference the method by its deduplicated dex method index.
    uint32_t dex_method_index = method->GetDexMethodIndex();
    entry[InlineInfo::kMethodInfoIndex] = method_infos_.Dedup({dex_method_index});
  }
  current_inline_infos_.push_back(entry);

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    size_t depth = current_inline_infos_.size() - 1;
    // Deferred check, run against the final encoded CodeInfo in Encode().
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      InlineInfo inline_info = code_info.GetInlineInfosOf(stack_map)[depth];
      CHECK_EQ(inline_info.GetDexPc(), dex_pc);
      bool encode_art_method = EncodeArtMethodInInlineInfo(method);
      CHECK_EQ(inline_info.EncodesArtMethod(), encode_art_method);
      if (encode_art_method) {
        CHECK_EQ(inline_info.GetArtMethod(), method);
      } else {
        CHECK_EQ(code_info.GetMethodIndexOf(inline_info), method->GetDexMethodIndex());
      }
    });
  }
}
220
221void StackMapStream::EndInlineInfoEntry() {
David Srbecky71ec1cc2018-05-18 15:57:25 +0100222 DCHECK(in_inline_info_) << "Mismatched Begin/End calls";
223 in_inline_info_ = false;
224 DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
Calin Juravlec416d332015-04-23 16:01:43 +0100225}
226
// Create delta-compressed dex register map based on the current list of DexRegisterLocations.
// All dex registers for a stack map are concatenated - inlined registers are just appended.
// Only registers that changed since the last stack map (or whose last encoding
// is more than kMaxDexRegisterMapSearchDistance stack maps away) are re-emitted;
// the reader reconstructs the rest from earlier stack maps.
void StackMapStream::CreateDexRegisterMap() {
  // These are fields rather than local variables so that we can reuse the reserved memory.
  temp_dex_register_mask_.ClearAllBits();
  temp_dex_register_map_.clear();

  // Ensure that the arrays that hold previous state are big enough to be safely indexed below.
  if (previous_dex_registers_.size() < current_dex_registers_.size()) {
    previous_dex_registers_.resize(current_dex_registers_.size(), DexRegisterLocation::None());
    dex_register_timestamp_.resize(current_dex_registers_.size(), 0u);
  }

  // Set bit in the mask for each register that has been changed since the previous stack map.
  // Modified registers are stored in the catalogue and the catalogue index added to the list.
  for (size_t i = 0; i < current_dex_registers_.size(); i++) {
    DexRegisterLocation reg = current_dex_registers_[i];
    // Distance is difference between this index and the index of last modification.
    uint32_t distance = stack_maps_.size() - dex_register_timestamp_[i];
    if (previous_dex_registers_[i] != reg || distance > kMaxDexRegisterMapSearchDistance) {
      BitTableBuilder<DexRegisterInfo>::Entry entry;
      entry[DexRegisterInfo::kKind] = static_cast<uint32_t>(reg.GetKind());
      entry[DexRegisterInfo::kPackedValue] =
          DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue());
      // Dead registers get kNoValue instead of a catalogue index.
      uint32_t index = reg.IsLive() ? dex_register_catalog_.Dedup(&entry) : kNoValue;
      temp_dex_register_mask_.SetBit(i);
      temp_dex_register_map_.push_back({index});
      previous_dex_registers_[i] = reg;
      dex_register_timestamp_[i] = stack_maps_.size();
    }
  }

  // Set the mask and map for the current StackMap (which includes inlined registers).
  if (temp_dex_register_mask_.GetNumberOfBits() != 0) {
    current_stack_map_[StackMap::kDexRegisterMaskIndex] =
        dex_register_masks_.Dedup(temp_dex_register_mask_.GetRawStorage(),
                                  temp_dex_register_mask_.GetNumberOfBits());
  }
  if (!current_dex_registers_.empty()) {
    current_stack_map_[StackMap::kDexRegisterMapIndex] =
        dex_register_maps_.Dedup(temp_dex_register_map_.data(),
                                 temp_dex_register_map_.size());
  }

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    // We need to make copy of the current registers for later (when the check is run).
    auto expected_dex_registers = std::make_shared<dchecked_vector<DexRegisterLocation>>(
        current_dex_registers_.begin(), current_dex_registers_.end());
    // Deferred check: decode the maps from the final CodeInfo and compare them
    // against the saved copy (outer frame first, then each inlined frame).
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      uint32_t expected_reg = 0;
      for (DexRegisterLocation reg : code_info.GetDexRegisterMapOf(stack_map)) {
        CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
      }
      for (InlineInfo inline_info : code_info.GetInlineInfosOf(stack_map)) {
        DexRegisterMap map = code_info.GetInlineDexRegisterMapOf(stack_map, inline_info);
        for (DexRegisterLocation reg : map) {
          CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
        }
      }
      CHECK_EQ(expected_reg, expected_dex_registers->size());
    });
  }
}
292
// Write one bit table into the output stream, preceded by a flag bit telling
// the reader whether the table data is deduplicated (presumably shared with
// another CodeInfo -- confirm in stack_map.h). Tables written by this stream
// are always emitted in full, so the flag is always false here.
template<typename Writer, typename Builder>
ALWAYS_INLINE static void EncodeTable(Writer& out, const Builder& bit_table) {
  out.WriteBit(false);  // Is not deduped.
  bit_table.Encode(out);
}
298
David Srbeckye7a91942018-08-01 17:23:53 +0100299ScopedArenaVector<uint8_t> StackMapStream::Encode() {
300 DCHECK(in_stack_map_ == false) << "Mismatched Begin/End calls";
301 DCHECK(in_inline_info_ == false) << "Mismatched Begin/End calls";
David Srbecky71ec1cc2018-05-18 15:57:25 +0100302
David Srbeckye7a91942018-08-01 17:23:53 +0100303 ScopedArenaVector<uint8_t> buffer(allocator_->Adapter(kArenaAllocStackMapStream));
304 BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&buffer);
David Srbeckye35ac042019-02-25 18:11:53 +0000305 out.WriteVarint(code_size_);
David Srbecky0c3aa312018-08-03 14:52:32 +0100306 out.WriteVarint(packed_frame_size_);
307 out.WriteVarint(core_spill_mask_);
308 out.WriteVarint(fp_spill_mask_);
309 out.WriteVarint(num_dex_registers_);
David Srbeckyb73323c2018-07-15 23:58:44 +0100310 EncodeTable(out, stack_maps_);
David Srbeckyb73323c2018-07-15 23:58:44 +0100311 EncodeTable(out, register_masks_);
312 EncodeTable(out, stack_masks_);
David Srbeckya2d29a32018-08-03 11:06:38 +0100313 EncodeTable(out, inline_infos_);
314 EncodeTable(out, method_infos_);
David Srbeckyb73323c2018-07-15 23:58:44 +0100315 EncodeTable(out, dex_register_masks_);
316 EncodeTable(out, dex_register_maps_);
317 EncodeTable(out, dex_register_catalog_);
David Srbecky45aa5982016-03-18 02:15:09 +0000318
David Srbeckya38e6cf2018-06-26 18:13:49 +0100319 // Verify that we can load the CodeInfo and check some essentials.
David Srbeckye7a91942018-08-01 17:23:53 +0100320 CodeInfo code_info(buffer.data());
321 CHECK_EQ(code_info.Size(), buffer.size());
David Srbeckya38e6cf2018-06-26 18:13:49 +0100322 CHECK_EQ(code_info.GetNumberOfStackMaps(), stack_maps_.size());
Mathieu Chartier1a20b682017-01-31 14:25:16 -0800323
David Srbecky049d6812018-05-18 14:46:49 +0100324 // Verify all written data (usually only in debug builds).
325 if (kVerifyStackMaps) {
David Srbecky049d6812018-05-18 14:46:49 +0100326 for (const auto& dcheck : dchecks_) {
327 dcheck(code_info);
David Srbecky1bbdfd72016-02-24 16:39:26 +0000328 }
David Srbecky71ec1cc2018-05-18 15:57:25 +0100329 }
David Srbeckye7a91942018-08-01 17:23:53 +0100330
331 return buffer;
David Srbecky1bbdfd72016-02-24 16:39:26 +0000332}
333
Calin Juravlec416d332015-04-23 16:01:43 +0100334} // namespace art