Calin Juravle | c416d33 | 2015-04-23 16:01:43 +0100 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2015 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
Nicolas Geoffray | 5d37c15 | 2017-01-12 13:25:19 +0000 | [diff] [blame] | 16 | |
Calin Juravle | c416d33 | 2015-04-23 16:01:43 +0100 | [diff] [blame] | 17 | #include "stack_map_stream.h" |
| 18 | |
David Srbecky | 049d681 | 2018-05-18 14:46:49 +0100 | [diff] [blame] | 19 | #include <memory> |
| 20 | |
Andreas Gampe | 90b936d | 2017-01-31 08:58:55 -0800 | [diff] [blame] | 21 | #include "art_method-inl.h" |
David Srbecky | 45aa598 | 2016-03-18 02:15:09 +0000 | [diff] [blame] | 22 | #include "base/stl_util.h" |
David Sehr | 9e734c7 | 2018-01-04 17:56:19 -0800 | [diff] [blame] | 23 | #include "dex/dex_file_types.h" |
Nicolas Geoffray | fbdfa6d | 2017-02-03 10:43:13 +0000 | [diff] [blame] | 24 | #include "optimizing/optimizing_compiler.h" |
Nicolas Geoffray | 5d37c15 | 2017-01-12 13:25:19 +0000 | [diff] [blame] | 25 | #include "runtime.h" |
| 26 | #include "scoped_thread_state_change-inl.h" |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 27 | #include "stack_map.h" |
Nicolas Geoffray | 5d37c15 | 2017-01-12 13:25:19 +0000 | [diff] [blame] | 28 | |
Calin Juravle | c416d33 | 2015-04-23 16:01:43 +0100 | [diff] [blame] | 29 | namespace art { |
| 30 | |
// Enables the deferred self-checks (collected in dchecks_ and run against the
// final CodeInfo in FillInCodeInfo) in debug builds only; they are too costly
// for release builds.
constexpr static bool kVerifyStackMaps = kIsDebugBuild;
| 32 | |
David Srbecky | d02b23f | 2018-05-29 23:27:22 +0100 | [diff] [blame] | 33 | uint32_t StackMapStream::GetStackMapNativePcOffset(size_t i) { |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 34 | return StackMap::UnpackNativePc(stack_maps_[i][StackMap::kPackedNativePc], instruction_set_); |
David Srbecky | d02b23f | 2018-05-29 23:27:22 +0100 | [diff] [blame] | 35 | } |
| 36 | |
| 37 | void StackMapStream::SetStackMapNativePcOffset(size_t i, uint32_t native_pc_offset) { |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 38 | stack_maps_[i][StackMap::kPackedNativePc] = |
| 39 | StackMap::PackNativePc(native_pc_offset, instruction_set_); |
David Srbecky | d02b23f | 2018-05-29 23:27:22 +0100 | [diff] [blame] | 40 | } |
| 41 | |
// Opens a new stack map entry; everything recorded until the matching
// EndStackMapEntry() (dex registers, inline frames) belongs to this entry.
// - dex_pc / native_pc_offset: bytecode and machine-code locations described.
// - register_mask: physical registers to record for this map (stored
//   shift-compressed and deduplicated).
// - stack_mask: stack-slot bit vector; only the pointer is stored here and the
//   bits are read later in PrepareForFillIn() (see comment below).
// - num_dex_registers: register count of the outermost frame (0 if unknown).
// - inlining_depth: expected number of inline frames; in this method it is
//   used only by the deferred verification lambda.
// - kind: StackMap::Kind; Catch maps have relaxed ordering (see below).
void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
                                        uint32_t native_pc_offset,
                                        uint32_t register_mask,
                                        BitVector* stack_mask,
                                        uint32_t num_dex_registers,
                                        uint8_t inlining_depth,
                                        StackMap::Kind kind) {
  DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = true;
  // num_dex_registers_ is the constant per-method number of registers.
  // However we initially don't know what the value is, so lazily initialize it.
  if (num_dex_registers_ == 0) {
    num_dex_registers_ = num_dex_registers;
  } else if (num_dex_registers > 0) {
    DCHECK_EQ(num_dex_registers_, num_dex_registers) << "Inconsistent register count";
  }

  // Start a fresh row; columns not set here keep the builder's default value.
  current_stack_map_ = BitTableBuilder<StackMap>::Entry();
  current_stack_map_[StackMap::kKind] = static_cast<uint32_t>(kind);
  current_stack_map_[StackMap::kPackedNativePc] =
      StackMap::PackNativePc(native_pc_offset, instruction_set_);
  current_stack_map_[StackMap::kDexPc] = dex_pc;
  if (stack_maps_.size() > 0) {
    // Check that non-catch stack maps are sorted by pc.
    // Catch stack maps are at the end and may be unordered.
    if (stack_maps_.back()[StackMap::kKind] == StackMap::Kind::Catch) {
      // Once a Catch map has been emitted, only Catch maps may follow.
      DCHECK(current_stack_map_[StackMap::kKind] == StackMap::Kind::Catch);
    } else if (current_stack_map_[StackMap::kKind] != StackMap::Kind::Catch) {
      DCHECK_LE(stack_maps_.back()[StackMap::kPackedNativePc],
                current_stack_map_[StackMap::kPackedNativePc]);
    }
  }
  if (register_mask != 0) {
    // Store the mask shifted right so that trailing zero bits are not encoded;
    // the shift amount is kept alongside so the mask can be reconstructed.
    uint32_t shift = LeastSignificantBit(register_mask);
    BitTableBuilder<RegisterMask>::Entry entry;
    entry[RegisterMask::kValue] = register_mask >> shift;
    entry[RegisterMask::kShift] = shift;
    current_stack_map_[StackMap::kRegisterMaskIndex] = register_masks_.Dedup(&entry);
  }
  // The compiler assumes the bit vector will be read during PrepareForFillIn(),
  // and it might modify the data before that. Therefore, just store the pointer.
  // See ClearSpillSlotsFromLoopPhisInStackMap in code_generator.h.
  lazy_stack_masks_.push_back(stack_mask);
  current_inline_infos_.clear();
  current_dex_registers_.clear();
  expected_num_dex_registers_ = num_dex_registers;

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    // Create lambda method, which will be executed at the very end to verify data.
    // Parameters and local variables will be captured(stored) by the lambda "[=]".
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      // Check that the entry is reachable through the appropriate lookup API
      // for its kind, and that every recorded field round-trips.
      if (kind == StackMap::Kind::Default || kind == StackMap::Kind::OSR) {
        StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset,
                                                                    instruction_set_);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      } else if (kind == StackMap::Kind::Catch) {
        StackMap stack_map = code_info.GetCatchStackMapForDexPc(dex_pc);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      }
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      CHECK_EQ(stack_map.GetNativePcOffset(instruction_set_), native_pc_offset);
      CHECK_EQ(stack_map.GetKind(), static_cast<uint32_t>(kind));
      CHECK_EQ(stack_map.GetDexPc(), dex_pc);
      CHECK_EQ(code_info.GetRegisterMaskOf(stack_map), register_mask);
      BitMemoryRegion seen_stack_mask = code_info.GetStackMaskOf(stack_map);
      // The stored mask may be padded; every bit must still agree with the input.
      CHECK_GE(seen_stack_mask.size_in_bits(), stack_mask ? stack_mask->GetNumberOfBits() : 0);
      for (size_t b = 0; b < seen_stack_mask.size_in_bits(); b++) {
        CHECK_EQ(seen_stack_mask.LoadBit(b), stack_mask != nullptr && stack_mask->IsBitSet(b));
      }
      CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
      CHECK_EQ(code_info.GetInlineDepthOf(stack_map), inlining_depth);
    });
  }
}
| 117 | |
Calin Juravle | 4f46ac5 | 2015-04-23 18:47:21 +0100 | [diff] [blame] | 118 | void StackMapStream::EndStackMapEntry() { |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 119 | DCHECK(in_stack_map_) << "Mismatched Begin/End calls"; |
| 120 | in_stack_map_ = false; |
| 121 | DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size()); |
| 122 | |
David Srbecky | 6eb4d5e | 2018-06-03 12:00:20 +0100 | [diff] [blame] | 123 | // Generate index into the InlineInfo table. |
| 124 | if (!current_inline_infos_.empty()) { |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 125 | current_inline_infos_.back()[InlineInfo::kIsLast] = InlineInfo::kLast; |
| 126 | current_stack_map_[StackMap::kInlineInfoIndex] = |
David Srbecky | 6eb4d5e | 2018-06-03 12:00:20 +0100 | [diff] [blame] | 127 | inline_infos_.Dedup(current_inline_infos_.data(), current_inline_infos_.size()); |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 128 | } |
| 129 | |
David Srbecky | 6de8833 | 2018-06-03 12:00:11 +0100 | [diff] [blame] | 130 | // Generate delta-compressed dex register map. |
| 131 | CreateDexRegisterMap(); |
| 132 | |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 133 | stack_maps_.Add(current_stack_map_); |
Calin Juravle | 4f46ac5 | 2015-04-23 18:47:21 +0100 | [diff] [blame] | 134 | } |
| 135 | |
Mathieu Chartier | d776ff0 | 2017-01-17 09:32:18 -0800 | [diff] [blame] | 136 | void StackMapStream::AddInvoke(InvokeType invoke_type, uint32_t dex_method_index) { |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 137 | uint32_t packed_native_pc = current_stack_map_[StackMap::kPackedNativePc]; |
David Srbecky | 049d681 | 2018-05-18 14:46:49 +0100 | [diff] [blame] | 138 | size_t invoke_info_index = invoke_infos_.size(); |
David Srbecky | cf7833e | 2018-06-14 16:45:22 +0100 | [diff] [blame] | 139 | BitTableBuilder<InvokeInfo>::Entry entry; |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 140 | entry[InvokeInfo::kPackedNativePc] = packed_native_pc; |
| 141 | entry[InvokeInfo::kInvokeType] = invoke_type; |
| 142 | entry[InvokeInfo::kMethodInfoIndex] = method_infos_.Dedup({dex_method_index}); |
| 143 | invoke_infos_.Add(entry); |
David Srbecky | 049d681 | 2018-05-18 14:46:49 +0100 | [diff] [blame] | 144 | |
| 145 | if (kVerifyStackMaps) { |
| 146 | dchecks_.emplace_back([=](const CodeInfo& code_info) { |
| 147 | InvokeInfo invoke_info = code_info.GetInvokeInfo(invoke_info_index); |
| 148 | CHECK_EQ(invoke_info.GetNativePcOffset(instruction_set_), |
| 149 | StackMap::UnpackNativePc(packed_native_pc, instruction_set_)); |
| 150 | CHECK_EQ(invoke_info.GetInvokeType(), invoke_type); |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 151 | CHECK_EQ(method_infos_[invoke_info.GetMethodInfoIndex()][0], dex_method_index); |
David Srbecky | 049d681 | 2018-05-18 14:46:49 +0100 | [diff] [blame] | 152 | }); |
| 153 | } |
Mathieu Chartier | d776ff0 | 2017-01-17 09:32:18 -0800 | [diff] [blame] | 154 | } |
| 155 | |
// Opens one inline frame inside the current stack map entry. Each call adds
// one nesting level; the matching EndInlineInfoEntry() closes it. Dex
// registers of inline frames are appended after the caller's registers, so
// expected_num_dex_registers_ grows cumulatively.
void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
                                          uint32_t dex_pc,
                                          uint32_t num_dex_registers,
                                          const DexFile* outer_dex_file) {
  DCHECK(!in_inline_info_) << "Mismatched Begin/End calls";
  in_inline_info_ = true;
  // All registers of the enclosing frame(s) must be in place before nesting.
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());

  expected_num_dex_registers_ += num_dex_registers;

  BitTableBuilder<InlineInfo>::Entry entry;
  // Provisionally "more frames follow"; EndStackMapEntry() rewrites the
  // innermost frame's flag to kLast.
  entry[InlineInfo::kIsLast] = InlineInfo::kMore;
  entry[InlineInfo::kDexPc] = dex_pc;
  // Cumulative register count up to and including this depth.
  entry[InlineInfo::kNumberOfDexRegisters] = static_cast<uint32_t>(expected_num_dex_registers_);
  if (EncodeArtMethodInInlineInfo(method)) {
    // Store the ArtMethod pointer directly, split into two 32-bit halves.
    entry[InlineInfo::kArtMethodHi] = High32Bits(reinterpret_cast<uintptr_t>(method));
    entry[InlineInfo::kArtMethodLo] = Low32Bits(reinterpret_cast<uintptr_t>(method));
  } else {
    if (dex_pc != static_cast<uint32_t>(-1) && kIsDebugBuild) {
      // Debug-only sanity check; needs mutator access to inspect the method.
      ScopedObjectAccess soa(Thread::Current());
      DCHECK(IsSameDexFile(*outer_dex_file, *method->GetDexFile()));
    }
    // Reference the method via its (deduplicated) index in the MethodInfo table.
    uint32_t dex_method_index = method->GetDexMethodIndexUnchecked();
    entry[InlineInfo::kMethodInfoIndex] = method_infos_.Dedup({dex_method_index});
  }
  current_inline_infos_.push_back(entry);

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    size_t depth = current_inline_infos_.size() - 1;
    // Deferred check run in FillInCodeInfo(); captures inputs by value.
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      InlineInfo inline_info = code_info.GetInlineInfoAtDepth(stack_map, depth);
      CHECK_EQ(inline_info.GetDexPc(), dex_pc);
      bool encode_art_method = EncodeArtMethodInInlineInfo(method);
      CHECK_EQ(inline_info.EncodesArtMethod(), encode_art_method);
      if (encode_art_method) {
        CHECK_EQ(inline_info.GetArtMethod(), method);
      } else {
        CHECK_EQ(method_infos_[inline_info.GetMethodInfoIndex()][0],
                 method->GetDexMethodIndexUnchecked());
      }
    });
  }
}
| 201 | |
| 202 | void StackMapStream::EndInlineInfoEntry() { |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 203 | DCHECK(in_inline_info_) << "Mismatched Begin/End calls"; |
| 204 | in_inline_info_ = false; |
| 205 | DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size()); |
Calin Juravle | c416d33 | 2015-04-23 16:01:43 +0100 | [diff] [blame] | 206 | } |
| 207 | |
// Create delta-compressed dex register map based on the current list of DexRegisterLocations.
// All dex registers for a stack map are concatenated - inlined registers are just appended.
void StackMapStream::CreateDexRegisterMap() {
  // These are fields rather than local variables so that we can reuse the reserved memory.
  temp_dex_register_mask_.ClearAllBits();
  temp_dex_register_map_.clear();

  // Ensure that the arrays that hold previous state are big enough to be safely indexed below.
  if (previous_dex_registers_.size() < current_dex_registers_.size()) {
    previous_dex_registers_.resize(current_dex_registers_.size(), DexRegisterLocation::None());
    dex_register_timestamp_.resize(current_dex_registers_.size(), 0u);
  }

  // Set bit in the mask for each register that has been changed since the previous stack map.
  // Modified registers are stored in the catalogue and the catalogue index added to the list.
  for (size_t i = 0; i < current_dex_registers_.size(); i++) {
    DexRegisterLocation reg = current_dex_registers_[i];
    // Distance is difference between this index and the index of last modification.
    uint32_t distance = stack_maps_.size() - dex_register_timestamp_[i];
    // Re-record an unchanged register once the distance exceeds the limit,
    // presumably to bound how far back a reader must search — TODO confirm
    // against the decoder in stack_map.h.
    if (previous_dex_registers_[i] != reg || distance > kMaxDexRegisterMapSearchDistance) {
      BitTableBuilder<DexRegisterInfo>::Entry entry;
      entry[DexRegisterInfo::kKind] = static_cast<uint32_t>(reg.GetKind());
      entry[DexRegisterInfo::kPackedValue] =
          DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue());
      // Dead registers are flagged in the mask but get no catalogue entry.
      uint32_t index = reg.IsLive() ? dex_register_catalog_.Dedup(&entry) : kNoValue;
      temp_dex_register_mask_.SetBit(i);
      temp_dex_register_map_.push_back({index});
      previous_dex_registers_[i] = reg;
      dex_register_timestamp_[i] = stack_maps_.size();
    }
  }

  // Set the mask and map for the current StackMap (which includes inlined registers).
  if (temp_dex_register_mask_.GetNumberOfBits() != 0) {
    current_stack_map_[StackMap::kDexRegisterMaskIndex] =
        dex_register_masks_.Dedup(temp_dex_register_mask_.GetRawStorage(),
                                  temp_dex_register_mask_.GetNumberOfBits());
  }
  if (!current_dex_registers_.empty()) {
    current_stack_map_[StackMap::kDexRegisterMapIndex] =
        dex_register_maps_.Dedup(temp_dex_register_map_.data(),
                                 temp_dex_register_map_.size());
  }

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    uint32_t depth = current_inline_infos_.size();
    // We need to make copy of the current registers for later (when the check is run).
    auto expected_dex_registers = std::make_shared<dchecked_vector<DexRegisterLocation>>(
        current_dex_registers_.begin(), current_dex_registers_.end());
    // Deferred check: decode the outer map and every inline depth, and verify
    // the concatenation matches what was recorded.
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      uint32_t expected_reg = 0;
      for (DexRegisterLocation reg : code_info.GetDexRegisterMapOf(stack_map)) {
        CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
      }
      for (uint32_t d = 0; d < depth; d++) {
        for (DexRegisterLocation reg : code_info.GetDexRegisterMapAtDepth(d, stack_map)) {
          CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
        }
      }
      CHECK_EQ(expected_reg, expected_dex_registers->size());
    });
  }
}
| 273 | |
Mathieu Chartier | cbcedbf | 2017-03-12 22:24:50 -0700 | [diff] [blame] | 274 | void StackMapStream::FillInMethodInfo(MemoryRegion region) { |
| 275 | { |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 276 | MethodInfo info(region.begin(), method_infos_.size()); |
| 277 | for (size_t i = 0; i < method_infos_.size(); ++i) { |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 278 | info.SetMethodIndex(i, method_infos_[i][0]); |
Mathieu Chartier | cbcedbf | 2017-03-12 22:24:50 -0700 | [diff] [blame] | 279 | } |
| 280 | } |
David Srbecky | 049d681 | 2018-05-18 14:46:49 +0100 | [diff] [blame] | 281 | if (kVerifyStackMaps) { |
Mathieu Chartier | cbcedbf | 2017-03-12 22:24:50 -0700 | [diff] [blame] | 282 | // Check the data matches. |
| 283 | MethodInfo info(region.begin()); |
| 284 | const size_t count = info.NumMethodIndices(); |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 285 | DCHECK_EQ(count, method_infos_.size()); |
Mathieu Chartier | cbcedbf | 2017-03-12 22:24:50 -0700 | [diff] [blame] | 286 | for (size_t i = 0; i < count; ++i) { |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 287 | DCHECK_EQ(info.GetMethodIndex(i), method_infos_[i][0]); |
Mathieu Chartier | cbcedbf | 2017-03-12 22:24:50 -0700 | [diff] [blame] | 288 | } |
| 289 | } |
| 290 | } |
| 291 | |
David Srbecky | 052f8ca | 2018-04-26 15:42:54 +0100 | [diff] [blame] | 292 | size_t StackMapStream::PrepareForFillIn() { |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 293 | DCHECK_EQ(out_.size(), 0u); |
| 294 | |
| 295 | // Read the stack masks now. The compiler might have updated them. |
| 296 | for (size_t i = 0; i < lazy_stack_masks_.size(); i++) { |
| 297 | BitVector* stack_mask = lazy_stack_masks_[i]; |
| 298 | if (stack_mask != nullptr && stack_mask->GetNumberOfBits() != 0) { |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 299 | stack_maps_[i][StackMap::kStackMaskIndex] = |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 300 | stack_masks_.Dedup(stack_mask->GetRawStorage(), stack_mask->GetNumberOfBits()); |
| 301 | } |
| 302 | } |
| 303 | |
David Srbecky | 052f8ca | 2018-04-26 15:42:54 +0100 | [diff] [blame] | 304 | size_t bit_offset = 0; |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 305 | stack_maps_.Encode(&out_, &bit_offset); |
| 306 | register_masks_.Encode(&out_, &bit_offset); |
| 307 | stack_masks_.Encode(&out_, &bit_offset); |
| 308 | invoke_infos_.Encode(&out_, &bit_offset); |
| 309 | inline_infos_.Encode(&out_, &bit_offset); |
| 310 | dex_register_masks_.Encode(&out_, &bit_offset); |
| 311 | dex_register_maps_.Encode(&out_, &bit_offset); |
| 312 | dex_register_catalog_.Encode(&out_, &bit_offset); |
David Srbecky | 6de8833 | 2018-06-03 12:00:11 +0100 | [diff] [blame] | 313 | EncodeVarintBits(&out_, &bit_offset, num_dex_registers_); |
David Srbecky | 45aa598 | 2016-03-18 02:15:09 +0000 | [diff] [blame] | 314 | |
David Srbecky | 052f8ca | 2018-04-26 15:42:54 +0100 | [diff] [blame] | 315 | return UnsignedLeb128Size(out_.size()) + out_.size(); |
| 316 | } |
| 317 | |
| 318 | void StackMapStream::FillInCodeInfo(MemoryRegion region) { |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 319 | DCHECK(in_stack_map_ == false) << "Mismatched Begin/End calls"; |
| 320 | DCHECK(in_inline_info_ == false) << "Mismatched Begin/End calls"; |
David Srbecky | 052f8ca | 2018-04-26 15:42:54 +0100 | [diff] [blame] | 321 | DCHECK_NE(0u, out_.size()) << "PrepareForFillIn not called before FillIn"; |
| 322 | DCHECK_EQ(region.size(), UnsignedLeb128Size(out_.size()) + out_.size()); |
| 323 | |
| 324 | uint8_t* ptr = EncodeUnsignedLeb128(region.begin(), out_.size()); |
| 325 | region.CopyFromVector(ptr - region.begin(), out_); |
Mathieu Chartier | 1a20b68 | 2017-01-31 14:25:16 -0800 | [diff] [blame] | 326 | |
David Srbecky | 049d681 | 2018-05-18 14:46:49 +0100 | [diff] [blame] | 327 | // Verify all written data (usually only in debug builds). |
| 328 | if (kVerifyStackMaps) { |
| 329 | CodeInfo code_info(region); |
| 330 | CHECK_EQ(code_info.GetNumberOfStackMaps(), stack_maps_.size()); |
| 331 | for (const auto& dcheck : dchecks_) { |
| 332 | dcheck(code_info); |
David Srbecky | 1bbdfd7 | 2016-02-24 16:39:26 +0000 | [diff] [blame] | 333 | } |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 334 | } |
David Srbecky | 1bbdfd7 | 2016-02-24 16:39:26 +0000 | [diff] [blame] | 335 | } |
| 336 | |
Mathieu Chartier | cbcedbf | 2017-03-12 22:24:50 -0700 | [diff] [blame] | 337 | size_t StackMapStream::ComputeMethodInfoSize() const { |
David Srbecky | 052f8ca | 2018-04-26 15:42:54 +0100 | [diff] [blame] | 338 | DCHECK_NE(0u, out_.size()) << "PrepareForFillIn not called before " << __FUNCTION__; |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 339 | return MethodInfo::ComputeSize(method_infos_.size()); |
Mathieu Chartier | cbcedbf | 2017-03-12 22:24:50 -0700 | [diff] [blame] | 340 | } |
| 341 | |
Calin Juravle | c416d33 | 2015-04-23 16:01:43 +0100 | [diff] [blame] | 342 | } // namespace art |