Calin Juravle | c416d33 | 2015-04-23 16:01:43 +0100 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2015 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
Nicolas Geoffray | 5d37c15 | 2017-01-12 13:25:19 +0000 | [diff] [blame] | 16 | |
Calin Juravle | c416d33 | 2015-04-23 16:01:43 +0100 | [diff] [blame] | 17 | #include "stack_map_stream.h" |
| 18 | |
David Srbecky | 049d681 | 2018-05-18 14:46:49 +0100 | [diff] [blame] | 19 | #include <memory> |
| 20 | |
Andreas Gampe | 90b936d | 2017-01-31 08:58:55 -0800 | [diff] [blame] | 21 | #include "art_method-inl.h" |
David Srbecky | 45aa598 | 2016-03-18 02:15:09 +0000 | [diff] [blame] | 22 | #include "base/stl_util.h" |
David Sehr | 9e734c7 | 2018-01-04 17:56:19 -0800 | [diff] [blame] | 23 | #include "dex/dex_file_types.h" |
Nicolas Geoffray | fbdfa6d | 2017-02-03 10:43:13 +0000 | [diff] [blame] | 24 | #include "optimizing/optimizing_compiler.h" |
Nicolas Geoffray | 5d37c15 | 2017-01-12 13:25:19 +0000 | [diff] [blame] | 25 | #include "runtime.h" |
| 26 | #include "scoped_thread_state_change-inl.h" |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 27 | #include "stack_map.h" |
Nicolas Geoffray | 5d37c15 | 2017-01-12 13:25:19 +0000 | [diff] [blame] | 28 | |
Calin Juravle | c416d33 | 2015-04-23 16:01:43 +0100 | [diff] [blame] | 29 | namespace art { |
| 30 | |
// When true (debug builds), each Begin/End/Add call queues a verification
// lambda in dchecks_; FillInCodeInfo() later replays them against the final
// encoded CodeInfo to check that the written data round-trips.
constexpr static bool kVerifyStackMaps = kIsDebugBuild;
| 32 | |
David Srbecky | d02b23f | 2018-05-29 23:27:22 +0100 | [diff] [blame] | 33 | uint32_t StackMapStream::GetStackMapNativePcOffset(size_t i) { |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 34 | return StackMap::UnpackNativePc(stack_maps_[i][StackMap::kPackedNativePc], instruction_set_); |
David Srbecky | d02b23f | 2018-05-29 23:27:22 +0100 | [diff] [blame] | 35 | } |
| 36 | |
| 37 | void StackMapStream::SetStackMapNativePcOffset(size_t i, uint32_t native_pc_offset) { |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 38 | stack_maps_[i][StackMap::kPackedNativePc] = |
| 39 | StackMap::PackNativePc(native_pc_offset, instruction_set_); |
David Srbecky | d02b23f | 2018-05-29 23:27:22 +0100 | [diff] [blame] | 40 | } |
| 41 | |
David Srbecky | f6ba5b3 | 2018-06-23 22:05:49 +0100 | [diff] [blame^] | 42 | void StackMapStream::BeginMethod(size_t frame_size_in_bytes, |
| 43 | size_t core_spill_mask, |
| 44 | size_t fp_spill_mask, |
| 45 | uint32_t num_dex_registers) { |
| 46 | DCHECK(!in_method_) << "Mismatched Begin/End calls"; |
| 47 | in_method_ = true; |
| 48 | DCHECK_EQ(frame_size_in_bytes_, 0u) << "BeginMethod was already called"; |
| 49 | |
| 50 | frame_size_in_bytes_ = frame_size_in_bytes; |
| 51 | core_spill_mask_ = core_spill_mask; |
| 52 | fp_spill_mask_ = fp_spill_mask; |
| 53 | num_dex_registers_ = num_dex_registers; |
| 54 | } |
| 55 | |
| 56 | void StackMapStream::EndMethod() { |
| 57 | DCHECK(in_method_) << "Mismatched Begin/End calls"; |
| 58 | in_method_ = false; |
| 59 | } |
| 60 | |
// Opens a new stack map entry at the given dex/native PC pair.
// The entry is staged in current_stack_map_ and only appended to stack_maps_
// by EndStackMapEntry(); dex registers and inline infos added in between
// become part of this entry.
void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
                                        uint32_t native_pc_offset,
                                        uint32_t register_mask,
                                        BitVector* stack_mask,
                                        StackMap::Kind kind) {
  DCHECK(in_method_) << "Call BeginMethod first";
  DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = true;

  // Start from a fresh entry; unset columns keep their default (no value).
  current_stack_map_ = BitTableBuilder<StackMap>::Entry();
  current_stack_map_[StackMap::kKind] = static_cast<uint32_t>(kind);
  current_stack_map_[StackMap::kPackedNativePc] =
      StackMap::PackNativePc(native_pc_offset, instruction_set_);
  current_stack_map_[StackMap::kDexPc] = dex_pc;
  if (stack_maps_.size() > 0) {
    // Check that non-catch stack maps are sorted by pc.
    // Catch stack maps are at the end and may be unordered.
    if (stack_maps_.back()[StackMap::kKind] == StackMap::Kind::Catch) {
      DCHECK(current_stack_map_[StackMap::kKind] == StackMap::Kind::Catch);
    } else if (current_stack_map_[StackMap::kKind] != StackMap::Kind::Catch) {
      DCHECK_LE(stack_maps_.back()[StackMap::kPackedNativePc],
                current_stack_map_[StackMap::kPackedNativePc]);
    }
  }
  if (register_mask != 0) {
    // Store the mask shifted down to its lowest set bit plus the shift amount;
    // deduplicate identical (value, shift) pairs in the RegisterMask table.
    uint32_t shift = LeastSignificantBit(register_mask);
    BitTableBuilder<RegisterMask>::Entry entry;
    entry[RegisterMask::kValue] = register_mask >> shift;
    entry[RegisterMask::kShift] = shift;
    current_stack_map_[StackMap::kRegisterMaskIndex] = register_masks_.Dedup(&entry);
  }
  // The compiler assumes the bit vector will be read during PrepareForFillIn(),
  // and it might modify the data before that. Therefore, just store the pointer.
  // See ClearSpillSlotsFromLoopPhisInStackMap in code_generator.h.
  lazy_stack_masks_.push_back(stack_mask);
  // Reset per-entry accumulators for inline infos and dex registers.
  current_inline_infos_.clear();
  current_dex_registers_.clear();
  expected_num_dex_registers_ = num_dex_registers_;

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    // Create lambda method, which will be executed at the very end to verify data.
    // Parameters and local variables will be captured(stored) by the lambda "[=]".
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      // Entries must be findable by the lookup appropriate for their kind.
      if (kind == StackMap::Kind::Default || kind == StackMap::Kind::OSR) {
        StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset,
                                                                    instruction_set_);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      } else if (kind == StackMap::Kind::Catch) {
        StackMap stack_map = code_info.GetCatchStackMapForDexPc(dex_pc);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      }
      // The stored fields must round-trip through the encoded CodeInfo.
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      CHECK_EQ(stack_map.GetNativePcOffset(instruction_set_), native_pc_offset);
      CHECK_EQ(stack_map.GetKind(), static_cast<uint32_t>(kind));
      CHECK_EQ(stack_map.GetDexPc(), dex_pc);
      CHECK_EQ(code_info.GetRegisterMaskOf(stack_map), register_mask);
      // The encoded stack mask may be padded, but every bit of the input mask
      // must be preserved and extra bits must be zero.
      BitMemoryRegion seen_stack_mask = code_info.GetStackMaskOf(stack_map);
      CHECK_GE(seen_stack_mask.size_in_bits(), stack_mask ? stack_mask->GetNumberOfBits() : 0);
      for (size_t b = 0; b < seen_stack_mask.size_in_bits(); b++) {
        CHECK_EQ(seen_stack_mask.LoadBit(b), stack_mask != nullptr && stack_mask->IsBitSet(b));
      }
    });
  }
}
| 126 | |
// Closes the current stack map entry: finalizes its inline infos and dex
// register map, then appends it to the stack_maps_ table.
void StackMapStream::EndStackMapEntry() {
  DCHECK(in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = false;

  // Generate index into the InlineInfo table.
  size_t inlining_depth = current_inline_infos_.size();
  if (!current_inline_infos_.empty()) {
    // Mark the deepest frame so readers know where this entry's chain ends,
    // then deduplicate the whole run of inline infos.
    current_inline_infos_.back()[InlineInfo::kIsLast] = InlineInfo::kLast;
    current_stack_map_[StackMap::kInlineInfoIndex] =
        inline_infos_.Dedup(current_inline_infos_.data(), current_inline_infos_.size());
  }

  // Generate delta-compressed dex register map.
  size_t num_dex_registers = current_dex_registers_.size();
  if (!current_dex_registers_.empty()) {
    DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
    CreateDexRegisterMap();
  }

  stack_maps_.Add(current_stack_map_);

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size() - 1;
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      // Presence flags and inline depth must match what was recorded.
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      CHECK_EQ(stack_map.HasDexRegisterMap(), (num_dex_registers != 0));
      CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
      CHECK_EQ(code_info.GetInlineDepthOf(stack_map), inlining_depth);
    });
  }
}
| 158 | |
Mathieu Chartier | d776ff0 | 2017-01-17 09:32:18 -0800 | [diff] [blame] | 159 | void StackMapStream::AddInvoke(InvokeType invoke_type, uint32_t dex_method_index) { |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 160 | uint32_t packed_native_pc = current_stack_map_[StackMap::kPackedNativePc]; |
David Srbecky | 049d681 | 2018-05-18 14:46:49 +0100 | [diff] [blame] | 161 | size_t invoke_info_index = invoke_infos_.size(); |
David Srbecky | cf7833e | 2018-06-14 16:45:22 +0100 | [diff] [blame] | 162 | BitTableBuilder<InvokeInfo>::Entry entry; |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 163 | entry[InvokeInfo::kPackedNativePc] = packed_native_pc; |
| 164 | entry[InvokeInfo::kInvokeType] = invoke_type; |
| 165 | entry[InvokeInfo::kMethodInfoIndex] = method_infos_.Dedup({dex_method_index}); |
| 166 | invoke_infos_.Add(entry); |
David Srbecky | 049d681 | 2018-05-18 14:46:49 +0100 | [diff] [blame] | 167 | |
| 168 | if (kVerifyStackMaps) { |
| 169 | dchecks_.emplace_back([=](const CodeInfo& code_info) { |
| 170 | InvokeInfo invoke_info = code_info.GetInvokeInfo(invoke_info_index); |
| 171 | CHECK_EQ(invoke_info.GetNativePcOffset(instruction_set_), |
| 172 | StackMap::UnpackNativePc(packed_native_pc, instruction_set_)); |
| 173 | CHECK_EQ(invoke_info.GetInvokeType(), invoke_type); |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 174 | CHECK_EQ(method_infos_[invoke_info.GetMethodInfoIndex()][0], dex_method_index); |
David Srbecky | 049d681 | 2018-05-18 14:46:49 +0100 | [diff] [blame] | 175 | }); |
| 176 | } |
Mathieu Chartier | d776ff0 | 2017-01-17 09:32:18 -0800 | [diff] [blame] | 177 | } |
| 178 | |
// Opens one inline frame for the current stack map entry. Frames nest: each
// call pushes one InlineInfo onto current_inline_infos_ and must be closed
// with EndInlineInfoEntry() before the next sibling/outer call.
void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
                                          uint32_t dex_pc,
                                          uint32_t num_dex_registers,
                                          const DexFile* outer_dex_file) {
  DCHECK(in_stack_map_) << "Call BeginStackMapEntry first";
  DCHECK(!in_inline_info_) << "Mismatched Begin/End calls";
  in_inline_info_ = true;
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());

  // Dex registers of inlined frames are appended after the outer frame's,
  // so the expected total grows with each inline frame.
  expected_num_dex_registers_ += num_dex_registers;

  BitTableBuilder<InlineInfo>::Entry entry;
  entry[InlineInfo::kIsLast] = InlineInfo::kMore;  // Fixed up in EndStackMapEntry().
  entry[InlineInfo::kDexPc] = dex_pc;
  // Cumulative count: registers of this frame plus all enclosing frames.
  entry[InlineInfo::kNumberOfDexRegisters] = static_cast<uint32_t>(expected_num_dex_registers_);
  if (EncodeArtMethodInInlineInfo(method)) {
    // Store the ArtMethod pointer directly, split into two 32-bit columns.
    entry[InlineInfo::kArtMethodHi] = High32Bits(reinterpret_cast<uintptr_t>(method));
    entry[InlineInfo::kArtMethodLo] = Low32Bits(reinterpret_cast<uintptr_t>(method));
  } else {
    // Otherwise store an index into the MethodInfo table; the method index is
    // only meaningful relative to the outer dex file, hence the debug check.
    if (dex_pc != static_cast<uint32_t>(-1) && kIsDebugBuild) {
      ScopedObjectAccess soa(Thread::Current());
      DCHECK(IsSameDexFile(*outer_dex_file, *method->GetDexFile()));
    }
    uint32_t dex_method_index = method->GetDexMethodIndexUnchecked();
    entry[InlineInfo::kMethodInfoIndex] = method_infos_.Dedup({dex_method_index});
  }
  current_inline_infos_.push_back(entry);

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    size_t depth = current_inline_infos_.size() - 1;
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      // The inline frame must be readable back at the same depth with the
      // same dex pc and the same method encoding.
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      InlineInfo inline_info = code_info.GetInlineInfoAtDepth(stack_map, depth);
      CHECK_EQ(inline_info.GetDexPc(), dex_pc);
      bool encode_art_method = EncodeArtMethodInInlineInfo(method);
      CHECK_EQ(inline_info.EncodesArtMethod(), encode_art_method);
      if (encode_art_method) {
        CHECK_EQ(inline_info.GetArtMethod(), method);
      } else {
        CHECK_EQ(method_infos_[inline_info.GetMethodInfoIndex()][0],
                 method->GetDexMethodIndexUnchecked());
      }
    });
  }
}
| 225 | |
| 226 | void StackMapStream::EndInlineInfoEntry() { |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 227 | DCHECK(in_inline_info_) << "Mismatched Begin/End calls"; |
| 228 | in_inline_info_ = false; |
| 229 | DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size()); |
Calin Juravle | c416d33 | 2015-04-23 16:01:43 +0100 | [diff] [blame] | 230 | } |
| 231 | |
// Create delta-compressed dex register map based on the current list of DexRegisterLocations.
// All dex registers for a stack map are concatenated - inlined registers are just appended.
void StackMapStream::CreateDexRegisterMap() {
  // These are fields rather than local variables so that we can reuse the reserved memory.
  temp_dex_register_mask_.ClearAllBits();
  temp_dex_register_map_.clear();

  // Ensure that the arrays that hold previous state are big enough to be safely indexed below.
  if (previous_dex_registers_.size() < current_dex_registers_.size()) {
    previous_dex_registers_.resize(current_dex_registers_.size(), DexRegisterLocation::None());
    dex_register_timestamp_.resize(current_dex_registers_.size(), 0u);
  }

  // Set bit in the mask for each register that has been changed since the previous stack map.
  // Modified registers are stored in the catalogue and the catalogue index added to the list.
  for (size_t i = 0; i < current_dex_registers_.size(); i++) {
    DexRegisterLocation reg = current_dex_registers_[i];
    // Distance is difference between this index and the index of last modification.
    uint32_t distance = stack_maps_.size() - dex_register_timestamp_[i];
    // Re-encode even unchanged registers once the last explicit encoding is
    // too far back, which bounds how far a reader must search backwards.
    if (previous_dex_registers_[i] != reg || distance > kMaxDexRegisterMapSearchDistance) {
      BitTableBuilder<DexRegisterInfo>::Entry entry;
      entry[DexRegisterInfo::kKind] = static_cast<uint32_t>(reg.GetKind());
      entry[DexRegisterInfo::kPackedValue] =
          DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue());
      // Dead registers are flagged in the mask but get no catalog entry.
      uint32_t index = reg.IsLive() ? dex_register_catalog_.Dedup(&entry) : kNoValue;
      temp_dex_register_mask_.SetBit(i);
      temp_dex_register_map_.push_back({index});
      previous_dex_registers_[i] = reg;
      dex_register_timestamp_[i] = stack_maps_.size();
    }
  }

  // Set the mask and map for the current StackMap (which includes inlined registers).
  if (temp_dex_register_mask_.GetNumberOfBits() != 0) {
    current_stack_map_[StackMap::kDexRegisterMaskIndex] =
        dex_register_masks_.Dedup(temp_dex_register_mask_.GetRawStorage(),
                                  temp_dex_register_mask_.GetNumberOfBits());
  }
  if (!current_dex_registers_.empty()) {
    current_stack_map_[StackMap::kDexRegisterMapIndex] =
        dex_register_maps_.Dedup(temp_dex_register_map_.data(),
                                 temp_dex_register_map_.size());
  }

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    uint32_t depth = current_inline_infos_.size();
    // We need to make copy of the current registers for later (when the check is run).
    auto expected_dex_registers = std::make_shared<dchecked_vector<DexRegisterLocation>>(
        current_dex_registers_.begin(), current_dex_registers_.end());
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      // Decoded registers (outer frame first, then each inline depth) must
      // reproduce the full concatenated list we encoded.
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      uint32_t expected_reg = 0;
      for (DexRegisterLocation reg : code_info.GetDexRegisterMapOf(stack_map)) {
        CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
      }
      for (uint32_t d = 0; d < depth; d++) {
        for (DexRegisterLocation reg : code_info.GetDexRegisterMapAtDepth(d, stack_map)) {
          CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
        }
      }
      CHECK_EQ(expected_reg, expected_dex_registers->size());
    });
  }
}
| 297 | |
Mathieu Chartier | cbcedbf | 2017-03-12 22:24:50 -0700 | [diff] [blame] | 298 | void StackMapStream::FillInMethodInfo(MemoryRegion region) { |
| 299 | { |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 300 | MethodInfo info(region.begin(), method_infos_.size()); |
| 301 | for (size_t i = 0; i < method_infos_.size(); ++i) { |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 302 | info.SetMethodIndex(i, method_infos_[i][0]); |
Mathieu Chartier | cbcedbf | 2017-03-12 22:24:50 -0700 | [diff] [blame] | 303 | } |
| 304 | } |
David Srbecky | 049d681 | 2018-05-18 14:46:49 +0100 | [diff] [blame] | 305 | if (kVerifyStackMaps) { |
Mathieu Chartier | cbcedbf | 2017-03-12 22:24:50 -0700 | [diff] [blame] | 306 | // Check the data matches. |
| 307 | MethodInfo info(region.begin()); |
| 308 | const size_t count = info.NumMethodIndices(); |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 309 | DCHECK_EQ(count, method_infos_.size()); |
Mathieu Chartier | cbcedbf | 2017-03-12 22:24:50 -0700 | [diff] [blame] | 310 | for (size_t i = 0; i < count; ++i) { |
David Srbecky | f325e28 | 2018-06-13 15:02:32 +0100 | [diff] [blame] | 311 | DCHECK_EQ(info.GetMethodIndex(i), method_infos_[i][0]); |
Mathieu Chartier | cbcedbf | 2017-03-12 22:24:50 -0700 | [diff] [blame] | 312 | } |
| 313 | } |
| 314 | } |
| 315 | |
// Encodes all accumulated tables into out_ and returns the total number of
// bytes FillInCodeInfo() will need (LEB128 size prefix + encoded data).
// The encode order below must match the order CodeInfo decodes the tables.
size_t StackMapStream::PrepareForFillIn() {
  DCHECK_EQ(out_.size(), 0u);

  // Read the stack masks now. The compiler might have updated them.
  for (size_t i = 0; i < lazy_stack_masks_.size(); i++) {
    BitVector* stack_mask = lazy_stack_masks_[i];
    if (stack_mask != nullptr && stack_mask->GetNumberOfBits() != 0) {
      stack_maps_[i][StackMap::kStackMaskIndex] =
          stack_masks_.Dedup(stack_mask->GetRawStorage(), stack_mask->GetNumberOfBits());
    }
  }

  // Method-level header fields, LEB128-encoded, then the bit tables.
  EncodeUnsignedLeb128(&out_, frame_size_in_bytes_);
  EncodeUnsignedLeb128(&out_, core_spill_mask_);
  EncodeUnsignedLeb128(&out_, fp_spill_mask_);
  EncodeUnsignedLeb128(&out_, num_dex_registers_);
  // Bit-packed tables are appended after the header bytes already in out_.
  BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&out_, out_.size() * kBitsPerByte);
  stack_maps_.Encode(out);
  register_masks_.Encode(out);
  stack_masks_.Encode(out);
  invoke_infos_.Encode(out);
  inline_infos_.Encode(out);
  dex_register_masks_.Encode(out);
  dex_register_maps_.Encode(out);
  dex_register_catalog_.Encode(out);

  return UnsignedLeb128Size(out_.size()) + out_.size();
}
| 344 | |
| 345 | void StackMapStream::FillInCodeInfo(MemoryRegion region) { |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 346 | DCHECK(in_stack_map_ == false) << "Mismatched Begin/End calls"; |
| 347 | DCHECK(in_inline_info_ == false) << "Mismatched Begin/End calls"; |
David Srbecky | 052f8ca | 2018-04-26 15:42:54 +0100 | [diff] [blame] | 348 | DCHECK_NE(0u, out_.size()) << "PrepareForFillIn not called before FillIn"; |
| 349 | DCHECK_EQ(region.size(), UnsignedLeb128Size(out_.size()) + out_.size()); |
| 350 | |
| 351 | uint8_t* ptr = EncodeUnsignedLeb128(region.begin(), out_.size()); |
| 352 | region.CopyFromVector(ptr - region.begin(), out_); |
Mathieu Chartier | 1a20b68 | 2017-01-31 14:25:16 -0800 | [diff] [blame] | 353 | |
David Srbecky | 049d681 | 2018-05-18 14:46:49 +0100 | [diff] [blame] | 354 | // Verify all written data (usually only in debug builds). |
| 355 | if (kVerifyStackMaps) { |
| 356 | CodeInfo code_info(region); |
| 357 | CHECK_EQ(code_info.GetNumberOfStackMaps(), stack_maps_.size()); |
| 358 | for (const auto& dcheck : dchecks_) { |
| 359 | dcheck(code_info); |
David Srbecky | 1bbdfd7 | 2016-02-24 16:39:26 +0000 | [diff] [blame] | 360 | } |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 361 | } |
David Srbecky | 1bbdfd7 | 2016-02-24 16:39:26 +0000 | [diff] [blame] | 362 | } |
| 363 | |
Mathieu Chartier | cbcedbf | 2017-03-12 22:24:50 -0700 | [diff] [blame] | 364 | size_t StackMapStream::ComputeMethodInfoSize() const { |
David Srbecky | 052f8ca | 2018-04-26 15:42:54 +0100 | [diff] [blame] | 365 | DCHECK_NE(0u, out_.size()) << "PrepareForFillIn not called before " << __FUNCTION__; |
David Srbecky | 71ec1cc | 2018-05-18 15:57:25 +0100 | [diff] [blame] | 366 | return MethodInfo::ComputeSize(method_infos_.size()); |
Mathieu Chartier | cbcedbf | 2017-03-12 22:24:50 -0700 | [diff] [blame] | 367 | } |
| 368 | |
Calin Juravle | c416d33 | 2015-04-23 16:01:43 +0100 | [diff] [blame] | 369 | } // namespace art |