blob: 9d30115bb1d44afe0c37c9375c2ee65eefb92221 [file] [log] [blame]
Elliott Hughes68e76522011-10-05 13:22:16 -07001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Brian Carlstromfc0e3212013-07-17 14:40:12 -070017#ifndef ART_RUNTIME_STACK_H_
18#define ART_RUNTIME_STACK_H_
Elliott Hughes68e76522011-10-05 13:22:16 -070019
Elliott Hughes68e76522011-10-05 13:22:16 -070020#include <stdint.h>
Ian Rogers40e3bac2012-11-20 00:09:14 -080021#include <string>
Elliott Hughes68e76522011-10-05 13:22:16 -070022
Andreas Gampe03ec9302015-08-27 17:41:47 -070023#include "base/macros.h"
24#include "base/mutex.h"
Nicolas Geoffray524e7ea2015-10-16 17:13:34 +010025#include "quick/quick_method_frame_info.h"
David Srbecky93bd3612018-07-02 19:30:18 +010026#include "stack_map.h"
Ian Rogerse63db272014-07-15 15:36:11 -070027
Elliott Hughes68e76522011-10-05 13:22:16 -070028namespace art {
29
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080030namespace mirror {
Igor Murashkin2ffb7032017-11-08 13:35:21 -080031class Object;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080032} // namespace mirror
33
Mathieu Chartiere401d142015-04-22 13:56:20 -070034class ArtMethod;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080035class Context;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -070036class HandleScope;
Nicolas Geoffray524e7ea2015-10-16 17:13:34 +010037class OatQuickMethodHeader;
Nicolas Geoffray57f61612015-05-15 13:20:41 +010038class ShadowFrame;
Elliott Hughes68e76522011-10-05 13:22:16 -070039class Thread;
Vladimir Marko3a21e382016-09-02 12:38:38 +010040union JValue;
Elliott Hughes68e76522011-10-05 13:22:16 -070041
// The kind of vreg being accessed in calls to Set/GetVReg.
enum VRegKind {
  kReferenceVReg,      // Object reference.
  kIntVReg,            // 32-bit integer value.
  kFloatVReg,          // 32-bit float value.
  kLongLoVReg,         // Low half of a wide (long) value.
  kLongHiVReg,         // High half of a wide (long) value.
  kDoubleLoVReg,       // Low half of a wide (double) value.
  kDoubleHiVReg,       // High half of a wide (double) value.
  kConstant,           // Constant known precisely.
  kImpreciseConstant,  // Constant known imprecisely.
  kUndefined,          // No known kind for this vreg.
};
std::ostream& operator<<(std::ostream& os, const VRegKind& rhs);
Ian Rogers2bcb4a42012-11-08 10:39:18 -080056
// Size in bytes of the should_deoptimize flag on stack.
// We just need 4 bytes for our purpose regardless of the architecture. Frame size
// calculation will automatically do alignment for the final frame size.
static constexpr size_t kShouldDeoptimizeFlagSize = 4;
61
Andreas Gampe36a296f2017-06-13 14:11:11 -070062/*
63 * Our current stack layout.
64 * The Dalvik registers come first, followed by the
65 * Method*, followed by other special temporaries if any, followed by
 * regular compiler temporary. As of now we only have the Method* as
 * a special compiler temporary.
68 * A compiler temporary can be thought of as a virtual register that
69 * does not exist in the dex but holds intermediate values to help
70 * optimizations and code generation. A special compiler temporary is
71 * one whose location in frame is well known while non-special ones
72 * do not have a requirement on location in frame as long as code
73 * generator itself knows how to access them.
74 *
75 * TODO: Update this documentation?
76 *
77 * +-------------------------------+
78 * | IN[ins-1] | {Note: resides in caller's frame}
79 * | . |
80 * | IN[0] |
81 * | caller's ArtMethod | ... ArtMethod*
82 * +===============================+ {Note: start of callee's frame}
83 * | core callee-save spill | {variable sized}
84 * +-------------------------------+
85 * | fp callee-save spill |
86 * +-------------------------------+
87 * | filler word | {For compatibility, if V[locals-1] used as wide
88 * +-------------------------------+
89 * | V[locals-1] |
90 * | V[locals-2] |
91 * | . |
92 * | . | ... (reg == 2)
93 * | V[1] | ... (reg == 1)
94 * | V[0] | ... (reg == 0) <---- "locals_start"
95 * +-------------------------------+
96 * | stack alignment padding | {0 to (kStackAlignWords-1) of padding}
97 * +-------------------------------+
98 * | Compiler temp region | ... (reg >= max_num_special_temps)
99 * | . |
100 * | . |
101 * | V[max_num_special_temps + 1] |
102 * | V[max_num_special_temps + 0] |
103 * +-------------------------------+
104 * | OUT[outs-1] |
105 * | OUT[outs-2] |
106 * | . |
107 * | OUT[0] |
108 * | ArtMethod* | ... (reg == num_total_code_regs == special_temp_value) <<== sp, 16-byte aligned
109 * +===============================+
110 */
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -0800111
// Walks a thread's managed stack one frame at a time, covering both quick
// (compiled) frames and shadow (interpreter) frames. Concrete subclasses
// implement VisitFrame(), which is invoked once per frame by WalkStack();
// while inside VisitFrame() the accessors below describe the frame being
// visited (its method, dex pc, vregs, return pc, etc.).
class StackVisitor {
 public:
  // This enum defines a flag to control whether inlined frames are included
  // when walking the stack.
  enum class StackWalkKind {
    kIncludeInlinedFrames,
    kSkipInlinedFrames,
  };

 protected:
  // Protected: only concrete subclasses may construct a visitor.
  StackVisitor(Thread* thread,
               Context* context,
               StackWalkKind walk_kind,
               bool check_suspended = true);

  // Reads a physical register's value into *val; returns false if the
  // register is not accessible in the current context.
  bool GetRegisterIfAccessible(uint32_t reg, VRegKind kind, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

 public:
  virtual ~StackVisitor() {}
  StackVisitor(const StackVisitor&) = default;
  StackVisitor(StackVisitor&&) = default;

  // Return 'true' if we should continue to visit more frames, 'false' to stop.
  virtual bool VisitFrame() REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Whether WalkStack() counts managed/native transition records as frames
  // when tracking depth.
  enum class CountTransitions {
    kYes,
    kNo,
  };

  // Drives the walk: calls VisitFrame() for each frame until it returns false
  // or the stack is exhausted.
  template <CountTransitions kCount = CountTransitions::kYes>
  void WalkStack(bool include_transitions = false) REQUIRES_SHARED(Locks::mutator_lock_);

  // Convenience helper function to walk the stack with a lambda as a visitor.
  // The lambda receives a StackVisitor* and returns bool, with the same
  // continue/stop contract as VisitFrame().
  template <CountTransitions kCountTransitions = CountTransitions::kYes,
            typename T>
  ALWAYS_INLINE static void WalkStack(const T& fn,
                                      Thread* thread,
                                      Context* context,
                                      StackWalkKind walk_kind,
                                      bool check_suspended = true,
                                      bool include_transitions = false)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // Local adapter that forwards each VisitFrame() call to the lambda.
    class LambdaStackVisitor : public StackVisitor {
     public:
      LambdaStackVisitor(const T& fn,
                         Thread* thread,
                         Context* context,
                         StackWalkKind walk_kind,
                         bool check_suspended = true)
          : StackVisitor(thread, context, walk_kind, check_suspended), fn_(fn) {}

      bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
        return fn_(this);
      }

     private:
      T fn_;
    };
    LambdaStackVisitor visitor(fn, thread, context, walk_kind, check_suspended);
    visitor.template WalkStack<kCountTransitions>(include_transitions);
  }

  // The thread whose stack is being walked.
  Thread* GetThread() const {
    return thread_;
  }

  // Method of the frame currently being visited (including inlined methods).
  ArtMethod* GetMethod() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Sets this stack frame's method pointer. This requires a full lock of the MutatorLock. This
  // doesn't work with inlined methods.
  void SetMethod(ArtMethod* method) REQUIRES(Locks::mutator_lock_);

  // Method of the outermost (non-inlined) frame at the current quick frame.
  ArtMethod* GetOuterMethod() const {
    return *GetCurrentQuickFrame();
  }

  // True when the current frame is an interpreter (shadow) frame.
  bool IsShadowFrame() const {
    return cur_shadow_frame_ != nullptr;
  }

  uint32_t GetDexPc(bool abort_on_failure = true) const REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::Object* GetThisObject() const REQUIRES_SHARED(Locks::mutator_lock_);

  size_t GetNativePcOffset() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the height of the stack in the managed stack frames, including transitions.
  size_t GetFrameHeight() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetNumFrames() - cur_depth_ - 1;
  }

  // Returns a frame ID for JDWP use, starting from 1.
  size_t GetFrameId() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFrameHeight() + 1;
  }

  // Total number of frames on the stack; computed lazily on first use
  // (num_frames_ == 0 is the "not yet computed" sentinel).
  size_t GetNumFrames() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (num_frames_ == 0) {
      num_frames_ = ComputeNumFrames(thread_, walk_kind_);
    }
    return num_frames_;
  }

  // Zero-based depth of the frame currently being visited.
  size_t GetFrameDepth() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return cur_depth_;
  }

  // Get the method and dex pc immediately after the one that's currently being visited.
  bool GetNextMethodAndDexPc(ArtMethod** next_method, uint32_t* next_dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Reads a dex virtual register of the current frame into *val; returns
  // false on failure.
  bool GetVReg(ArtMethod* m, uint16_t vreg, VRegKind kind, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Reads a wide (two-register) value; kind_lo/kind_hi describe the halves.
  bool GetVRegPair(ArtMethod* m, uint16_t vreg, VRegKind kind_lo, VRegKind kind_hi,
                   uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Values will be set in debugger shadow frames. Debugger will make sure deoptimization
  // is triggered to make the values effective.
  bool SetVReg(ArtMethod* m, uint16_t vreg, uint32_t new_value, VRegKind kind)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Values will be set in debugger shadow frames. Debugger will make sure deoptimization
  // is triggered to make the values effective.
  bool SetVRegPair(ArtMethod* m,
                   uint16_t vreg,
                   uint64_t new_value,
                   VRegKind kind_lo,
                   VRegKind kind_hi)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Address of the given general-purpose register in the saved context.
  uintptr_t* GetGPRAddress(uint32_t reg) const;

  uintptr_t GetReturnPc() const REQUIRES_SHARED(Locks::mutator_lock_);

  void SetReturnPc(uintptr_t new_ret_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  // True while visiting a frame that was inlined into the current quick frame.
  bool IsInInlinedFrame() const {
    return !current_inline_frames_.empty();
  }

  // Innermost not-yet-visited inline frame; only valid when IsInInlinedFrame().
  InlineInfo GetCurrentInlinedFrame() const {
    return current_inline_frames_.back();
  }

  uintptr_t GetCurrentQuickFramePc() const {
    return cur_quick_frame_pc_;
  }

  ArtMethod** GetCurrentQuickFrame() const {
    return cur_quick_frame_;
  }

  ShadowFrame* GetCurrentShadowFrame() const {
    return cur_shadow_frame_;
  }

  HandleScope* GetCurrentHandleScope(size_t pointer_size) const {
    ArtMethod** sp = GetCurrentQuickFrame();
    // Skip ArtMethod*; handle scope comes next.
    return reinterpret_cast<HandleScope*>(reinterpret_cast<uintptr_t>(sp) + pointer_size);
  }

  // Human-readable description of the current location, for logging/debugging.
  std::string DescribeLocation() const REQUIRES_SHARED(Locks::mutator_lock_);

  static size_t ComputeNumFrames(Thread* thread, StackWalkKind walk_kind)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static void DescribeStack(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_);

  const OatQuickMethodHeader* GetCurrentOatQuickMethodHeader() const {
    return cur_oat_quick_method_header_;
  }

  QuickMethodFrameInfo GetCurrentQuickFrameInfo() const REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Private constructor known in the case that num_frames_ has already been computed.
  StackVisitor(Thread* thread,
               Context* context,
               StackWalkKind walk_kind,
               size_t num_frames,
               bool check_suspended = true)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Dispatchers selecting the FPR or GPR variant based on is_float.
  bool IsAccessibleRegister(uint32_t reg, bool is_float) const {
    return is_float ? IsAccessibleFPR(reg) : IsAccessibleGPR(reg);
  }
  uintptr_t GetRegister(uint32_t reg, bool is_float) const {
    DCHECK(IsAccessibleRegister(reg, is_float));
    return is_float ? GetFPR(reg) : GetGPR(reg);
  }

  bool IsAccessibleGPR(uint32_t reg) const;
  uintptr_t GetGPR(uint32_t reg) const;

  bool IsAccessibleFPR(uint32_t reg) const;
  uintptr_t GetFPR(uint32_t reg) const;

  // Vreg readers specialized by frame flavor (debugger shadow frame vs.
  // optimized-code stack map); each returns false when the value is not
  // recoverable.
  bool GetVRegFromDebuggerShadowFrame(uint16_t vreg, VRegKind kind, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetVRegFromOptimizedCode(ArtMethod* m, uint16_t vreg, VRegKind kind,
                                uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool GetVRegPairFromDebuggerShadowFrame(uint16_t vreg, VRegKind kind_lo, VRegKind kind_hi,
                                          uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetVRegPairFromOptimizedCode(ArtMethod* m, uint16_t vreg,
                                    VRegKind kind_lo, VRegKind kind_hi,
                                    uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetRegisterPairIfAccessible(uint32_t reg_lo, uint32_t reg_hi, VRegKind kind_lo,
                                   uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SanityCheckFrame() const REQUIRES_SHARED(Locks::mutator_lock_);

  Thread* const thread_;
  const StackWalkKind walk_kind_;
  // Exactly one of the two "current frame" pointers is meaningful at a time:
  // cur_shadow_frame_ for interpreter frames, cur_quick_frame_ for compiled
  // frames (see IsShadowFrame()).
  ShadowFrame* cur_shadow_frame_;
  ArtMethod** cur_quick_frame_;
  uintptr_t cur_quick_frame_pc_;
  const OatQuickMethodHeader* cur_oat_quick_method_header_;
  // Lazily computed, number of frames in the stack.
  size_t num_frames_;
  // Depth of the frame we're currently at.
  size_t cur_depth_;
  // Current inlined frames of the method we are currently at.
  // We keep popping frames from the end as we visit the frames.
  BitTableRange<InlineInfo> current_inline_frames_;

 protected:
  Context* const context_;
  const bool check_suspended_;
};
351
Elliott Hughes68e76522011-10-05 13:22:16 -0700352} // namespace art
353
Brian Carlstromfc0e3212013-07-17 14:40:12 -0700354#endif // ART_RUNTIME_STACK_H_