Add fast path for exception vreg copying.
There is no need to decode the CodeInfo again inside GetVReg when the
caller has already done so: the caller can now pass the already-decoded
DexRegisterLocation down, and GetVReg uses it directly.
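For illustration, a rough sketch of the caller-side pattern this enables
(hypothetical snippet, not part of this change; method_header,
native_pc_offset, number_of_vregs and vreg_kind stand in for whatever the
real caller has in scope):

  // Decode the CodeInfo once, up front.
  CodeInfo code_info(method_header);
  StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
  DexRegisterMap vreg_map = code_info.GetDexRegisterMapOf(stack_map);
  for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
    uint32_t value = 0;
    // Hand the already-decoded location to GetVReg so it can skip
    // decoding the CodeInfo again for every register.
    stack_visitor->GetVReg(method, vreg, vreg_kind, &value, vreg_map[vreg]);
  }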
Test: ./art/test.py -b --host --64 --interpreter
Change-Id: I0f8941f43acdc0f2c43b78ef87d3e796e320c959
diff --git a/runtime/stack.h b/runtime/stack.h
index aa741df..ad73e75 100644
--- a/runtime/stack.h
+++ b/runtime/stack.h
@@ -17,6 +17,7 @@
#ifndef ART_RUNTIME_STACK_H_
#define ART_RUNTIME_STACK_H_
+#include <optional>
#include <stdint.h>
#include <string>
@@ -223,7 +224,12 @@
bool GetNextMethodAndDexPc(ArtMethod** next_method, uint32_t* next_dex_pc)
REQUIRES_SHARED(Locks::mutator_lock_);
- bool GetVReg(ArtMethod* m, uint16_t vreg, VRegKind kind, uint32_t* val) const
+ bool GetVReg(ArtMethod* m,
+ uint16_t vreg,
+ VRegKind kind,
+ uint32_t* val,
+ std::optional<DexRegisterLocation> location =
+ std::optional<DexRegisterLocation>()) const
REQUIRES_SHARED(Locks::mutator_lock_);
bool GetVRegPair(ArtMethod* m, uint16_t vreg, VRegKind kind_lo, VRegKind kind_hi,
@@ -330,6 +336,8 @@
VRegKind kind_lo, VRegKind kind_hi,
uint64_t* val) const
REQUIRES_SHARED(Locks::mutator_lock_);
+ bool GetVRegFromOptimizedCode(DexRegisterLocation location, VRegKind kind, uint32_t* val) const
+ REQUIRES_SHARED(Locks::mutator_lock_);
bool GetRegisterPairIfAccessible(uint32_t reg_lo, uint32_t reg_hi, VRegKind kind_lo,
uint64_t* val) const
REQUIRES_SHARED(Locks::mutator_lock_);