Move mirror::ArtMethod to native
Optimizing and Quick tests are passing; devices boot.
TODO: Test and fix bugs in mips64.
Saves 16 bytes for most ArtMethods, a 7.5MB reduction in system PSS.
Some of the savings come from removing the virtual-methods and
direct-methods object arrays.
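
Throughout the diff, frame slots that previously held a 4-byte
StackReference<mirror::ArtMethod> now hold a raw ArtMethod*, which is why
the loads, stores and stack offsets below widen from 4 to 8 bytes on
64-bit targets. A minimal standalone C++ sketch of that slot-size change
(the type names are illustrative stand-ins, not ART's real definitions):

  // Illustrative stand-ins only; not ART's actual types.
  #include <cstdint>
  #include <cstdio>

  struct StackReferenceSlot { uint32_t compressed_ref_; };  // old: 4-byte compressed heap reference
  struct NativePointerSlot  { void* method_; };             // new: raw ArtMethod*, 8 bytes on 64-bit

  int main() {
    std::printf("old slot: %zu bytes, new slot: %zu bytes\n",
                sizeof(StackReferenceSlot), sizeof(NativePointerSlot));
    return 0;
  }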
Bug: 19264997
Change-Id: I622469a0cfa0e7082a2119f3d6a9491eb61e3f3d
diff --git a/runtime/arch/arch_test.cc b/runtime/arch/arch_test.cc
index 5733ab6..40e2cd3 100644
--- a/runtime/arch/arch_test.cc
+++ b/runtime/arch/arch_test.cc
@@ -16,8 +16,8 @@
#include <stdint.h>
+#include "art_method-inl.h"
#include "common_runtime_test.h"
-#include "mirror/art_method-inl.h"
#include "quick/quick_method_frame_info.h"
namespace art {
@@ -38,7 +38,7 @@
t->TransitionFromSuspendedToRunnable(); // So we can create callee-save methods.
r->SetInstructionSet(isa);
- mirror::ArtMethod* save_method = r->CreateCalleeSaveMethod();
+ ArtMethod* save_method = r->CreateCalleeSaveMethod();
r->SetCalleeSaveMethod(save_method, type);
QuickMethodFrameInfo frame_info = save_method->GetQuickFrameInfo();
EXPECT_EQ(frame_info.FrameSizeInBytes(), save_size) << "Expected and real size differs for "
diff --git a/runtime/arch/arm/context_arm.cc b/runtime/arch/arm/context_arm.cc
index c0e658c..403d348 100644
--- a/runtime/arch/arm/context_arm.cc
+++ b/runtime/arch/arm/context_arm.cc
@@ -16,8 +16,8 @@
#include "context_arm.h"
+#include "art_method-inl.h"
#include "base/bit_utils.h"
-#include "mirror/art_method-inl.h"
#include "quick/quick_method_frame_info.h"
namespace art {
@@ -36,7 +36,7 @@
}
void ArmContext::FillCalleeSaves(const StackVisitor& fr) {
- mirror::ArtMethod* method = fr.GetMethod();
+ ArtMethod* method = fr.GetMethod();
const QuickMethodFrameInfo frame_info = method->GetQuickFrameInfo();
int spill_pos = 0;
diff --git a/runtime/arch/arm/fault_handler_arm.cc b/runtime/arch/arm/fault_handler_arm.cc
index d84cb53..90b0d53 100644
--- a/runtime/arch/arm/fault_handler_arm.cc
+++ b/runtime/arch/arm/fault_handler_arm.cc
@@ -18,13 +18,13 @@
#include "fault_handler.h"
#include <sys/ucontext.h>
+
+#include "art_method-inl.h"
#include "base/macros.h"
#include "base/hex_dump.h"
#include "globals.h"
#include "base/logging.h"
#include "base/hex_dump.h"
-#include "mirror/art_method.h"
-#include "mirror/art_method-inl.h"
#include "thread.h"
#include "thread-inl.h"
@@ -65,7 +65,7 @@
}
void FaultManager::GetMethodAndReturnPcAndSp(siginfo_t* siginfo ATTRIBUTE_UNUSED, void* context,
- mirror::ArtMethod** out_method,
+ ArtMethod** out_method,
uintptr_t* out_return_pc, uintptr_t* out_sp) {
struct ucontext* uc = reinterpret_cast<struct ucontext*>(context);
struct sigcontext *sc = reinterpret_cast<struct sigcontext*>(&uc->uc_mcontext);
@@ -81,10 +81,10 @@
uintptr_t* overflow_addr = reinterpret_cast<uintptr_t*>(
reinterpret_cast<uint8_t*>(*out_sp) - GetStackOverflowReservedBytes(kArm));
if (overflow_addr == fault_addr) {
- *out_method = reinterpret_cast<mirror::ArtMethod*>(sc->arm_r0);
+ *out_method = reinterpret_cast<ArtMethod*>(sc->arm_r0);
} else {
// The method is at the top of the stack.
- *out_method = reinterpret_cast<mirror::ArtMethod*>(reinterpret_cast<uintptr_t*>(*out_sp)[0]);
+ *out_method = reinterpret_cast<ArtMethod*>(reinterpret_cast<uintptr_t*>(*out_sp)[0]);
}
// Work out the return PC. This will be the address of the instruction
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index 6e53ba4..064f5a6 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -419,7 +419,7 @@
mov r4, #SUSPEND_CHECK_INTERVAL @ reset r4 to suspend check interval
#endif
- ldr ip, [r0, #MIRROR_ART_METHOD_QUICK_CODE_OFFSET_32] @ get pointer to the code
+ ldr ip, [r0, #ART_METHOD_QUICK_CODE_OFFSET_32] @ get pointer to the code
blx ip @ call the method
mov sp, r11 @ restore the stack pointer
diff --git a/runtime/arch/arm/quick_entrypoints_cc_arm.cc b/runtime/arch/arm/quick_entrypoints_cc_arm.cc
index a3acd7e..ce531f0 100644
--- a/runtime/arch/arm/quick_entrypoints_cc_arm.cc
+++ b/runtime/arch/arm/quick_entrypoints_cc_arm.cc
@@ -14,23 +14,23 @@
* limitations under the License.
*/
-#include "mirror/art_method.h"
+#include "art_method.h"
#include "utils.h" // For RoundUp().
namespace art {
// Assembly stub that does the final part of the up-call into Java.
-extern "C" void art_quick_invoke_stub_internal(mirror::ArtMethod*, uint32_t*, uint32_t,
+extern "C" void art_quick_invoke_stub_internal(ArtMethod*, uint32_t*, uint32_t,
Thread* self, JValue* result, uint32_t, uint32_t*,
uint32_t*);
template <bool kIsStatic>
-static void quick_invoke_reg_setup(mirror::ArtMethod* method, uint32_t* args, uint32_t args_size,
+static void quick_invoke_reg_setup(ArtMethod* method, uint32_t* args, uint32_t args_size,
Thread* self, JValue* result, const char* shorty) {
// Note: We do not follow aapcs ABI in quick code for both softfp and hardfp.
uint32_t core_reg_args[4]; // r0 ~ r3
uint32_t fp_reg_args[16]; // s0 ~ s15 (d0 ~ d7)
- uint32_t gpr_index = 1; // Index into core registers. Reserve r0 for mirror::ArtMethod*.
+ uint32_t gpr_index = 1; // Index into core registers. Reserve r0 for ArtMethod*.
uint32_t fpr_index = 0; // Index into float registers.
uint32_t fpr_double_index = 0; // Index into float registers for doubles.
uint32_t arg_index = 0; // Index into argument array.
@@ -99,16 +99,16 @@
core_reg_args, fp_reg_args);
}
-// Called by art::mirror::ArtMethod::Invoke to do entry into a non-static method.
+// Called by art::ArtMethod::Invoke to do entry into a non-static method.
// TODO: migrate into an assembly implementation as with ARM64.
-extern "C" void art_quick_invoke_stub(mirror::ArtMethod* method, uint32_t* args, uint32_t args_size,
+extern "C" void art_quick_invoke_stub(ArtMethod* method, uint32_t* args, uint32_t args_size,
Thread* self, JValue* result, const char* shorty) {
quick_invoke_reg_setup<false>(method, args, args_size, self, result, shorty);
}
-// Called by art::mirror::ArtMethod::Invoke to do entry into a static method.
+// Called by art::ArtMethod::Invoke to do entry into a static method.
// TODO: migrate into an assembly implementation as with ARM64.
-extern "C" void art_quick_invoke_static_stub(mirror::ArtMethod* method, uint32_t* args,
+extern "C" void art_quick_invoke_static_stub(ArtMethod* method, uint32_t* args,
uint32_t args_size, Thread* self, JValue* result,
const char* shorty) {
quick_invoke_reg_setup<true>(method, args, args_size, self, result, shorty);
diff --git a/runtime/arch/arm64/context_arm64.cc b/runtime/arch/arm64/context_arm64.cc
index 5488f9d..60becc6 100644
--- a/runtime/arch/arm64/context_arm64.cc
+++ b/runtime/arch/arm64/context_arm64.cc
@@ -18,8 +18,8 @@
#include "context_arm64.h"
+#include "art_method-inl.h"
#include "base/bit_utils.h"
-#include "mirror/art_method-inl.h"
#include "quick/quick_method_frame_info.h"
namespace art {
@@ -38,7 +38,7 @@
}
void Arm64Context::FillCalleeSaves(const StackVisitor& fr) {
- mirror::ArtMethod* method = fr.GetMethod();
+ ArtMethod* method = fr.GetMethod();
const QuickMethodFrameInfo frame_info = method->GetQuickFrameInfo();
int spill_pos = 0;
diff --git a/runtime/arch/arm64/fault_handler_arm64.cc b/runtime/arch/arm64/fault_handler_arm64.cc
index 0448c76..3e9ad0d 100644
--- a/runtime/arch/arm64/fault_handler_arm64.cc
+++ b/runtime/arch/arm64/fault_handler_arm64.cc
@@ -16,14 +16,15 @@
#include "fault_handler.h"
+
#include <sys/ucontext.h>
+
+#include "art_method-inl.h"
#include "base/macros.h"
#include "globals.h"
#include "base/logging.h"
#include "base/hex_dump.h"
#include "registers_arm64.h"
-#include "mirror/art_method.h"
-#include "mirror/art_method-inl.h"
#include "thread.h"
#include "thread-inl.h"
@@ -53,7 +54,7 @@
}
void FaultManager::GetMethodAndReturnPcAndSp(siginfo_t* siginfo ATTRIBUTE_UNUSED, void* context,
- mirror::ArtMethod** out_method,
+ ArtMethod** out_method,
uintptr_t* out_return_pc, uintptr_t* out_sp) {
struct ucontext *uc = reinterpret_cast<struct ucontext *>(context);
struct sigcontext *sc = reinterpret_cast<struct sigcontext*>(&uc->uc_mcontext);
@@ -69,10 +70,10 @@
uintptr_t* overflow_addr = reinterpret_cast<uintptr_t*>(
reinterpret_cast<uint8_t*>(*out_sp) - GetStackOverflowReservedBytes(kArm64));
if (overflow_addr == fault_addr) {
- *out_method = reinterpret_cast<mirror::ArtMethod*>(sc->regs[0]);
+ *out_method = reinterpret_cast<ArtMethod*>(sc->regs[0]);
} else {
// The method is at the top of the stack.
- *out_method = (reinterpret_cast<StackReference<mirror::ArtMethod>* >(*out_sp)[0]).AsMirrorPtr();
+ *out_method = *reinterpret_cast<ArtMethod**>(*out_sp);
}
// Work out the return PC. This will be the address of the instruction
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index 7eb6c16..790158e 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -34,7 +34,7 @@
THIS_LOAD_REQUIRES_READ_BARRIER
// Loads appropriate callee-save-method.
- ldr wIP0, [xIP0, RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET ]
+ ldr xIP0, [xIP0, RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET ]
sub sp, sp, #176
.cfi_adjust_cfa_offset 176
@@ -98,7 +98,7 @@
THIS_LOAD_REQUIRES_READ_BARRIER
// Loads appropriate callee-save-method.
- ldr wIP0, [xIP0, RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET ]
+ ldr xIP0, [xIP0, RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET ]
sub sp, sp, #96
.cfi_adjust_cfa_offset 96
@@ -252,7 +252,7 @@
// xIP0 = (ArtMethod*) Runtime.instance_.callee_save_methods[kRefAndArgs] .
THIS_LOAD_REQUIRES_READ_BARRIER
- ldr wIP0, [xIP0, RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET ]
+ ldr xIP0, [xIP0, RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET ]
SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
@@ -481,14 +481,14 @@
.macro INVOKE_STUB_CREATE_FRAME
SAVE_SIZE=15*8 // x4, x5, x19, x20, x21, x22, x23, x24, x25, x26, x27, x28, SP, LR, FP saved.
-SAVE_SIZE_AND_METHOD=SAVE_SIZE+STACK_REFERENCE_SIZE
+SAVE_SIZE_AND_METHOD=SAVE_SIZE+8
mov x9, sp // Save stack pointer.
.cfi_register sp,x9
add x10, x2, # SAVE_SIZE_AND_METHOD // calculate size of frame.
- sub x10, sp, x10 // Calculate SP position - saves + ArtMethod* + args
+ sub x10, sp, x10 // Calculate SP position - saves + ArtMethod* + args
and x10, x10, # ~0xf // Enforce 16 byte stack alignment.
mov sp, x10 // Set new SP.
@@ -538,7 +538,7 @@
// W2 - args length
// X9 - destination address.
// W10 - temporary
- add x9, sp, #4 // Destination address is bottom of stack + null.
+ add x9, sp, #8 // Destination address is bottom of stack + null.
// Use \@ to differentiate between macro invocations.
.LcopyParams\@:
@@ -552,18 +552,14 @@
.LendCopyParams\@:
- // Store null into StackReference<Method>* at bottom of frame.
- str wzr, [sp]
-
-#if (STACK_REFERENCE_SIZE != 4)
-#error "STACK_REFERENCE_SIZE(ARM64) size not as expected."
-#endif
+ // Store null into ArtMethod* at bottom of frame.
+ str xzr, [sp]
.endm
.macro INVOKE_STUB_CALL_AND_RETURN
// load method-> METHOD_QUICK_CODE_OFFSET
- ldr x9, [x0 , #MIRROR_ART_METHOD_QUICK_CODE_OFFSET_64]
+ ldr x9, [x0, #ART_METHOD_QUICK_CODE_OFFSET_64]
// Branch to method.
blr x9
@@ -654,7 +650,7 @@
* | uint32_t out[n-1] |
* | : : | Outs
* | uint32_t out[0] |
- * | StackRef<ArtMethod> | <- SP value=null
+ * | ArtMethod* | <- SP value=null
* +----------------------+
*
* Outgoing registers:
@@ -1273,7 +1269,7 @@
.extern \entrypoint
ENTRY \name
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
- ldr w1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer
+ ldr x1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer
mov x2, xSELF // pass Thread::Current
bl \entrypoint // (uint32_t type_idx, Method* method, Thread*, SP)
RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
@@ -1285,7 +1281,7 @@
.extern \entrypoint
ENTRY \name
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
- ldr w2, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer
+ ldr x2, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer
mov x3, xSELF // pass Thread::Current
bl \entrypoint
RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
@@ -1297,7 +1293,7 @@
.extern \entrypoint
ENTRY \name
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
- ldr w3, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer
+ ldr x3, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer
mov x4, xSELF // pass Thread::Current
bl \entrypoint
RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
@@ -1360,7 +1356,7 @@
ENTRY art_quick_set64_static
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
mov x3, x1 // Store value
- ldr w1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer
+ ldr x1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer
mov x2, x3 // Put value param
mov x3, xSELF // pass Thread::Current
bl artSet64StaticFromCode
@@ -1437,7 +1433,7 @@
bl artQuickResolutionTrampoline // (called, receiver, Thread*, SP)
cbz x0, 1f
mov xIP0, x0 // Remember returned code pointer in xIP0.
- ldr w0, [sp, #0] // artQuickResolutionTrampoline puts called method in *SP.
+ ldr x0, [sp, #0] // artQuickResolutionTrampoline puts called method in *SP.
RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
br xIP0
1:
diff --git a/runtime/arch/mips/context_mips.cc b/runtime/arch/mips/context_mips.cc
index 24892e9..53f2b65 100644
--- a/runtime/arch/mips/context_mips.cc
+++ b/runtime/arch/mips/context_mips.cc
@@ -16,8 +16,8 @@
#include "context_mips.h"
+#include "art_method-inl.h"
#include "base/bit_utils.h"
-#include "mirror/art_method-inl.h"
#include "quick/quick_method_frame_info.h"
namespace art {
@@ -36,7 +36,7 @@
}
void MipsContext::FillCalleeSaves(const StackVisitor& fr) {
- mirror::ArtMethod* method = fr.GetMethod();
+ ArtMethod* method = fr.GetMethod();
const QuickMethodFrameInfo frame_info = method->GetQuickFrameInfo();
int spill_pos = 0;
diff --git a/runtime/arch/mips/fault_handler_mips.cc b/runtime/arch/mips/fault_handler_mips.cc
index c9949d4..abe495b 100644
--- a/runtime/arch/mips/fault_handler_mips.cc
+++ b/runtime/arch/mips/fault_handler_mips.cc
@@ -35,7 +35,7 @@
void FaultManager::GetMethodAndReturnPcAndSp(siginfo_t* siginfo ATTRIBUTE_UNUSED,
void* context ATTRIBUTE_UNUSED,
- mirror::ArtMethod** out_method ATTRIBUTE_UNUSED,
+ ArtMethod** out_method ATTRIBUTE_UNUSED,
uintptr_t* out_return_pc ATTRIBUTE_UNUSED,
uintptr_t* out_sp ATTRIBUTE_UNUSED) {
}
diff --git a/runtime/arch/mips/quick_entrypoints_mips.S b/runtime/arch/mips/quick_entrypoints_mips.S
index c00d6cb..9e1dab6 100644
--- a/runtime/arch/mips/quick_entrypoints_mips.S
+++ b/runtime/arch/mips/quick_entrypoints_mips.S
@@ -523,7 +523,7 @@
lw $a1, 4($sp) # copy arg value for a1
lw $a2, 8($sp) # copy arg value for a2
lw $a3, 12($sp) # copy arg value for a3
- lw $t9, MIRROR_ART_METHOD_QUICK_CODE_OFFSET_32($a0) # get pointer to the code
+ lw $t9, ART_METHOD_QUICK_CODE_OFFSET_32($a0) # get pointer to the code
jalr $t9 # call the method
sw $zero, 0($sp) # store null for method* at bottom of frame
move $sp, $fp # restore the stack
diff --git a/runtime/arch/mips64/context_mips64.cc b/runtime/arch/mips64/context_mips64.cc
index 8ce6cf0..6637c37 100644
--- a/runtime/arch/mips64/context_mips64.cc
+++ b/runtime/arch/mips64/context_mips64.cc
@@ -16,8 +16,8 @@
#include "context_mips64.h"
+#include "art_method-inl.h"
#include "base/bit_utils.h"
-#include "mirror/art_method-inl.h"
#include "quick/quick_method_frame_info.h"
namespace art {
@@ -36,7 +36,7 @@
}
void Mips64Context::FillCalleeSaves(const StackVisitor& fr) {
- mirror::ArtMethod* method = fr.GetMethod();
+ ArtMethod* method = fr.GetMethod();
const QuickMethodFrameInfo frame_info = method->GetQuickFrameInfo();
int spill_pos = 0;
diff --git a/runtime/arch/mips64/fault_handler_mips64.cc b/runtime/arch/mips64/fault_handler_mips64.cc
index 7b5cd49..277c2b2 100644
--- a/runtime/arch/mips64/fault_handler_mips64.cc
+++ b/runtime/arch/mips64/fault_handler_mips64.cc
@@ -35,7 +35,7 @@
void FaultManager::GetMethodAndReturnPcAndSp(siginfo_t* siginfo ATTRIBUTE_UNUSED,
void* context ATTRIBUTE_UNUSED,
- mirror::ArtMethod** out_method ATTRIBUTE_UNUSED,
+ ArtMethod** out_method ATTRIBUTE_UNUSED,
uintptr_t* out_return_pc ATTRIBUTE_UNUSED,
uintptr_t* out_sp ATTRIBUTE_UNUSED) {
}
diff --git a/runtime/arch/mips64/quick_entrypoints_mips64.S b/runtime/arch/mips64/quick_entrypoints_mips64.S
index 031f85f..c62e035 100644
--- a/runtime/arch/mips64/quick_entrypoints_mips64.S
+++ b/runtime/arch/mips64/quick_entrypoints_mips64.S
@@ -90,8 +90,8 @@
ld $v0, %got(_ZN3art7Runtime9instance_E)($gp)
ld $v0, 0($v0)
THIS_LOAD_REQUIRES_READ_BARRIER
- lwu $v0, RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET($v0)
- sw $v0, 0($sp) # Place Method* at bottom of stack.
+ ld $v0, RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET($v0)
+ sd $v0, 0($sp) # Place ArtMethod* at bottom of stack.
sd $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF) # Place sp in Thread::Current()->top_quick_frame.
.endm
@@ -133,8 +133,8 @@
ld $v0, %got(_ZN3art7Runtime9instance_E)($gp)
ld $v0, 0($v0)
THIS_LOAD_REQUIRES_READ_BARRIER
- lwu $v0, RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET($v0)
- sw $v0, 0($sp) # Place Method* at bottom of stack.
+ ld $v0, RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET($v0)
+ sd $v0, 0($sp) # Place Method* at bottom of stack.
sd $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF) # Place sp in Thread::Current()->top_quick_frame.
.endm
@@ -256,14 +256,14 @@
ld $v0, %got(_ZN3art7Runtime9instance_E)($gp)
ld $v0, 0($v0)
THIS_LOAD_REQUIRES_READ_BARRIER
- lwu $v0, RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET($v0)
- sw $v0, 0($sp) # Place Method* at bottom of stack.
+ ld $v0, RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET($v0)
+ sd $v0, 0($sp) # Place Method* at bottom of stack.
sd $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF) # Place sp in Thread::Current()->top_quick_frame.
.endm
.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_A0
SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
- sw $a0, 0($sp) # Place Method* at bottom of stack.
+ sd $a0, 0($sp) # Place Method* at bottom of stack.
sd $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF) # Place sp in Thread::Current()->top_quick_frame.
.endm
@@ -641,7 +641,7 @@
move $s1, $a3 # move managed thread pointer into s1 (rSELF)
move $s8, $sp # save sp in s8 (fp)
- daddiu $t3, $a2, 20 # add 4 for method* and 16 for stack alignment
+ daddiu $t3, $a2, 24 # add 8 for ArtMethod* and 16 for stack alignment
dsrl $t3, $t3, 4 # shift the frame size right 4
dsll $t3, $t3, 4 # shift the frame size left 4 to align to 16 bytes
dsubu $sp, $sp, $t3 # reserve stack space for argument array
@@ -670,9 +670,9 @@
call_fn:
# call method (a0 and a1 have been untouched)
lwu $a1, 0($a1) # make a1 = this ptr
- sw $a1, 4($sp) # copy this ptr (skip 4 bytes for method*)
- sw $zero, 0($sp) # store null for method* at bottom of frame
- ld $t9, MIRROR_ART_METHOD_QUICK_CODE_OFFSET_64($a0) # get pointer to the code
+ sw $a1, 8($sp) # copy this ptr (skip 8 bytes for ArtMethod*)
+ sd $zero, 0($sp) # store null for ArtMethod* at bottom of frame
+ ld $t9, ART_METHOD_QUICK_CODE_OFFSET_64($a0) # get pointer to the code
jalr $t9 # call the method
nop
move $sp, $s8 # restore sp
@@ -744,7 +744,7 @@
move $s1, $a3 # move managed thread pointer into s1 (rSELF)
move $s8, $sp # save sp in s8 (fp)
- daddiu $t3, $a2, 20 # add 4 for method* and 16 for stack alignment
+ daddiu $t3, $a2, 24 # add 8 for ArtMethod* and 16 for stack alignment
dsrl $t3, $t3, 4 # shift the frame size right 4
dsll $t3, $t3, 4 # shift the frame size left 4 to align to 16 bytes
dsubu $sp, $sp, $t3 # reserve stack space for argument array
@@ -752,7 +752,7 @@
daddiu $t0, $a5, 1 # t0 = shorty[1] (skip 1 for return type)
move $t1, $a1 # t1 = arg_array
move $t2, $a2 # t2 = number of argument bytes remain
- daddiu $v0, $sp, 4 # v0 points to where to copy arg_array
+ daddiu $v0, $sp, 8 # v0 points to where to copy arg_array
LOOP_OVER_SHORTY_LOADING_REG a1, f13, call_sfn
LOOP_OVER_SHORTY_LOADING_REG a2, f14, call_sfn
LOOP_OVER_SHORTY_LOADING_REG a3, f15, call_sfn
@@ -773,8 +773,8 @@
call_sfn:
# call method (a0 has been untouched)
- sw $zero, 0($sp) # store null for method* at bottom of frame
- ld $t9, MIRROR_ART_METHOD_QUICK_CODE_OFFSET_64($a0) # get pointer to the code
+ sd $zero, 0($sp) # store null for ArtMethod* at bottom of frame
+ ld $t9, ART_METHOD_QUICK_CODE_OFFSET_64($a0) # get pointer to the code
jalr $t9 # call the method
nop
move $sp, $s8 # restore sp
@@ -821,7 +821,7 @@
.extern artHandleFillArrayDataFromCode
ENTRY art_quick_handle_fill_data
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case exception allocation triggers GC
- lwu $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artHandleFillArrayDataFromCode # (payload offset, Array*, method, Thread*)
move $a3, rSELF # pass Thread::Current
RETURN_IF_ZERO
@@ -969,7 +969,7 @@
.extern artGetBooleanStaticFromCode
ENTRY art_quick_get_boolean_static
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artGetBooleanStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*)
move $a2, rSELF # pass Thread::Current
RETURN_IF_NO_EXCEPTION
@@ -981,7 +981,7 @@
.extern artGetByteStaticFromCode
ENTRY art_quick_get_byte_static
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artGetByteStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*)
move $a2, rSELF # pass Thread::Current
RETURN_IF_NO_EXCEPTION
@@ -993,7 +993,7 @@
.extern artGetCharStaticFromCode
ENTRY art_quick_get_char_static
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artGetCharStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*)
move $a2, rSELF # pass Thread::Current
RETURN_IF_NO_EXCEPTION
@@ -1005,7 +1005,7 @@
.extern artGetShortStaticFromCode
ENTRY art_quick_get_short_static
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artGetShortStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*)
move $a2, rSELF # pass Thread::Current
RETURN_IF_NO_EXCEPTION
@@ -1017,7 +1017,7 @@
.extern artGet32StaticFromCode
ENTRY art_quick_get32_static
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artGet32StaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*)
move $a2, rSELF # pass Thread::Current
RETURN_IF_NO_EXCEPTION
@@ -1029,7 +1029,7 @@
.extern artGet64StaticFromCode
ENTRY art_quick_get64_static
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artGet64StaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*)
move $a2, rSELF # pass Thread::Current
RETURN_IF_NO_EXCEPTION
@@ -1041,7 +1041,7 @@
.extern artGetObjStaticFromCode
ENTRY art_quick_get_obj_static
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artGetObjStaticFromCode # (uint32_t field_idx, const Method* referrer, Thread*)
move $a2, rSELF # pass Thread::Current
RETURN_IF_NO_EXCEPTION
@@ -1053,7 +1053,7 @@
.extern artGetBooleanInstanceFromCode
ENTRY art_quick_get_boolean_instance
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artGetBooleanInstanceFromCode # (field_idx, Object*, referrer, Thread*)
move $a3, rSELF # pass Thread::Current
RETURN_IF_NO_EXCEPTION
@@ -1065,7 +1065,7 @@
.extern artGetByteInstanceFromCode
ENTRY art_quick_get_byte_instance
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artGetByteInstanceFromCode # (field_idx, Object*, referrer, Thread*)
move $a3, rSELF # pass Thread::Current
RETURN_IF_NO_EXCEPTION
@@ -1077,7 +1077,7 @@
.extern artGetCharInstanceFromCode
ENTRY art_quick_get_char_instance
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artGetCharInstanceFromCode # (field_idx, Object*, referrer, Thread*)
move $a3, rSELF # pass Thread::Current
RETURN_IF_NO_EXCEPTION
@@ -1089,7 +1089,7 @@
.extern artGetShortInstanceFromCode
ENTRY art_quick_get_short_instance
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artGetShortInstanceFromCode # (field_idx, Object*, referrer, Thread*)
move $a3, rSELF # pass Thread::Current
RETURN_IF_NO_EXCEPTION
@@ -1101,7 +1101,7 @@
.extern artGet32InstanceFromCode
ENTRY art_quick_get32_instance
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artGet32InstanceFromCode # (field_idx, Object*, referrer, Thread*)
move $a3, rSELF # pass Thread::Current
RETURN_IF_NO_EXCEPTION
@@ -1113,7 +1113,7 @@
.extern artGet64InstanceFromCode
ENTRY art_quick_get64_instance
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artGet64InstanceFromCode # (field_idx, Object*, referrer, Thread*)
move $a3, rSELF # pass Thread::Current
RETURN_IF_NO_EXCEPTION
@@ -1125,7 +1125,7 @@
.extern artGetObjInstanceFromCode
ENTRY art_quick_get_obj_instance
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artGetObjInstanceFromCode # (field_idx, Object*, referrer, Thread*)
move $a3, rSELF # pass Thread::Current
RETURN_IF_NO_EXCEPTION
@@ -1137,7 +1137,7 @@
.extern artSet8StaticFromCode
ENTRY art_quick_set8_static
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artSet8StaticFromCode # (field_idx, new_val, referrer, Thread*)
move $a3, rSELF # pass Thread::Current
RETURN_IF_ZERO
@@ -1149,7 +1149,7 @@
.extern artSet16StaticFromCode
ENTRY art_quick_set16_static
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artSet16StaticFromCode # (field_idx, new_val, referrer, Thread*)
move $a3, rSELF # pass Thread::Current
RETURN_IF_ZERO
@@ -1161,7 +1161,7 @@
.extern artSet32StaticFromCode
ENTRY art_quick_set32_static
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artSet32StaticFromCode # (field_idx, new_val, referrer, Thread*)
move $a3, rSELF # pass Thread::Current
RETURN_IF_ZERO
@@ -1174,7 +1174,7 @@
ENTRY art_quick_set64_static
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
move $a2, $a1 # pass new_val
- lwu $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a1, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artSet64StaticFromCode # (field_idx, referrer, new_val, Thread*)
move $a3, rSELF # pass Thread::Current
RETURN_IF_ZERO
@@ -1186,7 +1186,7 @@
.extern artSetObjStaticFromCode
ENTRY art_quick_set_obj_static
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a2, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artSetObjStaticFromCode # (field_idx, new_val, referrer, Thread*)
move $a3, rSELF # pass Thread::Current
RETURN_IF_ZERO
@@ -1198,7 +1198,7 @@
.extern artSet8InstanceFromCode
ENTRY art_quick_set8_instance
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a3, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a3, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artSet8InstanceFromCode # (field_idx, Object*, new_val, referrer, Thread*)
move $a4, rSELF # pass Thread::Current
RETURN_IF_ZERO
@@ -1210,7 +1210,7 @@
.extern artSet16InstanceFromCode
ENTRY art_quick_set16_instance
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a3, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a3, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artSet16InstanceFromCode # (field_idx, Object*, new_val, referrer, Thread*)
move $a4, rSELF # pass Thread::Current
RETURN_IF_ZERO
@@ -1222,7 +1222,7 @@
.extern artSet32InstanceFromCode
ENTRY art_quick_set32_instance
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a3, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a3, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artSet32InstanceFromCode # (field_idx, Object*, new_val, referrer, Thread*)
move $a4, rSELF # pass Thread::Current
RETURN_IF_ZERO
@@ -1234,7 +1234,7 @@
.extern artSet64InstanceFromCode
ENTRY art_quick_set64_instance
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a3, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a3, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artSet64InstanceFromCode # (field_idx, Object*, new_val, referrer, Thread*)
move $a4, rSELF # pass Thread::Current
RETURN_IF_ZERO
@@ -1246,7 +1246,7 @@
.extern artSetObjInstanceFromCode
ENTRY art_quick_set_obj_instance
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME # save callee saves in case of GC
- lwu $a3, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
+ ld $a3, FRAME_SIZE_REFS_ONLY_CALLEE_SAVE($sp) # pass referrer's Method*
jal artSetObjInstanceFromCode # (field_idx, Object*, new_val, referrer, Thread*)
move $a4, rSELF # pass Thread::Current
RETURN_IF_ZERO
@@ -1378,7 +1378,7 @@
jal artQuickResolutionTrampoline # (Method* called, receiver, Thread*, SP)
move $a3, $sp # pass $sp
beq $v0, $zero, 1f
- lwu $a0, 0($sp) # load resolved method in $a0
+ ld $a0, 0($sp) # load resolved method in $a0
# artQuickResolutionTrampoline puts resolved method in *SP
RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
move $t9, $v0 # code pointer must be in $t9 to generate the global pointer
diff --git a/runtime/arch/stub_test.cc b/runtime/arch/stub_test.cc
index 23b7cfa..62a6962 100644
--- a/runtime/arch/stub_test.cc
+++ b/runtime/arch/stub_test.cc
@@ -17,10 +17,10 @@
#include <cstdio>
#include "art_field-inl.h"
+#include "art_method-inl.h"
#include "class_linker-inl.h"
#include "common_runtime_test.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
-#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/string-inl.h"
#include "scoped_thread_state_change.h"
@@ -70,7 +70,7 @@
// TODO: Set up a frame according to referrer's specs.
size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
- mirror::ArtMethod* referrer) {
+ ArtMethod* referrer) {
// Push a transition back into managed code onto the linked list in thread.
ManagedStack fragment;
self->PushManagedStackFragment(&fragment);
@@ -420,7 +420,7 @@
// TODO: Set up a frame according to referrer's specs.
size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
- Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
+ Thread* self, ArtMethod* referrer, size_t hidden) {
// Push a transition back into managed code onto the linked list in thread.
ManagedStack fragment;
self->PushManagedStackFragment(&fragment);
@@ -776,7 +776,7 @@
// Method with 32b arg0, 64b arg1
size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
- mirror::ArtMethod* referrer) {
+ ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
defined(__aarch64__)
// Just pass through.
@@ -1282,7 +1282,8 @@
{
// Use an arbitrary method from c to use as referrer
size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
- reinterpret_cast<size_t>(c->GetVirtualMethod(0)), // arbitrary
+ // arbitrary
+ reinterpret_cast<size_t>(c->GetVirtualMethod(0, sizeof(void*))),
0U,
StubTest::GetEntrypoint(self, kQuickAllocObject),
self);
@@ -1297,7 +1298,7 @@
{
// We can use null in the second argument as we do not need a method here (not used in
// resolved/initialized cases)
- size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
+ size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
self);
@@ -1311,7 +1312,7 @@
{
// We can use null in the second argument as we do not need a method here (not used in
// resolved/initialized cases)
- size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
+ size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
self);
@@ -1367,7 +1368,7 @@
}
self->ClearException();
- size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
+ size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), 0u, 0U,
StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
self);
EXPECT_TRUE(self->IsExceptionPending());
@@ -1417,7 +1418,8 @@
// Use an arbitrary method from c to use as referrer
size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()), // type_idx
10U,
- reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)), // arbitrary
+ // arbitrary
+ reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0, sizeof(void*))),
StubTest::GetEntrypoint(self, kQuickAllocArray),
self);
@@ -1554,7 +1556,7 @@
static void GetSetBooleanStatic(ArtField* f, Thread* self,
- mirror::ArtMethod* referrer, StubTest* test)
+ ArtMethod* referrer, StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
(defined(__x86_64__) && !defined(__APPLE__))
@@ -1584,7 +1586,7 @@
std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
-static void GetSetByteStatic(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
+static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
@@ -1616,7 +1618,7 @@
static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
- mirror::ArtMethod* referrer, StubTest* test)
+ ArtMethod* referrer, StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
(defined(__x86_64__) && !defined(__APPLE__))
@@ -1651,7 +1653,7 @@
#endif
}
static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
- Thread* self, mirror::ArtMethod* referrer, StubTest* test)
+ Thread* self, ArtMethod* referrer, StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
(defined(__x86_64__) && !defined(__APPLE__))
@@ -1685,7 +1687,7 @@
#endif
}
-static void GetSetCharStatic(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
+static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
@@ -1716,7 +1718,7 @@
#endif
}
static void GetSetShortStatic(ArtField* f, Thread* self,
- mirror::ArtMethod* referrer, StubTest* test)
+ ArtMethod* referrer, StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
(defined(__x86_64__) && !defined(__APPLE__))
@@ -1747,7 +1749,7 @@
}
static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
- Thread* self, mirror::ArtMethod* referrer, StubTest* test)
+ Thread* self, ArtMethod* referrer, StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
(defined(__x86_64__) && !defined(__APPLE__))
@@ -1781,7 +1783,7 @@
#endif
}
static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
- Thread* self, mirror::ArtMethod* referrer, StubTest* test)
+ Thread* self, ArtMethod* referrer, StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
(defined(__x86_64__) && !defined(__APPLE__))
@@ -1815,7 +1817,7 @@
#endif
}
-static void GetSet32Static(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
+static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
@@ -1852,7 +1854,7 @@
static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
- Thread* self, mirror::ArtMethod* referrer, StubTest* test)
+ Thread* self, ArtMethod* referrer, StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
(defined(__x86_64__) && !defined(__APPLE__))
@@ -1893,7 +1895,7 @@
(defined(__x86_64__) && !defined(__APPLE__))
static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
- mirror::ArtMethod* referrer, StubTest* test)
+ ArtMethod* referrer, StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
reinterpret_cast<size_t>(val),
@@ -1912,7 +1914,7 @@
}
#endif
-static void GetSetObjStatic(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
+static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
@@ -1936,7 +1938,7 @@
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
(defined(__x86_64__) && !defined(__APPLE__))
static void set_and_check_instance(ArtField* f, mirror::Object* trg,
- mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
+ mirror::Object* val, Thread* self, ArtMethod* referrer,
StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
test->Invoke3WithReferrer(static_cast<size_t>(f->GetDexFieldIndex()),
@@ -1960,7 +1962,7 @@
#endif
static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
- Thread* self, mirror::ArtMethod* referrer, StubTest* test)
+ Thread* self, ArtMethod* referrer, StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
(defined(__x86_64__) && !defined(__APPLE__))
@@ -1982,7 +1984,7 @@
// TODO: Complete these tests for 32b architectures.
-static void GetSet64Static(ArtField* f, Thread* self, mirror::ArtMethod* referrer,
+static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
@@ -2014,7 +2016,7 @@
static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
- Thread* self, mirror::ArtMethod* referrer, StubTest* test)
+ Thread* self, ArtMethod* referrer, StubTest* test)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
defined(__aarch64__)
@@ -2060,11 +2062,11 @@
CHECK(o != nullptr);
ScopedObjectAccess soa(self);
- StackHandleScope<4> hs(self);
+ StackHandleScope<3> hs(self);
Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
// Need a method as a referrer
- Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));
+ ArtMethod* m = c->GetDirectMethod(0, sizeof(void*));
// Play with it...
@@ -2079,27 +2081,27 @@
}
switch (type) {
case Primitive::Type::kPrimBoolean:
- GetSetBooleanStatic(f, self, m.Get(), test);
+ GetSetBooleanStatic(f, self, m, test);
break;
case Primitive::Type::kPrimByte:
- GetSetByteStatic(f, self, m.Get(), test);
+ GetSetByteStatic(f, self, m, test);
break;
case Primitive::Type::kPrimChar:
- GetSetCharStatic(f, self, m.Get(), test);
+ GetSetCharStatic(f, self, m, test);
break;
case Primitive::Type::kPrimShort:
- GetSetShortStatic(f, self, m.Get(), test);
+ GetSetShortStatic(f, self, m, test);
break;
case Primitive::Type::kPrimInt:
- GetSet32Static(f, self, m.Get(), test);
+ GetSet32Static(f, self, m, test);
break;
case Primitive::Type::kPrimLong:
- GetSet64Static(f, self, m.Get(), test);
+ GetSet64Static(f, self, m, test);
break;
case Primitive::Type::kPrimNot:
// Don't try array.
if (f->GetTypeDescriptor()[0] != '[') {
- GetSetObjStatic(f, self, m.Get(), test);
+ GetSetObjStatic(f, self, m, test);
}
break;
default:
@@ -2118,27 +2120,27 @@
}
switch (type) {
case Primitive::Type::kPrimBoolean:
- GetSetBooleanInstance(&obj, f, self, m.Get(), test);
+ GetSetBooleanInstance(&obj, f, self, m, test);
break;
case Primitive::Type::kPrimByte:
- GetSetByteInstance(&obj, f, self, m.Get(), test);
+ GetSetByteInstance(&obj, f, self, m, test);
break;
case Primitive::Type::kPrimChar:
- GetSetCharInstance(&obj, f, self, m.Get(), test);
+ GetSetCharInstance(&obj, f, self, m, test);
break;
case Primitive::Type::kPrimShort:
- GetSetShortInstance(&obj, f, self, m.Get(), test);
+ GetSetShortInstance(&obj, f, self, m, test);
break;
case Primitive::Type::kPrimInt:
- GetSet32Instance(&obj, f, self, m.Get(), test);
+ GetSet32Instance(&obj, f, self, m, test);
break;
case Primitive::Type::kPrimLong:
- GetSet64Instance(&obj, f, self, m.Get(), test);
+ GetSet64Instance(&obj, f, self, m, test);
break;
case Primitive::Type::kPrimNot:
// Don't try array.
if (f->GetTypeDescriptor()[0] != '[') {
- GetSetObjInstance(&obj, f, self, m.Get(), test);
+ GetSetObjInstance(&obj, f, self, m, test);
}
break;
default:
@@ -2235,17 +2237,18 @@
ASSERT_NE(nullptr, arraylist_jclass);
jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
ASSERT_NE(nullptr, arraylist_constructor);
- jmethodID contains_jmethod = env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
+ jmethodID contains_jmethod = env->GetMethodID(
+ arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
ASSERT_NE(nullptr, contains_jmethod);
jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
ASSERT_NE(nullptr, add_jmethod);
- // Get mirror representation.
- Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));
+ // Get representation.
+ ArtMethod* contains_amethod = soa.DecodeMethod(contains_jmethod);
// Patch up ArrayList.contains.
- if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
- contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
+ if (contains_amethod->GetEntryPointFromQuickCompiledCode() == nullptr) {
+ contains_amethod->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
}
@@ -2254,11 +2257,12 @@
// Load List and used methods (JNI).
jclass list_jclass = env->FindClass("java/util/List");
ASSERT_NE(nullptr, list_jclass);
- jmethodID inf_contains_jmethod = env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
+ jmethodID inf_contains_jmethod = env->GetMethodID(
+ list_jclass, "contains", "(Ljava/lang/Object;)Z");
ASSERT_NE(nullptr, inf_contains_jmethod);
// Get mirror representation.
- Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));
+ ArtMethod* inf_contains = soa.DecodeMethod(inf_contains_jmethod);
// Object
@@ -2287,8 +2291,8 @@
Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
reinterpret_cast<size_t>(obj.Get()),
StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
- self, contains_amethod.Get(),
- static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));
+ self, contains_amethod,
+ static_cast<size_t>(inf_contains->GetDexMethodIndex()));
ASSERT_FALSE(self->IsExceptionPending());
EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
@@ -2301,33 +2305,31 @@
// Contains.
- result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
- reinterpret_cast<size_t>(obj.Get()),
- StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
- self, contains_amethod.Get(),
- static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));
+ result = Invoke3WithReferrerAndHidden(
+ 0U, reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(obj.Get()),
+ StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline), self, contains_amethod,
+ static_cast<size_t>(inf_contains->GetDexMethodIndex()));
ASSERT_FALSE(self->IsExceptionPending());
EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
// 2. regular interface trampoline
- result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
+ result = Invoke3WithReferrer(static_cast<size_t>(inf_contains->GetDexMethodIndex()),
reinterpret_cast<size_t>(array_list.Get()),
reinterpret_cast<size_t>(obj.Get()),
StubTest::GetEntrypoint(self,
kQuickInvokeInterfaceTrampolineWithAccessCheck),
- self, contains_amethod.Get());
+ self, contains_amethod);
ASSERT_FALSE(self->IsExceptionPending());
EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
- result = Invoke3WithReferrer(static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()),
- reinterpret_cast<size_t>(array_list.Get()),
- reinterpret_cast<size_t>(array_list.Get()),
- StubTest::GetEntrypoint(self,
- kQuickInvokeInterfaceTrampolineWithAccessCheck),
- self, contains_amethod.Get());
+ result = Invoke3WithReferrer(
+ static_cast<size_t>(inf_contains->GetDexMethodIndex()),
+ reinterpret_cast<size_t>(array_list.Get()), reinterpret_cast<size_t>(array_list.Get()),
+ StubTest::GetEntrypoint(self, kQuickInvokeInterfaceTrampolineWithAccessCheck), self,
+ contains_amethod);
ASSERT_FALSE(self->IsExceptionPending());
EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);
diff --git a/runtime/arch/x86/context_x86.cc b/runtime/arch/x86/context_x86.cc
index 06bae75..7096c82 100644
--- a/runtime/arch/x86/context_x86.cc
+++ b/runtime/arch/x86/context_x86.cc
@@ -16,8 +16,8 @@
#include "context_x86.h"
+#include "art_method-inl.h"
#include "base/bit_utils.h"
-#include "mirror/art_method-inl.h"
#include "quick/quick_method_frame_info.h"
namespace art {
@@ -35,7 +35,7 @@
}
void X86Context::FillCalleeSaves(const StackVisitor& fr) {
- mirror::ArtMethod* method = fr.GetMethod();
+ ArtMethod* method = fr.GetMethod();
const QuickMethodFrameInfo frame_info = method->GetQuickFrameInfo();
int spill_pos = 0;
diff --git a/runtime/arch/x86/fault_handler_x86.cc b/runtime/arch/x86/fault_handler_x86.cc
index 2de69aa..d7c4cb1 100644
--- a/runtime/arch/x86/fault_handler_x86.cc
+++ b/runtime/arch/x86/fault_handler_x86.cc
@@ -16,13 +16,14 @@
#include "fault_handler.h"
+
#include <sys/ucontext.h>
+
+#include "art_method-inl.h"
#include "base/macros.h"
#include "globals.h"
#include "base/logging.h"
#include "base/hex_dump.h"
-#include "mirror/art_method.h"
-#include "mirror/art_method-inl.h"
#include "thread.h"
#include "thread-inl.h"
@@ -248,7 +249,7 @@
}
void FaultManager::GetMethodAndReturnPcAndSp(siginfo_t* siginfo, void* context,
- mirror::ArtMethod** out_method,
+ ArtMethod** out_method,
uintptr_t* out_return_pc, uintptr_t* out_sp) {
struct ucontext* uc = reinterpret_cast<struct ucontext*>(context);
*out_sp = static_cast<uintptr_t>(uc->CTX_ESP);
@@ -267,10 +268,10 @@
reinterpret_cast<uint8_t*>(*out_sp) - GetStackOverflowReservedBytes(kX86));
#endif
if (overflow_addr == fault_addr) {
- *out_method = reinterpret_cast<mirror::ArtMethod*>(uc->CTX_METHOD);
+ *out_method = reinterpret_cast<ArtMethod*>(uc->CTX_METHOD);
} else {
// The method is at the top of the stack.
- *out_method = (reinterpret_cast<StackReference<mirror::ArtMethod>* >(*out_sp)[0]).AsMirrorPtr();
+ *out_method = *reinterpret_cast<ArtMethod**>(*out_sp);
}
uint8_t* pc = reinterpret_cast<uint8_t*>(uc->CTX_EIP);
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index 8207360..e0397cc 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -472,7 +472,7 @@
// Nothing left to load.
.Lgpr_setup_finished:
mov 20(%ebp), %eax // move method pointer into eax
- call *MIRROR_ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // call the method
+ call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // call the method
mov %ebp, %esp // restore stack pointer
CFI_DEF_CFA_REGISTER(esp)
POP edi // pop edi
@@ -589,7 +589,7 @@
// Nothing left to load.
.Lgpr_setup_finished2:
mov 20(%ebp), %eax // move method pointer into eax
- call *MIRROR_ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // call the method
+ call *ART_METHOD_QUICK_CODE_OFFSET_32(%eax) // call the method
mov %ebp, %esp // restore stack pointer
CFI_DEF_CFA_REGISTER(esp)
POP edi // pop edi
diff --git a/runtime/arch/x86_64/context_x86_64.cc b/runtime/arch/x86_64/context_x86_64.cc
index 2c4532c..1fe2ef8 100644
--- a/runtime/arch/x86_64/context_x86_64.cc
+++ b/runtime/arch/x86_64/context_x86_64.cc
@@ -16,8 +16,8 @@
#include "context_x86_64.h"
+#include "art_method-inl.h"
#include "base/bit_utils.h"
-#include "mirror/art_method-inl.h"
#include "quick/quick_method_frame_info.h"
namespace art {
@@ -35,7 +35,7 @@
}
void X86_64Context::FillCalleeSaves(const StackVisitor& fr) {
- mirror::ArtMethod* method = fr.GetMethod();
+ ArtMethod* method = fr.GetMethod();
const QuickMethodFrameInfo frame_info = method->GetQuickFrameInfo();
int spill_pos = 0;
diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
index 7bb18a4..48f59f3 100644
--- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
@@ -57,7 +57,7 @@
PUSH r12 // Callee save.
PUSH rbp // Callee save.
PUSH rbx // Callee save.
- // Create space for FPR args, plus space for StackReference<ArtMethod>.
+ // Create space for FPR args, plus space for ArtMethod*.
subq MACRO_LITERAL(4 * 8 + 8), %rsp
CFI_ADJUST_CFA_OFFSET(4 * 8 + 8)
// Save FPRs.
@@ -67,7 +67,7 @@
movq %xmm15, 32(%rsp)
// R10 := ArtMethod* for save all callee save frame method.
THIS_LOAD_REQUIRES_READ_BARRIER
- movl RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10d
+ movq RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10
// Store ArtMethod* to bottom of stack.
movq %r10, 0(%rsp)
// Store rsp as the top quick frame.
@@ -100,7 +100,7 @@
PUSH r12 // Callee save.
PUSH rbp // Callee save.
PUSH rbx // Callee save.
- // Create space for FPR args, plus space for StackReference<ArtMethod>.
+ // Create space for FPR args, plus space for ArtMethod*.
subq LITERAL(8 + 4 * 8), %rsp
CFI_ADJUST_CFA_OFFSET(8 + 4 * 8)
// Save FPRs.
@@ -110,7 +110,7 @@
movq %xmm15, 32(%rsp)
// R10 := ArtMethod* for refs only callee save frame method.
THIS_LOAD_REQUIRES_READ_BARRIER
- movl RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10d
+ movq RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10
// Store ArtMethod* to bottom of stack.
movq %r10, 0(%rsp)
// Store rsp as the stop quick frame.
@@ -164,13 +164,12 @@
PUSH rbx // Callee save.
PUSH rdx // Quick arg 2.
PUSH rcx // Quick arg 3.
- // Create space for FPR args and create 2 slots, 1 of padding and 1 for the
- // StackReference<ArtMethod>.
+ // Create space for FPR args and create 2 slots for ArtMethod*.
subq MACRO_LITERAL(80 + 4 * 8), %rsp
CFI_ADJUST_CFA_OFFSET(80 + 4 * 8)
// R10 := ArtMethod* for ref and args callee save frame method.
THIS_LOAD_REQUIRES_READ_BARRIER
- movl RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10d
+ movq RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10
// Save FPRs.
movq %xmm0, 16(%rsp)
movq %xmm1, 24(%rsp)
@@ -210,8 +209,7 @@
PUSH rbx // Callee save.
PUSH rdx // Quick arg 2.
PUSH rcx // Quick arg 3.
- // Create space for FPR args and create 2 slots, 1 of padding and 1 for the
- // StackReference<ArtMethod>.
+ // Create space for FPR args and create 2 slots for ArtMethod*.
subq LITERAL(80 + 4 * 8), %rsp
CFI_ADJUST_CFA_OFFSET(80 + 4 * 8)
// Save FPRs.
@@ -504,13 +502,13 @@
#if (STACK_REFERENCE_SIZE != 4)
#error "STACK_REFERENCE_SIZE(X86_64) size not as expected."
#endif
- movl LITERAL(0), (%rsp) // Store null for method*
+ movq LITERAL(0), (%rsp) // Store null for method*
movl %r10d, %ecx // Place size of args in rcx.
movq %rdi, %rax // rax := method to be called
movq %rsi, %r11 // r11 := arg_array
- leaq 4(%rsp), %rdi // rdi is pointing just above the StackReference<method> in the
- // stack arguments.
+ leaq 8(%rsp), %rdi // rdi is pointing just above the ArtMethod* in the stack
+ // arguments.
// Copy arg array into stack.
rep movsb // while (rcx--) { *rdi++ = *rsi++ }
leaq 1(%r9), %r10 // r10 := shorty + 1 ; ie skip return arg character
@@ -522,7 +520,7 @@
LOOP_OVER_SHORTY_LOADING_GPRS r8, r8d, .Lgpr_setup_finished
LOOP_OVER_SHORTY_LOADING_GPRS r9, r9d, .Lgpr_setup_finished
.Lgpr_setup_finished:
- call *MIRROR_ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
+ call *ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
movq %rbp, %rsp // Restore stack pointer.
POP r15 // Pop r15
POP r14 // Pop r14
@@ -598,12 +596,12 @@
#if (STACK_REFERENCE_SIZE != 4)
#error "STACK_REFERENCE_SIZE(X86_64) size not as expected."
#endif
- movl LITERAL(0), (%rsp) // Store null for method*
+ movq LITERAL(0), (%rsp) // Store null for method*
movl %r10d, %ecx // Place size of args in rcx.
movq %rdi, %rax // rax := method to be called
movq %rsi, %r11 // r11 := arg_array
- leaq 4(%rsp), %rdi // rdi is pointing just above the StackReference<method> in the
+ leaq 8(%rsp), %rdi // rdi is pointing just above the ArtMethod* in the
// stack arguments.
// Copy arg array into stack.
rep movsb // while (rcx--) { *rdi++ = *rsi++ }
@@ -615,7 +613,7 @@
LOOP_OVER_SHORTY_LOADING_GPRS r8, r8d, .Lgpr_setup_finished2
LOOP_OVER_SHORTY_LOADING_GPRS r9, r9d, .Lgpr_setup_finished2
.Lgpr_setup_finished2:
- call *MIRROR_ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
+ call *ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
movq %rbp, %rsp // Restore stack pointer.
POP r15 // Pop r15
POP r14 // Pop r14
@@ -749,7 +747,7 @@
MACRO3(ONE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
DEFINE_FUNCTION VAR(c_name, 0)
- movl 8(%rsp), %esi // pass referrer
+ movq 8(%rsp), %rsi // pass referrer
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
// arg0 is in rdi
movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current()
@@ -761,7 +759,7 @@
MACRO3(TWO_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
DEFINE_FUNCTION VAR(c_name, 0)
- movl 8(%rsp), %edx // pass referrer
+ movq 8(%rsp), %rdx // pass referrer
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
// arg0 and arg1 are in rdi/rsi
movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current()
@@ -773,7 +771,7 @@
MACRO3(THREE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
DEFINE_FUNCTION VAR(c_name, 0)
- movl 8(%rsp), %ecx // pass referrer
+ movq 8(%rsp), %rcx // pass referrer
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
// arg0, arg1, and arg2 are in rdi/rsi/rdx
movq %gs:THREAD_SELF_OFFSET, %r8 // pass Thread::Current()
@@ -920,7 +918,7 @@
// Fast path tlab allocation.
// RDI: uint32_t type_idx, RSI: ArtMethod*
// RDX, RCX, R8, R9: free. RAX: return val.
- movl MIRROR_ART_METHOD_DEX_CACHE_TYPES_OFFSET(%rsi), %edx // Load dex cache resolved types array
+ movl ART_METHOD_DEX_CACHE_TYPES_OFFSET(%rsi), %edx // Load dex cache resolved types array
// Load the class
movl MIRROR_OBJECT_ARRAY_DATA_OFFSET(%rdx, %rdi, MIRROR_OBJECT_ARRAY_COMPONENT_SIZE), %edx
testl %edx, %edx // Check null class
@@ -1307,7 +1305,7 @@
// This is singled out as the argument order is different.
DEFINE_FUNCTION art_quick_set64_static
movq %rsi, %rdx // pass new_val
- movl 8(%rsp), %esi // pass referrer
+ movq 8(%rsp), %rsi // pass referrer
SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
// field_idx is in rdi
movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current()
@@ -1391,7 +1389,6 @@
* | XMM2 | float arg 3
* | XMM1 | float arg 2
* | XMM0 | float arg 1
- * | Padding |
* | RDI/Method* | <- sp
* #-------------------#
* | Scratch Alloca | 5K scratch space