Merge "Use macros to remove CFI information for MAC OS/X"
diff --git a/build/Android.common.mk b/build/Android.common.mk
index 026dccb..415d810 100644
--- a/build/Android.common.mk
+++ b/build/Android.common.mk
@@ -87,7 +87,9 @@
# Clang build.
# ART_TARGET_CLANG := true
-# ART_HOST_CLANG := true
+ifeq ($(HOST_OS),darwin)
+ART_HOST_CLANG := true
+endif
# directory used for dalvik-cache on device
ART_DALVIK_CACHE_DIR := /data/dalvik-cache
@@ -119,7 +121,7 @@
-Wall \
-Werror \
-Wextra \
- -Wstrict-aliasing=3 \
+ -Wstrict-aliasing \
-fstrict-aliasing
ifeq ($(ART_SMALL_MODE),true)
diff --git a/runtime/arch/x86/asm_support_x86.S b/runtime/arch/x86/asm_support_x86.S
index 3701b22..9ec1995 100644
--- a/runtime/arch/x86/asm_support_x86.S
+++ b/runtime/arch/x86/asm_support_x86.S
@@ -31,11 +31,26 @@
// are mangled with an extra underscore prefix. The use of $x for arguments
// means that literals need to be represented with $$x in macros.
#define SYMBOL(name) _ ## name
+ #define PLT_SYMBOL(name) _ ## name
#define VAR(name,index) SYMBOL($index)
+ #define PLT_VAR(name, index) SYMBOL($index)
#define REG_VAR(name,index) %$index
#define CALL_MACRO(name,index) $index
#define LITERAL(value) $value
#define MACRO_LITERAL(value) $$value
+
+ // Mac OS doesn't like cfi_* directives
+ #define CFI_STARTPROC
+ #define CFI_ENDPROC
+ #define CFI_ADJUST_CFA_OFFSET(size)
+ #define CFI_DEF_CFA(reg,size)
+ #define CFI_DEF_CFA_REGISTER(reg)
+ #define CFI_RESTORE(reg)
+ #define CFI_REL_OFFSET(reg,size)
+
+ // Mac OS doesn't support the .type and .size directives
+ #define FUNCTION_TYPE(name)
+ #define SIZE(name)
#else
// Regular gas(1) lets you name macro parameters.
#define MACRO0(macro_name) .macro macro_name
@@ -51,11 +66,25 @@
// special character meaning care needs to be taken when passing registers as macro arguments.
.altmacro
#define SYMBOL(name) name
+ #define PLT_SYMBOL(name) name@PLT
#define VAR(name,index) name&
+ #define PLT_VAR(name, index) name&@PLT
#define REG_VAR(name,index) %name
#define CALL_MACRO(name,index) name&
#define LITERAL(value) $value
#define MACRO_LITERAL(value) $value
+
+ // CFI support
+ #define CFI_STARTPROC .cfi_startproc
+ #define CFI_ENDPROC .cfi_endproc
+ #define CFI_ADJUST_CFA_OFFSET(size) .cfi_adjust_cfa_offset size
+ #define CFI_DEF_CFA(reg,size) .cfi_def_cfa reg,size
+ #define CFI_DEF_CFA_REGISTER(reg) .cfi_def_cfa_register reg
+ #define CFI_RESTORE(reg) .cfi_restore reg
+ #define CFI_REL_OFFSET(reg,size) .cfi_rel_offset reg,size
+
+ #define FUNCTION_TYPE(name) .type name&, @function
+ #define SIZE(name) .size name, .-name
#endif
/* Cache alignment for function entry */
@@ -64,40 +93,40 @@
END_MACRO
MACRO1(DEFINE_FUNCTION, c_name)
- .type VAR(c_name, 0), @function
+ FUNCTION_TYPE(\c_name)
.globl VAR(c_name, 0)
ALIGN_FUNCTION_ENTRY
VAR(c_name, 0):
- .cfi_startproc
+ CFI_STARTPROC
END_MACRO
MACRO1(END_FUNCTION, c_name)
- .cfi_endproc
- .size \c_name, .-\c_name
+ CFI_ENDPROC
+ SIZE(\c_name)
END_MACRO
MACRO1(PUSH, reg)
pushl REG_VAR(reg, 0)
- .cfi_adjust_cfa_offset 4
- .cfi_rel_offset REG_VAR(reg, 0), 0
+ CFI_ADJUST_CFA_OFFSET(4)
+ CFI_REL_OFFSET(REG_VAR(reg, 0), 0)
END_MACRO
MACRO1(POP, reg)
popl REG_VAR(reg,0)
- .cfi_adjust_cfa_offset -4
- .cfi_restore REG_VAR(reg,0)
+ CFI_ADJUST_CFA_OFFSET(-4)
+ CFI_RESTORE(REG_VAR(reg,0))
END_MACRO
MACRO1(UNIMPLEMENTED,name)
- .type VAR(name, 0), @function
+ FUNCTION_TYPE(\name)
.globl VAR(name, 0)
ALIGN_FUNCTION_ENTRY
VAR(name, 0):
- .cfi_startproc
+ CFI_STARTPROC
int3
int3
- .cfi_endproc
- .size \name, .-\name
+ CFI_ENDPROC
+ SIZE(\name)
END_MACRO
MACRO0(SETUP_GOT_NOSAVE)
diff --git a/runtime/arch/x86/jni_entrypoints_x86.S b/runtime/arch/x86/jni_entrypoints_x86.S
index e394819..72047d5 100644
--- a/runtime/arch/x86/jni_entrypoints_x86.S
+++ b/runtime/arch/x86/jni_entrypoints_x86.S
@@ -21,14 +21,14 @@
*/
DEFINE_FUNCTION art_jni_dlsym_lookup_stub
subl LITERAL(4), %esp // align stack
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
SETUP_GOT // pushes ebx
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
- call SYMBOL(artFindNativeMethod)@PLT // (Thread*)
+ CFI_ADJUST_CFA_OFFSET(4)
+ call PLT_SYMBOL(artFindNativeMethod) // (Thread*)
UNDO_SETUP_GOT
addl LITERAL(8), %esp // restore the stack
- .cfi_adjust_cfa_offset -12
+ CFI_ADJUST_CFA_OFFSET(-12)
cmpl LITERAL(0), %eax // check if returned method code is null
je no_native_code_found // if null, jump to return to handle
jmp *%eax // otherwise, tail call to intended method
diff --git a/runtime/arch/x86/portable_entrypoints_x86.S b/runtime/arch/x86/portable_entrypoints_x86.S
index a1f6b2d..48de7c1 100644
--- a/runtime/arch/x86/portable_entrypoints_x86.S
+++ b/runtime/arch/x86/portable_entrypoints_x86.S
@@ -31,7 +31,7 @@
PUSH ebp // save ebp
PUSH ebx // save ebx
mov %esp, %ebp // copy value of stack pointer into base pointer
- .cfi_def_cfa_register ebp
+ CFI_DEF_CFA_REGISTER(ebp)
mov 20(%ebp), %ebx // get arg array size
addl LITERAL(28), %ebx // reserve space for return addr, method*, ebx, and ebp in frame
andl LITERAL(0xFFFFFFF0), %ebx // align frame size to 16 bytes
@@ -42,7 +42,7 @@
pushl 20(%ebp) // push size of region to memcpy
pushl 16(%ebp) // push arg array as source of memcpy
pushl %eax // push stack pointer as destination of memcpy
- call SYMBOL(memcpy)@PLT // (void*, const void*, size_t)
+ call PLT_SYMBOL(memcpy) // (void*, const void*, size_t)
addl LITERAL(12), %esp // pop arguments to memcpy
mov 12(%ebp), %eax // move method pointer into eax
mov %eax, (%esp) // push method pointer onto stack
@@ -69,7 +69,7 @@
DEFINE_FUNCTION art_portable_proxy_invoke_handler
PUSH ebp // Set up frame.
movl %esp, %ebp
- .cfi_def_cfa_register %ebp
+ CFI_DEF_CFA_REGISTER(%ebp)
subl LITERAL(4), %esp // Align stack
SETUP_GOT // pushes ebx
leal 8(%ebp), %edx // %edx = ArtMethod** called_addr
@@ -79,11 +79,11 @@
pushl %fs:THREAD_SELF_OFFSET // Pass thread.
pushl %ecx // Pass receiver.
pushl %eax // Pass called.
- call SYMBOL(artPortableProxyInvokeHandler)@PLT // (called, receiver, Thread*, &called)
+ call PLT_SYMBOL(artPortableProxyInvokeHandler) // (called, receiver, Thread*, &called)
UNDO_SETUP_GOT
leave
- .cfi_restore %ebp
- .cfi_def_cfa %esp, 4
+ CFI_RESTORE(%ebp)
+ CFI_DEF_CFA(%esp, 4)
movd %eax, %xmm0 // Place return value also into floating point return value.
movd %edx, %xmm1
punpckldq %xmm1, %xmm0
@@ -93,7 +93,7 @@
DEFINE_FUNCTION art_portable_resolution_trampoline
PUSH ebp // Set up frame.
movl %esp, %ebp
- .cfi_def_cfa_register %ebp
+ CFI_DEF_CFA_REGISTER(%ebp)
subl LITERAL(4), %esp // Align stack
SETUP_GOT // pushes ebx
leal 8(%ebp), %edx // %edx = ArtMethod** called_addr
@@ -103,11 +103,11 @@
pushl %fs:THREAD_SELF_OFFSET // Pass thread.
pushl %ecx // Pass receiver.
pushl %eax // Pass called.
- call SYMBOL(artPortableResolutionTrampoline)@PLT // (called, receiver, Thread*, &called)
+ call PLT_SYMBOL(artPortableResolutionTrampoline) // (called, receiver, Thread*, &called)
UNDO_SETUP_GOT
leave
- .cfi_restore %ebp
- .cfi_def_cfa %esp, 4
+ CFI_RESTORE(%ebp)
+ CFI_DEF_CFA(%esp, 4)
testl %eax, %eax
jz resolve_fail
jmp * %eax
@@ -118,7 +118,7 @@
DEFINE_FUNCTION art_portable_to_interpreter_bridge
PUSH ebp // Set up frame.
movl %esp, %ebp
- .cfi_def_cfa_register %ebp
+ CFI_DEF_CFA_REGISTER(%ebp)
subl LITERAL(8), %esp // Align stack
SETUP_GOT
leal 8(%ebp), %edx // %edx = ArtMethod** called_addr
@@ -126,10 +126,10 @@
pushl %edx // Pass called_addr.
pushl %fs:THREAD_SELF_OFFSET // Pass thread.
pushl %eax // Pass called.
- call SYMBOL(artPortableToInterpreterBridge)@PLT // (called, Thread*, &called)
+ call PLT_SYMBOL(artPortableToInterpreterBridge) // (called, Thread*, &called)
UNDO_SETUP_GOT
leave
- .cfi_restore %ebp
- .cfi_def_cfa %esp, 4
+ CFI_RESTORE(%ebp)
+ CFI_DEF_CFA(%esp, 4)
ret
END_FUNCTION art_portable_to_interpreter_bridge
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index 69738ba..3adc46a 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -16,8 +16,6 @@
#include "asm_support_x86.S"
-#include "arch/quick_alloc_entrypoints.S"
-
// For x86, the CFA is esp+4, the address above the pushed return address on the stack.
/*
@@ -29,7 +27,7 @@
PUSH esi
PUSH ebp
subl MACRO_LITERAL(16), %esp // Grow stack by 4 words, bottom word will hold Method*
- .cfi_adjust_cfa_offset 16
+ CFI_ADJUST_CFA_OFFSET(16)
END_MACRO
/*
@@ -41,7 +39,7 @@
PUSH esi
PUSH ebp
subl MACRO_LITERAL(16), %esp // Grow stack by 4 words, bottom word will hold Method*
- .cfi_adjust_cfa_offset 16
+ CFI_ADJUST_CFA_OFFSET(16)
END_MACRO
MACRO0(RESTORE_REF_ONLY_CALLEE_SAVE_FRAME)
@@ -49,7 +47,7 @@
POP ebp // Restore callee saves (ebx is saved/restored by the upcall)
POP esi
POP edi
- .cfi_adjust_cfa_offset -28
+ CFI_ADJUST_CFA_OFFSET(-28)
END_MACRO
/*
@@ -68,7 +66,7 @@
MACRO0(RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME)
addl MACRO_LITERAL(4), %esp // Remove padding
- .cfi_adjust_cfa_offset -4
+ CFI_ADJUST_CFA_OFFSET(-4)
POP ecx // Restore args except eax
POP edx
POP ebx
@@ -86,12 +84,12 @@
mov %esp, %ecx
// Outgoing argument set up
subl MACRO_LITERAL(8), %esp // Alignment padding
- .cfi_adjust_cfa_offset 8
+ CFI_ADJUST_CFA_OFFSET(8)
PUSH ecx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
SETUP_GOT_NOSAVE // clobbers ebx (harmless here)
- call SYMBOL(artDeliverPendingExceptionFromCode)@PLT // artDeliverPendingExceptionFromCode(Thread*, SP)
+ call PLT_SYMBOL(artDeliverPendingExceptionFromCode) // artDeliverPendingExceptionFromCode(Thread*, SP)
int3 // unreached
END_MACRO
@@ -101,12 +99,12 @@
mov %esp, %ecx
// Outgoing argument set up
subl MACRO_LITERAL(8), %esp // alignment padding
- .cfi_adjust_cfa_offset 8
+ CFI_ADJUST_CFA_OFFSET(8)
PUSH ecx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
SETUP_GOT_NOSAVE // clobbers ebx (harmless here)
- call VAR(cxx_name, 1)@PLT // cxx_name(Thread*, SP)
+ call PLT_VAR(cxx_name, 1) // cxx_name(Thread*, SP)
int3 // unreached
END_FUNCTION VAR(c_name, 0)
END_MACRO
@@ -119,10 +117,10 @@
PUSH eax // alignment padding
PUSH ecx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH eax // pass arg1
SETUP_GOT_NOSAVE // clobbers ebx (harmless here)
- call VAR(cxx_name, 1)@PLT // cxx_name(arg1, Thread*, SP)
+ call PLT_VAR(cxx_name, 1) // cxx_name(arg1, Thread*, SP)
int3 // unreached
END_FUNCTION VAR(c_name, 0)
END_MACRO
@@ -134,11 +132,11 @@
// Outgoing argument set up
PUSH edx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH ecx // pass arg2
PUSH eax // pass arg1
SETUP_GOT_NOSAVE // clobbers ebx (harmless here)
- call VAR(cxx_name, 1)@PLT // cxx_name(arg1, arg2, Thread*, SP)
+ call PLT_VAR(cxx_name, 1) // cxx_name(arg1, arg2, Thread*, SP)
int3 // unreached
END_FUNCTION VAR(c_name, 0)
END_MACRO
@@ -206,18 +204,18 @@
// Outgoing argument set up
SETUP_GOT_NOSAVE
subl MACRO_LITERAL(12), %esp // alignment padding
- .cfi_adjust_cfa_offset 12
+ CFI_ADJUST_CFA_OFFSET(12)
PUSH edx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
pushl 32(%edx) // pass caller Method*
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH ecx // pass arg2
PUSH eax // pass arg1
- call VAR(cxx_name, 1)@PLT // cxx_name(arg1, arg2, arg3, Thread*, SP)
+ call PLT_VAR(cxx_name, 1) // cxx_name(arg1, arg2, arg3, Thread*, SP)
movl %edx, %edi // save code pointer in EDI
addl MACRO_LITERAL(36), %esp // Pop arguments, skip eax
- .cfi_adjust_cfa_offset -36
+ CFI_ADJUST_CFA_OFFSET(-36)
POP ecx // Restore args except eax
POP edx
POP ebx
@@ -231,7 +229,7 @@
ret
1:
addl MACRO_LITERAL(4), %esp // Pop code pointer off stack
- .cfi_adjust_cfa_offset -4
+ CFI_ADJUST_CFA_OFFSET(-4)
DELIVER_PENDING_EXCEPTION
END_FUNCTION VAR(c_name, 0)
END_MACRO
@@ -259,7 +257,7 @@
PUSH ebp // save ebp
PUSH ebx // save ebx
mov %esp, %ebp // copy value of stack pointer into base pointer
- .cfi_def_cfa_register ebp
+ CFI_DEF_CFA_REGISTER(ebp)
mov 20(%ebp), %ebx // get arg array size
addl LITERAL(28), %ebx // reserve space for return addr, method*, ebx, and ebp in frame
andl LITERAL(0xFFFFFFF0), %ebx // align frame size to 16 bytes
@@ -270,7 +268,7 @@
pushl 20(%ebp) // push size of region to memcpy
pushl 16(%ebp) // push arg array as source of memcpy
pushl %eax // push stack pointer as destination of memcpy
- call SYMBOL(memcpy)@PLT // (void*, const void*, size_t)
+ call PLT_SYMBOL(memcpy) // (void*, const void*, size_t)
addl LITERAL(12), %esp // pop arguments to memcpy
movl LITERAL(0), (%esp) // store NULL for method*
mov 12(%ebp), %eax // move method pointer into eax
@@ -279,7 +277,7 @@
mov 12(%esp), %ebx // copy arg3 into ebx
call *METHOD_CODE_OFFSET(%eax) // call the method
mov %ebp, %esp // restore stack pointer
- .cfi_def_cfa_register esp
+ CFI_DEF_CFA_REGISTER(esp)
POP ebx // pop ebx
POP ebp // pop ebp
mov 20(%esp), %ecx // get result pointer
@@ -303,13 +301,13 @@
SETUP_GOT_NOSAVE // clobbers ebx (harmless here)
// Outgoing argument set up
subl MACRO_LITERAL(8), %esp // push padding
- .cfi_adjust_cfa_offset 8
+ CFI_ADJUST_CFA_OFFSET(8)
PUSH edx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
- call VAR(cxx_name, 1)@PLT // cxx_name(Thread*, SP)
+ CFI_ADJUST_CFA_OFFSET(4)
+ call PLT_VAR(cxx_name, 1) // cxx_name(Thread*, SP)
addl MACRO_LITERAL(16), %esp // pop arguments
- .cfi_adjust_cfa_offset -16
+ CFI_ADJUST_CFA_OFFSET(-16)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
CALL_MACRO(return_macro, 2) // return or deliver exception
END_FUNCTION VAR(c_name, 0)
@@ -324,11 +322,11 @@
PUSH eax // push padding
PUSH edx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH eax // pass arg1
- call VAR(cxx_name, 1)@PLT // cxx_name(arg1, Thread*, SP)
+ call PLT_VAR(cxx_name, 1) // cxx_name(arg1, Thread*, SP)
addl MACRO_LITERAL(16), %esp // pop arguments
- .cfi_adjust_cfa_offset -16
+ CFI_ADJUST_CFA_OFFSET(-16)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
CALL_MACRO(return_macro, 2) // return or deliver exception
END_FUNCTION VAR(c_name, 0)
@@ -342,12 +340,12 @@
// Outgoing argument set up
PUSH edx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH ecx // pass arg2
PUSH eax // pass arg1
- call VAR(cxx_name, 1)@PLT // cxx_name(arg1, arg2, Thread*, SP)
+ call PLT_VAR(cxx_name, 1) // cxx_name(arg1, arg2, Thread*, SP)
addl MACRO_LITERAL(16), %esp // pop arguments
- .cfi_adjust_cfa_offset -16
+ CFI_ADJUST_CFA_OFFSET(-16)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
CALL_MACRO(return_macro, 2) // return or deliver exception
END_FUNCTION VAR(c_name, 0)
@@ -359,17 +357,17 @@
mov %esp, %ebx // remember SP
// Outgoing argument set up
subl MACRO_LITERAL(12), %esp // alignment padding
- .cfi_adjust_cfa_offset 12
+ CFI_ADJUST_CFA_OFFSET(12)
PUSH ebx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH edx // pass arg3
PUSH ecx // pass arg2
PUSH eax // pass arg1
SETUP_GOT_NOSAVE // clobbers EBX
- call VAR(cxx_name, 1)@PLT // cxx_name(arg1, arg2, arg3, Thread*, SP)
+ call PLT_VAR(cxx_name, 1) // cxx_name(arg1, arg2, arg3, Thread*, SP)
addl MACRO_LITERAL(32), %esp // pop arguments
- .cfi_adjust_cfa_offset -32
+ CFI_ADJUST_CFA_OFFSET(-32)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
CALL_MACRO(return_macro, 2) // return or deliver exception
END_FUNCTION VAR(c_name, 0)
@@ -401,7 +399,78 @@
END_MACRO
// Generate the allocation entrypoints for each allocator.
-GENERATE_ALL_ALLOC_ENTRYPOINTS
+// TODO: use arch/quick_alloc_entrypoints.S. Currently we don't, because we need concatenation
+// macros to work around differences between OS X's as and binutils as (OS X lacks named macro
+// arguments and the VAR macro won't concatenate arguments properly). This also breaks multi-line
+// macros that use each other, hence one macro invocation per line below.
+#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(c_suffix, cxx_suffix) \
+ TWO_ARG_DOWNCALL art_quick_alloc_object ## c_suffix, artAllocObjectFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
+#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(c_suffix, cxx_suffix) \
+ TWO_ARG_DOWNCALL art_quick_alloc_object_with_access_check ## c_suffix, artAllocObjectFromCodeWithAccessCheck ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
+#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(c_suffix, cxx_suffix) \
+ THREE_ARG_DOWNCALL art_quick_alloc_array ## c_suffix, artAllocArrayFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
+#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(c_suffix, cxx_suffix) \
+ THREE_ARG_DOWNCALL art_quick_alloc_array_with_access_check ## c_suffix, artAllocArrayFromCodeWithAccessCheck ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
+#define GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(c_suffix, cxx_suffix) \
+ THREE_ARG_DOWNCALL art_quick_check_and_alloc_array ## c_suffix, artCheckAndAllocArrayFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
+#define GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(c_suffix, cxx_suffix) \
+ THREE_ARG_DOWNCALL art_quick_check_and_alloc_array_with_access_check ## c_suffix, artCheckAndAllocArrayFromCodeWithAccessCheck ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
+
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_dlmalloc, DlMalloc)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_dlmalloc, DlMalloc)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_dlmalloc, DlMalloc)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc, DlMalloc)
+GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_dlmalloc, DlMalloc)
+GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc, DlMalloc)
+
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_dlmalloc_instrumented, DlMallocInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_dlmalloc_instrumented, DlMallocInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_dlmalloc_instrumented, DlMallocInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc_instrumented, DlMallocInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_dlmalloc_instrumented, DlMallocInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc_instrumented, DlMallocInstrumented)
+
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_rosalloc, RosAlloc)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_rosalloc, RosAlloc)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_rosalloc, RosAlloc)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc, RosAlloc)
+GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_rosalloc, RosAlloc)
+GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc, RosAlloc)
+
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_rosalloc_instrumented, RosAllocInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_rosalloc_instrumented, RosAllocInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_rosalloc_instrumented, RosAllocInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc_instrumented, RosAllocInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_rosalloc_instrumented, RosAllocInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc_instrumented, RosAllocInstrumented)
+
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_bump_pointer, BumpPointer)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_bump_pointer, BumpPointer)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_bump_pointer, BumpPointer)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer, BumpPointer)
+GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_bump_pointer, BumpPointer)
+GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer, BumpPointer)
+
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_bump_pointer_instrumented, BumpPointerInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_bump_pointer_instrumented, BumpPointerInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_bump_pointer_instrumented, BumpPointerInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer_instrumented, BumpPointerInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_bump_pointer_instrumented, BumpPointerInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer_instrumented, BumpPointerInstrumented)
+
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_tlab, TLAB)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_tlab, TLAB)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_tlab, TLAB)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab, TLAB)
+GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_tlab, TLAB)
+GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab, TLAB)
+
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_tlab_instrumented, TLABInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_tlab_instrumented, TLABInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_tlab_instrumented, TLABInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab_instrumented, TLABInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_tlab_instrumented, TLABInstrumented)
+GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab_instrumented, TLABInstrumented)
TWO_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_RESULT_IS_NON_ZERO
TWO_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO
@@ -445,11 +514,11 @@
PUSH eax // push padding
PUSH edx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH eax // pass object
- call artLockObjectFromCode@PLT // artLockObjectFromCode(object, Thread*, SP)
- addl MACRO_LITERAL(16), %esp // pop arguments
- .cfi_adjust_cfa_offset -16
+ call PLT_SYMBOL(artLockObjectFromCode) // artLockObjectFromCode(object, Thread*, SP)
+ addl LITERAL(16), %esp // pop arguments
+ CFI_ADJUST_CFA_OFFSET(-16)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
RETURN_IF_EAX_ZERO
END_FUNCTION art_quick_lock_object
@@ -479,11 +548,11 @@
PUSH eax // push padding
PUSH edx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH eax // pass object
- call artUnlockObjectFromCode@PLT // artUnlockObjectFromCode(object, Thread*, SP)
- addl MACRO_LITERAL(16), %esp // pop arguments
- .cfi_adjust_cfa_offset -16
+ call PLT_SYMBOL(artUnlockObjectFromCode) // artUnlockObjectFromCode(object, Thread*, SP)
+ addl LITERAL(16), %esp // pop arguments
+ CFI_ADJUST_CFA_OFFSET(-16)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
RETURN_IF_EAX_ZERO
END_FUNCTION art_quick_unlock_object
@@ -493,9 +562,9 @@
PUSH eax // alignment padding
PUSH ecx // pass arg2 - obj->klass
PUSH eax // pass arg1 - checked class
- call SYMBOL(artIsAssignableFromCode)@PLT // (Class* klass, Class* ref_klass)
+ call PLT_SYMBOL(artIsAssignableFromCode) // (Class* klass, Class* ref_klass)
addl LITERAL(12), %esp // pop arguments
- .cfi_adjust_cfa_offset -12
+ CFI_ADJUST_CFA_OFFSET(-12)
ret
END_FUNCTION art_quick_is_assignable
@@ -504,26 +573,26 @@
PUSH eax // alignment padding
PUSH ecx // pass arg2 - obj->klass
PUSH eax // pass arg1 - checked class
- call SYMBOL(artIsAssignableFromCode)@PLT // (Class* klass, Class* ref_klass)
+ call PLT_SYMBOL(artIsAssignableFromCode) // (Class* klass, Class* ref_klass)
testl %eax, %eax
jz 1f // jump forward if not assignable
addl LITERAL(12), %esp // pop arguments
- .cfi_adjust_cfa_offset -12
+ CFI_ADJUST_CFA_OFFSET(-12)
ret
1:
POP eax // pop arguments
POP ecx
addl LITERAL(4), %esp
- .cfi_adjust_cfa_offset -12
+ CFI_ADJUST_CFA_OFFSET(-12)
SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context
mov %esp, %edx
// Outgoing argument set up
PUSH edx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH ecx // pass arg2
PUSH eax // pass arg1
- call SYMBOL(artThrowClassCastException)@PLT // (Class* a, Class* b, Thread*, SP)
+ call PLT_SYMBOL(artThrowClassCastException) // (Class* a, Class* b, Thread*, SP)
int3 // unreached
END_FUNCTION art_quick_check_cast
@@ -568,14 +637,14 @@
PUSH ecx
PUSH edx
subl LITERAL(8), %esp // alignment padding
- .cfi_adjust_cfa_offset 8
+ CFI_ADJUST_CFA_OFFSET(8)
pushl CLASS_OFFSET(%edx) // pass arg2 - type of the value to be stored
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH ebx // pass arg1 - component type of the array
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artIsAssignableFromCode)@PLT // (Class* a, Class* b)
+ call PLT_SYMBOL(artIsAssignableFromCode) // (Class* a, Class* b)
addl LITERAL(16), %esp // pop arguments
- .cfi_adjust_cfa_offset -16
+ CFI_ADJUST_CFA_OFFSET(-16)
testl %eax, %eax
jz throw_array_store_exception
POP edx
@@ -595,10 +664,10 @@
// Outgoing argument set up
PUSH ecx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH edx // pass arg2 - value
PUSH eax // pass arg1 - array
- call SYMBOL(artThrowArrayStoreException)@PLT // (array, value, Thread*, SP)
+ call PLT_SYMBOL(artThrowArrayStoreException) // (array, value, Thread*, SP)
int3 // unreached
END_FUNCTION art_quick_aput_obj
@@ -607,9 +676,9 @@
PUSH edx // pass arg3
PUSH ecx // pass arg2
PUSH eax // pass arg1
- call SYMBOL(memcpy)@PLT // (void*, const void*, size_t)
+ call PLT_SYMBOL(memcpy) // (void*, const void*, size_t)
addl LITERAL(12), %esp // pop arguments
- .cfi_adjust_cfa_offset -12
+ CFI_ADJUST_CFA_OFFSET(-12)
ret
END_FUNCTION art_quick_memcpy
@@ -617,17 +686,17 @@
DEFINE_FUNCTION art_quick_fmod
subl LITERAL(12), %esp // alignment padding
- .cfi_adjust_cfa_offset 12
+ CFI_ADJUST_CFA_OFFSET(12)
PUSH ebx // pass arg4 b.hi
PUSH edx // pass arg3 b.lo
PUSH ecx // pass arg2 a.hi
PUSH eax // pass arg1 a.lo
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(fmod)@PLT // (jdouble a, jdouble b)
+ call PLT_SYMBOL(fmod) // (jdouble a, jdouble b)
fstpl (%esp) // pop return value off fp stack
movsd (%esp), %xmm0 // place into %xmm0
addl LITERAL(28), %esp // pop arguments
- .cfi_adjust_cfa_offset -28
+ CFI_ADJUST_CFA_OFFSET(-28)
ret
END_FUNCTION art_quick_fmod
@@ -636,11 +705,11 @@
PUSH ecx // pass arg2 b
PUSH eax // pass arg1 a
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(fmodf)@PLT // (jfloat a, jfloat b)
+ call PLT_SYMBOL(fmodf) // (jfloat a, jfloat b)
fstps (%esp) // pop return value off fp stack
movss (%esp), %xmm0 // place into %xmm0
addl LITERAL(12), %esp // pop arguments
- .cfi_adjust_cfa_offset -12
+ CFI_ADJUST_CFA_OFFSET(-12)
ret
END_FUNCTION art_quick_fmodf
@@ -651,7 +720,7 @@
fstpl (%esp) // pop value off fp stack as double
movsd (%esp), %xmm0 // place into %xmm0
addl LITERAL(8), %esp // pop arguments
- .cfi_adjust_cfa_offset -8
+ CFI_ADJUST_CFA_OFFSET(-8)
ret
END_FUNCTION art_quick_l2d
@@ -662,7 +731,7 @@
fstps (%esp) // pop value off fp stack as a single
movss (%esp), %xmm0 // place into %xmm0
addl LITERAL(8), %esp // pop argument
- .cfi_adjust_cfa_offset -8
+ CFI_ADJUST_CFA_OFFSET(-8)
ret
END_FUNCTION art_quick_l2f
@@ -671,20 +740,20 @@
PUSH ecx // pass arg2 a.hi
PUSH eax // pass arg1 a.lo
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(art_d2l)@PLT // (jdouble a)
+ call PLT_SYMBOL(art_d2l) // (jdouble a)
addl LITERAL(12), %esp // pop arguments
- .cfi_adjust_cfa_offset -12
+ CFI_ADJUST_CFA_OFFSET(-12)
ret
END_FUNCTION art_quick_d2l
DEFINE_FUNCTION art_quick_f2l
subl LITERAL(8), %esp // alignment padding
- .cfi_adjust_cfa_offset 8
+ CFI_ADJUST_CFA_OFFSET(8)
SETUP_GOT_NOSAVE // clobbers EBX
PUSH eax // pass arg1 a
- call SYMBOL(art_f2l)@PLT // (jfloat a)
+ call PLT_SYMBOL(art_f2l) // (jfloat a)
addl LITERAL(12), %esp // pop arguments
- .cfi_adjust_cfa_offset -12
+ CFI_ADJUST_CFA_OFFSET(-12)
ret
END_FUNCTION art_quick_f2l
@@ -704,29 +773,29 @@
DEFINE_FUNCTION art_quick_ldiv
subl LITERAL(12), %esp // alignment padding
- .cfi_adjust_cfa_offset 12
+ CFI_ADJUST_CFA_OFFSET(12)
PUSH ebx // pass arg4 b.hi
PUSH edx // pass arg3 b.lo
PUSH ecx // pass arg2 a.hi
PUSH eax // pass arg1 a.lo
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artLdiv)@PLT // (jlong a, jlong b)
+ call PLT_SYMBOL(artLdiv) // (jlong a, jlong b)
addl LITERAL(28), %esp // pop arguments
- .cfi_adjust_cfa_offset -28
+ CFI_ADJUST_CFA_OFFSET(-28)
ret
END_FUNCTION art_quick_ldiv
DEFINE_FUNCTION art_quick_lmod
subl LITERAL(12), %esp // alignment padding
- .cfi_adjust_cfa_offset 12
+ CFI_ADJUST_CFA_OFFSET(12)
PUSH ebx // pass arg4 b.hi
PUSH edx // pass arg3 b.lo
PUSH ecx // pass arg2 a.hi
PUSH eax // pass arg1 a.lo
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artLmod)@PLT // (jlong a, jlong b)
+ call PLT_SYMBOL(artLmod) // (jlong a, jlong b)
addl LITERAL(28), %esp // pop arguments
- .cfi_adjust_cfa_offset -28
+ CFI_ADJUST_CFA_OFFSET(-28)
ret
END_FUNCTION art_quick_lmod
@@ -782,19 +851,19 @@
SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
mov %esp, %ebx // remember SP
subl LITERAL(8), %esp // alignment padding
- .cfi_adjust_cfa_offset 8
+ CFI_ADJUST_CFA_OFFSET(8)
PUSH ebx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
mov 32(%ebx), %ebx // get referrer
PUSH ebx // pass referrer
PUSH edx // pass new_val
PUSH ecx // pass object
PUSH eax // pass field_idx
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artSet32InstanceFromCode)@PLT // (field_idx, Object*, new_val, referrer, Thread*, SP)
+ call PLT_SYMBOL(artSet32InstanceFromCode) // (field_idx, Object*, new_val, referrer, Thread*, SP)
addl LITERAL(32), %esp // pop arguments
- .cfi_adjust_cfa_offset -32
+ CFI_ADJUST_CFA_OFFSET(-32)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
RETURN_IF_EAX_ZERO // return or deliver exception
END_FUNCTION art_quick_set32_instance
@@ -802,19 +871,19 @@
DEFINE_FUNCTION art_quick_set64_instance
SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
subl LITERAL(8), %esp // alignment padding
- .cfi_adjust_cfa_offset 8
+ CFI_ADJUST_CFA_OFFSET(8)
PUSH esp // pass SP-8
addl LITERAL(8), (%esp) // fix SP on stack by adding 8
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH ebx // pass high half of new_val
PUSH edx // pass low half of new_val
PUSH ecx // pass object
PUSH eax // pass field_idx
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artSet64InstanceFromCode)@PLT // (field_idx, Object*, new_val, Thread*, SP)
+ call PLT_SYMBOL(artSet64InstanceFromCode) // (field_idx, Object*, new_val, Thread*, SP)
addl LITERAL(32), %esp // pop arguments
- .cfi_adjust_cfa_offset -32
+ CFI_ADJUST_CFA_OFFSET(-32)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
RETURN_IF_EAX_ZERO // return or deliver exception
END_FUNCTION art_quick_set64_instance
@@ -823,19 +892,19 @@
SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
mov %esp, %ebx // remember SP
subl LITERAL(8), %esp // alignment padding
- .cfi_adjust_cfa_offset 8
+ CFI_ADJUST_CFA_OFFSET(8)
PUSH ebx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
mov 32(%ebx), %ebx // get referrer
PUSH ebx // pass referrer
PUSH edx // pass new_val
PUSH ecx // pass object
PUSH eax // pass field_idx
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artSetObjInstanceFromCode)@PLT // (field_idx, Object*, new_val, referrer, Thread*, SP)
+ call PLT_SYMBOL(artSetObjInstanceFromCode) // (field_idx, Object*, new_val, referrer, Thread*, SP)
addl LITERAL(32), %esp // pop arguments
- .cfi_adjust_cfa_offset -32
+ CFI_ADJUST_CFA_OFFSET(-32)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
RETURN_IF_EAX_ZERO // return or deliver exception
END_FUNCTION art_quick_set_obj_instance
@@ -845,17 +914,17 @@
mov %esp, %ebx // remember SP
mov 32(%esp), %edx // get referrer
subl LITERAL(12), %esp // alignment padding
- .cfi_adjust_cfa_offset 12
+ CFI_ADJUST_CFA_OFFSET(12)
PUSH ebx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH edx // pass referrer
PUSH ecx // pass object
PUSH eax // pass field_idx
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artGet32InstanceFromCode)@PLT // (field_idx, Object*, referrer, Thread*, SP)
+ call PLT_SYMBOL(artGet32InstanceFromCode) // (field_idx, Object*, referrer, Thread*, SP)
addl LITERAL(32), %esp // pop arguments
- .cfi_adjust_cfa_offset -32
+ CFI_ADJUST_CFA_OFFSET(-32)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
END_FUNCTION art_quick_get32_instance
@@ -865,17 +934,17 @@
mov %esp, %ebx // remember SP
mov 32(%esp), %edx // get referrer
subl LITERAL(12), %esp // alignment padding
- .cfi_adjust_cfa_offset 12
+ CFI_ADJUST_CFA_OFFSET(12)
PUSH ebx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH edx // pass referrer
PUSH ecx // pass object
PUSH eax // pass field_idx
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artGet64InstanceFromCode)@PLT // (field_idx, Object*, referrer, Thread*, SP)
+ call PLT_SYMBOL(artGet64InstanceFromCode) // (field_idx, Object*, referrer, Thread*, SP)
addl LITERAL(32), %esp // pop arguments
- .cfi_adjust_cfa_offset -32
+ CFI_ADJUST_CFA_OFFSET(-32)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
END_FUNCTION art_quick_get64_instance
@@ -885,17 +954,17 @@
mov %esp, %ebx // remember SP
mov 32(%esp), %edx // get referrer
subl LITERAL(12), %esp // alignment padding
- .cfi_adjust_cfa_offset 12
+ CFI_ADJUST_CFA_OFFSET(12)
PUSH ebx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH edx // pass referrer
PUSH ecx // pass object
PUSH eax // pass field_idx
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artGetObjInstanceFromCode)@PLT // (field_idx, Object*, referrer, Thread*, SP)
+ call PLT_SYMBOL(artGetObjInstanceFromCode) // (field_idx, Object*, referrer, Thread*, SP)
addl LITERAL(32), %esp // pop arguments
- .cfi_adjust_cfa_offset -32
+ CFI_ADJUST_CFA_OFFSET(-32)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
END_FUNCTION art_quick_get_obj_instance
@@ -905,17 +974,17 @@
mov %esp, %ebx // remember SP
mov 32(%esp), %edx // get referrer
subl LITERAL(12), %esp // alignment padding
- .cfi_adjust_cfa_offset 12
+ CFI_ADJUST_CFA_OFFSET(12)
PUSH ebx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH edx // pass referrer
PUSH ecx // pass new_val
PUSH eax // pass field_idx
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artSet32StaticFromCode)@PLT // (field_idx, new_val, referrer, Thread*, SP)
+ call PLT_SYMBOL(artSet32StaticFromCode) // (field_idx, new_val, referrer, Thread*, SP)
addl LITERAL(32), %esp // pop arguments
- .cfi_adjust_cfa_offset -32
+ CFI_ADJUST_CFA_OFFSET(-32)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
RETURN_IF_EAX_ZERO // return or deliver exception
END_FUNCTION art_quick_set32_static
@@ -924,19 +993,19 @@
SETUP_REF_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
mov %esp, %ebx // remember SP
subl LITERAL(8), %esp // alignment padding
- .cfi_adjust_cfa_offset 8
+ CFI_ADJUST_CFA_OFFSET(8)
PUSH ebx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
mov 32(%ebx), %ebx // get referrer
PUSH edx // pass high half of new_val
PUSH ecx // pass low half of new_val
PUSH ebx // pass referrer
PUSH eax // pass field_idx
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artSet64StaticFromCode)@PLT // (field_idx, referrer, new_val, Thread*, SP)
+ call PLT_SYMBOL(artSet64StaticFromCode) // (field_idx, referrer, new_val, Thread*, SP)
addl LITERAL(32), %esp // pop arguments
- .cfi_adjust_cfa_offset -32
+ CFI_ADJUST_CFA_OFFSET(-32)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
RETURN_IF_EAX_ZERO // return or deliver exception
END_FUNCTION art_quick_set64_static
@@ -946,15 +1015,15 @@
mov %esp, %ebx // remember SP
mov 32(%esp), %edx // get referrer
subl LITERAL(12), %esp // alignment padding
- .cfi_adjust_cfa_offset 12
+ CFI_ADJUST_CFA_OFFSET(12)
PUSH ebx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH edx // pass referrer
PUSH ecx // pass new_val
PUSH eax // pass field_idx
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artSetObjStaticFromCode)@PLT // (field_idx, new_val, referrer, Thread*, SP)
+ call PLT_SYMBOL(artSetObjStaticFromCode) // (field_idx, new_val, referrer, Thread*, SP)
addl LITERAL(32), %esp // pop arguments
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
RETURN_IF_EAX_ZERO // return or deliver exception
@@ -966,13 +1035,13 @@
mov 32(%esp), %ecx // get referrer
PUSH edx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH ecx // pass referrer
PUSH eax // pass field_idx
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artGet32StaticFromCode)@PLT // (field_idx, referrer, Thread*, SP)
+ call PLT_SYMBOL(artGet32StaticFromCode) // (field_idx, referrer, Thread*, SP)
addl LITERAL(16), %esp // pop arguments
- .cfi_adjust_cfa_offset -16
+ CFI_ADJUST_CFA_OFFSET(-16)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
END_FUNCTION art_quick_get32_static
@@ -983,13 +1052,13 @@
mov 32(%esp), %ecx // get referrer
PUSH edx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH ecx // pass referrer
PUSH eax // pass field_idx
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artGet64StaticFromCode)@PLT // (field_idx, referrer, Thread*, SP)
+ call PLT_SYMBOL(artGet64StaticFromCode) // (field_idx, referrer, Thread*, SP)
addl LITERAL(16), %esp // pop arguments
- .cfi_adjust_cfa_offset -16
+ CFI_ADJUST_CFA_OFFSET(-16)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
END_FUNCTION art_quick_get64_static
@@ -1000,13 +1069,13 @@
mov 32(%esp), %ecx // get referrer
PUSH edx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH ecx // pass referrer
PUSH eax // pass field_idx
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artGetObjStaticFromCode)@PLT // (field_idx, referrer, Thread*, SP)
+ call PLT_SYMBOL(artGetObjStaticFromCode) // (field_idx, referrer, Thread*, SP)
addl LITERAL(16), %esp // pop arguments
- .cfi_adjust_cfa_offset -16
+ CFI_ADJUST_CFA_OFFSET(-16)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
END_FUNCTION art_quick_get_obj_static
@@ -1015,16 +1084,16 @@
SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME // save frame and Method*
PUSH esp // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH ecx // pass receiver
PUSH eax // pass proxy method
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artQuickProxyInvokeHandler)@PLT // (proxy method, receiver, Thread*, SP)
+ call PLT_SYMBOL(artQuickProxyInvokeHandler) // (proxy method, receiver, Thread*, SP)
movd %eax, %xmm0 // place return value also into floating point return value
movd %edx, %xmm1
punpckldq %xmm1, %xmm0
addl LITERAL(44), %esp // pop arguments
- .cfi_adjust_cfa_offset -44
+ CFI_ADJUST_CFA_OFFSET(-44)
RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
END_FUNCTION art_quick_proxy_invoke_handler
@@ -1046,11 +1115,11 @@
SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
PUSH esp // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH ecx // pass receiver
PUSH eax // pass method
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artQuickResolutionTrampoline)@PLT // (Method* called, receiver, Thread*, SP)
+ call PLT_SYMBOL(artQuickResolutionTrampoline) // (Method* called, receiver, Thread*, SP)
movl %eax, %edi // remember code pointer in EDI
addl LITERAL(16), %esp // pop arguments
test %eax, %eax // if code pointer is NULL goto deliver pending exception
@@ -1074,15 +1143,15 @@
PUSH eax // alignment padding
PUSH edx // pass SP
pushl %fs:THREAD_SELF_OFFSET // pass Thread::Current()
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH eax // pass method
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artQuickToInterpreterBridge)@PLT // (method, Thread*, SP)
+ call PLT_SYMBOL(artQuickToInterpreterBridge) // (method, Thread*, SP)
movd %eax, %xmm0 // place return value also into floating point return value
movd %edx, %xmm1
punpckldq %xmm1, %xmm0
addl LITERAL(16), %esp // pop arguments
- .cfi_adjust_cfa_offset -16
+ CFI_ADJUST_CFA_OFFSET(-16)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
END_FUNCTION art_quick_to_interpreter_bridge
@@ -1095,20 +1164,20 @@
movl %esp, %edx // Save SP.
PUSH eax // Save eax which will be clobbered by the callee-save method.
subl LITERAL(8), %esp // Align stack.
- .cfi_adjust_cfa_offset 8
+ CFI_ADJUST_CFA_OFFSET(8)
pushl 40(%esp) // Pass LR.
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH edx // Pass SP.
pushl %fs:THREAD_SELF_OFFSET // Pass Thread::Current().
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
PUSH ecx // Pass receiver.
PUSH eax // Pass Method*.
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artInstrumentationMethodEntryFromCode)@PLT // (Method*, Object*, Thread*, SP, LR)
+ call SYMBOL(artInstrumentationMethodEntryFromCode) // (Method*, Object*, Thread*, SP, LR)
addl LITERAL(28), %esp // Pop arguments up to saved Method*.
movl 28(%esp), %edi // Restore edi.
movl %eax, 28(%esp) // Place code* over edi, just under return pc.
- movl LITERAL(SYMBOL(art_quick_instrumentation_exit)@PLT), 32(%esp)
+ movl LITERAL(SYMBOL(art_quick_instrumentation_exit)), 32(%esp)
// Place instrumentation exit as return pc.
movl (%esp), %eax // Restore eax.
movl 8(%esp), %ecx // Restore ecx.
@@ -1125,32 +1194,32 @@
SETUP_REF_ONLY_CALLEE_SAVE_FRAME
mov %esp, %ecx // Remember SP
subl LITERAL(8), %esp // Save float return value.
- .cfi_adjust_cfa_offset 8
+ CFI_ADJUST_CFA_OFFSET(8)
movd %xmm0, (%esp)
PUSH edx // Save gpr return value.
PUSH eax
subl LITERAL(8), %esp // Align stack
movd %xmm0, (%esp)
subl LITERAL(8), %esp // Pass float return value.
- .cfi_adjust_cfa_offset 8
+ CFI_ADJUST_CFA_OFFSET(8)
movd %xmm0, (%esp)
PUSH edx // Pass gpr return value.
PUSH eax
PUSH ecx // Pass SP.
pushl %fs:THREAD_SELF_OFFSET // Pass Thread::Current.
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artInstrumentationMethodExitFromCode)@PLT // (Thread*, SP, gpr_result, fpr_result)
+ call PLT_SYMBOL(artInstrumentationMethodExitFromCode) // (Thread*, SP, gpr_result, fpr_result)
mov %eax, %ecx // Move returned link register.
addl LITERAL(32), %esp // Pop arguments.
- .cfi_adjust_cfa_offset -32
+ CFI_ADJUST_CFA_OFFSET(-32)
movl %edx, %ebx // Move returned link register for deopt
// (ebx is pretending to be our LR).
POP eax // Restore gpr return value.
POP edx
movd (%esp), %xmm0 // Restore fpr return value.
addl LITERAL(8), %esp
- .cfi_adjust_cfa_offset -8
+ CFI_ADJUST_CFA_OFFSET(-8)
RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
addl LITERAL(4), %esp // Remove fake return pc.
jmp *%ecx // Return.
@@ -1165,12 +1234,12 @@
SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
mov %esp, %ecx // Remember SP.
subl LITERAL(8), %esp // Align stack.
- .cfi_adjust_cfa_offset 8
+ CFI_ADJUST_CFA_OFFSET(8)
PUSH ecx // Pass SP.
pushl %fs:THREAD_SELF_OFFSET // Pass Thread::Current().
- .cfi_adjust_cfa_offset 4
+ CFI_ADJUST_CFA_OFFSET(4)
SETUP_GOT_NOSAVE // clobbers EBX
- call SYMBOL(artDeoptimize)@PLT // artDeoptimize(Thread*, SP)
+ call PLT_SYMBOL(artDeoptimize) // artDeoptimize(Thread*, SP)
int3 // Unreachable.
END_FUNCTION art_quick_deoptimize