Merge "ART: Update and correct assemble_x86.cc"
diff --git a/compiler/dex/compiler_enums.h b/compiler/dex/compiler_enums.h
index cbb2c30..05ab8ca 100644
--- a/compiler/dex/compiler_enums.h
+++ b/compiler/dex/compiler_enums.h
@@ -129,7 +129,7 @@
kMirOpLast,
};
-enum MIROptimizationFlagPositons {
+enum MIROptimizationFlagPositions {
kMIRIgnoreNullCheck = 0,
kMIRNullCheckOnly,
kMIRIgnoreRangeCheck,
@@ -141,6 +141,7 @@
kMIRIgnoreSuspendCheck,
kMIRDup,
kMIRMark, // Temporary node mark.
+ kMIRLastMIRFlag,
};
// For successor_block_list.
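
The new kMIRLastMIRFlag sentinel gives the flag count without hard-coding it. A minimal sketch of how such a trailing sentinel is typically used, with hypothetical flag names and an assumed 32-bit flags word (not the actual MIR field):

#include <cstdint>

enum FlagPositions {
  kIgnoreNullCheck = 0,
  kIgnoreRangeCheck,
  kMark,
  kLastFlag,  // sentinel: number of flag positions defined above
};

// With the sentinel, the positions can be checked against the width of the
// word that stores them as bits.
static_assert(kLastFlag <= 8 * sizeof(uint32_t),
              "flag positions must fit in a 32-bit optimization-flags word");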
diff --git a/compiler/dex/quick/codegen_util.cc b/compiler/dex/quick/codegen_util.cc
index 3961954..9f84e09 100644
--- a/compiler/dex/quick/codegen_util.cc
+++ b/compiler/dex/quick/codegen_util.cc
@@ -1050,10 +1050,11 @@
int Mir2Lir::ComputeFrameSize() {
/* Figure out the frame size */
static const uint32_t kAlignMask = kStackAlignment - 1;
- uint32_t size = ((num_core_spills_ + num_fp_spills_ +
- 1 /* filler word */ + cu_->num_regs + cu_->num_outs)
- * sizeof(uint32_t)) +
- GetNumBytesForCompilerTempSpillRegion();
+ uint32_t size = num_core_spills_ * GetBytesPerGprSpillLocation(cu_->instruction_set)
+ + num_fp_spills_ * GetBytesPerFprSpillLocation(cu_->instruction_set)
+ + sizeof(uint32_t) // Filler.
+ + (cu_->num_regs + cu_->num_outs) * sizeof(uint32_t)
+ + GetNumBytesForCompilerTempSpillRegion();
/* Align and set */
return (size + kAlignMask) & ~(kAlignMask);
}
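
The rewritten computation sizes core and FP spills by the target's per-register spill width instead of assuming one word each. A minimal sketch of the same math, assuming 8-byte GPR/FPR spill slots and 16-byte stack alignment (plausible 64-bit values); the function and constants below are illustrative stand-ins, not the ART helpers:

#include <cstdint>
#include <cstdio>

uint32_t ComputeFrameSizeSketch(uint32_t core_spills, uint32_t fp_spills,
                                uint32_t num_regs, uint32_t num_outs,
                                uint32_t temp_spill_bytes) {
  const uint32_t kStackAlignment = 16;        // assumed target alignment
  const uint32_t kAlignMask = kStackAlignment - 1;
  const uint32_t kGprSpillBytes = 8;          // assumed 64-bit core spill slot
  const uint32_t kFprSpillBytes = 8;          // assumed 64-bit FP spill slot
  uint32_t size = core_spills * kGprSpillBytes
                + fp_spills * kFprSpillBytes
                + sizeof(uint32_t)            // filler word
                + (num_regs + num_outs) * sizeof(uint32_t)
                + temp_spill_bytes;
  return (size + kAlignMask) & ~kAlignMask;   // round up to the alignment
}

int main() {
  // 4 core spills, 2 FP spills, 6 dalvik regs, 3 outs, no compiler temps:
  // 32 + 16 + 4 + 36 + 0 = 88, rounded up to 96.
  printf("%u\n", ComputeFrameSizeSketch(4, 2, 6, 3, 0));
  return 0;
}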
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index ff316e5..8b85d71 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -132,6 +132,12 @@
}
}
+ // Make all registers available for the return value.
+ for (size_t i = 0, e = GetNumberOfRegisters(); i < e; ++i) {
+ blocked_registers_[i] = false;
+ }
+ SetupBlockedRegisters(blocked_registers_);
+
Location result_location = locations->Out();
if (result_location.IsUnallocated()) {
switch (result_location.GetPolicy()) {
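
The added loop clears the blocked set before SetupBlockedRegisters reapplies the backend's permanent reservations, so only those reservations constrain the output register. A minimal sketch of that clear-then-reblock pattern with a hypothetical backend (not ART's code generators):

#include <cstddef>

class FakeBackend {
 public:
  enum { kNumberOfRegisters = 16 };  // hypothetical register file size

  // Re-marks the registers this backend never hands out.
  void SetupBlockedRegisters(bool* blocked) const {
    blocked[4] = true;    // pretend register 4 is the stack pointer
    blocked[12] = true;   // pretend register 12 is a reserved scratch register
  }

  void PrepareForOutputAllocation(bool* blocked) const {
    // Make every register available again for the return value...
    for (size_t i = 0; i < kNumberOfRegisters; ++i) {
      blocked[i] = false;
    }
    // ...then reapply the permanent reservations before picking the output.
    SetupBlockedRegisters(blocked);
  }
};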
diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
index 9ccf6c9..7b56718 100644
--- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
@@ -870,18 +870,138 @@
UNIMPLEMENTED art_quick_lshl
UNIMPLEMENTED art_quick_lshr
UNIMPLEMENTED art_quick_lushr
-UNIMPLEMENTED art_quick_set32_instance
-UNIMPLEMENTED art_quick_set64_instance
-UNIMPLEMENTED art_quick_set_obj_instance
-UNIMPLEMENTED art_quick_get32_instance
-UNIMPLEMENTED art_quick_get64_instance
-UNIMPLEMENTED art_quick_get_obj_instance
-UNIMPLEMENTED art_quick_set32_static
-UNIMPLEMENTED art_quick_set64_static
-UNIMPLEMENTED art_quick_set_obj_static
-UNIMPLEMENTED art_quick_get32_static
-UNIMPLEMENTED art_quick_get64_static
-UNIMPLEMENTED art_quick_get_obj_static
+
+DEFINE_FUNCTION art_quick_set32_instance
+ movq 8(%rsp), %rcx // pass referrer
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME
+ // field_idx, Object* and new_val are in rdi/rsi/rdx
+ movq %gs:THREAD_SELF_OFFSET, %r8 // pass Thread::Current()
+ movq %rsp, %r9 // pass SP
+ call PLT_SYMBOL(artSet32InstanceFromCode) // (field_idx, Object*, new_val, referrer, Thread*, SP)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
+ RETURN_IF_EAX_ZERO // return or deliver exception
+END_FUNCTION art_quick_set32_instance
+
+DEFINE_FUNCTION art_quick_set64_instance
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME
+ // field_idx, Object* and new_val are in rdi/rsi/rdx
+ movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current()
+ movq %rsp, %r8 // pass SP
+ call PLT_SYMBOL(artSet64InstanceFromCode) // (field_idx, Object*, new_val, Thread*, SP)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
+ RETURN_IF_EAX_ZERO // return or deliver exception
+END_FUNCTION art_quick_set64_instance
+
+DEFINE_FUNCTION art_quick_set_obj_instance
+ movq 8(%rsp), %rcx // pass referrer
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME
+ // field_idx, Object* and new_val are in rdi/rsi/rdx
+ movq %gs:THREAD_SELF_OFFSET, %r8 // pass Thread::Current()
+ movq %rsp, %r9 // pass SP
+ call PLT_SYMBOL(artSetObjInstanceFromCode) // (field_idx, Object*, new_val, referrer, Thread*, SP)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
+ RETURN_IF_EAX_ZERO // return or deliver exception
+END_FUNCTION art_quick_set_obj_instance
+
+DEFINE_FUNCTION art_quick_get32_instance
+ movq 8(%rsp), %rdx // pass referrer
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME
+ // field_idx and Object* are in rdi/rsi
+ movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current()
+ movq %rsp, %r8 // pass SP
+ call PLT_SYMBOL(artGet32InstanceFromCode) // (field_idx, Object*, referrer, Thread*, SP)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
+ RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
+END_FUNCTION art_quick_get32_instance
+
+DEFINE_FUNCTION art_quick_get64_instance
+ movq 8(%rsp), %rdx // pass referrer
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME
+ // field_idx and Object* are in rdi/rsi
+ movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current()
+ movq %rsp, %r8 // pass SP
+ call PLT_SYMBOL(artGet64InstanceFromCode) // (field_idx, Object*, referrer, Thread*, SP)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
+ RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
+END_FUNCTION art_quick_get64_instance
+
+DEFINE_FUNCTION art_quick_get_obj_instance
+ movq 8(%rsp), %rdx // pass referrer
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME
+ // field_idx and Object* are in rdi/rsi
+ movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current()
+ movq %rsp, %r8 // pass SP
+ call PLT_SYMBOL(artGetObjInstanceFromCode) // (field_idx, Object*, referrer, Thread*, SP)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
+ RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception
+END_FUNCTION art_quick_get_obj_instance
+
+DEFINE_FUNCTION art_quick_set32_static
+ movq 8(%rsp), %rdx // pass referrer
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME
+ // field_idx and new_val are in rdi/rsi
+ movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current()
+ movq %rsp, %r8 // pass SP
+ call PLT_SYMBOL(artSet32StaticFromCode) // (field_idx, new_val, referrer, Thread*, SP)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
+ RETURN_IF_EAX_ZERO // return or deliver exception
+END_FUNCTION art_quick_set32_static
+
+DEFINE_FUNCTION art_quick_set64_static
+ movq %rsi, %rdx // pass new_val
+ movq 8(%rsp), %rsi // pass referrer
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME
+ // field_idx is in rdi
+ movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current()
+ movq %rsp, %r8 // pass SP
+ call PLT_SYMBOL(artSet64StaticFromCode) // (field_idx, referrer, new_val, Thread*, SP)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
+ RETURN_IF_EAX_ZERO // return or deliver exception
+END_FUNCTION art_quick_set64_static
+
+DEFINE_FUNCTION art_quick_set_obj_static
+ movq 8(%rsp), %rdx // pass referrer
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME
+ // field_idx and new_val are in rdi/rsi
+ movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current()
+ movq %rsp, %r8 // pass SP
+ call PLT_SYMBOL(artSetObjStaticFromCode) // (field_idx, new_val, referrer, Thread*, SP)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
+    RETURN_IF_EAX_ZERO                         // return or deliver exception
+END_FUNCTION art_quick_set_obj_static
+
+DEFINE_FUNCTION art_quick_get32_static
+ movq 8(%rsp), %rsi // pass referrer
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME
+ // field_idx is in rdi
+ movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current()
+ movq %rsp, %rcx // pass SP
+ call PLT_SYMBOL(artGet32StaticFromCode) // (field_idx, referrer, Thread*, SP)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
+ RETURN_OR_DELIVER_PENDING_EXCEPTION
+END_FUNCTION art_quick_get32_static
+
+DEFINE_FUNCTION art_quick_get64_static
+ movq 8(%rsp), %rsi // pass referrer
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME
+ // field_idx is in rdi
+ movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current()
+ movq %rsp, %rcx // pass SP
+ call PLT_SYMBOL(artGet64StaticFromCode) // (field_idx, referrer, Thread*, SP)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
+ RETURN_OR_DELIVER_PENDING_EXCEPTION
+END_FUNCTION art_quick_get64_static
+
+DEFINE_FUNCTION art_quick_get_obj_static
+ movq 8(%rsp), %rsi // pass referrer
+ SETUP_REF_ONLY_CALLEE_SAVE_FRAME
+ // field_idx is in rdi
+ movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current()
+ movq %rsp, %rcx // pass SP
+ call PLT_SYMBOL(artGetObjStaticFromCode) // (field_idx, referrer, Thread*, SP)
+ RESTORE_REF_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
+ RETURN_OR_DELIVER_PENDING_EXCEPTION
+END_FUNCTION art_quick_get_obj_static
DEFINE_FUNCTION art_quick_proxy_invoke_handler
// Save callee and GPR args, mixed together to agree with core spills bitmap of ref. and args
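
All of the new field get/set stubs follow one shape: the incoming arguments already sit in rdi/rsi/(rdx), the stub appends referrer, Thread* and SP in the next free System V AMD64 argument registers (rdi, rsi, rdx, rcx, r8, r9 in that order), builds the ref-only callee-save frame, calls the C++ helper through the PLT, restores the frame, and then either checks EAX (the setters) or a pending exception (the getters). A hypothetical helper signature illustrating which register each parameter position lands in for the set32_instance case; this is not an ART prototype:

// Illustration only: parameter positions versus System V AMD64 registers.
extern "C" int ExampleSet32InstanceHelper(unsigned field_idx,  // rdi
                                          void* obj,           // rsi
                                          unsigned new_val,    // rdx
                                          void* referrer,      // rcx
                                          void* self,          // r8
                                          void* sp) {          // r9
  // A real helper would resolve the field, perform the 32-bit store and
  // return 0 on success (the stub's RETURN_IF_EAX_ZERO checks that).
  (void)field_idx; (void)obj; (void)new_val;
  (void)referrer; (void)self; (void)sp;
  return 0;
}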