Opt Compiler: ARM64: prefer acquire-release and allow constants in HBoundsCheck

This patch:
* Switches on PreferAcquireRelease(), which decides whether volatile
  loads and stores should use acquire-release semantics or explicit
  memory barriers. Note that for ARMv8 CPUs we should always prefer
  acquire-release, as shown by synthetic benchmarks on A53, A57 and
  Denver. See the first sketch after this list.

* Enables the use of encodable immediate constants as inputs to
  HBoundsCheck, so a compile-time-known length no longer needs a
  register. See the second sketch after this list.
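
A minimal stand-alone C++ sketch (not ART code; the function names are
invented for illustration) of the two lowering strategies that
PreferAcquireRelease() chooses between. On ARMv8, the acquire/release
orderings compile to single LDAR/STLR instructions, while the
fence-based variant brackets plain LDR/STR with DMB barriers, roughly
mirroring the sequences the code generator emits for Java volatile
accesses:

    #include <atomic>
    #include <cstdint>

    std::atomic<int32_t> field;  // stand-in for a Java volatile int field

    // Acquire-release lowering: LDAR / STLR on ARMv8.
    int32_t LoadVolatileAcqRel() {
      return field.load(std::memory_order_acquire);
    }
    void StoreVolatileAcqRel(int32_t value) {
      field.store(value, std::memory_order_release);
    }

    // Explicit-barrier lowering: plain accesses plus DMB fences. The
    // trailing fence models the barrier placed after a volatile store.
    int32_t LoadVolatileBarrier() {
      int32_t result = field.load(std::memory_order_relaxed);
      std::atomic_thread_fence(std::memory_order_acquire);
      return result;
    }
    void StoreVolatileBarrier(int32_t value) {
      std::atomic_thread_fence(std::memory_order_release);
      field.store(value, std::memory_order_relaxed);
      std::atomic_thread_fence(std::memory_order_seq_cst);
    }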

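A second sketch for the bounds-check change, assuming simplified
semantics: HBoundsCheck compares the index against the array length and
branches to a slow path on failure. With this patch, when the length
input is a constant that fits the AArch64 ADD/SUB immediate encoding,
the comparison can be emitted as a single CMP with an immediate instead
of first materializing the constant in a register (the helper below is
illustrative, not ART code):

    #include <cstdint>

    // Java semantics: index is out of bounds when index >= length; the
    // unsigned comparison also catches negative indices.
    bool BoundsCheckFails(uint32_t index, uint32_t length) {
      return index >= length;
    }
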
Change-Id: I42524451772c05a1c74af73e97a59a95f49ba6d4
Signed-off-by: Serban Constantinescu <serban.constantinescu@arm.com>
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index ee04b3a..caf7cc7 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -1333,7 +1333,7 @@
   LocationSummary* locations =
       new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
   locations->SetInAt(0, Location::RequiresRegister());
-  locations->SetInAt(1, Location::RequiresRegister());
+  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
   if (instruction->HasUses()) {
     locations->SetOut(Location::SameAsFirstInput());
   }
diff --git a/compiler/optimizing/common_arm64.h b/compiler/optimizing/common_arm64.h
index 966165b..53f1f3c 100644
--- a/compiler/optimizing/common_arm64.h
+++ b/compiler/optimizing/common_arm64.h
@@ -194,7 +194,8 @@
 
   int64_t value = CodeGenerator::GetInt64ValueOf(constant);
 
-  if (instr->IsAdd() || instr->IsSub() || instr->IsCondition() || instr->IsCompare()) {
+  if (instr->IsAdd() || instr->IsSub() || instr->IsCondition() ||
+      instr->IsCompare() || instr->IsBoundsCheck()) {
     // Uses aliases of ADD/SUB instructions.
     return vixl::Assembler::IsImmAddSub(value);
   } else if (instr->IsAnd() || instr->IsOr() || instr->IsXor()) {
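
For reference, a re-statement of the encodability rule the hunk above
relies on (my sketch, not vixl's implementation): an AArch64 ADD/SUB
immediate is an unsigned 12-bit value, optionally shifted left by 12
bits, which is what vixl::Assembler::IsImmAddSub() accepts:

    #include <cstdint>

    static bool IsImmAddSubSketch(int64_t value) {
      const int64_t kImm12Mask = 0xfff;
      return ((value & ~kImm12Mask) == 0) ||         // imm12
             ((value & ~(kImm12Mask << 12)) == 0);   // imm12, LSL #12
    }

So a bounds check against a length of 10 or 4096 can use an immediate,
while one against, say, 0x1001 still falls back to a register.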