libcorkscrew native stacks, mutex ranking, and better ScopedThreadListLock.

This change uses libcorkscrew to show native stacks for threads in the kNative
state and, unlike dalvikvm, also for threads in kVmWait. Working on the runtime
directly, I've found it somewhat useful to be able to see _which_ internal
resource we're waiting on. We can always take that back out (or make it
oatexecd-only) if it turns out to be too noisy or confusing for app developers.
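Roughly what that dumping looks like with the libcorkscrew API from
<corkscrew/backtrace.h>. This is a sketch, not the code in this change; the
function name, depth limit, and logging sink are made up for illustration:

    #include <corkscrew/backtrace.h>

    #include <sys/types.h>
    #include <cstdio>

    // Sketch: unwind another thread's native stack and log each frame.
    static void DumpNativeStack(pid_t tid) {
      const size_t kMaxFrames = 32;  // arbitrary cap for the sketch
      backtrace_frame_t frames[kMaxFrames];
      ssize_t frame_count = unwind_backtrace_thread(tid, frames, 0, kMaxFrames);
      if (frame_count <= 0) {
        return;  // unwinding can fail; better to show nothing than to crash
      }
      backtrace_symbol_t symbols[kMaxFrames];
      get_backtrace_symbols(frames, frame_count, symbols);
      for (size_t i = 0; i < static_cast<size_t>(frame_count); ++i) {
        char line[1024];
        format_backtrace_line(i, &frames[i], &symbols[i], line, sizeof(line));
        fprintf(stderr, "  native: %s\n", line);
      }
      free_backtrace_symbols(symbols, frame_count);
    }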

This change also lets us rank mutexes and enforce, in oatexecd, that you take
locks in a specific order.
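
The check itself lives in Mutex. Here's a minimal sketch of the idea, assuming
per-thread bookkeeping of held ranks; kHeapLock is visible in the diff below,
but the other rank values and the bookkeeping details are illustrative, not the
runtime's real implementation:

    #include <pthread.h>

    #include <cstdio>
    #include <cstdlib>

    // Illustrative ranks; only kHeapLock appears in the diff below.
    enum MutexRank {
      kHeapLock = 0,
      kThreadListLock = 1,
      kThreadSuspendCountLock = 2,
    };

    // Hypothetical per-thread stack of held ranks. Because ranks must
    // strictly increase, the top of the stack is also the maximum held.
    static __thread int gHeldRanks[16];
    static __thread int gHeldCount = 0;

    class Mutex {
     public:
      Mutex(const char* name, MutexRank rank) : name_(name), rank_(rank) {
        pthread_mutex_init(&mutex_, NULL);
      }

      void Lock() {
    #ifndef NDEBUG  // stands in for "oatexecd-only" in this sketch
        if (gHeldCount > 0 && gHeldRanks[gHeldCount - 1] >= rank_) {
          fprintf(stderr, "lock ordering violation: taking \"%s\" (rank %d) "
                  "while holding rank %d\n",
                  name_, rank_, gHeldRanks[gHeldCount - 1]);
          abort();
        }
    #endif
        pthread_mutex_lock(&mutex_);
        gHeldRanks[gHeldCount++] = rank_;
      }

      void Unlock() {
        --gHeldCount;  // simplified: assumes locks are released in LIFO order
        pthread_mutex_unlock(&mutex_);
      }

     private:
      pthread_mutex_t mutex_;
      const char* name_;
      const int rank_;
    };

With something like that in place, the constructor call in the diff,
new Mutex("Heap lock", kHeapLock), is enough to get the ordering checked on
every Lock().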

Both of these helped me test the third novelty: removing the heap locking from
ScopedThreadListLock. I've manually inspected all the callers and added a
ScopedHeapLock where I think one is necessary. In manual testing, this makes
jdb a lot less prone to locking us up. There still seems to be a problem with
the JDWP VirtualMachine.Resume command, but I'll look at that separately. This
is a big enough and potentially disruptive enough change already.
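
For context, ScopedHeapLock is the usual RAII guard. A simplified sketch,
built on the rank-checked Mutex sketched above (gHeapLock stands in for the
runtime's Heap lock; the real class is equivalent in spirit but not shown
here):

    // Illustrative guard; gHeapLock is a stand-in for the heap's lock_.
    static Mutex gHeapLock("Heap lock", kHeapLock);

    class ScopedHeapLock {
     public:
      ScopedHeapLock() { gHeapLock.Lock(); }
      ~ScopedHeapLock() { gHeapLock.Unlock(); }
     private:
      // Not copyable: a copied guard would unlock the heap twice.
      ScopedHeapLock(const ScopedHeapLock&);
      void operator=(const ScopedHeapLock&);
    };

    // Callers that used to get heap locking as a side effect of
    // ScopedThreadListLock now have to say what they mean, as the
    // call sites in the diff below do:
    void CollectGarbage() {
      ScopedHeapLock heap_lock;  // released automatically on every return path
      // ... collect ...
    }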

Change-Id: Iad974358919d0e00674662dc8a69cc65878cfb5c
diff --git a/src/heap.cc b/src/heap.cc
index ad20f9c..e153214 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -83,7 +83,7 @@
   }
 }
 
-bool GenerateImage(const std::string image_file_name) {
+static bool GenerateImage(const std::string image_file_name) {
   const std::string boot_class_path_string(Runtime::Current()->GetBootClassPathString());
   std::vector<std::string> boot_class_path;
   Split(boot_class_path_string, ':', boot_class_path);
@@ -257,7 +257,7 @@
   // It's still too early to take a lock because there are no threads yet,
   // but we can create the heap lock now. We don't create it earlier to
   // make it clear that you can't use locks during heap initialization.
-  lock_ = new Mutex("Heap lock");
+  lock_ = new Mutex("Heap lock", kHeapLock);
 
   Heap::EnableObjectValidation();
 
@@ -280,7 +280,7 @@
 
 Object* Heap::AllocObject(Class* klass, size_t byte_count) {
   {
-    ScopedHeapLock lock;
+    ScopedHeapLock heap_lock;
     DCHECK(klass == NULL || (klass->IsClassClass() && byte_count >= sizeof(Class)) ||
            (klass->IsVariableSize() || klass->GetObjectSize() == byte_count) ||
            strlen(ClassHelper(klass).GetDescriptor()) == 0);
@@ -323,7 +323,7 @@
   if (!verify_objects_) {
     return;
   }
-  ScopedHeapLock lock;
+  ScopedHeapLock heap_lock;
   Heap::VerifyObjectLocked(obj);
 }
 #endif
@@ -366,7 +366,7 @@
 }
 
 void Heap::VerifyHeap() {
-  ScopedHeapLock lock;
+  ScopedHeapLock heap_lock;
   live_bitmap_->Walk(Heap::VerificationCallback, NULL);
 }
 
@@ -553,14 +553,14 @@
 };
 
 int64_t Heap::CountInstances(Class* c, bool count_assignable) {
-  ScopedHeapLock lock;
+  ScopedHeapLock heap_lock;
   InstanceCounter counter(c, count_assignable);
   live_bitmap_->Walk(InstanceCounter::Callback, &counter);
   return counter.GetCount();
 }
 
 void Heap::CollectGarbage(bool clear_soft_references) {
-  ScopedHeapLock lock;
+  ScopedHeapLock heap_lock;
   CollectGarbageInternal(clear_soft_references);
 }
 
@@ -685,7 +685,7 @@
 }
 
 void Heap::ClearGrowthLimit() {
-  ScopedHeapLock lock;
+  ScopedHeapLock heap_lock;
   WaitForConcurrentGcToComplete();
   alloc_space_->ClearGrowthLimit();
 }