Change the BitTableBuilder API to be POD based.

The compiler currently keeps two copies of all stack map
intermediate data in memory at the same time.

Change the BitTableBuilder so that it can store the
intermediate data directly (e.g. StackMapEntry), which saves
space and avoids the complexity of the copying code.

It will also make it possible to deduplicate data as we go,
further reducing memory use and code complexity.

Test: test-art-host-gtest-stack_map_test
Change-Id: I660fddf0629422ae0d2588333854d8fdf1e1bd0f
diff --git a/libartbase/base/bit_table_test.cc b/libartbase/base/bit_table_test.cc
index 25bfcf0..e6f0d53 100644
--- a/libartbase/base/bit_table_test.cc
+++ b/libartbase/base/bit_table_test.cc
@@ -16,8 +16,14 @@
 
 #include "bit_table.h"
 
+#include <map>
+
 #include "gtest/gtest.h"
 
+#include "base/arena_allocator.h"
+#include "base/bit_utils.h"
+#include "base/malloc_arena_pool.h"
+
 namespace art {
 
 TEST(BitTableTest, TestVarint) {
@@ -38,9 +44,13 @@
 }
 
 TEST(BitTableTest, TestEmptyTable) {
+  MallocArenaPool pool;
+  ArenaStack arena_stack(&pool);
+  ScopedArenaAllocator allocator(&arena_stack);
+
   std::vector<uint8_t> buffer;
   size_t encode_bit_offset = 0;
-  BitTableBuilder<1> builder;
+  BitTableBuilder<uint32_t> builder(&allocator);
   builder.Encode(&buffer, &encode_bit_offset);
 
   size_t decode_bit_offset = 0;
@@ -50,14 +60,18 @@
 }
 
 TEST(BitTableTest, TestSingleColumnTable) {
+  MallocArenaPool pool;
+  ArenaStack arena_stack(&pool);
+  ScopedArenaAllocator allocator(&arena_stack);
+
   constexpr uint32_t kNoValue = -1;
   std::vector<uint8_t> buffer;
   size_t encode_bit_offset = 0;
-  BitTableBuilder<1> builder;
-  builder.AddRow(42u);
-  builder.AddRow(kNoValue);
-  builder.AddRow(1000u);
-  builder.AddRow(kNoValue);
+  BitTableBuilder<uint32_t> builder(&allocator);
+  builder.Add(42u);
+  builder.Add(kNoValue);
+  builder.Add(1000u);
+  builder.Add(kNoValue);
   builder.Encode(&buffer, &encode_bit_offset);
 
   size_t decode_bit_offset = 0;
@@ -72,11 +86,15 @@
 }
 
 TEST(BitTableTest, TestUnalignedTable) {
+  MallocArenaPool pool;
+  ArenaStack arena_stack(&pool);
+  ScopedArenaAllocator allocator(&arena_stack);
+
   for (size_t start_bit_offset = 0; start_bit_offset <= 32; start_bit_offset++) {
     std::vector<uint8_t> buffer;
     size_t encode_bit_offset = start_bit_offset;
-    BitTableBuilder<1> builder;
-    builder.AddRow(42u);
+    BitTableBuilder<uint32_t> builder(&allocator);
+    builder.Add(42u);
     builder.Encode(&buffer, &encode_bit_offset);
 
     size_t decode_bit_offset = start_bit_offset;
@@ -88,12 +106,22 @@
 }
 
 TEST(BitTableTest, TestBigTable) {
+  MallocArenaPool pool;
+  ArenaStack arena_stack(&pool);
+  ScopedArenaAllocator allocator(&arena_stack);
+
   constexpr uint32_t kNoValue = -1;
   std::vector<uint8_t> buffer;
   size_t encode_bit_offset = 0;
-  BitTableBuilder<4> builder;
-  builder.AddRow(42u, kNoValue, 0u, static_cast<uint32_t>(-2));
-  builder.AddRow(62u, kNoValue, 63u, static_cast<uint32_t>(-3));
+  struct RowData {
+    uint32_t a;
+    uint32_t b;
+    uint32_t c;
+    uint32_t d;
+  };
+  BitTableBuilder<RowData> builder(&allocator);
+  builder.Add(RowData{42u, kNoValue, 0u, static_cast<uint32_t>(-2)});
+  builder.Add(RowData{62u, kNoValue, 63u, static_cast<uint32_t>(-3)});
   builder.Encode(&buffer, &encode_bit_offset);
 
   size_t decode_bit_offset = 0;