releasetools: Make blockimgdiff Python 3 compatible.

Use floor division for block-count arithmetic, open image and patch files
in binary mode so reads return bytes, write the text-based transfer list
in text mode, and initialize the touched-block array from a bytes literal.
Also update test_blockimgdiff to write and read its temporary image files
as bytes.
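
A quick interactive sketch of the two behaviour changes driving most of
these edits (illustration only, not part of the patch):

  >>> 8192 / 4096    # "/" is true division on Python 3 and returns a float
  2.0
  >>> 8192 // 4096   # "//" floors to an int on both Python 2 and 3
  2
  >>> b"\0" * 2      # bytes, as array.array("B", ...) requires on Python 3
  b'\x00\x00'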

Bug: 131631303
Test: `python -m unittest test_blockimgdiff`
Test: `python3 -m unittest test_blockimgdiff`
Change-Id: I8cf072ca4af6a525bc675cba0be998ff7be7f4f1
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index b23eef1..ecb1d31 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -126,7 +126,7 @@
 
     assert len(self.data) % self.blocksize == 0
 
-    self.total_blocks = len(self.data) / self.blocksize
+    self.total_blocks = len(self.data) // self.blocksize
     self.care_map = RangeSet(data=(0, self.total_blocks))
     # When the last block is padded, we always write the whole block even for
     # incremental OTAs. Because otherwise the last block may get skipped if
@@ -179,8 +179,7 @@
   def TotalSha1(self, include_clobbered_blocks=False):
     if not include_clobbered_blocks:
       return self.RangeSha1(self.care_map.subtract(self.clobbered_blocks))
-    else:
-      return sha1(self.data).hexdigest()
+    return sha1(self.data).hexdigest()
 
   def WriteRangeDataToFd(self, ranges, fd):
     for data in self._GetRangeData(ranges): # pylint: disable=not-an-iterable
@@ -194,13 +193,13 @@
     self.path = path
     self.blocksize = 4096
     self._file_size = os.path.getsize(self.path)
-    self._file = open(self.path, 'r')
+    self._file = open(self.path, 'rb')
 
-    if self._file_size % self.blocksize != 0:
-      raise ValueError("Size of file %s must be multiple of %d bytes, but is %d"
-                       % self.path, self.blocksize, self._file_size)
+    if self._file_size % self.blocksize != 0:
+      raise ValueError("Size of file %s must be multiple of %d bytes, but is %d"
+                       % (self.path, self.blocksize, self._file_size))
 
-    self.total_blocks = self._file_size / self.blocksize
+    self.total_blocks = self._file_size // self.blocksize
     self.care_map = RangeSet(data=(0, self.total_blocks))
     self.clobbered_blocks = RangeSet()
     self.extended = RangeSet()
@@ -391,7 +390,7 @@
 
     def print_header(header, separator):
       logger.info(header)
-      logger.info(separator * len(header) + '\n')
+      logger.info('%s\n', separator * len(header))
 
     print_header('  Imgdiff Stats Report  ', '=')
     for key in self.REASONS:
@@ -779,7 +778,7 @@
     out.insert(2, "0\n")
     out.insert(3, str(max_stashed_blocks) + "\n")
 
-    with open(prefix + ".transfer.list", "wb") as f:
+    with open(prefix + ".transfer.list", "w") as f:
       for i in out:
         f.write(i)
 
@@ -1009,7 +1008,7 @@
     # - we write every block we care about exactly once.
 
     # Start with no blocks having been touched yet.
-    touched = array.array("B", "\0" * self.tgt.total_blocks)
+    touched = array.array("B", b"\0" * self.tgt.total_blocks)
 
     # Imagine processing the transfers in order.
     for xf in self.transfers:
@@ -1671,8 +1670,8 @@
 
         split_tgt_size = int(info[1])
         assert split_tgt_size % 4096 == 0
-        assert split_tgt_size / 4096 <= tgt_remain.size()
-        split_tgt_ranges = tgt_remain.first(split_tgt_size / 4096)
+        assert split_tgt_size // 4096 <= tgt_remain.size()
+        split_tgt_ranges = tgt_remain.first(split_tgt_size // 4096)
         tgt_remain = tgt_remain.subtract(split_tgt_ranges)
 
         # Find the split_src_ranges within the image file from its relative
@@ -1744,7 +1743,7 @@
                                                     lines)
         for index, (patch_start, patch_length, split_tgt_ranges,
                     split_src_ranges) in enumerate(split_info_list):
-          with open(patch_file) as f:
+          with open(patch_file, 'rb') as f:
             f.seek(patch_start)
             patch_content = f.read(patch_length)
 
diff --git a/tools/releasetools/test_blockimgdiff.py b/tools/releasetools/test_blockimgdiff.py
index b6d47d4..4c86933 100644
--- a/tools/releasetools/test_blockimgdiff.py
+++ b/tools/releasetools/test_blockimgdiff.py
@@ -268,6 +268,7 @@
 
 
 class DataImageTest(ReleaseToolsTestCase):
+
   def test_read_range_set(self):
     data = "file" + ('\0' * 4092)
     image = DataImage(data)
@@ -275,10 +276,11 @@
 
 
 class FileImageTest(ReleaseToolsTestCase):
+
   def setUp(self):
     self.file_path = common.MakeTempFile()
     self.data = os.urandom(4096 * 4)
-    with open(self.file_path, 'w') as f:
+    with open(self.file_path, 'wb') as f:
       f.write(self.data)
     self.file = FileImage(self.file_path)
 
@@ -292,18 +294,18 @@
         expected_data = self.data[s * blocksize : e * blocksize]
 
         rs = RangeSet([s, e])
-        data = "".join(self.file.ReadRangeSet(rs))
+        data = b''.join(self.file.ReadRangeSet(rs))
         self.assertEqual(expected_data, data)
 
         sha1sum = self.file.RangeSha1(rs)
         self.assertEqual(sha1(expected_data).hexdigest(), sha1sum)
 
         tmpfile = common.MakeTempFile()
-        with open(tmpfile, 'w') as f:
+        with open(tmpfile, 'wb') as f:
           self.file.WriteRangeDataToFd(rs, f)
-        with open(tmpfile, 'r') as f:
+        with open(tmpfile, 'rb') as f:
           self.assertEqual(expected_data, f.read())
 
   def test_read_all(self):
-    data = "".join(self.file.ReadRangeSet(self.file.care_map))
+    data = b''.join(self.file.ReadRangeSet(self.file.care_map))
     self.assertEqual(self.data, data)