Factor out the image classes to break circular dependency

This is a step toward breaking the circular dependency between common
and blockimgdiff: the Image, EmptyImage, DataImage and FileImage classes
move from blockimgdiff.py into the new images.py, which depends on
neither of them. common.py keeps DataImage and EmptyImage as aliases for
vendor-specific scripts.
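
A minimal usage sketch of the new module (illustration only; the data
below is made up and is not part of this change):

    from images import DataImage

    img = DataImage(b'\0' * 4096)        # a single 4096-byte block
    assert img.total_blocks == 1
    print(img.RangeSha1(img.care_map))   # sha1 over that one block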

Bug: 32379627
Test: unit tests pass
Change-Id: I90b5ff34782acbfac86f36265bd96c207d898bf6
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 6b4e4f5..1bb1603 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -40,6 +40,7 @@
         "common.py",
         "edify_generator.py",
         "img_from_target_files.py",
+        "images.py",
         "make_recovery_patch.py",
         "merge_target_files.py",
         "ota_from_target_files.py",
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index ecb1d31..72f065d 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -28,12 +28,12 @@
 import threading
 import zlib
 from collections import deque, namedtuple, OrderedDict
-from hashlib import sha1
 
 import common
+from images import EmptyImage
 from rangelib import RangeSet
 
-__all__ = ["EmptyImage", "DataImage", "BlockImageDiff"]
+__all__ = ["BlockImageDiff"]
 
 logger = logging.getLogger(__name__)
 
@@ -60,209 +60,6 @@
     return PatchInfo(imgdiff, f.read())
 
 
-class Image(object):
-  def RangeSha1(self, ranges):
-    raise NotImplementedError
-
-  def ReadRangeSet(self, ranges):
-    raise NotImplementedError
-
-  def TotalSha1(self, include_clobbered_blocks=False):
-    raise NotImplementedError
-
-  def WriteRangeDataToFd(self, ranges, fd):
-    raise NotImplementedError
-
-
-class EmptyImage(Image):
-  """A zero-length image."""
-
-  def __init__(self):
-    self.blocksize = 4096
-    self.care_map = RangeSet()
-    self.clobbered_blocks = RangeSet()
-    self.extended = RangeSet()
-    self.total_blocks = 0
-    self.file_map = {}
-    self.hashtree_info = None
-
-  def RangeSha1(self, ranges):
-    return sha1().hexdigest()
-
-  def ReadRangeSet(self, ranges):
-    return ()
-
-  def TotalSha1(self, include_clobbered_blocks=False):
-    # EmptyImage always carries empty clobbered_blocks, so
-    # include_clobbered_blocks can be ignored.
-    assert self.clobbered_blocks.size() == 0
-    return sha1().hexdigest()
-
-  def WriteRangeDataToFd(self, ranges, fd):
-    raise ValueError("Can't write data from EmptyImage to file")
-
-
-class DataImage(Image):
-  """An image wrapped around a single string of data."""
-
-  def __init__(self, data, trim=False, pad=False):
-    self.data = data
-    self.blocksize = 4096
-
-    assert not (trim and pad)
-
-    partial = len(self.data) % self.blocksize
-    padded = False
-    if partial > 0:
-      if trim:
-        self.data = self.data[:-partial]
-      elif pad:
-        self.data += '\0' * (self.blocksize - partial)
-        padded = True
-      else:
-        raise ValueError(("data for DataImage must be multiple of %d bytes "
-                          "unless trim or pad is specified") %
-                         (self.blocksize,))
-
-    assert len(self.data) % self.blocksize == 0
-
-    self.total_blocks = len(self.data) // self.blocksize
-    self.care_map = RangeSet(data=(0, self.total_blocks))
-    # When the last block is padded, we always write the whole block even for
-    # incremental OTAs. Because otherwise the last block may get skipped if
-    # unchanged for an incremental, but would fail the post-install
-    # verification if it has non-zero contents in the padding bytes.
-    # Bug: 23828506
-    if padded:
-      clobbered_blocks = [self.total_blocks-1, self.total_blocks]
-    else:
-      clobbered_blocks = []
-    self.clobbered_blocks = clobbered_blocks
-    self.extended = RangeSet()
-
-    zero_blocks = []
-    nonzero_blocks = []
-    reference = '\0' * self.blocksize
-
-    for i in range(self.total_blocks-1 if padded else self.total_blocks):
-      d = self.data[i*self.blocksize : (i+1)*self.blocksize]
-      if d == reference:
-        zero_blocks.append(i)
-        zero_blocks.append(i+1)
-      else:
-        nonzero_blocks.append(i)
-        nonzero_blocks.append(i+1)
-
-    assert zero_blocks or nonzero_blocks or clobbered_blocks
-
-    self.file_map = dict()
-    if zero_blocks:
-      self.file_map["__ZERO"] = RangeSet(data=zero_blocks)
-    if nonzero_blocks:
-      self.file_map["__NONZERO"] = RangeSet(data=nonzero_blocks)
-    if clobbered_blocks:
-      self.file_map["__COPY"] = RangeSet(data=clobbered_blocks)
-
-  def _GetRangeData(self, ranges):
-    for s, e in ranges:
-      yield self.data[s*self.blocksize:e*self.blocksize]
-
-  def RangeSha1(self, ranges):
-    h = sha1()
-    for data in self._GetRangeData(ranges): # pylint: disable=not-an-iterable
-      h.update(data)
-    return h.hexdigest()
-
-  def ReadRangeSet(self, ranges):
-    return list(self._GetRangeData(ranges))
-
-  def TotalSha1(self, include_clobbered_blocks=False):
-    if not include_clobbered_blocks:
-      return self.RangeSha1(self.care_map.subtract(self.clobbered_blocks))
-    return sha1(self.data).hexdigest()
-
-  def WriteRangeDataToFd(self, ranges, fd):
-    for data in self._GetRangeData(ranges): # pylint: disable=not-an-iterable
-      fd.write(data)
-
-
-class FileImage(Image):
-  """An image wrapped around a raw image file."""
-
-  def __init__(self, path, hashtree_info_generator=None):
-    self.path = path
-    self.blocksize = 4096
-    self._file_size = os.path.getsize(self.path)
-    self._file = open(self.path, 'rb')
-
-    if self._file_size % self.blocksize != 0:
-      raise ValueError("Size of file %s must be multiple of %d bytes, but is %d"
-                       % self.path, self.blocksize, self._file_size)
-
-    self.total_blocks = self._file_size // self.blocksize
-    self.care_map = RangeSet(data=(0, self.total_blocks))
-    self.clobbered_blocks = RangeSet()
-    self.extended = RangeSet()
-
-    self.generator_lock = threading.Lock()
-
-    self.hashtree_info = None
-    if hashtree_info_generator:
-      self.hashtree_info = hashtree_info_generator.Generate(self)
-
-    zero_blocks = []
-    nonzero_blocks = []
-    reference = '\0' * self.blocksize
-
-    for i in range(self.total_blocks):
-      d = self._file.read(self.blocksize)
-      if d == reference:
-        zero_blocks.append(i)
-        zero_blocks.append(i+1)
-      else:
-        nonzero_blocks.append(i)
-        nonzero_blocks.append(i+1)
-
-    assert zero_blocks or nonzero_blocks
-
-    self.file_map = {}
-    if zero_blocks:
-      self.file_map["__ZERO"] = RangeSet(data=zero_blocks)
-    if nonzero_blocks:
-      self.file_map["__NONZERO"] = RangeSet(data=nonzero_blocks)
-    if self.hashtree_info:
-      self.file_map["__HASHTREE"] = self.hashtree_info.hashtree_range
-
-  def __del__(self):
-    self._file.close()
-
-  def _GetRangeData(self, ranges):
-    # Use a lock to protect the generator so that we will not run two
-    # instances of this generator on the same object simultaneously.
-    with self.generator_lock:
-      for s, e in ranges:
-        self._file.seek(s * self.blocksize)
-        for _ in range(s, e):
-          yield self._file.read(self.blocksize)
-
-  def RangeSha1(self, ranges):
-    h = sha1()
-    for data in self._GetRangeData(ranges): # pylint: disable=not-an-iterable
-      h.update(data)
-    return h.hexdigest()
-
-  def ReadRangeSet(self, ranges):
-    return list(self._GetRangeData(ranges))
-
-  def TotalSha1(self, include_clobbered_blocks=False):
-    assert not self.clobbered_blocks
-    return self.RangeSha1(self.care_map)
-
-  def WriteRangeDataToFd(self, ranges, fd):
-    for data in self._GetRangeData(ranges): # pylint: disable=not-an-iterable
-      fd.write(data)
-
-
 class Transfer(object):
   def __init__(self, tgt_name, src_name, tgt_ranges, src_ranges, tgt_sha1,
                src_sha1, style, by_id):
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 0030afa..e0e2e22 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -39,8 +39,9 @@
 import zipfile
 from hashlib import sha1, sha256
 
-import blockimgdiff
+import images
 import sparse_img
+from blockimgdiff import BlockImageDiff
 
 logger = logging.getLogger(__name__)
 
@@ -915,8 +916,8 @@
   # ota_from_target_files.py (since LMP).
   assert os.path.exists(path) and os.path.exists(mappath)
 
-  return blockimgdiff.FileImage(path, hashtree_info_generator=
-                                hashtree_info_generator)
+  return images.FileImage(path, hashtree_info_generator=hashtree_info_generator)
+
 
 def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks,
                    hashtree_info_generator=None):
@@ -1916,9 +1917,9 @@
     assert version >= 3
     self.version = version
 
-    b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
-                                    version=self.version,
-                                    disable_imgdiff=self.disable_imgdiff)
+    b = BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
+                       version=self.version,
+                       disable_imgdiff=self.disable_imgdiff)
     self.path = os.path.join(MakeTempDir(), partition)
     b.Compute(self.path)
     self._required_cache = b.max_stashed_size
@@ -2172,8 +2173,10 @@
     return ctx.hexdigest()
 
 
-DataImage = blockimgdiff.DataImage
-EmptyImage = blockimgdiff.EmptyImage
+# Expose these two classes to support vendor-specific scripts
+DataImage = images.DataImage
+EmptyImage = images.EmptyImage
+
 
 # map recovery.fstab's fs_types to mount/format "partition types"
 PARTITION_TYPES = {
diff --git a/tools/releasetools/images.py b/tools/releasetools/images.py
new file mode 100644
index 0000000..a24148a
--- /dev/null
+++ b/tools/releasetools/images.py
@@ -0,0 +1,224 @@
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import threading
+from hashlib import sha1
+
+from rangelib import RangeSet
+
+__all__ = ["EmptyImage", "DataImage", "FileImage"]
+
+
+class Image(object):
+  def RangeSha1(self, ranges):
+    raise NotImplementedError
+
+  def ReadRangeSet(self, ranges):
+    raise NotImplementedError
+
+  def TotalSha1(self, include_clobbered_blocks=False):
+    raise NotImplementedError
+
+  def WriteRangeDataToFd(self, ranges, fd):
+    raise NotImplementedError
+
+
+class EmptyImage(Image):
+  """A zero-length image."""
+
+  def __init__(self):
+    self.blocksize = 4096
+    self.care_map = RangeSet()
+    self.clobbered_blocks = RangeSet()
+    self.extended = RangeSet()
+    self.total_blocks = 0
+    self.file_map = {}
+    self.hashtree_info = None
+
+  def RangeSha1(self, ranges):
+    return sha1().hexdigest()
+
+  def ReadRangeSet(self, ranges):
+    return ()
+
+  def TotalSha1(self, include_clobbered_blocks=False):
+    # EmptyImage always carries empty clobbered_blocks, so
+    # include_clobbered_blocks can be ignored.
+    assert self.clobbered_blocks.size() == 0
+    return sha1().hexdigest()
+
+  def WriteRangeDataToFd(self, ranges, fd):
+    raise ValueError("Can't write data from EmptyImage to file")
+
+
+class DataImage(Image):
+  """An image wrapped around a single string of data."""
+
+  def __init__(self, data, trim=False, pad=False):
+    self.data = data
+    self.blocksize = 4096
+
+    assert not (trim and pad)
+
+    partial = len(self.data) % self.blocksize
+    padded = False
+    if partial > 0:
+      if trim:
+        self.data = self.data[:-partial]
+      elif pad:
+        self.data += '\0' * (self.blocksize - partial)
+        padded = True
+      else:
+        raise ValueError(("data for DataImage must be multiple of %d bytes "
+                          "unless trim or pad is specified") %
+                         (self.blocksize,))
+
+    assert len(self.data) % self.blocksize == 0
+
+    self.total_blocks = len(self.data) // self.blocksize
+    self.care_map = RangeSet(data=(0, self.total_blocks))
+    # When the last block is padded, we always write the whole block even for
+    # incremental OTAs. Because otherwise the last block may get skipped if
+    # unchanged for an incremental, but would fail the post-install
+    # verification if it has non-zero contents in the padding bytes.
+    # Bug: 23828506
+    if padded:
+      clobbered_blocks = [self.total_blocks-1, self.total_blocks]
+    else:
+      clobbered_blocks = []
+    self.clobbered_blocks = clobbered_blocks
+    self.extended = RangeSet()
+
+    zero_blocks = []
+    nonzero_blocks = []
+    reference = '\0' * self.blocksize
+
+    for i in range(self.total_blocks-1 if padded else self.total_blocks):
+      d = self.data[i*self.blocksize : (i+1)*self.blocksize]
+      if d == reference:
+        zero_blocks.append(i)
+        zero_blocks.append(i+1)
+      else:
+        nonzero_blocks.append(i)
+        nonzero_blocks.append(i+1)
+
+    assert zero_blocks or nonzero_blocks or clobbered_blocks
+
+    self.file_map = dict()
+    if zero_blocks:
+      self.file_map["__ZERO"] = RangeSet(data=zero_blocks)
+    if nonzero_blocks:
+      self.file_map["__NONZERO"] = RangeSet(data=nonzero_blocks)
+    if clobbered_blocks:
+      self.file_map["__COPY"] = RangeSet(data=clobbered_blocks)
+
+  def _GetRangeData(self, ranges):
+    for s, e in ranges:
+      yield self.data[s*self.blocksize:e*self.blocksize]
+
+  def RangeSha1(self, ranges):
+    h = sha1()
+    for data in self._GetRangeData(ranges): # pylint: disable=not-an-iterable
+      h.update(data)
+    return h.hexdigest()
+
+  def ReadRangeSet(self, ranges):
+    return list(self._GetRangeData(ranges))
+
+  def TotalSha1(self, include_clobbered_blocks=False):
+    if not include_clobbered_blocks:
+      return self.RangeSha1(self.care_map.subtract(self.clobbered_blocks))
+    return sha1(self.data).hexdigest()
+
+  def WriteRangeDataToFd(self, ranges, fd):
+    for data in self._GetRangeData(ranges): # pylint: disable=not-an-iterable
+      fd.write(data)
+
+
+class FileImage(Image):
+  """An image wrapped around a raw image file."""
+
+  def __init__(self, path, hashtree_info_generator=None):
+    self.path = path
+    self.blocksize = 4096
+    self._file_size = os.path.getsize(self.path)
+    self._file = open(self.path, 'rb')
+
+    if self._file_size % self.blocksize != 0:
+      raise ValueError("Size of file %s must be multiple of %d bytes, but is %d"
+                       % (self.path, self.blocksize, self._file_size))
+
+    self.total_blocks = self._file_size // self.blocksize
+    self.care_map = RangeSet(data=(0, self.total_blocks))
+    self.clobbered_blocks = RangeSet()
+    self.extended = RangeSet()
+
+    self.generator_lock = threading.Lock()
+
+    self.hashtree_info = None
+    if hashtree_info_generator:
+      self.hashtree_info = hashtree_info_generator.Generate(self)
+
+    zero_blocks = []
+    nonzero_blocks = []
+    reference = '\0' * self.blocksize
+
+    for i in range(self.total_blocks):
+      d = self._file.read(self.blocksize)
+      if d == reference:
+        zero_blocks.append(i)
+        zero_blocks.append(i+1)
+      else:
+        nonzero_blocks.append(i)
+        nonzero_blocks.append(i+1)
+
+    assert zero_blocks or nonzero_blocks
+
+    self.file_map = {}
+    if zero_blocks:
+      self.file_map["__ZERO"] = RangeSet(data=zero_blocks)
+    if nonzero_blocks:
+      self.file_map["__NONZERO"] = RangeSet(data=nonzero_blocks)
+    if self.hashtree_info:
+      self.file_map["__HASHTREE"] = self.hashtree_info.hashtree_range
+
+  def __del__(self):
+    self._file.close()
+
+  def _GetRangeData(self, ranges):
+    # Use a lock to protect the generator so that we will not run two
+    # instances of this generator on the same object simultaneously.
+    with self.generator_lock:
+      for s, e in ranges:
+        self._file.seek(s * self.blocksize)
+        for _ in range(s, e):
+          yield self._file.read(self.blocksize)
+
+  def RangeSha1(self, ranges):
+    h = sha1()
+    for data in self._GetRangeData(ranges): # pylint: disable=not-an-iterable
+      h.update(data)
+    return h.hexdigest()
+
+  def ReadRangeSet(self, ranges):
+    return list(self._GetRangeData(ranges))
+
+  def TotalSha1(self, include_clobbered_blocks=False):
+    assert not self.clobbered_blocks
+    return self.RangeSha1(self.care_map)
+
+  def WriteRangeDataToFd(self, ranges, fd):
+    for data in self._GetRangeData(ranges): # pylint: disable=not-an-iterable
+      fd.write(data)
diff --git a/tools/releasetools/test_blockimgdiff.py b/tools/releasetools/test_blockimgdiff.py
index 4c86933..0987dcf 100644
--- a/tools/releasetools/test_blockimgdiff.py
+++ b/tools/releasetools/test_blockimgdiff.py
@@ -18,9 +18,8 @@
 from hashlib import sha1
 
 import common
-from blockimgdiff import (
-    BlockImageDiff, DataImage, EmptyImage, FileImage, HeapItem, ImgdiffStats,
-    Transfer)
+from blockimgdiff import BlockImageDiff, HeapItem, ImgdiffStats, Transfer
+from images import DataImage, EmptyImage, FileImage
 from rangelib import RangeSet
 from test_utils import ReleaseToolsTestCase
 
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 50fa86f..c0ebd89 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -25,9 +25,9 @@
 import common
 import test_utils
 import validate_target_files
+from images import EmptyImage, DataImage
 from rangelib import RangeSet
 
-from blockimgdiff import EmptyImage, DataImage
 
 KiB = 1024
 MiB = 1024 * KiB
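
For reference, common.py keeps DataImage and EmptyImage as aliases so that
vendor-specific scripts relying on them keep working without importing
images or blockimgdiff directly. A minimal sketch of such a caller (the
function name below is made up):

    import common

    def MakeVendorImage(block_aligned_data):
      # block_aligned_data must be a whole number of 4096-byte blocks;
      # otherwise DataImage raises ValueError unless trim= or pad= is given.
      return common.DataImage(block_aligned_data)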