Remove HashTreeInfo from verity_utils.py

We are removing Verified Boot (VB) related functionality from the release
tools. This change removes HashtreeInfo and its generator from
verity_utils.py, and drops the function arguments that carried them.

Bug: 241044073
Test: atest under build/make
Change-Id: Icc298256be6917ffbd40cf4735f8e8092b541c9f
parent 22e3b01d7b
commit cc9c05d5b8

7 changed files with 10 additions and 94 deletions
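The change drops the hashtree_info_generator argument across common.GetUserImage, GetSparseImage, GetNonSparseImage, images.FileImage and sparse_img.SparseImage. A minimal sketch of how a caller adapts, assuming the releasetools modules are importable; the wrapper name and variables are illustrative, not part of this change:

import common

def load_target_image(name, tmpdir, target_zip, target_info, allow_shared_blocks):
  # hashtree_info_generator is no longer accepted; the remaining keyword
  # arguments are unchanged (see the GetUserImage hunks below).
  return common.GetUserImage(name, tmpdir, target_zip,
                             info_dict=target_info,
                             allow_shared_blocks=allow_shared_blocks)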
@@ -537,14 +537,6 @@ class BlockImageDiff(object):
     self.touched_src_sha1 = self.src.RangeSha1(self.touched_src_ranges)

-    if self.tgt.hashtree_info:
-      out.append("compute_hash_tree {} {} {} {} {}\n".format(
-          self.tgt.hashtree_info.hashtree_range.to_string_raw(),
-          self.tgt.hashtree_info.filesystem_range.to_string_raw(),
-          self.tgt.hashtree_info.hash_algorithm,
-          self.tgt.hashtree_info.salt,
-          self.tgt.hashtree_info.root_hash))
-
     # Zero out extended blocks as a workaround for bug 20881595.
     if self.tgt.extended:
       assert (WriteSplitTransfers(out, "zero", self.tgt.extended) ==

@@ -830,12 +822,6 @@ class BlockImageDiff(object):
           assert touched[i] == 0
           touched[i] = 1

-    if self.tgt.hashtree_info:
-      for s, e in self.tgt.hashtree_info.hashtree_range:
-        for i in range(s, e):
-          assert touched[i] == 0
-          touched[i] = 1
-
     # Check that we've written every target block.
     for s, e in self.tgt.care_map:
       for i in range(s, e):
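With the hashtree range gone, the remaining accounting in the second hunk only has to prove that every care-map block is written exactly once. A standalone sketch of that invariant, using illustrative names (ranges are (start, end) block-index pairs, as in a RangeSet):

def check_written_exactly_once(transfer_target_ranges, care_map, total_blocks):
  """Illustrative restatement of the touched-block check kept by this hunk."""
  touched = [0] * total_blocks
  for ranges in transfer_target_ranges:   # target ranges of each transfer
    for s, e in ranges:
      for i in range(s, e):
        assert touched[i] == 0, "block %d written more than once" % i
        touched[i] = 1
  # Every block in the care map must have been written by some transfer.
  for s, e in care_map:
    for i in range(s, e):
      assert touched[i] == 1, "block %d in care_map never written" % i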
@@ -2055,7 +2055,6 @@ def UnzipTemp(filename, patterns=None):
 def GetUserImage(which, tmpdir, input_zip,
                  info_dict=None,
                  allow_shared_blocks=None,
-                 hashtree_info_generator=None,
                  reset_file_map=False):
   """Returns an Image object suitable for passing to BlockImageDiff.

@@ -2072,8 +2071,6 @@ def GetUserImage(which, tmpdir, input_zip,
     info_dict: The dict to be looked up for relevant info.
     allow_shared_blocks: If image is sparse, whether having shared blocks is
       allowed. If none, it is looked up from info_dict.
-    hashtree_info_generator: If present and image is sparse, generates the
-      hashtree_info for this sparse image.
     reset_file_map: If true and image is sparse, reset file map before returning
       the image.

   Returns:

@@ -2095,15 +2092,14 @@ def GetUserImage(which, tmpdir, input_zip,
     allow_shared_blocks = info_dict.get("ext4_share_dup_blocks") == "true"

   if is_sparse:
-    img = GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks,
-                         hashtree_info_generator)
+    img = GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks)
     if reset_file_map:
       img.ResetFileMap()
     return img
-  return GetNonSparseImage(which, tmpdir, hashtree_info_generator)
+  return GetNonSparseImage(which, tmpdir)


-def GetNonSparseImage(which, tmpdir, hashtree_info_generator=None):
+def GetNonSparseImage(which, tmpdir):
   """Returns a Image object suitable for passing to BlockImageDiff.

   This function loads the specified non-sparse image from the given path.

@@ -2121,11 +2117,10 @@ def GetNonSparseImage(which, tmpdir, hashtree_info_generator=None):
   # ota_from_target_files.py (since LMP).
   assert os.path.exists(path) and os.path.exists(mappath)

-  return images.FileImage(path, hashtree_info_generator=hashtree_info_generator)
+  return images.FileImage(path)


-def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks,
-                   hashtree_info_generator=None):
+def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks):
   """Returns a SparseImage object suitable for passing to BlockImageDiff.

   This function loads the specified sparse image from the given path, and

@@ -2138,8 +2133,6 @@ def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks,
     tmpdir: The directory that contains the prebuilt image and block map file.
     input_zip: The target-files ZIP archive.
     allow_shared_blocks: Whether having shared blocks is allowed.
-    hashtree_info_generator: If present, generates the hashtree_info for this
-      sparse image.

   Returns:
     A SparseImage object, with file_map info loaded.
   """

@@ -2156,8 +2149,7 @@ def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks,
     clobbered_blocks = "0"

   image = sparse_img.SparseImage(
-      path, mappath, clobbered_blocks, allow_shared_blocks=allow_shared_blocks,
-      hashtree_info_generator=hashtree_info_generator)
+      path, mappath, clobbered_blocks, allow_shared_blocks=allow_shared_blocks)

   # block.map may contain less blocks, because mke2fs may skip allocating blocks
   # if they contain all zeros. We can't reconstruct such a file from its block
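After these hunks, GetUserImage dispatches to the two loaders with no verity-related arguments. A sketch exercising the simplified entry points directly; is_sparse is passed explicitly here for illustration only, since GetUserImage derives it from info_dict internally:

import common

def load_partition(which, tmpdir, input_zip, info_dict, is_sparse):
  # Same default as GetUserImage: shared blocks follow ext4_share_dup_blocks.
  allow_shared_blocks = info_dict.get("ext4_share_dup_blocks") == "true"
  if is_sparse:
    return common.GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks)
  return common.GetNonSparseImage(which, tmpdir)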
@@ -149,7 +149,7 @@ class DataImage(Image):
 class FileImage(Image):
   """An image wrapped around a raw image file."""

-  def __init__(self, path, hashtree_info_generator=None):
+  def __init__(self, path):
     self.path = path
     self.blocksize = 4096
     self._file_size = os.path.getsize(self.path)

@@ -166,10 +166,6 @@ class FileImage(Image):

     self.generator_lock = threading.Lock()

-    self.hashtree_info = None
-    if hashtree_info_generator:
-      self.hashtree_info = hashtree_info_generator.Generate(self)
-
     zero_blocks = []
     nonzero_blocks = []
     reference = '\0' * self.blocksize

@@ -190,8 +186,6 @@ class FileImage(Image):
       self.file_map["__ZERO"] = RangeSet(data=zero_blocks)
     if nonzero_blocks:
       self.file_map["__NONZERO"] = RangeSet(data=nonzero_blocks)
-    if self.hashtree_info:
-      self.file_map["__HASHTREE"] = self.hashtree_info.hashtree_range

   def __del__(self):
     self._file.close()
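FileImage now classifies blocks only as zero or nonzero; the __HASHTREE entry in its file map is gone. A simplified, self-contained sketch of the same scan over a raw image file (not the patch's exact bookkeeping, which records (start, end) pairs for a RangeSet; the 4096-byte block size is taken from the constructor shown above):

def classify_blocks(path, blocksize=4096):
  """Return (zero_blocks, nonzero_blocks) as lists of block indices."""
  zero_blocks = []
  nonzero_blocks = []
  reference = b"\0" * blocksize
  with open(path, "rb") as f:
    index = 0
    while True:
      data = f.read(blocksize)
      if not data:
        break
      # Compare only the bytes actually read, so a short final block still
      # counts as zero when it contains no nonzero byte.
      if data == reference[:len(data)]:
        zero_blocks.append(index)
      else:
        nonzero_blocks.append(index)
      index += 1
  return zero_blocks, nonzero_blocks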
@@ -40,12 +40,9 @@ def GetBlockDifferences(target_zip, source_zip, target_info, source_info,
                                         info_dict=source_info,
                                         allow_shared_blocks=allow_shared_blocks)

-    hashtree_info_generator = verity_utils.CreateHashtreeInfoGenerator(
-        name, 4096, target_info)
     partition_tgt = common.GetUserImage(name, OPTIONS.target_tmp, target_zip,
                                         info_dict=target_info,
-                                        allow_shared_blocks=allow_shared_blocks,
-                                        hashtree_info_generator=hashtree_info_generator)
+                                        allow_shared_blocks=allow_shared_blocks)

     # Check the first block of the source system partition for remount R/W only
     # if the filesystem is ext4.
@@ -41,8 +41,7 @@ class SparseImage(object):
   """

   def __init__(self, simg_fn, file_map_fn=None, clobbered_blocks=None,
-               mode="rb", build_map=True, allow_shared_blocks=False,
-               hashtree_info_generator=None):
+               mode="rb", build_map=True, allow_shared_blocks=False):
     self.simg_f = f = open(simg_fn, mode)

     header_bin = f.read(28)

@@ -74,8 +73,6 @@ class SparseImage(object):
                 blk_sz, total_chunks)

     if not build_map:
-      assert not hashtree_info_generator, \
-        "Cannot generate the hashtree info without building the offset map."
       return

     pos = 0   # in blocks

@@ -114,16 +111,6 @@ class SparseImage(object):
         if data_sz != 0:
           raise ValueError("Don't care chunk input size is non-zero (%u)" %
                            (data_sz))
-        # Fills the don't care data ranges with zeros.
-        # TODO(xunchang) pass the care_map to hashtree info generator.
-        if hashtree_info_generator:
-          fill_data = '\x00' * 4
-          # In order to compute verity hashtree on device, we need to write
-          # zeros explicitly to the don't care ranges. Because these ranges may
-          # contain non-zero data from the previous build.
-          care_data.append(pos)
-          care_data.append(pos + chunk_sz)
-          offset_map.append((pos, chunk_sz, None, fill_data))

         pos += chunk_sz


@@ -150,10 +137,6 @@ class SparseImage(object):
     extended = extended.intersect(all_blocks).subtract(self.care_map)
     self.extended = extended

-    self.hashtree_info = None
-    if hashtree_info_generator:
-      self.hashtree_info = hashtree_info_generator.Generate(self)
-
     if file_map_fn:
       self.LoadFileBlockMap(file_map_fn, self.clobbered_blocks,
                             allow_shared_blocks)

@@ -286,8 +269,6 @@ class SparseImage(object):
       remaining = remaining.subtract(ranges)

     remaining = remaining.subtract(clobbered_blocks)
-    if self.hashtree_info:
-      remaining = remaining.subtract(self.hashtree_info.hashtree_range)

     # For all the remaining blocks in the care_map (ie, those that
     # aren't part of the data for any file nor part of the clobbered_blocks),

@@ -350,8 +331,6 @@ class SparseImage(object):
       out["__NONZERO-%d" % i] = rangelib.RangeSet(data=blocks)
     if clobbered_blocks:
       out["__COPY"] = clobbered_blocks
-    if self.hashtree_info:
-      out["__HASHTREE"] = self.hashtree_info.hashtree_range

   def ResetFileMap(self):
     """Throw away the file map and treat the entire image as
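With the constructor trimmed, a SparseImage is now built from just the image, its block map and the clobbered-blocks range, exactly as GetSparseImage does above. A short usage sketch; the paths are placeholders for the files GetSparseImage locates under the unzipped target-files, and the keyword name matches the new signature:

import sparse_img

path = "IMAGES/system.img"        # placeholder: prebuilt sparse image
mappath = "IMAGES/system.map"     # placeholder: its block map
clobbered_blocks = "0"

image = sparse_img.SparseImage(
    path, mappath, clobbered_blocks, allow_shared_blocks=True)
print(image.care_map)             # blocks the image actually cares about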
@@ -27,8 +27,7 @@ from rangelib import RangeSet
 from test_utils import (
     get_testdata_dir, ReleaseToolsTestCase, SkipIfExternalToolsUnavailable)
 from verity_utils import (
-    CalculateVbmetaDigest, CreateHashtreeInfoGenerator,
-    CreateVerityImageBuilder, HashtreeInfo)
+    CalculateVbmetaDigest, CreateVerityImageBuilder)

 BLOCK_SIZE = common.BLOCK_SIZE
@@ -282,37 +282,6 @@ class VerifiedBootVersion2VerityImageBuilder(VerityImageBuilder):
       raise BuildVerityImageError("Failed to add AVB footer: {}".format(output))


-class HashtreeInfoGenerationError(Exception):
-  """An Exception raised during hashtree info generation."""
-
-  def __init__(self, message):
-    Exception.__init__(self, message)
-
-
-class HashtreeInfo(object):
-  def __init__(self):
-    self.hashtree_range = None
-    self.filesystem_range = None
-    self.hash_algorithm = None
-    self.salt = None
-    self.root_hash = None
-
-
-def CreateHashtreeInfoGenerator(partition_name, block_size, info_dict):
-  return None
-
-
-class HashtreeInfoGenerator(object):
-  def Generate(self, image):
-    raise NotImplementedError
-
-  def DecomposeSparseImage(self, image):
-    raise NotImplementedError
-
-  def ValidateHashtree(self):
-    raise NotImplementedError
-
-
 def CreateCustomImageBuilder(info_dict, partition_name, partition_size,
                              key_path, algorithm, signing_args):
   builder = None