Detect downgrade by checking build time for all partitions
Checking ro.build.date.utc alone is not enough to detect a downgrade
when several target files are merged, because the build timestamp can
differ for each partition. Instead, check the build timestamp of every
partition in addition to ro.build.date.utc.
With this change, OTA generation is aborted at package creation time
instead of the update being rejected when it is installed on the
device.
This also fixes --override_timestamp, so it is still possible to
generate a package where the timestamps are reversed, as introduced in
3e6161a3b3.
Issue: 315913966
Test: Manual; confirmed that OTA generation is aborted when the
timestamp of a post vendor partition is older than the pre one, and
that an OTA created with --override_timestamp is generated and
installs successfully.
Test: atest --host releasetools_test
Change-Id: I275e67a3840f4ef2263381c253231068e72f47d2

commit b77972402e
parent 3f2432967b

3 changed files with 140 additions and 8 deletions
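Why ro.build.date.utc alone can miss a downgrade is easiest to see with
a small standalone sketch: in a merged build the overall timestamp can
move forward while an individual partition (for example vendor) was
built earlier than in the source. The snippet below is illustrative
only; the function name and timestamp values are invented here, and the
real implementation is the HandleDowngradeMetadata change in the diff
that follows.

# Minimal sketch of the per-partition check described above. Names and
# values are illustrative; the real logic works on BuildInfo objects.
def detect_downgrades(pre_timestamps, post_timestamps):
  """Return entries whose timestamp goes backwards from pre to post."""
  downgrades = []
  for key, pre_ts in pre_timestamps.items():
    post_ts = post_timestamps.get(key)
    if post_ts is not None and int(post_ts) < int(pre_ts):
      downgrades.append(f"{key} pre: {pre_ts} post: {post_ts}")
  return downgrades

# Merged target files: ro.build.date.utc moves forward, but the vendor
# partition was built earlier than the one in the source build.
pre = {"ro.build.date.utc": "1700000000", "vendor": "1700000000"}
post = {"ro.build.date.utc": "1700100000", "vendor": "1699000000"}
print(detect_downgrades(pre, post))
# ['vendor pre: 1700000000 post: 1699000000']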
@@ -1038,7 +1038,11 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None):
   partition_timestamps_flags = []
   # Enforce a max timestamp this payload can be applied on top of.
   if OPTIONS.downgrade:
-    max_timestamp = source_info.GetBuildProp("ro.build.date.utc")
+    # When generating ota between merged target-files, partition build date can
+    # decrease in target, at the same time as ro.build.date.utc increases,
+    # so always pick largest value.
+    max_timestamp = max(source_info.GetBuildProp("ro.build.date.utc"),
+                        str(metadata.postcondition.timestamp))
     partition_timestamps_flags = GeneratePartitionTimestampFlagsDowngrade(
         metadata.precondition.partition_state,
         metadata.postcondition.partition_state
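One detail in the hunk above: max() is applied to two strings, since
GetBuildProp returns the property value as a string and the
postcondition timestamp is wrapped in str(). For Unix-epoch timestamps
of equal length the lexicographic comparison agrees with the numeric
one, which the toy check below (values invented for illustration)
demonstrates.

# Toy check with made-up values: lexicographic max() on equal-length
# epoch-second strings matches the numeric max().
pre_utc = "1700000000"       # e.g. the source build's ro.build.date.utc
post_ts = str(1700100000)    # e.g. metadata.postcondition.timestamp
assert max(pre_utc, post_ts) == str(max(int(pre_utc), int(post_ts)))
print(max(pre_utc, post_ts))  # prints 1700100000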
@@ -364,26 +364,66 @@ def HandleDowngradeMetadata(metadata_proto, target_info, source_info):
   # Only incremental OTAs are allowed to reach here.
   assert OPTIONS.incremental_source is not None

+  # used for logging upon errors
+  log_downgrades = []
+  log_upgrades = []
+
   post_timestamp = target_info.GetBuildProp("ro.build.date.utc")
   pre_timestamp = source_info.GetBuildProp("ro.build.date.utc")
-  is_downgrade = int(post_timestamp) < int(pre_timestamp)
+  if int(post_timestamp) < int(pre_timestamp):
+    logger.info(f"ro.build.date.utc pre timestamp: {pre_timestamp}, "
+                f"post timestamp: {post_timestamp}. Downgrade detected.")
+    log_downgrades.append(f"ro.build.date.utc pre: {pre_timestamp} post: {post_timestamp}")
+  else:
+    logger.info(f"ro.build.date.utc pre timestamp: {pre_timestamp}, "
+                f"post timestamp: {post_timestamp}.")
+    log_upgrades.append(f"ro.build.date.utc pre: {pre_timestamp} post: {post_timestamp}")
+
+  # When merging system and vendor target files, it is not enough
+  # to check ro.build.date.utc, the timestamp for each partition must
+  # be checked.
+  if source_info.is_ab:
+    ab_partitions = set(source_info.get("ab_partitions"))
+    for partition in sorted(set(PARTITIONS_WITH_BUILD_PROP) & ab_partitions):
+
+      partition_prop = source_info.get('{}.build.prop'.format(partition))
+      # Skip if the partition is missing, or it doesn't have a build.prop
+      if not partition_prop or not partition_prop.build_props:
+        continue
+      partition_prop = target_info.get('{}.build.prop'.format(partition))
+      # Skip if the partition is missing, or it doesn't have a build.prop
+      if not partition_prop or not partition_prop.build_props:
+        continue
+
+      post_timestamp = target_info.GetPartitionBuildProp(
+          'ro.build.date.utc', partition)
+      pre_timestamp = source_info.GetPartitionBuildProp(
+          'ro.build.date.utc', partition)
+      if int(post_timestamp) < int(pre_timestamp):
+        logger.info(f"Partition {partition} pre timestamp: {pre_timestamp}, "
+                    f"post time: {post_timestamp}. Downgrade detected.")
+        log_downgrades.append(f"{partition} pre: {pre_timestamp} post: {post_timestamp}")
+      else:
+        logger.info(f"Partition {partition} pre timestamp: {pre_timestamp}, "
+                    f"post timestamp: {post_timestamp}.")
+        log_upgrades.append(f"{partition} pre: {pre_timestamp} post: {post_timestamp}")

   if OPTIONS.spl_downgrade:
     metadata_proto.spl_downgrade = True

   if OPTIONS.downgrade:
-    if not is_downgrade:
+    if len(log_downgrades) == 0:
       raise RuntimeError(
           "--downgrade or --override_timestamp specified but no downgrade "
-          "detected: pre: %s, post: %s" % (pre_timestamp, post_timestamp))
+          "detected. Current values for ro.build.date.utc: " + ', '.join(log_upgrades))
     metadata_proto.downgrade = True
   else:
-    if is_downgrade:
+    if len(log_downgrades) != 0:
       raise RuntimeError(
-          "Downgrade detected based on timestamp check: pre: %s, post: %s. "
+          "Downgrade detected based on timestamp check in ro.build.date.utc. "
           "Need to specify --override_timestamp OR --downgrade to allow "
-          "building the incremental." % (pre_timestamp, post_timestamp))
+          "building the incremental. Downgrades detected for: "
+          + ', '.join(log_downgrades))


 def ComputeRuntimeBuildInfos(default_build_info, boot_variable_values):
   """Returns a set of build info objects that may exist during runtime."""
@@ -163,6 +163,20 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
       'oem_fingerprint_properties': 'ro.product.device ro.product.brand',
   }

+  TEST_TARGET_VENDOR_INFO_DICT = common.PartitionBuildProps.FromDictionary(
+      'vendor', {
+          'ro.vendor.build.date.utc' : '87654321',
+          'ro.product.vendor.device':'vendor-device',
+          'ro.vendor.build.fingerprint': 'build-fingerprint-vendor'}
+  )
+
+  TEST_SOURCE_VENDOR_INFO_DICT = common.PartitionBuildProps.FromDictionary(
+      'vendor', {
+          'ro.vendor.build.date.utc' : '12345678',
+          'ro.product.vendor.device':'vendor-device',
+          'ro.vendor.build.fingerprint': 'build-fingerprint-vendor'}
+  )
+
   def setUp(self):
     self.testdata_dir = test_utils.get_testdata_dir()
     self.assertTrue(os.path.exists(self.testdata_dir))
@@ -351,6 +365,13 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
         source_info['build.prop'].build_props['ro.build.date.utc'],
         target_info['build.prop'].build_props['ro.build.date.utc'])

+  @staticmethod
+  def _test_GetPackageMetadata_swapVendorBuildTimestamps(target_info, source_info):
+    (target_info['vendor.build.prop'].build_props['ro.vendor.build.date.utc'],
+     source_info['vendor.build.prop'].build_props['ro.vendor.build.date.utc']) = (
+         source_info['vendor.build.prop'].build_props['ro.vendor.build.date.utc'],
+         target_info['vendor.build.prop'].build_props['ro.vendor.build.date.utc'])
+
   def test_GetPackageMetadata_unintentionalDowngradeDetected(self):
     target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
     source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
@@ -363,6 +384,24 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
     self.assertRaises(RuntimeError, self.GetLegacyOtaMetadata, target_info,
                       source_info)

+  def test_GetPackageMetadata_unintentionalVendorDowngradeDetected(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    target_info_dict['ab_update'] = 'true'
+    target_info_dict['ab_partitions'] = ['vendor']
+    target_info_dict["vendor.build.prop"] = copy.deepcopy(self.TEST_TARGET_VENDOR_INFO_DICT)
+    source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+    source_info_dict['ab_update'] = 'true'
+    source_info_dict['ab_partitions'] = ['vendor']
+    source_info_dict["vendor.build.prop"] = copy.deepcopy(self.TEST_SOURCE_VENDOR_INFO_DICT)
+    self._test_GetPackageMetadata_swapVendorBuildTimestamps(
+        target_info_dict, source_info_dict)
+
+    target_info = common.BuildInfo(target_info_dict, None)
+    source_info = common.BuildInfo(source_info_dict, None)
+    common.OPTIONS.incremental_source = ''
+    self.assertRaises(RuntimeError, self.GetLegacyOtaMetadata, target_info,
+                      source_info)
+
   def test_GetPackageMetadata_downgrade(self):
     target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
     source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
@@ -397,6 +436,55 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
         },
         metadata)

+  def test_GetPackageMetadata_vendorDowngrade(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    target_info_dict['ab_update'] = 'true'
+    target_info_dict['ab_partitions'] = ['vendor']
+    target_info_dict["vendor.build.prop"] = copy.deepcopy(self.TEST_TARGET_VENDOR_INFO_DICT)
+    source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+    source_info_dict['ab_update'] = 'true'
+    source_info_dict['ab_partitions'] = ['vendor']
+    source_info_dict["vendor.build.prop"] = copy.deepcopy(self.TEST_SOURCE_VENDOR_INFO_DICT)
+    self._test_GetPackageMetadata_swapVendorBuildTimestamps(
+        target_info_dict, source_info_dict)
+
+    target_info = common.BuildInfo(target_info_dict, None)
+    source_info = common.BuildInfo(source_info_dict, None)
+    common.OPTIONS.incremental_source = ''
+    common.OPTIONS.downgrade = True
+    common.OPTIONS.wipe_user_data = True
+    common.OPTIONS.spl_downgrade = True
+    metadata = self.GetLegacyOtaMetadata(target_info, source_info)
+    # Reset spl_downgrade so other tests are unaffected
+    common.OPTIONS.spl_downgrade = False
+
+    self.assertDictEqual(
+        {
+            'ota-downgrade': 'yes',
+            'ota-type': 'AB',
+            'ota-required-cache': '0',
+            'ota-wipe': 'yes',
+            'post-build': 'build-fingerprint-target',
+            'post-build-incremental': 'build-version-incremental-target',
+            'post-sdk-level': '27',
+            'post-security-patch-level': '2017-12-01',
+            'post-timestamp': '1500000000',
+            'pre-device': 'product-device',
+            'pre-build': 'build-fingerprint-source',
+            'pre-build-incremental': 'build-version-incremental-source',
+            'spl-downgrade': 'yes',
+        },
+        metadata)
+
+    post_build = GetPackageMetadata(target_info, source_info).postcondition
+    self.assertEqual('vendor', post_build.partition_state[0].partition_name)
+    self.assertEqual('12345678', post_build.partition_state[0].version)
+
+    pre_build = GetPackageMetadata(target_info, source_info).precondition
+    self.assertEqual('vendor', pre_build.partition_state[0].partition_name)
+    self.assertEqual('87654321', pre_build.partition_state[0].version)
+
+
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetTargetFilesZipForSecondaryImages(self):
     input_file = construct_target_files(secondary=True)