#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import copy
import json
import os
import subprocess
import tempfile
import time
import unittest
import zipfile
from hashlib import sha1

import common
import test_utils
import validate_target_files
from images import EmptyImage, DataImage
from rangelib import RangeSet


KiB = 1024
MiB = 1024 * KiB
GiB = 1024 * MiB


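# get_2gb_string() is a generator: it yields a little over 2 GiB of data in
# 4 MiB steps (one 4 KiB random block followed by zero padding per step), so
# tests can stream large inputs without materializing the whole blob in
# memory at once.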
def get_2gb_string():
  size = int(2 * GiB + 1)
  block_size = 4 * KiB
  step_size = 4 * MiB
  # Generate a long string with holes, e.g. 'xyz\x00abc\x00...'.
  for _ in range(0, size, step_size):
    yield os.urandom(block_size)
    yield b'\0' * (step_size - block_size)


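# An Android build fingerprint has the form
#   brand/product/device:release/build-id/incremental:type/tags
# (see the expected strings asserted in the tests below). BuildInfoTest
# exercises how common.BuildInfo assembles and validates that string from
# per-partition build.prop dictionaries.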
class BuildInfoTest(test_utils.ReleaseToolsTestCase):

  TEST_INFO_FINGERPRINT_DICT = {
      'build.prop': common.PartitionBuildProps.FromDictionary(
          'system', {
              'ro.product.brand': 'product-brand',
              'ro.product.name': 'product-name',
              'ro.product.device': 'product-device',
              'ro.build.version.release': 'version-release',
              'ro.build.id': 'build-id',
              'ro.build.version.incremental': 'version-incremental',
              'ro.build.type': 'build-type',
              'ro.build.tags': 'build-tags',
              'ro.build.version.sdk': 30,
          }
      ),
  }

  TEST_INFO_DICT = {
      'build.prop': common.PartitionBuildProps.FromDictionary(
          'system', {
              'ro.product.device': 'product-device',
              'ro.product.name': 'product-name',
              'ro.build.fingerprint': 'build-fingerprint',
              'ro.build.foo': 'build-foo'}
      ),
      'system.build.prop': common.PartitionBuildProps.FromDictionary(
          'system', {
              'ro.product.system.brand': 'product-brand',
              'ro.product.system.name': 'product-name',
              'ro.product.system.device': 'product-device',
              'ro.system.build.version.release': 'version-release',
              'ro.system.build.id': 'build-id',
              'ro.system.build.version.incremental': 'version-incremental',
              'ro.system.build.type': 'build-type',
              'ro.system.build.tags': 'build-tags',
              'ro.system.build.foo': 'build-foo'}
      ),
      'vendor.build.prop': common.PartitionBuildProps.FromDictionary(
          'vendor', {
              'ro.product.vendor.brand': 'vendor-product-brand',
              'ro.product.vendor.name': 'vendor-product-name',
              'ro.product.vendor.device': 'vendor-product-device',
              'ro.vendor.build.version.release': 'vendor-version-release',
              'ro.vendor.build.id': 'vendor-build-id',
              'ro.vendor.build.version.incremental':
                  'vendor-version-incremental',
              'ro.vendor.build.type': 'vendor-build-type',
              'ro.vendor.build.tags': 'vendor-build-tags'}
      ),
      'property1': 'value1',
      'property2': 4096,
  }

  TEST_INFO_DICT_USES_OEM_PROPS = {
      'build.prop': common.PartitionBuildProps.FromDictionary(
          'system', {
              'ro.product.name': 'product-name',
              'ro.build.thumbprint': 'build-thumbprint',
              'ro.build.bar': 'build-bar'}
      ),
      'vendor.build.prop': common.PartitionBuildProps.FromDictionary(
          'vendor', {
              'ro.vendor.build.fingerprint': 'vendor-build-fingerprint'}
      ),
      'property1': 'value1',
      'property2': 4096,
      'oem_fingerprint_properties': 'ro.product.device ro.product.brand',
  }

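  # When 'oem_fingerprint_properties' names properties that come from OEM
  # partitions (here ro.product.device and ro.product.brand), BuildInfo falls
  # back to a thumbprint-based identifier instead of a full fingerprint,
  # filling the OEM-sourced values from the first dict in TEST_OEM_DICTS
  # (see test_init_with_oem_props below).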
  TEST_OEM_DICTS = [
      {
          'ro.product.brand': 'brand1',
          'ro.product.device': 'device1',
      },
      {
          'ro.product.brand': 'brand2',
          'ro.product.device': 'device2',
      },
      {
          'ro.product.brand': 'brand3',
          'ro.product.device': 'device3',
      },
  ]

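  # ro.product.* properties are resolved by walking the partitions listed in
  # ro.product.property_source_order and returning the first match. The three
  # dicts below cover the modern order, the Android 10 order (which still
  # included product_services), and pre-10 builds that carry no source order
  # at all.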
  TEST_INFO_DICT_PROPERTY_SOURCE_ORDER = {
      'build.prop': common.PartitionBuildProps.FromDictionary(
          'system', {
              'ro.build.fingerprint': 'build-fingerprint',
              'ro.product.property_source_order':
                  'product,odm,vendor,system_ext,system'}
      ),
      'system.build.prop': common.PartitionBuildProps.FromDictionary(
          'system', {
              'ro.product.system.device': 'system-product-device'}
      ),
      'vendor.build.prop': common.PartitionBuildProps.FromDictionary(
          'vendor', {
              'ro.product.vendor.device': 'vendor-product-device'}
      ),
  }

  TEST_INFO_DICT_PROPERTY_SOURCE_ORDER_ANDROID_10 = {
      'build.prop': common.PartitionBuildProps.FromDictionary(
          'system', {
              'ro.build.fingerprint': 'build-fingerprint',
              'ro.product.property_source_order':
                  'product,product_services,odm,vendor,system',
              'ro.build.version.release': '10',
              'ro.build.version.codename': 'REL'}
      ),
      'system.build.prop': common.PartitionBuildProps.FromDictionary(
          'system', {
              'ro.product.system.device': 'system-product-device'}
      ),
      'vendor.build.prop': common.PartitionBuildProps.FromDictionary(
          'vendor', {
              'ro.product.vendor.device': 'vendor-product-device'}
      ),
  }

  TEST_INFO_DICT_PROPERTY_SOURCE_ORDER_ANDROID_9 = {
      'build.prop': common.PartitionBuildProps.FromDictionary(
          'system', {
              'ro.product.device': 'product-device',
              'ro.build.fingerprint': 'build-fingerprint',
              'ro.build.version.release': '9',
              'ro.build.version.codename': 'REL'}
      ),
      'system.build.prop': common.PartitionBuildProps.FromDictionary(
          'system', {
              'ro.product.system.device': 'system-product-device'}
      ),
      'vendor.build.prop': common.PartitionBuildProps.FromDictionary(
          'vendor', {
              'ro.product.vendor.device': 'vendor-product-device'}
      ),
  }

  def test_init(self):
    target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
    self.assertEqual('product-device', target_info.device)
    self.assertEqual('build-fingerprint', target_info.fingerprint)
    self.assertFalse(target_info.is_ab)
    self.assertIsNone(target_info.oem_props)

  def test_init_with_oem_props(self):
    target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
                                   self.TEST_OEM_DICTS)
    self.assertEqual('device1', target_info.device)
    self.assertEqual('brand1/product-name/device1:build-thumbprint',
                     target_info.fingerprint)

    # Swap the order in oem_dicts, which should lead to a different BuildInfo.
    oem_dicts = copy.copy(self.TEST_OEM_DICTS)
    oem_dicts[0], oem_dicts[2] = oem_dicts[2], oem_dicts[0]
    target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
                                   oem_dicts)
    self.assertEqual('device3', target_info.device)
    self.assertEqual('brand3/product-name/device3:build-thumbprint',
                     target_info.fingerprint)

  def test_init_badFingerprint(self):
    info_dict = copy.deepcopy(self.TEST_INFO_DICT)
    info_dict['build.prop'].build_props[
        'ro.build.fingerprint'] = 'bad fingerprint'
    self.assertRaises(ValueError, common.BuildInfo, info_dict, None)

    info_dict['build.prop'].build_props[
        'ro.build.fingerprint'] = 'bad\x80fingerprint'
    self.assertRaises(ValueError, common.BuildInfo, info_dict, None)

  def test_init_goodFingerprint(self):
    info_dict = copy.deepcopy(self.TEST_INFO_FINGERPRINT_DICT)
    build_info = common.BuildInfo(info_dict)
    self.assertEqual(
        'product-brand/product-name/product-device:version-release/build-id/'
        'version-incremental:build-type/build-tags', build_info.fingerprint)

    build_props = info_dict['build.prop'].build_props
    del build_props['ro.build.id']
    build_props['ro.build.legacy.id'] = 'legacy-build-id'
    build_info = common.BuildInfo(info_dict, use_legacy_id=True)
    self.assertEqual(
        'product-brand/product-name/product-device:version-release/'
        'legacy-build-id/version-incremental:build-type/build-tags',
        build_info.fingerprint)

    self.assertRaises(common.ExternalError, common.BuildInfo, info_dict, None,
                      False)

    info_dict['avb_enable'] = 'true'
    info_dict['vbmeta_digest'] = 'abcde12345'
    build_info = common.BuildInfo(info_dict, use_legacy_id=False)
    self.assertEqual(
        'product-brand/product-name/product-device:version-release/'
        'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
        build_info.fingerprint)

  def test___getitem__(self):
    target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
    self.assertEqual('value1', target_info['property1'])
    self.assertEqual(4096, target_info['property2'])
    self.assertEqual('build-foo',
                     target_info['build.prop'].GetProp('ro.build.foo'))

  def test___getitem__with_oem_props(self):
    target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
                                   self.TEST_OEM_DICTS)
    self.assertEqual('value1', target_info['property1'])
    self.assertEqual(4096, target_info['property2'])
    self.assertIsNone(target_info['build.prop'].GetProp('ro.build.foo'))

  def test___setitem__(self):
    target_info = common.BuildInfo(copy.deepcopy(self.TEST_INFO_DICT), None)
    self.assertEqual('value1', target_info['property1'])
    target_info['property1'] = 'value2'
    self.assertEqual('value2', target_info['property1'])

    self.assertEqual('build-foo',
                     target_info['build.prop'].GetProp('ro.build.foo'))
    target_info['build.prop'].build_props['ro.build.foo'] = 'build-bar'
    self.assertEqual('build-bar',
                     target_info['build.prop'].GetProp('ro.build.foo'))

  def test_get(self):
    target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
    self.assertEqual('value1', target_info.get('property1'))
    self.assertEqual(4096, target_info.get('property2'))
    self.assertEqual(4096, target_info.get('property2', 1024))
    self.assertEqual(1024, target_info.get('property-nonexistent', 1024))
    self.assertEqual('build-foo',
                     target_info.get('build.prop').GetProp('ro.build.foo'))

  def test_get_with_oem_props(self):
    target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
                                   self.TEST_OEM_DICTS)
    self.assertEqual('value1', target_info.get('property1'))
    self.assertEqual(4096, target_info.get('property2'))
    self.assertEqual(4096, target_info.get('property2', 1024))
    self.assertEqual(1024, target_info.get('property-nonexistent', 1024))
    self.assertIsNone(target_info.get('build.prop').GetProp('ro.build.foo'))

  def test_items(self):
    target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
    items = target_info.items()
    self.assertIn(('property1', 'value1'), items)
    self.assertIn(('property2', 4096), items)

  def test_GetBuildProp(self):
    target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
    self.assertEqual('build-foo', target_info.GetBuildProp('ro.build.foo'))
    self.assertRaises(common.ExternalError, target_info.GetBuildProp,
                      'ro.build.nonexistent')

  def test_GetBuildProp_with_oem_props(self):
    target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
                                   self.TEST_OEM_DICTS)
    self.assertEqual('build-bar', target_info.GetBuildProp('ro.build.bar'))
    self.assertRaises(common.ExternalError, target_info.GetBuildProp,
                      'ro.build.nonexistent')

  def test_GetPartitionFingerprint(self):
    target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
    self.assertEqual(
        target_info.GetPartitionFingerprint('vendor'),
        'vendor-product-brand/vendor-product-name/vendor-product-device'
        ':vendor-version-release/vendor-build-id/vendor-version-incremental'
        ':vendor-build-type/vendor-build-tags')

  def test_GetPartitionFingerprint_system_other_uses_system(self):
    target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
    self.assertEqual(
        target_info.GetPartitionFingerprint('system_other'),
        target_info.GetPartitionFingerprint('system'))

  def test_GetPartitionFingerprint_uses_fingerprint_prop_if_available(self):
    info_dict = copy.deepcopy(self.TEST_INFO_DICT)
    info_dict['vendor.build.prop'].build_props[
        'ro.vendor.build.fingerprint'] = 'vendor:fingerprint'
    target_info = common.BuildInfo(info_dict, None)
    self.assertEqual(
        target_info.GetPartitionFingerprint('vendor'),
        'vendor:fingerprint')

  def test_WriteMountOemScript(self):
    target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
                                   self.TEST_OEM_DICTS)
    script_writer = test_utils.MockScriptWriter()
    target_info.WriteMountOemScript(script_writer)
    self.assertEqual([('Mount', '/oem', None)], script_writer.lines)

  def test_WriteDeviceAssertions(self):
    target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
    script_writer = test_utils.MockScriptWriter()
    target_info.WriteDeviceAssertions(script_writer, False)
    self.assertEqual([('AssertDevice', 'product-device')], script_writer.lines)

  def test_WriteDeviceAssertions_with_oem_props(self):
    target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
                                   self.TEST_OEM_DICTS)
    script_writer = test_utils.MockScriptWriter()
    target_info.WriteDeviceAssertions(script_writer, False)
    self.assertEqual(
        [
            ('AssertOemProperty', 'ro.product.device',
             ['device1', 'device2', 'device3'], False),
            ('AssertOemProperty', 'ro.product.brand',
             ['brand1', 'brand2', 'brand3'], False),
        ],
        script_writer.lines)

  def test_ResolveRoProductProperty_FromVendor(self):
    info_dict = copy.deepcopy(self.TEST_INFO_DICT_PROPERTY_SOURCE_ORDER)
    info = common.BuildInfo(info_dict, None)
    self.assertEqual('vendor-product-device',
                     info.GetBuildProp('ro.product.device'))

  def test_ResolveRoProductProperty_FromSystem(self):
    info_dict = copy.deepcopy(self.TEST_INFO_DICT_PROPERTY_SOURCE_ORDER)
    del info_dict['vendor.build.prop'].build_props['ro.product.vendor.device']
    info = common.BuildInfo(info_dict, None)
    self.assertEqual('system-product-device',
                     info.GetBuildProp('ro.product.device'))

  def test_ResolveRoProductProperty_InvalidPropertySearchOrder(self):
    info_dict = copy.deepcopy(self.TEST_INFO_DICT_PROPERTY_SOURCE_ORDER)
    info_dict['build.prop'].build_props[
        'ro.product.property_source_order'] = 'bad-source'
    with self.assertRaisesRegex(common.ExternalError,
                                'Invalid ro.product.property_source_order'):
      info = common.BuildInfo(info_dict, None)
      info.GetBuildProp('ro.product.device')

  def test_ResolveRoProductProperty_Android10PropertySearchOrder(self):
    info_dict = copy.deepcopy(
        self.TEST_INFO_DICT_PROPERTY_SOURCE_ORDER_ANDROID_10)
    info = common.BuildInfo(info_dict, None)
    self.assertEqual('vendor-product-device',
                     info.GetBuildProp('ro.product.device'))

  def test_ResolveRoProductProperty_Android9PropertySearchOrder(self):
    info_dict = copy.deepcopy(
        self.TEST_INFO_DICT_PROPERTY_SOURCE_ORDER_ANDROID_9)
    info = common.BuildInfo(info_dict, None)
    self.assertEqual('product-device',
                     info.GetBuildProp('ro.product.device'))


class CommonZipTest(test_utils.ReleaseToolsTestCase):

  def _verify(self, zip_file, zip_file_name, arcname, expected_hash,
              test_file_name=None, expected_stat=None, expected_mode=0o644,
              expected_compress_type=zipfile.ZIP_STORED):
    # Verify the stat if present.
    if test_file_name is not None:
      new_stat = os.stat(test_file_name)
      self.assertEqual(int(expected_stat.st_mode), int(new_stat.st_mode))
      self.assertEqual(int(expected_stat.st_mtime), int(new_stat.st_mtime))

    # Reopen the zip file to verify.
    zip_file = zipfile.ZipFile(zip_file_name, "r", allowZip64=True)

    # Verify the timestamp.
    info = zip_file.getinfo(arcname)
    self.assertEqual(info.date_time, (2009, 1, 1, 0, 0, 0))

    # Verify the file mode.
    mode = (info.external_attr >> 16) & 0o777
    self.assertEqual(mode, expected_mode)

    # Verify the compress type.
    self.assertEqual(info.compress_type, expected_compress_type)

    # Verify the zip contents.
    entry = zip_file.open(arcname)
    sha1_hash = sha1()
    for chunk in iter(lambda: entry.read(4 * MiB), b''):
      sha1_hash.update(chunk)
    self.assertEqual(expected_hash, sha1_hash.hexdigest())
    self.assertIsNone(zip_file.testzip())

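  # The helpers below write entries through common.ZipWrite()/ZipWriteStr()
  # and then re-verify them with _verify(). Note that _verify() expects every
  # entry's timestamp to be normalized to 2009-01-01, so the archive output
  # does not depend on when the test (or the build) runs.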
  def _test_ZipWrite(self, contents, extra_zipwrite_args=None):
    extra_zipwrite_args = dict(extra_zipwrite_args or {})

    test_file = tempfile.NamedTemporaryFile(delete=False)
    test_file_name = test_file.name

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name

    # File names within an archive strip the leading slash.
    arcname = extra_zipwrite_args.get("arcname", test_file_name)
    if arcname[0] == "/":
      arcname = arcname[1:]

    zip_file.close()
    zip_file = zipfile.ZipFile(zip_file_name, "w", allowZip64=True)

    try:
      sha1_hash = sha1()
      for data in contents:
        sha1_hash.update(bytes(data))
        test_file.write(bytes(data))
      test_file.close()

      expected_mode = extra_zipwrite_args.get("perms", 0o644)
      expected_compress_type = extra_zipwrite_args.get("compress_type",
                                                       zipfile.ZIP_STORED)

      # Arbitrary timestamp, just to make sure common.ZipWrite() restores
      # the timestamp after writing.
      os.utime(test_file_name, (1234567, 1234567))
      expected_stat = os.stat(test_file_name)
      common.ZipWrite(zip_file, test_file_name, **extra_zipwrite_args)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, arcname, sha1_hash.hexdigest(),
                   test_file_name, expected_stat, expected_mode,
                   expected_compress_type)
    finally:
      os.remove(test_file_name)
      os.remove(zip_file_name)

  def _test_ZipWriteStr(self, zinfo_or_arcname, contents, extra_args=None):
    extra_args = dict(extra_args or {})

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name
    zip_file.close()

    zip_file = zipfile.ZipFile(zip_file_name, "w", allowZip64=True)

    try:
      expected_compress_type = extra_args.get("compress_type",
                                              zipfile.ZIP_STORED)
      if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
        arcname = zinfo_or_arcname
        expected_mode = extra_args.get("perms", 0o644)
      else:
        arcname = zinfo_or_arcname.filename
        if zinfo_or_arcname.external_attr:
          zinfo_perms = zinfo_or_arcname.external_attr >> 16
        else:
          zinfo_perms = 0o600
        expected_mode = extra_args.get("perms", zinfo_perms)

      common.ZipWriteStr(zip_file, zinfo_or_arcname, contents, **extra_args)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, arcname,
                   sha1(contents).hexdigest(),
                   expected_mode=expected_mode,
                   expected_compress_type=expected_compress_type)
    finally:
      os.remove(zip_file_name)

  def _test_ZipWriteStr_large_file(self, large, small, extra_args=None):
    extra_args = dict(extra_args or {})

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name

    test_file = tempfile.NamedTemporaryFile(delete=False)
    test_file_name = test_file.name

    arcname_large = test_file_name
    arcname_small = "bar"

    # File names within an archive strip the leading slash.
    if arcname_large[0] == "/":
      arcname_large = arcname_large[1:]

    zip_file.close()
    zip_file = zipfile.ZipFile(zip_file_name, "w", allowZip64=True)

    try:
      sha1_hash = sha1()
      for data in large:
        sha1_hash.update(data)
        test_file.write(data)
      test_file.close()

      # Arbitrary timestamp, just to make sure common.ZipWrite() restores
      # the timestamp after writing.
      os.utime(test_file_name, (1234567, 1234567))
      expected_stat = os.stat(test_file_name)
      expected_mode = 0o644
      expected_compress_type = extra_args.get("compress_type",
                                              zipfile.ZIP_STORED)

      common.ZipWrite(zip_file, test_file_name, **extra_args)
      common.ZipWriteStr(zip_file, arcname_small, small, **extra_args)
      common.ZipClose(zip_file)

      # Verify the contents written by ZipWrite().
      self._verify(zip_file, zip_file_name, arcname_large,
                   sha1_hash.hexdigest(), test_file_name, expected_stat,
                   expected_mode, expected_compress_type)

      # Verify the contents written by ZipWriteStr().
      self._verify(zip_file, zip_file_name, arcname_small,
                   sha1(small).hexdigest(),
                   expected_compress_type=expected_compress_type)
    finally:
      os.remove(zip_file_name)
      os.remove(test_file_name)

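  # Python's zipfile module switches to the ZIP64 format for entries larger
  # than zipfile.ZIP64_LIMIT, which defaults to (1 << 31) - 1 bytes. The zip
  # helpers in common temporarily adjust that module-level limit while writing
  # large entries; the helper below asserts it is restored afterwards.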
  def _test_reset_ZIP64_LIMIT(self, func, *args):
    default_limit = (1 << 31) - 1
    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
    func(*args)
    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)

  def test_ZipWrite(self):
    file_contents = os.urandom(1024)
    self._test_ZipWrite(file_contents)

  def test_ZipWrite_with_opts(self):
    file_contents = os.urandom(1024)
    self._test_ZipWrite(file_contents, {
        "arcname": "foobar",
        "perms": 0o777,
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWrite(file_contents, {
        "arcname": "foobar",
        "perms": 0o700,
        "compress_type": zipfile.ZIP_STORED,
    })

  def test_ZipWrite_large_file(self):
    file_contents = get_2gb_string()
    self._test_ZipWrite(file_contents, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })

  def test_ZipWrite_resets_ZIP64_LIMIT(self):
    self._test_reset_ZIP64_LIMIT(self._test_ZipWrite, "")

  def test_ZipWriteStr(self):
    random_string = os.urandom(1024)
    # Passing arcname
    self._test_ZipWriteStr("foo", random_string)

    # Passing zinfo
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_ZipWriteStr(zinfo, random_string)

    # Timestamp in the zinfo should be overwritten.
    zinfo.date_time = (2015, 3, 1, 15, 30, 0)
    self._test_ZipWriteStr(zinfo, random_string)

  def test_ZipWriteStr_with_opts(self):
    random_string = os.urandom(1024)
    # Passing arcname
    self._test_ZipWriteStr("foo", random_string, {
        "perms": 0o700,
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWriteStr("bar", random_string, {
        "compress_type": zipfile.ZIP_STORED,
    })

    # Passing zinfo
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_ZipWriteStr(zinfo, random_string, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWriteStr(zinfo, random_string, {
        "perms": 0o600,
        "compress_type": zipfile.ZIP_STORED,
    })
    self._test_ZipWriteStr(zinfo, random_string, {
        "perms": 0o000,
        "compress_type": zipfile.ZIP_STORED,
    })

  def test_ZipWriteStr_large_file(self):
    # zipfile.writestr() doesn't work when the str size is over 2GiB even with
    # the workaround. We will only test the case of writing a string into a
    # large archive.
    long_string = get_2gb_string()
    short_string = os.urandom(1024)
    self._test_ZipWriteStr_large_file(long_string, short_string, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })

  def test_ZipWriteStr_resets_ZIP64_LIMIT(self):
    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, 'foo', b'')
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, zinfo, b'')

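  # Regression test for b/21309935: ZipWriteStr() must resolve permissions in
  # this order -- an explicit perms= argument wins, then the mode embedded in
  # zinfo.external_attr, then the defaults (0o644 when passing a plain
  # arcname, 0o600 for a bare ZipInfo).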
  def test_bug21309935(self):
    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name
    zip_file.close()

    try:
      random_string = os.urandom(1024)
      zip_file = zipfile.ZipFile(zip_file_name, "w", allowZip64=True)
      # Default perms should be 0o644 when passing the filename.
      common.ZipWriteStr(zip_file, "foo", random_string)
      # Honor the specified perms.
      common.ZipWriteStr(zip_file, "bar", random_string, perms=0o755)
      # The perms in zinfo should be untouched.
      zinfo = zipfile.ZipInfo(filename="baz")
      zinfo.external_attr = 0o740 << 16
      common.ZipWriteStr(zip_file, zinfo, random_string)
      # Explicitly specified perms take priority.
      zinfo = zipfile.ZipInfo(filename="qux")
      zinfo.external_attr = 0o700 << 16
      common.ZipWriteStr(zip_file, zinfo, random_string, perms=0o400)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, "foo",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o644)
      self._verify(zip_file, zip_file_name, "bar",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o755)
      self._verify(zip_file, zip_file_name, "baz",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o740)
      self._verify(zip_file, zip_file_name, "qux",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o400)
    finally:
      os.remove(zip_file_name)

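  # common.ZipDelete() appears to shell out to an external zip utility rather
  # than using pure zipfile (hence the SkipIfExternalToolsUnavailable
  # decorator below, and ExternalError when the entry does not exist).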
  @test_utils.SkipIfExternalToolsUnavailable()
  def test_ZipDelete(self):
    zip_file = tempfile.NamedTemporaryFile(delete=False, suffix='.zip')
    output_zip = zipfile.ZipFile(zip_file.name, 'w',
                                 compression=zipfile.ZIP_DEFLATED)
    with tempfile.NamedTemporaryFile() as entry_file:
      entry_file.write(os.urandom(1024))
      common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test3')
      common.ZipClose(output_zip)
    zip_file.close()

    try:
      common.ZipDelete(zip_file.name, 'Test2')
      with zipfile.ZipFile(zip_file.name, 'r', allowZip64=True) as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertTrue('Test3' in entries)

      self.assertRaises(
          common.ExternalError, common.ZipDelete, zip_file.name, 'Test2')
      with zipfile.ZipFile(zip_file.name, 'r', allowZip64=True) as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertTrue('Test3' in entries)

      common.ZipDelete(zip_file.name, ['Test3'])
      with zipfile.ZipFile(zip_file.name, 'r', allowZip64=True) as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertFalse('Test3' in entries)

      common.ZipDelete(zip_file.name, ['Test1', 'Test2'])
      with zipfile.ZipFile(zip_file.name, 'r', allowZip64=True) as check_zip:
        entries = check_zip.namelist()
        self.assertFalse('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertFalse('Test3' in entries)
    finally:
      os.remove(zip_file.name)

  @staticmethod
  def _test_UnzipTemp_createZipFile():
    zip_file = common.MakeTempFile(suffix='.zip')
    output_zip = zipfile.ZipFile(
        zip_file, 'w', compression=zipfile.ZIP_DEFLATED)
    contents = os.urandom(1024)
    with tempfile.NamedTemporaryFile() as entry_file:
      entry_file.write(contents)
      common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
      common.ZipWrite(output_zip, entry_file.name, arcname='Foo3')
      common.ZipWrite(output_zip, entry_file.name, arcname='Bar4')
      common.ZipWrite(output_zip, entry_file.name, arcname='Dir5/Baz5')
      common.ZipClose(output_zip)
    return zip_file

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_UnzipTemp(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file)
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_UnzipTemp_withPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()

    unzipped_dir = common.UnzipTemp(zip_file, ['Test1'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

    unzipped_dir = common.UnzipTemp(zip_file, ['Test1', 'Foo3'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

    unzipped_dir = common.UnzipTemp(zip_file, ['Test*', 'Foo3*'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

    unzipped_dir = common.UnzipTemp(zip_file, ['*Test1', '*Baz*'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  def test_UnzipTemp_withEmptyPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file, [])
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_UnzipTemp_withPartiallyMatchingPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file, ['Test*', 'Nonexistent*'])
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))

  def test_UnzipTemp_withNoMatchingPatterns(self):
    zip_file = self._test_UnzipTemp_createZipFile()
    unzipped_dir = common.UnzipTemp(zip_file, ['Foo4', 'Nonexistent*'])
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test1')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Test2')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Foo3')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
    self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))


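# Each line in META/apkcerts.txt maps an APK to its signing material, e.g.
#   name="Foo.apk" certificate="certs/foo.x509.pem" private_key="certs/foo.pk8"
# with optional compressed="gz|xz" and partition="..." fields. The expected
# certmaps below drop the .x509.pem/.pk8 suffixes to get the key basename.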
class CommonApkUtilsTest(test_utils.ReleaseToolsTestCase):
  """Tests the APK utils related functions."""

  APKCERTS_TXT1 = (
      'name="RecoveryLocalizer.apk" certificate="certs/devkey.x509.pem"'
      ' private_key="certs/devkey.pk8"\n'
      'name="Settings.apk"'
      ' certificate="build/make/target/product/security/platform.x509.pem"'
      ' private_key="build/make/target/product/security/platform.pk8"\n'
      'name="TV.apk" certificate="PRESIGNED" private_key=""\n'
  )

  APKCERTS_CERTMAP1 = {
      'RecoveryLocalizer.apk': 'certs/devkey',
      'Settings.apk': 'build/make/target/product/security/platform',
      'TV.apk': 'PRESIGNED',
  }

  APKCERTS_TXT2 = (
      'name="Compressed1.apk" certificate="certs/compressed1.x509.pem"'
      ' private_key="certs/compressed1.pk8" compressed="gz"\n'
      'name="Compressed2a.apk" certificate="certs/compressed2.x509.pem"'
      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
      'name="Compressed2b.apk" certificate="certs/compressed2.x509.pem"'
      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
      'name="Compressed3.apk" certificate="certs/compressed3.x509.pem"'
      ' private_key="certs/compressed3.pk8" compressed="gz"\n'
  )

  APKCERTS_CERTMAP2 = {
      'Compressed1.apk': 'certs/compressed1',
      'Compressed2a.apk': 'certs/compressed2',
      'Compressed2b.apk': 'certs/compressed2',
      'Compressed3.apk': 'certs/compressed3',
  }

  APKCERTS_TXT3 = (
      'name="Compressed4.apk" certificate="certs/compressed4.x509.pem"'
      ' private_key="certs/compressed4.pk8" compressed="xz"\n'
  )

  APKCERTS_CERTMAP3 = {
      'Compressed4.apk': 'certs/compressed4',
  }

  # Test parsing with no optional fields, both optional fields, and only the
  # partition optional field.
  APKCERTS_TXT4 = (
      'name="RecoveryLocalizer.apk" certificate="certs/devkey.x509.pem"'
      ' private_key="certs/devkey.pk8"\n'
      'name="Settings.apk"'
      ' certificate="build/make/target/product/security/platform.x509.pem"'
      ' private_key="build/make/target/product/security/platform.pk8"'
      ' compressed="gz" partition="system"\n'
      'name="TV.apk" certificate="PRESIGNED" private_key=""'
      ' partition="product"\n'
  )

  APKCERTS_CERTMAP4 = {
      'RecoveryLocalizer.apk': 'certs/devkey',
      'Settings.apk': 'build/make/target/product/security/platform',
      'TV.apk': 'PRESIGNED',
  }

  def setUp(self):
    self.testdata_dir = test_utils.get_testdata_dir()

  @staticmethod
  def _write_apkcerts_txt(apkcerts_txt, additional=None):
    if additional is None:
      additional = []
    target_files = common.MakeTempFile(suffix='.zip')
    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
      target_files_zip.writestr('META/apkcerts.txt', apkcerts_txt)
      for entry in additional:
        target_files_zip.writestr(entry, '')
    return target_files

  def test_ReadApkCerts_NoncompressedApks(self):
    target_files = self._write_apkcerts_txt(self.APKCERTS_TXT1)
    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP1, certmap)
    self.assertIsNone(ext)

  def test_ReadApkCerts_CompressedApks(self):
    # We have "installed" Compressed1.apk.gz only. Note that Compressed3.apk is
    # not stored in '.gz' format, so it shouldn't be considered as installed.
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT2,
        ['Compressed1.apk.gz', 'Compressed3.apk'])

    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP2, certmap)
    self.assertEqual('.gz', ext)

    # Alternative case with '.xz'.
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT3, ['Compressed4.apk.xz'])

    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP3, certmap)
    self.assertEqual('.xz', ext)

  def test_ReadApkCerts_CompressedAndNoncompressedApks(self):
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT1 + self.APKCERTS_TXT2,
        ['Compressed1.apk.gz', 'Compressed3.apk'])

    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    certmap_merged = self.APKCERTS_CERTMAP1.copy()
    certmap_merged.update(self.APKCERTS_CERTMAP2)
    self.assertDictEqual(certmap_merged, certmap)
    self.assertEqual('.gz', ext)

  def test_ReadApkCerts_MultipleCompressionMethods(self):
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT2 + self.APKCERTS_TXT3,
        ['Compressed1.apk.gz', 'Compressed4.apk.xz'])

    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)

  def test_ReadApkCerts_MismatchingKeys(self):
    malformed_apkcerts_txt = (
        'name="App1.apk" certificate="certs/cert1.x509.pem"'
        ' private_key="certs/cert2.pk8"\n'
    )
    target_files = self._write_apkcerts_txt(malformed_apkcerts_txt)

    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)

  def test_ReadApkCerts_WithWithoutOptionalFields(self):
    target_files = self._write_apkcerts_txt(self.APKCERTS_TXT4)
    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP4, certmap)
    self.assertIsNone(ext)

  def test_ExtractPublicKey(self):
    cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    with open(pubkey) as pubkey_fp:
      self.assertEqual(pubkey_fp.read(), common.ExtractPublicKey(cert))

  def test_ExtractPublicKey_invalidInput(self):
    wrong_input = os.path.join(self.testdata_dir, 'testkey.pk8')
    self.assertRaises(AssertionError, common.ExtractPublicKey, wrong_input)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_ExtractAvbPublicKey(self):
    privkey = os.path.join(self.testdata_dir, 'testkey.key')
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    extracted_from_privkey = common.ExtractAvbPublicKey('avbtool', privkey)
    extracted_from_pubkey = common.ExtractAvbPublicKey('avbtool', pubkey)
    with open(extracted_from_privkey, 'rb') as privkey_fp, \
        open(extracted_from_pubkey, 'rb') as pubkey_fp:
      self.assertEqual(privkey_fp.read(), pubkey_fp.read())

  def test_ParseCertificate(self):
    cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')

    cmd = ['openssl', 'x509', '-in', cert, '-outform', 'DER']
    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                      universal_newlines=False)
    expected, _ = proc.communicate()
    self.assertEqual(0, proc.returncode)

    with open(cert) as cert_fp:
      actual = common.ParseCertificate(cert_fp.read())
    self.assertEqual(expected, actual)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersion(self):
    test_app = os.path.join(self.testdata_dir, 'TestApp.apk')
    self.assertEqual('24', common.GetMinSdkVersion(test_app))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersion_invalidInput(self):
    self.assertRaises(
        common.ExternalError, common.GetMinSdkVersion, 'does-not-exist.apk')

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersionInt(self):
    test_app = os.path.join(self.testdata_dir, 'TestApp.apk')
    self.assertEqual(24, common.GetMinSdkVersionInt(test_app, {}))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetMinSdkVersionInt_invalidInput(self):
    self.assertRaises(
        common.ExternalError, common.GetMinSdkVersionInt, 'does-not-exist.apk',
        {})


class CommonUtilsTest(test_utils.ReleaseToolsTestCase):

  def setUp(self):
    self.testdata_dir = test_utils.get_testdata_dir()

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_emptyBlockMapFile(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([
              (0xCAC1, 6),
              (0xCAC3, 3),
              (0xCAC1, 4)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr('IMAGES/system.map', '')
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertDictEqual(
        {
            '__COPY': RangeSet("0"),
            '__NONZERO-0': RangeSet("1-5 9-12"),
        },
        sparse_image.file_map)

  def test_PartitionMapFromTargetFiles(self):
    target_files_dir = common.MakeTempDir()
    os.makedirs(os.path.join(target_files_dir, 'SYSTEM'))
    os.makedirs(os.path.join(target_files_dir, 'SYSTEM', 'vendor'))
    os.makedirs(os.path.join(target_files_dir, 'PRODUCT'))
    os.makedirs(os.path.join(target_files_dir, 'SYSTEM', 'product'))
    os.makedirs(os.path.join(target_files_dir, 'SYSTEM', 'vendor', 'odm'))
    os.makedirs(os.path.join(target_files_dir, 'VENDOR_DLKM'))
    partition_map = common.PartitionMapFromTargetFiles(target_files_dir)
    self.assertDictEqual(
        partition_map,
        {
            'system': 'SYSTEM',
            'vendor': 'SYSTEM/vendor',
            # Prefer PRODUCT over SYSTEM/product
            'product': 'PRODUCT',
            'odm': 'SYSTEM/vendor/odm',
            'vendor_dlkm': 'VENDOR_DLKM',
            # No system_ext or odm_dlkm
        })

  def test_SharedUidPartitionViolations(self):
    uid_dict = {
        'android.uid.phone': {
            'system': ['system_phone.apk'],
            'system_ext': ['system_ext_phone.apk'],
        },
        'android.uid.wifi': {
            'vendor': ['vendor_wifi.apk'],
            'odm': ['odm_wifi.apk'],
        },
    }
    errors = common.SharedUidPartitionViolations(
        uid_dict, [('system', 'system_ext'), ('vendor', 'odm')])
    self.assertEqual(errors, [])

  def test_SharedUidPartitionViolations_Violation(self):
    uid_dict = {
        'android.uid.phone': {
            'system': ['system_phone.apk'],
            'vendor': ['vendor_phone.apk'],
        },
    }
    errors = common.SharedUidPartitionViolations(
        uid_dict, [('system', 'system_ext'), ('vendor', 'odm')])
    self.assertIn(
        ('APK sharedUserId "android.uid.phone" found across partition groups '
         'in partitions "system,vendor"'), errors)

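  # GetSparseImage() pairs IMAGES/<name>.img with IMAGES/<name>.map; the map
  # file lists "path block-ranges" pairs in RangeSet notation (e.g.
  # '/system/file1 1-5 9-10'), and blocks not claimed by any file end up in
  # the __COPY / __NONZERO-* meta entries asserted in these tests.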
  def test_GetSparseImage_missingImageFile(self):
    self.assertRaises(
        AssertionError, common.GetSparseImage, 'system2', self.testdata_dir,
        None, False)
    self.assertRaises(
        AssertionError, common.GetSparseImage, 'unknown', self.testdata_dir,
        None, False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_missingBlockMapFile(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([
              (0xCAC1, 6),
              (0xCAC3, 3),
              (0xCAC1, 4)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_sharedBlocks_notAllowed(self):
    """Tests the case of having overlapping blocks but disallowed."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      # Block 10 is shared between two files.
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 10-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

@test_utils.SkipIfExternalToolsUnavailable()
|
2018-02-13 22:54:02 +01:00
|
|
|
def test_GetSparseImage_sharedBlocks_allowed(self):
|
|
|
|
"""Tests the case for target using BOARD_EXT4_SHARE_DUP_BLOCKS := true."""
|
|
|
|
target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
|
2020-09-22 22:15:57 +02:00
|
|
|
with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
|
2018-02-13 22:54:02 +01:00
|
|
|
# Construct an image with a care_map of "0-5 9-12".
|
|
|
|
target_files_zip.write(
|
|
|
|
test_utils.construct_sparse_image([(0xCAC2, 16)]),
|
|
|
|
arcname='IMAGES/system.img')
|
|
|
|
# Block 10 is shared between two files.
|
|
|
|
target_files_zip.writestr(
|
|
|
|
'IMAGES/system.map',
|
|
|
|
'\n'.join([
|
|
|
|
'/system/file1 1-5 9-10',
|
|
|
|
'/system/file2 10-12']))
|
|
|
|
target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
|
|
|
|
target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
|
|
|
|
|
2018-01-09 22:21:02 +01:00
|
|
|
tempdir = common.UnzipTemp(target_files)
|
2020-09-22 22:15:57 +02:00
|
|
|
with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
|
2018-01-09 22:21:02 +01:00
|
|
|
sparse_image = common.GetSparseImage('system', tempdir, input_zip, True)
|
2018-02-13 22:54:02 +01:00
|
|
|
|
|
|
|
self.assertDictEqual(
|
|
|
|
{
|
|
|
|
'__COPY': RangeSet("0"),
|
|
|
|
'__NONZERO-0': RangeSet("6-8 13-15"),
|
|
|
|
'/system/file1': RangeSet("1-5 9-10"),
|
|
|
|
'/system/file2': RangeSet("11-12"),
|
|
|
|
},
|
|
|
|
sparse_image.file_map)
|
|
|
|
|
|
|
|
# '/system/file2' should be marked with 'uses_shared_blocks', but not with
|
|
|
|
# 'incomplete'.
|
|
|
|
self.assertTrue(
|
|
|
|
sparse_image.file_map['/system/file2'].extra['uses_shared_blocks'])
|
|
|
|
self.assertNotIn(
|
|
|
|
'incomplete', sparse_image.file_map['/system/file2'].extra)
|
|
|
|
|
2019-10-07 06:55:20 +02:00
|
|
|
# '/system/file1' will only contain one field -- a copy of the input text.
|
|
|
|
self.assertEqual(1, len(sparse_image.file_map['/system/file1'].extra))
|
|
|
|
|
|
|
|
# Meta entries should not have any extra tag.
|
2018-02-13 22:54:02 +01:00
|
|
|
self.assertFalse(sparse_image.file_map['__COPY'].extra)
|
|
|
|
self.assertFalse(sparse_image.file_map['__NONZERO-0'].extra)
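
  # RangeSet strings are space-separated, inclusive block ranges; e.g.
  # RangeSet("6-8 13-15") covers blocks {6, 7, 8, 13, 14, 15}.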

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_incompleteRanges(self):
    """Tests the case of ext4 images with holes."""
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '/system/file1 1-5 9-10',
              '/system/file2 11-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '/system/file2' has fewer blocks listed (2) than actual (3).
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertEqual(
        '1-5 9-10',
        sparse_image.file_map['/system/file1'].extra['text_str'])
    self.assertTrue(sparse_image.file_map['/system/file2'].extra['incomplete'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_systemRootImage_filenameWithExtraLeadingSlash(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//system/file2 11-12',
              '/system/app/file3 13-15']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '//system/file2' has fewer blocks listed (2) than actual (3).
      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
      # '/system/app/file3' has fewer blocks listed (3) than actual (4).
      target_files_zip.writestr('SYSTEM/app/file3', os.urandom(4096 * 4))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertEqual(
        '1-5 9-10',
        sparse_image.file_map['//system/file1'].extra['text_str'])
    self.assertTrue(sparse_image.file_map['//system/file2'].extra['incomplete'])
    self.assertTrue(
        sparse_image.file_map['/system/app/file3'].extra['incomplete'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_systemRootImage_nonSystemFiles(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//init.rc 13-15']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
      # '//init.rc' has fewer blocks listed (3) than actual (4).
      target_files_zip.writestr('ROOT/init.rc', os.urandom(4096 * 4))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
      sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)

    self.assertEqual(
        '1-5 9-10',
        sparse_image.file_map['//system/file1'].extra['text_str'])
    self.assertTrue(sparse_image.file_map['//init.rc'].extra['incomplete'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetSparseImage_fileNotFound(self):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
      target_files_zip.write(
          test_utils.construct_sparse_image([(0xCAC2, 16)]),
          arcname='IMAGES/system.img')
      target_files_zip.writestr(
          'IMAGES/system.map',
          '\n'.join([
              '//system/file1 1-5 9-10',
              '//system/file2 11-12']))
      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))

    tempdir = common.UnzipTemp(target_files)
    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as input_zip:
      self.assertRaises(
          AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
          False)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg(self):
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': pubkey,
        'avb_system_rollback_index_location': 2,
    }
    args = common.GetAvbChainedPartitionArg('system', info_dict).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('system', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))
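
  # The chained-partition argument passed to avbtool has the form
  # "<partition>:<rollback_index_location>:<public_key_path>", which is what
  # the split(':') assertions above rely on.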

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg_withPrivateKey(self):
    key = os.path.join(self.testdata_dir, 'testkey.key')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_product_key_path': key,
        'avb_product_rollback_index_location': 2,
    }
    args = common.GetAvbChainedPartitionArg('product', info_dict).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('product', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg_withSpecifiedKey(self):
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': 'does-not-exist',
        'avb_system_rollback_index_location': 2,
    }
    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
    args = common.GetAvbChainedPartitionArg(
        'system', info_dict, pubkey).split(':')
    self.assertEqual(3, len(args))
    self.assertEqual('system', args[0])
    self.assertEqual('2', args[1])
    self.assertTrue(os.path.exists(args[2]))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_GetAvbChainedPartitionArg_invalidKey(self):
    pubkey = os.path.join(self.testdata_dir, 'testkey_with_passwd.x509.pem')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_system_key_path': pubkey,
        'avb_system_rollback_index_location': 2,
    }
    self.assertRaises(
        common.ExternalError, common.GetAvbChainedPartitionArg, 'system',
        info_dict)

  INFO_DICT_DEFAULT = {
      'recovery_api_version': 3,
      'fstab_version': 2,
      'system_root_image': 'true',
      'no_recovery': 'true',
      'recovery_as_boot': 'true',
  }
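
  # The LoadInfoDict tests below start from this dict and delete keys to model
  # common device configs: system-as-root, recovery-as-boot, and devices that
  # ship without a recovery partition.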

  def test_LoadListFromFile(self):
    file_path = os.path.join(self.testdata_dir,
                             'merge_config_framework_item_list')
    contents = common.LoadListFromFile(file_path)
    expected_contents = [
        'META/apkcerts.txt',
        'META/filesystem_config.txt',
        'META/root_filesystem_config.txt',
        'META/system_manifest.xml',
        'META/system_matrix.xml',
        'META/update_engine_config.txt',
        'PRODUCT/*',
        'ROOT/*',
        'SYSTEM/*',
    ]
    self.assertEqual(sorted(contents), sorted(expected_contents))

  @staticmethod
  def _test_LoadInfoDict_createTargetFiles(info_dict, fstab_path):
    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
      info_values = ''.join(
          ['{}={}\n'.format(k, v) for k, v in sorted(info_dict.items())])
      common.ZipWriteStr(target_files_zip, 'META/misc_info.txt', info_values)

      FSTAB_TEMPLATE = "/dev/block/system {} ext4 ro,barrier=1 defaults"
      if info_dict.get('system_root_image') == 'true':
        fstab_values = FSTAB_TEMPLATE.format('/')
      else:
        fstab_values = FSTAB_TEMPLATE.format('/system')
      common.ZipWriteStr(target_files_zip, fstab_path, fstab_values)

      common.ZipWriteStr(
          target_files_zip, 'META/file_contexts', 'file-contexts')
    return target_files
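
  # The helper above builds the minimal target_files layout that LoadInfoDict
  # needs: META/misc_info.txt with the key=value pairs, a recovery fstab at
  # the requested archive path, and a placeholder META/file_contexts entry.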

  def test_LoadInfoDict(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_legacyRecoveryFstabPath(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_LoadInfoDict_dirInput(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    unzipped = common.UnzipTemp(target_files)
    loaded_dict = common.LoadInfoDict(unzipped)
    self.assertEqual(3, loaded_dict['recovery_api_version'])
    self.assertEqual(2, loaded_dict['fstab_version'])
    self.assertIn('/', loaded_dict['fstab'])
    self.assertIn('/system', loaded_dict['fstab'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_LoadInfoDict_dirInput_legacyRecoveryFstabPath(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/etc/recovery.fstab')
    unzipped = common.UnzipTemp(target_files)
    loaded_dict = common.LoadInfoDict(unzipped)
    self.assertEqual(3, loaded_dict['recovery_api_version'])
    self.assertEqual(2, loaded_dict['fstab_version'])
    self.assertIn('/', loaded_dict['fstab'])
    self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_systemRootImageFalse(self):
    # Devices not using system-as-root nor recovery-as-boot. Non-A/B devices
    # launched prior to P will likely have this config.
    info_dict = copy.copy(self.INFO_DICT_DEFAULT)
    del info_dict['no_recovery']
    del info_dict['system_root_image']
    del info_dict['recovery_as_boot']

    target_files = self._test_LoadInfoDict_createTargetFiles(
        info_dict,
        'RECOVERY/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertNotIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_recoveryAsBootFalse(self):
    # Devices using system-as-root, but with a standalone recovery image.
    # Non-A/B devices launched since P will likely have this config.
    info_dict = copy.copy(self.INFO_DICT_DEFAULT)
    del info_dict['no_recovery']
    del info_dict['recovery_as_boot']

    target_files = self._test_LoadInfoDict_createTargetFiles(
        info_dict,
        'RECOVERY/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIn('/', loaded_dict['fstab'])
      self.assertIn('/system', loaded_dict['fstab'])

  def test_LoadInfoDict_noRecoveryTrue(self):
    # Device doesn't have a recovery partition at all.
    info_dict = copy.copy(self.INFO_DICT_DEFAULT)
    del info_dict['recovery_as_boot']

    target_files = self._test_LoadInfoDict_createTargetFiles(
        info_dict,
        'RECOVERY/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
      loaded_dict = common.LoadInfoDict(target_files_zip)
      self.assertEqual(3, loaded_dict['recovery_api_version'])
      self.assertEqual(2, loaded_dict['fstab_version'])
      self.assertIsNone(loaded_dict['fstab'])

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_LoadInfoDict_missingMetaMiscInfoTxt(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    common.ZipDelete(target_files, 'META/misc_info.txt')
    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
      self.assertRaises(ValueError, common.LoadInfoDict, target_files_zip)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_LoadInfoDict_repacking(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    unzipped = common.UnzipTemp(target_files)
    loaded_dict = common.LoadInfoDict(unzipped, True)
    self.assertEqual(3, loaded_dict['recovery_api_version'])
    self.assertEqual(2, loaded_dict['fstab_version'])
    self.assertIn('/', loaded_dict['fstab'])
    self.assertIn('/system', loaded_dict['fstab'])
    self.assertEqual(
        os.path.join(unzipped, 'ROOT'), loaded_dict['root_dir'])
    self.assertEqual(
        os.path.join(unzipped, 'META', 'root_filesystem_config.txt'),
        loaded_dict['root_fs_config'])

  def test_LoadInfoDict_repackingWithZipFileInput(self):
    target_files = self._test_LoadInfoDict_createTargetFiles(
        self.INFO_DICT_DEFAULT,
        'BOOT/RAMDISK/system/etc/recovery.fstab')
    with zipfile.ZipFile(target_files, 'r', allowZip64=True) as target_files_zip:
      self.assertRaises(
          AssertionError, common.LoadInfoDict, target_files_zip, True)

  def test_MergeDynamicPartitionInfoDicts_ReturnsMergedDict(self):
    framework_dict = {
        'use_dynamic_partitions': 'true',
        'super_partition_groups': 'group_a',
        'dynamic_partition_list': 'system',
        'super_group_a_partition_list': 'system',
    }
    vendor_dict = {
        'use_dynamic_partitions': 'true',
        'super_partition_groups': 'group_a group_b',
        'dynamic_partition_list': 'vendor product',
        'super_block_devices': 'super',
        'super_super_device_size': '3000',
        'super_group_a_partition_list': 'vendor',
        'super_group_a_group_size': '1000',
        'super_group_b_partition_list': 'product',
        'super_group_b_group_size': '2000',
    }
    merged_dict = common.MergeDynamicPartitionInfoDicts(
        framework_dict=framework_dict,
        vendor_dict=vendor_dict)
    expected_merged_dict = {
        'use_dynamic_partitions': 'true',
        'super_partition_groups': 'group_a group_b',
        'dynamic_partition_list': 'product system vendor',
        'super_block_devices': 'super',
        'super_super_device_size': '3000',
        'super_group_a_partition_list': 'system vendor',
        'super_group_a_group_size': '1000',
        'super_group_b_partition_list': 'product',
        'super_group_b_group_size': '2000',
    }
    self.assertEqual(merged_dict, expected_merged_dict)
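
  # Merging unions the two dicts: partition lists are combined and sorted,
  # while group sizes are taken from the vendor side (the framework's group
  # size is ignored, as the next test demonstrates).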

  def test_MergeDynamicPartitionInfoDicts_IgnoringFrameworkGroupSize(self):
    framework_dict = {
        'use_dynamic_partitions': 'true',
        'super_partition_groups': 'group_a',
        'dynamic_partition_list': 'system',
        'super_group_a_partition_list': 'system',
        'super_group_a_group_size': '5000',
    }
    vendor_dict = {
        'use_dynamic_partitions': 'true',
        'super_partition_groups': 'group_a group_b',
        'dynamic_partition_list': 'vendor product',
        'super_group_a_partition_list': 'vendor',
        'super_group_a_group_size': '1000',
        'super_group_b_partition_list': 'product',
        'super_group_b_group_size': '2000',
    }
    merged_dict = common.MergeDynamicPartitionInfoDicts(
        framework_dict=framework_dict,
        vendor_dict=vendor_dict)
    expected_merged_dict = {
        'use_dynamic_partitions': 'true',
        'super_partition_groups': 'group_a group_b',
        'dynamic_partition_list': 'product system vendor',
        'super_group_a_partition_list': 'system vendor',
        'super_group_a_group_size': '1000',
        'super_group_b_partition_list': 'product',
        'super_group_b_group_size': '2000',
    }
    self.assertEqual(merged_dict, expected_merged_dict)

  def test_GetAvbPartitionArg(self):
    info_dict = {}
    cmd = common.GetAvbPartitionArg('system', '/path/to/system.img', info_dict)
    self.assertEqual(
        ['--include_descriptors_from_image', '/path/to/system.img'], cmd)
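
  # Without a per-partition key in the info dict the partition is not chained;
  # its descriptors are inlined into vbmeta via
  # --include_descriptors_from_image instead.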

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_AppendVBMetaArgsForPartition_vendorAsChainedPartition(self):
    testdata_dir = test_utils.get_testdata_dir()
    pubkey = os.path.join(testdata_dir, 'testkey.pubkey.pem')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_vendor_key_path': pubkey,
        'avb_vendor_rollback_index_location': 5,
    }
    cmd = common.GetAvbPartitionArg('vendor', '/path/to/vendor.img', info_dict)
    self.assertEqual(2, len(cmd))
    self.assertEqual('--chain_partition', cmd[0])
    chained_partition_args = cmd[1].split(':')
    self.assertEqual(3, len(chained_partition_args))
    self.assertEqual('vendor', chained_partition_args[0])
    self.assertEqual('5', chained_partition_args[1])
    self.assertTrue(os.path.exists(chained_partition_args[2]))

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_AppendVBMetaArgsForPartition_recoveryAsChainedPartition_nonAb(self):
    testdata_dir = test_utils.get_testdata_dir()
    pubkey = os.path.join(testdata_dir, 'testkey.pubkey.pem')
    info_dict = {
        'avb_avbtool': 'avbtool',
        'avb_recovery_key_path': pubkey,
        'avb_recovery_rollback_index_location': 3,
    }
    cmd = common.GetAvbPartitionArg(
        'recovery', '/path/to/recovery.img', info_dict)
    self.assertFalse(cmd)
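
  # Presumably because a non-A/B recovery image is not covered by vbmeta
  # verification at boot, no avbtool argument is emitted for it; the A/B
  # variant below gets a regular chained-partition entry.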

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_AppendVBMetaArgsForPartition_recoveryAsChainedPartition_ab(self):
    testdata_dir = test_utils.get_testdata_dir()
    pubkey = os.path.join(testdata_dir, 'testkey.pubkey.pem')
    info_dict = {
        'ab_update': 'true',
        'avb_avbtool': 'avbtool',
        'avb_recovery_key_path': pubkey,
        'avb_recovery_rollback_index_location': 3,
    }
    cmd = common.GetAvbPartitionArg(
        'recovery', '/path/to/recovery.img', info_dict)
    self.assertEqual(2, len(cmd))
    self.assertEqual('--chain_partition', cmd[0])
    chained_partition_args = cmd[1].split(':')
    self.assertEqual(3, len(chained_partition_args))
    self.assertEqual('recovery', chained_partition_args[0])
    self.assertEqual('3', chained_partition_args[1])
    self.assertTrue(os.path.exists(chained_partition_args[2]))

  def test_GenerateGkiCertificate_KeyPathNotFound(self):
    pubkey = os.path.join(self.testdata_dir, 'no_testkey_gki.pem')
    self.assertFalse(os.path.exists(pubkey))

    common.OPTIONS.info_dict = {
        'gki_signing_key_path': pubkey,
        'gki_signing_algorithm': 'SHA256_RSA4096',
        'gki_signing_signature_args': '--prop foo:bar',
    }
    test_file = tempfile.NamedTemporaryFile()
    self.assertRaises(common.ExternalError, common._GenerateGkiCertificate,
                      test_file.name, 'generic_kernel')

  def test_GenerateGkiCertificate_SearchKeyPathNotFound(self):
    pubkey = 'no_testkey_gki.pem'
    self.assertFalse(os.path.exists(pubkey))

    # Should raise ExternalError if no key is found under OPTIONS.search_path.
    search_path_dir = common.MakeTempDir()
    search_pubkey = os.path.join(search_path_dir, pubkey)
    self.assertFalse(os.path.exists(search_pubkey))

    common.OPTIONS.search_path = search_path_dir
    common.OPTIONS.info_dict = {
        'gki_signing_key_path': pubkey,
        'gki_signing_algorithm': 'SHA256_RSA4096',
        'gki_signing_signature_args': '--prop foo:bar',
    }
    test_file = tempfile.NamedTemporaryFile()
    self.assertRaises(common.ExternalError, common._GenerateGkiCertificate,
                      test_file.name, 'generic_kernel')


class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
  """Checks the format of install-recovery.sh.

  Its format should match between common.py and validate_target_files.py.
  """

  def setUp(self):
    self._tempdir = common.MakeTempDir()
    # Create a fake dict that contains the fstab info for boot & recovery.
    self._info = {"fstab": {}}
    fake_fstab = [
        "/dev/soc.0/by-name/boot /boot emmc defaults defaults",
        "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
    self._info["fstab"] = common.LoadRecoveryFSTab("\n".join, 2, fake_fstab)
    # Construct the gzipped recovery.img and boot.img.
    # (Presumably generated analogously to boot_data below, i.e.
    # echo -n "recovery" | gzip -f | hd.)
    self.recovery_data = bytearray([
        0x1f, 0x8b, 0x08, 0x00, 0x81, 0x11, 0x02, 0x5a, 0x00, 0x03, 0x2b, 0x4a,
        0x4d, 0xce, 0x2f, 0x4b, 0x2d, 0xaa, 0x04, 0x00, 0xc9, 0x93, 0x43, 0xf3,
        0x08, 0x00, 0x00, 0x00
    ])
    # echo -n "boot" | gzip -f | hd
    self.boot_data = bytearray([
        0x1f, 0x8b, 0x08, 0x00, 0x8c, 0x12, 0x02, 0x5a, 0x00, 0x03, 0x4b, 0xca,
        0xcf, 0x2f, 0x01, 0x00, 0xc4, 0xae, 0xed, 0x46, 0x04, 0x00, 0x00, 0x00
    ])

  def _out_tmp_sink(self, name, data, prefix="SYSTEM"):
    loc = os.path.join(self._tempdir, prefix, name)
    if not os.path.exists(os.path.dirname(loc)):
      os.makedirs(os.path.dirname(loc))
    with open(loc, "wb") as f:
      f.write(data)
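
  # _out_tmp_sink matches the output_sink(name, data) callback that
  # common.MakeRecoveryPatch expects, writing each artifact under the temp
  # directory so the generated script can be validated afterwards.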

  def test_full_recovery(self):
    recovery_image = common.File("recovery.img", self.recovery_data)
    boot_image = common.File("boot.img", self.boot_data)
    self._info["full_recovery_image"] = "true"

    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_recovery_from_boot(self):
    recovery_image = common.File("recovery.img", self.recovery_data)
    self._out_tmp_sink("recovery.img", recovery_image.data, "IMAGES")
    boot_image = common.File("boot.img", self.boot_data)
    self._out_tmp_sink("boot.img", boot_image.data, "IMAGES")

    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)
    # Validate 'recovery-from-boot' with bonus argument.
    self._out_tmp_sink("etc/recovery-resource.dat", b"bonus", "SYSTEM")
    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)


class MockBlockDifference(object):

  def __init__(self, partition, tgt, src=None):
    self.partition = partition
    self.tgt = tgt
    self.src = src

  def WriteScript(self, script, _, progress=None,
                  write_verify_script=False):
    if progress:
      script.AppendExtra("progress({})".format(progress))
    script.AppendExtra("patch({});".format(self.partition))
    if write_verify_script:
      self.WritePostInstallVerifyScript(script)

  def WritePostInstallVerifyScript(self, script):
    script.AppendExtra("verify({});".format(self.partition))


class FakeSparseImage(object):

  def __init__(self, size):
    self.blocksize = 4096
    self.total_blocks = size // 4096
    assert size % 4096 == 0, "{} is not a multiple of 4096".format(size)


class DynamicPartitionsDifferenceTest(test_utils.ReleaseToolsTestCase):

  @staticmethod
  def get_op_list(output_path):
    with zipfile.ZipFile(output_path, allowZip64=True) as output_zip:
      with output_zip.open('dynamic_partitions_op_list') as op_list:
        return [line.decode().strip() for line in op_list.readlines()
                if not line.startswith(b'#')]

  def setUp(self):
    self.script = test_utils.MockScriptWriter()
    self.output_path = common.MakeTempFile(suffix='.zip')

  def test_full(self):
    target_info = common.LoadDictionaryFromLines("""
dynamic_partition_list=system vendor
super_partition_groups=group_foo
super_group_foo_group_size={group_size}
super_group_foo_partition_list=system vendor
""".format(group_size=4 * GiB).split("\n"))
    block_diffs = [MockBlockDifference("system", FakeSparseImage(3 * GiB)),
                   MockBlockDifference("vendor", FakeSparseImage(1 * GiB))]

    dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs)
    with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    self.assertEqual(str(self.script).strip(), """
assert(update_dynamic_partitions(package_extract_file("dynamic_partitions_op_list")));
patch(system);
verify(system);
unmap_partition("system");
patch(vendor);
verify(vendor);
unmap_partition("vendor");
""".strip())

    lines = self.get_op_list(self.output_path)

    remove_all_groups = lines.index("remove_all_groups")
    add_group = lines.index("add_group group_foo 4294967296")
    add_vendor = lines.index("add vendor group_foo")
    add_system = lines.index("add system group_foo")
    resize_vendor = lines.index("resize vendor 1073741824")
    resize_system = lines.index("resize system 3221225472")

    self.assertLess(remove_all_groups, add_group,
                    "Should add groups after removing all groups")
    self.assertLess(add_group, min(add_vendor, add_system),
                    "Should add partitions after adding group")
    self.assertLess(add_system, resize_system,
                    "Should resize system after adding it")
    self.assertLess(add_vendor, resize_vendor,
                    "Should resize vendor after adding it")

  def test_inc_groups(self):
    source_info = common.LoadDictionaryFromLines("""
super_partition_groups=group_foo group_bar group_baz
super_group_foo_group_size={group_foo_size}
super_group_bar_group_size={group_bar_size}
""".format(group_foo_size=4 * GiB, group_bar_size=3 * GiB).split("\n"))
    target_info = common.LoadDictionaryFromLines("""
super_partition_groups=group_foo group_baz group_qux
super_group_foo_group_size={group_foo_size}
super_group_baz_group_size={group_baz_size}
super_group_qux_group_size={group_qux_size}
""".format(group_foo_size=3 * GiB, group_baz_size=4 * GiB,
           group_qux_size=1 * GiB).split("\n"))

    dp_diff = common.DynamicPartitionsDifference(target_info,
                                                 block_diffs=[],
                                                 source_info_dict=source_info)
    with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    lines = self.get_op_list(self.output_path)

    removed = lines.index("remove_group group_bar")
    shrunk = lines.index("resize_group group_foo 3221225472")
    grown = lines.index("resize_group group_baz 4294967296")
    added = lines.index("add_group group_qux 1073741824")

    self.assertLess(max(removed, shrunk),
                    min(grown, added),
                    "ops that remove / shrink partitions must precede ops that "
                    "grow / add partitions")

  def test_incremental(self):
    source_info = common.LoadDictionaryFromLines("""
dynamic_partition_list=system vendor product system_ext
super_partition_groups=group_foo
super_group_foo_group_size={group_foo_size}
super_group_foo_partition_list=system vendor product system_ext
""".format(group_foo_size=4 * GiB).split("\n"))
    target_info = common.LoadDictionaryFromLines("""
dynamic_partition_list=system vendor product odm
super_partition_groups=group_foo group_bar
super_group_foo_group_size={group_foo_size}
super_group_foo_partition_list=system vendor odm
super_group_bar_group_size={group_bar_size}
super_group_bar_partition_list=product
""".format(group_foo_size=3 * GiB, group_bar_size=1 * GiB).split("\n"))

    block_diffs = [MockBlockDifference("system", FakeSparseImage(1536 * MiB),
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("vendor", FakeSparseImage(512 * MiB),
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("product", FakeSparseImage(1024 * MiB),
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("system_ext", None,
                                       src=FakeSparseImage(1024 * MiB)),
                   MockBlockDifference("odm", FakeSparseImage(1024 * MiB),
                                       src=None)]

    dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs,
                                                 source_info_dict=source_info)
    with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    metadata_idx = self.script.lines.index(
        'assert(update_dynamic_partitions(package_extract_file('
        '"dynamic_partitions_op_list")));')
    self.assertLess(self.script.lines.index('patch(vendor);'), metadata_idx)
    self.assertLess(metadata_idx, self.script.lines.index('verify(vendor);'))
    for p in ("product", "system", "odm"):
      patch_idx = self.script.lines.index("patch({});".format(p))
      verify_idx = self.script.lines.index("verify({});".format(p))
      self.assertLess(metadata_idx, patch_idx,
                      "Should patch {} after updating metadata".format(p))
      self.assertLess(patch_idx, verify_idx,
                      "Should verify {} after patching".format(p))

    self.assertNotIn("patch(system_ext);", self.script.lines)

    lines = self.get_op_list(self.output_path)

    remove = lines.index("remove system_ext")
    move_product_out = lines.index("move product default")
    shrink = lines.index("resize vendor 536870912")
    shrink_group = lines.index("resize_group group_foo 3221225472")
    add_group_bar = lines.index("add_group group_bar 1073741824")
    add_odm = lines.index("add odm group_foo")
    grow_existing = lines.index("resize system 1610612736")
    grow_added = lines.index("resize odm 1073741824")
    move_product_in = lines.index("move product group_bar")

    max_idx_move_partition_out_foo = max(remove, move_product_out, shrink)
    min_idx_move_partition_in_foo = min(add_odm, grow_existing, grow_added)

    self.assertLess(max_idx_move_partition_out_foo, shrink_group,
                    "Must shrink group after partitions inside group are "
                    "shrunk / removed")

    self.assertLess(add_group_bar, move_product_in,
                    "Must add partitions to group after group is added")

    self.assertLess(max_idx_move_partition_out_foo,
                    min_idx_move_partition_in_foo,
                    "Must shrink partitions / remove partitions from group "
                    "before adding / moving partitions into group")

  def test_remove_partition(self):
    source_info = common.LoadDictionaryFromLines("""
blockimgdiff_versions=3,4
use_dynamic_partitions=true
dynamic_partition_list=foo
super_partition_groups=group_foo
super_group_foo_group_size={group_foo_size}
super_group_foo_partition_list=foo
""".format(group_foo_size=4 * GiB).split("\n"))
    target_info = common.LoadDictionaryFromLines("""
blockimgdiff_versions=3,4
use_dynamic_partitions=true
super_partition_groups=group_foo
super_group_foo_group_size={group_foo_size}
""".format(group_foo_size=4 * GiB).split("\n"))

    common.OPTIONS.info_dict = target_info
    common.OPTIONS.target_info_dict = target_info
    common.OPTIONS.source_info_dict = source_info
    common.OPTIONS.cache_size = 4 * 4096

    block_diffs = [common.BlockDifference("foo", EmptyImage(),
                                          src=DataImage("source", pad=True))]

    dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs,
                                                 source_info_dict=source_info)
    with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
      dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)

    self.assertNotIn("block_image_update", str(self.script),
                     "Removed partition should not be patched.")

    lines = self.get_op_list(self.output_path)
    self.assertEqual(lines, ["remove foo"])


class PartitionBuildPropsTest(test_utils.ReleaseToolsTestCase):

  def setUp(self):
    self.odm_build_prop = [
        'ro.odm.build.date.utc=1578430045',
        'ro.odm.build.fingerprint='
        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
        'ro.product.odm.device=coral',
        'import /odm/etc/build_${ro.boot.product.device_name}.prop',
    ]

  @staticmethod
  def _BuildZipFile(entries):
    input_file = common.MakeTempFile(prefix='target_files-', suffix='.zip')
    with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
      for name, content in entries.items():
        input_zip.writestr(name, content)

    return input_file
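
  # An 'import' line in build.prop names another prop file to load, with
  # ${...} placeholders substituted from placeholder_values first; the tests
  # below cover zero, one, and multiple import statements.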

  def test_parseBuildProps_noImportStatement(self):
    build_prop = [
        'ro.odm.build.date.utc=1578430045',
        'ro.odm.build.fingerprint='
        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
        'ro.product.odm.device=coral',
    ]
    input_file = self._BuildZipFile({
        'ODM/etc/build.prop': '\n'.join(build_prop),
    })

    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
      placeholder_values = {
          'ro.boot.product.device_name': ['std', 'pro']
      }
      partition_props = common.PartitionBuildProps.FromInputFile(
          input_zip, 'odm', placeholder_values)

    self.assertEqual({
        'ro.odm.build.date.utc': '1578430045',
        'ro.odm.build.fingerprint':
        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
        'ro.product.odm.device': 'coral',
    }, partition_props.build_props)

    self.assertEqual(set(), partition_props.prop_overrides)

  def test_parseBuildProps_singleImportStatement(self):
    build_std_prop = [
        'ro.product.odm.device=coral',
        'ro.product.odm.name=product1',
    ]
    build_pro_prop = [
        'ro.product.odm.device=coralpro',
        'ro.product.odm.name=product2',
    ]

    input_file = self._BuildZipFile({
        'ODM/etc/build.prop': '\n'.join(self.odm_build_prop),
        'ODM/etc/build_std.prop': '\n'.join(build_std_prop),
        'ODM/etc/build_pro.prop': '\n'.join(build_pro_prop),
    })

    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
      placeholder_values = {
          'ro.boot.product.device_name': 'std'
      }
      partition_props = common.PartitionBuildProps.FromInputFile(
          input_zip, 'odm', placeholder_values)

    self.assertEqual({
        'ro.odm.build.date.utc': '1578430045',
        'ro.odm.build.fingerprint':
        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
        'ro.product.odm.device': 'coral',
        'ro.product.odm.name': 'product1',
    }, partition_props.build_props)

    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
      placeholder_values = {
          'ro.boot.product.device_name': 'pro'
      }
      partition_props = common.PartitionBuildProps.FromInputFile(
          input_zip, 'odm', placeholder_values)

    self.assertEqual({
        'ro.odm.build.date.utc': '1578430045',
        'ro.odm.build.fingerprint':
        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
        'ro.product.odm.device': 'coralpro',
        'ro.product.odm.name': 'product2',
    }, partition_props.build_props)

  def test_parseBuildProps_noPlaceHolders(self):
    build_prop = copy.copy(self.odm_build_prop)
    input_file = self._BuildZipFile({
        'ODM/etc/build.prop': '\n'.join(build_prop),
    })

    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
      partition_props = common.PartitionBuildProps.FromInputFile(
          input_zip, 'odm')

    self.assertEqual({
        'ro.odm.build.date.utc': '1578430045',
        'ro.odm.build.fingerprint':
        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
        'ro.product.odm.device': 'coral',
    }, partition_props.build_props)

    self.assertEqual(set(), partition_props.prop_overrides)

  def test_parseBuildProps_multipleImportStatements(self):
    build_prop = copy.deepcopy(self.odm_build_prop)
    build_prop.append(
        'import /odm/etc/build_${ro.boot.product.product_name}.prop')

    build_std_prop = [
        'ro.product.odm.device=coral',
    ]
    build_pro_prop = [
        'ro.product.odm.device=coralpro',
    ]

    product1_prop = [
        'ro.product.odm.name=product1',
        'ro.product.not_care=not_care',
    ]

    product2_prop = [
        'ro.product.odm.name=product2',
        'ro.product.not_care=not_care',
    ]

    input_file = self._BuildZipFile({
        'ODM/etc/build.prop': '\n'.join(build_prop),
        'ODM/etc/build_std.prop': '\n'.join(build_std_prop),
        'ODM/etc/build_pro.prop': '\n'.join(build_pro_prop),
        'ODM/etc/build_product1.prop': '\n'.join(product1_prop),
        'ODM/etc/build_product2.prop': '\n'.join(product2_prop),
    })

    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
      placeholder_values = {
          'ro.boot.product.device_name': 'std',
          'ro.boot.product.product_name': 'product1',
          'ro.boot.product.not_care': 'not_care',
      }
      partition_props = common.PartitionBuildProps.FromInputFile(
          input_zip, 'odm', placeholder_values)

    self.assertEqual({
        'ro.odm.build.date.utc': '1578430045',
        'ro.odm.build.fingerprint':
        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
        'ro.product.odm.device': 'coral',
        'ro.product.odm.name': 'product1'
    }, partition_props.build_props)

    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
      placeholder_values = {
          'ro.boot.product.device_name': 'pro',
          'ro.boot.product.product_name': 'product2',
          'ro.boot.product.not_care': 'not_care',
      }
      partition_props = common.PartitionBuildProps.FromInputFile(
          input_zip, 'odm', placeholder_values)

    self.assertEqual({
        'ro.odm.build.date.utc': '1578430045',
        'ro.odm.build.fingerprint':
        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
        'ro.product.odm.device': 'coralpro',
        'ro.product.odm.name': 'product2'
    }, partition_props.build_props)

  def test_parseBuildProps_defineAfterOverride(self):
    build_prop = copy.deepcopy(self.odm_build_prop)
    build_prop.append('ro.product.odm.device=coral')

    build_std_prop = [
        'ro.product.odm.device=coral',
    ]
    build_pro_prop = [
        'ro.product.odm.device=coralpro',
    ]

    input_file = self._BuildZipFile({
        'ODM/etc/build.prop': '\n'.join(build_prop),
        'ODM/etc/build_std.prop': '\n'.join(build_std_prop),
        'ODM/etc/build_pro.prop': '\n'.join(build_pro_prop),
    })

    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
      placeholder_values = {
          'ro.boot.product.device_name': 'std',
      }

      self.assertRaises(ValueError, common.PartitionBuildProps.FromInputFile,
                        input_zip, 'odm', placeholder_values)

  def test_parseBuildProps_duplicateOverride(self):
    build_prop = copy.deepcopy(self.odm_build_prop)
    build_prop.append(
        'import /odm/etc/build_${ro.boot.product.product_name}.prop')

    build_std_prop = [
        'ro.product.odm.device=coral',
        'ro.product.odm.name=product1',
    ]
    build_pro_prop = [
        'ro.product.odm.device=coralpro',
    ]

    product1_prop = [
        'ro.product.odm.name=product1',
    ]

    product2_prop = [
        'ro.product.odm.name=product2',
    ]

    input_file = self._BuildZipFile({
        'ODM/etc/build.prop': '\n'.join(build_prop),
        'ODM/etc/build_std.prop': '\n'.join(build_std_prop),
        'ODM/etc/build_pro.prop': '\n'.join(build_pro_prop),
        'ODM/etc/build_product1.prop': '\n'.join(product1_prop),
        'ODM/etc/build_product2.prop': '\n'.join(product2_prop),
    })

    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
      placeholder_values = {
          'ro.boot.product.device_name': 'std',
          'ro.boot.product.product_name': 'product1',
      }
      self.assertRaises(ValueError, common.PartitionBuildProps.FromInputFile,
                        input_zip, 'odm', placeholder_values)

  def test_partitionBuildProps_fromInputFile_deepcopy(self):
    build_prop = [
        'ro.odm.build.date.utc=1578430045',
        'ro.odm.build.fingerprint='
        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
        'ro.product.odm.device=coral',
    ]
    input_file = self._BuildZipFile({
        'ODM/etc/build.prop': '\n'.join(build_prop),
    })

    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
      placeholder_values = {
          'ro.boot.product.device_name': ['std', 'pro']
      }
      partition_props = common.PartitionBuildProps.FromInputFile(
          input_zip, 'odm', placeholder_values)

    copied_props = copy.deepcopy(partition_props)
    self.assertEqual({
        'ro.odm.build.date.utc': '1578430045',
        'ro.odm.build.fingerprint':
        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
        'ro.product.odm.device': 'coral',
    }, copied_props.build_props)
|