#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
|
|
|
|
import os
|
|
|
|
import tempfile
|
|
|
|
import time
|
|
|
|
import unittest
|
|
|
|
import zipfile
|
2017-11-09 00:50:59 +01:00
|
|
|
from hashlib import sha1
|
|
|
|
|
2015-01-28 00:53:15 +01:00
|
|
|
import common
|
2017-06-21 02:00:55 +02:00
|
|
|
import validate_target_files
|
2015-01-28 00:53:15 +01:00
|
|
|
|
2017-11-09 00:50:59 +01:00
|
|
|
# Size constants used throughout the zip tests.
KiB = 1024
MiB = 1024 * KiB
GiB = 1024 * MiB


def get_2gb_string():
  """Yields chunks that together form a string of just over 2 GiB.

  The data is sparse: every 4 MiB step is a 4 KiB random block followed by a
  run of NUL bytes, e.g. 'xyz\x00abc\x00...'. Yielding chunks from a
  generator avoids ever holding the full 2 GiB in memory at once.
  """
  total_size = int(2 * GiB + 1)
  random_block_size = 4 * KiB
  step = 4 * MiB
  # The padding between random blocks is identical every step, so build it
  # once outside the loop.
  padding = '\0' * (step - random_block_size)
  for _ in range(0, total_size, step):
    yield os.urandom(random_block_size)
    yield padding
|
2015-04-01 20:21:55 +02:00
|
|
|
|
2015-01-28 00:53:15 +01:00
|
|
|
|
|
|
|
class CommonZipTest(unittest.TestCase):
  """Tests for the zip helpers in common.py.

  Covers common.ZipWrite(), common.ZipWriteStr(), common.ZipClose() and
  common.ZipDelete(), including the large-file (>2 GiB) paths that rely on
  temporarily raising zipfile.ZIP64_LIMIT.
  """

  def _verify(self, zip_file, zip_file_name, arcname, expected_hash,
              test_file_name=None, expected_stat=None, expected_mode=0o644,
              expected_compress_type=zipfile.ZIP_STORED):
    """Checks that entry 'arcname' in the written zip matches expectations.

    Verifies (a) the source file's stat is unchanged (when given), (b) the
    entry carries the fixed 2009-01-01 timestamp, (c) the unix mode stored
    in external_attr, (d) the compress type, and (e) the SHA-1 of the entry
    contents against expected_hash.
    """
    # Verify the stat if present.
    if test_file_name is not None:
      new_stat = os.stat(test_file_name)
      self.assertEqual(int(expected_stat.st_mode), int(new_stat.st_mode))
      self.assertEqual(int(expected_stat.st_mtime), int(new_stat.st_mtime))

    # Reopen the zip file to verify.
    zip_file = zipfile.ZipFile(zip_file_name, "r")

    # Verify the timestamp.
    info = zip_file.getinfo(arcname)
    self.assertEqual(info.date_time, (2009, 1, 1, 0, 0, 0))

    # Verify the file mode. The unix permission bits live in the upper 16
    # bits of external_attr.
    mode = (info.external_attr >> 16) & 0o777
    self.assertEqual(mode, expected_mode)

    # Verify the compress type.
    self.assertEqual(info.compress_type, expected_compress_type)

    # Verify the zip contents. Stream in 4 MiB chunks so >2 GiB entries do
    # not need to fit in memory.
    entry = zip_file.open(arcname)
    sha1_hash = sha1()
    for chunk in iter(lambda: entry.read(4 * MiB), ''):
      sha1_hash.update(chunk)
    self.assertEqual(expected_hash, sha1_hash.hexdigest())
    self.assertIsNone(zip_file.testzip())

  def _test_ZipWrite(self, contents, extra_zipwrite_args=None):
    """Writes 'contents' (an iterable of chunks) to a temp file, adds it to
    a zip via common.ZipWrite(), and verifies the resulting entry."""
    extra_zipwrite_args = dict(extra_zipwrite_args or {})

    test_file = tempfile.NamedTemporaryFile(delete=False)
    test_file_name = test_file.name

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name

    # File names within an archive strip the leading slash.
    arcname = extra_zipwrite_args.get("arcname", test_file_name)
    if arcname[0] == "/":
      arcname = arcname[1:]

    zip_file.close()
    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      sha1_hash = sha1()
      for data in contents:
        sha1_hash.update(data)
        test_file.write(data)
      test_file.close()

      expected_stat = os.stat(test_file_name)
      expected_mode = extra_zipwrite_args.get("perms", 0o644)
      expected_compress_type = extra_zipwrite_args.get("compress_type",
                                                       zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      common.ZipWrite(zip_file, test_file_name, **extra_zipwrite_args)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, arcname, sha1_hash.hexdigest(),
                   test_file_name, expected_stat, expected_mode,
                   expected_compress_type)
    finally:
      os.remove(test_file_name)
      os.remove(zip_file_name)

  def _test_ZipWriteStr(self, zinfo_or_arcname, contents, extra_args=None):
    """Writes 'contents' into a zip via common.ZipWriteStr() — passing
    either an arcname string or a zipfile.ZipInfo — and verifies it."""
    extra_args = dict(extra_args or {})

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name
    zip_file.close()

    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      expected_compress_type = extra_args.get("compress_type",
                                              zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
        arcname = zinfo_or_arcname
        expected_mode = extra_args.get("perms", 0o644)
      else:
        arcname = zinfo_or_arcname.filename
        # With a ZipInfo, the default mode comes from the zinfo itself.
        expected_mode = extra_args.get("perms",
                                       zinfo_or_arcname.external_attr >> 16)

      common.ZipWriteStr(zip_file, zinfo_or_arcname, contents, **extra_args)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, arcname, sha1(contents).hexdigest(),
                   expected_mode=expected_mode,
                   expected_compress_type=expected_compress_type)
    finally:
      os.remove(zip_file_name)

  def _test_ZipWriteStr_large_file(self, large, small, extra_args=None):
    """Writes a large file (via ZipWrite) and a small string (via
    ZipWriteStr) into the same archive and verifies both entries."""
    extra_args = dict(extra_args or {})

    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name

    test_file = tempfile.NamedTemporaryFile(delete=False)
    test_file_name = test_file.name

    arcname_large = test_file_name
    arcname_small = "bar"

    # File names within an archive strip the leading slash.
    if arcname_large[0] == "/":
      arcname_large = arcname_large[1:]

    zip_file.close()
    zip_file = zipfile.ZipFile(zip_file_name, "w")

    try:
      sha1_hash = sha1()
      for data in large:
        sha1_hash.update(data)
        test_file.write(data)
      test_file.close()

      expected_stat = os.stat(test_file_name)
      expected_mode = 0o644
      expected_compress_type = extra_args.get("compress_type",
                                              zipfile.ZIP_STORED)
      time.sleep(5)  # Make sure the atime/mtime will change measurably.

      common.ZipWrite(zip_file, test_file_name, **extra_args)
      common.ZipWriteStr(zip_file, arcname_small, small, **extra_args)
      common.ZipClose(zip_file)

      # Verify the contents written by ZipWrite().
      self._verify(zip_file, zip_file_name, arcname_large,
                   sha1_hash.hexdigest(), test_file_name, expected_stat,
                   expected_mode, expected_compress_type)

      # Verify the contents written by ZipWriteStr().
      self._verify(zip_file, zip_file_name, arcname_small,
                   sha1(small).hexdigest(),
                   expected_compress_type=expected_compress_type)
    finally:
      os.remove(zip_file_name)
      os.remove(test_file_name)

  def _test_reset_ZIP64_LIMIT(self, func, *args):
    """Asserts that 'func' restores zipfile.ZIP64_LIMIT after temporarily
    raising it (common.py bumps the limit to allow >2 GiB writes)."""
    default_limit = (1 << 31) - 1
    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
    func(*args)
    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)

  def test_ZipWrite(self):
    file_contents = os.urandom(1024)
    self._test_ZipWrite(file_contents)

  def test_ZipWrite_with_opts(self):
    file_contents = os.urandom(1024)
    self._test_ZipWrite(file_contents, {
        "arcname": "foobar",
        "perms": 0o777,
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWrite(file_contents, {
        "arcname": "foobar",
        "perms": 0o700,
        "compress_type": zipfile.ZIP_STORED,
    })

  def test_ZipWrite_large_file(self):
    # >2 GiB input exercises the ZIP64 path.
    file_contents = get_2gb_string()
    self._test_ZipWrite(file_contents, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })

  def test_ZipWrite_resets_ZIP64_LIMIT(self):
    self._test_reset_ZIP64_LIMIT(self._test_ZipWrite, "")

  def test_ZipWriteStr(self):
    random_string = os.urandom(1024)
    # Passing arcname
    self._test_ZipWriteStr("foo", random_string)

    # Passing zinfo
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_ZipWriteStr(zinfo, random_string)

    # Timestamp in the zinfo should be overwritten.
    zinfo.date_time = (2015, 3, 1, 15, 30, 0)
    self._test_ZipWriteStr(zinfo, random_string)

  def test_ZipWriteStr_with_opts(self):
    random_string = os.urandom(1024)
    # Passing arcname
    self._test_ZipWriteStr("foo", random_string, {
        "perms": 0o700,
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWriteStr("bar", random_string, {
        "compress_type": zipfile.ZIP_STORED,
    })

    # Passing zinfo
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_ZipWriteStr(zinfo, random_string, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })
    self._test_ZipWriteStr(zinfo, random_string, {
        "perms": 0o600,
        "compress_type": zipfile.ZIP_STORED,
    })

  def test_ZipWriteStr_large_file(self):
    # zipfile.writestr() doesn't work when the str size is over 2GiB even with
    # the workaround. We will only test the case of writing a string into a
    # large archive.
    long_string = get_2gb_string()
    short_string = os.urandom(1024)
    self._test_ZipWriteStr_large_file(long_string, short_string, {
        "compress_type": zipfile.ZIP_DEFLATED,
    })

  def test_ZipWriteStr_resets_ZIP64_LIMIT(self):
    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, "foo", "")
    zinfo = zipfile.ZipInfo(filename="foo")
    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, zinfo, "")

  def test_bug21309935(self):
    """Regression test: ZipWriteStr() must honor / default perms correctly
    for both arcname-string and ZipInfo inputs."""
    zip_file = tempfile.NamedTemporaryFile(delete=False)
    zip_file_name = zip_file.name
    zip_file.close()

    try:
      random_string = os.urandom(1024)
      zip_file = zipfile.ZipFile(zip_file_name, "w")
      # Default perms should be 0o644 when passing the filename.
      common.ZipWriteStr(zip_file, "foo", random_string)
      # Honor the specified perms.
      common.ZipWriteStr(zip_file, "bar", random_string, perms=0o755)
      # The perms in zinfo should be untouched.
      zinfo = zipfile.ZipInfo(filename="baz")
      zinfo.external_attr = 0o740 << 16
      common.ZipWriteStr(zip_file, zinfo, random_string)
      # Explicitly specified perms has the priority.
      zinfo = zipfile.ZipInfo(filename="qux")
      zinfo.external_attr = 0o700 << 16
      common.ZipWriteStr(zip_file, zinfo, random_string, perms=0o400)
      common.ZipClose(zip_file)

      self._verify(zip_file, zip_file_name, "foo",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o644)
      self._verify(zip_file, zip_file_name, "bar",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o755)
      self._verify(zip_file, zip_file_name, "baz",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o740)
      self._verify(zip_file, zip_file_name, "qux",
                   sha1(random_string).hexdigest(),
                   expected_mode=0o400)
    finally:
      os.remove(zip_file_name)

  def test_ZipDelete(self):
    """Checks common.ZipDelete() with a single entry, a missing entry
    (which must raise AssertionError), and lists of entries."""
    zip_file = tempfile.NamedTemporaryFile(delete=False, suffix='.zip')
    output_zip = zipfile.ZipFile(zip_file.name, 'w',
                                 compression=zipfile.ZIP_DEFLATED)
    with tempfile.NamedTemporaryFile() as entry_file:
      entry_file.write(os.urandom(1024))
      common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
      common.ZipWrite(output_zip, entry_file.name, arcname='Test3')
      common.ZipClose(output_zip)
    zip_file.close()

    try:
      common.ZipDelete(zip_file.name, 'Test2')
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertTrue('Test3' in entries)

      # Deleting an entry that is no longer present must fail.
      self.assertRaises(AssertionError, common.ZipDelete, zip_file.name,
                        'Test2')
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertTrue('Test3' in entries)

      common.ZipDelete(zip_file.name, ['Test3'])
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertTrue('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertFalse('Test3' in entries)

      common.ZipDelete(zip_file.name, ['Test1', 'Test2'])
      with zipfile.ZipFile(zip_file.name, 'r') as check_zip:
        entries = check_zip.namelist()
        self.assertFalse('Test1' in entries)
        self.assertFalse('Test2' in entries)
        self.assertFalse('Test3' in entries)
    finally:
      os.remove(zip_file.name)
|
|
|
|
2018-01-05 20:17:34 +01:00
|
|
|
class CommonApkUtilsTest(unittest.TestCase):
  """Tests the APK utils related functions."""

  # Sample META/apkcerts.txt with non-compressed APK entries only.
  APKCERTS_TXT1 = (
      'name="RecoveryLocalizer.apk" certificate="certs/devkey.x509.pem"'
      ' private_key="certs/devkey.pk8"\n'
      'name="Settings.apk"'
      ' certificate="build/target/product/security/platform.x509.pem"'
      ' private_key="build/target/product/security/platform.pk8"\n'
      'name="TV.apk" certificate="PRESIGNED" private_key=""\n'
  )

  # Expected APK-name -> cert-path mapping for APKCERTS_TXT1.
  APKCERTS_CERTMAP1 = {
      'RecoveryLocalizer.apk' : 'certs/devkey',
      'Settings.apk' : 'build/target/product/security/platform',
      'TV.apk' : 'PRESIGNED',
  }

  # Sample apkcerts.txt with gz-compressed APK entries.
  APKCERTS_TXT2 = (
      'name="Compressed1.apk" certificate="certs/compressed1.x509.pem"'
      ' private_key="certs/compressed1.pk8" compressed="gz"\n'
      'name="Compressed2a.apk" certificate="certs/compressed2.x509.pem"'
      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
      'name="Compressed2b.apk" certificate="certs/compressed2.x509.pem"'
      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
      'name="Compressed3.apk" certificate="certs/compressed3.x509.pem"'
      ' private_key="certs/compressed3.pk8" compressed="gz"\n'
  )

  # Expected mapping for APKCERTS_TXT2.
  APKCERTS_CERTMAP2 = {
      'Compressed1.apk' : 'certs/compressed1',
      'Compressed2a.apk' : 'certs/compressed2',
      'Compressed2b.apk' : 'certs/compressed2',
      'Compressed3.apk' : 'certs/compressed3',
  }

  # Sample apkcerts.txt with an xz-compressed APK entry.
  APKCERTS_TXT3 = (
      'name="Compressed4.apk" certificate="certs/compressed4.x509.pem"'
      ' private_key="certs/compressed4.pk8" compressed="xz"\n'
  )

  # Expected mapping for APKCERTS_TXT3.
  APKCERTS_CERTMAP3 = {
      'Compressed4.apk' : 'certs/compressed4',
  }

  def tearDown(self):
    # Clean up temp files created via common.MakeTempFile().
    common.Cleanup()

  @staticmethod
  def _write_apkcerts_txt(apkcerts_txt, additional=None):
    """Creates a temp target_files zip holding META/apkcerts.txt plus any
    'additional' (empty) entries; returns the zip's path."""
    if additional is None:
      additional = []
    target_files = common.MakeTempFile(suffix='.zip')
    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
      target_files_zip.writestr('META/apkcerts.txt', apkcerts_txt)
      for entry in additional:
        target_files_zip.writestr(entry, '')
    return target_files

  def test_ReadApkCerts_NoncompressedApks(self):
    target_files = self._write_apkcerts_txt(self.APKCERTS_TXT1)
    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP1, certmap)
    self.assertIsNone(ext)

  def test_ReadApkCerts_CompressedApks(self):
    # We have "installed" Compressed1.apk.gz only. Note that Compressed3.apk is
    # not stored in '.gz' format, so it shouldn't be considered as installed.
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT2,
        ['Compressed1.apk.gz', 'Compressed3.apk'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP2, certmap)
    self.assertEqual('.gz', ext)

    # Alternative case with '.xz'.
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT3, ['Compressed4.apk.xz'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    self.assertDictEqual(self.APKCERTS_CERTMAP3, certmap)
    self.assertEqual('.xz', ext)

  def test_ReadApkCerts_CompressedAndNoncompressedApks(self):
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT1 + self.APKCERTS_TXT2,
        ['Compressed1.apk.gz', 'Compressed3.apk'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      certmap, ext = common.ReadApkCerts(input_zip)

    certmap_merged = self.APKCERTS_CERTMAP1.copy()
    certmap_merged.update(self.APKCERTS_CERTMAP2)
    self.assertDictEqual(certmap_merged, certmap)
    self.assertEqual('.gz', ext)

  def test_ReadApkCerts_MultipleCompressionMethods(self):
    # Mixing '.gz' and '.xz' installed files must be rejected.
    target_files = self._write_apkcerts_txt(
        self.APKCERTS_TXT2 + self.APKCERTS_TXT3,
        ['Compressed1.apk.gz', 'Compressed4.apk.xz'])

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)

  def test_ReadApkCerts_MismatchingKeys(self):
    # Certificate and private key names must match (cert1 vs cert2 here).
    malformed_apkcerts_txt = (
        'name="App1.apk" certificate="certs/cert1.x509.pem"'
        ' private_key="certs/cert2.pk8"\n'
    )
    target_files = self._write_apkcerts_txt(malformed_apkcerts_txt)

    with zipfile.ZipFile(target_files, 'r') as input_zip:
      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)
|
|
|
|
2017-06-21 02:00:55 +02:00
|
|
|
class InstallRecoveryScriptFormatTest(unittest.TestCase):
  """Checks the format of install-recovery.sh.

  Its format should match between common.py and validate_target_files.py.
  """

  def setUp(self):
    self._tempdir = common.MakeTempDir()
    # Create a dummy dict that contains the fstab info for boot&recovery.
    self._info = {"fstab" : {}}
    dummy_fstab = [
        "/dev/soc.0/by-name/boot /boot emmc defaults defaults",
        "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
    self._info["fstab"] = common.LoadRecoveryFSTab("\n".join, 2, dummy_fstab)
    # Construct the gzipped recovery.img and boot.img
    # NOTE(review): presumably produced by `echo -n "recovery" | gzip -f | hd`
    # like boot_data below — confirm against the original change.
    self.recovery_data = bytearray([
        0x1f, 0x8b, 0x08, 0x00, 0x81, 0x11, 0x02, 0x5a, 0x00, 0x03, 0x2b, 0x4a,
        0x4d, 0xce, 0x2f, 0x4b, 0x2d, 0xaa, 0x04, 0x00, 0xc9, 0x93, 0x43, 0xf3,
        0x08, 0x00, 0x00, 0x00
    ])
    # echo -n "boot" | gzip -f | hd
    self.boot_data = bytearray([
        0x1f, 0x8b, 0x08, 0x00, 0x8c, 0x12, 0x02, 0x5a, 0x00, 0x03, 0x4b, 0xca,
        0xcf, 0x2f, 0x01, 0x00, 0xc4, 0xae, 0xed, 0x46, 0x04, 0x00, 0x00, 0x00
    ])

  def _out_tmp_sink(self, name, data, prefix="SYSTEM"):
    """Writes 'data' to <tempdir>/<prefix>/<name>, creating parent dirs as
    needed; passed to MakeRecoveryPatch() as its output sink."""
    loc = os.path.join(self._tempdir, prefix, name)
    if not os.path.exists(os.path.dirname(loc)):
      os.makedirs(os.path.dirname(loc))
    with open(loc, "w+") as f:
      f.write(data)

  def test_full_recovery(self):
    recovery_image = common.File("recovery.img", self.recovery_data)
    boot_image = common.File("boot.img", self.boot_data)
    self._info["full_recovery_image"] = "true"

    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)

  def test_recovery_from_boot(self):
    recovery_image = common.File("recovery.img", self.recovery_data)
    self._out_tmp_sink("recovery.img", recovery_image.data, "IMAGES")
    boot_image = common.File("boot.img", self.boot_data)
    self._out_tmp_sink("boot.img", boot_image.data, "IMAGES")

    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)
    # Validate 'recovery-from-boot' with bonus argument.
    self._out_tmp_sink("etc/recovery-resource.dat", "bonus", "SYSTEM")
    common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
                             recovery_image, boot_image, self._info)
    validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                        self._info)

  def tearDown(self):
    # Remove the temp dir created in setUp() and any other temp files.
    common.Cleanup()