2019-02-02 00:52:10 +01:00
|
|
|
#!/usr/bin/env python
|
|
|
|
#
|
|
|
|
# Copyright (C) 2019 The Android Open Source Project
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
|
|
|
|
# use this file except in compliance with the License. You may obtain a copy of
|
|
|
|
# the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
|
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
|
|
# License for the specific language governing permissions and limitations under
|
|
|
|
# the License.
|
2019-04-17 23:54:06 +02:00
|
|
|
"""This script merges two partial target files packages.
|
2019-02-02 00:52:10 +01:00
|
|
|
|
2019-04-17 23:54:06 +02:00
|
|
|
One package contains system files, and the other contains non-system files.
|
|
|
|
It produces a complete target files package that can be used to generate an
|
|
|
|
OTA package.
|
2019-02-02 00:52:10 +01:00
|
|
|
|
|
|
|
Usage: merge_target_files.py [args]
|
|
|
|
|
|
|
|
--system-target-files system-target-files-zip-archive
|
|
|
|
The input target files package containing system bits. This is a zip
|
|
|
|
archive.
|
|
|
|
|
2019-03-07 22:01:48 +01:00
|
|
|
--system-item-list system-item-list-file
|
|
|
|
The optional path to a newline-separated config file that replaces the
|
|
|
|
contents of default_system_item_list if provided.
|
|
|
|
|
|
|
|
--system-misc-info-keys system-misc-info-keys-file
|
|
|
|
The optional path to a newline-separated config file that replaces the
|
|
|
|
contents of default_system_misc_info_keys if provided.
|
|
|
|
|
2019-02-02 00:52:10 +01:00
|
|
|
--other-target-files other-target-files-zip-archive
|
|
|
|
The input target files package containing other bits. This is a zip
|
|
|
|
archive.
|
|
|
|
|
2019-03-07 22:01:48 +01:00
|
|
|
--other-item-list other-item-list-file
|
|
|
|
The optional path to a newline-separated config file that replaces the
|
|
|
|
contents of default_other_item_list if provided.
|
|
|
|
|
2019-02-02 00:52:10 +01:00
|
|
|
--output-target-files output-target-files-package
|
2019-04-15 18:47:24 +02:00
|
|
|
If provided, the output merged target files package. Also a zip archive.
|
|
|
|
|
|
|
|
--output-dir output-directory
|
|
|
|
If provided, the destination directory for saving merged files. Requires
|
|
|
|
the --output-item-list flag.
|
|
|
|
Can be provided alongside --output-target-files, or by itself.
|
|
|
|
|
|
|
|
--output-item-list output-item-list-file.
|
|
|
|
The optional path to a newline-separated config file that specifies the
|
|
|
|
file patterns to copy into the --output-dir. Required if providing
|
|
|
|
the --output-dir flag.
|
2019-03-15 22:36:21 +01:00
|
|
|
|
2019-04-17 01:11:35 +02:00
|
|
|
--output-ota output-ota-package
|
|
|
|
The output ota package. This is a zip archive. Use of this flag may
|
|
|
|
require passing the --path common flag; see common.py.
|
|
|
|
|
2019-04-18 21:32:18 +02:00
|
|
|
--output-img output-img-package
|
|
|
|
The output img package, suitable for use with 'fastboot update'. Use of
|
|
|
|
this flag may require passing the --path common flag; see common.py.
|
|
|
|
|
2019-04-15 20:34:56 +02:00
|
|
|
--output-super-empty output-super-empty-image
|
|
|
|
If provided, creates a super_empty.img file from the merged target
|
|
|
|
files package and saves it at this path.
|
|
|
|
|
2019-03-15 22:36:21 +01:00
|
|
|
--rebuild_recovery
|
|
|
|
Rebuild the recovery patch used by non-A/B devices and write it to the
|
|
|
|
system image.
|
2019-03-30 02:27:23 +01:00
|
|
|
|
|
|
|
--keep-tmp
|
|
|
|
      Keep temporary files for debugging purposes.
|
2019-02-02 00:52:10 +01:00
|
|
|
"""
|
|
|
|
|
|
|
|
from __future__ import print_function
|
|
|
|
|
|
|
|
import fnmatch
|
|
|
|
import logging
|
|
|
|
import os
|
2019-04-15 18:47:24 +02:00
|
|
|
import shutil
|
2019-04-25 23:18:16 +02:00
|
|
|
import subprocess
|
2019-02-02 00:52:10 +01:00
|
|
|
import sys
|
|
|
|
import zipfile
|
|
|
|
|
|
|
|
import add_img_to_target_files
|
2019-04-15 20:34:56 +02:00
|
|
|
import build_super_image
|
|
|
|
import common
|
2019-04-18 21:32:18 +02:00
|
|
|
import img_from_target_files
|
2019-04-17 01:11:35 +02:00
|
|
|
import ota_from_target_files
|
2019-02-02 00:52:10 +01:00
|
|
|
|
|
|
|
# Module-level logger for this script.
logger = logging.getLogger(__name__)

# Shared releasetools options object. Each assignment below declares the
# default value for the corresponding command-line flag documented in the
# module docstring above.
OPTIONS = common.OPTIONS
OPTIONS.verbose = True
OPTIONS.system_target_files = None
OPTIONS.system_item_list = None
OPTIONS.system_misc_info_keys = None
OPTIONS.other_target_files = None
OPTIONS.other_item_list = None
OPTIONS.output_target_files = None
OPTIONS.output_dir = None
OPTIONS.output_item_list = None
OPTIONS.output_ota = None
OPTIONS.output_img = None
OPTIONS.output_super_empty = None
OPTIONS.rebuild_recovery = False
OPTIONS.keep_tmp = False
|
2019-02-02 00:52:10 +01:00
|
|
|
|
2019-03-07 22:01:48 +01:00
|
|
|
# default_system_item_list is a list of items to extract from the partial
# system target files package as is, meaning these items will land in the
# output target files package exactly as they appear in the input partial
# system target files package. Replaced by the contents of the
# --system-item-list file if that flag is provided.

default_system_item_list = [
    'META/apkcerts.txt',
    'META/filesystem_config.txt',
    'META/root_filesystem_config.txt',
    'META/system_manifest.xml',
    'META/system_matrix.xml',
    'META/update_engine_config.txt',
    'PRODUCT/*',
    'ROOT/*',
    'SYSTEM/*',
]
|
|
|
|
|
|
|
|
# system_extract_special_item_list is a list of items to extract from the
# partial system target files package that need some special processing, such
# as some sort of combination with items from the partial other target files
# package.

system_extract_special_item_list = [
    'META/*',
]
|
|
|
|
|
2019-04-17 23:54:06 +02:00
|
|
|
# default_system_misc_info_keys is a list of keys to obtain from the system
# instance of META/misc_info.txt. The remaining keys come from the other
# instance. Replaced by the contents of the --system-misc-info-keys file if
# that flag is provided.

default_system_misc_info_keys = [
    'avb_system_hashtree_enable',
    'avb_system_add_hashtree_footer_args',
    'avb_system_key_path',
    'avb_system_algorithm',
    'avb_system_rollback_index_location',
    'avb_product_hashtree_enable',
    'avb_product_add_hashtree_footer_args',
    'avb_product_services_hashtree_enable',
    'avb_product_services_add_hashtree_footer_args',
    'system_root_image',
    'root_dir',
    'ab_update',
    'default_system_dev_certificate',
    'system_size',
]
|
|
|
|
|
2019-03-07 22:01:48 +01:00
|
|
|
# default_other_item_list is a list of items to extract from the partial
# other target files package as is, meaning these items will land in the output
# target files package exactly as they appear in the input partial other target
# files package. Replaced by the contents of the --other-item-list file if
# that flag is provided.

default_other_item_list = [
    'META/boot_filesystem_config.txt',
    'META/otakeys.txt',
    'META/releasetools.py',
    'META/vendor_filesystem_config.txt',
    'META/vendor_manifest.xml',
    'META/vendor_matrix.xml',
    'BOOT/*',
    'DATA/*',
    'ODM/*',
    'OTA/android-info.txt',
    'PREBUILT_IMAGES/*',
    'RADIO/*',
    'VENDOR/*',
]
|
|
|
|
|
2019-03-07 22:01:48 +01:00
|
|
|
# other_extract_special_item_list is a list of items to extract from the
# partial other target files package that need some special processing, such as
# some sort of combination with items from the partial system target files
# package.

other_extract_special_item_list = [
    'META/*',
]
|
|
|
|
|
|
|
|
|
|
|
|
def extract_items(target_files, target_files_temp_dir, extract_item_list):
  """Extract items from target files to temporary directory.

  This function extracts from the specified target files zip archive into the
  specified temporary directory, the items specified in the extract item list.

  Args:
    target_files: The target files zip archive from which to extract items.
    target_files_temp_dir: The temporary directory where the extracted items
      will land.
    extract_item_list: A list of items to extract.
  """

  logger.info('extracting from %s', target_files)

  # A pattern that matches nothing in the archive would make the extraction
  # step fail, so drop such patterns up front.
  with zipfile.ZipFile(
      target_files, 'r', allowZip64=True) as target_files_zipfile:
    archive_names = target_files_zipfile.namelist()

  usable_patterns = []
  for item_pattern in extract_item_list:
    if fnmatch.filter(archive_names, item_pattern):
      usable_patterns.append(item_pattern)
    else:
      logger.warning('no match for %s', item_pattern)

  # Extract the surviving patterns from target_files into
  # target_files_temp_dir.
  common.UnzipToDir(target_files, target_files_temp_dir, usable_patterns)
|
2019-02-02 00:52:10 +01:00
|
|
|
|
|
|
|
|
2019-04-15 18:47:24 +02:00
|
|
|
def copy_items(from_dir, to_dir, patterns):
  """Similar to extract_items() except uses an input dir instead of zip."""
  # Collect every file path under from_dir, relative to from_dir, so the
  # fnmatch patterns apply the same way they do to zip entry names.
  relative_paths = []
  for root, _, names in os.walk(from_dir):
    for name in names:
      relative_paths.append(
          os.path.relpath(path=os.path.join(root, name), start=from_dir))

  # A set avoids copying the same file twice when patterns overlap.
  selected_paths = set()
  for pattern in patterns:
    selected_paths.update(fnmatch.filter(relative_paths, pattern))

  for rel_path in selected_paths:
    source_path = os.path.join(from_dir, rel_path)
    dest_path = os.path.join(to_dir, rel_path)
    dest_dir = os.path.dirname(dest_path)
    if not os.path.exists(dest_dir):
      os.makedirs(dest_dir)
    # Recreate symlinks rather than copying their targets.
    if os.path.islink(source_path):
      os.symlink(os.readlink(source_path), dest_path)
    else:
      shutil.copyfile(source_path, dest_path)
|
|
|
|
|
|
|
|
|
2019-03-07 22:01:48 +01:00
|
|
|
def read_config_list(config_file_path):
  """Reads a config file into a list of strings.

  Expects the file to be newline-separated.

  Args:
    config_file_path: The path to the config file to open and read.

  Returns:
    The list of strings in the config file.
  """
  with open(config_file_path) as config_file:
    contents = config_file.read()
  return contents.splitlines()
|
|
|
|
|
|
|
|
|
2019-04-17 23:54:06 +02:00
|
|
|
def validate_config_lists(system_item_list, system_misc_info_keys,
                          other_item_list):
  """Performs validations on the merge config lists.

  Args:
    system_item_list: The list of items to extract from the partial system
      target files package as is.
    system_misc_info_keys: A list of keys to obtain from the system instance of
      META/misc_info.txt. The remaining keys come from the other instance.
    other_item_list: The list of items to extract from the partial other target
      files package as is.

  Returns:
    False if a validation fails, otherwise true.
  """
  # The union of the provided item lists must cover everything the default
  # lists would have extracted; otherwise items would silently go missing
  # from the merged package.
  default_combined = set(default_system_item_list)
  default_combined.update(default_other_item_list)

  provided_combined = set(system_item_list)
  provided_combined.update(other_item_list)

  missing_items = default_combined.difference(provided_combined)
  if missing_items:
    logger.error('Missing merge config items: %s', list(missing_items))
    logger.error('Please ensure missing items are in either the '
                 'system-item-list or other-item-list files provided to '
                 'this script.')
    return False

  # Dynamic partition keys must always be sourced from the other (non-system)
  # misc_info.txt, so reject configs that request them from system.
  if ('dynamic_partition_list' in system_misc_info_keys) or (
      'super_partition_groups' in system_misc_info_keys):
    logger.error('Dynamic partition misc info keys should come from '
                 'the other instance of META/misc_info.txt.')
    return False

  return True
|
|
|
|
|
|
|
|
|
2019-04-17 23:54:06 +02:00
|
|
|
def process_ab_partitions_txt(system_target_files_temp_dir,
                              other_target_files_temp_dir,
                              output_target_files_temp_dir):
  """Perform special processing for META/ab_partitions.txt.

  This function merges the contents of the META/ab_partitions.txt files from
  the system directory and the other directory, placing the merged result in
  the output directory. The precondition is that the files are already
  extracted. The post condition is that the output META/ab_partitions.txt
  contains the merged content. The format for each ab_partitions.txt is one
  partition name per line. The output file contains the union of the partition
  names.

  Args:
    system_target_files_temp_dir: The name of a directory containing the
      special items extracted from the system target files package.
    other_target_files_temp_dir: The name of a directory containing the
      special items extracted from the other target files package.
    output_target_files_temp_dir: The name of a directory that will be used to
      create the output target files package after all the special cases are
      processed.
  """

  def read_partitions(temp_dir):
    # Each input file lists one partition name per line.
    path = os.path.join(temp_dir, 'META', 'ab_partitions.txt')
    with open(path) as f:
      return f.read().splitlines()

  merged_partitions = set(read_partitions(system_target_files_temp_dir))
  merged_partitions.update(read_partitions(other_target_files_temp_dir))

  output_path = os.path.join(output_target_files_temp_dir, 'META',
                             'ab_partitions.txt')

  # Write the union, sorted for a deterministic output file.
  with open(output_path, 'w') as output:
    for partition in sorted(merged_partitions):
      output.write('%s\n' % partition)
|
|
|
|
|
|
|
|
|
2019-03-30 02:27:23 +01:00
|
|
|
def append_recovery_to_filesystem_config(output_target_files_temp_dir):
  """Perform special processing for META/filesystem_config.txt.

  This function appends recovery information to META/filesystem_config.txt
  so that recovery patch regeneration will succeed.

  Args:
    output_target_files_temp_dir: The name of a directory that will be used to
      create the output target files package after all the special cases are
      processed. We find filesystem_config.txt here.
  """

  config_path = os.path.join(output_target_files_temp_dir, 'META',
                             'filesystem_config.txt')

  # TODO(bpeckham) this data is hard coded. It should be generated
  # programmatically.
  recovery_entries = (
      'system/bin/install-recovery.sh 0 0 750 '
      'selabel=u:object_r:install_recovery_exec:s0 capabilities=0x0\n'
      'system/recovery-from-boot.p 0 0 644 '
      'selabel=u:object_r:system_file:s0 capabilities=0x0\n'
      'system/etc/recovery.img 0 0 440 '
      'selabel=u:object_r:install_recovery_exec:s0 capabilities=0x0\n')

  with open(config_path, 'a') as f:
    f.write(recovery_entries)
|
|
|
|
|
|
|
|
|
|
|
|
def process_misc_info_txt(system_target_files_temp_dir,
                          other_target_files_temp_dir,
                          output_target_files_temp_dir, system_misc_info_keys):
  """Perform special processing for META/misc_info.txt.

  This function merges the contents of the META/misc_info.txt files from the
  system directory and the other directory, placing the merged result in the
  output directory. The precondition is that the files are already extracted.
  The post condition is that the output META/misc_info.txt contains the merged
  content.

  Args:
    system_target_files_temp_dir: The name of a directory containing the
      special items extracted from the system target files package.
    other_target_files_temp_dir: The name of a directory containing the
      special items extracted from the other target files package.
    output_target_files_temp_dir: The name of a directory that will be used to
      create the output target files package after all the special cases are
      processed.
    system_misc_info_keys: A list of keys to obtain from the system instance
      of META/misc_info.txt. The remaining keys come from the other instance.
  """

  def load_misc_info(base_dir):
    # misc_info.txt is a newline-separated key=value file.
    path = os.path.join(base_dir, 'META', 'misc_info.txt')
    with open(path) as f:
      return common.LoadDictionaryFromLines(f.read().splitlines())

  system_info = load_misc_info(system_target_files_temp_dir)

  # We take most of the misc info from the other target files, then replace
  # the keys that must come from the system instance.
  merged_info = load_misc_info(other_target_files_temp_dir)
  for key in system_misc_info_keys:
    merged_info[key] = system_info[key]

  # Merge misc info keys used for Dynamic Partitions.
  if (merged_info.get('use_dynamic_partitions') == 'true') and (
      system_info.get('use_dynamic_partitions') == 'true'):
    merged_info['dynamic_partition_list'] = '%s %s' % (
        system_info.get('dynamic_partition_list', ''),
        merged_info.get('dynamic_partition_list', ''))
    # Partition groups and group sizes are defined by the other (non-system)
    # misc info file because these values may vary for each board that uses
    # a shared system image.
    for group in merged_info['super_partition_groups'].split(' '):
      if ('super_%s_group_size' % group) not in merged_info:
        raise ValueError(
            'Other META/misc_info.txt does not contain required key '
            'super_%s_group_size.' % group)
      key = 'super_%s_partition_list' % group
      merged_info[key] = '%s %s' % (system_info.get(key, ''),
                                    merged_info.get(key, ''))

  output_path = os.path.join(output_target_files_temp_dir, 'META',
                             'misc_info.txt')

  # Sort the keys for a deterministic output file.
  with open(output_path, 'w') as output:
    for key in sorted(merged_info.keys()):
      output.write('{}={}\n'.format(key, merged_info[key]))
|
|
|
|
|
|
|
|
|
|
|
|
def process_file_contexts_bin(temp_dir, output_target_files_temp_dir):
  """Perform special processing for META/file_contexts.bin.

  This function combines plat_file_contexts and vendor_file_contexts, which are
  expected to already be extracted in temp_dir, to produce a merged
  file_contexts.bin that will land in temp_dir at META/file_contexts.bin.

  Args:
    temp_dir: The name of a scratch directory that this function can use for
      intermediate files generated during processing.
    output_target_files_temp_dir: The name of the working directory that must
      already contain plat_file_contexts and vendor_file_contexts (in the
      appropriate sub directories), and to which META/file_contexts.bin will be
      written.
  """

  # To create a merged file_contexts.bin file, we use the system and vendor
  # file contexts files as input, the m4 tool to combine them, the sorting tool
  # to sort, and finally the sefcontext_compile tool to generate the final
  # output. We currently omit a checkfc step since the files had been checked
  # as part of the build.

  # The m4 step concatenates the two input files contexts files. Since m4
  # writes to stdout, we receive that into an array of bytes, and then write it
  # to a file.

  # Collect the file contexts that we're going to combine from SYSTEM, VENDOR,
  # PRODUCT, and ODM. We require SYSTEM and VENDOR, but others are optional.

  file_contexts_list = []

  for partition in ['SYSTEM', 'VENDOR', 'PRODUCT', 'ODM']:
    # The SYSTEM partition's file contexts use the 'plat' prefix; the other
    # partitions use their lowercase partition name.
    prefix = 'plat' if partition == 'SYSTEM' else partition.lower()

    file_contexts = os.path.join(output_target_files_temp_dir, partition, 'etc',
                                 'selinux', prefix + '_file_contexts')

    mandatory = partition in ['SYSTEM', 'VENDOR']

    if mandatory or os.path.isfile(file_contexts):
      file_contexts_list.append(file_contexts)
    else:
      logger.warning('file not found: %s', file_contexts)

  command = ['m4', '--fatal-warnings', '-s'] + file_contexts_list

  merged_content = common.RunAndCheckOutput(command, verbose=False)

  merged_file_contexts_txt = os.path.join(temp_dir, 'merged_file_contexts.txt')

  # NOTE(review): opened in binary mode, which assumes
  # common.RunAndCheckOutput returns bytes here — confirm under Python 3.
  with open(merged_file_contexts_txt, 'wb') as f:
    f.write(merged_content)

  # The sort step sorts the concatenated file.

  sorted_file_contexts_txt = os.path.join(temp_dir, 'sorted_file_contexts.txt')
  command = ['fc_sort', merged_file_contexts_txt, sorted_file_contexts_txt]
  common.RunAndWait(command, verbose=True)

  # Finally, the compile step creates the final META/file_contexts.bin.

  file_contexts_bin = os.path.join(output_target_files_temp_dir, 'META',
                                   'file_contexts.bin')

  command = [
      'sefcontext_compile',
      '-o',
      file_contexts_bin,
      sorted_file_contexts_txt,
  ]

  common.RunAndWait(command, verbose=True)
|
2019-02-02 00:52:10 +01:00
|
|
|
|
|
|
|
|
2019-04-17 23:54:06 +02:00
|
|
|
def process_special_cases(temp_dir, system_target_files_temp_dir,
                          other_target_files_temp_dir,
                          output_target_files_temp_dir, system_misc_info_keys,
                          rebuild_recovery):
  """Perform special-case processing for certain target files items.

  Certain files in the output target files package require special-case
  processing. This function performs all that special-case processing.

  Args:
    temp_dir: The name of a scratch directory that this function can use for
      intermediate files generated during processing.
    system_target_files_temp_dir: The name of a directory containing the special
      items extracted from the system target files package.
    other_target_files_temp_dir: The name of a directory containing the special
      items extracted from the other target files package.
    output_target_files_temp_dir: The name of a directory that will be used to
      create the output target files package after all the special cases are
      processed.
    system_misc_info_keys: A list of keys to obtain from the system instance of
      META/misc_info.txt. The remaining keys from the other instance.
    rebuild_recovery: If true, rebuild the recovery patch used by non-A/B
      devices and write it to the system image.
  """

  # Only merge ab_partitions.txt when 'ab_update' is among the system misc
  # info keys — presumably this marks an A/B build, where the file exists.
  # TODO(review): confirm this is the intended A/B indicator.
  if 'ab_update' in system_misc_info_keys:
    process_ab_partitions_txt(
        system_target_files_temp_dir=system_target_files_temp_dir,
        other_target_files_temp_dir=other_target_files_temp_dir,
        output_target_files_temp_dir=output_target_files_temp_dir)

  # Append recovery entries before misc info is merged, so that the patch
  # regeneration for non-A/B devices can succeed later.
  if rebuild_recovery:
    append_recovery_to_filesystem_config(
        output_target_files_temp_dir=output_target_files_temp_dir)

  process_misc_info_txt(
      system_target_files_temp_dir=system_target_files_temp_dir,
      other_target_files_temp_dir=other_target_files_temp_dir,
      output_target_files_temp_dir=output_target_files_temp_dir,
      system_misc_info_keys=system_misc_info_keys)

  process_file_contexts_bin(
      temp_dir=temp_dir,
      output_target_files_temp_dir=output_target_files_temp_dir)
|
|
|
|
|
|
|
|
|
2019-04-17 23:54:06 +02:00
|
|
|
def merge_target_files(temp_dir, system_target_files, system_item_list,
|
|
|
|
system_misc_info_keys, other_target_files,
|
|
|
|
other_item_list, output_target_files, output_dir,
|
2019-04-18 21:32:18 +02:00
|
|
|
output_item_list, output_ota, output_img,
|
|
|
|
output_super_empty, rebuild_recovery):
|
2019-02-02 00:52:10 +01:00
|
|
|
"""Merge two target files packages together.
|
|
|
|
|
|
|
|
This function takes system and other target files packages as input, performs
|
|
|
|
various file extractions, special case processing, and finally creates a
|
|
|
|
merged zip archive as output.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
temp_dir: The name of a directory we use when we extract items from the
|
2019-04-17 23:54:06 +02:00
|
|
|
input target files packages, and also a scratch directory that we use for
|
|
|
|
temporary files.
|
2019-02-02 00:52:10 +01:00
|
|
|
system_target_files: The name of the zip archive containing the system
|
2019-04-17 23:54:06 +02:00
|
|
|
partial target files package.
|
2019-03-07 22:01:48 +01:00
|
|
|
system_item_list: The list of items to extract from the partial system
|
2019-04-17 23:54:06 +02:00
|
|
|
target files package as is, meaning these items will land in the output
|
|
|
|
target files package exactly as they appear in the input partial system
|
|
|
|
target files package.
|
2019-03-07 22:01:48 +01:00
|
|
|
system_misc_info_keys: The list of keys to obtain from the system instance
|
2019-04-17 23:54:06 +02:00
|
|
|
of META/misc_info.txt. The remaining keys from the other instance.
|
|
|
|
other_target_files: The name of the zip archive containing the other partial
|
|
|
|
target files package.
|
|
|
|
other_item_list: The list of items to extract from the partial other target
|
|
|
|
files package as is, meaning these items will land in the output target
|
|
|
|
files package exactly as they appear in the input partial other target
|
|
|
|
files package.
|
|
|
|
output_target_files: The name of the output zip archive target files package
|
|
|
|
created by merging system and other.
|
|
|
|
output_dir: The destination directory for saving merged files.
|
|
|
|
output_item_list: The list of items to copy into the output_dir.
|
2019-04-17 01:11:35 +02:00
|
|
|
output_ota: The name of the output zip archive ota package.
|
2019-04-18 21:32:18 +02:00
|
|
|
output_img: The name of the output zip archive img package.
|
2019-04-15 20:34:56 +02:00
|
|
|
output_super_empty: If provided, creates a super_empty.img file from the
|
2019-04-17 23:54:06 +02:00
|
|
|
merged target files package and saves it at this path.
|
2019-03-15 22:36:21 +01:00
|
|
|
rebuild_recovery: If true, rebuild the recovery patch used by non-A/B
|
2019-04-17 23:54:06 +02:00
|
|
|
devices and write it to the system image.
|
2019-02-02 00:52:10 +01:00
|
|
|
"""
|
|
|
|
|
2019-04-17 23:54:06 +02:00
|
|
|
logger.info('starting: merge system %s and other %s into output %s',
|
|
|
|
system_target_files, other_target_files, output_target_files)
|
2019-03-07 22:01:48 +01:00
|
|
|
|
2019-02-02 00:52:10 +01:00
|
|
|
# Create directory names that we'll use when we extract files from system,
|
|
|
|
# and other, and for zipping the final output.
|
|
|
|
|
|
|
|
system_target_files_temp_dir = os.path.join(temp_dir, 'system')
|
|
|
|
other_target_files_temp_dir = os.path.join(temp_dir, 'other')
|
|
|
|
output_target_files_temp_dir = os.path.join(temp_dir, 'output')
|
|
|
|
|
|
|
|
# Extract "as is" items from the input system partial target files package.
|
|
|
|
# We extract them directly into the output temporary directory since the
|
|
|
|
# items do not need special case processing.
|
|
|
|
|
2019-02-22 03:53:37 +01:00
|
|
|
extract_items(
|
2019-02-02 00:52:10 +01:00
|
|
|
target_files=system_target_files,
|
|
|
|
target_files_temp_dir=output_target_files_temp_dir,
|
2019-03-07 22:01:48 +01:00
|
|
|
extract_item_list=system_item_list)
|
2019-02-02 00:52:10 +01:00
|
|
|
|
|
|
|
# Extract "as is" items from the input other partial target files package. We
|
|
|
|
# extract them directly into the output temporary directory since the items
|
|
|
|
# do not need special case processing.
|
|
|
|
|
2019-02-22 03:53:37 +01:00
|
|
|
extract_items(
|
2019-02-02 00:52:10 +01:00
|
|
|
target_files=other_target_files,
|
|
|
|
target_files_temp_dir=output_target_files_temp_dir,
|
2019-03-07 22:01:48 +01:00
|
|
|
extract_item_list=other_item_list)
|
2019-02-02 00:52:10 +01:00
|
|
|
|
|
|
|
# Extract "special" items from the input system partial target files package.
|
|
|
|
# We extract these items to different directory since they require special
|
|
|
|
# processing before they will end up in the output directory.
|
|
|
|
|
2019-02-22 03:53:37 +01:00
|
|
|
extract_items(
|
2019-02-02 00:52:10 +01:00
|
|
|
target_files=system_target_files,
|
|
|
|
target_files_temp_dir=system_target_files_temp_dir,
|
|
|
|
extract_item_list=system_extract_special_item_list)
|
|
|
|
|
|
|
|
# Extract "special" items from the input other partial target files package.
|
|
|
|
# We extract these items to different directory since they require special
|
|
|
|
# processing before they will end up in the output directory.
|
|
|
|
|
2019-02-22 03:53:37 +01:00
|
|
|
extract_items(
|
2019-02-02 00:52:10 +01:00
|
|
|
target_files=other_target_files,
|
|
|
|
target_files_temp_dir=other_target_files_temp_dir,
|
|
|
|
extract_item_list=other_extract_special_item_list)
|
|
|
|
|
|
|
|
# Now that the temporary directories contain all the extracted files, perform
|
|
|
|
# special case processing on any items that need it. After this function
|
|
|
|
# completes successfully, all the files we need to create the output target
|
|
|
|
# files package are in place.
|
|
|
|
|
2019-02-22 03:53:37 +01:00
|
|
|
process_special_cases(
|
2019-02-02 00:52:10 +01:00
|
|
|
temp_dir=temp_dir,
|
|
|
|
system_target_files_temp_dir=system_target_files_temp_dir,
|
|
|
|
other_target_files_temp_dir=other_target_files_temp_dir,
|
2019-03-07 22:01:48 +01:00
|
|
|
output_target_files_temp_dir=output_target_files_temp_dir,
|
2019-03-30 02:27:23 +01:00
|
|
|
system_misc_info_keys=system_misc_info_keys,
|
|
|
|
rebuild_recovery=rebuild_recovery)
|
2019-02-02 00:52:10 +01:00
|
|
|
|
2019-04-18 21:32:18 +02:00
|
|
|
# Regenerate IMAGES in the temporary directory.
|
|
|
|
|
|
|
|
add_img_args = ['--verbose']
|
|
|
|
if rebuild_recovery:
|
|
|
|
add_img_args.append('--rebuild_recovery')
|
|
|
|
add_img_args.append(output_target_files_temp_dir)
|
|
|
|
|
|
|
|
add_img_to_target_files.main(add_img_args)
|
|
|
|
|
2019-04-15 20:34:56 +02:00
|
|
|
# Create super_empty.img using the merged misc_info.txt.
|
|
|
|
|
2019-04-18 21:32:18 +02:00
|
|
|
misc_info_txt = os.path.join(output_target_files_temp_dir, 'META',
|
|
|
|
'misc_info.txt')
|
2019-04-17 23:54:06 +02:00
|
|
|
|
2019-04-18 21:32:18 +02:00
|
|
|
def read_helper():
|
|
|
|
with open(misc_info_txt) as f:
|
|
|
|
return list(f.read().splitlines())
|
2019-04-15 20:34:56 +02:00
|
|
|
|
2019-04-18 21:32:18 +02:00
|
|
|
use_dynamic_partitions = common.LoadDictionaryFromLines(
|
|
|
|
read_helper()).get('use_dynamic_partitions')
|
2019-04-15 20:34:56 +02:00
|
|
|
|
2019-04-18 21:32:18 +02:00
|
|
|
if use_dynamic_partitions != 'true' and output_super_empty:
|
|
|
|
raise ValueError(
|
|
|
|
'Building super_empty.img requires use_dynamic_partitions=true.')
|
|
|
|
elif use_dynamic_partitions == 'true':
|
|
|
|
super_empty_img = os.path.join(output_target_files_temp_dir, 'IMAGES',
|
|
|
|
'super_empty.img')
|
2019-04-15 20:34:56 +02:00
|
|
|
build_super_image_args = [
|
2019-04-17 23:54:06 +02:00
|
|
|
misc_info_txt,
|
2019-04-18 21:32:18 +02:00
|
|
|
super_empty_img,
|
2019-04-15 20:34:56 +02:00
|
|
|
]
|
|
|
|
build_super_image.main(build_super_image_args)
|
|
|
|
|
2019-04-18 21:32:18 +02:00
|
|
|
# Copy super_empty.img to the user-provided output_super_empty location.
|
|
|
|
if output_super_empty:
|
|
|
|
shutil.copyfile(super_empty_img, output_super_empty)
|
2019-02-02 00:52:10 +01:00
|
|
|
|
2019-04-24 21:55:51 +02:00
|
|
|
# Create the IMG package from the merged target files (before zipping, in
|
|
|
|
# order to avoid an unnecessary unzip and copy).
|
|
|
|
|
|
|
|
if output_img:
|
|
|
|
img_from_target_files_args = [
|
|
|
|
output_target_files_temp_dir,
|
|
|
|
output_img,
|
|
|
|
]
|
|
|
|
img_from_target_files.main(img_from_target_files_args)
|
|
|
|
|
2019-04-15 18:47:24 +02:00
|
|
|
# Finally, create the output target files zip archive and/or copy the
|
|
|
|
# output items to the output target files directory.
|
|
|
|
|
|
|
|
if output_dir:
|
|
|
|
copy_items(output_target_files_temp_dir, output_dir, output_item_list)
|
|
|
|
|
|
|
|
if not output_target_files:
|
|
|
|
return
|
2019-02-02 00:52:10 +01:00
|
|
|
|
|
|
|
output_zip = os.path.abspath(output_target_files)
|
|
|
|
output_target_files_list = os.path.join(temp_dir, 'output.list')
|
2019-04-17 23:54:06 +02:00
|
|
|
output_target_files_meta_dir = os.path.join(output_target_files_temp_dir,
|
|
|
|
'META')
|
2019-02-02 00:52:10 +01:00
|
|
|
|
2019-04-25 02:59:01 +02:00
|
|
|
find_command = [
|
2019-02-02 00:52:10 +01:00
|
|
|
'find',
|
|
|
|
output_target_files_meta_dir,
|
|
|
|
]
|
2019-04-25 02:59:01 +02:00
|
|
|
find_process = common.Run(find_command, stdout=subprocess.PIPE, verbose=False)
|
|
|
|
meta_content = common.RunAndCheckOutput(['sort'], stdin=find_process.stdout,
|
|
|
|
verbose=False)
|
|
|
|
|
|
|
|
find_command = [
|
2019-04-17 23:54:06 +02:00
|
|
|
'find', output_target_files_temp_dir, '-path',
|
|
|
|
output_target_files_meta_dir, '-prune', '-o', '-print'
|
2019-02-02 00:52:10 +01:00
|
|
|
]
|
2019-04-25 02:59:01 +02:00
|
|
|
find_process = common.Run(find_command, stdout=subprocess.PIPE, verbose=False)
|
|
|
|
other_content = common.RunAndCheckOutput(['sort'], stdin=find_process.stdout,
|
|
|
|
verbose=False)
|
2019-02-02 00:52:10 +01:00
|
|
|
|
|
|
|
with open(output_target_files_list, 'wb') as f:
|
|
|
|
f.write(meta_content)
|
|
|
|
f.write(other_content)
|
|
|
|
|
|
|
|
command = [
|
2019-02-20 03:02:46 +01:00
|
|
|
'soong_zip',
|
2019-02-02 00:52:10 +01:00
|
|
|
'-d',
|
2019-04-17 23:54:06 +02:00
|
|
|
'-o',
|
|
|
|
output_zip,
|
|
|
|
'-C',
|
|
|
|
output_target_files_temp_dir,
|
|
|
|
'-l',
|
|
|
|
output_target_files_list,
|
2019-02-02 00:52:10 +01:00
|
|
|
]
|
|
|
|
logger.info('creating %s', output_target_files)
|
2019-02-22 03:53:37 +01:00
|
|
|
common.RunAndWait(command, verbose=True)
|
2019-02-02 00:52:10 +01:00
|
|
|
|
2019-04-17 01:11:35 +02:00
|
|
|
# Create the OTA package from the merged target files package.
|
|
|
|
|
|
|
|
if output_ota:
|
|
|
|
ota_from_target_files_args = [
|
|
|
|
output_zip,
|
|
|
|
output_ota,
|
|
|
|
]
|
|
|
|
ota_from_target_files.main(ota_from_target_files_args)
|
|
|
|
|
2019-04-18 21:32:18 +02:00
|
|
|
|
2019-02-02 00:52:10 +01:00
|
|
|
|
2019-03-07 22:01:48 +01:00
|
|
|
def call_func_with_temp_dir(func, keep_tmp):
  """Manage the creation and cleanup of the temporary directory.

  This function calls the given function after first creating a temporary
  directory. It also cleans up the temporary directory.

  Args:
    func: The function to call. Should accept one parameter, the path to the
      temporary directory.
    keep_tmp: Keep the temporary directory after processing is complete.
  """

  # Create a temporary directory. This will serve as the parent of directories
  # we use when we extract items from the input target files packages, and also
  # a scratch directory that we use for temporary files.

  temp_dir = common.MakeTempDir(prefix='merge_target_files_')

  # A bare `except: raise` clause here would be a no-op; try/finally alone is
  # sufficient to guarantee cleanup on both success and failure paths while
  # letting any exception propagate unchanged.
  try:
    func(temp_dir)
  finally:
    if keep_tmp:
      logger.info('keeping %s', temp_dir)
    else:
      common.Cleanup()
|
|
|
|
|
|
|
|
|
|
|
|
def main():
  """The main function.

  Process command line arguments, then call merge_target_files to
  perform the heavy lifting.
  """

  common.InitLogging()

  # Map each value-taking long option to the OPTIONS attribute it populates.
  value_options = {
      '--system-target-files': 'system_target_files',
      '--system-item-list': 'system_item_list',
      '--system-misc-info-keys': 'system_misc_info_keys',
      '--other-target-files': 'other_target_files',
      '--other-item-list': 'other_item_list',
      '--output-target-files': 'output_target_files',
      '--output-dir': 'output_dir',
      '--output-item-list': 'output_item_list',
      '--output-ota': 'output_ota',
      '--output-img': 'output_img',
      '--output-super-empty': 'output_super_empty',
  }
  # Boolean flags that take no argument and simply switch behavior on.
  flag_options = {
      '--rebuild_recovery': 'rebuild_recovery',
      '--keep-tmp': 'keep_tmp',
  }

  def option_handler(o, a):
    # Returns True when the option was recognized, False otherwise (the
    # contract expected by common.ParseOptions for extra option handlers).
    if o in value_options:
      setattr(OPTIONS, value_options[o], a)
      return True
    if o in flag_options:
      setattr(OPTIONS, flag_options[o], True)
      return True
    return False

  args = common.ParseOptions(
      sys.argv[1:],
      __doc__,
      extra_long_opts=[
          'system-target-files=',
          'system-item-list=',
          'system-misc-info-keys=',
          'other-target-files=',
          'other-item-list=',
          'output-target-files=',
          'output-dir=',
          'output-item-list=',
          'output-ota=',
          'output-img=',
          'output-super-empty=',
          'rebuild_recovery',
          'keep-tmp',
      ],
      extra_option_handler=option_handler)

  # Both input packages are mandatory, and at least one output form (zip
  # archive or directory) must be requested. An output directory additionally
  # requires an item list describing what to copy into it.
  invalid_usage = (
      bool(args) or OPTIONS.system_target_files is None or
      OPTIONS.other_target_files is None or
      (OPTIONS.output_target_files is None and OPTIONS.output_dir is None) or
      (OPTIONS.output_dir is not None and OPTIONS.output_item_list is None))
  if invalid_usage:
    common.Usage(__doc__)
    sys.exit(1)

  # Each config-list option overrides the corresponding built-in default
  # when the user provides a file path for it.
  system_item_list = (
      read_config_list(OPTIONS.system_item_list)
      if OPTIONS.system_item_list else default_system_item_list)

  system_misc_info_keys = (
      read_config_list(OPTIONS.system_misc_info_keys)
      if OPTIONS.system_misc_info_keys else default_system_misc_info_keys)

  other_item_list = (
      read_config_list(OPTIONS.other_item_list)
      if OPTIONS.other_item_list else default_other_item_list)

  # No default for the output item list; None means "copy nothing extra".
  output_item_list = (
      read_config_list(OPTIONS.output_item_list)
      if OPTIONS.output_item_list else None)

  if not validate_config_lists(
      system_item_list=system_item_list,
      system_misc_info_keys=system_misc_info_keys,
      other_item_list=other_item_list):
    sys.exit(1)

  # Run the merge inside a managed temporary directory so intermediate
  # extraction trees are cleaned up unless --keep-tmp was given.
  call_func_with_temp_dir(
      lambda temp_dir: merge_target_files(
          temp_dir=temp_dir,
          system_target_files=OPTIONS.system_target_files,
          system_item_list=system_item_list,
          system_misc_info_keys=system_misc_info_keys,
          other_target_files=OPTIONS.other_target_files,
          other_item_list=other_item_list,
          output_target_files=OPTIONS.output_target_files,
          output_dir=OPTIONS.output_dir,
          output_item_list=output_item_list,
          output_ota=OPTIONS.output_ota,
          output_img=OPTIONS.output_img,
          output_super_empty=OPTIONS.output_super_empty,
          rebuild_recovery=OPTIONS.rebuild_recovery), OPTIONS.keep_tmp)
|
2019-02-02 00:52:10 +01:00
|
|
|
|
|
|
|
|
|
|
|
# Standard script entry point: run main() only when executed directly, not
# when this module is imported by another tool.
if __name__ == '__main__':
  main()
|