#!/usr/bin/env python
#
# Copyright (C) 2011 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Build image output_image_file from input_directory and properties_file.

Usage: build_image input_directory properties_file output_image_file

"""
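
# Example invocation (a sketch; these paths are hypothetical, the input
# directory and the image_info.txt properties file normally come from the
# build system):
#   build_image.py out/target/product/<device>/system \
#       out/target/product/<device>/system_image_info.txt \
#       out/target/product/<device>/system.img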
import os
import os.path
import subprocess
import sys
import commands
import shutil
import tempfile

FIXED_SALT = "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
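
# Note: this script targets Python 2; the commands module used below for
# shelling out was removed in Python 3.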

def RunCommand(cmd):
  """Echo and run the given command.

  Args:
    cmd: the command represented as a list of strings.
  Returns:
    The exit code.
  """
  print "Running: ", " ".join(cmd)
  p = subprocess.Popen(cmd)
  p.communicate()
  return p.returncode

def GetVerityTreeSize(partition_size):
  cmd = "build_verity_tree -s %d"
  cmd %= partition_size
  status, output = commands.getstatusoutput(cmd)
  if status:
    print output
    return False, 0
  return True, int(output)

def GetVerityMetadataSize(partition_size):
  cmd = "system/extras/verity/build_verity_metadata.py -s %d"
  cmd %= partition_size
  status, output = commands.getstatusoutput(cmd)
  if status:
    print output
    return False, 0
  return True, int(output)

def AdjustPartitionSizeForVerity(partition_size):
  """Modifies the provided partition size to account for the verity metadata.

  This information is used to size the created image appropriately.
  Args:
    partition_size: the size of the partition to be verified.
  Returns:
    The size of the partition adjusted for verity metadata.
  """
  success, verity_tree_size = GetVerityTreeSize(partition_size)
  if not success:
    return 0
  success, verity_metadata_size = GetVerityMetadataSize(partition_size)
  if not success:
    return 0
  return partition_size - verity_tree_size - verity_metadata_size
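
# Rough intuition (illustrative numbers, not computed by this script): with
# 4 KiB blocks and 32-byte digests the dm-verity hash tree is a bit under 1%
# of the data it covers (roughly 8 MiB for a 1 GiB partition), plus a small
# fixed-size metadata block, which is why the filesystem image is built
# slightly smaller than the raw partition.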

def BuildVerityTree(sparse_image_path, verity_image_path, prop_dict):
  cmd = "build_verity_tree -A %s %s %s" % (
      FIXED_SALT, sparse_image_path, verity_image_path)
  print cmd
  status, output = commands.getstatusoutput(cmd)
  if status:
    print "Could not build verity tree! Error: %s" % output
    return False
  root, salt = output.split()
  prop_dict["verity_root_hash"] = root
  prop_dict["verity_salt"] = salt
  return True

def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
                        block_device, signer_path, key):
  cmd_template = (
      "system/extras/verity/build_verity_metadata.py %s %s %s %s %s %s %s")
  cmd = cmd_template % (image_size, verity_metadata_path, root_hash, salt,
                        block_device, signer_path, key)
  print cmd
  status, output = commands.getstatusoutput(cmd)
  if status:
    print "Could not build verity metadata! Error: %s" % output
    return False
  return True

def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
  """Appends the unsparse image to the given sparse image.

  Args:
    sparse_image_path: the path to the (sparse) image
    unsparse_image_path: the path to the (unsparse) image
    error_message: the message to print on failure
  Returns:
    True on success, False on failure.
  """
  cmd = "append2simg %s %s"
  cmd %= (sparse_image_path, unsparse_image_path)
  print cmd
  status, output = commands.getstatusoutput(cmd)
  if status:
    print "%s: %s" % (error_message, output)
    return False
  return True

def BuildVerifiedImage(data_image_path, verity_image_path,
                       verity_metadata_path):
  if not Append2Simg(data_image_path, verity_metadata_path,
                     "Could not append verity metadata!"):
    return False
  if not Append2Simg(data_image_path, verity_image_path,
                     "Could not append verity tree!"):
    return False
  return True
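
# BuildVerifiedImage leaves the sparse image laid out as: filesystem data,
# then the verity metadata block, then the hash tree.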

def UnsparseImage(sparse_image_path, replace=True):
  img_dir = os.path.dirname(sparse_image_path)
  unsparse_image_path = "unsparse_" + os.path.basename(sparse_image_path)
  unsparse_image_path = os.path.join(img_dir, unsparse_image_path)
  if os.path.exists(unsparse_image_path):
    if replace:
      os.unlink(unsparse_image_path)
    else:
      return True, unsparse_image_path
  inflate_command = ["simg2img", sparse_image_path, unsparse_image_path]
  exit_code = RunCommand(inflate_command)
  if exit_code != 0:
    os.remove(unsparse_image_path)
    return False, None
  return True, unsparse_image_path

def MakeVerityEnabledImage(out_file, prop_dict):
  """Creates an image that is verifiable using dm-verity.

  Args:
    out_file: the location to write the verifiable image at
    prop_dict: a dictionary of properties required for image creation and
               verification
  Returns:
    True on success, False otherwise.
  """
  # get properties
  image_size = prop_dict["partition_size"]
  block_dev = prop_dict["verity_block_device"]
  signer_key = prop_dict["verity_key"] + ".pk8"
  signer_path = prop_dict["verity_signer_cmd"]

  # make a tempdir
  tempdir_name = tempfile.mkdtemp(suffix="_verity_images")

  # get partial image paths
  verity_image_path = os.path.join(tempdir_name, "verity.img")
  verity_metadata_path = os.path.join(tempdir_name, "verity_metadata.img")

  # build the verity tree and get the root hash and salt
  if not BuildVerityTree(out_file, verity_image_path, prop_dict):
    shutil.rmtree(tempdir_name, ignore_errors=True)
    return False

  # build the metadata blocks
  root_hash = prop_dict["verity_root_hash"]
  salt = prop_dict["verity_salt"]
  if not BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
                             block_dev, signer_path, signer_key):
    shutil.rmtree(tempdir_name, ignore_errors=True)
    return False

  # build the full verified image
  if not BuildVerifiedImage(out_file,
                            verity_image_path,
                            verity_metadata_path):
    shutil.rmtree(tempdir_name, ignore_errors=True)
    return False

  shutil.rmtree(tempdir_name, ignore_errors=True)
  return True

def BuildImage(in_dir, prop_dict, out_file):
  """Build an image to out_file from in_dir with property prop_dict.

  Args:
    in_dir: path of input directory.
    prop_dict: property dictionary.
    out_file: path of the output image file.

  Returns:
    True iff the image is built successfully.
  """
  # system_root_image=true: build a system.img that combines the contents of
  # /system and the ramdisk, and can be mounted at the root of the file system.
  origin_in = in_dir
  fs_config = prop_dict.get("fs_config")
  if (prop_dict.get("system_root_image") == "true"
      and prop_dict["mount_point"] == "system"):
    in_dir = tempfile.mkdtemp()
    # Change the mount point to "/"
    prop_dict["mount_point"] = "/"
    if fs_config:
      # We need to merge the fs_config files of system and ramdisk.
      fd, merged_fs_config = tempfile.mkstemp(prefix="root_fs_config",
                                              suffix=".txt")
      os.close(fd)
      with open(merged_fs_config, "w") as fw:
        if "ramdisk_fs_config" in prop_dict:
          with open(prop_dict["ramdisk_fs_config"]) as fr:
            fw.writelines(fr.readlines())
        with open(fs_config) as fr:
          fw.writelines(fr.readlines())
      fs_config = merged_fs_config

  build_command = []
  fs_type = prop_dict.get("fs_type", "")
  run_fsck = False

  fs_spans_partition = True
  if fs_type.startswith("squash"):
    fs_spans_partition = False

  is_verity_partition = "verity_block_device" in prop_dict
  verity_supported = prop_dict.get("verity") == "true"
  # adjust the partition size to make room for the hashes if this is to be
  # verified
  if verity_supported and is_verity_partition and fs_spans_partition:
    partition_size = int(prop_dict.get("partition_size"))
    adjusted_size = AdjustPartitionSizeForVerity(partition_size)
    if not adjusted_size:
      return False
    prop_dict["partition_size"] = str(adjusted_size)
    prop_dict["original_partition_size"] = str(partition_size)

  if fs_type.startswith("ext"):
    build_command = ["mkuserimg.sh"]
    if "extfs_sparse_flag" in prop_dict:
      build_command.append(prop_dict["extfs_sparse_flag"])
      run_fsck = True
    build_command.extend([in_dir, out_file, fs_type,
                          prop_dict["mount_point"]])
    build_command.append(prop_dict["partition_size"])
    if "journal_size" in prop_dict:
      build_command.extend(["-j", prop_dict["journal_size"]])
    if "timestamp" in prop_dict:
      build_command.extend(["-T", str(prop_dict["timestamp"])])
    if fs_config:
      build_command.extend(["-C", fs_config])
    if "block_list" in prop_dict:
      build_command.extend(["-B", prop_dict["block_list"]])
    build_command.extend(["-L", prop_dict["mount_point"]])
    if "selinux_fc" in prop_dict:
      build_command.append(prop_dict["selinux_fc"])
  elif fs_type.startswith("squash"):
    build_command = ["mksquashfsimage.sh"]
    build_command.extend([in_dir, out_file])
    build_command.extend(["-m", prop_dict["mount_point"]])
    if "selinux_fc" in prop_dict:
      build_command.extend(["-c", prop_dict["selinux_fc"]])
  elif fs_type.startswith("f2fs"):
    build_command = ["mkf2fsuserimg.sh"]
    build_command.extend([out_file, prop_dict["partition_size"]])
  else:
    build_command = ["mkyaffs2image", "-f"]
    if prop_dict.get("mkyaffs2_extra_flags", None):
      build_command.extend(prop_dict["mkyaffs2_extra_flags"].split())
    build_command.append(in_dir)
    build_command.append(out_file)
    if "selinux_fc" in prop_dict:
      build_command.append(prop_dict["selinux_fc"])
      build_command.append(prop_dict["mount_point"])
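
  # For an ext4 system image the assembled command typically looks something
  # like (arguments are illustrative only):
  #   mkuserimg.sh -s <in_dir> <out_file> ext4 system <partition_size> \
  #       -L system <file_contexts>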

  if in_dir != origin_in:
    # Construct a staging directory of the root file system.
    ramdisk_dir = prop_dict.get("ramdisk_dir")
    if ramdisk_dir:
      shutil.rmtree(in_dir)
      shutil.copytree(ramdisk_dir, in_dir, symlinks=True)
    staging_system = os.path.join(in_dir, "system")
    shutil.rmtree(staging_system, ignore_errors=True)
    shutil.copytree(origin_in, staging_system, symlinks=True)
  try:
    exit_code = RunCommand(build_command)
  finally:
    if in_dir != origin_in:
      # Clean up temporary directories and files.
      shutil.rmtree(in_dir, ignore_errors=True)
      if fs_config:
        os.remove(fs_config)
  if exit_code != 0:
    return False

  if not fs_spans_partition:
    mount_point = prop_dict.get("mount_point")
    partition_size = int(prop_dict.get("partition_size"))
    image_size = os.stat(out_file).st_size
    if image_size > partition_size:
      print "Error: %s image size of %d is larger than partition size of %d" % (
          mount_point, image_size, partition_size)
      return False
    if verity_supported and is_verity_partition:
      if 2 * image_size - AdjustPartitionSizeForVerity(image_size) > partition_size:
        print "Error: No more room on %s to fit verity data" % mount_point
        return False
    prop_dict["original_partition_size"] = prop_dict["partition_size"]
    prop_dict["partition_size"] = str(image_size)
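
  # Note on the verity check above: AdjustPartitionSizeForVerity(image_size)
  # is image_size minus the verity overhead for an image of that size, so
  # 2 * image_size - AdjustPartitionSizeForVerity(image_size) equals
  # image_size plus that overhead, i.e. the total space needed once the hash
  # tree and metadata are appended.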

  # create the verified image if this is to be verified
  if verity_supported and is_verity_partition:
    if not MakeVerityEnabledImage(out_file, prop_dict):
      return False

  if run_fsck and prop_dict.get("skip_fsck") != "true":
    success, unsparse_image = UnsparseImage(out_file, replace=False)
    if not success:
      return False

    # Run e2fsck on the inflated image file
    e2fsck_command = ["e2fsck", "-f", "-n", unsparse_image]
    exit_code = RunCommand(e2fsck_command)

    os.remove(unsparse_image)

  return exit_code == 0


def ImagePropFromGlobalDict(glob_dict, mount_point):
  """Build an image property dictionary from the global dictionary.

  Args:
    glob_dict: the global dictionary from the build system.
    mount_point: such as "system", "data" etc.
  """
  d = {}
  if "build.prop" in glob_dict:
    bp = glob_dict["build.prop"]
    if "ro.build.date.utc" in bp:
      d["timestamp"] = bp["ro.build.date.utc"]

  def copy_prop(src_p, dest_p):
    if src_p in glob_dict:
      d[dest_p] = str(glob_dict[src_p])

  common_props = (
      "extfs_sparse_flag",
      "mkyaffs2_extra_flags",
      "selinux_fc",
      "skip_fsck",
      "verity",
      "verity_key",
      "verity_signer_cmd"
      )
  for p in common_props:
    copy_prop(p, p)

  d["mount_point"] = mount_point
  if mount_point == "system":
    copy_prop("fs_type", "fs_type")
    # Copy the generic system fs type first, override with specific one if
    # available.
    copy_prop("system_fs_type", "fs_type")
    copy_prop("system_size", "partition_size")
    copy_prop("system_journal_size", "journal_size")
    copy_prop("system_verity_block_device", "verity_block_device")
    copy_prop("system_root_image", "system_root_image")
    copy_prop("ramdisk_dir", "ramdisk_dir")
  elif mount_point == "data":
    # Copy the generic fs type first, override with specific one if available.
    copy_prop("fs_type", "fs_type")
    copy_prop("userdata_fs_type", "fs_type")
    copy_prop("userdata_size", "partition_size")
  elif mount_point == "cache":
    copy_prop("cache_fs_type", "fs_type")
    copy_prop("cache_size", "partition_size")
  elif mount_point == "vendor":
    copy_prop("vendor_fs_type", "fs_type")
    copy_prop("vendor_size", "partition_size")
    copy_prop("vendor_journal_size", "journal_size")
    copy_prop("vendor_verity_block_device", "verity_block_device")
  elif mount_point == "oem":
    copy_prop("fs_type", "fs_type")
    copy_prop("oem_size", "partition_size")
    copy_prop("oem_journal_size", "journal_size")

  return d


def LoadGlobalDict(filename):
  """Load "name=value" pairs from filename"""
  d = {}
  f = open(filename)
  for line in f:
    line = line.strip()
    if not line or line.startswith("#"):
      continue
    k, v = line.split("=", 1)
    d[k] = v
  f.close()
  return d
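
# A properties file consumed by LoadGlobalDict is a list of plain
# "name=value" lines, for example (values are purely illustrative):
#   fs_type=ext4
#   system_size=1073741824
#   extfs_sparse_flag=-s
#   selinux_fc=out/target/product/<device>/root/file_contexts
#   verity=false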


def main(argv):
  if len(argv) != 3:
    print __doc__
    sys.exit(1)

  in_dir = argv[0]
  glob_dict_file = argv[1]
  out_file = argv[2]

  glob_dict = LoadGlobalDict(glob_dict_file)
  image_filename = os.path.basename(out_file)
  mount_point = ""
  if image_filename == "system.img":
    mount_point = "system"
  elif image_filename == "userdata.img":
    mount_point = "data"
  elif image_filename == "cache.img":
    mount_point = "cache"
  elif image_filename == "vendor.img":
    mount_point = "vendor"
  elif image_filename == "oem.img":
    mount_point = "oem"
  else:
    print >> sys.stderr, "error: unknown image file name ", image_filename
    exit(1)

  image_properties = ImagePropFromGlobalDict(glob_dict, mount_point)
  if not BuildImage(in_dir, image_properties, out_file):
    print >> sys.stderr, "error: failed to build %s from %s" % (out_file,
                                                                in_dir)
    exit(1)


if __name__ == '__main__':
  main(sys.argv[1:])