Prototype of build orchestrator

The code assumes a different source structure. It requires updating
the path configurations.

Bug: 230448564
Test: N/A
Change-Id: I38bf181eecdd291712212ba8ee6d61b85c3fbb07
This commit is contained in:
Justin Yun 2022-04-27 16:21:16 +09:00
parent 62bf0d5c88
commit 0daf186a89
7 changed files with 1094 additions and 0 deletions

View file

@ -0,0 +1,46 @@
#!/usr/bin/env python3
import os
import sys
import yaml
from hierarchy import parse_hierarchy
def main():
    """Validate a {target}-{variant} selection for the buffet() shell helper.

    Exits non-zero with a message when the argument is malformed, the variant
    is unknown, or BUFFET_BUILD_TOP is unset; raises RuntimeError when the
    target is absent from the hierarchy. On success prints the target's
    parent chain.
    """
    if len(sys.argv) != 2:
        print('usage: %s target' % sys.argv[0])
        sys.exit(1)  # sys.exit over bare exit(): exit() is a site.py convenience

    args = sys.argv[1].split('-')
    if len(args) != 2:
        print('target format: {target}-{variant}')
        sys.exit(1)

    target, variant = args

    if variant not in ['eng', 'user', 'userdebug']:
        print('unknown variant "%s": expected "eng", "user" or "userdebug"' %
              variant)
        sys.exit(1)

    build_top = os.getenv('BUFFET_BUILD_TOP')
    if not build_top:
        print('BUFFET_BUILD_TOP is not set; Did you correctly run envsetup.sh?')
        sys.exit(1)

    # Walk the child -> parent map from the requested target up to the root.
    hierarchy_map = parse_hierarchy(build_top)

    if target not in hierarchy_map:
        raise RuntimeError(
            "unknown target '%s': couldn't find the target. Supported targets are: %s"
            % (target, list(hierarchy_map.keys())))

    hierarchy = [target]
    while hierarchy_map[hierarchy[-1]]:
        hierarchy.append(hierarchy_map[hierarchy[-1]])

    print('Target hierarchy for %s: %s' % (target, hierarchy))


if __name__ == '__main__':
    main()

View file

@ -0,0 +1,367 @@
#!/usr/bin/env python3
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import copy
import hierarchy
import json
import logging
import filecmp
import os
import shutil
import subprocess
import sys
import tempfile
import collect_metadata
import utils
# Goals that fan out to every component (minus BUILD_ALL_EXEMPTION) instead
# of being addressed to a single component.
BUILD_CMD_TO_ALL = (
    'clean',
    'installclean',
    'update-meta',
)

# Components skipped when a BUILD_CMD_TO_ALL goal fans out.
BUILD_ALL_EXEMPTION = (
    'art',
)
def get_supported_product(ctx, supported_products):
    """Return the current lunch target, or its closest ancestor, that appears
    in supported_products; None when no ancestor matches.
    """
    parents = hierarchy.parse_hierarchy(ctx.build_top())
    candidate = ctx.target_product()
    while candidate not in supported_products:
        if candidate not in parents:
            return None
        candidate = parents[candidate]
    return candidate
def parse_goals(ctx, metadata, goals):
    """Parse goals and returns a map from each component to goals.

    e.g.

    "m main art timezone:foo timezone:bar" will return the following dict: {
        "main": {"all"},
        "art": {"all"},
        "timezone": {"foo", "bar"},
    }
    """
    # Each goal is spelled {component} or {component}:{subgoal}.
    parsed = collections.defaultdict(set)
    for goal in goals:
        # Commands addressed to every component are grouped under 'all'.
        if goal in BUILD_CMD_TO_ALL:
            parsed['all'].add(goal)
            continue

        if ':' in goal:
            try:
                component, subgoal = goal.split(':')
            except ValueError:
                raise RuntimeError(
                    'unknown goal: %s: should be {component} or {component}:{subgoal}' %
                    goal)
        else:
            component, subgoal = goal, 'all'

        if component not in metadata:
            raise RuntimeError('unknown goal: %s: component %s not found' %
                               (goal, component))
        if not get_supported_product(ctx, metadata[component]['lunch_targets']):
            raise RuntimeError("can't find matching target. Supported targets are: " +
                               str(metadata[component]['lunch_targets']))

        parsed[component].add(subgoal)
    return parsed
def find_cycle(metadata):
    """Finds a cyclic dependency among components.

    This is for debugging.

    Returns a list of component names forming a cycle, or None when the
    dependency graph is acyclic.
    """
    visited = set()
    parent_node = dict()
    in_stack = set()

    # Returns a cycle if one is found
    def dfs(node):
        visited.add(node)
        in_stack.add(node)
        if 'deps' not in metadata[node]:
            in_stack.remove(node)
            return None
        # 'dep' instead of 'next': do not shadow the builtin next().
        for dep in metadata[node]['deps']:
            # We found a cycle (dep ~ node) if dep is still in the stack
            if dep in in_stack:
                cycle = [node]
                while cycle[-1] != dep:
                    cycle.append(parent_node[cycle[-1]])
                return cycle
            # Else, continue searching
            if dep in visited:
                continue
            parent_node[dep] = node
            result = dfs(dep)
            if result:
                return result
        in_stack.remove(node)
        return None

    for component in metadata:
        if component in visited:
            continue
        result = dfs(component)
        if result:
            return result
    return None
def topological_sort_components(metadata):
    """Performs topological sort on components.

    If A depends on B, B appears first.
    """
    # Edges are stored as A -> B when B is in metadata[A]['deps'], so Kahn's
    # algorithm yields dependents before dependencies; reversing the result
    # at the end produces the desired "dependencies first" order.
    num_incoming = collections.defaultdict(int)
    for name in metadata:
        for dep in metadata[name].get('deps', ()):
            num_incoming[dep] += 1

    ready = collections.deque(
        name for name in metadata if num_incoming[name] == 0)

    ordered = []
    while ready:
        name = ready.popleft()
        ordered.append(name)
        for dep in metadata[name].get('deps', ()):
            num_incoming[dep] -= 1
            if num_incoming[dep] == 0:
                ready.append(dep)

    # A component that never became ready means there is a cycle.
    if len(ordered) != len(metadata):
        cycle = find_cycle(metadata)
        raise RuntimeError('circular dependency found among metadata: %s' % cycle)

    return ordered[::-1]
def add_dependency_goals(ctx, metadata, component, goals):
    """Adds goals that given component depends on."""
    # For now every dependency is simply asked to build everything.
    # TODO: add detailed goals (e.g. API build rules, library build rules, etc.)
    for dep in metadata[component].get('deps', ()):
        goals[dep].add('all')
def sorted_goals_with_dependencies(ctx, metadata, parsed_goals):
    """ Analyzes the dependency graph among components, adds build commands for
    dependencies, and then sorts the goals.

    Returns a list of tuples: (component_name, set of subgoals).
    Builds should be run in the list's order.
    """
    # TODO(inseob@): after topological sort, some components may be built in
    # parallel.
    dep_first_order = topological_sort_components(metadata)
    combined = copy.deepcopy(parsed_goals)

    # Visit dependents first so added goals propagate transitively:
    # if A depends on B and B depends on C, building A pulls in B, and B's
    # visit then pulls in C.
    for name in reversed(dep_first_order):
        if name in combined:
            add_dependency_goals(ctx, metadata, name, combined)

    return [(name, combined[name])
            for name in ['all'] + dep_first_order
            if name in combined]
def run_build(ctx, metadata, component, subgoals):
    """Runs one component's build command for the given subgoals.

    Spawns a bash that sources envsetup.sh, lunches the matching product,
    and invokes the component's build command inside the component's path.
    Raises subprocess.CalledProcessError on a failed build (check=True).
    """
    meta = metadata[component]
    build_cmd = meta['build_cmd']
    # NOTE: the original also read meta['out_dir'] into an unused local;
    # that dead read has been removed.

    default_goals = ''
    if 'default_goals' in meta:
        default_goals = meta['default_goals']

    # 'all' means "use the component's default goals".
    if 'all' in subgoals:
        goal = default_goals
    else:
        goal = ' '.join(subgoals)

    # 'update-meta' asks soong to regenerate the multitree metadata file.
    build_vars = ''
    if 'update-meta' in subgoals:
        build_vars = 'TARGET_MULTITREE_UPDATE_META=true'

    # TODO(inseob@): shell escape
    cmd = [
        '/bin/bash', '-c',
        'source build/envsetup.sh && lunch %s-%s && %s %s %s' %
        (get_supported_product(ctx, meta['lunch_targets']),
         ctx.target_build_variant(), build_vars, build_cmd, goal)
    ]
    logging.debug('cwd: ' + meta['path'])
    logging.debug('running build: ' + str(cmd))

    subprocess.run(cmd, cwd=meta['path'], check=True)
def run_build_all(ctx, metadata, subgoals):
    """Runs the build for every component except the exempted ones."""
    for component in metadata:
        if component not in BUILD_ALL_EXEMPTION:
            run_build(ctx, metadata, component, subgoals)
def find_components(metadata, predicate):
    """Yields the names of components for which predicate(name) is truthy."""
    yield from (name for name in metadata if predicate(name))
def import_filegroups(metadata, component, exporting_component, target_file_pairs):
    """Materializes filegroups imported by component from exporting_component.

    Writes an Android.bp declaring one filegroup per (name, outpaths) pair and
    symlinks each exported output file next to it, under
    <component>/imported/<exporting_component>. The directory is left untouched
    when its contents are already identical to the freshly generated ones.
    """
    imported_filegroup_dir = os.path.join(metadata[component]['path'],
                                          'imported', exporting_component)

    # Generate filegroup module definitions for all imported names.
    bp_content = ''
    for name, outpaths in target_file_pairs:
        bp_content += ('filegroup {{\n'
                       '    name: "{fname}",\n'
                       '    srcs: [\n'.format(fname=name))
        for outpath in outpaths:
            bp_content += '        "{outfile}",\n'.format(
                outfile=os.path.basename(outpath))
        bp_content += ('    ],\n'
                       '}\n')

    with tempfile.TemporaryDirectory() as tmp_dir:
        with open(os.path.join(tmp_dir, 'Android.bp'), 'w') as fout:
            fout.write(bp_content)
        for _, outpaths in target_file_pairs:
            for outpath in outpaths:
                os.symlink(
                    os.path.join(metadata[exporting_component]['path'], outpath),
                    os.path.join(tmp_dir, os.path.basename(outpath)))

        # dircmp is lazy, so constructing it before the existence check is safe.
        cmp_result = filecmp.dircmp(tmp_dir, imported_filegroup_dir)
        if os.path.exists(imported_filegroup_dir) and len(
                cmp_result.left_only) + len(cmp_result.right_only) + len(
                    cmp_result.diff_files) == 0:
            # Files are identical, nothing needs to be written.
            # ('return', not 'continue': there is no enclosing loop here.)
            logging.info(
                'imported files exists and the contents are identical: {} -> {}'
                .format(component, exporting_component))
            return

        logging.info('creating symlinks for imported files: {} -> {}'.format(
            component, exporting_component))
        # Ensure parent dirs exist, then replace the leaf directory wholesale.
        os.makedirs(imported_filegroup_dir, exist_ok=True)
        shutil.rmtree(imported_filegroup_dir, ignore_errors=True)
        # Copy (preserving symlinks) instead of moving tmp_dir away, so the
        # TemporaryDirectory cleanup does not fail on a missing directory.
        shutil.copytree(tmp_dir, imported_filegroup_dir, symlinks=True)
def prepare_build(metadata, component):
    """Regenerates the component's 'imported' directory from its deps.

    Creates/updates imported filegroups for every dependency, then prunes
    subdirectories left over from dependencies that no longer exist.
    """
    imported_dir = os.path.join(metadata[component]['path'], 'imported')

    if utils.META_DEPS not in metadata[component]:
        # No dependencies at all: drop the whole imported directory.
        if os.path.exists(imported_dir):
            logging.debug('remove {}'.format(imported_dir))
            shutil.rmtree(imported_dir)
        return

    imported_components = set()
    for exp_comp, exported in metadata[component][utils.META_DEPS].items():
        if utils.META_FILEGROUP in exported:
            filegroups = exported[utils.META_FILEGROUP]
            target_file_pairs = [(name, filegroups[name]) for name in filegroups]
            import_filegroups(metadata, component, exp_comp, target_file_pairs)
            imported_components.add(exp_comp)

    # Remove directories that are not generated this time.
    if os.path.exists(imported_dir):
        if not imported_components:
            shutil.rmtree(imported_dir)
        else:
            stale = set(os.listdir(imported_dir)) - imported_components
            for remove_target in stale:
                logging.info('remove unnecessary imported dir: {}'.format(remove_target))
                shutil.rmtree(os.path.join(imported_dir, remove_target))
def main():
    """Entry point: collect metadata, resolve goals, and build in order."""
    # set_logging_config clamps its verbosity count, so any large value
    # (logging.DEBUG == 10) selects DEBUG.
    utils.set_logging_config(logging.DEBUG)
    # NOTE: the original called utils.set_logging_config(True) a second time
    # after "collecting metadata"; logging.basicConfig is a no-op once
    # configured and True is not a verbosity count, so the call was removed.
    ctx = utils.get_build_context()

    logging.info('collecting metadata')

    goals = sys.argv[1:]
    if not goals:
        logging.debug('empty goals. defaults to main')
        goals = ['main']

    logging.debug('goals: ' + str(goals))

    # Force update the metadata for the 'update-meta' build
    metadata_collector = collect_metadata.MetadataCollector(
        ctx.components_top(), ctx.out_dir(),
        collect_metadata.COMPONENT_METADATA_DIR,
        collect_metadata.COMPONENT_METADATA_FILE,
        force_update='update-meta' in goals)
    metadata_collector.collect()

    metadata = metadata_collector.get_metadata()
    logging.debug('metadata: ' + str(metadata))

    parsed_goals = parse_goals(ctx, metadata, goals)
    logging.debug('parsed goals: ' + str(parsed_goals))

    sorted_goals = sorted_goals_with_dependencies(ctx, metadata, parsed_goals)
    logging.debug('sorted goals with deps: ' + str(sorted_goals))

    for component, subgoals in sorted_goals:
        if component == 'all':
            run_build_all(ctx, metadata, subgoals)
            continue
        prepare_build(metadata, component)
        run_build(ctx, metadata, component, subgoals)


if __name__ == '__main__':
    main()

View file

@ -0,0 +1,428 @@
#!/usr/bin/env python3
# Copyright (C) 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import copy
import json
import logging
import os
import sys
import yaml
from collections import defaultdict
from typing import (
List,
Set,
)
import utils
# SKIP_COMPONENT_SEARCH = (
#     'tools',
# )

# Per-component metadata location: <component>/.repo/treeinfo.yaml
COMPONENT_METADATA_DIR = '.repo'
COMPONENT_METADATA_FILE = 'treeinfo.yaml'
# Metadata generated by soong for a component ('update-meta' build).
GENERATED_METADATA_FILE = 'metadata.json'
# Combined metadata for all components, cached in the out dir.
COMBINED_METADATA_FILENAME = 'multitree_meta.json'
class Dep(object):
    """Base record for a single exported/imported dependency item."""

    def __init__(self, name, component, deps_type):
        # Name of the dependency item (module/filegroup/library name).
        self.name = name
        # Component that declares this item.
        self.component = component
        # Dependency category, e.g. 'modules' or 'filegroup'.
        self.type = deps_type
        # Output file paths backing this item; filled in later.
        self.out_paths = []
class ExportedDep(Dep):
    """A dependency item exported by a component."""

    def __init__(self, name, component, deps_type):
        super().__init__(name, component, deps_type)

    def setOutputPaths(self, output_paths: list):
        """Records the output files that implement this exported item."""
        self.out_paths = output_paths
class ImportedDep(Dep):
    """A dependency item imported by a component.

    Matched against an ExportedDep of the required type by verify_and_add().
    """

    required_type_map = {
        # import type: (required type, get imported module list)
        utils.META_FILEGROUP: (utils.META_MODULES, True),
    }

    def __init__(self, name, component, deps_type, import_map):
        super().__init__(name, component, deps_type)
        self.exported_deps: Set[ExportedDep] = set()
        self.imported_modules: List[str] = []

        self.required_type = deps_type
        uses_module_list = False
        if deps_type in ImportedDep.required_type_map:
            self.required_type, uses_module_list = ImportedDep.required_type_map[deps_type]

        if uses_module_list:
            # e.g. a filegroup import maps to the list of modules backing it.
            self.imported_modules = import_map[name]
        else:
            self.imported_modules = [name]

    def verify_and_add(self, exported: ExportedDep):
        """Checks type compatibility and absorbs the exported item's outputs."""
        if self.required_type != exported.type:
            raise RuntimeError(
                '{comp} components imports {module} for {imp_type} but it is exported as {exp_type}.'
                .format(comp=self.component, module=exported.name,
                        imp_type=self.required_type, exp_type=exported.type))
        self.exported_deps.add(exported)
        self.out_paths.extend(exported.out_paths)
        # Remove duplicates. We may not use set() which is not JSON serializable
        self.out_paths = list(dict.fromkeys(self.out_paths))
class MetadataCollector(object):
    """Visit all component directories and collect the metadata from them.

    Example of metadata:
    ==========
    build_cmd: m    # build command for this component. 'm' if omitted
    out_dir: out    # out dir of this component. 'out' if omitted
    exports:
        libraries:
            - name: libopenjdkjvm
            - name: libopenjdkjvmd
              build_cmd: mma      # build command for libopenjdkjvmd if specified
              out_dir: out/soong  # out dir for libopenjdkjvmd if specified
            - name: libctstiagent
        APIs:
            - api1
            - api2
    imports:
        libraries:
            - lib1
            - lib2
        APIs:
            - import_api1
            - import_api2
    lunch_targets:
        - arm64
        - x86_64
    """

    def __init__(self, component_top, out_dir, meta_dir, meta_file, force_update=False):
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)

        self.__component_top = component_top
        self.__out_dir = out_dir
        # Relative path of the per-component metadata file,
        # e.g. '.repo/treeinfo.yaml'.
        self.__metadata_path = os.path.join(meta_dir, meta_file)
        self.__combined_metadata_path = os.path.join(self.__out_dir,
                                                     COMBINED_METADATA_FILENAME)
        self.__force_update = force_update

        # component name -> metadata dict
        self.__metadata = dict()
        # exported item name -> ExportedDep
        self.__map_exports = dict()
        # components whose metadata file was found during this run
        self.__component_set = set()

    def collect(self):
        """Read precomputed combined metadata from the json file.

        If any components have updated their metadata, update the metadata
        information and the json file.
        """
        timestamp = self.__restore_metadata()
        # If this script itself is newer than the cached metadata, the cache
        # format/logic may have changed: force a refresh.
        if timestamp and os.path.getmtime(__file__) > timestamp:
            logging.info('Update the metadata as the orchestrator has been changed')
            self.__force_update = True
        self.__collect_from_components(timestamp)

    def get_metadata(self):
        """Returns a deep copy of the collected metadata of all components."""
        if not self.__metadata:
            logging.warning('Metadata is empty')
        return copy.deepcopy(self.__metadata)

    def __collect_from_components(self, timestamp):
        """Read metadata from all components.

        If any components have newer metadata files or are removed, update the
        combined metadata.
        """
        metadata_updated = False
        for component in os.listdir(self.__component_top):
            # if component in SKIP_COMPONENT_SEARCH:
            #     continue
            if self.__read_component_metadata(timestamp, component):
                metadata_updated = True
            if self.__read_generated_metadata(timestamp, component):
                metadata_updated = True

        # Drop cached entries for components that disappeared from disk.
        deleted_components = set()
        for meta in self.__metadata:
            if meta not in self.__component_set:
                logging.info('Component {} is removed'.format(meta))
                deleted_components.add(meta)
                metadata_updated = True
        for meta in deleted_components:
            del self.__metadata[meta]

        if metadata_updated:
            self.__update_dependencies()
            self.__store_metadata()
            logging.info('Metadata updated')

    def __read_component_metadata(self, timestamp, component):
        """Search for the treeinfo.yaml metadata file of a component.

        If the metadata is modified (or a refresh is forced), read the file
        and update the in-memory metadata. Returns True when an update
        happened.
        """
        component_path = os.path.join(self.__component_top, component)
        metadata_file = os.path.join(component_path, self.__metadata_path)
        logging.info(
            'Reading a metadata file from {} component ...'.format(component))
        if not os.path.isfile(metadata_file):
            logging.warning('Metadata file {} not found!'.format(metadata_file))
            return False

        self.__component_set.add(component)
        # Skip unchanged files unless a forced update was requested.
        if not self.__force_update and timestamp and timestamp > os.path.getmtime(metadata_file):
            logging.info('... yaml not changed. Skip')
            return False

        with open(metadata_file) as f:
            meta = yaml.load(f, Loader=yaml.SafeLoader)

        meta['path'] = component_path
        # Fill in defaults for optional fields.
        if utils.META_BUILDCMD not in meta:
            meta[utils.META_BUILDCMD] = utils.DEFAULT_BUILDCMD
        if utils.META_OUTDIR not in meta:
            meta[utils.META_OUTDIR] = utils.DEFAULT_OUTDIR
        if utils.META_IMPORTS not in meta:
            meta[utils.META_IMPORTS] = defaultdict(dict)
        if utils.META_EXPORTS not in meta:
            meta[utils.META_EXPORTS] = defaultdict(dict)

        self.__metadata[component] = meta
        return True

    def __read_generated_metadata(self, timestamp, component):
        """Read the metadata generated by the 'update-meta' build command from
        the soong build system.

        Soong generates metadata holding the import/export module/file
        information; the build orchestrator reads it to collect dependency
        information. Returns True when the in-memory metadata was updated.

        Generated metadata has the following format:
        {
            "Imported": {
                "FileGroups": {
                    "<name_of_filegroup>": [
                        "<exported_module_name>",
                        ...
                    ],
                    ...
                }
            }
            "Exported": {
                "<exported_module_name>": [
                    "<output_file_path>",
                    ...
                ],
                ...
            }
        }
        """
        if component not in self.__component_set:
            # skip reading generated metadata if the component metadata file was missing
            return False
        component_out = os.path.join(self.__component_top, component,
                                     self.__metadata[component][utils.META_OUTDIR])
        generated_metadata_file = os.path.join(component_out, 'soong', 'multitree',
                                               GENERATED_METADATA_FILE)
        if not os.path.isfile(generated_metadata_file):
            logging.info('... Soong did not generated the metadata file. Skip')
            return False

        if not self.__force_update and timestamp and timestamp > os.path.getmtime(generated_metadata_file):
            logging.info('... Soong generated metadata not changed. Skip')
            return False

        with open(generated_metadata_file, 'r') as gen_meta_json:
            try:
                gen_metadata = json.load(gen_meta_json)
            except json.decoder.JSONDecodeError:
                logging.warning('JSONDecodeError!!!: skip reading the {} file'.format(
                    generated_metadata_file))
                return False

        if utils.SOONG_IMPORTED in gen_metadata:
            imported = gen_metadata[utils.SOONG_IMPORTED]
            if utils.SOONG_IMPORTED_FILEGROUPS in imported:
                self.__metadata[component][utils.META_IMPORTS][utils.META_FILEGROUP] = imported[utils.SOONG_IMPORTED_FILEGROUPS]
        if utils.SOONG_EXPORTED in gen_metadata:
            self.__metadata[component][utils.META_EXPORTS][utils.META_MODULES] = gen_metadata[utils.SOONG_EXPORTED]

        return True

    def __update_export_map(self):
        """Read metadata of all components and update the export map.

        'libraries' and 'APIs' are special exported types that are provided
        manually from the .yaml metadata files. These need to be replaced with
        the implementation in soong generated metadata.

        The export type 'module' is generated from the soong build system from
        the modules with 'export: true' property. This export type includes a
        dictionary with module names as keys and their output files as values.
        These output files will be used as prebuilt sources when generating the
        imported modules.
        """
        self.__map_exports = dict()
        for comp in self.__metadata:
            if utils.META_EXPORTS not in self.__metadata[comp]:
                continue
            exports = self.__metadata[comp][utils.META_EXPORTS]

            for export_type in exports:
                for module in exports[export_type]:
                    if export_type == utils.META_LIBS:
                        # 'libraries' entries are dicts with a 'name' key.
                        name = module[utils.META_LIB_NAME]
                    else:
                        name = module

                    # An item may be exported by at most one component.
                    # NOTE(review): __map_exports stores ExportedDep objects,
                    # so subscripting with utils.EXP_COMPONENT below looks
                    # stale — likely should be `.component`. TODO confirm.
                    if name in self.__map_exports:
                        raise RuntimeError(
                            'Exported libs conflict!!!: "{name}" in the {comp} component is already exported by the {prev} component.'
                            .format(name=name, comp=comp,
                                    prev=self.__map_exports[name][utils.EXP_COMPONENT]))
                    exported_deps = ExportedDep(name, comp, export_type)
                    if export_type == utils.META_MODULES:
                        exported_deps.setOutputPaths(exports[export_type][module])
                    self.__map_exports[name] = exported_deps

    def __verify_and_add_dependencies(self, component):
        """Search all imported items from the export_map.

        If any imported items are not provided by the other components, report
        an error. Otherwise, add the component dependency and update the
        exported information to the import maps.
        """
        def verify_and_add_dependencies(imported_dep: ImportedDep):
            for module in imported_dep.imported_modules:
                if module not in self.__map_exports:
                    raise RuntimeError(
                        'Imported item not found!!!: Imported module "{module}" in the {comp} component is not exported from any other components.'
                        .format(module=module, comp=imported_dep.component))
                imported_dep.verify_and_add(self.__map_exports[module])

                # Record the provider component under this component's 'deps'.
                deps = self.__metadata[component][utils.META_DEPS]
                exp_comp = self.__map_exports[module].component
                if exp_comp not in deps:
                    deps[exp_comp] = defaultdict(defaultdict)
                deps[exp_comp][imported_dep.type][imported_dep.name] = imported_dep.out_paths

        # Rebuild 'deps' from scratch for this component.
        self.__metadata[component][utils.META_DEPS] = defaultdict()
        imports = self.__metadata[component][utils.META_IMPORTS]
        for import_type in imports:
            for module in imports[import_type]:
                verify_and_add_dependencies(ImportedDep(module, component, import_type, imports[import_type]))

    def __check_imports(self):
        """Search the export map to find the component to import libraries or
        APIs. Update the 'deps' field that includes the dependent components.
        """
        for component in self.__metadata:
            self.__verify_and_add_dependencies(component)
            if utils.META_DEPS in self.__metadata[component]:
                logging.debug('{comp} depends on {list} components'.format(
                    comp=component, list=self.__metadata[component][utils.META_DEPS]))

    def __update_dependencies(self):
        """Generate a dependency graph for the components.

        Update __map_exports and the dependency graph with the maps.
        """
        self.__update_export_map()
        self.__check_imports()

    def __store_metadata(self):
        """Store the __metadata dictionary as json format."""
        with open(self.__combined_metadata_path, 'w') as json_file:
            json.dump(self.__metadata, json_file, indent=2)

    def __restore_metadata(self):
        """Read the stored json file and return the time stamp of the
        combined metadata file (None when missing or unreadable).
        """
        if not os.path.exists(self.__combined_metadata_path):
            return None

        with open(self.__combined_metadata_path, 'r') as json_file:
            try:
                self.__metadata = json.load(json_file)
            except json.decoder.JSONDecodeError:
                logging.warning('JSONDecodeError!!!: skip reading the {} file'.format(
                    self.__combined_metadata_path))
                return None

        logging.info('Metadata restored from {}'.format(
            self.__combined_metadata_path))
        self.__update_export_map()
        return os.path.getmtime(self.__combined_metadata_path)
def get_args():
    """Builds and parses the command line arguments for the collector."""

    def check_dir(path):
        # argparse type hook: accept only existing directories.
        if os.path.exists(path) and os.path.isdir(path):
            return os.path.normpath(path)
        raise argparse.ArgumentTypeError('"{}" is not a directory'.format(path))

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--component-top',
        type=check_dir,
        default=os.path.join(os.path.dirname(__file__), '../../../components'),
        help='Scan all components under this directory.')
    parser.add_argument(
        '--meta-file',
        type=str,
        default=COMPONENT_METADATA_FILE,
        help='Name of the metadata file.')
    parser.add_argument(
        '--meta-dir',
        type=str,
        default=COMPONENT_METADATA_DIR,
        help='Each component has the metadata in this directory.')
    parser.add_argument(
        '--out-dir',
        type=os.path.normpath,
        default=os.path.join(os.path.dirname(__file__), '../../../out'),
        help='Out dir for the outer tree. The orchestrator stores the collected metadata in this directory.')
    parser.add_argument(
        '--force',
        '-f',
        action='store_true',
        help='Force to collect metadata')
    parser.add_argument(
        '--verbose',
        '-v',
        action='count',
        default=0,
        help='Increase output verbosity, e.g. "-v", "-vv".')
    return parser.parse_args()
def main():
    """Entry point: collect component metadata per command line options."""
    args = get_args()
    utils.set_logging_config(args.verbose)

    collector = MetadataCollector(args.component_top, args.out_dir,
                                  args.meta_dir, args.meta_file, args.force)
    collector.collect()


if __name__ == '__main__':
    main()

View file

@ -0,0 +1,48 @@
#!/bin/bash
# Validate a {product}-{variant} selection and export the BUFFET_* env vars.
function buffet()
{
    local product variant selection
    if [[ $# -ne 1 ]]; then
        echo "usage: buffet [target]" >&2
        return 1
    fi

    selection=$1
    product=${selection%%-*} # Trim everything after first dash
    variant=${selection#*-}  # Trim everything up to first dash

    if [ -z "$product" ]
    then
        echo
        echo "Invalid lunch combo: $selection"
        return 1
    fi

    # ${selection#*-} leaves the string unchanged when there is no dash at
    # all, so the empty-variant test alone never fired for inputs like
    # "aosp_eng"; check for a missing dash explicitly.
    if [[ "$selection" != *-* || -z "$variant" ]]
    then
        if [[ "$product" =~ .*_(eng|user|userdebug) ]]
        then
            echo "Did you mean -${product/*_/}? (dash instead of underscore)"
        fi
        return 1
    fi

    # Let the helper validate the target against the hierarchy first.
    BUFFET_BUILD_TOP=$(pwd) python3 tools/build/orchestrator/buffet_helper.py $1 || return 1

    export BUFFET_BUILD_TOP=$(pwd)
    export BUFFET_COMPONENTS_TOP=$BUFFET_BUILD_TOP/components
    export BUFFET_TARGET_PRODUCT=$product
    export BUFFET_TARGET_BUILD_VARIANT=$variant
    export BUFFET_TARGET_BUILD_TYPE=release
}
# Forward all build goals to the orchestrator; requires a prior buffet() call.
function m()
{
    if [ -z "$BUFFET_BUILD_TOP" ]
    then
        echo "Run \"buffet [target]\" first"
        return 1
    fi
    # Quote the path so a build top containing spaces does not word-split.
    python3 "$BUFFET_BUILD_TOP/tools/build/orchestrator/build_helper.py" "$@"
}

View file

@ -0,0 +1,79 @@
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import yaml
def parse_hierarchy(build_top):
    """Parse build hierarchy file from given build top directory, and returns
    a dict from child targets to parent targets.

    Example of hierarchy file:
    ==========
    aosp_arm64:
    - armv8
    - aosp_cf_arm64_phone

    armv8:
    - aosp_oriole
    - aosp_sunfish

    aosp_oriole:
    - oriole

    aosp_sunfish:
    - sunfish

    oriole:
    # leaf

    sunfish:
    # leaf
    ==========

    If we parse this yaml, we get a dict looking like:

    {
        "sunfish": "aosp_sunfish",
        "oriole": "aosp_oriole",
        "aosp_oriole": "armv8",
        "aosp_sunfish": "armv8",
        "armv8": "aosp_arm64",
        "aosp_cf_arm64_phone": "aosp_arm64",
        "aosp_arm64": None,  # no parent
    }
    """
    metadata_path = os.path.join(build_top, 'tools', 'build', 'hierarchy.yaml')
    if not os.path.isfile(metadata_path):
        raise RuntimeError("target metadata file %s doesn't exist" % metadata_path)

    with open(metadata_path, 'r') as f:
        hierarchy_yaml = yaml.load(f, Loader=yaml.SafeLoader)

    child_to_parent = dict()
    for parent, children in hierarchy_yaml.items():
        # Leaf targets have no children (None in the yaml).
        for child in children or ():
            child_to_parent[child] = parent

    # Targets that are nobody's child are roots and map to None.
    for parent in hierarchy_yaml:
        child_to_parent.setdefault(parent, None)

    return child_to_parent

View file

@ -0,0 +1,37 @@
# hierarchy of targets
aosp_arm64:
- armv8
- aosp_cf_arm64_phone
armv8:
- mainline_modules_arm64
mainline_modules_arm64:
- aosp_oriole
- aosp_sunfish
- aosp_raven
aosp_oriole:
- oriole
aosp_sunfish:
- sunfish
aosp_raven:
- raven
oriole:
# leaf
sunfish:
# leaf
raven:
# leaf
aosp_cf_arm64_phone:
- cf_arm64_phone
cf_arm64_phone:
# leaf

View file

@ -0,0 +1,89 @@
# Copyright (C) 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import os
# default build configuration for each component
DEFAULT_BUILDCMD = 'm'
DEFAULT_OUTDIR = 'out'

# yaml fields (keys of each component's treeinfo.yaml)
META_BUILDCMD = 'build_cmd'
META_OUTDIR = 'out_dir'
META_EXPORTS = 'exports'
META_IMPORTS = 'imports'
META_TARGETS = 'lunch_targets'
META_DEPS = 'deps'
# fields under 'exports' and 'imports'
META_LIBS = 'libraries'
META_APIS = 'APIs'
META_FILEGROUP = 'filegroup'
META_MODULES = 'modules'
# fields under 'libraries'
META_LIB_NAME = 'name'

# fields for generated metadata file (produced by soong's 'update-meta')
SOONG_IMPORTED = 'Imported'
SOONG_IMPORTED_FILEGROUPS = 'FileGroups'
SOONG_EXPORTED = 'Exported'

# export map items
EXP_COMPONENT = 'component'
EXP_TYPE = 'type'
EXP_OUTPATHS = 'outpaths'
class BuildContext:
    """Snapshot of the BUFFET_* environment exported by the buffet() shell
    function.

    Raises:
        RuntimeError: if BUFFET_BUILD_TOP is not set, i.e. buffet() has not
            been run in this shell.
    """

    def __init__(self):
        self._build_top = os.getenv('BUFFET_BUILD_TOP')
        self._components_top = os.getenv('BUFFET_COMPONENTS_TOP')
        self._target_product = os.getenv('BUFFET_TARGET_PRODUCT')
        self._target_build_variant = os.getenv('BUFFET_TARGET_BUILD_VARIANT')
        self._target_build_type = os.getenv('BUFFET_TARGET_BUILD_TYPE')

        # Validate BEFORE deriving paths: os.path.join(None, 'out') would
        # raise a TypeError and hide the helpful message below.
        if not self._build_top:
            raise RuntimeError("Can't find root. Did you run buffet?")
        self._out_dir = os.path.join(self._build_top, 'out')

    def build_top(self):
        return self._build_top

    def components_top(self):
        return self._components_top

    def target_product(self):
        return self._target_product

    def target_build_variant(self):
        return self._target_build_variant

    def target_build_type(self):
        return self._target_build_type

    def out_dir(self):
        return self._out_dir
def get_build_context():
    """Factory returning a fresh BuildContext built from the environment."""
    return BuildContext()
def set_logging_config(verbose_level):
    """Configures root logging based on a -v/-vv style verbosity count.

    0 -> WARNING, 1 -> INFO, 2 or more -> DEBUG. The count is clamped into
    range, so a negative value falls back to WARNING.
    """
    verbose_map = (logging.WARNING, logging.INFO, logging.DEBUG)
    # Clamp below as well as above: min() alone let a negative count index
    # from the end of the tuple (or raise IndexError for counts < -3).
    verbosity = max(0, min(int(verbose_level), 2))
    logging.basicConfig(
        format='%(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
        level=verbose_map[verbosity])