Move orchestrator/ to its own project.

Bug: 240497793
Test: manual
Change-Id: I8eafae946fbf5113b1d4ff4ebbd19aeb14cc2970
LaMont Jones 2022-08-04 21:23:20 +00:00
parent ef1d2f06c6
commit 4bcca6d8d3
54 changed files with 0 additions and 3368 deletions

@@ -1,8 +0,0 @@
DEMO
From the root of the workspace:
multitree_lunch build/build/make/orchestrator/test_workspace/combo.mcombo eng
rm -rf out && multitree_build && echo "==== Files ====" && find out -type f

@@ -1,156 +0,0 @@
#!/usr/bin/python3
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import json
import os
import sys
import api_assembly_cc
import ninja_tools
ContributionData = collections.namedtuple("ContributionData", ("inner_tree", "json_data"))
def assemble_apis(context, inner_trees):
# Find all of the contributions from the inner tree
contribution_files_dict = inner_trees.for_each_tree(api_contribution_files_for_inner_tree)
# Load and validate the contribution files
# TODO: Check timestamps and skip unnecessary work
contributions = []
for tree_key, filenames in contribution_files_dict.items():
for filename in filenames:
json_data = load_contribution_file(context, filename)
if not json_data:
continue
# TODO: Validate the configs, especially that the domains match what we asked for
# from the lunch config.
contributions.append(ContributionData(inner_trees.get(tree_key), json_data))
# Group contributions by language and API surface
stub_libraries = collate_contributions(contributions)
# Initialize the ninja file writer
with open(context.out.api_ninja_file(), "w") as ninja_file:
ninja = ninja_tools.Ninja(context, ninja_file)
# Initialize the build file writer
build_file = BuildFile() # TODO: parameters?
# Iterate through all of the stub libraries and generate rules to assemble them
# and Android.bp/BUILD files to make those available to inner trees.
# TODO: Parallelize? Skip unnecessary work?
for stub_library in stub_libraries:
STUB_LANGUAGE_HANDLERS[stub_library.language](context, ninja, build_file, stub_library)
# TODO: Handle host_executables separately or as a StubLibrary language?
# Finish writing the ninja file
ninja.write()
def api_contribution_files_for_inner_tree(tree_key, inner_tree, cookie):
"Scan an inner_tree's out dir for the api contribution files."
directory = inner_tree.out.api_contributions_dir()
result = []
with os.scandir(directory) as it:
for dirent in it:
if not dirent.is_file():
continue
if dirent.name.endswith(".json"):
result.append(os.path.join(directory, dirent.name))
return result
def load_contribution_file(context, filename):
"Load and return the API contribution at filename. On error report error and return None."
with open(filename) as f:
try:
return json.load(f)
except json.decoder.JSONDecodeError as ex:
context.errors.error(ex.msg, filename, ex.lineno, ex.colno)
return None
class StubLibraryContribution(object):
def __init__(self, inner_tree, api_domain, library_contribution):
self.inner_tree = inner_tree
self.api_domain = api_domain
self.library_contribution = library_contribution
class StubLibrary(object):
def __init__(self, language, api_surface, api_surface_version, name):
self.language = language
self.api_surface = api_surface
self.api_surface_version = api_surface_version
self.name = name
self.contributions = []
def add_contribution(self, contrib):
self.contributions.append(contrib)
def collate_contributions(contributions):
"""Take the list of parsed API contribution files, and group targets by API Surface, version,
language and library name, and return a StubLibrary object for each of those.
"""
grouped = {}
for contribution in contributions:
for language in STUB_LANGUAGE_HANDLERS.keys():
for library in contribution.json_data.get(language, []):
key = (language, contribution.json_data["name"],
contribution.json_data["version"], library["name"])
stub_library = grouped.get(key)
if not stub_library:
stub_library = StubLibrary(language, contribution.json_data["name"],
contribution.json_data["version"], library["name"])
grouped[key] = stub_library
stub_library.add_contribution(StubLibraryContribution(contribution.inner_tree,
contribution.json_data["api_domain"], library))
return list(grouped.values())
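# Illustration (added note, not part of the original file): with two hypothetical
# contribution files that each declare a "cc_libraries" entry named "libc" for a
# surface named "systemapi" at version 1, collate_contributions collapses them into
# one StubLibrary under the key
#   ("cc_libraries", "systemapi", 1, "libc")
# whose .contributions list holds one StubLibraryContribution per inner tree.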
def assemble_java_api_library(context, ninja, build_file, stub_library):
print("assembling java_api_library %s-%s %s from:" % (stub_library.api_surface,
stub_library.api_surface_version, stub_library.name))
for contrib in stub_library.contributions:
print(" %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
# TODO: Implement me
def assemble_resource_api_library(context, ninja, build_file, stub_library):
print("assembling resource_api_library %s-%s %s from:" % (stub_library.api_surface,
stub_library.api_surface_version, stub_library.name))
for contrib in stub_library.contributions:
print(" %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
# TODO: Implement me
STUB_LANGUAGE_HANDLERS = {
"cc_libraries": api_assembly_cc.assemble_cc_api_library,
"java_libraries": assemble_java_api_library,
"resource_libraries": assemble_resource_api_library,
}
class BuildFile(object):
"Abstract generator for Android.bp files and BUILD files."
pass

@@ -1,48 +0,0 @@
#!/usr/bin/python3
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
def assemble_cc_api_library(context, ninja, build_file, stub_library):
staging_dir = context.out.api_library_dir(stub_library.api_surface,
stub_library.api_surface_version, stub_library.name)
work_dir = context.out.api_library_work_dir(stub_library.api_surface,
stub_library.api_surface_version, stub_library.name)
# Generate rules to copy headers
includes = []
include_dir = os.path.join(staging_dir, "include")
for contrib in stub_library.contributions:
for headers in contrib.library_contribution["headers"]:
root = headers["root"]
for file in headers["files"]:
# TODO: Deal with collisions of the same name from multiple contributions
include = os.path.join(include_dir, file)
ninja.add_copy_file(include, os.path.join(contrib.inner_tree.root, root, file))
includes.append(include)
# Generate rule to run ndkstubgen
# Generate rule to compile stubs to library
# Generate phony rule to build the library
# TODO: This name probably conflicts with something
ninja.add_phony("-".join((stub_library.api_surface, str(stub_library.api_surface_version),
stub_library.name)), includes)
# Generate build files

@@ -1,28 +0,0 @@
#!/usr/bin/python3
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class ApiDomain(object):
def __init__(self, name, tree, product):
# Product will be None for modules
self.name = name
self.tree = tree
self.product = product
def __str__(self):
return "ApiDomain(name=\"%s\" tree.root=\"%s\" product=%s)" % (
self.name, self.tree.root,
"None" if self.product is None else "\"%s\"" % self.product)

@@ -1,20 +0,0 @@
#!/usr/bin/python3
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def export_apis_from_tree(tree_key, inner_tree, cookie):
inner_tree.invoke(["export_api_contributions"])

@@ -1,117 +0,0 @@
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import sys
import ninja_tools
import ninja_syntax # Has to be after ninja_tools because of the path hack
def final_packaging(context, inner_trees):
"""Pull together all of the previously defined rules into the final build stems."""
with open(context.out.outer_ninja_file(), "w") as ninja_file:
ninja = ninja_tools.Ninja(context, ninja_file)
# Add the api surfaces file
ninja.add_subninja(ninja_syntax.Subninja(context.out.api_ninja_file(), chDir=None))
# For each inner tree
for tree in inner_trees.keys():
# TODO: Verify that inner_tree.ninja was generated
# Read and verify file
build_targets = read_build_targets_json(context, tree)
if not build_targets:
continue
# Generate the ninja and build files for this inner tree
generate_cross_domain_build_rules(context, ninja, tree, build_targets)
# Finish writing the ninja file
ninja.write()
def read_build_targets_json(context, tree):
"""Read and validate the build_targets.json file for the given tree."""
try:
f = open(tree.out.build_targets_file())
except FileNotFoundError:
# It's allowed not to have any artifacts (e.g. if a tree is a light tree with only APIs)
return None
data = None
with f:
try:
data = json.load(f)
except json.decoder.JSONDecodeError as ex:
sys.stderr.write("Error parsing file: %s\n" % tree.out.build_targets_file())
# TODO: Error reporting
raise ex
# TODO: Better error handling
# TODO: Validate json schema
return data
def generate_cross_domain_build_rules(context, ninja, tree, build_targets):
"Generate the ninja and build files for the inner tree."
# Include the inner tree's inner_tree.ninja
ninja.add_subninja(ninja_syntax.Subninja(tree.out.main_ninja_file(), chDir=tree.root))
# Generate module rules and files
for module in build_targets.get("modules", []):
generate_shared_module(context, ninja, tree, module)
# Generate staging rules
staging_dir = context.out.staging_dir()
for staged in build_targets.get("staging", []):
# TODO: Enforce that dest isn't in disallowed subdir of out or absolute
dest = staged["dest"]
dest = os.path.join(staging_dir, dest)
if "src" in staged and "obj" in staged:
context.errors.error("Can't have both \"src\" and \"obj\" tags in \"staging\" entry."
) # TODO: Filename and line if possible
if "src" in staged:
ninja.add_copy_file(dest, os.path.join(tree.root, staged["src"]))
elif "obj" in staged:
ninja.add_copy_file(dest, os.path.join(tree.out.root(), staged["obj"]))
ninja.add_global_phony("staging", [dest])
# Generate dist rules
dist_dir = context.out.dist_dir()
for disted in build_targets.get("dist", []):
# TODO: Enforce that dest isn't absolute
dest = disted["dest"]
dest = os.path.join(dist_dir, dest)
ninja.add_copy_file(dest, os.path.join(tree.root, disted["src"]))
ninja.add_global_phony("dist", [dest])
def generate_shared_module(context, ninja, tree, module):
"""Generate ninja rules for the given build_targets.json defined module."""
module_name = module["name"]
module_type = module["type"]
share_dir = context.out.module_share_dir(module_type, module_name)
src_file = os.path.join(tree.root, module["file"])
if module_type == "apex":
ninja.add_copy_file(os.path.join(share_dir, module_name + ".apex"), src_file)
# TODO: Generate build file
else:
# TODO: Better error handling
raise Exception("Invalid module type: %s" % module)

@@ -1,210 +0,0 @@
#!/usr/bin/python3
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import subprocess
import sys
import textwrap
class InnerTreeKey(object):
"""Trees are identified uniquely by their root and the TARGET_PRODUCT they will use to build.
If a single tree uses two different products, then we won't make assumptions about
them sharing _anything_.
TODO: This is true for soong. It's more likely that bazel could do analysis for two
products at the same time in a single tree, so there's an optimization there to do
eventually."""
def __init__(self, root, product):
if isinstance(root, list):
self.melds = root[1:]
root = root[0]
else:
self.melds = []
self.root = root
self.product = product
def __str__(self):
return (f"TreeKey(root={enquote(self.root)} "
f"product={enquote(self.product)}")
def __hash__(self):
return hash((self.root, self.product))
def _cmp(self, other):
assert isinstance(other, InnerTreeKey)
if self.root < other.root:
return -1
if self.root > other.root:
return 1
if self.melds < other.melds:
return -1
if self.melds > other.melds:
return 1
if self.product == other.product:
return 0
if self.product is None:
return -1
if other.product is None:
return 1
if self.product < other.product:
return -1
return 1
def __eq__(self, other):
return self._cmp(other) == 0
def __ne__(self, other):
return self._cmp(other) != 0
def __lt__(self, other):
return self._cmp(other) < 0
def __le__(self, other):
return self._cmp(other) <= 0
def __gt__(self, other):
return self._cmp(other) > 0
def __ge__(self, other):
return self._cmp(other) >= 0
class InnerTree(object):
def __init__(self, context, paths, product):
"""Initialize with the inner tree root (relative to the workspace root)"""
if not isinstance(paths, list):
paths = [paths]
self.root = paths[0]
self.meld_dirs = paths[1:]
self.product = product
self.domains = {}
# TODO: Base directory on OUT_DIR
out_root = context.out.inner_tree_dir(self.root)
if product:
out_root += "_" + product
else:
out_root += "_unbundled"
self.out = OutDirLayout(out_root)
def __str__(self):
return (f"InnerTree(root={enquote(self.root)} "
f"product={enquote(self.product)} "
f"domains={enquote(list(self.domains.keys()))} "
f"meld={enquote(self.meld_dirs)})")
def invoke(self, args):
"""Call the inner tree command for this inner tree. Exits on failure."""
# TODO: Build time tracing
# Validate that there is a .inner_build command to run at the root of the tree
# so we can print a good error message
inner_build_tool = os.path.join(self.root, ".inner_build")
if not os.access(inner_build_tool, os.X_OK):
sys.stderr.write(
f"Unable to execute {inner_build_tool}. Is there an inner tree "
"or lunch combo misconfiguration?\n")
sys.exit(1)
# TODO: This is where we should set up the shared trees
# Build the command
cmd = [inner_build_tool, "--out_dir", self.out.root()]
for domain_name in sorted(self.domains.keys()):
cmd.append("--api_domain")
cmd.append(domain_name)
cmd += args
# Run the command
process = subprocess.run(cmd, shell=False)
# TODO: Probably want better handling of inner tree failures
if process.returncode:
sys.stderr.write(
f"Build error in inner tree: {self.root}\nstopping "
"multitree build.\n")
sys.exit(1)
class InnerTrees(object):
def __init__(self, trees, domains):
self.trees = trees
self.domains = domains
def __str__(self):
"Return a debugging dump of this object"
def _vals(values):
return ("\n" + " " * 16).join(sorted([str(t) for t in values]))
return textwrap.dedent(f"""\
InnerTrees {{
trees: [
{_vals(self.trees.values())}
]
domains: [
{_vals(self.domains.values())}
]
}}""")
def for_each_tree(self, func, cookie=None):
"""Call func for each of the inner trees once for each product that will be built in it.
The calls will be in a stable order.
Return a map of the InnerTreeKey to any results returned from func().
"""
result = {}
for key in sorted(self.trees.keys()):
result[key] = func(key, self.trees[key], cookie)
return result
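# Illustration (added note, not part of the original file): because the keys are
# sorted, for_each_tree visits trees in a stable order, e.g.
#   roots = inner_trees.for_each_tree(lambda key, tree, cookie: tree.root)
# returns a dict mapping each InnerTreeKey to that tree's root path.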
def get(self, tree_key):
"""Get an inner tree for tree_key"""
return self.trees.get(tree_key)
def keys(self):
"Get the keys for the inner trees in name order."
return [self.trees[k] for k in sorted(self.trees.keys())]
class OutDirLayout(object):
"""Encapsulates the logic about the layout of the inner tree out directories.
See also context.OutDir for outer tree out dir contents."""
def __init__(self, root):
"Initialize with the root of the OUT_DIR for the inner tree."
self._root = root
def root(self):
return self._root
def tree_info_file(self):
return os.path.join(self._root, "tree_info.json")
def api_contributions_dir(self):
return os.path.join(self._root, "api_contributions")
def build_targets_file(self):
return os.path.join(self._root, "build_targets.json")
def main_ninja_file(self):
return os.path.join(self._root, "inner_tree.ninja")
def enquote(s):
return json.dumps(s)

@@ -1,29 +0,0 @@
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
def interrogate_tree(tree_key, inner_tree, cookie):
inner_tree.invoke(["describe"])
info_json_filename = inner_tree.out.tree_info_file()
# TODO: Error handling
with open(info_json_filename) as f:
info_json = json.load(f)
# TODO: Check orchestrator protocol

@@ -1,417 +0,0 @@
#!/usr/bin/python3
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import glob
import json
import os
import sys
EXIT_STATUS_OK = 0
EXIT_STATUS_ERROR = 1
EXIT_STATUS_NEED_HELP = 2
def find_dirs(path, name, ttl=6):
"""Search at most ttl directories deep inside path for a directory called name
and yield directories that match."""
# The dance with subdirs is so that we recurse in sorted order.
subdirs = []
with os.scandir(path) as it:
for dirent in sorted(it, key=lambda x: x.name):
try:
if dirent.is_dir():
if dirent.name == name:
yield os.path.join(path, dirent.name)
elif ttl > 0:
subdirs.append(dirent.name)
except OSError:
# Consume filesystem errors, e.g. too many links, permission etc.
pass
for subdir in subdirs:
yield from find_dirs(os.path.join(path, subdir), name, ttl-1)
def walk_paths(path, matcher, ttl=10):
"""Do a traversal of all files under path yielding each file that matches
matcher."""
# First look for files, then recurse into directories as needed.
# The dance with subdirs is so that we recurse in sorted order.
subdirs = []
with os.scandir(path) as it:
for dirent in sorted(it, key=lambda x: x.name):
try:
if dirent.is_file():
if matcher(dirent.name):
yield os.path.join(path, dirent.name)
if dirent.is_dir():
if ttl > 0:
subdirs.append(dirent.name)
except OSError:
# Consume filesystem errors, e.g. too many links, permission etc.
pass
for subdir in sorted(subdirs):
yield from walk_paths(os.path.join(path, subdir), matcher, ttl-1)
def find_file(path, filename):
"""Return a file called filename inside path, no more than ttl levels deep.
Directories are searched alphabetically.
"""
for f in walk_paths(path, lambda x: x == filename):
return f
# TODO: When orchestrator is in its own git project remove the "build" and "make" here
class LunchContext(object):
"""Mockable container for lunch"""
def __init__(self, workspace_root, orchestrator_path_prefix_components=["build", "build", "make"]):
self.workspace_root = workspace_root
self.orchestrator_path_prefix_components = orchestrator_path_prefix_components
def find_config_dirs(context):
"""Find the configuration files in the well known locations inside workspace_root
<workspace_root>/<orchestrator>/<path>/<prefix>/orchestrator/multitree_combos
(AOSP devices, such as cuttlefish)
<workspace_root>/vendor/**/multitree_combos
(specific to a vendor and not open sourced)
<workspace_root>/device/**/multitree_combos
(specific to a vendor and are open sourced)
Directories are returned specifically in this order, so that aosp can't be
overridden, but vendor overrides device.
"""
# TODO: This is not looking in inner trees correctly.
yield os.path.join(context.workspace_root, *context.orchestrator_path_prefix_components, "orchestrator/multitree_combos")
dirs = ["vendor", "device"]
for d in dirs:
yield from find_dirs(os.path.join(context.workspace_root, d), "multitree_combos")
def find_named_config(context, shortname):
"""Find the config with the given shortname inside context.workspace_root.
Config directories are searched in the order described in find_config_dirs,
and inside those directories, alphabetically."""
filename = shortname + ".mcombo"
for config_dir in find_config_dirs(context):
found = find_file(config_dir, filename)
if found:
return found
return None
def parse_product_variant(s):
"""Split a PRODUCT-VARIANT name, or return None if it doesn't match that pattern."""
split = s.split("-")
if len(split) != 2:
return None
return split
def choose_config_from_args(context, args):
"""Return the config file we should use for the given argument,
or null if there's no file that matches that."""
if len(args) == 1:
# Prefer PRODUCT-VARIANT syntax so if there happens to be a matching
# file we don't match that.
pv = parse_product_variant(args[0])
if pv:
config = find_named_config(context, pv[0])
if config:
return (config, pv[1])
return None, None
# Look for a specifically named file
if os.path.isfile(args[0]):
return (args[0], args[1] if len(args) > 1 else None)
# That file didn't exist, return that we didn't find it.
return None, None
class ConfigException(Exception):
ERROR_IDENTIFY = "identify"
ERROR_PARSE = "parse"
ERROR_CYCLE = "cycle"
ERROR_VALIDATE = "validate"
def __init__(self, kind, message, locations=[], line=0):
"""Error thrown when loading and parsing configurations.
Args:
message: Error message to display to user
locations: List of filenames of the include history. The 0 index one
the location where the actual error occurred
"""
if len(locations):
s = locations[0]
if line:
s += ":"
s += str(line)
s += ": "
else:
s = ""
s += message
if len(locations):
for loc in locations[1:]:
s += "\n included from %s" % loc
super().__init__(s)
self.kind = kind
self.message = message
self.locations = locations
self.line = line
def load_config(filename):
"""Load a config, including processing the inherits fields.
Raises:
ConfigException on errors
"""
def load_and_merge(fn, visited):
with open(fn) as f:
try:
contents = json.load(f)
except json.decoder.JSONDecodeError as ex:
raise ConfigException(ConfigException.ERROR_PARSE, ex.msg, visited, ex.lineno)
# Merge all the parents into one data, with first-wins policy
inherited_data = {}
for parent in contents.get("inherits", []):
if parent in visited:
raise ConfigException(ConfigException.ERROR_CYCLE, "Cycle detected in inherits",
visited)
deep_merge(inherited_data, load_and_merge(parent, [parent,] + visited))
# Then merge inherited_data into contents, but what's already there will win.
deep_merge(contents, inherited_data)
contents.pop("inherits", None)
return contents
return load_and_merge(filename, [filename,])
def deep_merge(merged, addition):
"""Merge all fields of addition into merged. Pre-existing fields win."""
for k, v in addition.items():
if k in merged:
if isinstance(v, dict) and isinstance(merged[k], dict):
deep_merge(merged[k], v)
else:
merged[k] = v
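# Illustration (added note, not part of the original file): deep_merge applies
# first-wins semantics at every nesting level:
#   merged = {"a": 1, "d": {"x": 1}}
#   deep_merge(merged, {"a": 2, "b": 3, "d": {"y": 2}})
#   assert merged == {"a": 1, "b": 3, "d": {"x": 1, "y": 2}}
# The pre-existing "a" wins; new keys and new nested entries are filled in.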
def make_config_header(config_file, config, variant):
def make_table(rows):
maxcols = max([len(row) for row in rows])
widths = [0] * maxcols
for row in rows:
for i in range(len(row)):
widths[i] = max(widths[i], len(row[i]))
text = []
for row in rows:
rowtext = []
for i in range(len(row)):
cell = row[i]
rowtext.append(str(cell))
rowtext.append(" " * (widths[i] - len(cell)))
rowtext.append(" ")
text.append("".join(rowtext))
return "\n".join(text)
trees = [("Component", "Path", "Product"),
("---------", "----", "-------")]
def add_config_tuple(trees, entry, name):
if entry:
trees.append(
(name, entry.get("inner-tree"), entry.get("product", "")))
add_config_tuple(trees, config.get("system"), "system")
add_config_tuple(trees, config.get("vendor"), "vendor")
for k, v in config.get("modules", {}).items():
add_config_tuple(trees, v, k)
return """========================================
TARGET_BUILD_COMBO=%(TARGET_BUILD_COMBO)s
TARGET_BUILD_VARIANT=%(TARGET_BUILD_VARIANT)s
%(trees)s
========================================\n""" % {
"TARGET_BUILD_COMBO": config_file,
"TARGET_BUILD_VARIANT": variant,
"trees": make_table(trees),
}
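# Illustration (added note with hypothetical paths and products): the emitted
# header looks roughly like:
#   ========================================
#   TARGET_BUILD_COMBO=vendor/aa/bb/multitree_combos/v.mcombo
#   TARGET_BUILD_VARIANT=eng
#   Component Path       Product
#   --------- ----       -------
#   system    inner_tree some_product
#   ========================================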
def do_lunch(args):
"""Handle the lunch command."""
# Check that we're at the top of a multitree workspace by seeing if this script exists.
if not os.path.exists("build/build/make/orchestrator/core/lunch.py"):
sys.stderr.write("ERROR: lunch.py must be run from the root of a multi-tree workspace\n")
return EXIT_STATUS_ERROR
# Choose the config file
config_file, variant = choose_config_from_args(LunchContext("."), args)
if config_file is None:
sys.stderr.write("Can't find lunch combo file for: %s\n" % " ".join(args))
return EXIT_STATUS_NEED_HELP
if variant is None:
sys.stderr.write("Can't find variant for: %s\n" % " ".join(args))
return EXIT_STATUS_NEED_HELP
# Parse the config file
try:
config = load_config(config_file)
except ConfigException as ex:
sys.stderr.write(str(ex))
return EXIT_STATUS_ERROR
# Fail if the lunchable bit isn't set, because this isn't a usable config
if not config.get("lunchable", False):
sys.stderr.write("%s: Lunch config file (or inherited files) does not have the 'lunchable'"
% config_file)
sys.stderr.write(" flag set, which means it is probably not a complete lunch spec.\n")
return EXIT_STATUS_ERROR
# All the validation has passed, so print the name of the file and the variant
sys.stdout.write("%s\n" % config_file)
sys.stdout.write("%s\n" % variant)
# Write confirmation message to stderr
sys.stderr.write(make_config_header(config_file, config, variant))
return EXIT_STATUS_OK
def find_all_combo_files(context):
"""Find all .mcombo files in the prescribed locations in the tree."""
for dir in find_config_dirs(context):
for file in walk_paths(dir, lambda x: x.endswith(".mcombo")):
yield file
def is_file_lunchable(config_file):
"""Parse config_file, flatten the inheritance, and return whether it can be
used as a lunch target."""
try:
config = load_config(config_file)
except ConfigException as ex:
sys.stderr.write("%s" % ex)
return False
return config.get("lunchable", False)
def find_all_lunchable(context):
"""Find all mcombo files in the tree (rooted at context.workspace_root) that when
parsed (and inheritance is flattened) have lunchable: true."""
yield from (f for f in find_all_combo_files(context) if is_file_lunchable(f))
def load_current_config():
"""Load, validate and return the config as specified in TARGET_BUILD_COMBO. Throws
ConfigException if there is a problem."""
# Identify the config file
config_file = os.environ.get("TARGET_BUILD_COMBO")
if not config_file:
raise ConfigException(ConfigException.ERROR_IDENTIFY,
"TARGET_BUILD_COMBO not set. Run lunch or pass a combo file.")
# Parse the config file
config = load_config(config_file)
# Validate the config file
if not config.get("lunchable", False):
raise ConfigException(ConfigException.ERROR_VALIDATE,
"Lunch config file (or inherited files) does not have the 'lunchable'"
+ " flag set, which means it is probably not a complete lunch spec.",
[config_file,])
# TODO: Validate that:
# - there are no modules called system or vendor
# - everything has all the required files
variant = os.environ.get("TARGET_BUILD_VARIANT")
if not variant:
variant = "eng" # TODO: Is this the right default?
# Validate variant is user, userdebug or eng
return config_file, config, variant
def do_list():
"""Handle the --list command."""
lunch_context = LunchContext(".")
for f in sorted(find_all_lunchable(lunch_context)):
print(f)
def do_print(args):
"""Handle the --print command."""
# Parse args
if len(args) == 0:
config_file = os.environ.get("TARGET_BUILD_COMBO")
if not config_file:
sys.stderr.write("TARGET_BUILD_COMBO not set. Run lunch before building.\n")
return EXIT_STATUS_NEED_HELP
elif len(args) == 1:
config_file = args[0]
else:
return EXIT_STATUS_NEED_HELP
# Parse the config file
try:
config = load_config(config_file)
except ConfigException as ex:
sys.stderr.write(str(ex))
return EXIT_STATUS_ERROR
# Print the config in json form
json.dump(config, sys.stdout, indent=4)
return EXIT_STATUS_OK
def main(argv):
if len(argv) < 2 or argv[1] == "-h" or argv[1] == "--help":
return EXIT_STATUS_NEED_HELP
if len(argv) == 2 and argv[1] == "--list":
do_list()
return EXIT_STATUS_OK
if len(argv) == 2 and argv[1] == "--print":
return do_print(argv[2:])
return EXIT_STATUS_OK
if (len(argv) == 3 or len(argv) == 4) and argv[1] == "--lunch":
return do_lunch(argv[2:])
sys.stderr.write("Unknown lunch command: %s\n" % " ".join(argv[1:]))
return EXIT_STATUS_NEED_HELP
if __name__ == "__main__":
sys.exit(main(sys.argv))
# vim: sts=4:ts=4:sw=4

@@ -1,37 +0,0 @@
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
import sys
def run_ninja(context, targets):
"""Run ninja.
"""
# Construct the command
cmd = [
context.tools.ninja(),
"-f",
context.out.outer_ninja_file(),
] + targets
# Run the command
process = subprocess.run(cmd, shell=False)
# TODO: Probably want better handling of inner tree failures
if process.returncode:
sys.stderr.write("Build error in outer tree.\nstopping multitree build.\n")
sys.exit(1)

@@ -1,59 +0,0 @@
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
# Workaround for python include path
_ninja_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), "..", "ninja"))
if _ninja_dir not in sys.path:
sys.path.append(_ninja_dir)
import ninja_writer
from ninja_syntax import Variable, BuildAction, Rule, Pool, Subninja, Line
class Ninja(ninja_writer.Writer):
"""Some higher level constructs on top of raw ninja writing.
TODO: Not sure where these should be."""
def __init__(self, context, file):
super(Ninja, self).__init__(file)
self._context = context
self._did_copy_file = False
self._phonies = {}
def add_copy_file(self, copy_to, copy_from):
if not self._did_copy_file:
self._did_copy_file = True
rule = Rule("copy_file")
rule.add_variable("command", "mkdir -p ${out_dir} && " + self._context.tools.acp()
+ " -f ${in} ${out}")
self.add_rule(rule)
build_action = BuildAction(copy_to, "copy_file", inputs=[copy_from,],
implicits=[self._context.tools.acp()])
build_action.add_variable("out_dir", os.path.dirname(copy_to))
self.add_build_action(build_action)
def add_global_phony(self, name, deps):
"""Add a phony target where there are multiple places that will want to add to
the same phony. If you can, to save memory, use add_phony instead of this function."""
if not isinstance(deps, (list, tuple)):
raise Exception("Assertion failed: bad type of deps: %s" % type(deps))
self._phonies.setdefault(name, []).extend(deps)
def write(self):
for phony, deps in self._phonies.items():
self.add_phony(phony, deps)
super(Ninja, self).write()
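# Illustration (added note, not part of the original file): several call sites can
# extend the same phony target; the deps are merged into a single phony rule when
# write() runs:
#   ninja.add_global_phony("staging", ["out/staging/a"])
#   ninja.add_global_phony("staging", ["out/staging/b"])
#   ninja.write()  # emits one "staging" phony depending on both files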

@@ -1,121 +0,0 @@
#!/usr/bin/python3
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
sys.dont_write_bytecode = True
import api_assembly
import api_domain
import api_export
import final_packaging
import inner_tree
import tree_analysis
import interrogate
import lunch
import ninja_runner
import utils
EXIT_STATUS_OK = 0
EXIT_STATUS_ERROR = 1
API_DOMAIN_SYSTEM = "system"
API_DOMAIN_VENDOR = "vendor"
API_DOMAIN_MODULE = "module"
def process_config(context, lunch_config):
"""Returns a InnerTrees object based on the configuration requested in the lunch config."""
def add(domain_name, tree_root, product):
tree_key = inner_tree.InnerTreeKey(tree_root, product)
if tree_key in trees:
tree = trees[tree_key]
else:
tree = inner_tree.InnerTree(context, tree_root, product)
trees[tree_key] = tree
domain = api_domain.ApiDomain(domain_name, tree, product)
domains[domain_name] = domain
tree.domains[domain_name] = domain
trees = {}
domains = {}
system_entry = lunch_config.get("system")
if system_entry:
add(API_DOMAIN_SYSTEM, system_entry["inner-tree"],
system_entry["product"])
vendor_entry = lunch_config.get("vendor")
if vendor_entry:
add(API_DOMAIN_VENDOR, vendor_entry["inner-tree"],
vendor_entry["product"])
for module_name, module_entry in lunch_config.get("modules", {}).items():
add(module_name, module_entry["inner-tree"], None)
return inner_tree.InnerTrees(trees, domains)
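# Illustration (added note with hypothetical tree and product names; the keys are
# taken from the code above): a lunch config fragment that process_config accepts:
# {
#     "system": {"inner-tree": "aosp", "product": "aosp_cf_x86_64_phone"},
#     "vendor": {"inner-tree": "vendor_tree", "product": "cf_x86_64_phone"},
#     "modules": {"com.android.bionic": {"inner-tree": "bionic_tree"}}
# }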
def build():
# Choose the out directory, set up error handling, etc.
context = utils.Context(utils.choose_out_dir(), utils.Errors(sys.stderr))
# Read the lunch config file
try:
config_file, config, variant = lunch.load_current_config()
except lunch.ConfigException as ex:
sys.stderr.write("%s\n" % ex)
return EXIT_STATUS_ERROR
sys.stdout.write(lunch.make_config_header(config_file, config, variant))
# Construct the trees and domains dicts
inner_trees = process_config(context, config)
# 1. Interrogate the trees
inner_trees.for_each_tree(interrogate.interrogate_tree)
# TODO: Detect bazel-only mode
# 2a. API Export
inner_trees.for_each_tree(api_export.export_apis_from_tree)
# 2b. API Surface Assembly
api_assembly.assemble_apis(context, inner_trees)
# 3a. Inner tree analysis
tree_analysis.analyze_trees(context, inner_trees)
# 3b. Final Packaging Rules
final_packaging.final_packaging(context, inner_trees)
# 4. Build Execution
# TODO: Decide what we want the UX for selecting targets to be across
# branches... since there are very likely to be conflicting soong short
# names.
print("Running ninja...")
targets = ["staging", "system"]
ninja_runner.run_ninja(context, targets)
# Success!
return EXIT_STATUS_OK
def main(argv):
return build()
if __name__ == "__main__":
sys.exit(main(sys.argv))
# vim: sts=4:ts=4:sw=4

@@ -1 +0,0 @@
a

@@ -1 +0,0 @@
INVALID FILE

@@ -1,3 +0,0 @@
{
"lunchable": "true"
}

@@ -1,5 +0,0 @@
{
"inherits": [
"test/configs/parsing/cycles/2.mcombo"
]
}

@@ -1,6 +0,0 @@
{
"inherits": [
"test/configs/parsing/cycles/3.mcombo"
]
}

@@ -1,6 +0,0 @@
{
"inherits": [
"test/configs/parsing/cycles/1.mcombo"
]
}

@@ -1,13 +0,0 @@
{
"inherits": [
"test/configs/parsing/merge/2.mcombo",
"test/configs/parsing/merge/3.mcombo"
],
"in_1": "1",
"in_1_2": "1",
"merged": {
"merged_1": "1",
"merged_1_2": "1"
},
"dict_1": { "a" : "b" }
}

@@ -1,12 +0,0 @@
{
"in_1_2": "2",
"in_2": "2",
"in_2_3": "2",
"merged": {
"merged_1_2": "2",
"merged_2": "2",
"merged_2_3": "2"
},
"dict_2": { "a" : "b" }
}

@@ -1,10 +0,0 @@
{
"in_3": "3",
"in_2_3": "3",
"merged": {
"merged_3": "3",
"merged_2_3": "3"
},
"dict_3": { "a" : "b" }
}

@@ -1,133 +0,0 @@
#!/usr/bin/env python3
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
sys.dont_write_bytecode = True
import lunch
# Create a test LunchContext object
# Test workspace is in test/configs
# Orchestrator prefix inside it is build/make
test_lunch_context = lunch.LunchContext("test/configs", ["build", "make"])
class TestStringMethods(unittest.TestCase):
def test_find_dirs(self):
self.assertEqual([x for x in lunch.find_dirs("test/configs", "multitree_combos")], [
"test/configs/build/make/orchestrator/multitree_combos",
"test/configs/device/aa/bb/multitree_combos",
"test/configs/vendor/aa/bb/multitree_combos"])
def test_find_file(self):
# Finds the one in device first because this is searching from the root,
# not using find_named_config.
self.assertEqual(lunch.find_file("test/configs", "v.mcombo"),
"test/configs/device/aa/bb/multitree_combos/v.mcombo")
def test_find_config_dirs(self):
self.assertEqual([x for x in lunch.find_config_dirs(test_lunch_context)], [
"test/configs/build/make/orchestrator/multitree_combos",
"test/configs/vendor/aa/bb/multitree_combos",
"test/configs/device/aa/bb/multitree_combos"])
def test_find_named_config(self):
# Inside build/orchestrator, overriding device and vendor
self.assertEqual(lunch.find_named_config(test_lunch_context, "b"),
"test/configs/build/make/orchestrator/multitree_combos/b.mcombo")
# Nested dir inside a combo dir
self.assertEqual(lunch.find_named_config(test_lunch_context, "nested"),
"test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo")
# Inside vendor, overriding device
self.assertEqual(lunch.find_named_config(test_lunch_context, "v"),
"test/configs/vendor/aa/bb/multitree_combos/v.mcombo")
# Inside device
self.assertEqual(lunch.find_named_config(test_lunch_context, "d"),
"test/configs/device/aa/bb/multitree_combos/d.mcombo")
# Make sure we don't look too deep (for performance)
self.assertIsNone(lunch.find_named_config(test_lunch_context, "too_deep"))
def test_choose_config_file(self):
# Empty string argument
self.assertEqual(lunch.choose_config_from_args(test_lunch_context, [""]),
(None, None))
# A PRODUCT-VARIANT name
self.assertEqual(lunch.choose_config_from_args(test_lunch_context, ["v-eng"]),
("test/configs/vendor/aa/bb/multitree_combos/v.mcombo", "eng"))
# A PRODUCT-VARIANT name that conflicts with a file
self.assertEqual(lunch.choose_config_from_args(test_lunch_context, ["b-eng"]),
("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"))
# A PRODUCT-VARIANT that doesn't exist
self.assertEqual(lunch.choose_config_from_args(test_lunch_context, ["z-user"]),
(None, None))
# An explicit file
self.assertEqual(lunch.choose_config_from_args(test_lunch_context,
["test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"]),
("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"))
# An explicit file that doesn't exist
self.assertEqual(lunch.choose_config_from_args(test_lunch_context,
["test/configs/doesnt_exist.mcombo", "eng"]),
(None, None))
# An explicit file without a variant returns the file with a None variant
self.assertEqual(lunch.choose_config_from_args(test_lunch_context,
["test/configs/build/make/orchestrator/multitree_combos/b.mcombo"]),
("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", None))
def test_config_cycles(self):
# Test that we catch cycles
with self.assertRaises(lunch.ConfigException) as context:
lunch.load_config("test/configs/parsing/cycles/1.mcombo")
self.assertEqual(context.exception.kind, lunch.ConfigException.ERROR_CYCLE)
def test_config_merge(self):
# Test the merge logic
self.assertEqual(lunch.load_config("test/configs/parsing/merge/1.mcombo"), {
"in_1": "1",
"in_1_2": "1",
"merged": {"merged_1": "1",
"merged_1_2": "1",
"merged_2": "2",
"merged_2_3": "2",
"merged_3": "3"},
"dict_1": {"a": "b"},
"in_2": "2",
"in_2_3": "2",
"dict_2": {"a": "b"},
"in_3": "3",
"dict_3": {"a": "b"}
})
def test_list(self):
self.assertEqual(sorted(lunch.find_all_lunchable(test_lunch_context)),
["test/configs/build/make/orchestrator/multitree_combos/b.mcombo"])
if __name__ == "__main__":
unittest.main()
# vim: sts=4:ts=4:sw=4

@@ -1,24 +0,0 @@
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def analyze_trees(context, inner_trees):
inner_trees.for_each_tree(run_analysis)
def run_analysis(tree_key, inner_tree, cookie):
inner_tree.invoke(["analyze"])

@@ -1,141 +0,0 @@
#!/usr/bin/python3
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import platform
class Context(object):
"""Mockable container for global state."""
def __init__(self, out_root, errors):
self.out = OutDir(out_root)
self.errors = errors
self.tools = HostTools()
class TestContext(Context):
"Context for testing. The real Context is manually constructed in orchestrator.py."
def __init__(self, test_work_dir, test_name):
super(TestContext, self).__init__(os.path.join(test_work_dir, test_name),
Errors(None))
class OutDir(object):
"""Encapsulates the logic about the out directory at the outer-tree level.
See also inner_tree.OutDirLayout for inner tree out dir contents."""
def __init__(self, root):
"Initialize with the root of the OUT_DIR for the outer tree."
self._out_root = root
self._intermediates = "intermediates"
def root(self):
return self._out_root
def inner_tree_dir(self, tree_root):
"""Root directory for inner tree inside the out dir."""
return os.path.join(self._out_root, "trees", tree_root)
def api_ninja_file(self):
"""The ninja file that assembles API surfaces."""
return os.path.join(self._out_root, "api_surfaces.ninja")
def api_library_dir(self, surface, version, library):
"""Directory for all the contents of a library inside an API surface, including
the build files. Any intermediates should go in api_library_work_dir."""
return os.path.join(self._out_root, "api_surfaces", surface, str(version), library)
def api_library_work_dir(self, surface, version, library):
"""Intermediates / scratch directory for library inside an API surface."""
return os.path.join(self._out_root, self._intermediates, "api_surfaces", surface,
str(version), library)
def outer_ninja_file(self):
return os.path.join(self._out_root, "multitree.ninja")
def module_share_dir(self, module_type, module_name):
return os.path.join(self._out_root, "shared", module_type, module_name)
def staging_dir(self):
return os.path.join(self._out_root, "staging")
def dist_dir(self):
"The DIST_DIR provided or out/dist" # TODO: Look at DIST_DIR
return os.path.join(self._out_root, "dist")
class Errors(object):
"""Class for reporting and tracking errors."""
def __init__(self, stream):
"""Initialize Error reporter with a file-like object."""
self._stream = stream
self._all = []
def error(self, message, file=None, line=None, col=None):
"""Record the error message."""
s = ""
if file:
s += str(file)
s += ":"
if line:
s += str(line)
s += ":"
if col:
s += str(col)
s += ":"
if s:
s += " "
s += str(message)
if s[-1] != "\n":
s += "\n"
self._all.append(s)
if self._stream:
self._stream.write(s)
def had_error(self):
"""Return if there were any errors reported."""
return len(self._all)
def get_errors(self):
"""Get all errors that were reported."""
return self._all
class HostTools(object):
def __init__(self):
if platform.system() == "Linux":
self._arch = "linux-x86"
else:
raise Exception("Orchestrator running on an unknown system: %s" % platform.system())
# Some of these are called a lot, so pre-compute the strings to save memory
self._prebuilts = os.path.join("build", "prebuilts", "build-tools", self._arch, "bin")
self._acp = os.path.join(self._prebuilts, "acp")
self._ninja = os.path.join(self._prebuilts, "ninja")
def acp(self):
return self._acp
def ninja(self):
return self._ninja
def choose_out_dir():
"""Get the root of the out dir, either from the environment or by picking
a default."""
result = os.environ.get("OUT_DIR")
if result:
return result
else:
return "out"

@@ -1,46 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from hierarchy import parse_hierarchy
def main():
if len(sys.argv) != 2:
print('usage: %s {target}-{variant}' % sys.argv[0])
exit(1)
args = sys.argv[1].split('-')
if len(args) != 2:
print('target format: {target}-{variant}')
exit(1)
target, variant = args
if variant not in ['eng', 'user', 'userdebug']:
print('unknown variant "%s": expected "eng", "user" or "userdebug"' %
variant)
exit(1)
build_top = os.getenv('BUFFET_BUILD_TOP')
if not build_top:
print('BUFFET_BUILD_TOP is not set; did you correctly run envsetup.sh?')
exit(1)
hierarchy_map = parse_hierarchy(build_top)
if target not in hierarchy_map:
raise RuntimeError(
"unknown target '%s': couldn't find the target. Supported targets are: %s"
% (target, list(hierarchy_map.keys())))
hierarchy = [target]
while hierarchy_map[hierarchy[-1]]:
hierarchy.append(hierarchy_map[hierarchy[-1]])
print('Target hierarchy for %s: %s' % (target, hierarchy))
if __name__ == '__main__':
main()

@@ -1,367 +0,0 @@
#!/usr/bin/env python3
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import copy
import hierarchy
import json
import logging
import filecmp
import os
import shutil
import subprocess
import sys
import tempfile
import collect_metadata
import utils
BUILD_CMD_TO_ALL = (
'clean',
'installclean',
'update-meta',
)
BUILD_ALL_EXEMPTION = (
'art',
)
def get_supported_product(ctx, supported_products):
hierarchy_map = hierarchy.parse_hierarchy(ctx.build_top())
target = ctx.target_product()
while target not in supported_products:
if target not in hierarchy_map:
return None
target = hierarchy_map[target]
return target
def parse_goals(ctx, metadata, goals):
"""Parse goals and returns a map from each component to goals.
e.g.
"m main art timezone:foo timezone:bar" will return the following dict: {
"main": {"all"},
"art": {"all"},
"timezone": {"foo", "bar"},
}
"""
# for now, goal should look like:
# {component} or {component}:{subgoal}
ret = collections.defaultdict(set)
for goal in goals:
# check if the command is for all components
if goal in BUILD_CMD_TO_ALL:
ret['all'].add(goal)
continue
# should be {component} or {component}:{subgoal}
try:
component, subgoal = goal.split(':') if ':' in goal else (goal, 'all')
except ValueError:
raise RuntimeError(
'unknown goal: %s: should be {component} or {component}:{subgoal}' %
goal)
if component not in metadata:
raise RuntimeError('unknown goal: %s: component %s not found' %
(goal, component))
if not get_supported_product(ctx, metadata[component]['lunch_targets']):
raise RuntimeError("can't find matching target. Supported targets are: " +
str(metadata[component]['lunch_targets']))
ret[component].add(subgoal)
return ret
def find_cycle(metadata):
""" Finds a cyclic dependency among components.
This is for debugging.
"""
visited = set()
parent_node = dict()
in_stack = set()
# Returns a cycle if one is found
def dfs(node):
nonlocal visited, parent_node, in_stack
visited.add(node)
in_stack.add(node)
if 'deps' not in metadata[node]:
in_stack.remove(node)
return None
for dep in metadata[node]['deps']:
# We found a cycle (dep ~ node) if dep is still in the stack
if dep in in_stack:
cycle = [node]
while cycle[-1] != dep:
cycle.append(parent_node[cycle[-1]])
return cycle
# Else, continue searching
if dep in visited:
continue
parent_node[dep] = node
result = dfs(dep)
if result:
return result
in_stack.remove(node)
return None
for component in metadata:
if component in visited:
continue
result = dfs(component)
if result:
return result
return None
def topological_sort_components(metadata):
""" Performs topological sort on components.
If A depends on B, B appears first.
"""
# If A depends on B, we want B to appear before A. But the graph in metadata
# is represented as A -> B (B in metadata[A]['deps']). So we sort in the
# reverse order, and then reverse the result again to get the desired order.
indegree = collections.defaultdict(int)
for component in metadata:
if 'deps' not in metadata[component]:
continue
for dep in metadata[component]['deps']:
indegree[dep] += 1
component_queue = collections.deque()
for component in metadata:
if indegree[component] == 0:
component_queue.append(component)
result = []
while component_queue:
component = component_queue.popleft()
result.append(component)
if 'deps' not in metadata[component]:
continue
for dep in metadata[component]['deps']:
indegree[dep] -= 1
if indegree[dep] == 0:
component_queue.append(dep)
# If topological sort fails, there must be a cycle.
if len(result) != len(metadata):
cycle = find_cycle(metadata)
raise RuntimeError('circular dependency found among metadata: %s' % cycle)
return result[::-1]
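# Illustration (added note, not part of the original file): for
#   metadata = {"a": {"deps": ["b"]}, "b": {"deps": ["c"]}, "c": {}}
# this returns ["c", "b", "a"]: every component appears after the components it
# depends on, so dependencies build first.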
def add_dependency_goals(ctx, metadata, component, goals):
""" Adds goals that given component depends on."""
# For now, let's just add "all"
# TODO: add detailed goals (e.g. API build rules, library build rules, etc.)
if 'deps' not in metadata[component]:
return
for dep in metadata[component]['deps']:
goals[dep].add('all')
def sorted_goals_with_dependencies(ctx, metadata, parsed_goals):
""" Analyzes the dependency graph among components, adds build commands for
dependencies, and then sorts the goals.
Returns a list of tuples: (component_name, set of subgoals).
Builds should be run in the list's order.
"""
# TODO(inseob@): after topological sort, some components may be built in
# parallel.
topological_order = topological_sort_components(metadata)
combined_goals = copy.deepcopy(parsed_goals)
# Add build rules for each component's dependencies
# We do this in reverse order, so it can be transitive.
# e.g. if A depends on B and B depends on C, and we build A,
# C should also be built, in addition to B.
for component in topological_order[::-1]:
if component in combined_goals:
add_dependency_goals(ctx, metadata, component, combined_goals)
ret = []
for component in ['all'] + topological_order:
if component in combined_goals:
ret.append((component, combined_goals[component]))
return ret
def run_build(ctx, metadata, component, subgoals):
build_cmd = metadata[component]['build_cmd']
out_dir = metadata[component]['out_dir']
default_goals = ''
if 'default_goals' in metadata[component]:
default_goals = metadata[component]['default_goals']
if 'all' in subgoals:
goal = default_goals
else:
goal = ' '.join(subgoals)
build_vars = ''
if 'update-meta' in subgoals:
build_vars = 'TARGET_MULTITREE_UPDATE_META=true'
# TODO(inseob@): shell escape
cmd = [
'/bin/bash', '-c',
'source build/envsetup.sh && lunch %s-%s && %s %s %s' %
(get_supported_product(ctx, metadata[component]['lunch_targets']),
ctx.target_build_variant(), build_vars, build_cmd, goal)
]
logging.debug('cwd: ' + metadata[component]['path'])
logging.debug('running build: ' + str(cmd))
subprocess.run(cmd, cwd=metadata[component]['path'], check=True)
def run_build_all(ctx, metadata, subgoals):
for component in metadata:
if component in BUILD_ALL_EXEMPTION:
continue
run_build(ctx, metadata, component, subgoals)
def find_components(metadata, predicate):
for component in metadata:
if predicate(component):
yield component
def import_filegroups(metadata, component, exporting_component, target_file_pairs):
imported_filegroup_dir = os.path.join(metadata[component]['path'], 'imported', exporting_component)
bp_content = ''
for name, outpaths in target_file_pairs:
bp_content += ('filegroup {{\n'
' name: "{fname}",\n'
' srcs: [\n'.format(fname=name))
for outpath in outpaths:
bp_content += ' "{outfile}",\n'.format(outfile=os.path.basename(outpath))
bp_content += (' ],\n'
'}\n')
with tempfile.TemporaryDirectory() as tmp_dir:
with open(os.path.join(tmp_dir, 'Android.bp'), 'w') as fout:
fout.write(bp_content)
for _, outpaths in target_file_pairs:
for outpath in outpaths:
os.symlink(os.path.join(metadata[exporting_component]['path'], outpath),
os.path.join(tmp_dir, os.path.basename(outpath)))
cmp_result = filecmp.dircmp(tmp_dir, imported_filegroup_dir)
if os.path.exists(imported_filegroup_dir) and len(
cmp_result.left_only) + len(cmp_result.right_only) + len(
cmp_result.diff_files) == 0:
# Files are identical; nothing needs to be rewritten
logging.info(
'imported files exist and the contents are identical: {} -> {}'
.format(component, exporting_component))
return
logging.info('creating symlinks for imported files: {} -> {}'.format(
component, exporting_component))
# makedirs ensures the parent directories exist; rmtree then clears the stale leaf.
os.makedirs(imported_filegroup_dir, exist_ok=True)
shutil.rmtree(imported_filegroup_dir, ignore_errors=True)
# Copy rather than move so the TemporaryDirectory context can still
# clean up tmp_dir when the with-block exits.
shutil.copytree(tmp_dir, imported_filegroup_dir, symlinks=True)
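# Sketch of the generated Android.bp for a hypothetical filegroup
# ('my_sources', ['out/gen/a.txt', 'out/gen/b.txt']):
#   filegroup {
#       name: "my_sources",
#       srcs: [
#           "a.txt",
#           "b.txt",
#       ],
#   }
# with a.txt and b.txt symlinked alongside it from the exporting component.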
def prepare_build(metadata, component):
imported_dir = os.path.join(metadata[component]['path'], 'imported')
if utils.META_DEPS not in metadata[component]:
if os.path.exists(imported_dir):
logging.debug('remove {}'.format(imported_dir))
shutil.rmtree(imported_dir)
return
imported_components = set()
for exp_comp in metadata[component][utils.META_DEPS]:
if utils.META_FILEGROUP in metadata[component][utils.META_DEPS][exp_comp]:
filegroups = metadata[component][utils.META_DEPS][exp_comp][utils.META_FILEGROUP]
target_file_pairs = []
for name in filegroups:
target_file_pairs.append((name, filegroups[name]))
import_filegroups(metadata, component, exp_comp, target_file_pairs)
imported_components.add(exp_comp)
# Remove directories that are not generated this time.
if os.path.exists(imported_dir):
if len(imported_components) == 0:
shutil.rmtree(imported_dir)
else:
for remove_target in set(os.listdir(imported_dir)) - imported_components:
logging.info('remove unnecessary imported dir: {}'.format(remove_target))
shutil.rmtree(os.path.join(imported_dir, remove_target))
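# Resulting layout (sketch) for a hypothetical component 'art' importing
# filegroups from 'bionic':
#   <art path>/imported/bionic/Android.bp  (generated filegroup definitions)
#   <art path>/imported/bionic/...         (symlinks into bionic's outputs)
# Subdirectories for dependencies no longer in the metadata are pruned above.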
def main():
utils.set_logging_config(2)  # 2 selects DEBUG; see utils.set_logging_config
ctx = utils.get_build_context()
logging.info('collecting metadata')
goals = sys.argv[1:]
if not goals:
logging.debug('empty goals. defaults to main')
goals = ['main']
logging.debug('goals: ' + str(goals))
# Force update the metadata for the 'update-meta' build
metadata_collector = collect_metadata.MetadataCollector(
ctx.components_top(), ctx.out_dir(),
collect_metadata.COMPONENT_METADATA_DIR,
collect_metadata.COMPONENT_METADATA_FILE,
force_update='update-meta' in goals)
metadata_collector.collect()
metadata = metadata_collector.get_metadata()
logging.debug('metadata: ' + str(metadata))
parsed_goals = parse_goals(ctx, metadata, goals)
logging.debug('parsed goals: ' + str(parsed_goals))
sorted_goals = sorted_goals_with_dependencies(ctx, metadata, parsed_goals)
logging.debug('sorted goals with deps: ' + str(sorted_goals))
for component, subgoals in sorted_goals:
if component == 'all':
run_build_all(ctx, metadata, subgoals)
continue
prepare_build(metadata, component)
run_build(ctx, metadata, component, subgoals)
if __name__ == '__main__':
main()

View file

@ -1,428 +0,0 @@
#!/usr/bin/env python3
# Copyright (C) 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import copy
import json
import logging
import os
import sys
import yaml
from collections import defaultdict
from typing import (
List,
Set,
)
import utils
# SKIP_COMPONENT_SEARCH = (
# 'tools',
# )
COMPONENT_METADATA_DIR = '.repo'
COMPONENT_METADATA_FILE = 'treeinfo.yaml'
GENERATED_METADATA_FILE = 'metadata.json'
COMBINED_METADATA_FILENAME = 'multitree_meta.json'
class Dep(object):
def __init__(self, name, component, deps_type):
self.name = name
self.component = component
self.type = deps_type
self.out_paths = list()
class ExportedDep(Dep):
def __init__(self, name, component, deps_type):
super().__init__(name, component, deps_type)
def setOutputPaths(self, output_paths: list):
self.out_paths = output_paths
class ImportedDep(Dep):
required_type_map = {
# import type: (required type, get imported module list)
utils.META_FILEGROUP: (utils.META_MODULES, True),
}
def __init__(self, name, component, deps_type, import_map):
super().__init__(name, component, deps_type)
self.exported_deps: Set[ExportedDep] = set()
self.imported_modules: List[str] = list()
self.required_type = deps_type
get_imported_module = False
if deps_type in ImportedDep.required_type_map:
self.required_type, get_imported_module = ImportedDep.required_type_map[deps_type]
if get_imported_module:
self.imported_modules = import_map[name]
else:
self.imported_modules.append(name)
def verify_and_add(self, exported: ExportedDep):
if self.required_type != exported.type:
raise RuntimeError(
'{comp} component imports {module} for {imp_type}, but it is exported as {exp_type}.'
.format(comp=self.component, module=exported.name, imp_type=self.required_type, exp_type=exported.type))
self.exported_deps.add(exported)
self.out_paths.extend(exported.out_paths)
# Remove duplicates while preserving order. We can't use set(), which is not JSON serializable.
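# e.g. list(dict.fromkeys(['a', 'b', 'a'])) == ['a', 'b']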
self.out_paths = list(dict.fromkeys(self.out_paths))
class MetadataCollector(object):
"""Visit all component directories and collect the metadata from them.
Example of metadata:
==========
build_cmd: m # build command for this component. 'm' if omitted
out_dir: out # out dir of this component. 'out' if omitted
exports:
libraries:
- name: libopenjdkjvm
- name: libopenjdkjvmd
build_cmd: mma # build command for libopenjdkjvmd if specified
out_dir: out/soong # out dir for libopenjdkjvmd if specified
- name: libctstiagent
APIs:
- api1
- api2
imports:
libraries:
- lib1
- lib2
APIs:
- import_api1
- import_api2
lunch_targets:
- arm64
- x86_64
"""
def __init__(self, component_top, out_dir, meta_dir, meta_file, force_update=False):
if not os.path.exists(out_dir):
os.makedirs(out_dir)
self.__component_top = component_top
self.__out_dir = out_dir
self.__metadata_path = os.path.join(meta_dir, meta_file)
self.__combined_metadata_path = os.path.join(self.__out_dir,
COMBINED_METADATA_FILENAME)
self.__force_update = force_update
self.__metadata = dict()
self.__map_exports = dict()
self.__component_set = set()
def collect(self):
""" Read precomputed combined metadata from the json file.
If any components have updated their metadata, update the metadata
information and the json file.
"""
timestamp = self.__restore_metadata()
if timestamp and os.path.getmtime(__file__) > timestamp:
logging.info('Updating the metadata because the orchestrator has changed')
self.__force_update = True
self.__collect_from_components(timestamp)
def get_metadata(self):
""" Returns collected metadata from all components"""
if not self.__metadata:
logging.warning('Metadata is empty')
return copy.deepcopy(self.__metadata)
def __collect_from_components(self, timestamp):
""" Read metadata from all components
If any components have newer metadata files or are removed, update the
combined metadata.
"""
metadata_updated = False
for component in os.listdir(self.__component_top):
# if component in SKIP_COMPONENT_SEARCH:
# continue
if self.__read_component_metadata(timestamp, component):
metadata_updated = True
if self.__read_generated_metadata(timestamp, component):
metadata_updated = True
deleted_components = set()
for meta in self.__metadata:
if meta not in self.__component_set:
logging.info('Component {} is removed'.format(meta))
deleted_components.add(meta)
metadata_updated = True
for meta in deleted_components:
del self.__metadata[meta]
if metadata_updated:
self.__update_dependencies()
self.__store_metadata()
logging.info('Metadata updated')
def __read_component_metadata(self, timestamp, component):
""" Search for the metadata file from a component.
If the metadata is modified, read the file and update the metadata.
"""
component_path = os.path.join(self.__component_top, component)
metadata_file = os.path.join(component_path, self.__metadata_path)
logging.info(
'Reading a metadata file from {} component ...'.format(component))
if not os.path.isfile(metadata_file):
logging.warning('Metadata file {} not found!'.format(metadata_file))
return False
self.__component_set.add(component)
if not self.__force_update and timestamp and timestamp > os.path.getmtime(metadata_file):
logging.info('... yaml not changed. Skip')
return False
with open(metadata_file) as f:
meta = yaml.load(f, Loader=yaml.SafeLoader)
meta['path'] = component_path
if utils.META_BUILDCMD not in meta:
meta[utils.META_BUILDCMD] = utils.DEFAULT_BUILDCMD
if utils.META_OUTDIR not in meta:
meta[utils.META_OUTDIR] = utils.DEFAULT_OUTDIR
if utils.META_IMPORTS not in meta:
meta[utils.META_IMPORTS] = defaultdict(dict)
if utils.META_EXPORTS not in meta:
meta[utils.META_EXPORTS] = defaultdict(dict)
self.__metadata[component] = meta
return True
def __read_generated_metadata(self, timestamp, component):
""" Read a metadata gerated by 'update-meta' build command from the soong build system
Soong generate the metadata that has the information of import/export module/files.
Build orchestrator read the generated metadata to collect the dependency information.
Generated metadata has the following format:
{
"Imported": {
"FileGroups": {
"<name_of_filegroup>": [
"<exported_module_name>",
...
],
...
}
}
"Exported": {
"<exported_module_name>": [
"<output_file_path>",
...
],
...
}
}
"""
if component not in self.__component_set:
# skip reading generated metadata if the component metadata file was missing
return False
component_out = os.path.join(self.__component_top, component, self.__metadata[component][utils.META_OUTDIR])
generated_metadata_file = os.path.join(component_out, 'soong', 'multitree', GENERATED_METADATA_FILE)
if not os.path.isfile(generated_metadata_file):
logging.info('... Soong did not generate the metadata file. Skip')
return False
if not self.__force_update and timestamp and timestamp > os.path.getmtime(generated_metadata_file):
logging.info('... Soong generated metadata not changed. Skip')
return False
with open(generated_metadata_file, 'r') as gen_meta_json:
try:
gen_metadata = json.load(gen_meta_json)
except json.decoder.JSONDecodeError:
logging.warning('JSONDecodeError!!!: skip reading the {} file'.format(
generated_metadata_file))
return False
if utils.SOONG_IMPORTED in gen_metadata:
imported = gen_metadata[utils.SOONG_IMPORTED]
if utils.SOONG_IMPORTED_FILEGROUPS in imported:
self.__metadata[component][utils.META_IMPORTS][utils.META_FILEGROUP] = imported[utils.SOONG_IMPORTED_FILEGROUPS]
if utils.SOONG_EXPORTED in gen_metadata:
self.__metadata[component][utils.META_EXPORTS][utils.META_MODULES] = gen_metadata[utils.SOONG_EXPORTED]
return True
def __update_export_map(self):
""" Read metadata of all components and update the export map
'libraries' and 'APIs' are special exported types that are provided manually
in the .yaml metadata files. These need to be replaced with the implementations
from the soong-generated metadata.
The export type 'module' is generated by the soong build system from modules
with the 'export: true' property. This export type carries a dictionary with
module names as keys and their output files as values. The output files are
used as prebuilt sources when generating the imported modules.
"""
self.__map_exports = dict()
for comp in self.__metadata:
if utils.META_EXPORTS not in self.__metadata[comp]:
continue
exports = self.__metadata[comp][utils.META_EXPORTS]
for export_type in exports:
for module in exports[export_type]:
if export_type == utils.META_LIBS:
name = module[utils.META_LIB_NAME]
else:
name = module
if name in self.__map_exports:
raise RuntimeError(
'Exported libs conflict!!!: "{name}" in the {comp} component is already exported by the {prev} component.'
.format(name=name, comp=comp, prev=self.__map_exports[name][utils.EXP_COMPONENT]))
exported_deps = ExportedDep(name, comp, export_type)
if export_type == utils.META_MODULES:
exported_deps.setOutputPaths(exports[export_type][module])
self.__map_exports[name] = exported_deps
def __verify_and_add_dependencies(self, component):
""" Search all imported items from the export_map.
If any imported items are not provided by the other components, report
an error.
Otherwise, add the component dependency and update the exported information to the
import maps.
"""
def verify_and_add_dependencies(imported_dep: ImportedDep):
for module in imported_dep.imported_modules:
if module not in self.__map_exports:
raise RuntimeError(
'Imported item not found!!!: Imported module "{module}" in the {comp} component is not exported by any other component.'
.format(module=module, comp=imported_dep.component))
imported_dep.verify_and_add(self.__map_exports[module])
deps = self.__metadata[component][utils.META_DEPS]
exp_comp = self.__map_exports[module].component
if exp_comp not in deps:
deps[exp_comp] = defaultdict(defaultdict)
deps[exp_comp][imported_dep.type][imported_dep.name] = imported_dep.out_paths
self.__metadata[component][utils.META_DEPS] = defaultdict()
imports = self.__metadata[component][utils.META_IMPORTS]
for import_type in imports:
for module in imports[import_type]:
verify_and_add_dependencies(ImportedDep(module, component, import_type, imports[import_type]))
def __check_imports(self):
""" Search the export map to find the component to import libraries or APIs.
Update the 'deps' field that includes the dependent components.
"""
for component in self.__metadata:
self.__verify_and_add_dependencies(component)
if utils.META_DEPS in self.__metadata[component]:
logging.debug('{comp} depends on {list} components'.format(
comp=component, list=self.__metadata[component][utils.META_DEPS]))
def __update_dependencies(self):
""" Generate a dependency graph for the components
Update __map_exports and the dependency graph with the maps.
"""
self.__update_export_map()
self.__check_imports()
def __store_metadata(self):
""" Store the __metadata dictionary as json format"""
with open(self.__combined_metadata_path, 'w') as json_file:
json.dump(self.__metadata, json_file, indent=2)
def __restore_metadata(self):
""" Read the stored json file and return the time stamps of the
metadata file.
"""
if not os.path.exists(self.__combined_metadata_path):
return None
with open(self.__combined_metadata_path, 'r') as json_file:
try:
self.__metadata = json.load(json_file)
except json.decoder.JSONDecodeError:
logging.warning('JSONDecodeError!!!: skip reading the {} file'.format(
self.__combined_metadata_path))
return None
logging.info('Metadata restored from {}'.format(
self.__combined_metadata_path))
self.__update_export_map()
return os.path.getmtime(self.__combined_metadata_path)
def get_args():
def check_dir(path):
if os.path.exists(path) and os.path.isdir(path):
return os.path.normpath(path)
else:
raise argparse.ArgumentTypeError('\"{}\" is not a directory'.format(path))
parser = argparse.ArgumentParser()
parser.add_argument(
'--component-top',
help='Scan all components under this directory.',
default=os.path.join(os.path.dirname(__file__), '../../../components'),
type=check_dir)
parser.add_argument(
'--meta-file',
help='Name of the metadata file.',
default=COMPONENT_METADATA_FILE,
type=str)
parser.add_argument(
'--meta-dir',
help='Each component has the metadata in this directory.',
default=COMPONENT_METADATA_DIR,
type=str)
parser.add_argument(
'--out-dir',
help='Out dir for the outer tree. The orchestrator stores the collected metadata in this directory.',
default=os.path.join(os.path.dirname(__file__), '../../../out'),
type=os.path.normpath)
parser.add_argument(
'--force',
'-f',
action='store_true',
help='Force metadata collection',
)
parser.add_argument(
'--verbose',
'-v',
help='Increase output verbosity, e.g. "-v", "-vv".',
action='count',
default=0)
return parser.parse_args()
def main():
args = get_args()
utils.set_logging_config(args.verbose)
metadata_collector = MetadataCollector(args.component_top, args.out_dir,
args.meta_dir, args.meta_file, args.force)
metadata_collector.collect()
if __name__ == '__main__':
main()

View file

@ -1,48 +0,0 @@
#!/bin/bash
function buffet()
{
local product variant selection
if [[ $# -ne 1 ]]; then
echo "usage: buffet [target]" >&2
return 1
fi
selection=$1
product=${selection%%-*} # Trim everything after first dash
variant=${selection#*-} # Trim everything up to first dash
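# e.g. "aosp_cf_arm64_phone-userdebug" -> product="aosp_cf_arm64_phone", variant="userdebug"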
if [ -z "$product" ]
then
echo
echo "Invalid lunch combo: $selection"
return 1
fi
if [ -z "$variant" ]
then
if [[ "$product" =~ .*_(eng|user|userdebug) ]]
then
echo "Did you mean -${product/*_/}? (dash instead of underscore)"
fi
return 1
fi
BUFFET_BUILD_TOP=$(pwd) python3 tools/build/orchestrator/buffet_helper.py "$1" || return 1
export BUFFET_BUILD_TOP=$(pwd)
export BUFFET_COMPONENTS_TOP=$BUFFET_BUILD_TOP/components
export BUFFET_TARGET_PRODUCT=$product
export BUFFET_TARGET_BUILD_VARIANT=$variant
export BUFFET_TARGET_BUILD_TYPE=release
}
function m()
{
if [ -z "$BUFFET_BUILD_TOP" ]
then
echo "Run \"buffet [target]\" first"
return 1
fi
python3 $BUFFET_BUILD_TOP/tools/build/orchestrator/build_helper.py "$@"
}

View file

@ -1,79 +0,0 @@
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import yaml
def parse_hierarchy(build_top):
"""Parse build hierarchy file from given build top directory, and returns a dict from child targets to parent targets.
Example of hierarchy file:
==========
aosp_arm64:
- armv8
- aosp_cf_arm64_phone
armv8:
- aosp_oriole
- aosp_sunfish
aosp_oriole:
- oriole
aosp_sunfish:
- sunfish
oriole:
# leaf
sunfish:
# leaf
==========
If we parse this yaml, we get a dict looking like:
{
"sunfish": "aosp_sunfish",
"oriole": "aosp_oriole",
"aosp_oriole": "armv8",
"aosp_sunfish": "armv8",
"armv8": "aosp_arm64",
"aosp_cf_arm64_phone": "aosp_arm64",
"aosp_arm64": None, # no parent
}
"""
metadata_path = os.path.join(build_top, 'tools', 'build', 'hierarchy.yaml')
if not os.path.isfile(metadata_path):
raise RuntimeError("target metadata file %s doesn't exist" % metadata_path)
with open(metadata_path, 'r') as f:
hierarchy_yaml = yaml.load(f, Loader=yaml.SafeLoader)
hierarchy_map = dict()
for parent_target, child_targets in hierarchy_yaml.items():
if not child_targets:
# leaf
continue
for child_target in child_targets:
hierarchy_map[child_target] = parent_target
for parent_target in hierarchy_yaml:
# targets with no parent
if parent_target not in hierarchy_map:
hierarchy_map[parent_target] = None
return hierarchy_map
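# Usage sketch: walking from a leaf up to the root with the returned map.
#   hierarchy_map = parse_hierarchy(build_top)
#   target = 'oriole'
#   while target is not None:
#       print(target)  # oriole, aosp_oriole, armv8, aosp_arm64
#       target = hierarchy_map.get(target)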

View file

@ -1,37 +0,0 @@
# hierarchy of targets
aosp_arm64:
- armv8
- aosp_cf_arm64_phone
armv8:
- mainline_modules_arm64
mainline_modules_arm64:
- aosp_oriole
- aosp_sunfish
- aosp_raven
aosp_oriole:
- oriole
aosp_sunfish:
- sunfish
aosp_raven:
- raven
oriole:
# leaf
sunfish:
# leaf
raven:
# leaf
aosp_cf_arm64_phone:
- cf_arm64_phone
cf_arm64_phone:
# leaf

View file

@ -1,89 +0,0 @@
# Copyright (C) 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import os
# default build configuration for each component
DEFAULT_BUILDCMD = 'm'
DEFAULT_OUTDIR = 'out'
# yaml fields
META_BUILDCMD = 'build_cmd'
META_OUTDIR = 'out_dir'
META_EXPORTS = 'exports'
META_IMPORTS = 'imports'
META_TARGETS = 'lunch_targets'
META_DEPS = 'deps'
# fields under 'exports' and 'imports'
META_LIBS = 'libraries'
META_APIS = 'APIs'
META_FILEGROUP = 'filegroup'
META_MODULES = 'modules'
# fields under 'libraries'
META_LIB_NAME = 'name'
# fields for generated metadata file
SOONG_IMPORTED = 'Imported'
SOONG_IMPORTED_FILEGROUPS = 'FileGroups'
SOONG_EXPORTED = 'Exported'
# export map items
EXP_COMPONENT = 'component'
EXP_TYPE = 'type'
EXP_OUTPATHS = 'outpaths'
class BuildContext:
def __init__(self):
self._build_top = os.getenv('BUFFET_BUILD_TOP')
self._components_top = os.getenv('BUFFET_COMPONENTS_TOP')
self._target_product = os.getenv('BUFFET_TARGET_PRODUCT')
self._target_build_variant = os.getenv('BUFFET_TARGET_BUILD_VARIANT')
self._target_build_type = os.getenv('BUFFET_TARGET_BUILD_TYPE')
# Check _build_top before deriving paths from it; otherwise
# os.path.join(None, 'out') raises TypeError and masks this error.
if not self._build_top:
raise RuntimeError("Can't find root. Did you run buffet?")
self._out_dir = os.path.join(self._build_top, 'out')
def build_top(self):
return self._build_top
def components_top(self):
return self._components_top
def target_product(self):
return self._target_product
def target_build_variant(self):
return self._target_build_variant
def target_build_type(self):
return self._target_build_type
def out_dir(self):
return self._out_dir
def get_build_context():
return BuildContext()
def set_logging_config(verbose_level):
verbose_map = (logging.WARNING, logging.INFO, logging.DEBUG)
verbosity = min(verbose_level, 2)
logging.basicConfig(
format='%(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
level=verbose_map[verbosity])
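# e.g. set_logging_config(0) selects WARNING, set_logging_config(2) selects
# DEBUG; values above 2 are clamped to DEBUG.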

View file

@ -1,60 +0,0 @@
#!/usr/bin/python3
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import sys
def _parse_arguments(argv):
"""Return an argparse options object."""
# Drop the program name.
argv = argv[1:]
# Top-level parser
parser = argparse.ArgumentParser(prog=".inner_build")
parser.add_argument("--out_dir", action="store", required=True,
help="root of the output directory for this inner tree's API contributions")
parser.add_argument("--api_domain", action="append", required=True,
help="which API domains are to be built in this inner tree")
subparsers = parser.add_subparsers(required=True, dest="command",
help="subcommands")
# inner_build describe command
describe_parser = subparsers.add_parser("describe",
help="describe the capabilities of this inner tree's build system")
# create the parser for the "b" command
export_parser = subparsers.add_parser("export_api_contributions",
help="export the API contributions of this inner tree")
# create the parser for the "b" command
export_parser = subparsers.add_parser("analyze",
help="main build analysis for this inner tree")
# Parse the arguments
return parser.parse_args(argv)
class Commands(object):
def Run(self, argv):
"""Parse the command arguments and call the corresponding subcommand method on
this object.
Raises AttributeError if the method for the command wasn't found.
"""
args = _parse_arguments(argv)
return getattr(self, args.command)(args)

View file

@ -1,110 +0,0 @@
#!/usr/bin/python3
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import textwrap
sys.dont_write_bytecode = True
import common
def mkdirs(path):
try:
os.makedirs(path)
except FileExistsError:
pass
class InnerBuildDemo(common.Commands):
def describe(self, args):
mkdirs(args.out_dir)
with open(os.path.join(args.out_dir, "tree_info.json"), "w") as f:
f.write(textwrap.dedent("""\
{
"requires_ninja": true,
"orchestrator_protocol_version": 1
}"""))
def export_api_contributions(self, args):
contributions_dir = os.path.join(args.out_dir, "api_contributions")
mkdirs(contributions_dir)
if "system" in args.api_domain:
with open(os.path.join(contributions_dir, "api_a-1.json"), "w") as f:
# 'name: android' is android.jar
f.write(textwrap.dedent("""\
{
"name": "api_a",
"version": 1,
"api_domain": "system",
"cc_libraries": [
{
"name": "libhello1",
"headers": [
{
"root": "build/build/make/orchestrator/test_workspace/inner_tree_1",
"files": [
"hello1.h"
]
}
],
"api": [
"build/build/make/orchestrator/test_workspace/inner_tree_1/libhello1"
]
}
]
}"""))
def analyze(self, args):
if "system" in args.api_domain:
# Nothing to export in this demo
# Write a fake inner_tree.ninja; what the inner tree would have generated
with open(os.path.join(args.out_dir, "inner_tree.ninja"), "w") as f:
# TODO: Note that this uses paths relative to the workspace, not the inner
# tree, for demo purposes until we get the ninja chdir change in.
f.write(textwrap.dedent("""\
rule compile_c
command = mkdir -p ${out_dir} && g++ -c ${cflags} -o ${out} ${in}
rule link_so
command = mkdir -p ${out_dir} && gcc -shared -o ${out} ${in}
build %(OUT_DIR)s/libhello1/hello1.o: compile_c build/build/make/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
out_dir = %(OUT_DIR)s/libhello1
cflags = -Ibuild/build/make/orchestrator/test_workspace/inner_tree_1/libhello1/include
build %(OUT_DIR)s/libhello1/libhello1.so: link_so %(OUT_DIR)s/libhello1/hello1.o
out_dir = %(OUT_DIR)s/libhello1
build system: phony %(OUT_DIR)s/libhello1/libhello1.so
""" % { "OUT_DIR": args.out_dir }))
with open(os.path.join(args.out_dir, "build_targets.json"), "w") as f:
f.write(textwrap.dedent("""\
{
"staging": [
{
"dest": "staging/system/lib/libhello1.so",
"obj": "libhello1/libhello1.so"
}
]
}""" % { "OUT_DIR": args.out_dir }))
def main(argv):
return InnerBuildDemo().Run(argv)
if __name__ == "__main__":
sys.exit(main(sys.argv))
# vim: sts=4:ts=4:sw=4

View file

@ -1,37 +0,0 @@
#!/usr/bin/python3
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import sys
sys.dont_write_bytecode = True
import common
class InnerBuildSoong(common.Commands):
def describe(self, args):
pass
def export_api_contributions(self, args):
pass
def main(argv):
return InnerBuildSoong().Run(argv)
if __name__ == "__main__":
sys.exit(main(sys.argv))

View file

@ -1,16 +0,0 @@
{
"lunchable": true,
"system": {
"inner-tree": "aosp-master-with-phones",
"product": "aosp_cf_arm64_phone"
},
"vendor": {
"inner-tree": "aosp-master-with-phones",
"product": "aosp_cf_arm64_phone"
},
"modules": {
"com.android.bionic": {
"inner-tree": "aosp-master-with-phones"
}
}
}

View file

@ -1,16 +0,0 @@
{
"lunchable": true,
"system": {
"inner-tree": "inner_tree_system",
"product": "system_lunch_product"
},
"vendor": {
"inner-tree": "inner_tree_vendor",
"product": "vendor_lunch_product"
},
"modules": {
"com.android.something": {
"inner-tree": ["inner_tree_module", "sc-common"]
}
}
}

View file

@ -1,172 +0,0 @@
#!/usr/bin/env python
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import ABC, abstractmethod
from collections.abc import Iterator
from typing import List
TAB = " "
class Node(ABC):
'''An abstract class that can be serialized to a ninja file.
All other ninja-serializable classes inherit from this class.'''
@abstractmethod
def stream(self) -> Iterator[str]:
pass
class Variable(Node):
'''A ninja variable that can be reused across build actions
https://ninja-build.org/manual.html#_variables'''
def __init__(self, name:str, value:str, indent=0):
self.name = name
self.value = value
self.indent = indent
def stream(self) -> Iterator[str]:
indent = TAB * self.indent
yield f"{indent}{self.name} = {self.value}"
class RuleException(Exception):
pass
# Ninja rules recognize a limited set of variables
# https://ninja-build.org/manual.html#ref_rule
# Keep this list sorted
RULE_VARIABLES = ["command",
"depfile",
"deps",
"description",
"dyndep",
"generator",
"msvc_deps_prefix",
"restat",
"rspfile",
"rspfile_content"]
class Rule(Node):
'''A shorthand for a command line that can be reused
https://ninja-build.org/manual.html#_rules'''
def __init__(self, name:str):
self.name = name
self.variables = []
def add_variable(self, name: str, value: str):
if name not in RULE_VARIABLES:
raise RuleException(f"{name} is not a recognized variable in a ninja rule")
self.variables.append(Variable(name=name, value=value, indent=1))
def stream(self) -> Iterator[str]:
self._validate_rule()
yield f"rule {self.name}"
# Yield rule variables sorted by `name`
for var in sorted(self.variables, key=lambda x: x.name):
# variables yield a single item, next() is sufficient
yield next(var.stream())
def _validate_rule(self):
# command is a required variable in a ninja rule
self._assert_variable_is_not_empty(variable_name="command")
def _assert_variable_is_not_empty(self, variable_name: str):
if not any(var.name == variable_name for var in self.variables):
raise RuleException(f"{variable_name} is required in a ninja rule")
class BuildActionException(Exception):
pass
class BuildAction(Node):
'''Describes the dependency edge between inputs and output
https://ninja-build.org/manual.html#_build_statements'''
def __init__(self, output: str, rule: str, inputs: List[str]=None, implicits: List[str]=None, order_only: List[str]=None):
self.output = output
self.rule = rule
self.inputs = self._as_list(inputs)
self.implicits = self._as_list(implicits)
self.order_only = self._as_list(order_only)
self.variables = []
def add_variable(self, name: str, value: str):
'''Variables limited to the scope of this build action'''
self.variables.append(Variable(name=name, value=value, indent=1))
def stream(self) -> Iterator[str]:
self._validate()
build_statement = f"build {self.output}: {self.rule}"
if len(self.inputs) > 0:
build_statement += " "
build_statement += " ".join(self.inputs)
if len(self.implicits) > 0:
build_statement += " | "
build_statement += " ".join(self.implicits)
if len(self.order_only) > 0:
build_statement += " || "
build_statement += " ".join(self.order_only)
yield build_statement
# Yield variables sorted by `name`
for var in sorted(self.variables, key=lambda x: x.name):
# variables yield a single item, next() is sufficient
yield next(var.stream())
def _validate(self):
if not self.output:
raise BuildActionException("Output is required in a ninja build statement")
if not self.rule:
raise BuildActionException("Rule is required in a ninja build statement")
def _as_list(self, list_like):
if list_like is None:
return []
if isinstance(list_like, list):
return list_like
return [list_like]
class Pool(Node):
'''https://ninja-build.org/manual.html#ref_pool'''
def __init__(self, name: str, depth: int):
self.name = name
self.depth = Variable(name="depth", value=depth, indent=1)
def stream(self) -> Iterator[str]:
yield f"pool {self.name}"
yield next(self.depth.stream())
class Subninja(Node):
def __init__(self, subninja: str, chDir: str):
self.subninja = subninja
self.chDir = chDir
# TODO(spandandas): Update the syntax when aosp/2064612 lands
def stream(self) -> Iterator[str]:
yield f"subninja {self.subninja}"
class Line(Node):
'''Generic class that can be used for comments/newlines/default_target etc'''
def __init__(self, value:str):
self.value = value
def stream(self) -> Iterator[str]:
yield self.value
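# Minimal composition sketch (names are illustrative):
#   cc = Rule(name="cc")
#   cc.add_variable("command", "gcc -c $in -o $out")
#   build = BuildAction(output="foo.o", rule="cc", inputs=["foo.c"])
#   for node in (cc, build):
#       for line in node.stream():
#           print(line)
# prints "rule cc", the indented command variable, and "build foo.o: cc foo.c".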

View file

@ -1,59 +0,0 @@
#!/usr/bin/env python
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ninja_syntax import Variable, BuildAction, Rule, Pool, Subninja, Line
# TODO: Format the output according to a configurable width variable
# This will ensure that the generated content fits on a screen and does not
# require horizontal scrolling
class Writer:
def __init__(self, file):
self.file = file
self.nodes = [] # type Node
def add_variable(self, variable: Variable):
self.nodes.append(variable)
def add_rule(self, rule: Rule):
self.nodes.append(rule)
def add_build_action(self, build_action: BuildAction):
self.nodes.append(build_action)
def add_pool(self, pool: Pool):
self.nodes.append(pool)
def add_comment(self, comment: str):
self.nodes.append(Line(value=f"# {comment}"))
def add_default(self, default: str):
self.nodes.append(Line(value=f"default {default}"))
def add_newline(self):
self.nodes.append(Line(value=""))
def add_subninja(self, subninja: Subninja):
self.nodes.append(subninja)
def add_phony(self, name, deps):
build_action = BuildAction(name, "phony", inputs=deps)
self.add_build_action(build_action)
def write(self):
for node in self.nodes:
for line in node.stream():
print(line, file=self.file)

View file

@ -1,107 +0,0 @@
#!/usr/bin/env python
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from ninja_syntax import Variable, Rule, RuleException, BuildAction, BuildActionException, Pool
class TestVariable(unittest.TestCase):
def test_assignment(self):
variable = Variable(name="key", value="value")
self.assertEqual("key = value", next(variable.stream()))
variable = Variable(name="key", value="value with spaces")
self.assertEqual("key = value with spaces", next(variable.stream()))
variable = Variable(name="key", value="$some_other_variable")
self.assertEqual("key = $some_other_variable", next(variable.stream()))
def test_indentation(self):
variable = Variable(name="key", value="value", indent=0)
self.assertEqual("key = value", next(variable.stream()))
variable = Variable(name="key", value="value", indent=1)
self.assertEqual(" key = value", next(variable.stream()))
class TestRule(unittest.TestCase):
def test_rulename_comes_first(self):
rule = Rule(name="myrule")
rule.add_variable("command", "/bin/bash echo")
self.assertEqual("rule myrule", next(rule.stream()))
def test_command_is_a_required_variable(self):
rule = Rule(name="myrule")
with self.assertRaises(RuleException):
next(rule.stream())
def test_bad_rule_variable(self):
rule = Rule(name="myrule")
with self.assertRaises(RuleException):
rule.add_variable(name="unrecognize_rule_variable", value="value")
def test_rule_variables_are_indented(self):
rule = Rule(name="myrule")
rule.add_variable("command", "/bin/bash echo")
stream = rule.stream()
self.assertEqual("rule myrule", next(stream)) # top-level rule should not be indented
self.assertEqual(" command = /bin/bash echo", next(stream))
def test_rule_variables_are_sorted(self):
rule = Rule(name="myrule")
rule.add_variable("description", "Adding description before command")
rule.add_variable("command", "/bin/bash echo")
stream = rule.stream()
self.assertEqual("rule myrule", next(stream)) # rule always comes first
self.assertEqual(" command = /bin/bash echo", next(stream))
self.assertEqual(" description = Adding description before command", next(stream))
class TestBuildAction(unittest.TestCase):
def test_no_inputs(self):
build = BuildAction(output="out", rule="phony")
stream = build.stream()
self.assertEqual("build out: phony", next(stream))
# Empty output
build = BuildAction(output="", rule="phony")
with self.assertRaises(BuildActionException):
next(build.stream())
# Empty rule
build = BuildAction(output="out", rule="")
with self.assertRaises(BuildActionException):
next(build.stream())
def test_inputs(self):
build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"])
self.assertEqual("build out: cat input1 input2", next(build.stream()))
build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"], implicits=["implicits1", "implicits2"], order_only=["order_only1", "order_only2"])
self.assertEqual("build out: cat input1 input2 | implicits1 implicits2 || order_only1 order_only2", next(build.stream()))
def test_variables(self):
build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"])
build.add_variable(name="myvar", value="myval")
stream = build.stream()
next(stream)
self.assertEqual(" myvar = myval", next(stream))
class TestPool(unittest.TestCase):
def test_pool(self):
pool = Pool(name="mypool", depth=10)
stream = pool.stream()
self.assertEqual("pool mypool", next(stream))
self.assertEqual(" depth = 10", next(stream))
if __name__ == "__main__":
unittest.main()

View file

@ -1,54 +0,0 @@
#!/usr/bin/env python
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from io import StringIO
from ninja_writer import Writer
from ninja_syntax import Variable, Rule, BuildAction
class TestWriter(unittest.TestCase):
def test_simple_writer(self):
with StringIO() as f:
writer = Writer(f)
writer.add_variable(Variable(name="cflags", value="-Wall"))
writer.add_newline()
cc = Rule(name="cc")
cc.add_variable(name="command", value="gcc $cflags -c $in -o $out")
writer.add_rule(cc)
writer.add_newline()
build_action = BuildAction(output="foo.o", rule="cc", inputs=["foo.c"])
writer.add_build_action(build_action)
writer.write()
self.assertEqual('''cflags = -Wall
rule cc
command = gcc $cflags -c $in -o $out
build foo.o: cc foo.c
''', f.getvalue())
def test_comment(self):
with StringIO() as f:
writer = Writer(f)
writer.add_comment("This is a comment in a ninja file")
writer.write()
self.assertEqual("# This is a comment in a ninja file\n", f.getvalue())
if __name__ == "__main__":
unittest.main()

View file

@ -1,17 +0,0 @@
{
"lunchable": true,
"system": {
"tree": "build/build/make/orchestrator/test_workspace/inner_tree_1",
"product": "test_product1"
},
"vendor": {
"tree": "build/build/make/orchestrator/test_workspace/inner_tree_1",
"product": "test_product2"
},
"modules": {
"module_1": {
"tree": "build/build/make/orchestrator/test_workspace/inner_tree_1"
}
}
}

View file

@ -1 +0,0 @@
../../inner_build/inner_build_demo.py

View file

@ -1,8 +0,0 @@
#include <stdio.h>
#include "hello1.h"
void hello1(void) {
printf("hello1");
}

View file

@ -1,4 +0,0 @@
#pragma once
extern "C" void hello1(void);