// Copyright 2014 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package blueprint

import (
	"bytes"
	"context"
	"crypto/sha256"
	"encoding/base64"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"os"
	"path/filepath"
	"reflect"
	"runtime"
	"runtime/pprof"
	"sort"
	"strings"
	"sync"
	"sync/atomic"
	"text/scanner"
	"text/template"

	"github.com/google/blueprint/metrics"
	"github.com/google/blueprint/parser"
	"github.com/google/blueprint/pathtools"
	"github.com/google/blueprint/proptools"
)

var ErrBuildActionsNotReady = errors.New("build actions are not ready")

const maxErrors = 10

const MockModuleListFile = "bplist"

// A Context contains all the state needed to parse a set of Blueprints files
// and generate a Ninja file. The process of generating a Ninja file proceeds
// through a series of four phases. Each phase corresponds with some methods
// on the Context object
//
//	      Phase                            Methods
//	------------------      -------------------------------------------
//	1. Registration         RegisterModuleType, RegisterSingletonType
//
//	2. Parse                 ParseBlueprintsFiles, Parse
//
//	3. Generate              ResolveDependencies, PrepareBuildActions
//
//	4. Write                 WriteBuildFile
//
// The registration phase prepares the context to process Blueprints files
// containing various types of modules. The parse phase reads in one or more
// Blueprints files and validates their contents against the module types that
// have been registered. The generate phase then analyzes the parsed Blueprints
// contents to create an internal representation for the build actions that must
// be performed. This phase also performs validation of the module dependencies
// and property values defined in the parsed Blueprints files. Finally, the
// write phase generates the Ninja manifest text based on the generated build
// actions.
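//
// A rough sketch of a driver walking those phases in order (error handling
// elided; "Blueprints", NewMyModule, config and ninjaWriter are placeholder
// names, not part of this package, and exact call sites vary between
// Blueprint users):
//
//	ctx := blueprint.NewContext()
//	ctx.RegisterModuleType("my_module", NewMyModule) // 1. registration
//	ctx.ParseBlueprintsFiles("Blueprints", config)   // 2. parse
//	ctx.ResolveDependencies(config)                  // 3. generate
//	ctx.PrepareBuildActions(config)                  // 3. generate
//	ctx.WriteBuildFile(ninjaWriter)                  // 4. write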
type Context struct {
	context.Context

	// Used for metrics-related event logging.
	EventHandler *metrics.EventHandler

	BeforePrepareBuildActionsHook func() error

	moduleFactories     map[string]ModuleFactory
	nameInterface       NameInterface
	moduleGroups        []*moduleGroup
	moduleInfo          map[Module]*moduleInfo
	modulesSorted       []*moduleInfo
	preSingletonInfo    []*singletonInfo
	singletonInfo       []*singletonInfo
	mutatorInfo         []*mutatorInfo
	variantMutatorNames []string

	depsModified uint32 // positive if a mutator modified the dependencies

	dependenciesReady bool // set to true on a successful ResolveDependencies
	buildActionsReady bool // set to true on a successful PrepareBuildActions

	// set by SetIgnoreUnknownModuleTypes
	ignoreUnknownModuleTypes bool

	// set by SetAllowMissingDependencies
	allowMissingDependencies bool

	// set during PrepareBuildActions
	pkgNames        map[*packageContext]string
	liveGlobals     *liveTracker
	globalVariables map[Variable]ninjaString
	globalPools     map[Pool]*poolDef
	globalRules     map[Rule]*ruleDef

	// set during PrepareBuildActions
	outDir             ninjaString // The builddir special Ninja variable
	requiredNinjaMajor int         // For the ninja_required_version variable
	requiredNinjaMinor int         // For the ninja_required_version variable
	requiredNinjaMicro int         // For the ninja_required_version variable

	subninjas []string

	// set lazily by sortedModuleGroups
	cachedSortedModuleGroups []*moduleGroup
	// cache deps modified to determine whether cachedSortedModuleGroups needs to be recalculated
	cachedDepsModified bool

	globs    map[globKey]pathtools.GlobResult
	globLock sync.Mutex

	srcDir         string
	fs             pathtools.FileSystem
	moduleListFile string

	// Mutators indexed by the ID of the provider associated with them. Not all mutators will
	// have providers, and not all providers will have a mutator, or if they do the mutator may
	// not be registered in this Context.
	providerMutators []*mutatorInfo

	// The currently running mutator
	startedMutator *mutatorInfo
	// True for any mutators that have already run over all modules
	finishedMutators map[*mutatorInfo]bool

	// Can be set by tests to avoid invalidating Module values after mutators.
	skipCloneModulesAfterMutators bool

	// String values that can be used to gate build graph traversal
	includeTags *IncludeTags

	sourceRootDirs *SourceRootDirs
}

// SourceRootDirs is a list of source root directory prefixes. The prefixes can
// be used to gate which Blueprints files are visited during build graph
// traversal; a prefix starting with '-' marks the directories below it as
// disallowed.
type SourceRootDirs struct {
	dirs []string
}

func (dirs *SourceRootDirs) Add(names ...string) {
	dirs.dirs = append(dirs.dirs, names...)
}
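
// SourceRootDirAllowed reports whether a Blueprints file at path may be
// visited, along with the prefix that decided the answer. Prefixes are matched
// most-specific (longest) first, a prefix registered with a leading '-' marks
// its subtree as disallowed, and a path that matches no prefix is allowed.
// An illustrative (hypothetical) configuration:
//
//	dirs.Add("vendor", "-vendor/legacy")
//	dirs.SourceRootDirAllowed("vendor/new/Blueprints")    // true,  matched "vendor"
//	dirs.SourceRootDirAllowed("vendor/legacy/Blueprints") // false, matched "vendor/legacy"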
func (dirs *SourceRootDirs) SourceRootDirAllowed(path string) (bool, string) {
	sort.Slice(dirs.dirs, func(i, j int) bool {
		return len(dirs.dirs[i]) < len(dirs.dirs[j])
	})
	last := len(dirs.dirs)
	for i := range dirs.dirs {
		// iterate from longest paths (most specific)
		prefix := dirs.dirs[last-i-1]
		disallowedPrefix := false
		if len(prefix) >= 1 && prefix[0] == '-' {
			prefix = prefix[1:]
			disallowedPrefix = true
		}
		if strings.HasPrefix(path, prefix) {
			if disallowedPrefix {
				return false, prefix
			} else {
				return true, prefix
			}
		}
	}
	return true, ""
}

func (c *Context) AddSourceRootDirs(dirs ...string) {
	c.sourceRootDirs.Add(dirs...)
}

// A container for String keys. The keys can be used to gate build graph traversal.
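//
// A minimal sketch of how a build driver might gate traversal on a tag (the
// tag name "my_feature" is hypothetical):
//
//	ctx := blueprint.NewContext()
//	ctx.AddIncludeTags("my_feature")
//	if ctx.ContainsIncludeTag("my_feature") {
//		// modules gated on this tag will be visited
//	}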
type IncludeTags map[string]bool

func (tags *IncludeTags) Add(names ...string) {
	for _, name := range names {
		(*tags)[name] = true
	}
}

func (tags *IncludeTags) Contains(tag string) bool {
	_, exists := (*tags)[tag]
	return exists
}

func (c *Context) AddIncludeTags(names ...string) {
	c.includeTags.Add(names...)
}

func (c *Context) ContainsIncludeTag(name string) bool {
	return c.includeTags.Contains(name)
}

// An Error describes a problem that was encountered that is related to a
// particular location in a Blueprints file.
type BlueprintError struct {
	Err error            // the error that occurred
	Pos scanner.Position // the relevant Blueprints file location
}

// A ModuleError describes a problem that was encountered that is related to a
// particular module in a Blueprints file
type ModuleError struct {
	BlueprintError
	module *moduleInfo
}

// A PropertyError describes a problem that was encountered that is related to a
// particular property in a Blueprints file
type PropertyError struct {
	ModuleError
	property string
}

func (e *BlueprintError) Error() string {
	return fmt.Sprintf("%s: %s", e.Pos, e.Err)
}

func (e *ModuleError) Error() string {
	return fmt.Sprintf("%s: %s: %s", e.Pos, e.module, e.Err)
}

func (e *PropertyError) Error() string {
	return fmt.Sprintf("%s: %s: %s: %s", e.Pos, e.module, e.property, e.Err)
}

type localBuildActions struct {
	variables []*localVariable
	rules     []*localRule
	buildDefs []*buildDef
}

type moduleAlias struct {
	variant variant
	target  *moduleInfo
}

func (m *moduleAlias) alias() *moduleAlias              { return m }
func (m *moduleAlias) module() *moduleInfo              { return nil }
func (m *moduleAlias) moduleOrAliasTarget() *moduleInfo { return m.target }
func (m *moduleAlias) moduleOrAliasVariant() variant    { return m.variant }

func (m *moduleInfo) alias() *moduleAlias              { return nil }
func (m *moduleInfo) module() *moduleInfo              { return m }
func (m *moduleInfo) moduleOrAliasTarget() *moduleInfo { return m }
func (m *moduleInfo) moduleOrAliasVariant() variant    { return m.variant }

type moduleOrAlias interface {
	alias() *moduleAlias
	module() *moduleInfo
	moduleOrAliasTarget() *moduleInfo
	moduleOrAliasVariant() variant
}

type modulesOrAliases []moduleOrAlias

func (l modulesOrAliases) firstModule() *moduleInfo {
	for _, moduleOrAlias := range l {
		if m := moduleOrAlias.module(); m != nil {
			return m
		}
	}
	panic(fmt.Errorf("no first module!"))
}

func (l modulesOrAliases) lastModule() *moduleInfo {
	for i := range l {
		if m := l[len(l)-1-i].module(); m != nil {
			return m
		}
	}
	panic(fmt.Errorf("no last module!"))
}

type moduleGroup struct {
	name      string
	ninjaName string

	modules modulesOrAliases

	namespace Namespace
}

func (group *moduleGroup) moduleOrAliasByVariantName(name string) moduleOrAlias {
	for _, module := range group.modules {
		if module.moduleOrAliasVariant().name == name {
			return module
		}
	}
	return nil
}

func (group *moduleGroup) moduleByVariantName(name string) *moduleInfo {
	return group.moduleOrAliasByVariantName(name).module()
}

type moduleInfo struct {
	// set during Parse
	typeName          string
	factory           ModuleFactory
	relBlueprintsFile string
	pos               scanner.Position
	propertyPos       map[string]scanner.Position
	createdBy         *moduleInfo

	variant variant

	logicModule Module
	group       *moduleGroup
	properties  []interface{}

	// set during ResolveDependencies
	missingDeps   []string
	newDirectDeps []depInfo

	// set during updateDependencies
	reverseDeps []*moduleInfo
	forwardDeps []*moduleInfo
	directDeps  []depInfo

	// used by parallelVisit
	waitingCount int

	// set during each runMutator
	splitModules modulesOrAliases

	// Used by TransitionMutator implementations
	transitionVariations     []string
	currentTransitionMutator string
	requiredVariationsLock   sync.Mutex

	// set during PrepareBuildActions
	actionDefs localBuildActions

	providers []interface{}

	startedMutator  *mutatorInfo
	finishedMutator *mutatorInfo

	startedGenerateBuildActions  bool
	finishedGenerateBuildActions bool
}

type variant struct {
	name                 string
	variations           variationMap
	dependencyVariations variationMap
}

type depInfo struct {
	module *moduleInfo
	tag    DependencyTag
}

func (module *moduleInfo) Name() string {
	// If this is called from a LoadHook (which is run before the module has been registered)
	// then group will not be set and so the name is retrieved from logicModule.Name().
	// Usually, using that method is not safe as it does not track renames (group.name does).
	// However, when called from LoadHook it is safe as there is no way to rename a module
	// until after the LoadHook has run and the module has been registered.
	if module.group != nil {
		return module.group.name
	} else {
		return module.logicModule.Name()
	}
}

func (module *moduleInfo) String() string {
	s := fmt.Sprintf("module %q", module.Name())
	if module.variant.name != "" {
		s += fmt.Sprintf(" variant %q", module.variant.name)
	}
	if module.createdBy != nil {
		s += fmt.Sprintf(" (created by %s)", module.createdBy)
	}

	return s
}

func (module *moduleInfo) namespace() Namespace {
	return module.group.namespace
}

// A Variation is a way that a variant of a module differs from other variants of the same module.
// For example, two variants of the same module might have Variation{"arch","arm"} and
// Variation{"arch","arm64"}
type Variation struct {
	// Mutator is the axis on which this variation applies, i.e. "arch" or "link"
	Mutator string
	// Variation is the name of the variation on the axis, i.e. "arm" or "arm64" for arch, or
	// "shared" or "static" for link.
	Variation string
}

// A variationMap stores a map of Mutator to Variation to specify a variant of a module.
type variationMap map[string]string

func (vm variationMap) clone() variationMap {
	if vm == nil {
		return nil
	}
	newVm := make(variationMap)
	for k, v := range vm {
		newVm[k] = v
	}

	return newVm
}

// Compare this variationMap to another one. Returns true if every entry in this map
// exists and has the same value in the other map.
func (vm variationMap) subsetOf(other variationMap) bool {
	for k, v1 := range vm {
		if v2, ok := other[k]; !ok || v1 != v2 {
			return false
		}
	}
	return true
}

func (vm variationMap) equal(other variationMap) bool {
	return reflect.DeepEqual(vm, other)
}

type singletonInfo struct {
	// set during RegisterSingletonType
	factory   SingletonFactory
	singleton Singleton
	name      string
	parallel  bool

	// set during PrepareBuildActions
	actionDefs localBuildActions
}

type mutatorInfo struct {
	// set during RegisterMutator
	topDownMutator  TopDownMutator
	bottomUpMutator BottomUpMutator
	name            string
	parallel        bool
}

func newContext() *Context {
	eventHandler := metrics.EventHandler{}
	return &Context{
		Context:            context.Background(),
		EventHandler:       &eventHandler,
		moduleFactories:    make(map[string]ModuleFactory),
		nameInterface:      NewSimpleNameInterface(),
		moduleInfo:         make(map[Module]*moduleInfo),
		globs:              make(map[globKey]pathtools.GlobResult),
		fs:                 pathtools.OsFs,
		finishedMutators:   make(map[*mutatorInfo]bool),
		includeTags:        &IncludeTags{},
		sourceRootDirs:     &SourceRootDirs{},
		outDir:             nil,
		requiredNinjaMajor: 1,
		requiredNinjaMinor: 7,
		requiredNinjaMicro: 0,
	}
}

// NewContext creates a new Context object. The created context initially has
// no module or singleton factories registered, so the RegisterModuleFactory and
// RegisterSingletonFactory methods must be called before it can do anything
// useful.
func NewContext() *Context {
	ctx := newContext()

	ctx.RegisterBottomUpMutator("blueprint_deps", blueprintDepsMutator)

	return ctx
}

// A ModuleFactory function creates a new Module object. See the
// Context.RegisterModuleType method for details about how a registered
// ModuleFactory is used by a Context.
type ModuleFactory func() (m Module, propertyStructs []interface{})

// RegisterModuleType associates a module type name (which can appear in a
// Blueprints file) with a Module factory function. When the given module type
// name is encountered in a Blueprints file during parsing, the Module factory
// is invoked to instantiate a new Module object to handle the build action
// generation for the module. If a Mutator splits a module into multiple variants,
// the factory is invoked again to create a new Module for each variant.
//
// The module type names given here must be unique for the context. The factory
// function should be a named function so that its package and name can be
// included in the generated Ninja file for debugging purposes.
//
// The factory function returns two values. The first is the newly created
// Module object. The second is a slice of pointers to that Module object's
// properties structs. Each properties struct is examined when parsing a module
// definition of this type in a Blueprints file. Exported fields of the
// properties structs are automatically set to the property values specified in
// the Blueprints file. The properties struct field names determine the name of
// the Blueprints file properties that are used - the Blueprints property name
// matches that of the properties struct field name with the first letter
// converted to lower-case.
//
// The fields of the properties struct must be either []string, a string, or
// bool. The Context will panic if a Module gets instantiated with a properties
// struct containing a field that is not one of these supported types.
//
// Any properties that appear in the Blueprints files that are not built-in
// module properties (such as "name" and "deps") and do not have a corresponding
// field in the returned module properties struct result in an error during the
// Context's parse phase.
//
// As an example, the following code:
//
//	type myModule struct {
//	    properties struct {
//	        Foo string
//	        Bar []string
//	    }
//	}
//
//	func NewMyModule() (blueprint.Module, []interface{}) {
//	    module := new(myModule)
//	    properties := &module.properties
//	    return module, []interface{}{properties}
//	}
//
//	func main() {
//	    ctx := blueprint.NewContext()
//	    ctx.RegisterModuleType("my_module", NewMyModule)
//	    // ...
//	}
//
// would support parsing a module defined in a Blueprints file as follows:
//
//	my_module {
//	    name: "myName",
//	    foo:  "my foo string",
//	    bar:  ["my", "bar", "strings"],
//	}
//
// The factory function may be called from multiple goroutines. Any accesses
// to global variables must be synchronized.
func (c *Context) RegisterModuleType(name string, factory ModuleFactory) {
	if _, present := c.moduleFactories[name]; present {
		panic(fmt.Errorf("module type %q is already registered", name))
	}
	c.moduleFactories[name] = factory
}

// A SingletonFactory function creates a new Singleton object. See the
// Context.RegisterSingletonType method for details about how a registered
// SingletonFactory is used by a Context.
type SingletonFactory func() Singleton

// RegisterSingletonType registers a singleton type that will be invoked to
// generate build actions. Each registered singleton type is instantiated
// and invoked exactly once as part of the generate phase.
//
// Those singletons registered with parallel=true are run in parallel, after
// which the other registered singletons are run in registration order.
//
// The singleton type names given here must be unique for the context. The
// factory function should be a named function so that its package and name can
// be included in the generated Ninja file for debugging purposes.
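//
// A hedged sketch of registering two singletons, one of which may run in
// parallel (the names and factories are placeholders, not part of this
// package):
//
//	ctx.RegisterSingletonType("license_collector", NewLicenseSingleton, true)  // may run in parallel
//	ctx.RegisterSingletonType("manifest_writer", NewManifestSingleton, false)  // runs in registration order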
func (c *Context) RegisterSingletonType(name string, factory SingletonFactory, parallel bool) {
	for _, s := range c.singletonInfo {
		if s.name == name {
			panic(fmt.Errorf("singleton %q is already registered", name))
		}
	}

	c.singletonInfo = append(c.singletonInfo, &singletonInfo{
		factory:   factory,
		singleton: factory(),
		name:      name,
		parallel:  parallel,
	})
}

// RegisterPreSingletonType registers a presingleton type that will be invoked to
// generate build actions before any Blueprint files have been read. Each registered
// presingleton type is instantiated and invoked exactly once at the beginning of the
// parse phase. Each registered presingleton is invoked in registration order.
//
// The presingleton type names given here must be unique for the context. The
// factory function should be a named function so that its package and name can
// be included in the generated Ninja file for debugging purposes.
func (c *Context) RegisterPreSingletonType(name string, factory SingletonFactory) {
	for _, s := range c.preSingletonInfo {
		if s.name == name {
			panic(fmt.Errorf("presingleton %q is already registered", name))
		}
	}

	c.preSingletonInfo = append(c.preSingletonInfo, &singletonInfo{
		factory:   factory,
		singleton: factory(),
		name:      name,
		parallel:  false,
	})
}

func (c *Context) SetNameInterface(i NameInterface) {
	c.nameInterface = i
}

func (c *Context) SetSrcDir(path string) {
	c.srcDir = path
	c.fs = pathtools.NewOsFs(path)
}

func (c *Context) SrcDir() string {
	return c.srcDir
}

func singletonPkgPath(singleton Singleton) string {
	typ := reflect.TypeOf(singleton)
	for typ.Kind() == reflect.Ptr {
		typ = typ.Elem()
	}
	return typ.PkgPath()
}

func singletonTypeName(singleton Singleton) string {
	typ := reflect.TypeOf(singleton)
	for typ.Kind() == reflect.Ptr {
		typ = typ.Elem()
	}
	return typ.PkgPath() + "." + typ.Name()
}

// RegisterTopDownMutator registers a mutator that will be invoked to propagate dependency info
// top-down between Modules. Each registered mutator is invoked in registration order (mixing
// TopDownMutators and BottomUpMutators) once per Module, and the invocation on any module will
// have returned before it is invoked on any of its dependencies.
//
// The mutator type names given here must be unique to all top down mutators in
// the Context.
//
// Returns a MutatorHandle, on which Parallel can be called to set the mutator to visit modules in
// parallel while maintaining ordering.
func (c *Context) RegisterTopDownMutator(name string, mutator TopDownMutator) MutatorHandle {
	for _, m := range c.mutatorInfo {
		if m.name == name && m.topDownMutator != nil {
			panic(fmt.Errorf("mutator %q is already registered", name))
		}
	}

	info := &mutatorInfo{
		topDownMutator: mutator,
		name:           name,
	}

	c.mutatorInfo = append(c.mutatorInfo, info)

	return info
}

// RegisterBottomUpMutator registers a mutator that will be invoked to split Modules into variants.
// Each registered mutator is invoked in registration order (mixing TopDownMutators and
// BottomUpMutators) once per Module, and will not be invoked on a module until the invocations on
// all of the module's dependencies have returned.
//
// The mutator type names given here must be unique to all bottom up or early
// mutators in the Context.
//
// Returns a MutatorHandle, on which Parallel can be called to set the mutator to visit modules in
// parallel while maintaining ordering.
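//
// A minimal sketch of a bottom-up mutator that splits every module into two
// link variants (the mutator and variant names are illustrative, not part of
// this package):
//
//	ctx.RegisterBottomUpMutator("link", func(mctx blueprint.BottomUpMutatorContext) {
//		mctx.CreateVariations("static", "shared")
//	}).Parallel()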
func (c *Context) RegisterBottomUpMutator(name string, mutator BottomUpMutator) MutatorHandle {
	for _, m := range c.variantMutatorNames {
		if m == name {
			panic(fmt.Errorf("mutator %q is already registered", name))
		}
	}

	info := &mutatorInfo{
		bottomUpMutator: mutator,
		name:            name,
	}
	c.mutatorInfo = append(c.mutatorInfo, info)

	c.variantMutatorNames = append(c.variantMutatorNames, name)

	return info
}

type IncomingTransitionContext interface {
	// Module returns the target of the dependency edge for which the transition
	// is being computed
	Module() Module

	// Config returns the config object that was passed to
	// Context.PrepareBuildActions.
	Config() interface{}
}

type OutgoingTransitionContext interface {
	// Module returns the target of the dependency edge for which the transition
	// is being computed
	Module() Module

	// DepTag returns the dependency tag through which this dependency is
	// reached
	DepTag() DependencyTag
}

// Transition mutators implement a top-down mechanism where a module tells its
// direct dependencies what variation they should be built in but the dependency
// has the final say.
//
// When implementing a transition mutator, one needs to implement four methods:
//   - Split() that tells what variations a module has by itself
//   - OutgoingTransition() where a module tells what it wants from its
//     dependency
//   - IncomingTransition() where a module has the final say about its own
//     variation
//   - Mutate() that changes the state of a module depending on its variation
//
// The effective variation of module B when depended on by module A is the
// composition of the outgoing transition of module A and the incoming transition
// of module B.
//
// The outgoing transition should not take the properties of the dependency into
// account, only those of the module that depends on it. For this reason, the
// dependency is not even passed into it as an argument. Likewise, the incoming
// transition should not take the properties of the depending module into
// account and is thus not informed about it. This makes for a nice
// decomposition of the decision logic.
//
// A given transition mutator only affects its own variation; other variations
// stay unchanged along the dependency edges.
//
// Soong makes sure that all modules are created in the desired variations and
// that dependency edges are set up correctly. This ensures that "missing
// variation" errors do not happen and allows for more flexible changes in the
// value of the variation among dependency edges (as opposed to bottom-up
// mutators where if module A in variation X depends on module B and module B
// has that variation X, A must depend on variation X of B).
//
// The limited power of the context objects passed to individual mutator
// methods also makes it more difficult to shoot oneself in the foot. Complete
// safety is not guaranteed because no one prevents individual transition
// mutators from mutating modules in illegal ways and for e.g. Split() or
// Mutate() to run their own visitations of the transitive dependencies of the
// module, and both of these are bad ideas, but it's better than no guardrails
// at all.
//
// This model is pretty close to Bazel's configuration transitions. The mapping
// between concepts in Soong and Bazel is as follows:
//   - Module == configured target
//   - Variant == configuration
//   - Variation name == configuration flag
//   - Variation == configuration flag value
//   - Outgoing transition == attribute transition
//   - Incoming transition == rule transition
//
// The Split() method does not have a Bazel equivalent and Bazel split
// transitions do not have a Soong equivalent.
//
// Mutate() does not make sense in Bazel due to the different models of the
// two systems: when creating new variations, Soong clones the old module and
// thus some way is needed to change its state whereas Bazel creates each
// configuration of a given configured target anew.
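//
// A rough sketch of a TransitionMutator implementation and its registration
// (the "sanitize" mutator name, the empty pass-through transitions and the
// registration call site are hypothetical):
//
//	type sanitizeTransition struct{}
//
//	func (sanitizeTransition) Split(ctx blueprint.BaseModuleContext) []string {
//		return []string{""} // every module starts with a single, unnamed variation
//	}
//
//	func (sanitizeTransition) OutgoingTransition(ctx blueprint.OutgoingTransitionContext, sourceVariation string) string {
//		return sourceVariation // ask dependencies for the variation this module is in
//	}
//
//	func (sanitizeTransition) IncomingTransition(ctx blueprint.IncomingTransitionContext, incomingVariation string) string {
//		return incomingVariation // accept whatever the depending module requested
//	}
//
//	func (sanitizeTransition) Mutate(ctx blueprint.BottomUpMutatorContext, variation string) {
//		// apply per-variation changes to the module here
//	}
//
//	ctx.RegisterTransitionMutator("sanitize", sanitizeTransition{})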
type TransitionMutator interface {
	// Returns the set of variations that should be created for a module no matter
	// who depends on it. Used when Make depends on a particular variation or when
	// the module knows its variations just based on information given to it in
	// the Blueprint file. This method should not mutate the module it is called
	// on.
	Split(ctx BaseModuleContext) []string

	// Called on a module to determine which variation it wants from its direct
	// dependencies. The dependency itself can override this decision. This method
	// should not mutate the module itself.
	OutgoingTransition(ctx OutgoingTransitionContext, sourceVariation string) string

	// Called on a module to determine which variation it should be in based on
	// the variation modules that depend on it want. This gives the module a final
	// say about its own variations. This method should not mutate the module
	// itself.
	IncomingTransition(ctx IncomingTransitionContext, incomingVariation string) string

	// Called on each variation after a module has been split into multiple
	// variations. It should not split the module any further, but adding new
	// dependencies is fine. Unlike all the other methods on TransitionMutator,
	// this method is allowed to mutate the module.
	Mutate(ctx BottomUpMutatorContext, variation string)
}

type transitionMutatorImpl struct {
	name    string
	mutator TransitionMutator
}

// Adds each argument in items to l if it's not already there.
func addToStringListIfNotPresent(l []string, items ...string) []string {
OUTER:
	for _, i := range items {
		for _, existing := range l {
			if existing == i {
				continue OUTER
			}
		}

		l = append(l, i)
	}

	return l
}

func (t *transitionMutatorImpl) addRequiredVariation(m *moduleInfo, variation string) {
	m.requiredVariationsLock.Lock()
	defer m.requiredVariationsLock.Unlock()

	// This is only a consistency check. Leaking the variations of a transition
	// mutator to another one could well lead to issues that are difficult to
	// track down.
	if m.currentTransitionMutator != "" && m.currentTransitionMutator != t.name {
		panic(fmt.Errorf("transition mutator is %s in mutator %s", m.currentTransitionMutator, t.name))
	}

	m.currentTransitionMutator = t.name
	m.transitionVariations = addToStringListIfNotPresent(m.transitionVariations, variation)
}

func (t *transitionMutatorImpl) topDownMutator(mctx TopDownMutatorContext) {
	module := mctx.(*mutatorContext).module
	mutatorSplits := t.mutator.Split(mctx)
	if mutatorSplits == nil || len(mutatorSplits) == 0 {
		panic(fmt.Errorf("transition mutator %s returned no splits for module %s", t.name, mctx.ModuleName()))
	}

	// transitionVariations for a given module can be mutated by the module itself
	// and modules that directly depend on it. Since this is a top-down mutator,
	// all modules that directly depend on this module have already been processed
	// so no locking is necessary.
	module.transitionVariations = addToStringListIfNotPresent(module.transitionVariations, mutatorSplits...)
	sort.Strings(module.transitionVariations)

	for _, srcVariation := range module.transitionVariations {
		for _, dep := range module.directDeps {
			finalVariation := t.transition(mctx)(mctx.Module(), srcVariation, dep.module.logicModule, dep.tag)
			t.addRequiredVariation(dep.module, finalVariation)
		}
	}
}

type transitionContextImpl struct {
	module Module
	depTag DependencyTag
	config interface{}
}

func (c *transitionContextImpl) Module() Module {
	return c.module
}

func (c *transitionContextImpl) DepTag() DependencyTag {
	return c.depTag
}

func (c *transitionContextImpl) Config() interface{} {
	return c.config
}

func (t *transitionMutatorImpl) transition(mctx BaseMutatorContext) Transition {
	return func(source Module, sourceVariation string, dep Module, depTag DependencyTag) string {
		tc := &transitionContextImpl{module: dep, depTag: depTag, config: mctx.Config()}
		outgoingVariation := t.mutator.OutgoingTransition(tc, sourceVariation)
		finalVariation := t.mutator.IncomingTransition(tc, outgoingVariation)
		return finalVariation
	}
}

func (t *transitionMutatorImpl) bottomUpMutator(mctx BottomUpMutatorContext) {
	mc := mctx.(*mutatorContext)
	// Fetch and clean up transition mutator state. No locking needed since the
	// only time interaction between multiple modules is required is during the
	// computation of the variations required by a given module.
	variations := mc.module.transitionVariations
	mc.module.transitionVariations = nil
	mc.module.currentTransitionMutator = ""

	if len(variations) < 1 {
		panic(fmt.Errorf("no variations found for module %s by mutator %s",
			mctx.ModuleName(), t.name))
	}

	if len(variations) == 1 && variations[0] == "" {
		// Module is not split, just apply the transition
		mc.applyTransition(t.transition(mctx))
	} else {
		mc.createVariationsWithTransition(t.transition(mctx), variations...)
	}
}

func (t *transitionMutatorImpl) mutateMutator(mctx BottomUpMutatorContext) {
	module := mctx.(*mutatorContext).module
	currentVariation := module.variant.variations[t.name]
	t.mutator.Mutate(mctx, currentVariation)
}

func (c *Context) RegisterTransitionMutator(name string, mutator TransitionMutator) {
	impl := &transitionMutatorImpl{name: name, mutator: mutator}

	c.RegisterTopDownMutator(name+"_deps", impl.topDownMutator).Parallel()
	c.RegisterBottomUpMutator(name, impl.bottomUpMutator).Parallel()
	c.RegisterBottomUpMutator(name+"_mutate", impl.mutateMutator).Parallel()
}

type MutatorHandle interface {
	// Set the mutator to visit modules in parallel while maintaining ordering. Calling any
	// method on the mutator context is thread-safe, but the mutator must handle synchronization
	// for any modifications to global state or any modules outside the one it was invoked on.
	Parallel() MutatorHandle
}

func (mutator *mutatorInfo) Parallel() MutatorHandle {
	mutator.parallel = true
	return mutator
}

// SetIgnoreUnknownModuleTypes sets the behavior of the context in the case
// where it encounters an unknown module type while parsing Blueprints files. By
// default, the context will report unknown module types as an error. If this
// method is called with ignoreUnknownModuleTypes set to true then the context
// will silently ignore unknown module types.
//
// This method should generally not be used. It exists to facilitate the
// bootstrapping process.
func (c *Context) SetIgnoreUnknownModuleTypes(ignoreUnknownModuleTypes bool) {
	c.ignoreUnknownModuleTypes = ignoreUnknownModuleTypes
}

// SetAllowMissingDependencies changes the behavior of Blueprint to ignore
// unresolved dependencies. If the module's GenerateBuildActions calls
// ModuleContext.GetMissingDependencies Blueprint will not emit any errors
// for missing dependencies.
func (c *Context) SetAllowMissingDependencies(allowMissingDependencies bool) {
	c.allowMissingDependencies = allowMissingDependencies
}

func (c *Context) SetModuleListFile(listFile string) {
	c.moduleListFile = listFile
}
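
// ListModulePaths reads the module list file set by SetModuleListFile and
// returns its entries joined to baseDir. As the code below shows, the file is
// a plain newline-separated list of Blueprints file paths relative to baseDir;
// a hypothetical example of its contents:
//
//	Blueprints
//	art/Blueprints
//	bionic/Blueprints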
func (c *Context) ListModulePaths(baseDir string) (paths []string, err error) {
	reader, err := c.fs.Open(c.moduleListFile)
	if err != nil {
		return nil, err
	}
	defer reader.Close()
	bytes, err := ioutil.ReadAll(reader)
	if err != nil {
		return nil, err
	}
	text := string(bytes)

	text = strings.Trim(text, "\n")
	lines := strings.Split(text, "\n")
	for i := range lines {
		lines[i] = filepath.Join(baseDir, lines[i])
	}

	return lines, nil
}

// a fileParseContext tells the status of parsing a particular file
type fileParseContext struct {
	// name of file
	fileName string

	// scope to use when resolving variables
	Scope *parser.Scope

	// pointer to the one in the parent directory
	parent *fileParseContext

	// is closed once FileHandler has completed for this file
	doneVisiting chan struct{}
}

// ParseBlueprintsFiles parses a set of Blueprints files starting with the file
// at rootFile. When it encounters a Blueprints file with a set of subdirs
// listed it recursively parses any Blueprints files found in those
// subdirectories.
//
// If no errors are encountered while parsing the files, the list of paths on
// which the future output will depend is returned. This list will include both
// Blueprints file paths as well as directory paths for cases where wildcard
// subdirs are found.
func (c *Context) ParseBlueprintsFiles(rootFile string,
	config interface{}) (deps []string, errs []error) {

	baseDir := filepath.Dir(rootFile)
	pathsToParse, err := c.ListModulePaths(baseDir)
	if err != nil {
		return nil, []error{err}
	}
	return c.ParseFileList(baseDir, pathsToParse, config)
}

type shouldVisitFileInfo struct {
	shouldVisitFile bool
	skippedModules  []string
	reasonForSkip   string
	errs            []error
}

// Returns a boolean for whether this file should be analyzed
// Evaluates to true if the file either
// 1. does not contain a blueprint_package_includes
// 2. contains a blueprint_package_includes and all requested tags are set
// This should be processed before adding any modules to the build graph
func shouldVisitFile(c *Context, file *parser.File) shouldVisitFileInfo {
	skippedModules := []string{}
	var blueprintPackageIncludes *PackageIncludes
	for _, def := range file.Defs {
		switch def := def.(type) {
		case *parser.Module:
			skippedModules = append(skippedModules, def.Name())
			if def.Type != "blueprint_package_includes" {
				continue
			}
			module, errs := processModuleDef(def, file.Name, c.moduleFactories, nil, c.ignoreUnknownModuleTypes)
			if len(errs) > 0 {
				// This file contains errors in blueprint_package_includes
				// Visit anyways so that we can report errors on other modules in the file
				return shouldVisitFileInfo{
					shouldVisitFile: true,
					errs:            errs,
				}
			}
			logicModule, _ := c.cloneLogicModule(module)
			blueprintPackageIncludes = logicModule.(*PackageIncludes)
		}
	}

	if blueprintPackageIncludes != nil {
		packageMatches := blueprintPackageIncludes.MatchesIncludeTags(c)
		if !packageMatches {
			return shouldVisitFileInfo{
				shouldVisitFile: false,
				skippedModules:  skippedModules,
				reasonForSkip: fmt.Sprintf(
					"module is defined in %q which contains a blueprint_package_includes module with unsatisfied tags",
					file.Name,
				),
			}
		}
	}

	shouldVisit, invalidatingPrefix := c.sourceRootDirs.SourceRootDirAllowed(file.Name)
	if !shouldVisit {
		return shouldVisitFileInfo{
			shouldVisitFile: shouldVisit,
			skippedModules:  skippedModules,
			reasonForSkip: fmt.Sprintf(
				"%q is a descendant of %q, and that path prefix was not included in PRODUCT_SOURCE_ROOT_DIRS",
				file.Name,
				invalidatingPrefix,
			),
		}
	}
	return shouldVisitFileInfo{shouldVisitFile: true}
}

func (c *Context) ParseFileList(rootDir string, filePaths []string,
	config interface{}) (deps []string, errs []error) {

	if len(filePaths) < 1 {
		return nil, []error{fmt.Errorf("no paths provided to parse")}
	}

	c.dependenciesReady = false

	type newModuleInfo struct {
		*moduleInfo
		deps  []string
		added chan<- struct{}
	}

	type newSkipInfo struct {
		shouldVisitFileInfo
		file string
	}

	moduleCh := make(chan newModuleInfo)
	errsCh := make(chan []error)
	doneCh := make(chan struct{})
	skipCh := make(chan newSkipInfo)
	var numErrs uint32
	var numGoroutines int32

	// handler must be reentrant
	handleOneFile := func(file *parser.File) {
		if atomic.LoadUint32(&numErrs) > maxErrors {
			return
		}

		addedCh := make(chan struct{})

		var scopedModuleFactories map[string]ModuleFactory

		var addModule func(module *moduleInfo) []error
		addModule = func(module *moduleInfo) []error {
			// Run any load hooks immediately before it is sent to the moduleCh and is
			// registered by name. This allows load hooks to set and/or modify any aspect
			// of the module (including names) using information that is not available when
			// the module factory is called.
			newModules, newDeps, errs := runAndRemoveLoadHooks(c, config, module, &scopedModuleFactories)
			if len(errs) > 0 {
				return errs
			}

			moduleCh <- newModuleInfo{module, newDeps, addedCh}
			<-addedCh
			for _, n := range newModules {
				errs = addModule(n)
				if len(errs) > 0 {
					return errs
				}
			}
			return nil
		}

		shouldVisitInfo := shouldVisitFile(c, file)
		errs := shouldVisitInfo.errs
		if len(errs) > 0 {
			atomic.AddUint32(&numErrs, uint32(len(errs)))
			errsCh <- errs
		}
		if !shouldVisitInfo.shouldVisitFile {
			skipCh <- newSkipInfo{
				file:                file.Name,
				shouldVisitFileInfo: shouldVisitInfo,
			}
			// TODO: Write a file that lists the skipped bp files
			return
		}

		for _, def := range file.Defs {
			switch def := def.(type) {
			case *parser.Module:
				module, errs := processModuleDef(def, file.Name, c.moduleFactories, scopedModuleFactories, c.ignoreUnknownModuleTypes)
				if len(errs) == 0 && module != nil {
					errs = addModule(module)
				}

				if len(errs) > 0 {
					atomic.AddUint32(&numErrs, uint32(len(errs)))
					errsCh <- errs
				}

			case *parser.Assignment:
				// Already handled via Scope object
			default:
				panic("unknown definition type")
			}
		}
	}

	atomic.AddInt32(&numGoroutines, 1)
	go func() {
		var errs []error
		deps, errs = c.WalkBlueprintsFiles(rootDir, filePaths, handleOneFile)
		if len(errs) > 0 {
			errsCh <- errs
		}
		doneCh <- struct{}{}
	}()

	var hookDeps []string
loop:
	for {
		select {
		case newErrs := <-errsCh:
			errs = append(errs, newErrs...)
		case module := <-moduleCh:
			newErrs := c.addModule(module.moduleInfo)
			hookDeps = append(hookDeps, module.deps...)
			if module.added != nil {
				module.added <- struct{}{}
			}
			if len(newErrs) > 0 {
				errs = append(errs, newErrs...)
			}
		case <-doneCh:
			n := atomic.AddInt32(&numGoroutines, -1)
			if n == 0 {
				break loop
			}
		case skipped := <-skipCh:
			nctx := newNamespaceContextFromFilename(skipped.file)
			for _, name := range skipped.skippedModules {
				c.nameInterface.NewSkippedModule(nctx, name, SkippedModuleInfo{
					filename: skipped.file,
					reason:   skipped.reasonForSkip,
				})
			}
		}
	}

	deps = append(deps, hookDeps...)
	return deps, errs
}

type FileHandler func(*parser.File)

// WalkBlueprintsFiles walks a set of Blueprints files starting with the given filepaths,
// calling the given file handler on each.
//
// When WalkBlueprintsFiles encounters a Blueprints file with a set of subdirs listed,
// it recursively parses any Blueprints files found in those subdirectories.
//
// If any of the file paths is an ancestor directory of any other file path, the ancestor
// will be parsed and visited first.
//
// The file handler will be called from a goroutine, so it must be reentrant.
//
// If no errors are encountered while parsing the files, the list of paths on
// which the future output will depend is returned. This list will include both
// Blueprints file paths as well as directory paths for cases where wildcard
// subdirs are found.
//
// visitor will be called asynchronously, and will only be called once the visitor for each
// ancestor directory has completed.
//
// WalkBlueprintsFiles will not return until all calls to visitor have returned.
func (c *Context) WalkBlueprintsFiles(rootDir string, filePaths []string,
	visitor FileHandler) (deps []string, errs []error) {

	// make a mapping from ancestors to their descendants to facilitate parsing ancestors first
	descendantsMap, err := findBlueprintDescendants(filePaths)
	if err != nil {
		panic(err.Error())
	}
	blueprintsSet := make(map[string]bool)

	// Channels to receive data back from openAndParse goroutines
	blueprintsCh := make(chan fileParseContext)
	errsCh := make(chan []error)
	depsCh := make(chan string)

	// Channel to notify main loop that an openAndParse goroutine has finished
	doneParsingCh := make(chan fileParseContext)

	// Number of outstanding goroutines to wait for
	activeCount := 0
	var pending []fileParseContext
	tooManyErrors := false

	// Limit concurrent calls to parseBlueprintFiles to 200
	// Darwin has a default limit of 256 open files
	maxActiveCount := 200

	// count the number of pending calls to visitor()
	visitorWaitGroup := sync.WaitGroup{}

	startParseBlueprintsFile := func(blueprint fileParseContext) {
		if blueprintsSet[blueprint.fileName] {
			return
		}
		blueprintsSet[blueprint.fileName] = true
		activeCount++
		deps = append(deps, blueprint.fileName)
		visitorWaitGroup.Add(1)
		go func() {
			file, blueprints, deps, errs := c.openAndParse(blueprint.fileName, blueprint.Scope, rootDir,
				&blueprint)
			if len(errs) > 0 {
				errsCh <- errs
			}
			for _, blueprint := range blueprints {
				blueprintsCh <- blueprint
			}
			for _, dep := range deps {
				depsCh <- dep
			}
			doneParsingCh <- blueprint

			if blueprint.parent != nil && blueprint.parent.doneVisiting != nil {
				// wait for visitor() of parent to complete
				<-blueprint.parent.doneVisiting
			}

			if len(errs) == 0 {
				// process this file
				visitor(file)
			}
			if blueprint.doneVisiting != nil {
				close(blueprint.doneVisiting)
			}
			visitorWaitGroup.Done()
		}()
	}

	foundParseableBlueprint := func(blueprint fileParseContext) {
		if activeCount >= maxActiveCount {
			pending = append(pending, blueprint)
		} else {
			startParseBlueprintsFile(blueprint)
		}
	}

	startParseDescendants := func(blueprint fileParseContext) {
		descendants, hasDescendants := descendantsMap[blueprint.fileName]
		if hasDescendants {
			for _, descendant := range descendants {
				foundParseableBlueprint(fileParseContext{descendant, parser.NewScope(blueprint.Scope), &blueprint, make(chan struct{})})
			}
		}
	}

	// begin parsing any files that have no ancestors
	startParseDescendants(fileParseContext{"", parser.NewScope(nil), nil, nil})

loop:
	for {
		if len(errs) > maxErrors {
			tooManyErrors = true
		}

		select {
		case newErrs := <-errsCh:
			errs = append(errs, newErrs...)
		case dep := <-depsCh:
			deps = append(deps, dep)
		case blueprint := <-blueprintsCh:
			if tooManyErrors {
				continue
			}
			foundParseableBlueprint(blueprint)
		case blueprint := <-doneParsingCh:
			activeCount--
			if !tooManyErrors {
				startParseDescendants(blueprint)
			}
			if activeCount < maxActiveCount && len(pending) > 0 {
				// start to process the next one from the queue
				next := pending[len(pending)-1]
				pending = pending[:len(pending)-1]
				startParseBlueprintsFile(next)
			}
			if activeCount == 0 {
				break loop
			}
		}
	}

	sort.Strings(deps)

	// wait for every visitor() to complete
	visitorWaitGroup.Wait()

	return
}
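
// Illustrative usage sketch (not part of the original source): a caller can collect the
// names of every visited file. The handler runs on multiple goroutines, so any shared
// state it touches must be synchronized. "ctx" is an assumed *Context value.
//
//	var mu sync.Mutex
//	var visited []string
//	deps, errs := ctx.WalkBlueprintsFiles(rootDir, filePaths, func(f *parser.File) {
//		mu.Lock()
//		defer mu.Unlock()
//		visited = append(visited, f.Name)
//	})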

// MockFileSystem causes the Context to replace all reads with accesses to the provided map of
// filenames to contents stored as a byte slice.
func (c *Context) MockFileSystem(files map[string][]byte) {
	// look for a module list file
	_, ok := files[MockModuleListFile]
	if !ok {
		// no module list file specified; find every file named Android.bp
		pathsToParse := []string{}
		for candidate := range files {
			if filepath.Base(candidate) == "Android.bp" {
				pathsToParse = append(pathsToParse, candidate)
			}
		}
		if len(pathsToParse) < 1 {
			panic(fmt.Sprintf("No Blueprints files found in mock filesystem: %v\n", files))
		}
		// put the list of Blueprints files into a list file
		files[MockModuleListFile] = []byte(strings.Join(pathsToParse, "\n"))
	}
	c.SetModuleListFile(MockModuleListFile)

	// mock the filesystem
	c.fs = pathtools.MockFs(files)
}
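
// Illustrative usage sketch (not part of the original source): tests can provide an
// in-memory tree; if no "bplist" module list file is given, every Android.bp entry is
// collected into one automatically. The module definitions here are placeholders.
//
//	ctx.MockFileSystem(map[string][]byte{
//		"Android.bp":     []byte("my_module { name: \"top\" }"),
//		"sub/Android.bp": []byte("my_module { name: \"sub\" }"),
//	})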

func (c *Context) SetFs(fs pathtools.FileSystem) {
	c.fs = fs
}

// openAndParse opens and parses a single Blueprints file, and returns the results
func (c *Context) openAndParse(filename string, scope *parser.Scope, rootDir string,
	parent *fileParseContext) (file *parser.File,
	subBlueprints []fileParseContext, deps []string, errs []error) {

	f, err := c.fs.Open(filename)
	if err != nil {
		// couldn't open the file; see if we can provide a clearer error than "could not open file"
		stats, statErr := c.fs.Lstat(filename)
		if statErr == nil {
			isSymlink := stats.Mode()&os.ModeSymlink != 0
			if isSymlink {
				err = fmt.Errorf("could not open symlink %v : %v", filename, err)
				target, readlinkErr := os.Readlink(filename)
				if readlinkErr == nil {
					_, targetStatsErr := c.fs.Lstat(target)
					if targetStatsErr != nil {
						err = fmt.Errorf("could not open symlink %v; its target (%v) cannot be opened", filename, target)
					}
				}
			} else {
				err = fmt.Errorf("%v exists but could not be opened: %v", filename, err)
			}
		}
		return nil, nil, nil, []error{err}
	}

	func() {
		defer func() {
			err = f.Close()
			if err != nil {
				errs = append(errs, err)
			}
		}()
		file, subBlueprints, errs = c.parseOne(rootDir, filename, f, scope, parent)
	}()

	if len(errs) > 0 {
		return nil, nil, nil, errs
	}

	for _, b := range subBlueprints {
		deps = append(deps, b.fileName)
	}

	return file, subBlueprints, deps, nil
}

// parseOne parses a single Blueprints file from the given reader, creating Module
// objects for each of the module definitions encountered. If the Blueprints
// file contains an assignment to the "subdirs" variable, then the
// subdirectories listed are searched for Blueprints files, which are returned in the
// subBlueprints return value. If the Blueprints file contains an assignment
// to the "build" variable, then the files listed are returned in the
// subBlueprints return value.
//
// rootDir specifies the path to the root directory of the source tree, while
// filename specifies the path to the Blueprints file. These paths are used for
// error reporting and for determining the module's directory.
func (c *Context) parseOne(rootDir, filename string, reader io.Reader,
	scope *parser.Scope, parent *fileParseContext) (file *parser.File, subBlueprints []fileParseContext, errs []error) {

	relBlueprintsFile, err := filepath.Rel(rootDir, filename)
	if err != nil {
		return nil, nil, []error{err}
	}

	scope.Remove("subdirs")
	scope.Remove("optional_subdirs")
	scope.Remove("build")
	file, errs = parser.ParseAndEval(filename, reader, scope)
	if len(errs) > 0 {
		for i, err := range errs {
			if parseErr, ok := err.(*parser.ParseError); ok {
				err = &BlueprintError{
					Err: parseErr.Err,
					Pos: parseErr.Pos,
				}
				errs[i] = err
			}
		}

		// If there were any parse errors don't bother trying to interpret the
		// result.
		return nil, nil, errs
	}
	file.Name = relBlueprintsFile

	build, buildPos, err := getLocalStringListFromScope(scope, "build")
	if err != nil {
		errs = append(errs, err)
	}
	for _, buildEntry := range build {
		if strings.Contains(buildEntry, "/") {
			errs = append(errs, &BlueprintError{
				Err: fmt.Errorf("illegal value %v. The '/' character is not permitted", buildEntry),
				Pos: buildPos,
			})
		}
	}

	if err != nil {
		errs = append(errs, err)
	}

	var blueprints []string

	newBlueprints, newErrs := c.findBuildBlueprints(filepath.Dir(filename), build, buildPos)
	blueprints = append(blueprints, newBlueprints...)
	errs = append(errs, newErrs...)

	subBlueprintsAndScope := make([]fileParseContext, len(blueprints))
	for i, b := range blueprints {
		subBlueprintsAndScope[i] = fileParseContext{b, parser.NewScope(scope), parent, make(chan struct{})}
	}
	return file, subBlueprintsAndScope, errs
}
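
// Illustrative example (not part of the original source): a Blueprints file can pull in
// sibling files in the same directory through the "build" variable handled above; the
// file names below are placeholders.
//
//	build = ["libs.bp", "tests.bp"]
//
// Each listed entry is globbed relative to the directory of the current file and parsed
// with a child scope, as done by findBuildBlueprints below.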

func (c *Context) findBuildBlueprints(dir string, build []string,
	buildPos scanner.Position) ([]string, []error) {

	var blueprints []string
	var errs []error

	for _, file := range build {
		pattern := filepath.Join(dir, file)
		var matches []string
		var err error

		matches, err = c.glob(pattern, nil)

		if err != nil {
			errs = append(errs, &BlueprintError{
				Err: fmt.Errorf("%q: %s", pattern, err.Error()),
				Pos: buildPos,
			})
			continue
		}

		if len(matches) == 0 {
			errs = append(errs, &BlueprintError{
				Err: fmt.Errorf("%q: not found", pattern),
				Pos: buildPos,
			})
		}

		for _, foundBlueprints := range matches {
			if strings.HasSuffix(foundBlueprints, "/") {
				errs = append(errs, &BlueprintError{
					Err: fmt.Errorf("%q: is a directory", foundBlueprints),
					Pos: buildPos,
				})
			}
			blueprints = append(blueprints, foundBlueprints)
		}
	}

	return blueprints, errs
}

func (c *Context) findSubdirBlueprints(dir string, subdirs []string, subdirsPos scanner.Position,
	subBlueprintsName string, optional bool) ([]string, []error) {

	var blueprints []string
	var errs []error

	for _, subdir := range subdirs {
		pattern := filepath.Join(dir, subdir, subBlueprintsName)
		var matches []string
		var err error

		matches, err = c.glob(pattern, nil)

		if err != nil {
			errs = append(errs, &BlueprintError{
				Err: fmt.Errorf("%q: %s", pattern, err.Error()),
				Pos: subdirsPos,
			})
			continue
		}

		if len(matches) == 0 && !optional {
			errs = append(errs, &BlueprintError{
				Err: fmt.Errorf("%q: not found", pattern),
				Pos: subdirsPos,
			})
		}

		for _, subBlueprints := range matches {
			if strings.HasSuffix(subBlueprints, "/") {
				errs = append(errs, &BlueprintError{
					Err: fmt.Errorf("%q: is a directory", subBlueprints),
					Pos: subdirsPos,
				})
			}
			blueprints = append(blueprints, subBlueprints)
		}
	}

	return blueprints, errs
}

func getLocalStringListFromScope(scope *parser.Scope, v string) ([]string, scanner.Position, error) {
	if assignment, local := scope.Get(v); assignment == nil || !local {
		return nil, scanner.Position{}, nil
	} else {
		switch value := assignment.Value.Eval().(type) {
		case *parser.List:
			ret := make([]string, 0, len(value.Values))

			for _, listValue := range value.Values {
				s, ok := listValue.(*parser.String)
				if !ok {
					// The parser should not produce this.
					panic("non-string value found in list")
				}

				ret = append(ret, s.Value)
			}

			return ret, assignment.EqualsPos, nil
		case *parser.Bool, *parser.String:
			return nil, scanner.Position{}, &BlueprintError{
				Err: fmt.Errorf("%q must be a list of strings", v),
				Pos: assignment.EqualsPos,
			}
		default:
			panic(fmt.Errorf("unknown value type: %d", assignment.Value.Type()))
		}
	}
}

func getStringFromScope(scope *parser.Scope, v string) (string, scanner.Position, error) {
	if assignment, _ := scope.Get(v); assignment == nil {
		return "", scanner.Position{}, nil
	} else {
		switch value := assignment.Value.Eval().(type) {
		case *parser.String:
			return value.Value, assignment.EqualsPos, nil
		case *parser.Bool, *parser.List:
			return "", scanner.Position{}, &BlueprintError{
				Err: fmt.Errorf("%q must be a string", v),
				Pos: assignment.EqualsPos,
			}
		default:
			panic(fmt.Errorf("unknown value type: %d", assignment.Value.Type()))
		}
	}
}

// Clones a build logic module by calling the factory method for its module type, and then cloning
// property values. Any values stored in the module object that are not stored in properties
// structs will be lost.
func (c *Context) cloneLogicModule(origModule *moduleInfo) (Module, []interface{}) {
	newLogicModule, newProperties := origModule.factory()

	if len(newProperties) != len(origModule.properties) {
		panic("mismatched properties array length in " + origModule.Name())
	}

	for i := range newProperties {
		dst := reflect.ValueOf(newProperties[i])
		src := reflect.ValueOf(origModule.properties[i])

		proptools.CopyProperties(dst, src)
	}

	return newLogicModule, newProperties
}

func newVariant(module *moduleInfo, mutatorName string, variationName string,
	local bool) variant {

	newVariantName := module.variant.name
	if variationName != "" {
		if newVariantName == "" {
			newVariantName = variationName
		} else {
			newVariantName += "_" + variationName
		}
	}

	newVariations := module.variant.variations.clone()
	if newVariations == nil {
		newVariations = make(variationMap)
	}
	newVariations[mutatorName] = variationName

	newDependencyVariations := module.variant.dependencyVariations.clone()
	if !local {
		if newDependencyVariations == nil {
			newDependencyVariations = make(variationMap)
		}
		newDependencyVariations[mutatorName] = variationName
	}

	return variant{newVariantName, newVariations, newDependencyVariations}
}
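
// Illustrative worked example (not part of the original source): if a module already has
// the variant name "arm" and a mutator named "link" splits it with the variation "shared",
// newVariant produces the name "arm_shared" and records variations["link"] = "shared"
// (and the same entry in dependencyVariations, unless the mutator is local).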

func (c *Context) createVariations(origModule *moduleInfo, mutatorName string,
	depChooser depChooser, variationNames []string, local bool) (modulesOrAliases, []error) {

	if len(variationNames) == 0 {
		panic(fmt.Errorf("mutator %q passed zero-length variation list for module %q",
			mutatorName, origModule.Name()))
	}

	var newModules modulesOrAliases

	var errs []error

	for i, variationName := range variationNames {
		var newLogicModule Module
		var newProperties []interface{}

		if i == 0 {
			// Reuse the existing module for the first new variant
			// This both saves creating a new module, and causes the insertion in c.moduleInfo below
			// with logicModule as the key to replace the original entry in c.moduleInfo
			newLogicModule, newProperties = origModule.logicModule, origModule.properties
		} else {
			newLogicModule, newProperties = c.cloneLogicModule(origModule)
		}

		m := *origModule
		newModule := &m
		newModule.directDeps = append([]depInfo(nil), origModule.directDeps...)
		newModule.reverseDeps = nil
		newModule.forwardDeps = nil
		newModule.logicModule = newLogicModule
		newModule.variant = newVariant(origModule, mutatorName, variationName, local)
		newModule.properties = newProperties
		newModule.providers = append([]interface{}(nil), origModule.providers...)

		newModules = append(newModules, newModule)

		newErrs := c.convertDepsToVariation(newModule, depChooser)
		if len(newErrs) > 0 {
			errs = append(errs, newErrs...)
		}
	}

	// Mark original variant as invalid. Modules that depend on this module will still
	// depend on origModule, but we'll fix it when the mutator is called on them.
	origModule.logicModule = nil
	origModule.splitModules = newModules

	atomic.AddUint32(&c.depsModified, 1)

	return newModules, errs
}

type depChooser func(source *moduleInfo, dep depInfo) (*moduleInfo, string)

// This function is called for every dependency edge to determine which
// variation of the dependency is needed. Its inputs are the depending module,
// its variation, the dependency and the dependency tag.
type Transition func(source Module, sourceVariation string, dep Module, depTag DependencyTag) string

func chooseDepByTransition(mutatorName string, transition Transition) depChooser {
	return func(source *moduleInfo, dep depInfo) (*moduleInfo, string) {
		sourceVariation := source.variant.variations[mutatorName]
		depLogicModule := dep.module.logicModule
		if depLogicModule == nil {
			// This is really a lie because the original dependency before the split
			// went away when it was split. We choose an arbitrary split module
			// instead and hope that whatever information the transition wants from it
			// is the same as in the original one
			// TODO(lberki): this can be fixed by calling transition() once and saving
			// its results somewhere
			depLogicModule = dep.module.splitModules[0].moduleOrAliasTarget().logicModule
		}

		desiredVariation := transition(source.logicModule, sourceVariation, depLogicModule, dep.tag)
		for _, m := range dep.module.splitModules {
			if m.moduleOrAliasVariant().variations[mutatorName] == desiredVariation {
				return m.moduleOrAliasTarget(), ""
			}
		}

		return nil, desiredVariation
	}
}
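
// Illustrative sketch (not part of the original source): a Transition that propagates the
// source's variation to its dependencies, except across a hypothetical "staticTag"
// dependency tag assumed to be defined elsewhere:
//
//	var transition Transition = func(source Module, sourceVariation string, dep Module, depTag DependencyTag) string {
//		if depTag == staticTag { // staticTag is an assumed tag, not part of this file
//			return "static"
//		}
//		return sourceVariation
//	}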

func chooseDep(candidates modulesOrAliases, mutatorName, variationName string, defaultVariationName *string) (*moduleInfo, string) {
	for _, m := range candidates {
		if m.moduleOrAliasVariant().variations[mutatorName] == variationName {
			return m.moduleOrAliasTarget(), ""
		}
	}

	if defaultVariationName != nil {
		// give it a second chance; match with defaultVariationName
		for _, m := range candidates {
			if m.moduleOrAliasVariant().variations[mutatorName] == *defaultVariationName {
				return m.moduleOrAliasTarget(), ""
			}
		}
	}

	return nil, variationName
}

func chooseDepExplicit(mutatorName string,
	variationName string, defaultVariationName *string) depChooser {
	return func(source *moduleInfo, dep depInfo) (*moduleInfo, string) {
		return chooseDep(dep.module.splitModules, mutatorName, variationName, defaultVariationName)
	}
}

func chooseDepInherit(mutatorName string, defaultVariationName *string) depChooser {
	return func(source *moduleInfo, dep depInfo) (*moduleInfo, string) {
		sourceVariation := source.variant.variations[mutatorName]
		return chooseDep(dep.module.splitModules, mutatorName, sourceVariation, defaultVariationName)
	}
}

func (c *Context) convertDepsToVariation(module *moduleInfo, depChooser depChooser) (errs []error) {
	for i, dep := range module.directDeps {
		if dep.module.logicModule == nil {
			newDep, missingVariation := depChooser(module, dep)
			if newDep == nil {
				errs = append(errs, &BlueprintError{
					Err: fmt.Errorf("failed to find variation %q for module %q needed by %q",
						missingVariation, dep.module.Name(), module.Name()),
					Pos: module.pos,
				})
				continue
			}
			module.directDeps[i].module = newDep
		}
	}

	return errs
}

func (c *Context) prettyPrintVariant(variations variationMap) string {
	names := make([]string, 0, len(variations))
	for _, m := range c.variantMutatorNames {
		if v, ok := variations[m]; ok {
			names = append(names, m+":"+v)
		}
	}

	return strings.Join(names, ",")
}

func (c *Context) prettyPrintGroupVariants(group *moduleGroup) string {
	var variants []string
	for _, moduleOrAlias := range group.modules {
		if mod := moduleOrAlias.module(); mod != nil {
			variants = append(variants, c.prettyPrintVariant(mod.variant.variations))
		} else if alias := moduleOrAlias.alias(); alias != nil {
			variants = append(variants, c.prettyPrintVariant(alias.variant.variations)+
				" (alias to "+c.prettyPrintVariant(alias.target.variant.variations)+")")
		}
	}
	return strings.Join(variants, "\n  ")
}
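
// Illustrative worked example (not part of the original source): for a variationMap of
// {"arch": "arm", "link": "shared"}, prettyPrintVariant yields "arch:arm,link:shared",
// with entries emitted in mutator registration order rather than map order.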

func newModule(factory ModuleFactory) *moduleInfo {
	logicModule, properties := factory()

	return &moduleInfo{
		logicModule: logicModule,
		factory:     factory,
		properties:  properties,
	}
}

func processModuleDef(moduleDef *parser.Module,
	relBlueprintsFile string, moduleFactories, scopedModuleFactories map[string]ModuleFactory, ignoreUnknownModuleTypes bool) (*moduleInfo, []error) {

	factory, ok := moduleFactories[moduleDef.Type]
	if !ok && scopedModuleFactories != nil {
		factory, ok = scopedModuleFactories[moduleDef.Type]
	}
	if !ok {
		if ignoreUnknownModuleTypes {
			return nil, nil
		}

		return nil, []error{
			&BlueprintError{
				Err: fmt.Errorf("unrecognized module type %q", moduleDef.Type),
				Pos: moduleDef.TypePos,
			},
		}
	}

	module := newModule(factory)
	module.typeName = moduleDef.Type

	module.relBlueprintsFile = relBlueprintsFile

	propertyMap, errs := proptools.UnpackProperties(moduleDef.Properties, module.properties...)
	if len(errs) > 0 {
		for i, err := range errs {
			if unpackErr, ok := err.(*proptools.UnpackError); ok {
				err = &BlueprintError{
					Err: unpackErr.Err,
					Pos: unpackErr.Pos,
				}
				errs[i] = err
			}
		}
		return nil, errs
	}

	module.pos = moduleDef.TypePos
	module.propertyPos = make(map[string]scanner.Position)
	for name, propertyDef := range propertyMap {
		module.propertyPos[name] = propertyDef.ColonPos
	}

	return module, nil
}

func (c *Context) addModule(module *moduleInfo) []error {
	name := module.logicModule.Name()
	if name == "" {
		return []error{
			&BlueprintError{
				Err: fmt.Errorf("property 'name' is missing from a module"),
				Pos: module.pos,
			},
		}
	}
	c.moduleInfo[module.logicModule] = module

	group := &moduleGroup{
		name:    name,
		modules: modulesOrAliases{module},
	}
	module.group = group
	namespace, errs := c.nameInterface.NewModule(
		newNamespaceContext(module),
		ModuleGroup{moduleGroup: group},
		module.logicModule)
	if len(errs) > 0 {
		for i := range errs {
			errs[i] = &BlueprintError{Err: errs[i], Pos: module.pos}
		}
		return errs
	}
	group.namespace = namespace

	c.moduleGroups = append(c.moduleGroups, group)

	return nil
}

// ResolveDependencies checks that the dependencies specified by all of the
// modules defined in the parsed Blueprints files are valid. This means that
// the modules depended upon are defined and that no circular dependencies
// exist.
func (c *Context) ResolveDependencies(config interface{}) (deps []string, errs []error) {
	c.BeginEvent("resolve_deps")
	defer c.EndEvent("resolve_deps")
	return c.resolveDependencies(c.Context, config)
}

func (c *Context) resolveDependencies(ctx context.Context, config interface{}) (deps []string, errs []error) {
	pprof.Do(ctx, pprof.Labels("blueprint", "ResolveDependencies"), func(ctx context.Context) {
		c.initProviders()

		c.liveGlobals = newLiveTracker(c, config)

		deps, errs = c.generateSingletonBuildActions(config, c.preSingletonInfo, c.liveGlobals)
		if len(errs) > 0 {
			return
		}

		errs = c.updateDependencies()
		if len(errs) > 0 {
			return
		}

		var mutatorDeps []string
		mutatorDeps, errs = c.runMutators(ctx, config)
		if len(errs) > 0 {
			return
		}
		deps = append(deps, mutatorDeps...)

		if !c.skipCloneModulesAfterMutators {
			c.cloneModules()
		}

		c.dependenciesReady = true
	})

	if len(errs) > 0 {
		return nil, errs
	}

	return deps, nil
}
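
// Illustrative call sequence (not part of the original source): ResolveDependencies sits
// between parsing and build-action generation in the phase ordering this file describes.
// The driver below is an assumed sketch; error handling and signatures are abbreviated.
//
//	_, errs := ctx.ParseBlueprintsFiles("Android.bp", config)
//	if len(errs) == 0 {
//		_, errs = ctx.ResolveDependencies(config)
//	}
//	if len(errs) == 0 {
//		_, errs = ctx.PrepareBuildActions(config)
//	}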

// Default dependencies handling. If the module implements the (deprecated)
// DynamicDependerModule interface then this set consists of the union of those
// module names returned by its DynamicDependencies method and those added by calling
// AddDependencies or AddVariationDependencies on DynamicDependencyModuleContext.
func blueprintDepsMutator(ctx BottomUpMutatorContext) {
	if dynamicDepender, ok := ctx.Module().(DynamicDependerModule); ok {
		func() {
			defer func() {
				if r := recover(); r != nil {
					ctx.error(newPanicErrorf(r, "DynamicDependencies for %s", ctx.moduleInfo()))
				}
			}()
			dynamicDeps := dynamicDepender.DynamicDependencies(ctx)

			if ctx.Failed() {
				return
			}

			ctx.AddDependency(ctx.Module(), nil, dynamicDeps...)
		}()
	}
}

// findExactVariantOrSingle searches the moduleGroup for a module with the same variant as module,
// and returns the matching module, or nil if one is not found. A group with exactly one module
// is always considered matching.
func findExactVariantOrSingle(module *moduleInfo, possible *moduleGroup, reverse bool) *moduleInfo {
	found, _ := findVariant(module, possible, nil, false, reverse)
	if found == nil {
		for _, moduleOrAlias := range possible.modules {
			if m := moduleOrAlias.module(); m != nil {
				if found != nil {
					// more than one possible match, give up
					return nil
				}
				found = m
			}
		}
	}
	return found
}

func (c *Context) addDependency(module *moduleInfo, tag DependencyTag, depName string) (*moduleInfo, []error) {
	if _, ok := tag.(BaseDependencyTag); ok {
		panic("BaseDependencyTag is not allowed to be used directly!")
	}

	if depName == module.Name() {
		return nil, []error{&BlueprintError{
			Err: fmt.Errorf("%q depends on itself", depName),
			Pos: module.pos,
		}}
	}

	possibleDeps := c.moduleGroupFromName(depName, module.namespace())
	if possibleDeps == nil {
		return nil, c.discoveredMissingDependencies(module, depName, nil)
	}

	if m := findExactVariantOrSingle(module, possibleDeps, false); m != nil {
		module.newDirectDeps = append(module.newDirectDeps, depInfo{m, tag})
		atomic.AddUint32(&c.depsModified, 1)
		return m, nil
	}

	if c.allowMissingDependencies {
		// Allow missing variants.
		return nil, c.discoveredMissingDependencies(module, depName, module.variant.dependencyVariations)
	}

	return nil, []error{&BlueprintError{
		Err: fmt.Errorf("dependency %q of %q missing variant:\n  %s\navailable variants:\n  %s",
			depName, module.Name(),
			c.prettyPrintVariant(module.variant.dependencyVariations),
			c.prettyPrintGroupVariants(possibleDeps)),
		Pos: module.pos,
	}}
}

func (c *Context) findReverseDependency(module *moduleInfo, destName string) (*moduleInfo, []error) {
	if destName == module.Name() {
		return nil, []error{&BlueprintError{
			Err: fmt.Errorf("%q depends on itself", destName),
			Pos: module.pos,
		}}
	}

	possibleDeps := c.moduleGroupFromName(destName, module.namespace())
	if possibleDeps == nil {
		return nil, []error{&BlueprintError{
			Err: fmt.Errorf("%q has a reverse dependency on undefined module %q",
				module.Name(), destName),
			Pos: module.pos,
		}}
	}

	if m := findExactVariantOrSingle(module, possibleDeps, true); m != nil {
		return m, nil
	}

	if c.allowMissingDependencies {
		// Allow missing variants.
		return module, c.discoveredMissingDependencies(module, destName, module.variant.dependencyVariations)
	}

	return nil, []error{&BlueprintError{
		Err: fmt.Errorf("reverse dependency %q of %q missing variant:\n  %s\navailable variants:\n  %s",
			destName, module.Name(),
			c.prettyPrintVariant(module.variant.dependencyVariations),
			c.prettyPrintGroupVariants(possibleDeps)),
		Pos: module.pos,
	}}
}

func findVariant(module *moduleInfo, possibleDeps *moduleGroup, variations []Variation, far bool, reverse bool) (*moduleInfo, variationMap) {
	// We can't just append variant.Variant to module.dependencyVariant.variantName and
	// compare the strings because the result won't be in mutator registration order.
	// Create a new map instead, and then deep compare the maps.
	var newVariant variationMap
	if !far {
		if !reverse {
			// For forward dependency, ignore local variants by matching against
			// dependencyVariant which doesn't have the local variants
			newVariant = module.variant.dependencyVariations.clone()
		} else {
			// For reverse dependency, use all the variants
			newVariant = module.variant.variations.clone()
		}
	}
	for _, v := range variations {
		if newVariant == nil {
			newVariant = make(variationMap)
		}
		newVariant[v.Mutator] = v.Variation
	}

	check := func(variant variationMap) bool {
		if far {
			return newVariant.subsetOf(variant)
		} else {
			return variant.equal(newVariant)
		}
	}

	var foundDep *moduleInfo
	for _, m := range possibleDeps.modules {
		if check(m.moduleOrAliasVariant().variations) {
			foundDep = m.moduleOrAliasTarget()
			break
		}
	}

	return foundDep, newVariant
}

func (c *Context) addVariationDependency(module *moduleInfo, variations []Variation,
	tag DependencyTag, depName string, far bool) (*moduleInfo, []error) {
	if _, ok := tag.(BaseDependencyTag); ok {
		panic("BaseDependencyTag is not allowed to be used directly!")
	}

	possibleDeps := c.moduleGroupFromName(depName, module.namespace())
	if possibleDeps == nil {
		return nil, c.discoveredMissingDependencies(module, depName, nil)
	}

	foundDep, newVariant := findVariant(module, possibleDeps, variations, far, false)

	if foundDep == nil {
		if c.allowMissingDependencies {
			// Allow missing variants.
			return nil, c.discoveredMissingDependencies(module, depName, newVariant)
		}
		return nil, []error{&BlueprintError{
			Err: fmt.Errorf("dependency %q of %q missing variant:\n  %s\navailable variants:\n  %s",
				depName, module.Name(),
				c.prettyPrintVariant(newVariant),
				c.prettyPrintGroupVariants(possibleDeps)),
			Pos: module.pos,
		}}
	}

	if module == foundDep {
		return nil, []error{&BlueprintError{
			Err: fmt.Errorf("%q depends on itself", depName),
			Pos: module.pos,
		}}
	}
	// AddVariationDependency allows adding a dependency on itself, but only if
	// that module is earlier in the module list than this one, since we always
	// run GenerateBuildActions in order for the variants of a module
	if foundDep.group == module.group && beforeInModuleList(module, foundDep, module.group.modules) {
		return nil, []error{&BlueprintError{
			Err: fmt.Errorf("%q depends on later version of itself", depName),
			Pos: module.pos,
		}}
	}
	module.newDirectDeps = append(module.newDirectDeps, depInfo{foundDep, tag})
	atomic.AddUint32(&c.depsModified, 1)
	return foundDep, nil
}

func (c *Context) addInterVariantDependency(origModule *moduleInfo, tag DependencyTag,
	from, to Module) *moduleInfo {
	if _, ok := tag.(BaseDependencyTag); ok {
		panic("BaseDependencyTag is not allowed to be used directly!")
	}

	var fromInfo, toInfo *moduleInfo
	for _, moduleOrAlias := range origModule.splitModules {
		if m := moduleOrAlias.module(); m != nil {
			if m.logicModule == from {
				fromInfo = m
			}
			if m.logicModule == to {
				toInfo = m
				if fromInfo != nil {
					panic(fmt.Errorf("%q depends on later version of itself", origModule.Name()))
				}
			}
		}
	}

	if fromInfo == nil || toInfo == nil {
		panic(fmt.Errorf("AddInterVariantDependency called for module %q on invalid variant",
			origModule.Name()))
	}

	fromInfo.newDirectDeps = append(fromInfo.newDirectDeps, depInfo{toInfo, tag})
	atomic.AddUint32(&c.depsModified, 1)
	return toInfo
}

// findBlueprintDescendants returns a map linking parent Blueprint files to child Blueprints files
// For example, if paths = []string{"a/b/c/Android.bp", "a/Android.bp"},
// then descendants = {"":[]string{"a/Android.bp"}, "a/Android.bp":[]string{"a/b/c/Android.bp"}}
func findBlueprintDescendants(paths []string) (descendants map[string][]string, err error) {
	// make mapping from dir path to file path
	filesByDir := make(map[string]string, len(paths))
	for _, path := range paths {
		dir := filepath.Dir(path)
		_, alreadyFound := filesByDir[dir]
		if alreadyFound {
			return nil, fmt.Errorf("Found two Blueprint files in directory %v : %v and %v", dir, filesByDir[dir], path)
		}
		filesByDir[dir] = path
	}

	findAncestor := func(childFile string) (ancestor string) {
		prevAncestorDir := filepath.Dir(childFile)
		for {
			ancestorDir := filepath.Dir(prevAncestorDir)
			if ancestorDir == prevAncestorDir {
				// reached the root dir without any matches; assign this as a descendant of ""
				return ""
			}

			ancestorFile, ancestorExists := filesByDir[ancestorDir]
			if ancestorExists {
				return ancestorFile
			}
			prevAncestorDir = ancestorDir
		}
	}
	// generate the descendants map
	descendants = make(map[string][]string, len(filesByDir))
	for _, childFile := range filesByDir {
		ancestorFile := findAncestor(childFile)
		descendants[ancestorFile] = append(descendants[ancestorFile], childFile)
	}
	return descendants, nil
}

type visitOrderer interface {
	// returns the number of modules that this module needs to wait for
	waitCount(module *moduleInfo) int
	// returns the list of modules that are waiting for this module
	propagate(module *moduleInfo) []*moduleInfo
	// visit modules in order
	visit(modules []*moduleInfo, visit func(*moduleInfo, chan<- pauseSpec) bool)
}

type unorderedVisitorImpl struct{}

func (unorderedVisitorImpl) waitCount(module *moduleInfo) int {
	return 0
}

func (unorderedVisitorImpl) propagate(module *moduleInfo) []*moduleInfo {
	return nil
}

func (unorderedVisitorImpl) visit(modules []*moduleInfo, visit func(*moduleInfo, chan<- pauseSpec) bool) {
	for _, module := range modules {
		if visit(module, nil) {
			return
		}
	}
}

type bottomUpVisitorImpl struct{}

func (bottomUpVisitorImpl) waitCount(module *moduleInfo) int {
	return len(module.forwardDeps)
}

func (bottomUpVisitorImpl) propagate(module *moduleInfo) []*moduleInfo {
	return module.reverseDeps
}

func (bottomUpVisitorImpl) visit(modules []*moduleInfo, visit func(*moduleInfo, chan<- pauseSpec) bool) {
	for _, module := range modules {
		if visit(module, nil) {
			return
		}
	}
}

type topDownVisitorImpl struct{}

func (topDownVisitorImpl) waitCount(module *moduleInfo) int {
	return len(module.reverseDeps)
}

func (topDownVisitorImpl) propagate(module *moduleInfo) []*moduleInfo {
	return module.forwardDeps
}

func (topDownVisitorImpl) visit(modules []*moduleInfo, visit func(*moduleInfo, chan<- pauseSpec) bool) {
	for i := 0; i < len(modules); i++ {
		module := modules[len(modules)-1-i]
		if visit(module, nil) {
			return
		}
	}
}

var (
	bottomUpVisitor bottomUpVisitorImpl
	topDownVisitor  topDownVisitorImpl
)

// pauseSpec describes a pause that a module's visitor needs to make until another module
// has been visited, at which point the unpause channel will be closed.
type pauseSpec struct {
	paused  *moduleInfo
	until   *moduleInfo
	unpause unpause
}

type unpause chan struct{}

const parallelVisitLimit = 1000
|
|
|
|
|
2016-08-06 07:30:44 +02:00
|
|
|
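
// Illustrative sketch (not part of the original source): a visit callback
// passed to parallelVisit can block until another module has been visited by
// sending a pauseSpec on its pause channel and then waiting for the unpause
// channel to be closed.  findPrerequisite is a hypothetical helper.
//
//	visit := func(module *moduleInfo, pause chan<- pauseSpec) bool {
//		if prereq := findPrerequisite(module); prereq != nil {
//			unpauseCh := make(unpause)
//			pause <- pauseSpec{paused: module, until: prereq, unpause: unpauseCh}
//			<-unpauseCh // closed by parallelVisit once prereq has been visited
//		}
//		return false // returning true cancels the remaining walk
//	}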

// Calls visit on each module, guaranteeing that visit is not called on a module until visit on all
// of its dependencies has finished.  A visit function can write a pauseSpec to the pause channel
// to wait for another dependency to be visited.  If a visit function returns true to cancel
// while another visitor is paused, the paused visitor will never be resumed and its goroutine
// will stay paused forever.
func parallelVisit(modules []*moduleInfo, order visitOrderer, limit int,
	visit func(module *moduleInfo, pause chan<- pauseSpec) bool) []error {

	doneCh := make(chan *moduleInfo)
	cancelCh := make(chan bool)
	pauseCh := make(chan pauseSpec)
	cancel := false

	var backlog []*moduleInfo      // Visitors that are ready to start but backlogged due to limit.
	var unpauseBacklog []pauseSpec // Visitors that are ready to unpause but backlogged due to limit.

	active := 0  // Number of visitors running, not counting paused visitors.
	visited := 0 // Number of finished visitors.

	pauseMap := make(map[*moduleInfo][]pauseSpec)

	for _, module := range modules {
		module.waitingCount = order.waitCount(module)
	}

	// Call the visitor on a module if there are fewer active visitors than the parallelism
	// limit, otherwise add it to the backlog.
	startOrBacklog := func(module *moduleInfo) {
		if active < limit {
			active++
			go func() {
				ret := visit(module, pauseCh)
				if ret {
					cancelCh <- true
				}
				doneCh <- module
			}()
		} else {
			backlog = append(backlog, module)
		}
	}

	// Unpause the already-started but paused visitor on a module if there are fewer active
	// visitors than the parallelism limit, otherwise add it to the backlog.
	unpauseOrBacklog := func(pauseSpec pauseSpec) {
		if active < limit {
			active++
			close(pauseSpec.unpause)
		} else {
			unpauseBacklog = append(unpauseBacklog, pauseSpec)
		}
	}

	// Start any modules in the backlog up to the parallelism limit.  Unpause paused modules first
	// since they may already be holding resources.
	unpauseOrStartFromBacklog := func() {
		for active < limit && len(unpauseBacklog) > 0 {
			unpause := unpauseBacklog[0]
			unpauseBacklog = unpauseBacklog[1:]
			unpauseOrBacklog(unpause)
		}
		for active < limit && len(backlog) > 0 {
			toVisit := backlog[0]
			backlog = backlog[1:]
			startOrBacklog(toVisit)
		}
	}

	toVisit := len(modules)

	// Start or backlog any modules that are not waiting for any other modules.
	for _, module := range modules {
		if module.waitingCount == 0 {
			startOrBacklog(module)
		}
	}

	for active > 0 {
		select {
		case <-cancelCh:
			cancel = true
			backlog = nil
		case doneModule := <-doneCh:
			active--
			if !cancel {
				// Mark this module as done.
				doneModule.waitingCount = -1
				visited++

				// Unpause or backlog any modules that were waiting for this one.
				if unpauses, ok := pauseMap[doneModule]; ok {
					delete(pauseMap, doneModule)
					for _, unpause := range unpauses {
						unpauseOrBacklog(unpause)
					}
				}

				// Start any backlogged modules up to limit.
				unpauseOrStartFromBacklog()

				// Decrement waitingCount on the next modules in the tree based
				// on propagation order, and start or backlog them if they are
				// ready to start.
				for _, module := range order.propagate(doneModule) {
					module.waitingCount--
					if module.waitingCount == 0 {
						startOrBacklog(module)
					}
				}
			}
		case pauseSpec := <-pauseCh:
			if pauseSpec.until.waitingCount == -1 {
				// Module being paused for is already finished, resume immediately.
				close(pauseSpec.unpause)
			} else {
				// Register for unpausing.
				pauseMap[pauseSpec.until] = append(pauseMap[pauseSpec.until], pauseSpec)

				// Don't count paused visitors as active so that this can't deadlock
				// if 1000 visitors are paused simultaneously.
				active--
				unpauseOrStartFromBacklog()
			}
		}
	}

	if !cancel {
		// Invariant check: no backlogged modules, these weren't waiting on anything except
		// the parallelism limit so they should have run.
		if len(backlog) > 0 {
			panic(fmt.Errorf("parallelVisit finished with %d backlogged visitors", len(backlog)))
		}

		// Invariant check: no backlogged paused modules, these weren't waiting on anything
		// except the parallelism limit so they should have run.
		if len(unpauseBacklog) > 0 {
			panic(fmt.Errorf("parallelVisit finished with %d backlogged unpaused visitors", len(unpauseBacklog)))
		}

		if len(pauseMap) > 0 {
			// Probably a deadlock due to a newly added dependency cycle.  Start from each module in
			// the order of the input modules list and perform a depth-first search for the module
			// it is paused on, ignoring modules that are marked as done.  Note this traverses from
			// modules to the modules that would have been unblocked when that module finished, i.e.
			// the reverse of the visitOrderer.

			// In order to reduce duplicated work, once a module has been checked and determined
			// not to be part of a cycle add it and everything that depends on it to the checked
			// map.
			checked := make(map[*moduleInfo]struct{})

			var check func(module, end *moduleInfo) []*moduleInfo
			check = func(module, end *moduleInfo) []*moduleInfo {
				if module.waitingCount == -1 {
					// This module was finished, it can't be part of a loop.
					return nil
				}
				if module == end {
					// This module is the end of the loop, start rolling up the cycle.
					return []*moduleInfo{module}
				}

				if _, alreadyChecked := checked[module]; alreadyChecked {
					return nil
				}

				for _, dep := range order.propagate(module) {
					cycle := check(dep, end)
					if cycle != nil {
						return append([]*moduleInfo{module}, cycle...)
					}
				}
				for _, depPauseSpec := range pauseMap[module] {
					cycle := check(depPauseSpec.paused, end)
					if cycle != nil {
						return append([]*moduleInfo{module}, cycle...)
					}
				}

				checked[module] = struct{}{}
				return nil
			}

			// Iterate over the modules list instead of pauseMap to provide deterministic ordering.
			for _, module := range modules {
				for _, pauseSpec := range pauseMap[module] {
					cycle := check(pauseSpec.paused, pauseSpec.until)
					if len(cycle) > 0 {
						return cycleError(cycle)
					}
				}
			}
		}

		// Invariant check: if there was no deadlock and no cancellation every module
		// should have been visited.
		if visited != toVisit {
			panic(fmt.Errorf("parallelVisit ran %d visitors, expected %d", visited, toVisit))
		}

		// Invariant check: if there was no deadlock and no cancellation every module
		// should have been visited, so there is nothing left to be paused on.
		if len(pauseMap) > 0 {
			panic(fmt.Errorf("parallelVisit finished with %d paused visitors", len(pauseMap)))
		}
	}

	return nil
}
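
// A minimal sketch of driving parallelVisit, roughly how other passes in this
// package use it; the visit body here is only a placeholder:
//
//	errs := parallelVisit(c.modulesSorted, bottomUpVisitor, parallelVisitLimit,
//		func(module *moduleInfo, pause chan<- pauseSpec) bool {
//			// ... per-module work, optionally pausing via the pause channel ...
//			return false // true cancels the rest of the walk
//		})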

func cycleError(cycle []*moduleInfo) (errs []error) {
	// The cycle list is in reverse order because all the 'check' calls append
	// their own module to the list.
	errs = append(errs, &BlueprintError{
		Err: fmt.Errorf("encountered dependency cycle:"),
		Pos: cycle[len(cycle)-1].pos,
	})

	// Iterate backwards through the cycle list.
	curModule := cycle[0]
	for i := len(cycle) - 1; i >= 0; i-- {
		nextModule := cycle[i]
		errs = append(errs, &BlueprintError{
			Err: fmt.Errorf("    %s depends on %s",
				curModule, nextModule),
			Pos: curModule.pos,
		})
		curModule = nextModule
	}

	return errs
}
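
// For a hypothetical two-module cycle where libA depends on libB and libB
// depends on libA, the errors built above render as:
//
//	encountered dependency cycle:
//	    libA depends on libB
//	    libB depends on libA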

// updateDependencies recursively walks the module dependency graph and updates
// additional fields based on the dependencies.  It builds a sorted list of modules
// such that dependencies of a module always appear first, and populates reverse
// dependency links and counts of total dependencies.  It also reports errors when
// it encounters dependency cycles.  This should be called after resolveDependencies,
// as well as after any mutator pass has called addDependency
func (c *Context) updateDependencies() (errs []error) {
	c.cachedDepsModified = true
	visited := make(map[*moduleInfo]bool)  // modules that were already checked
	checking := make(map[*moduleInfo]bool) // modules actively being checked

	sorted := make([]*moduleInfo, 0, len(c.moduleInfo))

	var check func(group *moduleInfo) []*moduleInfo

	check = func(module *moduleInfo) []*moduleInfo {
		visited[module] = true
		checking[module] = true
		defer delete(checking, module)

		// Reset the forward and reverse deps without reducing their capacity to avoid reallocation.
		module.reverseDeps = module.reverseDeps[:0]
		module.forwardDeps = module.forwardDeps[:0]

		// Add an implicit dependency ordering on all earlier modules in the same module group
		for _, dep := range module.group.modules {
			if dep == module {
				break
			}
			if depModule := dep.module(); depModule != nil {
				module.forwardDeps = append(module.forwardDeps, depModule)
			}
		}

	outer:
		for _, dep := range module.directDeps {
			// use a loop to check for duplicates, average number of directDeps measured to be 9.5.
			for _, exists := range module.forwardDeps {
				if dep.module == exists {
					continue outer
				}
			}
			module.forwardDeps = append(module.forwardDeps, dep.module)
		}

		for _, dep := range module.forwardDeps {
			if checking[dep] {
				// This is a cycle.
				return []*moduleInfo{dep, module}
			}

			if !visited[dep] {
				cycle := check(dep)
				if cycle != nil {
					if cycle[0] == module {
						// We are the "start" of the cycle, so we're responsible
						// for generating the errors.
						errs = append(errs, cycleError(cycle)...)

						// We can continue processing this module's children to
						// find more cycles.  Since all the modules that were
						// part of the found cycle were marked as visited we
						// won't run into that cycle again.
					} else {
						// We're not the "start" of the cycle, so we just append
						// our module to the list and return it.
						return append(cycle, module)
					}
				}
			}

			dep.reverseDeps = append(dep.reverseDeps, module)
		}

		sorted = append(sorted, module)

		return nil
	}

	for _, module := range c.moduleInfo {
		if !visited[module] {
			cycle := check(module)
			if cycle != nil {
				if cycle[len(cycle)-1] != module {
					panic("inconceivable!")
				}
				errs = append(errs, cycleError(cycle)...)
			}
		}
	}

	c.modulesSorted = sorted

	return
}

type jsonVariations []Variation

type jsonModuleName struct {
	Name                 string
	Variant              string
	Variations           jsonVariations
	DependencyVariations jsonVariations
}

type jsonDep struct {
	jsonModuleName
	Tag string
}

type JsonModule struct {
	jsonModuleName
	Deps      []jsonDep
	Type      string
	Blueprint string
	CreatedBy *string
	Module    map[string]interface{}
}

func toJsonVariationMap(vm variationMap) jsonVariations {
	m := make(jsonVariations, 0, len(vm))
	for k, v := range vm {
		m = append(m, Variation{k, v})
	}
	sort.Slice(m, func(i, j int) bool {
		if m[i].Mutator != m[j].Mutator {
			return m[i].Mutator < m[j].Mutator
		}
		return m[i].Variation < m[j].Variation
	})
	return m
}

func jsonModuleNameFromModuleInfo(m *moduleInfo) *jsonModuleName {
	return &jsonModuleName{
		Name:                 m.Name(),
		Variant:              m.variant.name,
		Variations:           toJsonVariationMap(m.variant.variations),
		DependencyVariations: toJsonVariationMap(m.variant.dependencyVariations),
	}
}
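
// Abbreviated sketch of how a single module serializes with these types; the
// field values are hypothetical (see PrintJSONGraphAndActions below):
//
//	{
//		"Name": "libexample",
//		"Variant": "linux_glibc_common",
//		"Variations": [{"Mutator": "os", "Variation": "linux_glibc"}],
//		"DependencyVariations": null,
//		"Deps": [{"Name": "libdep", ..., "Tag": ""}],
//		"Type": "cc_library",
//		"Blueprint": "example/Android.bp",
//		"CreatedBy": null,
//		"Module": {}
//	}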

type JSONDataSupplier interface {
	AddJSONData(d *map[string]interface{})
}

// JSONAction contains the action-related info exposed to the JSON module graph.
type JSONAction struct {
	Inputs  []string
	Outputs []string
}

// JSONActionSupplier allows JSON representation of additional actions that are
// not registered in Ninja.
type JSONActionSupplier interface {
	JSONActions() []JSONAction
}
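
// A hedged sketch of opting extra data and actions into the JSON output: any
// logicModule or provider value may implement either interface.  The
// licenseInfo type and its fields are hypothetical.
//
//	type licenseInfo struct {
//		Texts []string
//	}
//
//	func (l *licenseInfo) AddJSONData(d *map[string]interface{}) {
//		(*d)["LicenseTexts"] = l.Texts
//	}
//
//	func (l *licenseInfo) JSONActions() []JSONAction {
//		return []JSONAction{{Inputs: l.Texts}}
//	}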

func jsonModuleFromModuleInfo(m *moduleInfo) *JsonModule {
	result := &JsonModule{
		jsonModuleName: *jsonModuleNameFromModuleInfo(m),
		Deps:           make([]jsonDep, 0),
		Type:           m.typeName,
		Blueprint:      m.relBlueprintsFile,
		Module:         make(map[string]interface{}),
	}
	if m.createdBy != nil {
		n := m.createdBy.Name()
		result.CreatedBy = &n
	}
	if j, ok := m.logicModule.(JSONDataSupplier); ok {
		j.AddJSONData(&result.Module)
	}
	for _, p := range m.providers {
		if j, ok := p.(JSONDataSupplier); ok {
			j.AddJSONData(&result.Module)
		}
	}
	return result
}

// jsonModuleWithActionsFromModuleInfo is similar to jsonModuleFromModuleInfo,
// but additionally records the inputs and outputs of the module's actions
// (taken from moduleInfo.actionDefs) under the "Actions" key of the Module map.
func jsonModuleWithActionsFromModuleInfo(m *moduleInfo) *JsonModule {
	result := &JsonModule{
		jsonModuleName: jsonModuleName{
			Name:    m.Name(),
			Variant: m.variant.name,
		},
		Deps:      make([]jsonDep, 0),
		Type:      m.typeName,
		Blueprint: m.relBlueprintsFile,
		Module:    make(map[string]interface{}),
	}
	var actions []JSONAction
	for _, bDef := range m.actionDefs.buildDefs {
		actions = append(actions, JSONAction{
			Inputs: append(
				getNinjaStringsWithNilPkgNames(bDef.Inputs),
				getNinjaStringsWithNilPkgNames(bDef.Implicits)...),
			Outputs: append(
				getNinjaStringsWithNilPkgNames(bDef.Outputs),
				getNinjaStringsWithNilPkgNames(bDef.ImplicitOutputs)...),
		})
	}

	if j, ok := m.logicModule.(JSONActionSupplier); ok {
		actions = append(actions, j.JSONActions()...)
	}
	for _, p := range m.providers {
		if j, ok := p.(JSONActionSupplier); ok {
			actions = append(actions, j.JSONActions()...)
		}
	}

	result.Module["Actions"] = actions
	return result
}

// Gets a list of strings from the given list of ninjaStrings by invoking
// ninjaString.Value with nil pkgNames on each of the input ninjaStrings.
func getNinjaStringsWithNilPkgNames(nStrs []ninjaString) []string {
	var strs []string
	for _, nstr := range nStrs {
		strs = append(strs, nstr.Value(nil))
	}
	return strs
}

// GetWeightedOutputsFromPredicate traverses all module variants, applies the
// given predicate to the JSON representation (including actions) of each one,
// and returns a map from the matching modules' output paths to the weight
// reported by the predicate.  If an output appears more than once, the highest
// weight wins.
func (c *Context) GetWeightedOutputsFromPredicate(predicate func(*JsonModule) (bool, int)) map[string]int {
	outputToWeight := make(map[string]int)
	for _, m := range c.modulesSorted {
		jmWithActions := jsonModuleWithActionsFromModuleInfo(m)
		if ok, weight := predicate(jmWithActions); ok {
			for _, a := range jmWithActions.Module["Actions"].([]JSONAction) {
				for _, o := range a.Outputs {
					if val, ok := outputToWeight[o]; ok {
						if val > weight {
							continue
						}
					}
					outputToWeight[o] = weight
				}
			}
		}
	}
	return outputToWeight
}
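
// A minimal sketch of calling GetWeightedOutputsFromPredicate; the module type
// string and weight are hypothetical:
//
//	weights := c.GetWeightedOutputsFromPredicate(func(jm *JsonModule) (bool, int) {
//		return jm.Type == "cc_library", 10
//	})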

func inList(s string, l []string) bool {
	for _, element := range l {
		if s == element {
			return true
		}
	}
	return false
}

// PrintJSONGraphAndActions prints information about the modules to a JSON
// module graph file and, separately, the modules together with their actions
// to a second JSON file.
func (c *Context) PrintJSONGraphAndActions(wGraph io.Writer, wActions io.Writer) {
	modulesToGraph := make([]*JsonModule, 0)
	modulesToActions := make([]*JsonModule, 0)
	for _, m := range c.modulesSorted {
		jm := jsonModuleFromModuleInfo(m)
		jmWithActions := jsonModuleWithActionsFromModuleInfo(m)
		for _, d := range m.directDeps {
			jm.Deps = append(jm.Deps, jsonDep{
				jsonModuleName: *jsonModuleNameFromModuleInfo(d.module),
				Tag:            fmt.Sprintf("%T %+v", d.tag, d.tag),
			})
			jmWithActions.Deps = append(jmWithActions.Deps, jsonDep{
				jsonModuleName: jsonModuleName{
					Name: d.module.Name(),
				},
			})
		}
		modulesToGraph = append(modulesToGraph, jm)
		modulesToActions = append(modulesToActions, jmWithActions)
	}
	writeJson(wGraph, modulesToGraph)
	writeJson(wActions, modulesToActions)
}
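
// A sketch of writing the two JSON files, assuming the caller owns the file
// handles; error handling is elided and the file names are illustrative:
//
//	graphFile, _ := os.Create("module-graph.json")
//	actionsFile, _ := os.Create("module-actions.json")
//	defer graphFile.Close()
//	defer actionsFile.Close()
//	c.PrintJSONGraphAndActions(graphFile, actionsFile)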

func writeJson(w io.Writer, modules []*JsonModule) {
	e := json.NewEncoder(w)
	e.SetIndent("", "\t")
	e.Encode(modules)
}

// PrepareBuildActions generates an internal representation of all the build
// actions that need to be performed.  This process involves invoking the
// GenerateBuildActions method on each of the Module objects created during the
// parse phase and then on each of the registered Singleton objects.
//
// If the ResolveDependencies method has not already been called it is called
// automatically by this method.
//
// The config argument is made available to all of the Module and Singleton
// objects via the Config method on the ModuleContext and SingletonContext
// objects passed to GenerateBuildActions.  It is also passed to the functions
// specified via PoolFunc, RuleFunc, and VariableFunc so that they can compute
// config-specific values.
//
// The returned deps is a list of the Ninja file dependencies that were added
// by the modules and singletons via the ModuleContext.AddNinjaFileDeps(),
// SingletonContext.AddNinjaFileDeps(), and PackageContext.AddNinjaFileDeps()
// methods.
func (c *Context) PrepareBuildActions(config interface{}) (deps []string, errs []error) {
	c.BeginEvent("prepare_build_actions")
	defer c.EndEvent("prepare_build_actions")
	pprof.Do(c.Context, pprof.Labels("blueprint", "PrepareBuildActions"), func(ctx context.Context) {
		c.buildActionsReady = false

		if !c.dependenciesReady {
			var extraDeps []string
			extraDeps, errs = c.resolveDependencies(ctx, config)
			if len(errs) > 0 {
				return
			}
			deps = append(deps, extraDeps...)
		}

		var depsModules []string
		depsModules, errs = c.generateModuleBuildActions(config, c.liveGlobals)
		if len(errs) > 0 {
			return
		}

		var depsSingletons []string
		depsSingletons, errs = c.generateSingletonBuildActions(config, c.singletonInfo, c.liveGlobals)
		if len(errs) > 0 {
			return
		}

		deps = append(deps, depsModules...)
		deps = append(deps, depsSingletons...)

		if c.outDir != nil {
			err := c.liveGlobals.addNinjaStringDeps(c.outDir)
			if err != nil {
				errs = []error{err}
				return
			}
		}

		pkgNames, depsPackages := c.makeUniquePackageNames(c.liveGlobals)

		deps = append(deps, depsPackages...)

		c.memoizeFullNames(c.liveGlobals, pkgNames)

		// This will panic if it finds a problem since it's a programming error.
		c.checkForVariableReferenceCycles(c.liveGlobals.variables, pkgNames)

		c.pkgNames = pkgNames
		c.globalVariables = c.liveGlobals.variables
		c.globalPools = c.liveGlobals.pools
		c.globalRules = c.liveGlobals.rules

		c.buildActionsReady = true
	})

	if len(errs) > 0 {
		return nil, errs
	}

	return deps, nil
}
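
// A minimal sketch of this phase, assuming a Context that has already
// registered its module types and parsed its Blueprints files, and an opaque
// config value supplied by the caller:
//
//	deps, errs := ctx.PrepareBuildActions(config)
//	if len(errs) > 0 {
//		// report errors and stop
//	}
//	// deps lists additional Ninja file dependencies discovered while generating.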

func (c *Context) runMutators(ctx context.Context, config interface{}) (deps []string, errs []error) {
	pprof.Do(ctx, pprof.Labels("blueprint", "runMutators"), func(ctx context.Context) {
		for _, mutator := range c.mutatorInfo {
			pprof.Do(ctx, pprof.Labels("mutator", mutator.name), func(context.Context) {
				c.BeginEvent(mutator.name)
				defer c.EndEvent(mutator.name)
				var newDeps []string
				if mutator.topDownMutator != nil {
					newDeps, errs = c.runMutator(config, mutator, topDownMutator)
				} else if mutator.bottomUpMutator != nil {
					newDeps, errs = c.runMutator(config, mutator, bottomUpMutator)
				} else {
					panic("no mutator set on " + mutator.name)
				}
				if len(errs) > 0 {
					return
				}
				deps = append(deps, newDeps...)
			})
			if len(errs) > 0 {
				return
			}
		}
	})

	if len(errs) > 0 {
		return nil, errs
	}

	return deps, nil
}
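
// The mutators iterated above are registered on the Context up front, for
// example (the mutator names and functions here are hypothetical):
//
//	ctx.RegisterTopDownMutator("defaults", defaultsMutator)
//	ctx.RegisterBottomUpMutator("variants", variantsMutator)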

type mutatorDirection interface {
	run(mutator *mutatorInfo, ctx *mutatorContext)
	orderer() visitOrderer
	fmt.Stringer
}

type bottomUpMutatorImpl struct{}

func (bottomUpMutatorImpl) run(mutator *mutatorInfo, ctx *mutatorContext) {
	mutator.bottomUpMutator(ctx)
}

func (bottomUpMutatorImpl) orderer() visitOrderer {
	return bottomUpVisitor
}

func (bottomUpMutatorImpl) String() string {
	return "bottom up mutator"
}

type topDownMutatorImpl struct{}

func (topDownMutatorImpl) run(mutator *mutatorInfo, ctx *mutatorContext) {
	mutator.topDownMutator(ctx)
}

func (topDownMutatorImpl) orderer() visitOrderer {
	return topDownVisitor
}

func (topDownMutatorImpl) String() string {
	return "top down mutator"
}

var (
	topDownMutator  topDownMutatorImpl
	bottomUpMutator bottomUpMutatorImpl
)
type reverseDep struct {
|
|
|
|
module *moduleInfo
|
|
|
|
dep depInfo
|
|
|
|
}
|
|
|
|
|
2016-08-11 20:09:00 +02:00
|
|
|
func (c *Context) runMutator(config interface{}, mutator *mutatorInfo,
|
2017-08-01 02:26:06 +02:00
|
|
|
direction mutatorDirection) (deps []string, errs []error) {
|
2016-08-06 07:30:44 +02:00
|
|
|
|
|
|
|
newModuleInfo := make(map[Module]*moduleInfo)
|
|
|
|
for k, v := range c.moduleInfo {
|
|
|
|
newModuleInfo[k] = v
|
|
|
|
}
|
2014-12-19 01:28:54 +01:00
|
|
|
|
2016-12-09 19:29:05 +01:00
|
|
|
type globalStateChange struct {
|
2017-07-28 23:32:36 +02:00
|
|
|
reverse []reverseDep
|
|
|
|
rename []rename
|
|
|
|
replace []replace
|
|
|
|
newModules []*moduleInfo
|
2017-08-01 02:26:06 +02:00
|
|
|
deps []string
|
2016-12-09 19:29:05 +01:00
|
|
|
}
|
|
|
|
|
2016-04-12 00:47:28 +02:00
|
|
|
reverseDeps := make(map[*moduleInfo][]depInfo)
|
2016-12-09 19:29:05 +01:00
|
|
|
var rename []rename
|
|
|
|
var replace []replace
|
2017-07-28 23:32:36 +02:00
|
|
|
var newModules []*moduleInfo
|
2015-11-04 01:41:29 +01:00
|
|
|
|
2016-08-06 07:30:44 +02:00
|
|
|
errsCh := make(chan []error)
|
2016-12-09 19:29:05 +01:00
|
|
|
globalStateCh := make(chan globalStateChange)
|
2020-08-25 01:18:21 +02:00
|
|
|
newVariationsCh := make(chan modulesOrAliases)
|
2016-08-06 07:30:44 +02:00
|
|
|
done := make(chan bool)
|
2014-12-19 01:28:54 +01:00
|
|
|
|
2016-08-11 20:09:00 +02:00
|
|
|
c.depsModified = 0
|
|
|
|
|
2020-08-26 02:12:59 +02:00
|
|
|
visit := func(module *moduleInfo, pause chan<- pauseSpec) bool {
|
2015-04-15 05:28:10 +02:00
|
|
|
if module.splitModules != nil {
|
|
|
|
panic("split module found in sorted module list")
|
|
|
|
}
|
|
|
|
|
2015-03-11 23:43:52 +01:00
|
|
|
mctx := &mutatorContext{
|
|
|
|
baseModuleContext: baseModuleContext{
|
|
|
|
context: c,
|
|
|
|
config: config,
|
|
|
|
module: module,
|
|
|
|
},
|
2020-08-26 02:12:59 +02:00
|
|
|
name: mutator.name,
|
|
|
|
pauseCh: pause,
|
2015-03-11 23:43:52 +01:00
|
|
|
}
|
2014-12-19 01:28:54 +01:00
|
|
|
|
2020-07-02 19:08:12 +02:00
|
|
|
module.startedMutator = mutator
|
|
|
|
|
2016-01-07 22:43:09 +01:00
|
|
|
func() {
|
|
|
|
defer func() {
|
|
|
|
if r := recover(); r != nil {
|
2016-08-11 20:09:00 +02:00
|
|
|
in := fmt.Sprintf("%s %q for %s", direction, mutator.name, module)
|
2016-01-07 22:43:09 +01:00
|
|
|
if err, ok := r.(panicError); ok {
|
|
|
|
err.addIn(in)
|
|
|
|
mctx.error(err)
|
|
|
|
} else {
|
|
|
|
mctx.error(newPanicErrorf(r, in))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}()
|
2016-08-11 20:09:00 +02:00
|
|
|
direction.run(mutator, mctx)
|
2016-01-07 22:43:09 +01:00
|
|
|
}()
|
2016-08-06 07:30:44 +02:00
|
|
|
|
2020-07-02 19:08:12 +02:00
|
|
|
module.finishedMutator = mutator
|
|
|
|
|
2015-03-11 23:43:52 +01:00
|
|
|
if len(mctx.errs) > 0 {
|
2016-08-12 00:37:45 +02:00
|
|
|
errsCh <- mctx.errs
|
2016-08-06 07:30:44 +02:00
|
|
|
return true
|
2015-03-11 23:43:52 +01:00
|
|
|
}
|
2014-12-19 01:28:54 +01:00
|
|
|
|
2017-07-29 00:22:46 +02:00
|
|
|
if len(mctx.newVariations) > 0 {
|
|
|
|
newVariationsCh <- mctx.newVariations
|
2015-03-11 23:43:52 +01:00
|
|
|
}
|
2014-12-19 01:28:54 +01:00
|
|
|
|
2020-03-03 23:23:27 +01:00
|
|
|
if len(mctx.reverseDeps) > 0 || len(mctx.replace) > 0 || len(mctx.rename) > 0 || len(mctx.newModules) > 0 || len(mctx.ninjaFileDeps) > 0 {
|
2016-12-09 19:29:05 +01:00
|
|
|
globalStateCh <- globalStateChange{
|
2017-07-28 23:32:36 +02:00
|
|
|
reverse: mctx.reverseDeps,
|
|
|
|
replace: mctx.replace,
|
|
|
|
rename: mctx.rename,
|
|
|
|
newModules: mctx.newModules,
|
2017-08-01 02:26:06 +02:00
|
|
|
deps: mctx.ninjaFileDeps,
|
2016-12-09 19:29:05 +01:00
|
|
|
}
|
2016-08-06 07:30:44 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
|
|
|
// Process errors, global state changes, and new variations in a single goroutine
|
|
|
|
go func() {
|
|
|
|
for {
|
|
|
|
select {
|
|
|
|
case newErrs := <-errsCh:
|
|
|
|
errs = append(errs, newErrs...)
|
2016-12-09 19:29:05 +01:00
|
|
|
case globalStateChange := <-globalStateCh:
|
|
|
|
for _, r := range globalStateChange.reverse {
|
2016-08-06 07:30:44 +02:00
|
|
|
reverseDeps[r.module] = append(reverseDeps[r.module], r.dep)
|
|
|
|
}
|
2016-12-09 19:29:05 +01:00
|
|
|
replace = append(replace, globalStateChange.replace...)
|
|
|
|
rename = append(rename, globalStateChange.rename...)
|
2017-07-28 23:32:36 +02:00
|
|
|
newModules = append(newModules, globalStateChange.newModules...)
|
2017-08-01 02:26:06 +02:00
|
|
|
deps = append(deps, globalStateChange.deps...)
|
2017-07-29 00:22:46 +02:00
|
|
|
case newVariations := <-newVariationsCh:
|
2020-08-25 01:18:21 +02:00
|
|
|
for _, moduleOrAlias := range newVariations {
|
|
|
|
if m := moduleOrAlias.module(); m != nil {
|
|
|
|
newModuleInfo[m.logicModule] = m
|
|
|
|
}
|
2016-08-06 07:30:44 +02:00
|
|
|
}
|
|
|
|
case <-done:
|
|
|
|
return
|
|
|
|
}
|
2014-12-19 01:28:54 +01:00
|
|
|
}
|
2016-08-06 07:30:44 +02:00
|
|
|
}()
|
2014-12-19 01:28:54 +01:00
|
|
|
|
2020-07-02 19:08:12 +02:00
|
|
|
c.startedMutator = mutator
|
|
|
|
|
2020-08-26 02:12:59 +02:00
|
|
|
var visitErrs []error
|
2016-08-06 07:30:44 +02:00
|
|
|
if mutator.parallel {
|
2020-08-26 02:12:59 +02:00
|
|
|
visitErrs = parallelVisit(c.modulesSorted, direction.orderer(), parallelVisitLimit, visit)
|
2016-08-06 07:30:44 +02:00
|
|
|
} else {
|
2016-08-11 20:09:00 +02:00
|
|
|
direction.orderer().visit(c.modulesSorted, visit)
|
2016-08-06 07:30:44 +02:00
|
|
|
}
|
|
|
|
|
2020-08-26 02:12:59 +02:00
|
|
|
if len(visitErrs) > 0 {
|
|
|
|
return nil, visitErrs
|
|
|
|
}
|
|
|
|
|
2020-07-02 19:08:12 +02:00
|
|
|
c.finishedMutators[mutator] = true
|
|
|
|
|
2016-08-06 07:30:44 +02:00
|
|
|
done <- true
|
|
|
|
|
|
|
|
if len(errs) > 0 {
|
2017-08-01 02:26:06 +02:00
|
|
|
return nil, errs
|
2016-08-06 07:30:44 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
c.moduleInfo = newModuleInfo
|
|
|
|
|
|
|
|
for _, group := range c.moduleGroups {
|
|
|
|
for i := 0; i < len(group.modules); i++ {
|
2020-08-25 01:18:21 +02:00
|
|
|
module := group.modules[i].module()
|
|
|
|
if module == nil {
|
|
|
|
// Existing alias, skip it
|
|
|
|
continue
|
|
|
|
}
|
2016-08-06 07:30:44 +02:00
|
|
|
|
|
|
|
// Update module group to contain newly split variants
|
|
|
|
if module.splitModules != nil {
|
|
|
|
group.modules, i = spliceModules(group.modules, i, module.splitModules)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Fix up any remaining dependencies on modules that were split into variants
|
|
|
|
// by replacing them with the first variant
|
|
|
|
for j, dep := range module.directDeps {
|
|
|
|
if dep.module.logicModule == nil {
|
2020-08-25 01:18:21 +02:00
|
|
|
module.directDeps[j].module = dep.module.splitModules.firstModule()
|
2016-08-06 07:30:44 +02:00
|
|
|
}
|
|
|
|
}
|
2019-03-30 00:35:02 +01:00
|
|
|
|
2019-05-20 22:55:14 +02:00
|
|
|
if module.createdBy != nil && module.createdBy.logicModule == nil {
|
2020-08-25 01:18:21 +02:00
|
|
|
module.createdBy = module.createdBy.splitModules.firstModule()
|
2019-05-20 22:55:14 +02:00
|
|
|
}
|
|
|
|
|
2019-03-30 00:35:02 +01:00
|
|
|
// Add in any new direct dependencies that were added by the mutator
|
|
|
|
module.directDeps = append(module.directDeps, module.newDirectDeps...)
|
|
|
|
module.newDirectDeps = nil
|
2016-08-06 07:30:44 +02:00
|
|
|
}
|
2019-11-14 05:11:14 +01:00
|
|
|
|
2020-08-13 21:11:52 +02:00
|
|
|
findAliasTarget := func(variant variant) *moduleInfo {
|
2020-08-25 01:18:21 +02:00
|
|
|
for _, moduleOrAlias := range group.modules {
|
|
|
|
if alias := moduleOrAlias.alias(); alias != nil {
|
|
|
|
if alias.variant.variations.equal(variant.variations) {
|
|
|
|
return alias.target
|
|
|
|
}
|
2020-08-13 21:11:52 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2019-11-14 05:11:14 +01:00
|
|
|
// Forward or delete any dangling aliases.
|
2020-08-25 01:18:21 +02:00
|
|
|
// Use a manual loop instead of range because len(group.modules) can
|
|
|
|
// change inside the loop
|
|
|
|
for i := 0; i < len(group.modules); i++ {
|
|
|
|
if alias := group.modules[i].alias(); alias != nil {
|
|
|
|
if alias.target.logicModule == nil {
|
|
|
|
newTarget := findAliasTarget(alias.target.variant)
|
|
|
|
if newTarget != nil {
|
|
|
|
alias.target = newTarget
|
|
|
|
} else {
|
|
|
|
// The alias was left dangling, remove it.
|
|
|
|
group.modules = append(group.modules[:i], group.modules[i+1:]...)
|
|
|
|
i--
|
|
|
|
}
|
2019-11-14 05:11:14 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2014-12-19 01:28:54 +01:00
|
|
|
}
|
|
|
|
|
2019-03-30 00:35:02 +01:00
|
|
|
// Add in any new reverse dependencies that were added by the mutator
|
2015-11-04 01:41:29 +01:00
|
|
|
for module, deps := range reverseDeps {
|
2016-04-12 00:47:28 +02:00
|
|
|
sort.Sort(depSorter(deps))
|
2015-11-04 01:41:29 +01:00
|
|
|
module.directDeps = append(module.directDeps, deps...)
|
2016-08-11 20:09:00 +02:00
|
|
|
c.depsModified++
|
2015-11-04 01:41:29 +01:00
|
|
|
}
|
|
|
|
|
2017-07-28 23:32:36 +02:00
|
|
|
for _, module := range newModules {
|
|
|
|
errs = c.addModule(module)
|
|
|
|
if len(errs) > 0 {
|
2017-08-01 02:26:06 +02:00
|
|
|
return nil, errs
|
2017-07-28 23:32:36 +02:00
|
|
|
}
|
|
|
|
atomic.AddUint32(&c.depsModified, 1)
|
|
|
|
}
|
|
|
|
|
2016-12-09 19:29:05 +01:00
|
|
|
errs = c.handleRenames(rename)
|
|
|
|
if len(errs) > 0 {
|
2017-08-01 02:26:06 +02:00
|
|
|
return nil, errs
|
2016-12-09 19:29:05 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
errs = c.handleReplacements(replace)
|
2016-10-12 19:45:05 +02:00
|
|
|
if len(errs) > 0 {
|
2017-08-01 02:26:06 +02:00
|
|
|
return nil, errs
|
2016-10-12 19:45:05 +02:00
|
|
|
}
|
|
|
|
|
2016-08-11 20:09:00 +02:00
|
|
|
if c.depsModified > 0 {
|
|
|
|
errs = c.updateDependencies()
|
|
|
|
if len(errs) > 0 {
|
2017-08-01 02:26:06 +02:00
|
|
|
return nil, errs
|
2016-08-11 20:09:00 +02:00
|
|
|
}
|
2014-12-19 01:28:54 +01:00
|
|
|
}
|
|
|
|
|
2017-08-01 02:26:06 +02:00
|
|
|
return deps, errs
|
2014-12-19 01:28:54 +01:00
|
|
|
}
|
|
|
|
|
2016-04-12 00:41:52 +02:00
|
|
|
// Replaces every build logic module with a clone of itself. Prevents introducing problems where
|
|
|
|
// a mutator sets a non-property member variable on a module, which works until a later mutator
|
|
|
|
// creates variants of that module.
|
|
|
|
func (c *Context) cloneModules() {
|
2016-08-09 23:21:02 +02:00
|
|
|
type update struct {
|
|
|
|
orig Module
|
|
|
|
clone *moduleInfo
|
|
|
|
}
|
2018-03-28 20:50:12 +02:00
|
|
|
ch := make(chan update)
|
|
|
|
doneCh := make(chan bool)
|
|
|
|
go func() {
|
2020-08-26 02:12:59 +02:00
|
|
|
errs := parallelVisit(c.modulesSorted, unorderedVisitorImpl{}, parallelVisitLimit,
|
|
|
|
func(m *moduleInfo, pause chan<- pauseSpec) bool {
|
|
|
|
origLogicModule := m.logicModule
|
|
|
|
m.logicModule, m.properties = c.cloneLogicModule(m)
|
|
|
|
ch <- update{origLogicModule, m}
|
|
|
|
return false
|
|
|
|
})
|
|
|
|
if len(errs) > 0 {
|
|
|
|
panic(errs)
|
|
|
|
}
|
2018-03-28 20:50:12 +02:00
|
|
|
doneCh <- true
|
|
|
|
}()
|
2016-08-09 23:21:02 +02:00
|
|
|
|
2018-03-28 20:50:12 +02:00
|
|
|
done := false
|
|
|
|
for !done {
|
|
|
|
select {
|
|
|
|
case <-doneCh:
|
|
|
|
done = true
|
|
|
|
case update := <-ch:
|
|
|
|
delete(c.moduleInfo, update.orig)
|
|
|
|
c.moduleInfo[update.clone.logicModule] = update.clone
|
|
|
|
}
|
2016-04-12 00:41:52 +02:00
|
|
|
}
|
|
|
|
}
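// Illustrative sketch (added commentary; myModule and cachedFlags are
// hypothetical, not part of this package): cloning protects against shared
// mutable state such as
//
//	type myModule struct {
//		properties  struct{ Srcs []string }
//		cachedFlags []string // non-property field filled in by an early mutator
//	}
//
// Without cloneModules, variants created by a later mutator would all point at
// the same myModule value and therefore share cachedFlags; after cloning, each
// variant owns an independent copy.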
|
|
|
|
|
2016-08-06 07:30:44 +02:00
|
|
|
// Removes modules[i] from the list and inserts newModules... where it was located, returning
|
|
|
|
// the new slice and the index of the last inserted element
|
2020-08-25 01:18:21 +02:00
|
|
|
func spliceModules(modules modulesOrAliases, i int, newModules modulesOrAliases) (modulesOrAliases, int) {
|
2015-03-11 23:43:52 +01:00
|
|
|
spliceSize := len(newModules)
|
|
|
|
newLen := len(modules) + spliceSize - 1
|
2020-08-25 01:18:21 +02:00
|
|
|
var dest modulesOrAliases
|
2015-03-11 23:43:52 +01:00
|
|
|
if cap(modules) >= len(modules)-1+len(newModules) {
|
|
|
|
// We can fit the splice in the existing capacity, do everything in place
|
|
|
|
dest = modules[:newLen]
|
|
|
|
} else {
|
2020-08-25 01:18:21 +02:00
|
|
|
dest = make(modulesOrAliases, newLen)
|
2015-03-11 23:43:52 +01:00
|
|
|
copy(dest, modules[:i])
|
|
|
|
}
|
|
|
|
|
|
|
|
// Move the end of the slice over by spliceSize-1
|
2015-03-16 08:13:59 +01:00
|
|
|
copy(dest[i+spliceSize:], modules[i+1:])
|
2015-03-11 23:43:52 +01:00
|
|
|
|
|
|
|
// Copy the new modules into the slice
|
2015-03-16 08:13:59 +01:00
|
|
|
copy(dest[i:], newModules)
|
2015-03-11 23:43:52 +01:00
|
|
|
|
2016-08-06 07:30:44 +02:00
|
|
|
return dest, i + spliceSize - 1
|
2015-03-11 23:43:52 +01:00
|
|
|
}
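// Worked example (illustrative): splicing at i=1 in [a, b, c] with the new
// variants [b1, b2] produces [a, b1, b2, c] and returns index 2, the position
// of the last inserted element, so the caller's index-based loop resumes after
// the inserted variants.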
|
|
|
|
|
2014-06-12 03:31:16 +02:00
|
|
|
func (c *Context) generateModuleBuildActions(config interface{},
|
2014-06-26 02:21:54 +02:00
|
|
|
liveGlobals *liveTracker) ([]string, []error) {
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2023-05-15 22:59:49 +02:00
|
|
|
c.BeginEvent("generateModuleBuildActions")
|
|
|
|
defer c.EndEvent("generateModuleBuildActions")
|
2014-06-26 02:21:54 +02:00
|
|
|
var deps []string
|
2014-05-28 01:34:41 +02:00
|
|
|
var errs []error
|
|
|
|
|
2015-01-08 03:08:56 +01:00
|
|
|
cancelCh := make(chan struct{})
|
|
|
|
errsCh := make(chan []error)
|
|
|
|
depsCh := make(chan []string)
|
|
|
|
|
|
|
|
go func() {
|
|
|
|
for {
|
|
|
|
select {
|
|
|
|
case <-cancelCh:
|
|
|
|
close(cancelCh)
|
|
|
|
return
|
|
|
|
case newErrs := <-errsCh:
|
|
|
|
errs = append(errs, newErrs...)
|
|
|
|
case newDeps := <-depsCh:
|
|
|
|
deps = append(deps, newDeps...)
|
|
|
|
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
2020-08-26 02:12:59 +02:00
|
|
|
visitErrs := parallelVisit(c.modulesSorted, bottomUpVisitor, parallelVisitLimit,
|
|
|
|
func(module *moduleInfo, pause chan<- pauseSpec) bool {
|
|
|
|
uniqueName := c.nameInterface.UniqueName(newNamespaceContext(module), module.group.name)
|
|
|
|
sanitizedName := toNinjaName(uniqueName)
|
2021-12-21 08:50:57 +01:00
|
|
|
sanitizedVariant := toNinjaName(module.variant.name)
|
2020-08-26 02:12:59 +02:00
|
|
|
|
2021-12-21 08:50:57 +01:00
|
|
|
prefix := moduleNamespacePrefix(sanitizedName + "_" + sanitizedVariant)
|
2020-08-26 02:12:59 +02:00
|
|
|
|
|
|
|
// The parent scope of the moduleContext's local scope gets overridden to be that of the
|
|
|
|
// calling Go package on a per-call basis. Since the initial parent scope doesn't matter we
|
|
|
|
// just set it to nil.
|
|
|
|
scope := newLocalScope(nil, prefix)
|
|
|
|
|
|
|
|
mctx := &moduleContext{
|
|
|
|
baseModuleContext: baseModuleContext{
|
|
|
|
context: c,
|
|
|
|
config: config,
|
|
|
|
module: module,
|
|
|
|
},
|
|
|
|
scope: scope,
|
|
|
|
handledMissingDeps: module.missingDeps == nil,
|
|
|
|
}
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2020-07-02 19:08:12 +02:00
|
|
|
mctx.module.startedGenerateBuildActions = true
|
|
|
|
|
2020-08-26 02:12:59 +02:00
|
|
|
func() {
|
|
|
|
defer func() {
|
|
|
|
if r := recover(); r != nil {
|
|
|
|
in := fmt.Sprintf("GenerateBuildActions for %s", module)
|
|
|
|
if err, ok := r.(panicError); ok {
|
|
|
|
err.addIn(in)
|
|
|
|
mctx.error(err)
|
|
|
|
} else {
|
|
|
|
mctx.error(newPanicErrorf(r, in))
|
|
|
|
}
|
2016-01-07 22:43:09 +01:00
|
|
|
}
|
2020-08-26 02:12:59 +02:00
|
|
|
}()
|
|
|
|
mctx.module.logicModule.GenerateBuildActions(mctx)
|
2016-01-07 22:43:09 +01:00
|
|
|
}()
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2020-07-02 19:08:12 +02:00
|
|
|
mctx.module.finishedGenerateBuildActions = true
|
|
|
|
|
2020-08-26 02:12:59 +02:00
|
|
|
if len(mctx.errs) > 0 {
|
|
|
|
errsCh <- mctx.errs
|
|
|
|
return true
|
|
|
|
}
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2020-08-26 02:12:59 +02:00
|
|
|
if module.missingDeps != nil && !mctx.handledMissingDeps {
|
|
|
|
var errs []error
|
|
|
|
for _, depName := range module.missingDeps {
|
|
|
|
errs = append(errs, c.missingDependencyError(module, depName))
|
|
|
|
}
|
|
|
|
errsCh <- errs
|
|
|
|
return true
|
2015-12-18 00:49:30 +01:00
|
|
|
}
|
|
|
|
|
2020-08-26 02:12:59 +02:00
|
|
|
depsCh <- mctx.ninjaFileDeps
|
2014-06-26 02:21:54 +02:00
|
|
|
|
2020-08-26 02:12:59 +02:00
|
|
|
newErrs := c.processLocalBuildActions(&module.actionDefs,
|
|
|
|
&mctx.actionDefs, liveGlobals)
|
|
|
|
if len(newErrs) > 0 {
|
|
|
|
errsCh <- newErrs
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
return false
|
|
|
|
})
|
2015-01-08 03:08:56 +01:00
|
|
|
|
|
|
|
cancelCh <- struct{}{}
|
|
|
|
<-cancelCh
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2020-08-26 02:12:59 +02:00
|
|
|
errs = append(errs, visitErrs...)
|
|
|
|
|
2014-06-26 02:21:54 +02:00
|
|
|
return deps, errs
|
2014-05-28 01:34:41 +02:00
|
|
|
}
|
|
|
|
|
2023-05-15 22:59:49 +02:00
|
|
|
func (c *Context) generateOneSingletonBuildActions(config interface{},
|
|
|
|
info *singletonInfo, liveGlobals *liveTracker) ([]string, []error) {
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2014-06-26 02:21:54 +02:00
|
|
|
var deps []string
|
2014-05-28 01:34:41 +02:00
|
|
|
var errs []error
|
2014-06-26 02:21:54 +02:00
|
|
|
|
2023-05-15 22:59:49 +02:00
|
|
|
// The parent scope of the singletonContext's local scope gets overridden to be that of the
|
|
|
|
// calling Go package on a per-call basis. Since the initial parent scope doesn't matter we
|
|
|
|
// just set it to nil.
|
|
|
|
scope := newLocalScope(nil, singletonNamespacePrefix(info.name))
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2023-05-15 22:59:49 +02:00
|
|
|
sctx := &singletonContext{
|
|
|
|
name: info.name,
|
|
|
|
context: c,
|
|
|
|
config: config,
|
|
|
|
scope: scope,
|
|
|
|
globals: liveGlobals,
|
|
|
|
}
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2023-05-15 22:59:49 +02:00
|
|
|
func() {
|
|
|
|
defer func() {
|
|
|
|
if r := recover(); r != nil {
|
|
|
|
in := fmt.Sprintf("GenerateBuildActions for singleton %s", info.name)
|
|
|
|
if err, ok := r.(panicError); ok {
|
|
|
|
err.addIn(in)
|
|
|
|
sctx.error(err)
|
|
|
|
} else {
|
|
|
|
sctx.error(newPanicErrorf(r, in))
|
2016-01-07 22:43:09 +01:00
|
|
|
}
|
2023-05-15 22:59:49 +02:00
|
|
|
}
|
2016-01-07 22:43:09 +01:00
|
|
|
}()
|
2023-05-15 22:59:49 +02:00
|
|
|
info.singleton.GenerateBuildActions(sctx)
|
|
|
|
}()
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2023-05-15 22:59:49 +02:00
|
|
|
if len(sctx.errs) > 0 {
|
|
|
|
errs = append(errs, sctx.errs...)
|
|
|
|
return deps, errs
|
|
|
|
}
|
|
|
|
|
|
|
|
deps = append(deps, sctx.ninjaFileDeps...)
|
|
|
|
|
|
|
|
newErrs := c.processLocalBuildActions(&info.actionDefs,
|
|
|
|
&sctx.actionDefs, liveGlobals)
|
|
|
|
errs = append(errs, newErrs...)
|
|
|
|
return deps, errs
|
|
|
|
}
|
|
|
|
|
|
|
|
func (c *Context) generateParallelSingletonBuildActions(config interface{},
|
|
|
|
singletons []*singletonInfo, liveGlobals *liveTracker) ([]string, []error) {
|
|
|
|
|
|
|
|
c.BeginEvent("generateParallelSingletonBuildActions")
|
|
|
|
defer c.EndEvent("generateParallelSingletonBuildActions")
|
|
|
|
|
|
|
|
var deps []string
|
|
|
|
var errs []error
|
|
|
|
|
|
|
|
wg := sync.WaitGroup{}
|
|
|
|
cancelCh := make(chan struct{})
|
|
|
|
depsCh := make(chan []string)
|
|
|
|
errsCh := make(chan []error)
|
|
|
|
|
|
|
|
go func() {
|
|
|
|
for {
|
|
|
|
select {
|
|
|
|
case <-cancelCh:
|
|
|
|
close(cancelCh)
|
|
|
|
return
|
|
|
|
case dep := <-depsCh:
|
|
|
|
deps = append(deps, dep...)
|
|
|
|
case newErrs := <-errsCh:
|
|
|
|
if len(errs) <= maxErrors {
|
|
|
|
errs = append(errs, newErrs...)
|
|
|
|
}
|
2014-05-28 01:34:41 +02:00
|
|
|
}
|
2023-05-15 22:59:49 +02:00
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
|
|
|
for _, info := range singletons {
|
|
|
|
if !info.parallel {
|
|
|
|
// Skip any singletons registered with parallel=false.
|
2014-05-28 01:34:41 +02:00
|
|
|
continue
|
|
|
|
}
|
2023-05-15 22:59:49 +02:00
|
|
|
wg.Add(1)
|
|
|
|
go func(inf *singletonInfo) {
|
|
|
|
defer wg.Done()
|
|
|
|
newDeps, newErrs := c.generateOneSingletonBuildActions(config, inf, liveGlobals)
|
|
|
|
depsCh <- newDeps
|
|
|
|
errsCh <- newErrs
|
|
|
|
}(info)
|
|
|
|
}
|
|
|
|
wg.Wait()
|
|
|
|
|
|
|
|
cancelCh <- struct{}{}
|
|
|
|
<-cancelCh
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2023-05-15 22:59:49 +02:00
|
|
|
return deps, errs
|
|
|
|
}
|
|
|
|
|
|
|
|
func (c *Context) generateSingletonBuildActions(config interface{},
|
|
|
|
singletons []*singletonInfo, liveGlobals *liveTracker) ([]string, []error) {
|
|
|
|
|
|
|
|
c.BeginEvent("generateSingletonBuildActions")
|
|
|
|
defer c.EndEvent("generateSingletonBuildActions")
|
|
|
|
|
|
|
|
var deps []string
|
|
|
|
var errs []error
|
2014-06-26 02:21:54 +02:00
|
|
|
|
2023-05-15 22:59:49 +02:00
|
|
|
// Run one singleton. Use a variable to simplify manual validation testing.
|
|
|
|
var runSingleton = func(info *singletonInfo) {
|
|
|
|
c.BeginEvent("singleton:" + info.name)
|
|
|
|
defer c.EndEvent("singleton:" + info.name)
|
|
|
|
newDeps, newErrs := c.generateOneSingletonBuildActions(config, info, liveGlobals)
|
|
|
|
deps = append(deps, newDeps...)
|
2014-05-28 01:34:41 +02:00
|
|
|
errs = append(errs, newErrs...)
|
2023-05-15 22:59:49 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
// First, take care of any singletons that want to run in parallel.
|
|
|
|
deps, errs = c.generateParallelSingletonBuildActions(config, singletons, liveGlobals)
|
|
|
|
|
|
|
|
for _, info := range singletons {
|
|
|
|
if !info.parallel {
|
|
|
|
runSingleton(info)
|
|
|
|
if len(errs) > maxErrors {
|
|
|
|
break
|
|
|
|
}
|
2014-05-28 01:34:41 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-06-26 02:21:54 +02:00
|
|
|
return deps, errs
|
2014-05-28 01:34:41 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
func (c *Context) processLocalBuildActions(out, in *localBuildActions,
|
|
|
|
liveGlobals *liveTracker) []error {
|
|
|
|
|
|
|
|
var errs []error
|
|
|
|
|
|
|
|
// First we go through and add everything referenced by the module's
|
|
|
|
// buildDefs to the live globals set. This will end up adding the live
|
|
|
|
// locals to the set as well, but we'll take them out after.
|
|
|
|
for _, def := range in.buildDefs {
|
|
|
|
err := liveGlobals.AddBuildDefDeps(def)
|
|
|
|
if err != nil {
|
|
|
|
errs = append(errs, err)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(errs) > 0 {
|
|
|
|
return errs
|
|
|
|
}
|
|
|
|
|
2014-12-19 01:28:54 +01:00
|
|
|
out.buildDefs = append(out.buildDefs, in.buildDefs...)
|
2014-05-28 01:34:41 +02:00
|
|
|
|
|
|
|
// We use the now-incorrect set of live "globals" to determine which local
|
|
|
|
// definitions are live. As we go through copying those live locals to the
|
2014-12-19 01:28:54 +01:00
|
|
|
// moduleGroup we remove them from the live globals set.
|
2014-05-28 01:34:41 +02:00
|
|
|
for _, v := range in.variables {
|
2015-03-12 00:17:52 +01:00
|
|
|
isLive := liveGlobals.RemoveVariableIfLive(v)
|
2014-05-28 01:34:41 +02:00
|
|
|
if isLive {
|
|
|
|
out.variables = append(out.variables, v)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, r := range in.rules {
|
2015-03-12 00:17:52 +01:00
|
|
|
isLive := liveGlobals.RemoveRuleIfLive(r)
|
2014-05-28 01:34:41 +02:00
|
|
|
if isLive {
|
|
|
|
out.rules = append(out.rules, r)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2018-06-20 20:16:37 +02:00
|
|
|
func (c *Context) walkDeps(topModule *moduleInfo, allowDuplicates bool,
|
2016-08-07 07:52:01 +02:00
|
|
|
visitDown func(depInfo, *moduleInfo) bool, visitUp func(depInfo, *moduleInfo)) {
|
2015-10-06 23:03:27 +02:00
|
|
|
|
|
|
|
visited := make(map[*moduleInfo]bool)
|
2016-01-07 22:43:09 +01:00
|
|
|
var visiting *moduleInfo
|
|
|
|
|
|
|
|
defer func() {
|
|
|
|
if r := recover(); r != nil {
|
2016-08-07 07:52:01 +02:00
|
|
|
panic(newPanicErrorf(r, "WalkDeps(%s, %s, %s) for dependency %s",
|
|
|
|
topModule, funcName(visitDown), funcName(visitUp), visiting))
|
2016-01-07 22:43:09 +01:00
|
|
|
}
|
|
|
|
}()
|
2015-10-06 23:03:27 +02:00
|
|
|
|
|
|
|
var walk func(module *moduleInfo)
|
|
|
|
walk = func(module *moduleInfo) {
|
2016-04-12 00:47:28 +02:00
|
|
|
for _, dep := range module.directDeps {
|
2018-06-20 20:16:37 +02:00
|
|
|
if allowDuplicates || !visited[dep.module] {
|
2016-04-12 00:47:28 +02:00
|
|
|
visiting = dep.module
|
2016-08-07 07:52:01 +02:00
|
|
|
recurse := true
|
|
|
|
if visitDown != nil {
|
|
|
|
recurse = visitDown(dep, module)
|
|
|
|
}
|
2018-06-21 22:31:53 +02:00
|
|
|
if recurse && !visited[dep.module] {
|
2016-04-12 00:47:28 +02:00
|
|
|
walk(dep.module)
|
2020-04-02 11:51:33 +02:00
|
|
|
visited[dep.module] = true
|
2015-10-06 23:03:27 +02:00
|
|
|
}
|
2016-08-07 07:52:01 +02:00
|
|
|
if visitUp != nil {
|
|
|
|
visitUp(dep, module)
|
|
|
|
}
|
2015-10-06 23:03:27 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
walk(topModule)
|
|
|
|
}
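// Usage sketch (illustrative, using only the internal types defined in this
// file): collect the transitive dependencies of a module, visiting each once:
//
//	var transitive []*moduleInfo
//	c.walkDeps(top, false, nil, func(dep depInfo, parent *moduleInfo) {
//		transitive = append(transitive, dep.module)
//	})
//
// A non-nil visitDown callback that returns false prunes the walk below that
// dependency.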
|
|
|
|
|
2016-10-11 18:58:53 +02:00
|
|
|
type replace struct {
|
2020-06-30 13:15:26 +02:00
|
|
|
from, to *moduleInfo
|
|
|
|
predicate ReplaceDependencyPredicate
|
2016-10-11 18:58:53 +02:00
|
|
|
}
|
|
|
|
|
2016-10-12 19:45:05 +02:00
|
|
|
type rename struct {
|
|
|
|
group *moduleGroup
|
|
|
|
name string
|
|
|
|
}
|
|
|
|
|
2016-12-09 19:29:05 +01:00
|
|
|
func (c *Context) moduleMatchingVariant(module *moduleInfo, name string) *moduleInfo {
|
2019-11-14 05:10:12 +01:00
|
|
|
group := c.moduleGroupFromName(name, module.namespace())
|
2016-10-11 18:58:53 +02:00
|
|
|
|
2019-11-14 05:10:12 +01:00
|
|
|
if group == nil {
|
2016-12-09 19:29:05 +01:00
|
|
|
return nil
|
2016-10-11 18:58:53 +02:00
|
|
|
}
|
|
|
|
|
2019-11-14 05:10:12 +01:00
|
|
|
for _, m := range group.modules {
|
2020-09-12 04:22:27 +02:00
|
|
|
if module.variant.name == m.moduleOrAliasVariant().name {
|
2020-08-25 01:18:21 +02:00
|
|
|
return m.moduleOrAliasTarget()
|
2019-11-14 05:11:14 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-12-09 19:29:05 +01:00
|
|
|
return nil
|
2016-10-11 18:58:53 +02:00
|
|
|
}
|
|
|
|
|
2016-12-09 19:29:05 +01:00
|
|
|
func (c *Context) handleRenames(renames []rename) []error {
|
2016-10-12 19:45:05 +02:00
|
|
|
var errs []error
|
2016-12-09 19:29:05 +01:00
|
|
|
for _, rename := range renames {
|
2016-10-12 19:45:05 +02:00
|
|
|
group, name := rename.group, rename.name
|
2017-11-11 00:12:08 +01:00
|
|
|
if name == group.name || len(group.modules) < 1 {
|
2016-10-12 19:45:05 +02:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
2017-11-11 00:12:08 +01:00
|
|
|
errs = append(errs, c.nameInterface.Rename(group.name, rename.name, group.namespace)...)
|
2016-10-12 19:45:05 +02:00
|
|
|
}
|
|
|
|
|
2016-12-09 19:29:05 +01:00
|
|
|
return errs
|
|
|
|
}
|
|
|
|
|
|
|
|
func (c *Context) handleReplacements(replacements []replace) []error {
|
|
|
|
var errs []error
|
2020-06-30 13:15:26 +02:00
|
|
|
changedDeps := false
|
2016-12-09 19:29:05 +01:00
|
|
|
for _, replace := range replacements {
|
2016-10-11 18:58:53 +02:00
|
|
|
for _, m := range replace.from.reverseDeps {
|
|
|
|
for i, d := range m.directDeps {
|
|
|
|
if d.module == replace.from {
|
2020-06-30 13:15:26 +02:00
|
|
|
// If the replacement has a predicate then check it.
|
|
|
|
if replace.predicate == nil || replace.predicate(m.logicModule, d.tag, d.module.logicModule) {
|
|
|
|
m.directDeps[i].module = replace.to
|
|
|
|
changedDeps = true
|
|
|
|
}
|
2016-10-11 18:58:53 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
2016-12-09 19:29:05 +01:00
|
|
|
|
2020-06-30 13:15:26 +02:00
|
|
|
if changedDeps {
|
|
|
|
atomic.AddUint32(&c.depsModified, 1)
|
|
|
|
}
|
2016-10-12 19:45:05 +02:00
|
|
|
return errs
|
|
|
|
}
|
2016-01-07 22:43:09 +01:00
|
|
|
|
2020-11-16 21:15:36 +01:00
|
|
|
func (c *Context) discoveredMissingDependencies(module *moduleInfo, depName string, depVariations variationMap) (errs []error) {
|
|
|
|
if depVariations != nil {
|
|
|
|
depName = depName + "{" + c.prettyPrintVariant(depVariations) + "}"
|
|
|
|
}
|
2017-11-11 00:12:08 +01:00
|
|
|
if c.allowMissingDependencies {
|
|
|
|
module.missingDeps = append(module.missingDeps, depName)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
return []error{c.missingDependencyError(module, depName)}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (c *Context) missingDependencyError(module *moduleInfo, depName string) error {
|
2023-03-31 22:49:09 +02:00
|
|
|
guess := namesLike(depName, module.Name(), c.moduleGroups)
|
|
|
|
err := c.nameInterface.MissingDependencyError(module.Name(), module.namespace(), depName, guess)
|
2017-11-11 00:12:08 +01:00
|
|
|
return &BlueprintError{
|
|
|
|
Err: err,
|
|
|
|
Pos: module.pos,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-11-14 05:10:12 +01:00
|
|
|
func (c *Context) moduleGroupFromName(name string, namespace Namespace) *moduleGroup {
|
2017-11-11 00:12:08 +01:00
|
|
|
group, exists := c.nameInterface.ModuleFromName(name, namespace)
|
|
|
|
if exists {
|
2019-11-14 05:10:12 +01:00
|
|
|
return group.moduleGroup
|
2016-05-17 23:58:05 +02:00
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2017-11-11 00:12:08 +01:00
|
|
|
func (c *Context) sortedModuleGroups() []*moduleGroup {
|
2020-12-01 00:30:45 +01:00
|
|
|
if c.cachedSortedModuleGroups == nil || c.cachedDepsModified {
|
2017-11-11 00:12:08 +01:00
|
|
|
unwrap := func(wrappers []ModuleGroup) []*moduleGroup {
|
|
|
|
result := make([]*moduleGroup, 0, len(wrappers))
|
|
|
|
for _, group := range wrappers {
|
|
|
|
result = append(result, group.moduleGroup)
|
|
|
|
}
|
|
|
|
return result
|
2014-09-25 05:26:52 +02:00
|
|
|
}
|
2017-11-11 00:12:08 +01:00
|
|
|
|
|
|
|
c.cachedSortedModuleGroups = unwrap(c.nameInterface.AllModules())
|
2020-12-01 00:30:45 +01:00
|
|
|
c.cachedDepsModified = false
|
2014-09-25 05:26:52 +02:00
|
|
|
}
|
|
|
|
|
2017-11-11 00:12:08 +01:00
|
|
|
return c.cachedSortedModuleGroups
|
2014-09-25 05:26:52 +02:00
|
|
|
}
|
|
|
|
|
2014-05-28 01:34:41 +02:00
|
|
|
func (c *Context) visitAllModules(visit func(Module)) {
|
2016-01-07 22:43:09 +01:00
|
|
|
var module *moduleInfo
|
|
|
|
|
|
|
|
defer func() {
|
|
|
|
if r := recover(); r != nil {
|
|
|
|
panic(newPanicErrorf(r, "VisitAllModules(%s) for %s",
|
|
|
|
funcName(visit), module))
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
2017-11-11 00:12:08 +01:00
|
|
|
for _, moduleGroup := range c.sortedModuleGroups() {
|
2020-08-25 01:18:21 +02:00
|
|
|
for _, moduleOrAlias := range moduleGroup.modules {
|
|
|
|
if module = moduleOrAlias.module(); module != nil {
|
|
|
|
visit(module.logicModule)
|
|
|
|
}
|
2014-12-18 01:12:41 +01:00
|
|
|
}
|
2014-05-28 01:34:41 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (c *Context) visitAllModulesIf(pred func(Module) bool,
|
|
|
|
visit func(Module)) {
|
|
|
|
|
2016-01-07 22:43:09 +01:00
|
|
|
var module *moduleInfo
|
|
|
|
|
|
|
|
defer func() {
|
|
|
|
if r := recover(); r != nil {
|
|
|
|
panic(newPanicErrorf(r, "VisitAllModulesIf(%s, %s) for %s",
|
|
|
|
funcName(pred), funcName(visit), module))
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
2017-11-11 00:12:08 +01:00
|
|
|
for _, moduleGroup := range c.sortedModuleGroups() {
|
2020-08-25 01:18:21 +02:00
|
|
|
for _, moduleOrAlias := range moduleGroup.modules {
|
|
|
|
if module = moduleOrAlias.module(); module != nil {
|
|
|
|
if pred(module.logicModule) {
|
|
|
|
visit(module.logicModule)
|
|
|
|
}
|
2014-12-18 01:12:41 +01:00
|
|
|
}
|
2014-05-28 01:34:41 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-01-07 22:43:09 +01:00
|
|
|
func (c *Context) visitAllModuleVariants(module *moduleInfo,
|
|
|
|
visit func(Module)) {
|
|
|
|
|
|
|
|
var variant *moduleInfo
|
|
|
|
|
|
|
|
defer func() {
|
|
|
|
if r := recover(); r != nil {
|
|
|
|
panic(newPanicErrorf(r, "VisitAllModuleVariants(%s, %s) for %s",
|
|
|
|
module, funcName(visit), variant))
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
2020-08-25 01:18:21 +02:00
|
|
|
for _, moduleOrAlias := range module.group.modules {
|
|
|
|
if variant = moduleOrAlias.module(); variant != nil {
|
|
|
|
visit(variant.logicModule)
|
|
|
|
}
|
2016-01-07 22:43:09 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-05-28 01:34:41 +02:00
|
|
|
func (c *Context) requireNinjaVersion(major, minor, micro int) {
|
|
|
|
if major != 1 {
|
|
|
|
panic("ninja version with major version != 1 not supported")
|
|
|
|
}
|
|
|
|
if c.requiredNinjaMinor < minor {
|
|
|
|
c.requiredNinjaMinor = minor
|
|
|
|
c.requiredNinjaMicro = micro
|
|
|
|
}
|
|
|
|
if c.requiredNinjaMinor == minor && c.requiredNinjaMicro < micro {
|
|
|
|
c.requiredNinjaMicro = micro
|
|
|
|
}
|
|
|
|
}
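// Worked example (illustrative): with the requirement at 1.7.0, a later call
// to requireNinjaVersion(1, 6, 2) leaves it at 1.7.0, requireNinjaVersion(1, 7, 1)
// raises only the micro version to 1.7.1, and any major version other than 1
// panics.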
|
|
|
|
|
2021-08-26 15:08:09 +02:00
|
|
|
func (c *Context) setOutDir(value ninjaString) {
|
|
|
|
if c.outDir == nil {
|
|
|
|
c.outDir = value
|
2014-05-28 01:34:41 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (c *Context) makeUniquePackageNames(
|
2015-12-19 00:18:03 +01:00
|
|
|
liveGlobals *liveTracker) (map[*packageContext]string, []string) {
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2015-11-26 00:29:32 +01:00
|
|
|
pkgs := make(map[string]*packageContext)
|
|
|
|
pkgNames := make(map[*packageContext]string)
|
|
|
|
longPkgNames := make(map[*packageContext]bool)
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2015-11-26 00:29:32 +01:00
|
|
|
processPackage := func(pctx *packageContext) {
|
2014-10-03 11:49:58 +02:00
|
|
|
if pctx == nil {
|
2014-05-28 01:34:41 +02:00
|
|
|
// This is a built-in rule and has no package.
|
|
|
|
return
|
|
|
|
}
|
2014-10-03 11:49:58 +02:00
|
|
|
if _, ok := pkgNames[pctx]; ok {
|
2014-05-28 01:34:41 +02:00
|
|
|
// We've already processed this package.
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2014-10-03 11:49:58 +02:00
|
|
|
otherPkg, present := pkgs[pctx.shortName]
|
2014-05-28 01:34:41 +02:00
|
|
|
if present {
|
|
|
|
// Short name collision. Both this package and the one that's
|
|
|
|
// already there need to use their full names. We leave the short
|
|
|
|
// name in pkgNames for now so future collisions still get caught.
|
2014-10-03 11:49:58 +02:00
|
|
|
longPkgNames[pctx] = true
|
2014-05-28 01:34:41 +02:00
|
|
|
longPkgNames[otherPkg] = true
|
|
|
|
} else {
|
|
|
|
// No collision so far. Tentatively set the package's name to be
|
|
|
|
// its short name.
|
2014-10-03 11:49:58 +02:00
|
|
|
pkgNames[pctx] = pctx.shortName
|
2015-04-15 03:02:20 +02:00
|
|
|
pkgs[pctx.shortName] = pctx
|
2014-05-28 01:34:41 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// We try to give all packages their short name, but when we get collisions
|
|
|
|
// we need to use the full unique package name.
|
|
|
|
for v := range liveGlobals.variables {
|
2014-10-03 11:49:58 +02:00
|
|
|
processPackage(v.packageContext())
|
2014-05-28 01:34:41 +02:00
|
|
|
}
|
|
|
|
for p := range liveGlobals.pools {
|
2014-10-03 11:49:58 +02:00
|
|
|
processPackage(p.packageContext())
|
2014-05-28 01:34:41 +02:00
|
|
|
}
|
|
|
|
for r := range liveGlobals.rules {
|
2014-10-03 11:49:58 +02:00
|
|
|
processPackage(r.packageContext())
|
2014-05-28 01:34:41 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
// Add the packages that had collisions using their full unique names. This
|
|
|
|
// will overwrite any short names that were added in the previous step.
|
2014-10-03 11:49:58 +02:00
|
|
|
for pctx := range longPkgNames {
|
|
|
|
pkgNames[pctx] = pctx.fullName
|
2014-05-28 01:34:41 +02:00
|
|
|
}
|
|
|
|
|
2015-12-19 00:18:03 +01:00
|
|
|
// Create deps list from calls to PackageContext.AddNinjaFileDeps
|
|
|
|
deps := []string{}
|
|
|
|
for _, pkg := range pkgs {
|
|
|
|
deps = append(deps, pkg.ninjaFileDeps...)
|
|
|
|
}
|
|
|
|
|
|
|
|
return pkgNames, deps
|
2014-05-28 01:34:41 +02:00
|
|
|
}
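// Worked example (illustrative, hypothetical package paths): if both
// "example/tools/config" and "example/other/config" are live, their shared
// short name "config" collides and both are written out under their full
// unique names, while a package with an uncontested short name keeps the
// short form.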
|
|
|
|
|
2021-01-22 01:49:25 +01:00
|
|
|
// memoizeFullNames stores the full name of each live global variable, rule and pool since each is
|
|
|
|
// guaranteed to be used at least twice, once in the definition and once for each usage, and many
|
|
|
|
// are used much more than once.
|
|
|
|
func (c *Context) memoizeFullNames(liveGlobals *liveTracker, pkgNames map[*packageContext]string) {
|
|
|
|
for v := range liveGlobals.variables {
|
|
|
|
v.memoizeFullName(pkgNames)
|
|
|
|
}
|
|
|
|
for r := range liveGlobals.rules {
|
|
|
|
r.memoizeFullName(pkgNames)
|
|
|
|
}
|
|
|
|
for p := range liveGlobals.pools {
|
|
|
|
p.memoizeFullName(pkgNames)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-05-28 01:34:41 +02:00
|
|
|
func (c *Context) checkForVariableReferenceCycles(
|
2020-01-29 21:58:03 +01:00
|
|
|
variables map[Variable]ninjaString, pkgNames map[*packageContext]string) {
|
2014-05-28 01:34:41 +02:00
|
|
|
|
|
|
|
visited := make(map[Variable]bool) // variables that were already checked
|
|
|
|
checking := make(map[Variable]bool) // variables actively being checked
|
|
|
|
|
|
|
|
var check func(v Variable) []Variable
|
|
|
|
|
|
|
|
check = func(v Variable) []Variable {
|
|
|
|
visited[v] = true
|
|
|
|
checking[v] = true
|
|
|
|
defer delete(checking, v)
|
|
|
|
|
|
|
|
value := variables[v]
|
2020-01-29 21:58:03 +01:00
|
|
|
for _, dep := range value.Variables() {
|
2014-05-28 01:34:41 +02:00
|
|
|
if checking[dep] {
|
|
|
|
// This is a cycle.
|
|
|
|
return []Variable{dep, v}
|
|
|
|
}
|
|
|
|
|
|
|
|
if !visited[dep] {
|
|
|
|
cycle := check(dep)
|
|
|
|
if cycle != nil {
|
|
|
|
if cycle[0] == v {
|
|
|
|
// We are the "start" of the cycle, so we're responsible
|
|
|
|
// for generating the errors. The cycle list is in
|
|
|
|
// reverse order because all the 'check' calls append
|
|
|
|
// their own variable to the list.
|
|
|
|
msgs := []string{"detected variable reference cycle:"}
|
|
|
|
|
|
|
|
// Iterate backwards through the cycle list.
|
|
|
|
curName := v.fullName(pkgNames)
|
|
|
|
curValue := value.Value(pkgNames)
|
|
|
|
for i := len(cycle) - 1; i >= 0; i-- {
|
|
|
|
next := cycle[i]
|
|
|
|
nextName := next.fullName(pkgNames)
|
|
|
|
nextValue := variables[next].Value(pkgNames)
|
|
|
|
|
|
|
|
msgs = append(msgs, fmt.Sprintf(
|
|
|
|
" %q depends on %q", curName, nextName))
|
|
|
|
msgs = append(msgs, fmt.Sprintf(
|
|
|
|
" [%s = %s]", curName, curValue))
|
|
|
|
|
|
|
|
curName = nextName
|
|
|
|
curValue = nextValue
|
|
|
|
}
|
|
|
|
|
|
|
|
// Variable reference cycles are a programming error,
|
|
|
|
// not the fault of the Blueprint file authors.
|
|
|
|
panic(strings.Join(msgs, "\n"))
|
|
|
|
} else {
|
|
|
|
// We're not the "start" of the cycle, so we just append
|
|
|
|
// our variable to the list and return it.
|
|
|
|
return append(cycle, v)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
for v := range variables {
|
|
|
|
if !visited[v] {
|
|
|
|
cycle := check(v)
|
|
|
|
if cycle != nil {
|
|
|
|
panic("inconceivable!")
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
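// Worked example (illustrative, hypothetical variable names): definitions such
// as
//
//	cflags   = ${includes} -O2
//	includes = ${cflags} -Iinclude
//
// would be reported here as "detected variable reference cycle:" followed by
// each link in the chain. Such cycles come from the Go code declaring the
// variables, so they panic instead of being returned as Blueprint file errors.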
|
|
|
|
|
2014-10-28 06:34:56 +01:00
|
|
|
// AllTargets returns a map of all the build target names to the rule used to build
|
|
|
|
// them. This is the same information that is output by running 'ninja -t
|
|
|
|
// targets all'. If this is called before PrepareBuildActions successfully
|
|
|
|
// completes then ErrBuildActionsNotReady is returned.
|
|
|
|
func (c *Context) AllTargets() (map[string]string, error) {
|
|
|
|
if !c.buildActionsReady {
|
|
|
|
return nil, ErrBuildActionsNotReady
|
|
|
|
}
|
|
|
|
|
|
|
|
targets := map[string]string{}
|
2023-02-22 05:06:40 +01:00
|
|
|
var collectTargets = func(actionDefs localBuildActions) error {
|
|
|
|
for _, buildDef := range actionDefs.buildDefs {
|
2014-10-28 06:34:56 +01:00
|
|
|
ruleName := buildDef.Rule.fullName(c.pkgNames)
|
2016-10-26 06:26:12 +02:00
|
|
|
for _, output := range append(buildDef.Outputs, buildDef.ImplicitOutputs...) {
|
2014-11-22 00:12:08 +01:00
|
|
|
outputValue, err := output.Eval(c.globalVariables)
|
|
|
|
if err != nil {
|
2023-02-22 05:06:40 +01:00
|
|
|
return err
|
2014-11-22 00:12:08 +01:00
|
|
|
}
|
2014-10-28 06:34:56 +01:00
|
|
|
targets[outputValue] = ruleName
|
|
|
|
}
|
|
|
|
}
|
2023-02-22 05:06:40 +01:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
// Collect all the module build targets.
|
|
|
|
for _, module := range c.moduleInfo {
|
|
|
|
if err := collectTargets(module.actionDefs); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2014-10-28 06:34:56 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
// Collect all the singleton build targets.
|
|
|
|
for _, info := range c.singletonInfo {
|
2023-02-22 05:06:40 +01:00
|
|
|
if err := collectTargets(info.actionDefs); err != nil {
|
|
|
|
return nil, err
|
2014-10-28 06:34:56 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return targets, nil
|
|
|
|
}
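// Example usage (illustrative):
//
//	targets, err := ctx.AllTargets()
//	if err != nil {
//		return err // ErrBuildActionsNotReady before PrepareBuildActions succeeds
//	}
//	for output, rule := range targets {
//		fmt.Printf("%s: %s\n", output, rule)
//	}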
|
|
|
|
|
2021-08-26 15:08:09 +02:00
|
|
|
func (c *Context) OutDir() (string, error) {
|
|
|
|
if c.outDir != nil {
|
|
|
|
return c.outDir.Eval(c.globalVariables)
|
2015-11-19 01:01:01 +01:00
|
|
|
} else {
|
|
|
|
return "", nil
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-05-13 23:36:24 +02:00
|
|
|
// ModuleTypePropertyStructs returns a mapping from module type name to a list of pointers to
|
|
|
|
// property structs returned by the factory for that module type.
|
|
|
|
func (c *Context) ModuleTypePropertyStructs() map[string][]interface{} {
|
|
|
|
ret := make(map[string][]interface{})
|
|
|
|
for moduleType, factory := range c.moduleFactories {
|
|
|
|
_, ret[moduleType] = factory()
|
|
|
|
}
|
|
|
|
|
|
|
|
return ret
|
|
|
|
}
|
|
|
|
|
2019-02-07 01:20:17 +01:00
|
|
|
func (c *Context) ModuleTypeFactories() map[string]ModuleFactory {
|
|
|
|
ret := make(map[string]ModuleFactory)
|
|
|
|
for k, v := range c.moduleFactories {
|
|
|
|
ret[k] = v
|
|
|
|
}
|
|
|
|
return ret
|
|
|
|
}
|
|
|
|
|
2015-05-13 23:36:24 +02:00
|
|
|
func (c *Context) ModuleName(logicModule Module) string {
|
|
|
|
module := c.moduleInfo[logicModule]
|
2016-05-17 23:58:05 +02:00
|
|
|
return module.Name()
|
2015-05-13 23:36:24 +02:00
|
|
|
}
|
|
|
|
|
2017-12-01 02:30:42 +01:00
|
|
|
func (c *Context) ModuleDir(logicModule Module) string {
|
2020-07-06 21:18:59 +02:00
|
|
|
return filepath.Dir(c.BlueprintFile(logicModule))
|
2015-05-13 23:36:24 +02:00
|
|
|
}
|
|
|
|
|
2015-12-18 03:02:11 +01:00
|
|
|
func (c *Context) ModuleSubDir(logicModule Module) string {
|
|
|
|
module := c.moduleInfo[logicModule]
|
2020-08-13 21:07:30 +02:00
|
|
|
return module.variant.name
|
2015-12-18 03:02:11 +01:00
|
|
|
}
|
|
|
|
|
2016-07-26 00:51:50 +02:00
|
|
|
func (c *Context) ModuleType(logicModule Module) string {
|
|
|
|
module := c.moduleInfo[logicModule]
|
|
|
|
return module.typeName
|
|
|
|
}
|
|
|
|
|
2020-07-02 19:08:12 +02:00
|
|
|
// ModuleProvider returns the value, if any, for the provider for a module. If the value for the
|
|
|
|
// provider was not set it returns the zero value of the type of the provider, which means the
|
|
|
|
// return value can always be type-asserted to the type of the provider. The return value should
|
|
|
|
// always be considered read-only. It panics if called before the appropriate mutator or
|
|
|
|
// GenerateBuildActions pass for the provider on the module. The value returned may be a deep
|
|
|
|
// copy of the value originally passed to SetProvider.
|
|
|
|
func (c *Context) ModuleProvider(logicModule Module, provider ProviderKey) interface{} {
|
|
|
|
module := c.moduleInfo[logicModule]
|
|
|
|
value, _ := c.provider(module, provider)
|
|
|
|
return value
|
|
|
|
}
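// Example usage (illustrative; exampleInfoProvider and ExampleInfo are
// hypothetical values assumed to be created with NewProvider elsewhere):
//
//	if ctx.ModuleHasProvider(m, exampleInfoProvider) {
//		info := ctx.ModuleProvider(m, exampleInfoProvider).(ExampleInfo)
//		_ = info // read-only view of the data set via SetProvider
//	}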
|
|
|
|
|
|
|
|
// ModuleHasProvider returns true if the provider for the given module has been set.
|
|
|
|
func (c *Context) ModuleHasProvider(logicModule Module, provider ProviderKey) bool {
|
|
|
|
module := c.moduleInfo[logicModule]
|
|
|
|
_, ok := c.provider(module, provider)
|
|
|
|
return ok
|
|
|
|
}
|
|
|
|
|
2015-05-13 23:36:24 +02:00
|
|
|
func (c *Context) BlueprintFile(logicModule Module) string {
|
|
|
|
module := c.moduleInfo[logicModule]
|
|
|
|
return module.relBlueprintsFile
|
|
|
|
}
|
|
|
|
|
|
|
|
func (c *Context) ModuleErrorf(logicModule Module, format string,
|
|
|
|
args ...interface{}) error {
|
|
|
|
|
|
|
|
module := c.moduleInfo[logicModule]
|
2016-10-08 02:13:10 +02:00
|
|
|
return &BlueprintError{
|
2015-05-13 23:36:24 +02:00
|
|
|
Err: fmt.Errorf(format, args...),
|
|
|
|
Pos: module.pos,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (c *Context) VisitAllModules(visit func(Module)) {
|
|
|
|
c.visitAllModules(visit)
|
|
|
|
}
|
|
|
|
|
|
|
|
func (c *Context) VisitAllModulesIf(pred func(Module) bool,
|
|
|
|
visit func(Module)) {
|
|
|
|
|
|
|
|
c.visitAllModulesIf(pred, visit)
|
|
|
|
}
|
|
|
|
|
2017-03-17 21:09:05 +01:00
|
|
|
func (c *Context) VisitDirectDeps(module Module, visit func(Module)) {
|
|
|
|
topModule := c.moduleInfo[module]
|
|
|
|
|
|
|
|
var visiting *moduleInfo
|
|
|
|
|
|
|
|
defer func() {
|
|
|
|
if r := recover(); r != nil {
|
|
|
|
panic(newPanicErrorf(r, "VisitDirectDeps(%s, %s) for dependency %s",
|
|
|
|
topModule, funcName(visit), visiting))
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
|
|
|
for _, dep := range topModule.directDeps {
|
|
|
|
visiting = dep.module
|
|
|
|
visit(dep.module.logicModule)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (c *Context) VisitDirectDepsIf(module Module, pred func(Module) bool, visit func(Module)) {
|
|
|
|
topModule := c.moduleInfo[module]
|
|
|
|
|
|
|
|
var visiting *moduleInfo
|
|
|
|
|
|
|
|
defer func() {
|
|
|
|
if r := recover(); r != nil {
|
|
|
|
panic(newPanicErrorf(r, "VisitDirectDepsIf(%s, %s, %s) for dependency %s",
|
|
|
|
topModule, funcName(pred), funcName(visit), visiting))
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
|
|
|
for _, dep := range topModule.directDeps {
|
|
|
|
visiting = dep.module
|
|
|
|
if pred(dep.module.logicModule) {
|
|
|
|
visit(dep.module.logicModule)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2015-05-13 23:36:24 +02:00
|
|
|
|
2017-03-17 21:09:05 +01:00
|
|
|
func (c *Context) VisitDepsDepthFirst(module Module, visit func(Module)) {
|
2016-08-07 07:52:01 +02:00
|
|
|
topModule := c.moduleInfo[module]
|
|
|
|
|
|
|
|
var visiting *moduleInfo
|
|
|
|
|
|
|
|
defer func() {
|
|
|
|
if r := recover(); r != nil {
|
|
|
|
panic(newPanicErrorf(r, "VisitDepsDepthFirst(%s, %s) for dependency %s",
|
|
|
|
topModule, funcName(visit), visiting))
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
2018-06-20 20:16:37 +02:00
|
|
|
c.walkDeps(topModule, false, nil, func(dep depInfo, parent *moduleInfo) {
|
2016-08-07 07:52:01 +02:00
|
|
|
visiting = dep.module
|
|
|
|
visit(dep.module.logicModule)
|
|
|
|
})
|
2015-05-13 23:36:24 +02:00
|
|
|
}
|
|
|
|
|
2017-03-17 21:09:05 +01:00
|
|
|
func (c *Context) VisitDepsDepthFirstIf(module Module, pred func(Module) bool, visit func(Module)) {
|
2016-08-07 07:52:01 +02:00
|
|
|
topModule := c.moduleInfo[module]
|
|
|
|
|
|
|
|
var visiting *moduleInfo
|
|
|
|
|
|
|
|
defer func() {
|
|
|
|
if r := recover(); r != nil {
|
|
|
|
panic(newPanicErrorf(r, "VisitDepsDepthFirstIf(%s, %s, %s) for dependency %s",
|
|
|
|
topModule, funcName(pred), funcName(visit), visiting))
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
2018-06-20 20:16:37 +02:00
|
|
|
c.walkDeps(topModule, false, nil, func(dep depInfo, parent *moduleInfo) {
|
2016-08-07 07:52:01 +02:00
|
|
|
if pred(dep.module.logicModule) {
|
|
|
|
visiting = dep.module
|
|
|
|
visit(dep.module.logicModule)
|
|
|
|
}
|
|
|
|
})
|
2015-05-13 23:36:24 +02:00
|
|
|
}
|
|
|
|
|
2015-11-18 01:22:29 +01:00
|
|
|
func (c *Context) PrimaryModule(module Module) Module {
|
2020-08-25 01:18:21 +02:00
|
|
|
return c.moduleInfo[module].group.modules.firstModule().logicModule
|
2015-11-18 01:22:29 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
func (c *Context) FinalModule(module Module) Module {
|
2020-08-25 01:18:21 +02:00
|
|
|
return c.moduleInfo[module].group.modules.lastModule().logicModule
|
2015-11-18 01:22:29 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
func (c *Context) VisitAllModuleVariants(module Module,
|
|
|
|
visit func(Module)) {
|
|
|
|
|
2016-01-07 22:43:09 +01:00
|
|
|
c.visitAllModuleVariants(c.moduleInfo[module], visit)
|
2015-11-18 01:22:29 +01:00
|
|
|
}
|
|
|
|
|
2019-02-26 03:07:44 +01:00
|
|
|
// Singletons returns a list of all registered Singletons.
|
|
|
|
func (c *Context) Singletons() []Singleton {
|
|
|
|
var ret []Singleton
|
|
|
|
for _, s := range c.singletonInfo {
|
|
|
|
ret = append(ret, s.singleton)
|
|
|
|
}
|
|
|
|
return ret
|
|
|
|
}
|
|
|
|
|
|
|
|
// SingletonName returns the name that the given singleton was registered with.
|
|
|
|
func (c *Context) SingletonName(singleton Singleton) string {
|
|
|
|
for _, s := range c.singletonInfo {
|
|
|
|
if s.singleton == singleton {
|
|
|
|
return s.name
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return ""
|
|
|
|
}
|
|
|
|
|
2022-05-04 06:45:37 +02:00
|
|
|
// WriteBuildFile writes the Ninja manifest text for the generated build
|
2014-06-13 05:06:50 +02:00
|
|
|
// actions to w. If this is called before PrepareBuildActions successfully
|
|
|
|
// completes then ErrBuildActionsNotReady is returned.
|
2021-01-22 00:26:21 +01:00
|
|
|
func (c *Context) WriteBuildFile(w io.StringWriter) error {
|
2019-01-23 22:21:48 +01:00
|
|
|
var err error
|
|
|
|
pprof.Do(c.Context, pprof.Labels("blueprint", "WriteBuildFile"), func(ctx context.Context) {
|
|
|
|
if !c.buildActionsReady {
|
|
|
|
err = ErrBuildActionsNotReady
|
|
|
|
return
|
|
|
|
}
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2019-01-23 22:21:48 +01:00
|
|
|
nw := newNinjaWriter(w)
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2023-02-22 05:08:34 +01:00
|
|
|
if err = c.writeBuildFileHeader(nw); err != nil {
|
2019-01-23 22:21:48 +01:00
|
|
|
return
|
|
|
|
}
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2023-02-22 05:08:34 +01:00
|
|
|
if err = c.writeNinjaRequiredVersion(nw); err != nil {
|
2019-01-23 22:21:48 +01:00
|
|
|
return
|
|
|
|
}
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2023-02-22 05:08:34 +01:00
|
|
|
if err = c.writeSubninjas(nw); err != nil {
|
2019-01-23 22:21:48 +01:00
|
|
|
return
|
|
|
|
}
|
2018-07-06 06:56:59 +02:00
|
|
|
|
2019-01-23 22:21:48 +01:00
|
|
|
// TODO: Group the globals by package.
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2023-02-22 05:08:34 +01:00
|
|
|
if err = c.writeGlobalVariables(nw); err != nil {
|
2019-01-23 22:21:48 +01:00
|
|
|
return
|
|
|
|
}
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2023-02-22 05:08:34 +01:00
|
|
|
if err = c.writeGlobalPools(nw); err != nil {
|
2019-01-23 22:21:48 +01:00
|
|
|
return
|
|
|
|
}
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2023-02-22 05:08:34 +01:00
|
|
|
if err = c.writeBuildDir(nw); err != nil {
|
2019-01-23 22:21:48 +01:00
|
|
|
return
|
|
|
|
}
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2023-02-22 05:08:34 +01:00
|
|
|
if err = c.writeGlobalRules(nw); err != nil {
|
2019-01-23 22:21:48 +01:00
|
|
|
return
|
|
|
|
}
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2023-02-22 05:08:34 +01:00
|
|
|
if err = c.writeAllModuleActions(nw); err != nil {
|
2019-01-23 22:21:48 +01:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-02-22 05:08:34 +01:00
|
|
|
if err = c.writeAllSingletonActions(nw); err != nil {
|
2019-01-23 22:21:48 +01:00
|
|
|
return
|
|
|
|
}
|
|
|
|
})
|
2014-05-28 01:34:41 +02:00
|
|
|
|
2023-02-22 05:08:34 +01:00
|
|
|
return err
|
2014-05-28 01:34:41 +02:00
|
|
|
}
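// Example usage (illustrative): any io.StringWriter works, for example a
// strings.Builder:
//
//	var sb strings.Builder
//	if err := ctx.WriteBuildFile(&sb); err != nil {
//		return err
//	}
//	// sb.String() now holds the generated build.ninja contents.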
|
|
|
|
|
2014-09-25 05:26:52 +02:00
|
|
|
type pkgAssociation struct {
|
|
|
|
PkgName string
|
|
|
|
PkgPath string
|
|
|
|
}
|
|
|
|
|
|
|
|
type pkgAssociationSorter struct {
|
|
|
|
pkgs []pkgAssociation
|
|
|
|
}
|
|
|
|
|
|
|
|
func (s *pkgAssociationSorter) Len() int {
|
|
|
|
return len(s.pkgs)
|
|
|
|
}
|
|
|
|
|
|
|
|
func (s *pkgAssociationSorter) Less(i, j int) bool {
|
|
|
|
iName := s.pkgs[i].PkgName
|
|
|
|
jName := s.pkgs[j].PkgName
|
|
|
|
return iName < jName
|
|
|
|
}
|
|
|
|
|
|
|
|
func (s *pkgAssociationSorter) Swap(i, j int) {
|
|
|
|
s.pkgs[i], s.pkgs[j] = s.pkgs[j], s.pkgs[i]
|
|
|
|
}
|
|
|
|
|
2014-05-28 01:34:41 +02:00
|
|
|
func (c *Context) writeBuildFileHeader(nw *ninjaWriter) error {
|
|
|
|
headerTemplate := template.New("fileHeader")
|
|
|
|
_, err := headerTemplate.Parse(fileHeaderTemplate)
|
|
|
|
if err != nil {
|
|
|
|
// This is a programming error.
|
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
var pkgs []pkgAssociation
|
|
|
|
maxNameLen := 0
|
|
|
|
for pkg, name := range c.pkgNames {
|
|
|
|
pkgs = append(pkgs, pkgAssociation{
|
|
|
|
PkgName: name,
|
|
|
|
PkgPath: pkg.pkgPath,
|
|
|
|
})
|
|
|
|
if len(name) > maxNameLen {
|
|
|
|
maxNameLen = len(name)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for i := range pkgs {
|
|
|
|
pkgs[i].PkgName += strings.Repeat(" ", maxNameLen-len(pkgs[i].PkgName))
|
|
|
|
}
|
|
|
|
|
2014-09-25 05:26:52 +02:00
|
|
|
sort.Sort(&pkgAssociationSorter{pkgs})
|
|
|
|
|
2014-05-28 01:34:41 +02:00
|
|
|
params := map[string]interface{}{
|
|
|
|
"Pkgs": pkgs,
|
|
|
|
}
|
|
|
|
|
|
|
|
buf := bytes.NewBuffer(nil)
|
|
|
|
err = headerTemplate.Execute(buf, params)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
return nw.Comment(buf.String())
|
|
|
|
}
|
|
|
|
|
|
|
|
func (c *Context) writeNinjaRequiredVersion(nw *ninjaWriter) error {
|
|
|
|
value := fmt.Sprintf("%d.%d.%d", c.requiredNinjaMajor, c.requiredNinjaMinor,
|
|
|
|
c.requiredNinjaMicro)
|
|
|
|
|
|
|
|
err := nw.Assign("ninja_required_version", value)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
return nw.BlankLine()
|
|
|
|
}
|
|
|
|
|
2018-07-06 06:56:59 +02:00
|
|
|
func (c *Context) writeSubninjas(nw *ninjaWriter) error {
|
|
|
|
for _, subninja := range c.subninjas {
|
2019-01-23 22:23:00 +01:00
|
|
|
err := nw.Subninja(subninja)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2018-07-06 06:56:59 +02:00
|
|
|
}
|
|
|
|
return nw.BlankLine()
|
|
|
|
}
|
|
|
|
|
2014-05-28 01:34:41 +02:00
|
|
|
func (c *Context) writeBuildDir(nw *ninjaWriter) error {
|
2021-08-26 15:08:09 +02:00
|
|
|
if c.outDir != nil {
|
|
|
|
err := nw.Assign("builddir", c.outDir.Value(c.pkgNames))
|
2014-05-28 01:34:41 +02:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
err = nw.BlankLine()
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2014-09-25 05:26:52 +02:00
|
|
|
type globalEntity interface {
|
2015-11-26 00:29:32 +01:00
|
|
|
fullName(pkgNames map[*packageContext]string) string
|
2014-05-28 01:34:41 +02:00
|
|
|
}
|
|
|
|
|
2014-09-25 05:26:52 +02:00
|
|
|
type globalEntitySorter struct {
|
2015-11-26 00:29:32 +01:00
|
|
|
pkgNames map[*packageContext]string
|
2014-09-25 05:26:52 +02:00
|
|
|
entities []globalEntity
|
2014-05-28 01:34:41 +02:00
|
|
|
}
|
|
|
|
|
2014-09-25 05:26:52 +02:00
|
|
|
func (s *globalEntitySorter) Len() int {
|
|
|
|
return len(s.entities)
|
|
|
|
}
|
|
|
|
|
|
|
|
func (s *globalEntitySorter) Less(i, j int) bool {
|
|
|
|
iName := s.entities[i].fullName(s.pkgNames)
|
|
|
|
jName := s.entities[j].fullName(s.pkgNames)
|
2014-05-28 01:34:41 +02:00
|
|
|
return iName < jName
|
|
|
|
}
|
|
|
|
|
2014-09-25 05:26:52 +02:00
|
|
|
func (s *globalEntitySorter) Swap(i, j int) {
|
|
|
|
s.entities[i], s.entities[j] = s.entities[j], s.entities[i]
|
2014-05-28 01:34:41 +02:00
|
|
|
}
|
|
|
|
|
|
|
|

func (c *Context) writeGlobalVariables(nw *ninjaWriter) error {
	visited := make(map[Variable]bool)

	var walk func(v Variable) error
	walk = func(v Variable) error {
		visited[v] = true

		// First visit variables on which this variable depends.
		value := c.globalVariables[v]
		for _, dep := range value.Variables() {
			if !visited[dep] {
				err := walk(dep)
				if err != nil {
					return err
				}
			}
		}

		err := nw.Assign(v.fullName(c.pkgNames), value.Value(c.pkgNames))
		if err != nil {
			return err
		}

		err = nw.BlankLine()
		if err != nil {
			return err
		}

		return nil
	}

	globalVariables := make([]globalEntity, 0, len(c.globalVariables))
	for variable := range c.globalVariables {
		globalVariables = append(globalVariables, variable)
	}

	sort.Sort(&globalEntitySorter{c.pkgNames, globalVariables})

	for _, entity := range globalVariables {
		v := entity.(Variable)
		if !visited[v] {
			err := walk(v)
			if err != nil {
				return err
			}
		}
	}

	return nil
}
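
// A minimal sketch of the ordering above (hypothetical variables): if one global
// variable's value references another, the walk emits the dependency first, e.g.
//
//	g.example.gccPath = /usr/bin/gcc
//
//	g.example.ccCmd = ${g.example.gccPath} -c
//
// Ninja expands references in top-level assignments using the bindings already
// seen, so a definition must precede its uses.
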

func (c *Context) writeGlobalPools(nw *ninjaWriter) error {
	globalPools := make([]globalEntity, 0, len(c.globalPools))
	for pool := range c.globalPools {
		globalPools = append(globalPools, pool)
	}

	sort.Sort(&globalEntitySorter{c.pkgNames, globalPools})

	for _, entity := range globalPools {
		pool := entity.(Pool)
		name := pool.fullName(c.pkgNames)
		def := c.globalPools[pool]

		err := def.WriteTo(nw, name)
		if err != nil {
			return err
		}

		err = nw.BlankLine()
		if err != nil {
			return err
		}
	}

	return nil
}

func (c *Context) writeGlobalRules(nw *ninjaWriter) error {
	globalRules := make([]globalEntity, 0, len(c.globalRules))
	for rule := range c.globalRules {
		globalRules = append(globalRules, rule)
	}

	sort.Sort(&globalEntitySorter{c.pkgNames, globalRules})

	for _, entity := range globalRules {
		rule := entity.(Rule)
		name := rule.fullName(c.pkgNames)
		def := c.globalRules[rule]

		err := def.WriteTo(nw, name, c.pkgNames)
		if err != nil {
			return err
		}

		err = nw.BlankLine()
		if err != nil {
			return err
		}
	}

	return nil
}

type depSorter []depInfo

func (s depSorter) Len() int {
	return len(s)
}

func (s depSorter) Less(i, j int) bool {
	iName := s[i].module.Name()
	jName := s[j].module.Name()
	if iName == jName {
		iName = s[i].module.variant.name
		jName = s[j].module.variant.name
	}
	return iName < jName
}

func (s depSorter) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}

type moduleSorter struct {
	modules       []*moduleInfo
	nameInterface NameInterface
}

func (s moduleSorter) Len() int {
	return len(s.modules)
}

func (s moduleSorter) Less(i, j int) bool {
	iMod := s.modules[i]
	jMod := s.modules[j]
	iName := s.nameInterface.UniqueName(newNamespaceContext(iMod), iMod.group.name)
	jName := s.nameInterface.UniqueName(newNamespaceContext(jMod), jMod.group.name)
	if iName == jName {
		iVariantName := s.modules[i].variant.name
		jVariantName := s.modules[j].variant.name
		if iVariantName == jVariantName {
			panic(fmt.Sprintf("duplicate module name: %s %s: %#v and %#v\n",
				iName, iVariantName, iMod.variant.variations, jMod.variant.variations))
		} else {
			return iVariantName < jVariantName
		}
	} else {
		return iName < jName
	}
}

func (s moduleSorter) Swap(i, j int) {
	s.modules[i], s.modules[j] = s.modules[j], s.modules[i]
}

func (c *Context) writeAllModuleActions(nw *ninjaWriter) error {
	c.BeginEvent("modules")
	defer c.EndEvent("modules")

	headerTemplate := template.New("moduleHeader")
	if _, err := headerTemplate.Parse(moduleHeaderTemplate); err != nil {
		// This is a programming error.
		panic(err)
	}

	modules := make([]*moduleInfo, 0, len(c.moduleInfo))
	for _, module := range c.moduleInfo {
		modules = append(modules, module)
	}
	sort.Sort(moduleSorter{modules, c.nameInterface})

	phonys := c.deduplicateOrderOnlyDeps(modules)
	if err := c.writeLocalBuildActions(nw, phonys); err != nil {
		return err
	}

	buf := bytes.NewBuffer(nil)

	for _, module := range modules {
		if len(module.actionDefs.variables)+len(module.actionDefs.rules)+len(module.actionDefs.buildDefs) == 0 {
			continue
		}

		buf.Reset()

		// In order to make the bootstrap build manifest independent of the
		// build dir we need to output the Blueprints file locations in the
		// comments as paths relative to the source directory.
		relPos := module.pos
		relPos.Filename = module.relBlueprintsFile

		// Get the name and location of the factory function for the module.
		factoryFunc := runtime.FuncForPC(reflect.ValueOf(module.factory).Pointer())
		factoryName := factoryFunc.Name()

		infoMap := map[string]interface{}{
			"name":      module.Name(),
			"typeName":  module.typeName,
			"goFactory": factoryName,
			"pos":       relPos,
			"variant":   module.variant.name,
		}
		if err := headerTemplate.Execute(buf, infoMap); err != nil {
			return err
		}

		if err := nw.Comment(buf.String()); err != nil {
			return err
		}

		if err := nw.BlankLine(); err != nil {
			return err
		}

		if err := c.writeLocalBuildActions(nw, &module.actionDefs); err != nil {
			return err
		}

		if err := nw.BlankLine(); err != nil {
			return err
		}
	}

	return nil
}

func (c *Context) writeAllSingletonActions(nw *ninjaWriter) error {
	c.BeginEvent("singletons")
	defer c.EndEvent("singletons")

	headerTemplate := template.New("singletonHeader")
	_, err := headerTemplate.Parse(singletonHeaderTemplate)
	if err != nil {
		// This is a programming error.
		panic(err)
	}

	buf := bytes.NewBuffer(nil)

	for _, info := range c.singletonInfo {
		if len(info.actionDefs.variables)+len(info.actionDefs.rules)+len(info.actionDefs.buildDefs) == 0 {
			continue
		}

		// Get the name of the factory function for the singleton.
		factory := info.factory
		factoryFunc := runtime.FuncForPC(reflect.ValueOf(factory).Pointer())
		factoryName := factoryFunc.Name()

		buf.Reset()
		infoMap := map[string]interface{}{
			"name":      info.name,
			"goFactory": factoryName,
		}
		err = headerTemplate.Execute(buf, infoMap)
		if err != nil {
			return err
		}

		err = nw.Comment(buf.String())
		if err != nil {
			return err
		}

		err = nw.BlankLine()
		if err != nil {
			return err
		}

		err = c.writeLocalBuildActions(nw, &info.actionDefs)
		if err != nil {
			return err
		}

		err = nw.BlankLine()
		if err != nil {
			return err
		}
	}

	return nil
}

func (c *Context) GetEventHandler() *metrics.EventHandler {
	return c.EventHandler
}

func (c *Context) BeginEvent(name string) {
	c.EventHandler.Begin(name)
}

func (c *Context) EndEvent(name string) {
	c.EventHandler.End(name)
}

func (c *Context) SetBeforePrepareBuildActionsHook(hookFn func() error) {
	c.BeforePrepareBuildActionsHook = hookFn
}

// phonyCandidate represents the state of a set of deps that decides its eligibility
// to be extracted as a phony output.
type phonyCandidate struct {
	sync.Once
	phony *buildDef // the phony buildDef that wraps the set
	first *buildDef // the first buildDef that uses this set
}

// keyForPhonyCandidate gives a unique identifier for a set of deps.
// If any of the deps use a variable, we return an empty string to signal
// that this set of deps is ineligible for extraction.
func keyForPhonyCandidate(deps []ninjaString) string {
	hasher := sha256.New()
	for _, d := range deps {
		if len(d.Variables()) != 0 {
			return ""
		}
		io.WriteString(hasher, d.Value(nil))
	}
	return base64.RawURLEncoding.EncodeToString(hasher.Sum(nil))
}
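
// A minimal sketch of the keying behaviour (hypothetical deps): two buildDefs
// whose order-only deps both evaluate to "out/gen/x.h" and "out/gen/y.h" hash to
// the same base64(SHA-256) key and can later share one phony, while any dep that
// still references a Blueprint variable makes the whole set ineligible (key "").
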

// scanBuildDef is called for every known buildDef `b` that has a non-empty `b.OrderOnly`.
// If `b.OrderOnly` is not present in `candidates`, it gets stored.
// But if `b.OrderOnly` already exists in `candidates`, then `b.OrderOnly`
// (and phonyCandidate#first.OrderOnly) will be replaced with phonyCandidate#phony.Outputs.
func scanBuildDef(wg *sync.WaitGroup, candidates *sync.Map, phonyCount *atomic.Uint32, b *buildDef) {
	defer wg.Done()
	key := keyForPhonyCandidate(b.OrderOnly)
	if key == "" {
		return
	}
	if v, loaded := candidates.LoadOrStore(key, &phonyCandidate{
		first: b,
	}); loaded {
		m := v.(*phonyCandidate)
		m.Do(func() {
			// this is the second occurrence and hence it makes sense to
			// extract it as a phony output
			phonyCount.Add(1)
			m.phony = &buildDef{
				Rule:     Phony,
				Outputs:  []ninjaString{simpleNinjaString("dedup-" + key)},
				Inputs:   m.first.OrderOnly, // we could also use b.OrderOnly
				Optional: true,
			}
			// the previously recorded build-def, which first had these deps as its
			// order-only deps, should now use this phony output instead
			m.first.OrderOnly = m.phony.Outputs
			m.first = nil
		})
		b.OrderOnly = m.phony.Outputs
	}
}
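
// A sketch of the rewrite this enables (hypothetical build statements): two
// build definitions that share the same order-only deps, e.g.
//
//	build out/a.o: cc a.c || out/gen/x.h out/gen/y.h
//	build out/b.o: cc b.c || out/gen/x.h out/gen/y.h
//
// end up pointing at a single shared phony instead:
//
//	build dedup-<hash>: phony out/gen/x.h out/gen/y.h
//	build out/a.o: cc a.c || dedup-<hash>
//	build out/b.o: cc b.c || dedup-<hash>
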

// deduplicateOrderOnlyDeps searches for common sets of order-only dependencies across all
// buildDef instances in the provided moduleInfo instances. Each such common set forms a
// new buildDef representing a phony output that then becomes the sole order-only
// dependency of those buildDef instances.
func (c *Context) deduplicateOrderOnlyDeps(infos []*moduleInfo) *localBuildActions {
	c.BeginEvent("deduplicate_order_only_deps")
	defer c.EndEvent("deduplicate_order_only_deps")

	candidates := sync.Map{} // used as map[key]*phonyCandidate
	phonyCount := atomic.Uint32{}
	wg := sync.WaitGroup{}
	for _, info := range infos {
		for _, b := range info.actionDefs.buildDefs {
			if len(b.OrderOnly) > 0 {
				wg.Add(1)
				go scanBuildDef(&wg, &candidates, &phonyCount, b)
			}
		}
	}
	wg.Wait()

	// Now collect all created phonys to return.
	phonys := make([]*buildDef, 0, phonyCount.Load())
	candidates.Range(func(_ any, v any) bool {
		candidate := v.(*phonyCandidate)
		if candidate.phony != nil {
			phonys = append(phonys, candidate.phony)
		}
		return true
	})

	c.EventHandler.Do("sort_phony_builddefs", func() {
		// Sort for determinism; the phony output names are stable.
		sort.Slice(phonys, func(i int, j int) bool {
			return phonys[i].Outputs[0].Value(nil) < phonys[j].Outputs[0].Value(nil)
		})
	})

	return &localBuildActions{buildDefs: phonys}
}
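
// Note: the phony build definitions returned here are emitted by
// writeAllModuleActions (via writeLocalBuildActions) ahead of the per-module
// sections of the generated file.
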

func (c *Context) writeLocalBuildActions(nw *ninjaWriter,
	defs *localBuildActions) error {

	// Write the local variable assignments.
	for _, v := range defs.variables {
		// A localVariable doesn't need the package names or config to
		// determine its name or value.
		name := v.fullName(nil)
		value, err := v.value(nil, nil)
		if err != nil {
			panic(err)
		}
		err = nw.Assign(name, value.Value(c.pkgNames))
		if err != nil {
			return err
		}
	}

	if len(defs.variables) > 0 {
		err := nw.BlankLine()
		if err != nil {
			return err
		}
	}

	// Write the local rules.
	for _, r := range defs.rules {
		// A localRule doesn't need the package names or config to determine
		// its name or definition.
		name := r.fullName(nil)
		def, err := r.def(nil)
		if err != nil {
			panic(err)
		}

		err = def.WriteTo(nw, name, c.pkgNames)
		if err != nil {
			return err
		}

		err = nw.BlankLine()
		if err != nil {
			return err
		}
	}

	// Write the build definitions.
	for _, buildDef := range defs.buildDefs {
		err := buildDef.WriteTo(nw, c.pkgNames)
		if err != nil {
			return err
		}

		if len(buildDef.Args) > 0 {
			err = nw.BlankLine()
			if err != nil {
				return err
			}
		}
	}

	return nil
}

func beforeInModuleList(a, b *moduleInfo, list modulesOrAliases) bool {
	found := false
	if a == b {
		return false
	}
	for _, l := range list {
		if l.module() == a {
			found = true
		} else if l.module() == b {
			return found
		}
	}

	missing := a
	if found {
		missing = b
	}
	panic(fmt.Errorf("element %v not found in list %v", missing, list))
}

type panicError struct {
	panic interface{}
	stack []byte
	in    string
}

func newPanicErrorf(panic interface{}, in string, a ...interface{}) error {
	buf := make([]byte, 4096)
	count := runtime.Stack(buf, false)
	return panicError{
		panic: panic,
		in:    fmt.Sprintf(in, a...),
		stack: buf[:count],
	}
}

func (p panicError) Error() string {
	return fmt.Sprintf("panic in %s\n%s\n%s\n", p.in, p.panic, p.stack)
}

func (p *panicError) addIn(in string) {
	p.in += " in " + in
}

func funcName(f interface{}) string {
	return runtime.FuncForPC(reflect.ValueOf(f).Pointer()).Name()
}
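
// A minimal usage sketch (hypothetical call site, not taken from this file):
//
//	defer func() {
//		if r := recover(); r != nil {
//			err = newPanicErrorf(r, "GenerateBuildActions for %s", module)
//		}
//	}()
//
// addIn can then append further context ("... in mutator foo") as the error is
// propagated up the call stack.
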

var fileHeaderTemplate = `******************************************************************************
*** This file is generated and should not be edited ***
******************************************************************************
{{if .Pkgs}}
This file contains variables, rules, and pools with name prefixes indicating
they were generated by the following Go packages:
{{range .Pkgs}}
{{.PkgName}} [from Go package {{.PkgPath}}]{{end}}{{end}}

`

var moduleHeaderTemplate = `# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
Module: {{.name}}
Variant: {{.variant}}
Type: {{.typeName}}
Factory: {{.goFactory}}
Defined: {{.pos}}
`

var singletonHeaderTemplate = `# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
Singleton: {{.name}}
Factory: {{.goFactory}}
`
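
// For illustration (hypothetical values): writeAllModuleActions executes
// moduleHeaderTemplate and emits the result through nw.Comment, so each module
// section in the generated ninja file starts with a comment block roughly like
//
//	# Module: libexample
//	# Variant: linux_x86_64
//	# Type: cc_library
//	# Factory: github.com/example/cc.LibraryFactory
//	# Defined: src/Android.bp:10:1
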

// Blueprint module type that can be used to gate blueprint files beneath this directory
type PackageIncludes struct {
	properties struct {
		// Package will be included if all include tags in this list are set
		Match_all []string
	}
	name *string `blueprint:"mutated"`
}

func (pi *PackageIncludes) Name() string {
	return proptools.String(pi.name)
}

// This module type does not have any build actions
func (pi *PackageIncludes) GenerateBuildActions(ctx ModuleContext) {
}

func newPackageIncludesFactory() (Module, []interface{}) {
	module := &PackageIncludes{}
	AddLoadHook(module, func(ctx LoadHookContext) {
		module.name = proptools.StringPtr(ctx.ModuleDir() + "_includes") // Generate a synthetic name
	})
	return module, []interface{}{&module.properties}
}

func RegisterPackageIncludesModuleType(ctx *Context) {
	ctx.RegisterModuleType("blueprint_package_includes", newPackageIncludesFactory)
}

func (pi *PackageIncludes) MatchAll() []string {
	return pi.properties.Match_all
}

// Returns true if all requested include tags are set in the Context object
func (pi *PackageIncludes) MatchesIncludeTags(ctx *Context) bool {
	if len(pi.MatchAll()) == 0 {
		ctx.ModuleErrorf(pi, "Match_all must be a non-empty list")
	}
	for _, includeTag := range pi.MatchAll() {
		if !ctx.ContainsIncludeTag(includeTag) {
			return false
		}
	}
	return true
}
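
// A minimal usage sketch (hypothetical Blueprints file and tags): a directory can
// gate its Blueprint files behind include tags registered on the Context, e.g.
//
//	blueprint_package_includes {
//	    match_all: ["vendor_available", "internal_ci"],
//	}
//
// The files beneath that directory are only considered if every listed tag has
// been added to the Context (see ContainsIncludeTag).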