Skip @bazel_tools// labels from ninja
Since these bazel labels are removed from depsets anyway, there is little reason to convert them to ninja build statements. Test: m nothing (with bazel af426041) Bug: 301638491 Change-Id: Ie920477231d147d0b5b7dbcd1c59ed9985a80abb
This commit is contained in:
parent
a0d741edef
commit
c45478190f
2 changed files with 77 additions and 23 deletions
|
@ -372,18 +372,20 @@ func AqueryBuildStatements(aqueryJsonProto []byte, eventHandler *metrics.EventHa
|
|||
defer eventHandler.End("build_statements")
|
||||
wg := sync.WaitGroup{}
|
||||
var errOnce sync.Once
|
||||
|
||||
id2targets := make(map[uint32]string, len(aqueryProto.Targets))
|
||||
for _, t := range aqueryProto.Targets {
|
||||
id2targets[t.GetId()] = t.GetLabel()
|
||||
}
|
||||
for i, actionEntry := range aqueryProto.Actions {
|
||||
wg.Add(1)
|
||||
go func(i int, actionEntry *analysis_v2_proto.Action) {
|
||||
if buildStatement, aErr := aqueryHandler.actionToBuildStatement(actionEntry); aErr != nil {
|
||||
if strings.HasPrefix(id2targets[actionEntry.TargetId], "@bazel_tools//") {
|
||||
// bazel_tools are removed depsets in `populateDepsetMaps()` so skipping
|
||||
// conversion to build statements as well
|
||||
buildStatements[i] = nil
|
||||
} else if buildStatement, aErr := aqueryHandler.actionToBuildStatement(actionEntry); aErr != nil {
|
||||
errOnce.Do(func() {
|
||||
for _, t := range aqueryProto.Targets {
|
||||
if t.GetId() == actionEntry.GetTargetId() {
|
||||
aErr = fmt.Errorf("%s: [%s] [%s]", aErr.Error(), actionEntry.GetMnemonic(), t.GetLabel())
|
||||
break
|
||||
}
|
||||
}
|
||||
aErr = fmt.Errorf("%s: [%s] [%s]", aErr.Error(), actionEntry.GetMnemonic(), id2targets[actionEntry.TargetId])
|
||||
err = aErr
|
||||
})
|
||||
} else {
|
||||
|
|
|
@ -178,8 +178,8 @@ func TestInvalidOutputId(t *testing.T) {
|
|||
{ "id": 2, "path_fragment_id": 2 }],
|
||||
"actions": [{
|
||||
"target_id": 1,
|
||||
"action_key": "x",
|
||||
"mnemonic": "x",
|
||||
"action_key": "action_x",
|
||||
"mnemonic": "X",
|
||||
"arguments": ["touch", "foo"],
|
||||
"input_dep_set_ids": [1],
|
||||
"output_ids": [3],
|
||||
|
@ -198,7 +198,7 @@ func TestInvalidOutputId(t *testing.T) {
|
|||
return
|
||||
}
|
||||
_, _, err = AqueryBuildStatements(data, &metrics.EventHandler{})
|
||||
assertError(t, err, "undefined outputId 3")
|
||||
assertError(t, err, "undefined outputId 3: [X] []")
|
||||
}
|
||||
|
||||
func TestInvalidInputDepsetIdFromAction(t *testing.T) {
|
||||
|
@ -209,13 +209,17 @@ func TestInvalidInputDepsetIdFromAction(t *testing.T) {
|
|||
{ "id": 2, "path_fragment_id": 2 }],
|
||||
"actions": [{
|
||||
"target_id": 1,
|
||||
"action_key": "x",
|
||||
"mnemonic": "x",
|
||||
"action_key": "action_x",
|
||||
"mnemonic": "X",
|
||||
"arguments": ["touch", "foo"],
|
||||
"input_dep_set_ids": [2],
|
||||
"output_ids": [1],
|
||||
"primary_output_id": 1
|
||||
}],
|
||||
"targets": [{
|
||||
"id": 1,
|
||||
"label": "target_x"
|
||||
}],
|
||||
"dep_set_of_files": [
|
||||
{ "id": 1, "direct_artifact_ids": [1, 2] }],
|
||||
"path_fragments": [
|
||||
|
@ -229,7 +233,7 @@ func TestInvalidInputDepsetIdFromAction(t *testing.T) {
|
|||
return
|
||||
}
|
||||
_, _, err = AqueryBuildStatements(data, &metrics.EventHandler{})
|
||||
assertError(t, err, "undefined (not even empty) input depsetId 2")
|
||||
assertError(t, err, "undefined (not even empty) input depsetId 2: [X] [target_x]")
|
||||
}
|
||||
|
||||
func TestInvalidInputDepsetIdFromDepset(t *testing.T) {
|
||||
|
@ -383,8 +387,8 @@ func TestMultipleDepfiles(t *testing.T) {
|
|||
{ "id": 4, "path_fragment_id": 4 }],
|
||||
"actions": [{
|
||||
"target_id": 1,
|
||||
"action_key": "x",
|
||||
"mnemonic": "x",
|
||||
"action_key": "action_x",
|
||||
"mnemonic": "X",
|
||||
"arguments": ["touch", "foo"],
|
||||
"input_dep_set_ids": [1],
|
||||
"output_ids": [2,3,4],
|
||||
|
@ -407,7 +411,7 @@ func TestMultipleDepfiles(t *testing.T) {
|
|||
return
|
||||
}
|
||||
_, _, err = AqueryBuildStatements(data, &metrics.EventHandler{})
|
||||
assertError(t, err, `found multiple potential depfiles "two.d", "other.d"`)
|
||||
assertError(t, err, `found multiple potential depfiles "two.d", "other.d": [X] []`)
|
||||
}
|
||||
|
||||
func TestTransitiveInputDepsets(t *testing.T) {
|
||||
|
@ -559,7 +563,7 @@ func TestSymlinkTree(t *testing.T) {
|
|||
}, actual)
|
||||
}
|
||||
|
||||
func TestBazelOutRemovalFromInputDepsets(t *testing.T) {
|
||||
func TestBazelToolsRemovalFromInputDepsets(t *testing.T) {
|
||||
const inputString = `{
|
||||
"artifacts": [
|
||||
{ "id": 1, "path_fragment_id": 10 },
|
||||
|
@ -637,7 +641,55 @@ func TestBazelOutRemovalFromInputDepsets(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestBazelOutRemovalFromTransitiveInputDepsets(t *testing.T) {
|
||||
func TestBazelToolsRemovalFromTargets(t *testing.T) {
|
||||
const inputString = `{
|
||||
"artifacts": [{ "id": 1, "path_fragment_id": 10 }],
|
||||
"targets": [
|
||||
{ "id": 100, "label": "targetX" },
|
||||
{ "id": 200, "label": "@bazel_tools//tool_y" }
|
||||
],
|
||||
"actions": [{
|
||||
"target_id": 100,
|
||||
"action_key": "actionX",
|
||||
"arguments": ["bogus", "command"],
|
||||
"mnemonic" : "x",
|
||||
"output_ids": [1]
|
||||
}, {
|
||||
"target_id": 200,
|
||||
"action_key": "y"
|
||||
}],
|
||||
"path_fragments": [{ "id": 10, "label": "outputX"}]
|
||||
}`
|
||||
data, err := JsonToActionGraphContainer(inputString)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
actualBuildStatements, actualDepsets, _ := AqueryBuildStatements(data, &metrics.EventHandler{})
|
||||
if len(actualDepsets) != 0 {
|
||||
t.Errorf("expected 0 depset but found %#v", actualDepsets)
|
||||
return
|
||||
}
|
||||
expectedBuildStatement := &BuildStatement{
|
||||
Command: "bogus command",
|
||||
OutputPaths: []string{"outputX"},
|
||||
Mnemonic: "x",
|
||||
SymlinkPaths: []string{},
|
||||
}
|
||||
buildStatementFound := false
|
||||
for _, actualBuildStatement := range actualBuildStatements {
|
||||
if buildStatementEquals(actualBuildStatement, expectedBuildStatement) == "" {
|
||||
buildStatementFound = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !buildStatementFound {
|
||||
t.Errorf("expected but missing %#v in %#v build statements", expectedBuildStatement, len(actualBuildStatements))
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func TestBazelToolsRemovalFromTransitiveInputDepsets(t *testing.T) {
|
||||
const inputString = `{
|
||||
"artifacts": [
|
||||
{ "id": 1, "path_fragment_id": 10 },
|
||||
|
@ -939,7 +991,7 @@ func TestSymlinkMultipleInputs(t *testing.T) {
|
|||
{ "id": 3, "path_fragment_id": 3 }],
|
||||
"actions": [{
|
||||
"target_id": 1,
|
||||
"action_key": "x",
|
||||
"action_key": "action_x",
|
||||
"mnemonic": "Symlink",
|
||||
"input_dep_set_ids": [1],
|
||||
"output_ids": [3],
|
||||
|
@ -958,7 +1010,7 @@ func TestSymlinkMultipleInputs(t *testing.T) {
|
|||
return
|
||||
}
|
||||
_, _, err = AqueryBuildStatements(data, &metrics.EventHandler{})
|
||||
assertError(t, err, `Expect 1 input and 1 output to symlink action, got: input ["file" "other_file"], output ["symlink"]`)
|
||||
assertError(t, err, `Expect 1 input and 1 output to symlink action, got: input ["file" "other_file"], output ["symlink"]: [Symlink] []`)
|
||||
}
|
||||
|
||||
func TestSymlinkMultipleOutputs(t *testing.T) {
|
||||
|
@ -989,7 +1041,7 @@ func TestSymlinkMultipleOutputs(t *testing.T) {
|
|||
return
|
||||
}
|
||||
_, _, err = AqueryBuildStatements(data, &metrics.EventHandler{})
|
||||
assertError(t, err, "undefined outputId 2")
|
||||
assertError(t, err, "undefined outputId 2: [Symlink] []")
|
||||
}
|
||||
|
||||
func TestTemplateExpandActionSubstitutions(t *testing.T) {
|
||||
|
@ -1066,7 +1118,7 @@ func TestTemplateExpandActionNoOutput(t *testing.T) {
|
|||
return
|
||||
}
|
||||
_, _, err = AqueryBuildStatements(data, &metrics.EventHandler{})
|
||||
assertError(t, err, `Expect 1 output to template expand action, got: output []`)
|
||||
assertError(t, err, `Expect 1 output to template expand action, got: output []: [TemplateExpand] []`)
|
||||
}
|
||||
|
||||
func TestFileWrite(t *testing.T) {
|
||||
|
|
Loading…
Reference in a new issue