Merge "Utilize linter artifacts of the impl library in the top level sdk library" into main
diff --git a/aconfig/codegen/Android.bp b/aconfig/codegen/Android.bp
index 0c78b94..5fac0a8 100644
--- a/aconfig/codegen/Android.bp
+++ b/aconfig/codegen/Android.bp
@@ -12,7 +12,6 @@
"soong",
"soong-aconfig",
"soong-android",
- "soong-bazel",
"soong-java",
"soong-rust",
],
diff --git a/android/Android.bp b/android/Android.bp
index ce8c9b0..985ffa9 100644
--- a/android/Android.bp
+++ b/android/Android.bp
@@ -40,6 +40,7 @@
"base_module_context.go",
"build_prop.go",
"buildinfo_prop.go",
+ "compliance_metadata.go",
"config.go",
"test_config.go",
"configurable_properties.go",
diff --git a/android/compliance_metadata.go b/android/compliance_metadata.go
new file mode 100644
index 0000000..6ea6654
--- /dev/null
+++ b/android/compliance_metadata.go
@@ -0,0 +1,314 @@
+// Copyright 2024 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package android
+
+import (
+ "bytes"
+ "encoding/csv"
+ "fmt"
+ "slices"
+ "strconv"
+ "strings"
+
+ "github.com/google/blueprint"
+)
+
+var (
+ // Constants of property names used in compliance metadata of modules
+ ComplianceMetadataProp = struct {
+ NAME string
+ PACKAGE string
+ MODULE_TYPE string
+ OS string
+ ARCH string
+ IS_PRIMARY_ARCH string
+ VARIANT string
+ IS_STATIC_LIB string
+ INSTALLED_FILES string
+ BUILT_FILES string
+ STATIC_DEPS string
+ STATIC_DEP_FILES string
+ WHOLE_STATIC_DEPS string
+ WHOLE_STATIC_DEP_FILES string
+ LICENSES string
+
+ // module_type=package
+ PKG_DEFAULT_APPLICABLE_LICENSES string
+
+ // module_type=license
+ LIC_LICENSE_KINDS string
+ LIC_LICENSE_TEXT string
+ LIC_PACKAGE_NAME string
+
+ // module_type=license_kind
+ LK_CONDITIONS string
+ LK_URL string
+ }{
+ "name",
+ "package",
+ "module_type",
+ "os",
+ "arch",
+ "is_primary_arch",
+ "variant",
+ "is_static_lib",
+ "installed_files",
+ "built_files",
+ "static_deps",
+ "static_dep_files",
+ "whole_static_deps",
+ "whole_static_dep_files",
+ "licenses",
+
+ "pkg_default_applicable_licenses",
+
+ "lic_license_kinds",
+ "lic_license_text",
+ "lic_package_name",
+
+ "lk_conditions",
+ "lk_url",
+ }
+
+ // A constant list of all property names in compliance metadata
+ // Order of properties here is the order of columns in the exported CSV file.
+ COMPLIANCE_METADATA_PROPS = []string{
+ ComplianceMetadataProp.NAME,
+ ComplianceMetadataProp.PACKAGE,
+ ComplianceMetadataProp.MODULE_TYPE,
+ ComplianceMetadataProp.OS,
+ ComplianceMetadataProp.ARCH,
+ ComplianceMetadataProp.VARIANT,
+ ComplianceMetadataProp.IS_STATIC_LIB,
+ ComplianceMetadataProp.IS_PRIMARY_ARCH,
+ // Space separated installed files
+ ComplianceMetadataProp.INSTALLED_FILES,
+ // Space separated built files
+ ComplianceMetadataProp.BUILT_FILES,
+ // Space separated module names of static dependencies
+ ComplianceMetadataProp.STATIC_DEPS,
+ // Space separated file paths of static dependencies
+ ComplianceMetadataProp.STATIC_DEP_FILES,
+ // Space separated module names of whole static dependencies
+ ComplianceMetadataProp.WHOLE_STATIC_DEPS,
+ // Space separated file paths of whole static dependencies
+ ComplianceMetadataProp.WHOLE_STATIC_DEP_FILES,
+ ComplianceMetadataProp.LICENSES,
+ // module_type=package
+ ComplianceMetadataProp.PKG_DEFAULT_APPLICABLE_LICENSES,
+ // module_type=license
+ ComplianceMetadataProp.LIC_LICENSE_KINDS,
+ ComplianceMetadataProp.LIC_LICENSE_TEXT, // resolve to file paths
+ ComplianceMetadataProp.LIC_PACKAGE_NAME,
+ // module_type=license_kind
+ ComplianceMetadataProp.LK_CONDITIONS,
+ ComplianceMetadataProp.LK_URL,
+ }
+)
+
+// ComplianceMetadataInfo provides all metadata of a module, e.g. name, module type, package, license,
+// dependencies, built/installed files, etc. It is a wrapper around a map[string]string with utility
+// methods to get and set property values.
+type ComplianceMetadataInfo struct {
+ properties map[string]string
+}
+
+func NewComplianceMetadataInfo() *ComplianceMetadataInfo {
+ return &ComplianceMetadataInfo{
+ properties: map[string]string{},
+ }
+}
+
+func (c *ComplianceMetadataInfo) SetStringValue(propertyName string, value string) {
+ if !slices.Contains(COMPLIANCE_METADATA_PROPS, propertyName) {
+ panic(fmt.Errorf("Unknown metadata property: %s.", propertyName))
+ }
+ c.properties[propertyName] = value
+}
+
+func (c *ComplianceMetadataInfo) SetListValue(propertyName string, value []string) {
+ c.SetStringValue(propertyName, strings.TrimSpace(strings.Join(value, " ")))
+}
+
+func (c *ComplianceMetadataInfo) getStringValue(propertyName string) string {
+ if !slices.Contains(COMPLIANCE_METADATA_PROPS, propertyName) {
+ panic(fmt.Errorf("Unknown metadata property: %s.", propertyName))
+ }
+ return c.properties[propertyName]
+}
+
+func (c *ComplianceMetadataInfo) getAllValues() map[string]string {
+ return c.properties
+}
+
+var (
+ ComplianceMetadataProvider = blueprint.NewProvider[*ComplianceMetadataInfo]()
+)
+
+// buildComplianceMetadataProvider starts with the ModuleContext.ComplianceMetadataInfo() and fills in the common metadata
+// for all module types without accessing their private fields, going only through the android.Module interface
+// and the public/private fields of package android. The final metadata is stored in the module's ComplianceMetadataProvider.
+func buildComplianceMetadataProvider(ctx ModuleContext, m *ModuleBase) {
+ complianceMetadataInfo := ctx.ComplianceMetadataInfo()
+ complianceMetadataInfo.SetStringValue(ComplianceMetadataProp.NAME, m.Name())
+ complianceMetadataInfo.SetStringValue(ComplianceMetadataProp.PACKAGE, ctx.ModuleDir())
+ complianceMetadataInfo.SetStringValue(ComplianceMetadataProp.MODULE_TYPE, ctx.ModuleType())
+
+ switch ctx.ModuleType() {
+ case "license":
+ licenseModule := m.module.(*licenseModule)
+ complianceMetadataInfo.SetListValue(ComplianceMetadataProp.LIC_LICENSE_KINDS, licenseModule.properties.License_kinds)
+ complianceMetadataInfo.SetListValue(ComplianceMetadataProp.LIC_LICENSE_TEXT, PathsForModuleSrc(ctx, licenseModule.properties.License_text).Strings())
+ complianceMetadataInfo.SetStringValue(ComplianceMetadataProp.LIC_PACKAGE_NAME, String(licenseModule.properties.Package_name))
+ case "license_kind":
+ licenseKindModule := m.module.(*licenseKindModule)
+ complianceMetadataInfo.SetListValue(ComplianceMetadataProp.LK_CONDITIONS, licenseKindModule.properties.Conditions)
+ complianceMetadataInfo.SetStringValue(ComplianceMetadataProp.LK_URL, licenseKindModule.properties.Url)
+ default:
+ complianceMetadataInfo.SetStringValue(ComplianceMetadataProp.OS, ctx.Os().String())
+ complianceMetadataInfo.SetStringValue(ComplianceMetadataProp.ARCH, ctx.Arch().String())
+ complianceMetadataInfo.SetStringValue(ComplianceMetadataProp.IS_PRIMARY_ARCH, strconv.FormatBool(ctx.PrimaryArch()))
+ complianceMetadataInfo.SetStringValue(ComplianceMetadataProp.VARIANT, ctx.ModuleSubDir())
+ if m.primaryLicensesProperty != nil && m.primaryLicensesProperty.getName() == "licenses" {
+ complianceMetadataInfo.SetListValue(ComplianceMetadataProp.LICENSES, m.primaryLicensesProperty.getStrings())
+ }
+
+ var installed InstallPaths
+ installed = append(installed, m.module.FilesToInstall()...)
+ installed = append(installed, m.katiInstalls.InstallPaths()...)
+ installed = append(installed, m.katiSymlinks.InstallPaths()...)
+ installed = append(installed, m.katiInitRcInstalls.InstallPaths()...)
+ installed = append(installed, m.katiVintfInstalls.InstallPaths()...)
+ complianceMetadataInfo.SetListValue(ComplianceMetadataProp.INSTALLED_FILES, FirstUniqueStrings(installed.Strings()))
+ }
+ ctx.setProvider(ComplianceMetadataProvider, complianceMetadataInfo)
+}
+
+func init() {
+ RegisterComplianceMetadataSingleton(InitRegistrationContext)
+}
+
+func RegisterComplianceMetadataSingleton(ctx RegistrationContext) {
+ ctx.RegisterParallelSingletonType("compliance_metadata_singleton", complianceMetadataSingletonFactory)
+}
+
+var (
+ // sqlite3 command line tool
+ sqlite3 = pctx.HostBinToolVariable("sqlite3", "sqlite3")
+
+ // Command to import .csv files to sqlite3 database
+ importCsv = pctx.AndroidStaticRule("importCsv",
+ blueprint.RuleParams{
+ Command: `rm -rf $out && ` +
+ `${sqlite3} $out ".import --csv $in modules" && ` +
+ `${sqlite3} $out ".import --csv ${make_metadata} make_metadata" && ` +
+ `${sqlite3} $out ".import --csv ${make_modules} make_modules"`,
+ CommandDeps: []string{"${sqlite3}"},
+ }, "make_metadata", "make_modules")
+)
+
+func complianceMetadataSingletonFactory() Singleton {
+ return &complianceMetadataSingleton{}
+}
+
+type complianceMetadataSingleton struct {
+}
+
+func writerToCsv(csvWriter *csv.Writer, row []string) {
+ err := csvWriter.Write(row)
+ if err != nil {
+ panic(err)
+ }
+}
+
+// Collect compliance metadata from all Soong modules, write to a CSV file and
+// import compliance metadata from Make and Soong to a sqlite3 database.
+func (c *complianceMetadataSingleton) GenerateBuildActions(ctx SingletonContext) {
+ if !ctx.Config().HasDeviceProduct() {
+ return
+ }
+ var buffer bytes.Buffer
+ csvWriter := csv.NewWriter(&buffer)
+
+ // Collect the compliance metadata of Soong modules and write it to the out/soong/compliance-metadata/<product>/soong-modules.csv file.
+ columnNames := []string{"id"}
+ columnNames = append(columnNames, COMPLIANCE_METADATA_PROPS...)
+ writerToCsv(csvWriter, columnNames)
+
+ rowId := -1
+ ctx.VisitAllModules(func(module Module) {
+ if !module.Enabled(ctx) {
+ return
+ }
+ moduleType := ctx.ModuleType(module)
+ if moduleType == "package" {
+ metadataMap := map[string]string{
+ ComplianceMetadataProp.NAME: ctx.ModuleName(module),
+ ComplianceMetadataProp.MODULE_TYPE: ctx.ModuleType(module),
+ ComplianceMetadataProp.PKG_DEFAULT_APPLICABLE_LICENSES: strings.Join(module.base().primaryLicensesProperty.getStrings(), " "),
+ }
+ rowId = rowId + 1
+ metadata := []string{strconv.Itoa(rowId)}
+ for _, propertyName := range COMPLIANCE_METADATA_PROPS {
+ metadata = append(metadata, metadataMap[propertyName])
+ }
+ writerToCsv(csvWriter, metadata)
+ return
+ }
+ if provider, ok := ctx.moduleProvider(module, ComplianceMetadataProvider); ok {
+ metadataInfo := provider.(*ComplianceMetadataInfo)
+ rowId = rowId + 1
+ metadata := []string{strconv.Itoa(rowId)}
+ for _, propertyName := range COMPLIANCE_METADATA_PROPS {
+ metadata = append(metadata, metadataInfo.getStringValue(propertyName))
+ }
+ writerToCsv(csvWriter, metadata)
+ return
+ }
+ })
+ csvWriter.Flush()
+
+ deviceProduct := ctx.Config().DeviceProduct()
+ modulesCsv := PathForOutput(ctx, "compliance-metadata", deviceProduct, "soong-modules.csv")
+ WriteFileRuleVerbatim(ctx, modulesCsv, buffer.String())
+
+ // Metadata generated in Make
+ makeMetadataCsv := PathForOutput(ctx, "compliance-metadata", deviceProduct, "make-metadata.csv")
+ makeModulesCsv := PathForOutput(ctx, "compliance-metadata", deviceProduct, "make-modules.csv")
+
+ // Import metadata from Make and Soong to sqlite3 database
+ complianceMetadataDb := PathForOutput(ctx, "compliance-metadata", deviceProduct, "compliance-metadata.db")
+ ctx.Build(pctx, BuildParams{
+ Rule: importCsv,
+ Input: modulesCsv,
+ Implicits: []Path{
+ makeMetadataCsv,
+ makeModulesCsv,
+ },
+ Output: complianceMetadataDb,
+ Args: map[string]string{
+ "make_metadata": makeMetadataCsv.String(),
+ "make_modules": makeModulesCsv.String(),
+ },
+ })
+
+ // Phony rule "compliance-metadata.db". Run "m compliance-metadata.db" to create the compliance metadata database.
+ ctx.Build(pctx, BuildParams{
+ Rule: blueprint.Phony,
+ Inputs: []Path{complianceMetadataDb},
+ Output: PathForPhony(ctx, "compliance-metadata.db"),
+ })
+
+}
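For illustration only (not part of this change): once "m compliance-metadata.db" has produced the database, it can be inspected with any sqlite3 client. The sketch below uses Go's database/sql with the github.com/mattn/go-sqlite3 driver as an assumed dependency; the sqlite3 CLI invoked by the importCsv rule works just as well. The table name "modules" and the column names ("name", "licenses", "installed_files", plus "id") come from the rule and COMPLIANCE_METADATA_PROPS above; "<product>" is a placeholder for the lunched product.

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/mattn/go-sqlite3" // assumed sqlite3 driver, not part of this change
)

func main() {
	// Path written by complianceMetadataSingleton; replace <product> with the actual product name.
	db, err := sql.Open("sqlite3", "out/soong/compliance-metadata/<product>/compliance-metadata.db")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// List Soong modules that install files, together with their licenses.
	rows, err := db.Query(`SELECT name, licenses, installed_files FROM modules WHERE installed_files != ''`)
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()

	for rows.Next() {
		var name, licenses, installed string
		if err := rows.Scan(&name, &licenses, &installed); err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%s\tlicenses=%s\tinstalled=%s\n", name, licenses, installed)
	}
	if err := rows.Err(); err != nil {
		log.Fatal(err)
	}
}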
diff --git a/android/module.go b/android/module.go
index 2dc63d6..e2c24bf 100644
--- a/android/module.go
+++ b/android/module.go
@@ -26,8 +26,6 @@
"sort"
"strings"
- "android/soong/bazel"
-
"github.com/google/blueprint"
"github.com/google/blueprint/proptools"
)
@@ -849,9 +847,6 @@
// archPropRoot that is filled with arch specific values by the arch mutator.
archProperties [][]interface{}
- // Properties specific to the Blueprint to BUILD migration.
- bazelTargetModuleProperties bazel.BazelTargetModuleProperties
-
// Information about all the properties on the module that contains visibility rules that need
// checking.
visibilityPropertyInfo []visibilityProperty
@@ -919,6 +914,10 @@
// outputFiles stores the output of a module by tag and is used to set
// the OutputFilesProvider in GenerateBuildActions
outputFiles OutputFilesInfo
+
+ // complianceMetadataInfo is where different module types dump their compliance metadata.
+ // See the android.ModuleContext interface.
+ complianceMetadataInfo *ComplianceMetadataInfo
}
func (m *ModuleBase) AddJSONData(d *map[string]interface{}) {
@@ -2049,6 +2048,8 @@
if m.outputFiles.DefaultOutputFiles != nil || m.outputFiles.TaggedOutputFiles != nil {
SetProvider(ctx, OutputFilesProvider, m.outputFiles)
}
+
+ buildComplianceMetadataProvider(ctx, m)
}
func SetJarJarPrefixHandler(handler func(ModuleContext)) {
@@ -2540,34 +2541,44 @@
// reading OutputFilesProvider before GenerateBuildActions is finished.
// If a module doesn't have the OutputFilesProvider, nil is returned.
func outputFilesForModuleFromProvider(ctx PathContext, module blueprint.Module, tag string) (Paths, error) {
- // TODO: support OutputFilesProvider for singletons
- mctx, ok := ctx.(ModuleContext)
- if !ok {
- return nil, nil
- }
- if mctx.Module() != module {
- if outputFilesProvider, ok := OtherModuleProvider(mctx, module, OutputFilesProvider); ok {
+ var outputFilesProvider OutputFilesInfo
+
+ if mctx, isMctx := ctx.(ModuleContext); isMctx {
+ if mctx.Module() != module {
+ outputFilesProvider, _ = OtherModuleProvider(mctx, module, OutputFilesProvider)
+ } else {
if tag == "" {
- return outputFilesProvider.DefaultOutputFiles, nil
- } else if taggedOutputFiles, hasTag := outputFilesProvider.TaggedOutputFiles[tag]; hasTag {
+ return mctx.Module().base().outputFiles.DefaultOutputFiles, nil
+ } else if taggedOutputFiles, hasTag := mctx.Module().base().outputFiles.TaggedOutputFiles[tag]; hasTag {
return taggedOutputFiles, nil
} else {
- return nil, fmt.Errorf("unsupported module reference tag %q", tag)
+ return nil, fmt.Errorf("unsupported tag %q for module getting its own output files", tag)
}
}
- } else {
+ } else if cta, isCta := ctx.(*singletonContextAdaptor); isCta {
+ providerData, _ := cta.moduleProvider(module, OutputFilesProvider)
+ outputFilesProvider, _ = providerData.(OutputFilesInfo)
+ }
+ // TODO: Add a check for skipped context
+
+ if !outputFilesProvider.isEmpty() {
if tag == "" {
- return mctx.Module().base().outputFiles.DefaultOutputFiles, nil
- } else if taggedOutputFiles, hasTag := mctx.Module().base().outputFiles.TaggedOutputFiles[tag]; hasTag {
+ return outputFilesProvider.DefaultOutputFiles, nil
+ } else if taggedOutputFiles, hasTag := outputFilesProvider.TaggedOutputFiles[tag]; hasTag {
return taggedOutputFiles, nil
} else {
- return nil, fmt.Errorf("unsupported tag %q for module getting its own output files", tag)
+ return nil, fmt.Errorf("unsupported module reference tag %q", tag)
}
}
+
// TODO: Add a check for param module not having OutputFilesProvider set
return nil, nil
}
+func (o OutputFilesInfo) isEmpty() bool {
+ return o.DefaultOutputFiles == nil && o.TaggedOutputFiles == nil
+}
+
type OutputFilesInfo struct {
// default output files when tag is an empty string ""
DefaultOutputFiles Paths
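A self-contained sketch (with stand-in types, not the Soong ones) of the lookup pattern the rewrite above converges on: the provider result is collected into a zero value regardless of which context kind supplied it, and isEmpty() then distinguishes "no OutputFilesProvider set" from real data before the tag is resolved.

package main

import "fmt"

type OutputFilesInfo struct {
	DefaultOutputFiles []string
	TaggedOutputFiles  map[string][]string
}

func (o OutputFilesInfo) isEmpty() bool {
	return o.DefaultOutputFiles == nil && o.TaggedOutputFiles == nil
}

// outputFilesFor mimics outputFilesForModuleFromProvider: look the info up from
// whichever context is available, then resolve the tag only if anything was found.
func outputFilesFor(lookup func() (OutputFilesInfo, bool), tag string) ([]string, error) {
	var info OutputFilesInfo
	if provided, ok := lookup(); ok {
		info = provided
	}
	if !info.isEmpty() {
		if tag == "" {
			return info.DefaultOutputFiles, nil
		}
		if tagged, ok := info.TaggedOutputFiles[tag]; ok {
			return tagged, nil
		}
		return nil, fmt.Errorf("unsupported module reference tag %q", tag)
	}
	// No OutputFilesProvider was set for this module.
	return nil, nil
}

func main() {
	lookup := func() (OutputFilesInfo, bool) {
		return OutputFilesInfo{DefaultOutputFiles: []string{"out/foo.jar"}}, true
	}
	files, err := outputFilesFor(lookup, "")
	fmt.Println(files, err) // [out/foo.jar] <nil>
}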
diff --git a/android/module_context.go b/android/module_context.go
index e2677a4..2e16a24 100644
--- a/android/module_context.go
+++ b/android/module_context.go
@@ -216,6 +216,11 @@
// SetOutputFiles stores the outputFiles to outputFiles property, which is used
// to set the OutputFilesProvider later.
SetOutputFiles(outputFiles Paths, tag string)
+
+ // ComplianceMetadataInfo returns a ComplianceMetadataInfo instance into which different module types dump their metadata,
+ // usually from their GenerateAndroidBuildActions().
+ // See android.ModuleBase.complianceMetadataInfo
+ ComplianceMetadataInfo() *ComplianceMetadataInfo
}
type moduleContext struct {
@@ -729,6 +734,15 @@
}
}
+func (m *moduleContext) ComplianceMetadataInfo() *ComplianceMetadataInfo {
+ if complianceMetadataInfo := m.module.base().complianceMetadataInfo; complianceMetadataInfo != nil {
+ return complianceMetadataInfo
+ }
+ complianceMetadataInfo := NewComplianceMetadataInfo()
+ m.module.base().complianceMetadataInfo = complianceMetadataInfo
+ return complianceMetadataInfo
+}
+
// Returns a list of paths expanded from globs and modules referenced using ":module" syntax. The property must
// be tagged with `android:"path" to support automatic source module dependency resolution.
//
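A minimal sketch of how a hypothetical module type could use the new accessor; the struct, its fields, and the chosen values are illustrative, while ComplianceMetadataInfo, ComplianceMetadataProp, and ctx.ComplianceMetadataInfo() come from compliance_metadata.go and the accessor added just above. Common properties (name, package, os, arch, installed files, licenses) do not need to be set here; buildComplianceMetadataProvider fills them in after GenerateAndroidBuildActions runs.

package example

import (
	"strconv"

	"android/soong/android"
)

type exampleLibrary struct {
	android.ModuleBase
	// Hypothetical fields, assumed to be filled in elsewhere by this module type.
	staticDeps     []string
	staticDepFiles android.Paths
	outputFile     android.Path
}

func (l *exampleLibrary) GenerateAndroidBuildActions(ctx android.ModuleContext) {
	// Obtain (or lazily create) the metadata holder for this module.
	metadata := ctx.ComplianceMetadataInfo()

	// Values must use property names from COMPLIANCE_METADATA_PROPS,
	// otherwise SetStringValue/SetListValue panic.
	metadata.SetStringValue(android.ComplianceMetadataProp.IS_STATIC_LIB, strconv.FormatBool(true))
	metadata.SetListValue(android.ComplianceMetadataProp.STATIC_DEPS, l.staticDeps)
	metadata.SetListValue(android.ComplianceMetadataProp.STATIC_DEP_FILES, l.staticDepFiles.Strings())
	if l.outputFile != nil {
		metadata.SetListValue(android.ComplianceMetadataProp.BUILT_FILES, []string{l.outputFile.String()})
	}
}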
diff --git a/android/soongconfig/Android.bp b/android/soongconfig/Android.bp
index 8fe1ff1..5a6df26 100644
--- a/android/soongconfig/Android.bp
+++ b/android/soongconfig/Android.bp
@@ -9,7 +9,6 @@
"blueprint",
"blueprint-parser",
"blueprint-proptools",
- "soong-bazel",
"soong-starlark-format",
],
srcs: [
diff --git a/bazel/Android.bp b/bazel/Android.bp
index 4709f5c..f8273a8 100644
--- a/bazel/Android.bp
+++ b/bazel/Android.bp
@@ -6,22 +6,17 @@
name: "soong-bazel",
pkgPath: "android/soong/bazel",
srcs: [
- "aquery.go",
- "bazel_proxy.go",
"configurability.go",
- "constants.go",
"properties.go",
"testing.go",
],
testSrcs: [
- "aquery_test.go",
"properties_test.go",
],
pluginFor: [
"soong_build",
],
deps: [
- "bazel_analysis_v2_proto",
"blueprint",
],
}
diff --git a/bazel/aquery.go b/bazel/aquery.go
deleted file mode 100644
index 35942bc..0000000
--- a/bazel/aquery.go
+++ /dev/null
@@ -1,768 +0,0 @@
-// Copyright 2020 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package bazel
-
-import (
- "crypto/sha256"
- "encoding/base64"
- "encoding/json"
- "errors"
- "fmt"
- "path/filepath"
- "reflect"
- "sort"
- "strings"
- "sync"
-
- analysis_v2_proto "prebuilts/bazel/common/proto/analysis_v2"
-
- "github.com/google/blueprint/metrics"
- "github.com/google/blueprint/proptools"
- "google.golang.org/protobuf/proto"
-)
-
-type artifactId int
-type depsetId int
-type pathFragmentId int
-
-// KeyValuePair represents Bazel's aquery proto, KeyValuePair.
-type KeyValuePair struct {
- Key string
- Value string
-}
-
-// AqueryDepset is a depset definition from Bazel's aquery response. This is
-// akin to the `depSetOfFiles` in the response proto, except:
-// - direct artifacts are enumerated by full path instead of by ID
-// - it has a hash of the depset contents, instead of an int ID (for determinism)
-//
-// A depset is a data structure for efficient transitive handling of artifact
-// paths. A single depset consists of one or more artifact paths and one or
-// more "child" depsets.
-type AqueryDepset struct {
- ContentHash string
- DirectArtifacts []string
- TransitiveDepSetHashes []string
-}
-
-// BuildStatement contains information to register a build statement corresponding (one to one)
-// with a Bazel action from Bazel's action graph.
-type BuildStatement struct {
- Command string
- Depfile *string
- OutputPaths []string
- SymlinkPaths []string
- Env []*analysis_v2_proto.KeyValuePair
- Mnemonic string
-
- // Inputs of this build statement, either as unexpanded depsets or expanded
- // input paths. There should be no overlap between these fields; an input
- // path should either be included as part of an unexpanded depset or a raw
- // input path string, but not both.
- InputDepsetHashes []string
- InputPaths []string
- FileContents string
- // If ShouldRunInSbox is true, Soong will use sbox to created an isolated environment
- // and run the mixed build action there
- ShouldRunInSbox bool
- // A list of files to add as implicit deps to the outputs of this BuildStatement.
- // Unlike most properties in BuildStatement, these paths must be relative to the root of
- // the whole out/ folder, instead of relative to ctx.Config().BazelContext.OutputBase()
- ImplicitDeps []string
- IsExecutable bool
-}
-
-// A helper type for aquery processing which facilitates retrieval of path IDs from their
-// less readable Bazel structures (depset and path fragment).
-type aqueryArtifactHandler struct {
- // Maps depset id to AqueryDepset, a representation of depset which is
- // post-processed for middleman artifact handling, unhandled artifact
- // dropping, content hashing, etc.
- depsetIdToAqueryDepset map[depsetId]AqueryDepset
- emptyDepsetIds map[depsetId]struct{}
- // Maps content hash to AqueryDepset.
- depsetHashToAqueryDepset map[string]AqueryDepset
-
- // depsetIdToArtifactIdsCache is a memoization of depset flattening, because flattening
- // may be an expensive operation.
- depsetHashToArtifactPathsCache sync.Map
- // Maps artifact ids to fully expanded paths.
- artifactIdToPath map[artifactId]string
-}
-
-// The tokens should be substituted with the value specified here, instead of the
-// one returned in 'substitutions' of TemplateExpand action.
-var templateActionOverriddenTokens = map[string]string{
- // Uses "python3" for %python_binary% instead of the value returned by aquery
- // which is "py3wrapper.sh". See removePy3wrapperScript.
- "%python_binary%": "python3",
-}
-
-const (
- middlemanMnemonic = "Middleman"
- // The file name of py3wrapper.sh, which is used by py_binary targets.
- py3wrapperFileName = "/py3wrapper.sh"
-)
-
-func indexBy[K comparable, V any](values []V, keyFn func(v V) K) map[K]V {
- m := map[K]V{}
- for _, v := range values {
- m[keyFn(v)] = v
- }
- return m
-}
-
-func newAqueryHandler(aqueryResult *analysis_v2_proto.ActionGraphContainer) (*aqueryArtifactHandler, error) {
- pathFragments := indexBy(aqueryResult.PathFragments, func(pf *analysis_v2_proto.PathFragment) pathFragmentId {
- return pathFragmentId(pf.Id)
- })
-
- artifactIdToPath := make(map[artifactId]string, len(aqueryResult.Artifacts))
- for _, artifact := range aqueryResult.Artifacts {
- artifactPath, err := expandPathFragment(pathFragmentId(artifact.PathFragmentId), pathFragments)
- if err != nil {
- return nil, err
- }
- if artifact.IsTreeArtifact &&
- !strings.HasPrefix(artifactPath, "bazel-out/io_bazel_rules_go/") &&
- !strings.HasPrefix(artifactPath, "bazel-out/rules_java_builtin/") {
- // Since we're using ninja as an executor, we can't use tree artifacts. Ninja only
- // considers a file/directory "dirty" when it's mtime changes. Directories' mtimes will
- // only change when a file in the directory is added/removed, but not when files in
- // the directory are changed, or when files in subdirectories are changed/added/removed.
- // Bazel handles this by walking the directory and generating a hash for it after the
- // action runs, which we would have to do as well if we wanted to support these
- // artifacts in mixed builds.
- //
- // However, there are some bazel built-in rules that use tree artifacts. Allow those,
- // but keep in mind that they'll have incrementality issues.
- return nil, fmt.Errorf("tree artifacts are currently not supported in mixed builds: " + artifactPath)
- }
- artifactIdToPath[artifactId(artifact.Id)] = artifactPath
- }
-
- // Map middleman artifact ContentHash to input artifact depset ID.
- // Middleman artifacts are treated as "substitute" artifacts for mixed builds. For example,
- // if we find a middleman action which has inputs [foo, bar], and output [baz_middleman], then,
- // for each other action which has input [baz_middleman], we add [foo, bar] to the inputs for
- // that action instead.
- middlemanIdToDepsetIds := map[artifactId][]uint32{}
- for _, actionEntry := range aqueryResult.Actions {
- if actionEntry.Mnemonic == middlemanMnemonic {
- for _, outputId := range actionEntry.OutputIds {
- middlemanIdToDepsetIds[artifactId(outputId)] = actionEntry.InputDepSetIds
- }
- }
- }
-
- depsetIdToDepset := indexBy(aqueryResult.DepSetOfFiles, func(d *analysis_v2_proto.DepSetOfFiles) depsetId {
- return depsetId(d.Id)
- })
-
- aqueryHandler := aqueryArtifactHandler{
- depsetIdToAqueryDepset: map[depsetId]AqueryDepset{},
- depsetHashToAqueryDepset: map[string]AqueryDepset{},
- depsetHashToArtifactPathsCache: sync.Map{},
- emptyDepsetIds: make(map[depsetId]struct{}, 0),
- artifactIdToPath: artifactIdToPath,
- }
-
- // Validate and adjust aqueryResult.DepSetOfFiles values.
- for _, depset := range aqueryResult.DepSetOfFiles {
- _, err := aqueryHandler.populateDepsetMaps(depset, middlemanIdToDepsetIds, depsetIdToDepset)
- if err != nil {
- return nil, err
- }
- }
-
- return &aqueryHandler, nil
-}
-
-// Ensures that the handler's depsetIdToAqueryDepset map contains an entry for the given
-// depset.
-func (a *aqueryArtifactHandler) populateDepsetMaps(depset *analysis_v2_proto.DepSetOfFiles, middlemanIdToDepsetIds map[artifactId][]uint32, depsetIdToDepset map[depsetId]*analysis_v2_proto.DepSetOfFiles) (*AqueryDepset, error) {
- if aqueryDepset, containsDepset := a.depsetIdToAqueryDepset[depsetId(depset.Id)]; containsDepset {
- return &aqueryDepset, nil
- }
- transitiveDepsetIds := depset.TransitiveDepSetIds
- directArtifactPaths := make([]string, 0, len(depset.DirectArtifactIds))
- for _, id := range depset.DirectArtifactIds {
- aId := artifactId(id)
- path, pathExists := a.artifactIdToPath[aId]
- if !pathExists {
- return nil, fmt.Errorf("undefined input artifactId %d", aId)
- }
- // Filter out any inputs which are universally dropped, and swap middleman
- // artifacts with their corresponding depsets.
- if depsetsToUse, isMiddleman := middlemanIdToDepsetIds[aId]; isMiddleman {
- // Swap middleman artifacts with their corresponding depsets and drop the middleman artifacts.
- transitiveDepsetIds = append(transitiveDepsetIds, depsetsToUse...)
- } else if strings.HasSuffix(path, py3wrapperFileName) ||
- strings.HasPrefix(path, "../bazel_tools") {
- continue
- // Drop these artifacts.
- // See go/python-binary-host-mixed-build for more details.
- // 1) Drop py3wrapper.sh, just use python binary, the launcher script generated by the
- // TemplateExpandAction handles everything necessary to launch a Pythin application.
- // 2) ../bazel_tools: they have MODIFY timestamp 10years in the future and would cause the
- // containing depset to always be considered newer than their outputs.
- } else {
- directArtifactPaths = append(directArtifactPaths, path)
- }
- }
-
- childDepsetHashes := make([]string, 0, len(transitiveDepsetIds))
- for _, id := range transitiveDepsetIds {
- childDepsetId := depsetId(id)
- childDepset, exists := depsetIdToDepset[childDepsetId]
- if !exists {
- if _, empty := a.emptyDepsetIds[childDepsetId]; empty {
- continue
- } else {
- return nil, fmt.Errorf("undefined input depsetId %d (referenced by depsetId %d)", childDepsetId, depset.Id)
- }
- }
- if childAqueryDepset, err := a.populateDepsetMaps(childDepset, middlemanIdToDepsetIds, depsetIdToDepset); err != nil {
- return nil, err
- } else if childAqueryDepset == nil {
- continue
- } else {
- childDepsetHashes = append(childDepsetHashes, childAqueryDepset.ContentHash)
- }
- }
- if len(directArtifactPaths) == 0 && len(childDepsetHashes) == 0 {
- a.emptyDepsetIds[depsetId(depset.Id)] = struct{}{}
- return nil, nil
- }
- aqueryDepset := AqueryDepset{
- ContentHash: depsetContentHash(directArtifactPaths, childDepsetHashes),
- DirectArtifacts: directArtifactPaths,
- TransitiveDepSetHashes: childDepsetHashes,
- }
- a.depsetIdToAqueryDepset[depsetId(depset.Id)] = aqueryDepset
- a.depsetHashToAqueryDepset[aqueryDepset.ContentHash] = aqueryDepset
- return &aqueryDepset, nil
-}
-
-// getInputPaths flattens the depsets of the given IDs and returns all transitive
-// input paths contained in these depsets.
-// This is a potentially expensive operation, and should not be invoked except
-// for actions which need specialized input handling.
-func (a *aqueryArtifactHandler) getInputPaths(depsetIds []uint32) ([]string, error) {
- var inputPaths []string
-
- for _, id := range depsetIds {
- inputDepSetId := depsetId(id)
- depset := a.depsetIdToAqueryDepset[inputDepSetId]
- inputArtifacts, err := a.artifactPathsFromDepsetHash(depset.ContentHash)
- if err != nil {
- return nil, err
- }
- for _, inputPath := range inputArtifacts {
- inputPaths = append(inputPaths, inputPath)
- }
- }
-
- return inputPaths, nil
-}
-
-func (a *aqueryArtifactHandler) artifactPathsFromDepsetHash(depsetHash string) ([]string, error) {
- if result, exists := a.depsetHashToArtifactPathsCache.Load(depsetHash); exists {
- return result.([]string), nil
- }
- if depset, exists := a.depsetHashToAqueryDepset[depsetHash]; exists {
- result := depset.DirectArtifacts
- for _, childHash := range depset.TransitiveDepSetHashes {
- childArtifactIds, err := a.artifactPathsFromDepsetHash(childHash)
- if err != nil {
- return nil, err
- }
- result = append(result, childArtifactIds...)
- }
- a.depsetHashToArtifactPathsCache.Store(depsetHash, result)
- return result, nil
- } else {
- return nil, fmt.Errorf("undefined input depset hash %s", depsetHash)
- }
-}
-
-// AqueryBuildStatements returns a slice of BuildStatements and a slice of AqueryDepset
-// which should be registered (and output to a ninja file) to correspond with Bazel's
-// action graph, as described by the given action graph json proto.
-// BuildStatements are one-to-one with actions in the given action graph, and AqueryDepsets
-// are one-to-one with Bazel's depSetOfFiles objects.
-func AqueryBuildStatements(aqueryJsonProto []byte, eventHandler *metrics.EventHandler) ([]*BuildStatement, []AqueryDepset, error) {
- aqueryProto := &analysis_v2_proto.ActionGraphContainer{}
- err := proto.Unmarshal(aqueryJsonProto, aqueryProto)
- if err != nil {
- return nil, nil, err
- }
-
- var aqueryHandler *aqueryArtifactHandler
- {
- eventHandler.Begin("init_handler")
- defer eventHandler.End("init_handler")
- aqueryHandler, err = newAqueryHandler(aqueryProto)
- if err != nil {
- return nil, nil, err
- }
- }
-
- // allocate both length and capacity so each goroutine can write to an index independently without
- // any need for synchronization for slice access.
- buildStatements := make([]*BuildStatement, len(aqueryProto.Actions))
- {
- eventHandler.Begin("build_statements")
- defer eventHandler.End("build_statements")
- wg := sync.WaitGroup{}
- var errOnce sync.Once
- id2targets := make(map[uint32]string, len(aqueryProto.Targets))
- for _, t := range aqueryProto.Targets {
- id2targets[t.GetId()] = t.GetLabel()
- }
- for i, actionEntry := range aqueryProto.Actions {
- wg.Add(1)
- go func(i int, actionEntry *analysis_v2_proto.Action) {
- if strings.HasPrefix(id2targets[actionEntry.TargetId], "@bazel_tools//") {
- // bazel_tools are removed depsets in `populateDepsetMaps()` so skipping
- // conversion to build statements as well
- buildStatements[i] = nil
- } else if buildStatement, aErr := aqueryHandler.actionToBuildStatement(actionEntry); aErr != nil {
- errOnce.Do(func() {
- aErr = fmt.Errorf("%s: [%s] [%s]", aErr.Error(), actionEntry.GetMnemonic(), id2targets[actionEntry.TargetId])
- err = aErr
- })
- } else {
- // set build statement at an index rather than appending such that each goroutine does not
- // impact other goroutines
- buildStatements[i] = buildStatement
- }
- wg.Done()
- }(i, actionEntry)
- }
- wg.Wait()
- }
- if err != nil {
- return nil, nil, err
- }
-
- depsetsByHash := map[string]AqueryDepset{}
- depsets := make([]AqueryDepset, 0, len(aqueryHandler.depsetIdToAqueryDepset))
- {
- eventHandler.Begin("depsets")
- defer eventHandler.End("depsets")
- for _, aqueryDepset := range aqueryHandler.depsetIdToAqueryDepset {
- if prevEntry, hasKey := depsetsByHash[aqueryDepset.ContentHash]; hasKey {
- // Two depsets collide on hash. Ensure that their contents are identical.
- if !reflect.DeepEqual(aqueryDepset, prevEntry) {
- return nil, nil, fmt.Errorf("two different depsets have the same hash: %v, %v", prevEntry, aqueryDepset)
- }
- } else {
- depsetsByHash[aqueryDepset.ContentHash] = aqueryDepset
- depsets = append(depsets, aqueryDepset)
- }
- }
- }
-
- eventHandler.Do("build_statement_sort", func() {
- // Build Statements and depsets must be sorted by their content hash to
- // preserve determinism between builds (this will result in consistent ninja file
- // output). Note they are not sorted by their original IDs nor their Bazel ordering,
- // as Bazel gives nondeterministic ordering / identifiers in aquery responses.
- sort.Slice(buildStatements, func(i, j int) bool {
- // Sort all nil statements to the end of the slice
- if buildStatements[i] == nil {
- return false
- } else if buildStatements[j] == nil {
- return true
- }
- //For build statements, compare output lists. In Bazel, each output file
- // may only have one action which generates it, so this will provide
- // a deterministic ordering.
- outputs_i := buildStatements[i].OutputPaths
- outputs_j := buildStatements[j].OutputPaths
- if len(outputs_i) != len(outputs_j) {
- return len(outputs_i) < len(outputs_j)
- }
- if len(outputs_i) == 0 {
- // No outputs for these actions, so compare commands.
- return buildStatements[i].Command < buildStatements[j].Command
- }
- // There may be multiple outputs, but the output ordering is deterministic.
- return outputs_i[0] < outputs_j[0]
- })
- })
- eventHandler.Do("depset_sort", func() {
- sort.Slice(depsets, func(i, j int) bool {
- return depsets[i].ContentHash < depsets[j].ContentHash
- })
- })
- return buildStatements, depsets, nil
-}
-
-// depsetContentHash computes and returns a SHA256 checksum of the contents of
-// the given depset. This content hash may serve as the depset's identifier.
-// Using a content hash for an identifier is superior for determinism. (For example,
-// using an integer identifier which depends on the order in which the depsets are
-// created would result in nondeterministic depset IDs.)
-func depsetContentHash(directPaths []string, transitiveDepsetHashes []string) string {
- h := sha256.New()
- // Use newline as delimiter, as paths cannot contain newline.
- h.Write([]byte(strings.Join(directPaths, "\n")))
- h.Write([]byte(strings.Join(transitiveDepsetHashes, "")))
- fullHash := base64.RawURLEncoding.EncodeToString(h.Sum(nil))
- return fullHash
-}
-
-func (a *aqueryArtifactHandler) depsetContentHashes(inputDepsetIds []uint32) ([]string, error) {
- var hashes []string
- for _, id := range inputDepsetIds {
- dId := depsetId(id)
- if aqueryDepset, exists := a.depsetIdToAqueryDepset[dId]; !exists {
- if _, empty := a.emptyDepsetIds[dId]; !empty {
- return nil, fmt.Errorf("undefined (not even empty) input depsetId %d", dId)
- }
- } else {
- hashes = append(hashes, aqueryDepset.ContentHash)
- }
- }
- return hashes, nil
-}
-
-// escapes the args received from aquery and creates a command string
-func commandString(actionEntry *analysis_v2_proto.Action) string {
- argsEscaped := make([]string, len(actionEntry.Arguments))
- for i, arg := range actionEntry.Arguments {
- if arg == "" {
- // If this is an empty string, add ''
- // And not
- // 1. (literal empty)
- // 2. `''\'''\'''` (escaped version of '')
- //
- // If we had used (1), then this would appear as a whitespace when we strings.Join
- argsEscaped[i] = "''"
- } else {
- argsEscaped[i] = proptools.ShellEscapeIncludingSpaces(arg)
- }
- }
- return strings.Join(argsEscaped, " ")
-}
-
-func (a *aqueryArtifactHandler) normalActionBuildStatement(actionEntry *analysis_v2_proto.Action) (*BuildStatement, error) {
- command := commandString(actionEntry)
- inputDepsetHashes, err := a.depsetContentHashes(actionEntry.InputDepSetIds)
- if err != nil {
- return nil, err
- }
- outputPaths, depfile, err := a.getOutputPaths(actionEntry)
- if err != nil {
- return nil, err
- }
-
- buildStatement := &BuildStatement{
- Command: command,
- Depfile: depfile,
- OutputPaths: outputPaths,
- InputDepsetHashes: inputDepsetHashes,
- Env: actionEntry.EnvironmentVariables,
- Mnemonic: actionEntry.Mnemonic,
- }
- if buildStatement.Mnemonic == "GoToolchainBinaryBuild" {
- // Unlike b's execution root, mixed build execution root contains a symlink to prebuilts/go
- // This causes issues for `GOCACHE=$(mktemp -d) go build ...`
- // To prevent this, sandbox this action in mixed builds as well
- buildStatement.ShouldRunInSbox = true
- }
- return buildStatement, nil
-}
-
-func (a *aqueryArtifactHandler) templateExpandActionBuildStatement(actionEntry *analysis_v2_proto.Action) (*BuildStatement, error) {
- outputPaths, depfile, err := a.getOutputPaths(actionEntry)
- if err != nil {
- return nil, err
- }
- if len(outputPaths) != 1 {
- return nil, fmt.Errorf("Expect 1 output to template expand action, got: output %q", outputPaths)
- }
- expandedTemplateContent := expandTemplateContent(actionEntry)
- // The expandedTemplateContent is escaped for being used in double quotes and shell unescape,
- // and the new line characters (\n) are also changed to \\n which avoids some Ninja escape on \n, which might
- // change \n to space and mess up the format of Python programs.
- // sed is used to convert \\n back to \n before saving to output file.
- // See go/python-binary-host-mixed-build for more details.
- command := fmt.Sprintf(`/bin/bash -c 'echo "%[1]s" | sed "s/\\\\n/\\n/g" > %[2]s && chmod a+x %[2]s'`,
- escapeCommandlineArgument(expandedTemplateContent), outputPaths[0])
- inputDepsetHashes, err := a.depsetContentHashes(actionEntry.InputDepSetIds)
- if err != nil {
- return nil, err
- }
-
- buildStatement := &BuildStatement{
- Command: command,
- Depfile: depfile,
- OutputPaths: outputPaths,
- InputDepsetHashes: inputDepsetHashes,
- Env: actionEntry.EnvironmentVariables,
- Mnemonic: actionEntry.Mnemonic,
- }
- return buildStatement, nil
-}
-
-func (a *aqueryArtifactHandler) fileWriteActionBuildStatement(actionEntry *analysis_v2_proto.Action) (*BuildStatement, error) {
- outputPaths, _, err := a.getOutputPaths(actionEntry)
- var depsetHashes []string
- if err == nil {
- depsetHashes, err = a.depsetContentHashes(actionEntry.InputDepSetIds)
- }
- if err != nil {
- return nil, err
- }
- return &BuildStatement{
- Depfile: nil,
- OutputPaths: outputPaths,
- Env: actionEntry.EnvironmentVariables,
- Mnemonic: actionEntry.Mnemonic,
- InputDepsetHashes: depsetHashes,
- FileContents: actionEntry.FileContents,
- IsExecutable: actionEntry.IsExecutable,
- }, nil
-}
-
-func (a *aqueryArtifactHandler) symlinkTreeActionBuildStatement(actionEntry *analysis_v2_proto.Action) (*BuildStatement, error) {
- outputPaths, _, err := a.getOutputPaths(actionEntry)
- if err != nil {
- return nil, err
- }
- inputPaths, err := a.getInputPaths(actionEntry.InputDepSetIds)
- if err != nil {
- return nil, err
- }
- if len(inputPaths) != 1 || len(outputPaths) != 1 {
- return nil, fmt.Errorf("Expect 1 input and 1 output to symlink action, got: input %q, output %q", inputPaths, outputPaths)
- }
- // The actual command is generated in bazelSingleton.GenerateBuildActions
- return &BuildStatement{
- Depfile: nil,
- OutputPaths: outputPaths,
- Env: actionEntry.EnvironmentVariables,
- Mnemonic: actionEntry.Mnemonic,
- InputPaths: inputPaths,
- }, nil
-}
-
-type bazelSandwichJson struct {
- Target string `json:"target"`
- DependOnTarget *bool `json:"depend_on_target,omitempty"`
- ImplicitDeps []string `json:"implicit_deps"`
-}
-
-func (a *aqueryArtifactHandler) unresolvedSymlinkActionBuildStatement(actionEntry *analysis_v2_proto.Action) (*BuildStatement, error) {
- outputPaths, depfile, err := a.getOutputPaths(actionEntry)
- if err != nil {
- return nil, err
- }
- if len(actionEntry.InputDepSetIds) != 0 || len(outputPaths) != 1 {
- return nil, fmt.Errorf("expected 0 inputs and 1 output to symlink action, got: input %q, output %q", actionEntry.InputDepSetIds, outputPaths)
- }
- target := actionEntry.UnresolvedSymlinkTarget
- if target == "" {
- return nil, fmt.Errorf("expected an unresolved_symlink_target, but didn't get one")
- }
- if filepath.Clean(target) != target {
- return nil, fmt.Errorf("expected %q, got %q", filepath.Clean(target), target)
- }
- if strings.HasPrefix(target, "/") {
- return nil, fmt.Errorf("no absolute symlinks allowed: %s", target)
- }
-
- out := outputPaths[0]
- outDir := filepath.Dir(out)
- var implicitDeps []string
- if strings.HasPrefix(target, "bazel_sandwich:") {
- j := bazelSandwichJson{}
- err := json.Unmarshal([]byte(target[len("bazel_sandwich:"):]), &j)
- if err != nil {
- return nil, err
- }
- if proptools.BoolDefault(j.DependOnTarget, true) {
- implicitDeps = append(implicitDeps, j.Target)
- }
- implicitDeps = append(implicitDeps, j.ImplicitDeps...)
- dotDotsToReachCwd := ""
- if outDir != "." {
- dotDotsToReachCwd = strings.Repeat("../", strings.Count(outDir, "/")+1)
- }
- target = proptools.ShellEscapeIncludingSpaces(j.Target)
- target = "{DOTDOTS_TO_OUTPUT_ROOT}" + dotDotsToReachCwd + target
- } else {
- target = proptools.ShellEscapeIncludingSpaces(target)
- }
-
- outDir = proptools.ShellEscapeIncludingSpaces(outDir)
- out = proptools.ShellEscapeIncludingSpaces(out)
- // Use absolute paths, because some soong actions don't play well with relative paths (for example, `cp -d`).
- command := fmt.Sprintf("mkdir -p %[1]s && rm -f %[2]s && ln -sf %[3]s %[2]s", outDir, out, target)
- symlinkPaths := outputPaths[:]
-
- buildStatement := &BuildStatement{
- Command: command,
- Depfile: depfile,
- OutputPaths: outputPaths,
- Env: actionEntry.EnvironmentVariables,
- Mnemonic: actionEntry.Mnemonic,
- SymlinkPaths: symlinkPaths,
- ImplicitDeps: implicitDeps,
- }
- return buildStatement, nil
-}
-
-func (a *aqueryArtifactHandler) symlinkActionBuildStatement(actionEntry *analysis_v2_proto.Action) (*BuildStatement, error) {
- outputPaths, depfile, err := a.getOutputPaths(actionEntry)
- if err != nil {
- return nil, err
- }
-
- inputPaths, err := a.getInputPaths(actionEntry.InputDepSetIds)
- if err != nil {
- return nil, err
- }
- if len(inputPaths) != 1 || len(outputPaths) != 1 {
- return nil, fmt.Errorf("Expect 1 input and 1 output to symlink action, got: input %q, output %q", inputPaths, outputPaths)
- }
- out := outputPaths[0]
- outDir := proptools.ShellEscapeIncludingSpaces(filepath.Dir(out))
- out = proptools.ShellEscapeIncludingSpaces(out)
- in := filepath.Join("$PWD", proptools.ShellEscapeIncludingSpaces(inputPaths[0]))
- // Use absolute paths, because some soong actions don't play well with relative paths (for example, `cp -d`).
- command := fmt.Sprintf("mkdir -p %[1]s && rm -f %[2]s && ln -sf %[3]s %[2]s", outDir, out, in)
- symlinkPaths := outputPaths[:]
-
- buildStatement := &BuildStatement{
- Command: command,
- Depfile: depfile,
- OutputPaths: outputPaths,
- InputPaths: inputPaths,
- Env: actionEntry.EnvironmentVariables,
- Mnemonic: actionEntry.Mnemonic,
- SymlinkPaths: symlinkPaths,
- }
- return buildStatement, nil
-}
-
-func (a *aqueryArtifactHandler) getOutputPaths(actionEntry *analysis_v2_proto.Action) (outputPaths []string, depfile *string, err error) {
- for _, outputId := range actionEntry.OutputIds {
- outputPath, exists := a.artifactIdToPath[artifactId(outputId)]
- if !exists {
- err = fmt.Errorf("undefined outputId %d", outputId)
- return
- }
- ext := filepath.Ext(outputPath)
- if ext == ".d" {
- if depfile != nil {
- err = fmt.Errorf("found multiple potential depfiles %q, %q", *depfile, outputPath)
- return
- } else {
- depfile = &outputPath
- }
- } else {
- outputPaths = append(outputPaths, outputPath)
- }
- }
- return
-}
-
-// expandTemplateContent substitutes the tokens in a template.
-func expandTemplateContent(actionEntry *analysis_v2_proto.Action) string {
- replacerString := make([]string, len(actionEntry.Substitutions)*2)
- for i, pair := range actionEntry.Substitutions {
- value := pair.Value
- if val, ok := templateActionOverriddenTokens[pair.Key]; ok {
- value = val
- }
- replacerString[i*2] = pair.Key
- replacerString[i*2+1] = value
- }
- replacer := strings.NewReplacer(replacerString...)
- return replacer.Replace(actionEntry.TemplateContent)
-}
-
-// \->\\, $->\$, `->\`, "->\", \n->\\n, '->'"'"'
-var commandLineArgumentReplacer = strings.NewReplacer(
- `\`, `\\`,
- `$`, `\$`,
- "`", "\\`",
- `"`, `\"`,
- "\n", "\\n",
- `'`, `'"'"'`,
-)
-
-func escapeCommandlineArgument(str string) string {
- return commandLineArgumentReplacer.Replace(str)
-}
-
-func (a *aqueryArtifactHandler) actionToBuildStatement(actionEntry *analysis_v2_proto.Action) (*BuildStatement, error) {
- switch actionEntry.Mnemonic {
- // Middleman actions are not handled like other actions; they are handled separately as a
- // preparatory step so that their inputs may be relayed to actions depending on middleman
- // artifacts.
- case middlemanMnemonic:
- return nil, nil
- // PythonZipper is bogus action returned by aquery, ignore it (b/236198693)
- case "PythonZipper":
- return nil, nil
- // Skip "Fail" actions, which are placeholder actions designed to always fail.
- case "Fail":
- return nil, nil
- case "BaselineCoverage":
- return nil, nil
- case "Symlink", "SolibSymlink", "ExecutableSymlink":
- return a.symlinkActionBuildStatement(actionEntry)
- case "TemplateExpand":
- if len(actionEntry.Arguments) < 1 {
- return a.templateExpandActionBuildStatement(actionEntry)
- }
- case "FileWrite", "SourceSymlinkManifest", "RepoMappingManifest":
- return a.fileWriteActionBuildStatement(actionEntry)
- case "SymlinkTree":
- return a.symlinkTreeActionBuildStatement(actionEntry)
- case "UnresolvedSymlink":
- return a.unresolvedSymlinkActionBuildStatement(actionEntry)
- }
-
- if len(actionEntry.Arguments) < 1 {
- return nil, errors.New("received action with no command")
- }
- return a.normalActionBuildStatement(actionEntry)
-
-}
-
-func expandPathFragment(id pathFragmentId, pathFragmentsMap map[pathFragmentId]*analysis_v2_proto.PathFragment) (string, error) {
- var labels []string
- currId := id
- // Only positive IDs are valid for path fragments. An ID of zero indicates a terminal node.
- for currId > 0 {
- currFragment, ok := pathFragmentsMap[currId]
- if !ok {
- return "", fmt.Errorf("undefined path fragment id %d", currId)
- }
- labels = append([]string{currFragment.Label}, labels...)
- parentId := pathFragmentId(currFragment.ParentId)
- if currId == parentId {
- return "", fmt.Errorf("fragment cannot refer to itself as parent %#v", currFragment)
- }
- currId = parentId
- }
- return filepath.Join(labels...), nil
-}
diff --git a/bazel/aquery_test.go b/bazel/aquery_test.go
deleted file mode 100644
index cbd2791..0000000
--- a/bazel/aquery_test.go
+++ /dev/null
@@ -1,1411 +0,0 @@
-// Copyright 2020 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package bazel
-
-import (
- "encoding/json"
- "fmt"
- "reflect"
- "sort"
- "testing"
-
- analysis_v2_proto "prebuilts/bazel/common/proto/analysis_v2"
-
- "github.com/google/blueprint/metrics"
- "google.golang.org/protobuf/proto"
-)
-
-func TestAqueryMultiArchGenrule(t *testing.T) {
- // This input string is retrieved from a real build of bionic-related genrules.
- const inputString = `
-{
- "Artifacts": [
- { "Id": 1, "path_fragment_id": 1 },
- { "Id": 2, "path_fragment_id": 6 },
- { "Id": 3, "path_fragment_id": 8 },
- { "Id": 4, "path_fragment_id": 12 },
- { "Id": 5, "path_fragment_id": 19 },
- { "Id": 6, "path_fragment_id": 20 },
- { "Id": 7, "path_fragment_id": 21 }],
- "Actions": [{
- "target_id": 1,
- "action_key": "ab53f6ecbdc2ee8cb8812613b63205464f1f5083f6dca87081a0a398c0f1ecf7",
- "Mnemonic": "Genrule",
- "configuration_id": 1,
- "Arguments": ["/bin/bash", "-c", "source ../bazel_tools/tools/genrule/genrule-setup.sh; ../sourceroot/bionic/libc/tools/gensyscalls.py arm ../sourceroot/bionic/libc/SYSCALLS.TXT \u003e bazel-out/sourceroot/k8-fastbuild/bin/bionic/libc/syscalls-arm.S"],
- "environment_variables": [{
- "Key": "PATH",
- "Value": "/bin:/usr/bin:/usr/local/bin"
- }],
- "input_dep_set_ids": [1],
- "output_ids": [4],
- "primary_output_id": 4
- }, {
- "target_id": 2,
- "action_key": "9f4309ce165dac458498cb92811c18b0b7919782cc37b82a42d2141b8cc90826",
- "Mnemonic": "Genrule",
- "configuration_id": 1,
- "Arguments": ["/bin/bash", "-c", "source ../bazel_tools/tools/genrule/genrule-setup.sh; ../sourceroot/bionic/libc/tools/gensyscalls.py x86 ../sourceroot/bionic/libc/SYSCALLS.TXT \u003e bazel-out/sourceroot/k8-fastbuild/bin/bionic/libc/syscalls-x86.S"],
- "environment_variables": [{
- "Key": "PATH",
- "Value": "/bin:/usr/bin:/usr/local/bin"
- }],
- "input_dep_set_ids": [2],
- "output_ids": [5],
- "primary_output_id": 5
- }, {
- "target_id": 3,
- "action_key": "50d6c586103ebeed3a218195540bcc30d329464eae36377eb82f8ce7c36ac342",
- "Mnemonic": "Genrule",
- "configuration_id": 1,
- "Arguments": ["/bin/bash", "-c", "source ../bazel_tools/tools/genrule/genrule-setup.sh; ../sourceroot/bionic/libc/tools/gensyscalls.py x86_64 ../sourceroot/bionic/libc/SYSCALLS.TXT \u003e bazel-out/sourceroot/k8-fastbuild/bin/bionic/libc/syscalls-x86_64.S"],
- "environment_variables": [{
- "Key": "PATH",
- "Value": "/bin:/usr/bin:/usr/local/bin"
- }],
- "input_dep_set_ids": [3],
- "output_ids": [6],
- "primary_output_id": 6
- }, {
- "target_id": 4,
- "action_key": "f30cbe442f5216f4223cf16a39112cad4ec56f31f49290d85cff587e48647ffa",
- "Mnemonic": "Genrule",
- "configuration_id": 1,
- "Arguments": ["/bin/bash", "-c", "source ../bazel_tools/tools/genrule/genrule-setup.sh; ../sourceroot/bionic/libc/tools/gensyscalls.py arm64 ../sourceroot/bionic/libc/SYSCALLS.TXT \u003e bazel-out/sourceroot/k8-fastbuild/bin/bionic/libc/syscalls-arm64.S"],
- "environment_variables": [{
- "Key": "PATH",
- "Value": "/bin:/usr/bin:/usr/local/bin"
- }],
- "input_dep_set_ids": [4],
- "output_ids": [7],
- "primary_output_id": 7
- }],
- "Targets": [
- { "Id": 1, "Label": "@sourceroot//bionic/libc:syscalls-arm", "rule_class_id": 1 },
- { "Id": 2, "Label": "@sourceroot//bionic/libc:syscalls-x86", "rule_class_id": 1 },
- { "Id": 3, "Label": "@sourceroot//bionic/libc:syscalls-x86_64", "rule_class_id": 1 },
- { "Id": 4, "Label": "@sourceroot//bionic/libc:syscalls-arm64", "rule_class_id": 1 }],
- "dep_set_of_files": [
- { "Id": 1, "direct_artifact_ids": [1, 2, 3] },
- { "Id": 2, "direct_artifact_ids": [1, 2, 3] },
- { "Id": 3, "direct_artifact_ids": [1, 2, 3] },
- { "Id": 4, "direct_artifact_ids": [1, 2, 3] }],
- "Configuration": [{
- "Id": 1,
- "Mnemonic": "k8-fastbuild",
- "platform_name": "k8",
- "Checksum": "485c362832c178e367d972177f68e69e0981e51e67ef1c160944473db53fe046"
- }],
- "rule_classes": [{ "Id": 1, "Name": "genrule"}],
- "path_fragments": [
- { "Id": 5, "Label": ".." },
- { "Id": 4, "Label": "sourceroot", "parent_id": 5 },
- { "Id": 3, "Label": "bionic", "parent_id": 4 },
- { "Id": 2, "Label": "libc", "parent_id": 3 },
- { "Id": 1, "Label": "SYSCALLS.TXT", "parent_id": 2 },
- { "Id": 7, "Label": "tools", "parent_id": 2 },
- { "Id": 6, "Label": "gensyscalls.py", "parent_id": 7 },
- { "Id": 11, "Label": "bazel_tools", "parent_id": 5 },
- { "Id": 10, "Label": "tools", "parent_id": 11 },
- { "Id": 9, "Label": "genrule", "parent_id": 10 },
- { "Id": 8, "Label": "genrule-setup.sh", "parent_id": 9 },
- { "Id": 18, "Label": "bazel-out" },
- { "Id": 17, "Label": "sourceroot", "parent_id": 18 },
- { "Id": 16, "Label": "k8-fastbuild", "parent_id": 17 },
- { "Id": 15, "Label": "bin", "parent_id": 16 },
- { "Id": 14, "Label": "bionic", "parent_id": 15 },
- { "Id": 13, "Label": "libc", "parent_id": 14 },
- { "Id": 12, "Label": "syscalls-arm.S", "parent_id": 13 },
- { "Id": 19, "Label": "syscalls-x86.S", "parent_id": 13 },
- { "Id": 20, "Label": "syscalls-x86_64.S", "parent_id": 13 },
- { "Id": 21, "Label": "syscalls-arm64.S", "parent_id": 13 }]
-}
-`
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- actualbuildStatements, actualDepsets, _ := AqueryBuildStatements(data, &metrics.EventHandler{})
- var expectedBuildStatements []*BuildStatement
- for _, arch := range []string{"arm", "arm64", "x86", "x86_64"} {
- expectedBuildStatements = append(expectedBuildStatements,
- &BuildStatement{
- Command: fmt.Sprintf(
- "/bin/bash -c 'source ../bazel_tools/tools/genrule/genrule-setup.sh; ../sourceroot/bionic/libc/tools/gensyscalls.py %s ../sourceroot/bionic/libc/SYSCALLS.TXT > bazel-out/sourceroot/k8-fastbuild/bin/bionic/libc/syscalls-%s.S'",
- arch, arch),
- OutputPaths: []string{
- fmt.Sprintf("bazel-out/sourceroot/k8-fastbuild/bin/bionic/libc/syscalls-%s.S", arch),
- },
- Env: []*analysis_v2_proto.KeyValuePair{
- {Key: "PATH", Value: "/bin:/usr/bin:/usr/local/bin"},
- },
- Mnemonic: "Genrule",
- })
- }
- assertBuildStatements(t, expectedBuildStatements, actualbuildStatements)
-
- expectedFlattenedInputs := []string{
- "../sourceroot/bionic/libc/SYSCALLS.TXT",
- "../sourceroot/bionic/libc/tools/gensyscalls.py",
- }
- // In this example, each depset should have the same expected inputs.
- for _, actualDepset := range actualDepsets {
- actualFlattenedInputs := flattenDepsets([]string{actualDepset.ContentHash}, actualDepsets)
- if !reflect.DeepEqual(actualFlattenedInputs, expectedFlattenedInputs) {
- t.Errorf("Expected flattened inputs %v, but got %v", expectedFlattenedInputs, actualFlattenedInputs)
- }
- }
-}
-
-func TestInvalidOutputId(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 },
- { "id": 2, "path_fragment_id": 2 }],
- "actions": [{
- "target_id": 1,
- "action_key": "action_x",
- "mnemonic": "X",
- "arguments": ["touch", "foo"],
- "input_dep_set_ids": [1],
- "output_ids": [3],
- "primary_output_id": 3
- }],
- "dep_set_of_files": [
- { "id": 1, "direct_artifact_ids": [1, 2] }],
- "path_fragments": [
- { "id": 1, "label": "one" },
- { "id": 2, "label": "two" }]
-}`
-
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- _, _, err = AqueryBuildStatements(data, &metrics.EventHandler{})
- assertError(t, err, "undefined outputId 3: [X] []")
-}
-
-func TestInvalidInputDepsetIdFromAction(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 },
- { "id": 2, "path_fragment_id": 2 }],
- "actions": [{
- "target_id": 1,
- "action_key": "action_x",
- "mnemonic": "X",
- "arguments": ["touch", "foo"],
- "input_dep_set_ids": [2],
- "output_ids": [1],
- "primary_output_id": 1
- }],
- "targets": [{
- "id": 1,
- "label": "target_x"
- }],
- "dep_set_of_files": [
- { "id": 1, "direct_artifact_ids": [1, 2] }],
- "path_fragments": [
- { "id": 1, "label": "one" },
- { "id": 2, "label": "two" }]
-}`
-
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- _, _, err = AqueryBuildStatements(data, &metrics.EventHandler{})
- assertError(t, err, "undefined (not even empty) input depsetId 2: [X] [target_x]")
-}
-
-func TestInvalidInputDepsetIdFromDepset(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 },
- { "id": 2, "path_fragment_id": 2 }],
- "actions": [{
- "target_id": 1,
- "action_key": "x",
- "mnemonic": "x",
- "arguments": ["touch", "foo"],
- "input_dep_set_ids": [1],
- "output_ids": [1],
- "primary_output_id": 1
- }],
- "dep_set_of_files": [
- { "id": 1, "direct_artifact_ids": [1, 2], "transitive_dep_set_ids": [42] }],
- "path_fragments": [
- { "id": 1, "label": "one"},
- { "id": 2, "label": "two" }]
-}`
-
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- _, _, err = AqueryBuildStatements(data, &metrics.EventHandler{})
- assertError(t, err, "undefined input depsetId 42 (referenced by depsetId 1)")
-}
-
-func TestInvalidInputArtifactId(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 },
- { "id": 2, "path_fragment_id": 2 }],
- "actions": [{
- "target_id": 1,
- "action_key": "x",
- "mnemonic": "x",
- "arguments": ["touch", "foo"],
- "input_dep_set_ids": [1],
- "output_ids": [1],
- "primary_output_id": 1
- }],
- "dep_set_of_files": [
- { "id": 1, "direct_artifact_ids": [1, 3] }],
- "path_fragments": [
- { "id": 1, "label": "one" },
- { "id": 2, "label": "two" }]
-}`
-
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- _, _, err = AqueryBuildStatements(data, &metrics.EventHandler{})
- assertError(t, err, "undefined input artifactId 3")
-}
-
-func TestInvalidPathFragmentId(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 },
- { "id": 2, "path_fragment_id": 2 }],
- "actions": [{
- "target_id": 1,
- "action_key": "x",
- "mnemonic": "x",
- "arguments": ["touch", "foo"],
- "input_dep_set_ids": [1],
- "output_ids": [1],
- "primary_output_id": 1
- }],
- "dep_set_of_files": [
- { "id": 1, "direct_artifact_ids": [1, 2] }],
- "path_fragments": [
- { "id": 1, "label": "one" },
- { "id": 2, "label": "two", "parent_id": 3 }]
-}`
-
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- _, _, err = AqueryBuildStatements(data, &metrics.EventHandler{})
- assertError(t, err, "undefined path fragment id 3")
-}
-
-func TestDepfiles(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 },
- { "id": 2, "path_fragment_id": 2 },
- { "id": 3, "path_fragment_id": 3 }],
- "actions": [{
- "target_Id": 1,
- "action_Key": "x",
- "mnemonic": "x",
- "arguments": ["touch", "foo"],
- "input_dep_set_ids": [1],
- "output_ids": [2, 3],
- "primary_output_id": 2
- }],
- "dep_set_of_files": [
- { "id": 1, "direct_Artifact_Ids": [1, 2, 3] }],
- "path_fragments": [
- { "id": 1, "label": "one" },
- { "id": 2, "label": "two" },
- { "id": 3, "label": "two.d" }]
-}`
-
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- actual, _, err := AqueryBuildStatements(data, &metrics.EventHandler{})
- if err != nil {
- t.Errorf("Unexpected error %q", err)
- return
- }
- if expected := 1; len(actual) != expected {
- t.Fatalf("Expected %d build statements, got %d", expected, len(actual))
- return
- }
-
- bs := actual[0]
- expectedDepfile := "two.d"
- if bs.Depfile == nil {
- t.Errorf("Expected depfile %q, but there was none found", expectedDepfile)
- } else if *bs.Depfile != expectedDepfile {
- t.Errorf("Expected depfile %q, but got %q", expectedDepfile, *bs.Depfile)
- }
-}
-
-func TestMultipleDepfiles(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 },
- { "id": 2, "path_fragment_id": 2 },
- { "id": 3, "path_fragment_id": 3 },
- { "id": 4, "path_fragment_id": 4 }],
- "actions": [{
- "target_id": 1,
- "action_key": "action_x",
- "mnemonic": "X",
- "arguments": ["touch", "foo"],
- "input_dep_set_ids": [1],
- "output_ids": [2,3,4],
- "primary_output_id": 2
- }],
- "dep_set_of_files": [{
- "id": 1,
- "direct_artifact_ids": [1, 2, 3, 4]
- }],
- "path_fragments": [
- { "id": 1, "label": "one" },
- { "id": 2, "label": "two" },
- { "id": 3, "label": "two.d" },
- { "id": 4, "label": "other.d" }]
-}`
-
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- _, _, err = AqueryBuildStatements(data, &metrics.EventHandler{})
- assertError(t, err, `found multiple potential depfiles "two.d", "other.d": [X] []`)
-}
-
-func TestTransitiveInputDepsets(t *testing.T) {
- // The input aquery for this test comes from a proof-of-concept starlark rule which registers
- // a single action with many inputs given via a deep depset.
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 },
- { "id": 2, "path_fragment_id": 7 },
- { "id": 3, "path_fragment_id": 8 },
- { "id": 4, "path_fragment_id": 9 },
- { "id": 5, "path_fragment_id": 10 },
- { "id": 6, "path_fragment_id": 11 },
- { "id": 7, "path_fragment_id": 12 },
- { "id": 8, "path_fragment_id": 13 },
- { "id": 9, "path_fragment_id": 14 },
- { "id": 10, "path_fragment_id": 15 },
- { "id": 11, "path_fragment_id": 16 },
- { "id": 12, "path_fragment_id": 17 },
- { "id": 13, "path_fragment_id": 18 },
- { "id": 14, "path_fragment_id": 19 },
- { "id": 15, "path_fragment_id": 20 },
- { "id": 16, "path_fragment_id": 21 },
- { "id": 17, "path_fragment_id": 22 },
- { "id": 18, "path_fragment_id": 23 },
- { "id": 19, "path_fragment_id": 24 },
- { "id": 20, "path_fragment_id": 25 },
- { "id": 21, "path_fragment_id": 26 }],
- "actions": [{
- "target_id": 1,
- "action_key": "3b826d17fadbbbcd8313e456b90ec47c078c438088891dd45b4adbcd8889dc50",
- "mnemonic": "Action",
- "configuration_id": 1,
- "arguments": ["/bin/bash", "-c", "touch bazel-out/sourceroot/k8-fastbuild/bin/testpkg/test_out"],
- "input_dep_set_ids": [1],
- "output_ids": [21],
- "primary_output_id": 21
- }],
- "dep_set_of_files": [
- { "id": 3, "direct_artifact_ids": [1, 2, 3, 4, 5] },
- { "id": 4, "direct_artifact_ids": [6, 7, 8, 9, 10] },
- { "id": 2, "transitive_dep_set_ids": [3, 4], "direct_artifact_ids": [11, 12, 13, 14, 15] },
- { "id": 5, "direct_artifact_ids": [16, 17, 18, 19] },
- { "id": 1, "transitive_dep_set_ids": [2, 5], "direct_artifact_ids": [20] }],
- "path_fragments": [
- { "id": 6, "label": "bazel-out" },
- { "id": 5, "label": "sourceroot", "parent_id": 6 },
- { "id": 4, "label": "k8-fastbuild", "parent_id": 5 },
- { "id": 3, "label": "bin", "parent_id": 4 },
- { "id": 2, "label": "testpkg", "parent_id": 3 },
- { "id": 1, "label": "test_1", "parent_id": 2 },
- { "id": 7, "label": "test_2", "parent_id": 2 },
- { "id": 8, "label": "test_3", "parent_id": 2 },
- { "id": 9, "label": "test_4", "parent_id": 2 },
- { "id": 10, "label": "test_5", "parent_id": 2 },
- { "id": 11, "label": "test_6", "parent_id": 2 },
- { "id": 12, "label": "test_7", "parent_id": 2 },
- { "id": 13, "label": "test_8", "parent_id": 2 },
- { "id": 14, "label": "test_9", "parent_id": 2 },
- { "id": 15, "label": "test_10", "parent_id": 2 },
- { "id": 16, "label": "test_11", "parent_id": 2 },
- { "id": 17, "label": "test_12", "parent_id": 2 },
- { "id": 18, "label": "test_13", "parent_id": 2 },
- { "id": 19, "label": "test_14", "parent_id": 2 },
- { "id": 20, "label": "test_15", "parent_id": 2 },
- { "id": 21, "label": "test_16", "parent_id": 2 },
- { "id": 22, "label": "test_17", "parent_id": 2 },
- { "id": 23, "label": "test_18", "parent_id": 2 },
- { "id": 24, "label": "test_19", "parent_id": 2 },
- { "id": 25, "label": "test_root", "parent_id": 2 },
- { "id": 26,"label": "test_out", "parent_id": 2 }]
-}`
-
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- actualbuildStatements, actualDepsets, _ := AqueryBuildStatements(data, &metrics.EventHandler{})
-
- expectedBuildStatements := []*BuildStatement{
- &BuildStatement{
- Command: "/bin/bash -c 'touch bazel-out/sourceroot/k8-fastbuild/bin/testpkg/test_out'",
- OutputPaths: []string{"bazel-out/sourceroot/k8-fastbuild/bin/testpkg/test_out"},
- Mnemonic: "Action",
- SymlinkPaths: []string{},
- },
- }
- assertBuildStatements(t, expectedBuildStatements, actualbuildStatements)
-
-	// Inputs for the action are test_{i} for i from 1 to 19, plus test_root. These inputs
- // are given via a deep depset, but the depset is flattened when returned as a
- // BuildStatement slice.
- var expectedFlattenedInputs []string
- for i := 1; i < 20; i++ {
- expectedFlattenedInputs = append(expectedFlattenedInputs, fmt.Sprintf("bazel-out/sourceroot/k8-fastbuild/bin/testpkg/test_%d", i))
- }
- expectedFlattenedInputs = append(expectedFlattenedInputs, "bazel-out/sourceroot/k8-fastbuild/bin/testpkg/test_root")
-
- actualDepsetHashes := actualbuildStatements[0].InputDepsetHashes
- actualFlattenedInputs := flattenDepsets(actualDepsetHashes, actualDepsets)
- if !reflect.DeepEqual(actualFlattenedInputs, expectedFlattenedInputs) {
- t.Errorf("Expected flattened inputs %v, but got %v", expectedFlattenedInputs, actualFlattenedInputs)
- }
-}
-
-func TestSymlinkTree(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 },
- { "id": 2, "path_fragment_id": 2 }],
- "actions": [{
- "target_id": 1,
- "action_key": "x",
- "mnemonic": "SymlinkTree",
- "configuration_id": 1,
- "input_dep_set_ids": [1],
- "output_ids": [2],
- "primary_output_id": 2,
- "execution_platform": "//build/bazel/platforms:linux_x86_64"
- }],
- "path_fragments": [
- { "id": 1, "label": "foo.manifest" },
- { "id": 2, "label": "foo.runfiles/MANIFEST" }],
- "dep_set_of_files": [
- { "id": 1, "direct_artifact_ids": [1] }]
-}
-`
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- actual, _, err := AqueryBuildStatements(data, &metrics.EventHandler{})
- if err != nil {
- t.Errorf("Unexpected error %q", err)
- return
- }
- assertBuildStatements(t, []*BuildStatement{
- &BuildStatement{
- Command: "",
- OutputPaths: []string{"foo.runfiles/MANIFEST"},
- Mnemonic: "SymlinkTree",
- InputPaths: []string{"foo.manifest"},
- SymlinkPaths: []string{},
- },
- }, actual)
-}
-
-func TestBazelToolsRemovalFromInputDepsets(t *testing.T) {
- const inputString = `{
- "artifacts": [
- { "id": 1, "path_fragment_id": 10 },
- { "id": 2, "path_fragment_id": 20 },
- { "id": 3, "path_fragment_id": 30 },
- { "id": 4, "path_fragment_id": 40 }],
- "dep_set_of_files": [{
- "id": 1111,
- "direct_artifact_ids": [3 , 4]
- }, {
- "id": 2222,
- "direct_artifact_ids": [3]
- }],
- "actions": [{
- "target_id": 100,
- "action_key": "x",
- "input_dep_set_ids": [1111, 2222],
- "mnemonic": "x",
- "arguments": ["bogus", "command"],
- "output_ids": [2],
- "primary_output_id": 1
- }],
- "path_fragments": [
- { "id": 10, "label": "input" },
- { "id": 20, "label": "output" },
- { "id": 30, "label": "dep1", "parent_id": 50 },
- { "id": 40, "label": "dep2", "parent_id": 60 },
- { "id": 50, "label": "bazel_tools", "parent_id": 60 },
- { "id": 60, "label": ".."}
- ]
-}`
- /* depsets
- 1111 2222
- / \ |
- ../dep2 ../bazel_tools/dep1
- */
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- actualBuildStatements, actualDepsets, _ := AqueryBuildStatements(data, &metrics.EventHandler{})
- if len(actualDepsets) != 1 {
- t.Errorf("expected 1 depset but found %#v", actualDepsets)
- return
- }
- dep2Found := false
- for _, dep := range flattenDepsets([]string{actualDepsets[0].ContentHash}, actualDepsets) {
- if dep == "../bazel_tools/dep1" {
- t.Errorf("dependency %s expected to be removed but still exists", dep)
- } else if dep == "../dep2" {
- dep2Found = true
- }
- }
- if !dep2Found {
- t.Errorf("dependency ../dep2 expected but not found")
- }
-
- expectedBuildStatement := &BuildStatement{
- Command: "bogus command",
- OutputPaths: []string{"output"},
- Mnemonic: "x",
- SymlinkPaths: []string{},
- }
- buildStatementFound := false
- for _, actualBuildStatement := range actualBuildStatements {
- if buildStatementEquals(actualBuildStatement, expectedBuildStatement) == "" {
- buildStatementFound = true
- break
- }
- }
- if !buildStatementFound {
- t.Errorf("expected but missing %#v in %#v", expectedBuildStatement, actualBuildStatements)
- return
- }
-}
-
-func TestBazelToolsRemovalFromTargets(t *testing.T) {
- const inputString = `{
- "artifacts": [{ "id": 1, "path_fragment_id": 10 }],
- "targets": [
- { "id": 100, "label": "targetX" },
- { "id": 200, "label": "@bazel_tools//tool_y" }
-],
- "actions": [{
- "target_id": 100,
- "action_key": "actionX",
- "arguments": ["bogus", "command"],
- "mnemonic" : "x",
- "output_ids": [1]
- }, {
- "target_id": 200,
- "action_key": "y"
- }],
- "path_fragments": [{ "id": 10, "label": "outputX"}]
-}`
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- actualBuildStatements, actualDepsets, _ := AqueryBuildStatements(data, &metrics.EventHandler{})
- if len(actualDepsets) != 0 {
-		t.Errorf("expected 0 depsets but found %#v", actualDepsets)
- return
- }
- expectedBuildStatement := &BuildStatement{
- Command: "bogus command",
- OutputPaths: []string{"outputX"},
- Mnemonic: "x",
- SymlinkPaths: []string{},
- }
- buildStatementFound := false
- for _, actualBuildStatement := range actualBuildStatements {
- if buildStatementEquals(actualBuildStatement, expectedBuildStatement) == "" {
- buildStatementFound = true
- break
- }
- }
- if !buildStatementFound {
- t.Errorf("expected but missing %#v in %#v build statements", expectedBuildStatement, len(actualBuildStatements))
- return
- }
-}
-
-func TestBazelToolsRemovalFromTransitiveInputDepsets(t *testing.T) {
- const inputString = `{
- "artifacts": [
- { "id": 1, "path_fragment_id": 10 },
- { "id": 2, "path_fragment_id": 20 },
- { "id": 3, "path_fragment_id": 30 }],
- "dep_set_of_files": [{
- "id": 1111,
- "transitive_dep_set_ids": [2222]
- }, {
- "id": 2222,
- "direct_artifact_ids": [3]
- }, {
- "id": 3333,
- "direct_artifact_ids": [3]
- }, {
- "id": 4444,
- "transitive_dep_set_ids": [3333]
- }],
- "actions": [{
- "target_id": 100,
- "action_key": "x",
- "input_dep_set_ids": [1111, 4444],
- "mnemonic": "x",
- "arguments": ["bogus", "command"],
- "output_ids": [2],
- "primary_output_id": 1
- }],
- "path_fragments": [
- { "id": 10, "label": "input" },
- { "id": 20, "label": "output" },
- { "id": 30, "label": "dep", "parent_id": 50 },
- { "id": 50, "label": "bazel_tools", "parent_id": 60 },
- { "id": 60, "label": ".."}
- ]
-}`
- /* depsets
- 1111 4444
- || ||
- 2222 3333
- | |
- ../bazel_tools/dep
- Note: in dep_set_of_files:
-	   1111 appears BEFORE its dependency 2222, while
-	   4444 appears AFTER its dependency 3333,
-	   and this test shows that the order doesn't affect empty depset pruning
- */
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- actualBuildStatements, actualDepsets, _ := AqueryBuildStatements(data, &metrics.EventHandler{})
- if len(actualDepsets) != 0 {
- t.Errorf("expected 0 depsets but found %#v", actualDepsets)
- return
- }
-
- expectedBuildStatement := &BuildStatement{
- Command: "bogus command",
- OutputPaths: []string{"output"},
- Mnemonic: "x",
- }
- buildStatementFound := false
- for _, actualBuildStatement := range actualBuildStatements {
- if buildStatementEquals(actualBuildStatement, expectedBuildStatement) == "" {
- buildStatementFound = true
- break
- }
- }
- if !buildStatementFound {
- t.Errorf("expected but missing %#v in %#v", expectedBuildStatement, actualBuildStatements)
- return
- }
-}
-
-func TestMiddlemenAction(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 },
- { "id": 2, "path_fragment_id": 2 },
- { "id": 3, "path_fragment_id": 3 },
- { "id": 4, "path_fragment_id": 4 },
- { "id": 5, "path_fragment_id": 5 },
- { "id": 6, "path_fragment_id": 6 }],
- "path_fragments": [
- { "id": 1, "label": "middleinput_one" },
- { "id": 2, "label": "middleinput_two" },
- { "id": 3, "label": "middleman_artifact" },
- { "id": 4, "label": "maininput_one" },
- { "id": 5, "label": "maininput_two" },
- { "id": 6, "label": "output" }],
- "dep_set_of_files": [
- { "id": 1, "direct_artifact_ids": [1, 2] },
- { "id": 2, "direct_artifact_ids": [3, 4, 5] }],
- "actions": [{
- "target_id": 1,
- "action_key": "x",
- "mnemonic": "Middleman",
- "arguments": ["touch", "foo"],
- "input_dep_set_ids": [1],
- "output_ids": [3],
- "primary_output_id": 3
- }, {
- "target_id": 2,
- "action_key": "y",
- "mnemonic": "Main action",
- "arguments": ["touch", "foo"],
- "input_dep_set_ids": [2],
- "output_ids": [6],
- "primary_output_id": 6
- }]
-}`
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- actualBuildStatements, actualDepsets, err := AqueryBuildStatements(data, &metrics.EventHandler{})
- if err != nil {
- t.Errorf("Unexpected error %q", err)
- return
- }
- if expected := 2; len(actualBuildStatements) != expected {
- t.Fatalf("Expected %d build statements, got %d %#v", expected, len(actualBuildStatements), actualBuildStatements)
- return
- }
-
- expectedDepsetFiles := [][]string{
- {"middleinput_one", "middleinput_two", "maininput_one", "maininput_two"},
- {"middleinput_one", "middleinput_two"},
- }
- assertFlattenedDepsets(t, actualDepsets, expectedDepsetFiles)
-
- bs := actualBuildStatements[0]
- if len(bs.InputPaths) > 0 {
- t.Errorf("Expected main action raw inputs to be empty, but got %q", bs.InputPaths)
- }
-
- expectedOutputs := []string{"output"}
- if !reflect.DeepEqual(bs.OutputPaths, expectedOutputs) {
- t.Errorf("Expected main action outputs %q, but got %q", expectedOutputs, bs.OutputPaths)
- }
-
- expectedFlattenedInputs := []string{"middleinput_one", "middleinput_two", "maininput_one", "maininput_two"}
- actualFlattenedInputs := flattenDepsets(bs.InputDepsetHashes, actualDepsets)
-
- if !reflect.DeepEqual(actualFlattenedInputs, expectedFlattenedInputs) {
- t.Errorf("Expected flattened inputs %v, but got %v", expectedFlattenedInputs, actualFlattenedInputs)
- }
-
- bs = actualBuildStatements[1]
- if bs != nil {
-		t.Errorf("Expected nil build statement for the skipped middleman action")
- }
-}
-
-// Returns the contents of given depsets in concatenated post order.
-func flattenDepsets(depsetHashesToFlatten []string, allDepsets []AqueryDepset) []string {
- depsetsByHash := map[string]AqueryDepset{}
- for _, depset := range allDepsets {
- depsetsByHash[depset.ContentHash] = depset
- }
- var result []string
- for _, depsetId := range depsetHashesToFlatten {
- result = append(result, flattenDepset(depsetId, depsetsByHash)...)
- }
- return result
-}
-
-// Returns the contents of a given depset in post order.
-func flattenDepset(depsetHashToFlatten string, allDepsets map[string]AqueryDepset) []string {
- depset := allDepsets[depsetHashToFlatten]
- var result []string
- for _, depsetId := range depset.TransitiveDepSetHashes {
- result = append(result, flattenDepset(depsetId, allDepsets)...)
- }
- result = append(result, depset.DirectArtifacts...)
- return result
-}
-
-func assertFlattenedDepsets(t *testing.T, actualDepsets []AqueryDepset, expectedDepsetFiles [][]string) {
- t.Helper()
- if len(actualDepsets) != len(expectedDepsetFiles) {
- t.Errorf("Expected %d depsets, but got %d depsets", len(expectedDepsetFiles), len(actualDepsets))
- }
- for i, actualDepset := range actualDepsets {
- actualFlattenedInputs := flattenDepsets([]string{actualDepset.ContentHash}, actualDepsets)
- if !reflect.DeepEqual(actualFlattenedInputs, expectedDepsetFiles[i]) {
- t.Errorf("Expected depset files: %v, but got %v", expectedDepsetFiles[i], actualFlattenedInputs)
- }
- }
-}
-
-func TestSimpleSymlink(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 3 },
- { "id": 2, "path_fragment_id": 5 }],
- "actions": [{
- "target_id": 1,
- "action_key": "x",
- "mnemonic": "Symlink",
- "input_dep_set_ids": [1],
- "output_ids": [2],
- "primary_output_id": 2
- }],
- "dep_set_of_files": [
- { "id": 1, "direct_artifact_ids": [1] }],
- "path_fragments": [
- { "id": 1, "label": "one" },
- { "id": 2, "label": "file_subdir", "parent_id": 1 },
- { "id": 3, "label": "file", "parent_id": 2 },
- { "id": 4, "label": "symlink_subdir", "parent_id": 1 },
- { "id": 5, "label": "symlink", "parent_id": 4 }]
-}`
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- actual, _, err := AqueryBuildStatements(data, &metrics.EventHandler{})
-
- if err != nil {
- t.Errorf("Unexpected error %q", err)
- return
- }
-
- expectedBuildStatements := []*BuildStatement{
- &BuildStatement{
- Command: "mkdir -p one/symlink_subdir && " +
- "rm -f one/symlink_subdir/symlink && " +
- "ln -sf $PWD/one/file_subdir/file one/symlink_subdir/symlink",
- InputPaths: []string{"one/file_subdir/file"},
- OutputPaths: []string{"one/symlink_subdir/symlink"},
- SymlinkPaths: []string{"one/symlink_subdir/symlink"},
- Mnemonic: "Symlink",
- },
- }
- assertBuildStatements(t, actual, expectedBuildStatements)
-}
-
-func TestSymlinkQuotesPaths(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 3 },
- { "id": 2, "path_fragment_id": 5 }],
- "actions": [{
- "target_id": 1,
- "action_key": "x",
- "mnemonic": "SolibSymlink",
- "input_dep_set_ids": [1],
- "output_ids": [2],
- "primary_output_id": 2
- }],
- "dep_set_of_files": [
- { "id": 1, "direct_artifact_ids": [1] }],
- "path_fragments": [
- { "id": 1, "label": "one" },
- { "id": 2, "label": "file subdir", "parent_id": 1 },
- { "id": 3, "label": "file", "parent_id": 2 },
- { "id": 4, "label": "symlink subdir", "parent_id": 1 },
- { "id": 5, "label": "symlink", "parent_id": 4 }]
-}`
-
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- actual, _, err := AqueryBuildStatements(data, &metrics.EventHandler{})
- if err != nil {
- t.Errorf("Unexpected error %q", err)
- return
- }
-
- expectedBuildStatements := []*BuildStatement{
- &BuildStatement{
- Command: "mkdir -p 'one/symlink subdir' && " +
- "rm -f 'one/symlink subdir/symlink' && " +
- "ln -sf $PWD/'one/file subdir/file' 'one/symlink subdir/symlink'",
- InputPaths: []string{"one/file subdir/file"},
- OutputPaths: []string{"one/symlink subdir/symlink"},
- SymlinkPaths: []string{"one/symlink subdir/symlink"},
- Mnemonic: "SolibSymlink",
- },
- }
- assertBuildStatements(t, expectedBuildStatements, actual)
-}
-
-func TestSymlinkMultipleInputs(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 },
- { "id": 2, "path_fragment_id": 2 },
- { "id": 3, "path_fragment_id": 3 }],
- "actions": [{
- "target_id": 1,
- "action_key": "action_x",
- "mnemonic": "Symlink",
- "input_dep_set_ids": [1],
- "output_ids": [3],
- "primary_output_id": 3
- }],
- "dep_set_of_files": [{ "id": 1, "direct_artifact_ids": [1,2] }],
- "path_fragments": [
- { "id": 1, "label": "file" },
- { "id": 2, "label": "other_file" },
- { "id": 3, "label": "symlink" }]
-}`
-
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- _, _, err = AqueryBuildStatements(data, &metrics.EventHandler{})
- assertError(t, err, `Expect 1 input and 1 output to symlink action, got: input ["file" "other_file"], output ["symlink"]: [Symlink] []`)
-}
-
-func TestSymlinkMultipleOutputs(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 },
- { "id": 3, "path_fragment_id": 3 }],
- "actions": [{
- "target_id": 1,
- "action_key": "x",
- "mnemonic": "Symlink",
- "input_dep_set_ids": [1],
- "output_ids": [2,3],
- "primary_output_id": 2
- }],
- "dep_set_of_files": [
- { "id": 1, "direct_artifact_ids": [1] }],
- "path_fragments": [
- { "id": 1, "label": "file" },
- { "id": 2, "label": "symlink" },
- { "id": 3, "label": "other_symlink" }]
-}`
-
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- _, _, err = AqueryBuildStatements(data, &metrics.EventHandler{})
- assertError(t, err, "undefined outputId 2: [Symlink] []")
-}
-
-func TestTemplateExpandActionSubstitutions(t *testing.T) {
- const inputString = `
-{
- "artifacts": [{
- "id": 1,
- "path_fragment_id": 1
- }],
- "actions": [{
- "target_id": 1,
- "action_key": "x",
- "mnemonic": "TemplateExpand",
- "configuration_id": 1,
- "output_ids": [1],
- "primary_output_id": 1,
- "execution_platform": "//build/bazel/platforms:linux_x86_64",
- "template_content": "Test template substitutions: %token1%, %python_binary%",
- "substitutions": [
- { "key": "%token1%", "value": "abcd" },
- { "key": "%python_binary%", "value": "python3" }]
- }],
- "path_fragments": [
- { "id": 1, "label": "template_file" }]
-}`
-
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- actual, _, err := AqueryBuildStatements(data, &metrics.EventHandler{})
- if err != nil {
- t.Errorf("Unexpected error %q", err)
- return
- }
-
- expectedBuildStatements := []*BuildStatement{
- &BuildStatement{
- Command: "/bin/bash -c 'echo \"Test template substitutions: abcd, python3\" | sed \"s/\\\\\\\\n/\\\\n/g\" > template_file && " +
- "chmod a+x template_file'",
- OutputPaths: []string{"template_file"},
- Mnemonic: "TemplateExpand",
- SymlinkPaths: []string{},
- },
- }
- assertBuildStatements(t, expectedBuildStatements, actual)
-}
-
-func TestTemplateExpandActionNoOutput(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 }],
- "actions": [{
- "target_id": 1,
- "action_key": "x",
- "mnemonic": "TemplateExpand",
- "configuration_id": 1,
- "primary_output_id": 1,
- "execution_platform": "//build/bazel/platforms:linux_x86_64",
- "templateContent": "Test template substitutions: %token1%, %python_binary%",
- "substitutions": [
- { "key": "%token1%", "value": "abcd" },
- { "key": "%python_binary%", "value": "python3" }]
- }],
- "path_fragments": [
- { "id": 1, "label": "template_file" }]
-}`
-
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- _, _, err = AqueryBuildStatements(data, &metrics.EventHandler{})
- assertError(t, err, `Expect 1 output to template expand action, got: output []: [TemplateExpand] []`)
-}
-
-func TestFileWrite(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 }],
- "actions": [{
- "target_id": 1,
- "action_key": "x",
- "mnemonic": "FileWrite",
- "configuration_id": 1,
- "output_ids": [1],
- "primary_output_id": 1,
- "execution_platform": "//build/bazel/platforms:linux_x86_64",
- "file_contents": "file data\n"
- }],
- "path_fragments": [
- { "id": 1, "label": "foo.manifest" }]
-}
-`
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- actual, _, err := AqueryBuildStatements(data, &metrics.EventHandler{})
- if err != nil {
- t.Errorf("Unexpected error %q", err)
- return
- }
- assertBuildStatements(t, []*BuildStatement{
- &BuildStatement{
- OutputPaths: []string{"foo.manifest"},
- Mnemonic: "FileWrite",
- FileContents: "file data\n",
- SymlinkPaths: []string{},
- },
- }, actual)
-}
-
-func TestSourceSymlinkManifest(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 }],
- "actions": [{
- "target_id": 1,
- "action_key": "x",
- "mnemonic": "SourceSymlinkManifest",
- "configuration_id": 1,
- "output_ids": [1],
- "primary_output_id": 1,
- "execution_platform": "//build/bazel/platforms:linux_x86_64",
- "file_contents": "symlink target\n"
- }],
- "path_fragments": [
- { "id": 1, "label": "foo.manifest" }]
-}
-`
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- actual, _, err := AqueryBuildStatements(data, &metrics.EventHandler{})
- if err != nil {
- t.Errorf("Unexpected error %q", err)
- return
- }
- assertBuildStatements(t, []*BuildStatement{
- &BuildStatement{
- OutputPaths: []string{"foo.manifest"},
- Mnemonic: "SourceSymlinkManifest",
- SymlinkPaths: []string{},
- },
- }, actual)
-}
-
-func TestUnresolvedSymlink(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 }
- ],
- "actions": [{
- "target_id": 1,
- "action_key": "x",
- "mnemonic": "UnresolvedSymlink",
- "configuration_id": 1,
- "output_ids": [1],
- "primary_output_id": 1,
- "execution_platform": "//build/bazel/platforms:linux_x86_64",
- "unresolved_symlink_target": "symlink/target"
- }],
- "path_fragments": [
- { "id": 1, "label": "path/to/symlink" }
- ]
-}
-`
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- actual, _, err := AqueryBuildStatements(data, &metrics.EventHandler{})
- if err != nil {
- t.Errorf("Unexpected error %q", err)
- return
- }
- assertBuildStatements(t, []*BuildStatement{{
- Command: "mkdir -p path/to && rm -f path/to/symlink && ln -sf symlink/target path/to/symlink",
- OutputPaths: []string{"path/to/symlink"},
- Mnemonic: "UnresolvedSymlink",
- SymlinkPaths: []string{"path/to/symlink"},
- }}, actual)
-}
-
-func TestUnresolvedSymlinkBazelSandwich(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 }
- ],
- "actions": [{
- "target_id": 1,
- "action_key": "x",
- "mnemonic": "UnresolvedSymlink",
- "configuration_id": 1,
- "output_ids": [1],
- "primary_output_id": 1,
- "execution_platform": "//build/bazel/platforms:linux_x86_64",
- "unresolved_symlink_target": "bazel_sandwich:{\"target\":\"target/product/emulator_x86_64/system\"}"
- }],
- "path_fragments": [
- { "id": 1, "label": "path/to/symlink" }
- ]
-}
-`
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- actual, _, err := AqueryBuildStatements(data, &metrics.EventHandler{})
- if err != nil {
- t.Errorf("Unexpected error %q", err)
- return
- }
- assertBuildStatements(t, []*BuildStatement{{
- Command: "mkdir -p path/to && rm -f path/to/symlink && ln -sf {DOTDOTS_TO_OUTPUT_ROOT}../../target/product/emulator_x86_64/system path/to/symlink",
- OutputPaths: []string{"path/to/symlink"},
- Mnemonic: "UnresolvedSymlink",
- SymlinkPaths: []string{"path/to/symlink"},
- ImplicitDeps: []string{"target/product/emulator_x86_64/system"},
- }}, actual)
-}
-
-func TestUnresolvedSymlinkBazelSandwichWithAlternativeDeps(t *testing.T) {
- const inputString = `
-{
- "artifacts": [
- { "id": 1, "path_fragment_id": 1 }
- ],
- "actions": [{
- "target_id": 1,
- "action_key": "x",
- "mnemonic": "UnresolvedSymlink",
- "configuration_id": 1,
- "output_ids": [1],
- "primary_output_id": 1,
- "execution_platform": "//build/bazel/platforms:linux_x86_64",
- "unresolved_symlink_target": "bazel_sandwich:{\"depend_on_target\":false,\"implicit_deps\":[\"target/product/emulator_x86_64/obj/PACKAGING/systemimage_intermediates/staging_dir.stamp\"],\"target\":\"target/product/emulator_x86_64/system\"}"
- }],
- "path_fragments": [
- { "id": 1, "label": "path/to/symlink" }
- ]
-}
-`
- data, err := JsonToActionGraphContainer(inputString)
- if err != nil {
- t.Error(err)
- return
- }
- actual, _, err := AqueryBuildStatements(data, &metrics.EventHandler{})
- if err != nil {
- t.Errorf("Unexpected error %q", err)
- return
- }
- assertBuildStatements(t, []*BuildStatement{{
- Command: "mkdir -p path/to && rm -f path/to/symlink && ln -sf {DOTDOTS_TO_OUTPUT_ROOT}../../target/product/emulator_x86_64/system path/to/symlink",
- OutputPaths: []string{"path/to/symlink"},
- Mnemonic: "UnresolvedSymlink",
- SymlinkPaths: []string{"path/to/symlink"},
- // Note that the target of the symlink, target/product/emulator_x86_64/system, is not listed here
- ImplicitDeps: []string{"target/product/emulator_x86_64/obj/PACKAGING/systemimage_intermediates/staging_dir.stamp"},
- }}, actual)
-}
-
-func assertError(t *testing.T, err error, expected string) {
- t.Helper()
- if err == nil {
- t.Errorf("expected error '%s', but got no error", expected)
- } else if err.Error() != expected {
- t.Errorf("expected error:\n\t'%s', but got:\n\t'%s'", expected, err.Error())
- }
-}
-
-// Asserts that the given actual build statements match the given expected build statements.
-// Build statement equivalence is determined using buildStatementEquals.
-func assertBuildStatements(t *testing.T, expected []*BuildStatement, actual []*BuildStatement) {
- t.Helper()
- if len(expected) != len(actual) {
- t.Errorf("expected %d build statements, but got %d,\n expected: %#v,\n actual: %#v",
- len(expected), len(actual), expected, actual)
- return
- }
- type compareFn = func(i int, j int) bool
- byCommand := func(slice []*BuildStatement) compareFn {
- return func(i int, j int) bool {
- if slice[i] == nil {
- return false
- } else if slice[j] == nil {
- return false
- }
- return slice[i].Command < slice[j].Command
- }
- }
- sort.SliceStable(expected, byCommand(expected))
- sort.SliceStable(actual, byCommand(actual))
- for i, actualStatement := range actual {
- expectedStatement := expected[i]
- if differingField := buildStatementEquals(actualStatement, expectedStatement); differingField != "" {
- t.Errorf("%s differs\nunexpected build statement %#v.\nexpected: %#v",
- differingField, actualStatement, expectedStatement)
- return
- }
- }
-}
-
-func buildStatementEquals(first *BuildStatement, second *BuildStatement) string {
- if (first == nil) != (second == nil) {
- return "Nil"
- }
- if first.Mnemonic != second.Mnemonic {
- return "Mnemonic"
- }
- if first.Command != second.Command {
- return "Command"
- }
- // Ordering is significant for environment variables.
- if !reflect.DeepEqual(first.Env, second.Env) {
- return "Env"
- }
- // Ordering is irrelevant for input and output paths, so compare sets.
- if !reflect.DeepEqual(sortedStrings(first.InputPaths), sortedStrings(second.InputPaths)) {
- return "InputPaths"
- }
- if !reflect.DeepEqual(sortedStrings(first.OutputPaths), sortedStrings(second.OutputPaths)) {
- return "OutputPaths"
- }
- if !reflect.DeepEqual(sortedStrings(first.SymlinkPaths), sortedStrings(second.SymlinkPaths)) {
- return "SymlinkPaths"
- }
- if !reflect.DeepEqual(sortedStrings(first.ImplicitDeps), sortedStrings(second.ImplicitDeps)) {
- return "ImplicitDeps"
- }
-	// Depfile is a *string, so compare the pointed-to values rather than the pointers.
-	if (first.Depfile == nil) != (second.Depfile == nil) ||
-		(first.Depfile != nil && *first.Depfile != *second.Depfile) {
-		return "Depfile"
-	}
- return ""
-}
-
-func sortedStrings(stringSlice []string) []string {
- sorted := make([]string, len(stringSlice))
- copy(sorted, stringSlice)
- sort.Strings(sorted)
- return sorted
-}
-
-// JsonToActionGraphContainer converts an aquery JSON string into a serialized
-// ActionGraphContainer proto.
-func JsonToActionGraphContainer(inputString string) ([]byte, error) {
- var aqueryProtoResult analysis_v2_proto.ActionGraphContainer
- err := json.Unmarshal([]byte(inputString), &aqueryProtoResult)
- if err != nil {
- return []byte(""), err
- }
-	data, err := proto.Marshal(&aqueryProtoResult)
-	return data, err
-}
diff --git a/bazel/bazel_proxy.go b/bazel/bazel_proxy.go
deleted file mode 100644
index 229818d..0000000
--- a/bazel/bazel_proxy.go
+++ /dev/null
@@ -1,237 +0,0 @@
-// Copyright 2023 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package bazel
-
-import (
- "bytes"
- "encoding/gob"
- "fmt"
- "net"
- os_lib "os"
- "os/exec"
- "path/filepath"
- "strings"
- "time"
-)
-
-// Logs events of ProxyServer.
-type ServerLogger interface {
- Fatal(v ...interface{})
- Fatalf(format string, v ...interface{})
- Println(v ...interface{})
-}
-
-// CmdRequest is a request to the Bazel Proxy server.
-type CmdRequest struct {
- // Args to the Bazel command.
- Argv []string
- // Environment variables to pass to the Bazel invocation. Strings should be of
- // the form "KEY=VALUE".
- Env []string
-}
-
-// CmdResponse is a response from the Bazel Proxy server.
-type CmdResponse struct {
- Stdout string
- Stderr string
- ErrorString string
-}
-
-// ProxyClient is a client which can issue Bazel commands to the Bazel
-// proxy server. Requests are issued (and responses received) via a unix socket.
-// See ProxyServer for more details.
-type ProxyClient struct {
- outDir string
-}
-
-// ProxyServer is a server which runs as a background goroutine. Each
-// request to the server describes a Bazel command which the server should run.
-// The server then issues the Bazel command, and returns a response describing
-// the stdout/stderr of the command.
-// Client-server communication is done via a unix socket under the output
-// directory.
-// The server is intended to circumvent sandboxing for subprocesses of the
-// build. The build orchestrator (soong_ui) can launch a server to exist outside
-// of sandboxing, and sandboxed processes (such as soong_build) can issue
-// bazel commands through this socket tunnel. This allows a sandboxed process
-// to issue bazel requests to a bazel that resides outside of sandbox. This
-// is particularly useful to maintain a persistent Bazel server which lives
-// past the duration of a single build.
-// The ProxyServer will only live as long as soong_ui does; the
-// underlying Bazel server will live past the duration of the build.
-type ProxyServer struct {
- logger ServerLogger
- outDir string
- workspaceDir string
- bazeliskVersion string
- // The server goroutine will listen on this channel and stop handling requests
- // once it is written to.
- done chan struct{}
-}
-
-// NewProxyClient is a constructor for a ProxyClient.
-func NewProxyClient(outDir string) *ProxyClient {
- return &ProxyClient{
- outDir: outDir,
- }
-}
-
-func unixSocketPath(outDir string) string {
- return filepath.Join(outDir, "bazelsocket.sock")
-}
-
-// IssueCommand issues a request to the Bazel Proxy Server to issue a Bazel
-// request. Returns a response describing the output from the Bazel process
-// (if the Bazel process had an error, then the response will include an error).
-// Returns an error if there was an issue with the connection to the Bazel Proxy
-// server.
-func (b *ProxyClient) IssueCommand(req CmdRequest) (CmdResponse, error) {
- var resp CmdResponse
- var err error
-	// Use a 1 second timeout when dialing. This is chosen to be a relatively
- // short timeout, because the proxy server should accept requests quite
- // quickly.
- d := net.Dialer{Timeout: 1 * time.Second}
- var conn net.Conn
- conn, err = d.Dial("unix", unixSocketPath(b.outDir))
- if err != nil {
- return resp, err
- }
- defer conn.Close()
-
- enc := gob.NewEncoder(conn)
- if err = enc.Encode(req); err != nil {
- return resp, err
- }
- dec := gob.NewDecoder(conn)
- err = dec.Decode(&resp)
- return resp, err
-}
-
-// NewProxyServer is a constructor for a ProxyServer.
-func NewProxyServer(logger ServerLogger, outDir string, workspaceDir string, bazeliskVersion string) *ProxyServer {
- if len(bazeliskVersion) > 0 {
- logger.Println("** Using Bazelisk for this build, due to env var USE_BAZEL_VERSION=" + bazeliskVersion + " **")
- }
-
- return &ProxyServer{
- logger: logger,
- outDir: outDir,
- workspaceDir: workspaceDir,
- done: make(chan struct{}),
- bazeliskVersion: bazeliskVersion,
- }
-}
-
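-// ExecBazel runs the given Bazel binary in workspaceDir with the requested
-// arguments and environment, and returns the command's stdout, stderr, and
-// any error from running it.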
-func ExecBazel(bazelPath string, workspaceDir string, request CmdRequest) (stdout []byte, stderr []byte, cmdErr error) {
- bazelCmd := exec.Command(bazelPath, request.Argv...)
- bazelCmd.Dir = workspaceDir
- bazelCmd.Env = request.Env
-
- stderrBuffer := &bytes.Buffer{}
- bazelCmd.Stderr = stderrBuffer
-
- if output, err := bazelCmd.Output(); err != nil {
- cmdErr = fmt.Errorf("bazel command failed: %s\n---command---\n%s\n---env---\n%s\n---stderr---\n%s---",
- err, bazelCmd, strings.Join(bazelCmd.Env, "\n"), stderrBuffer)
- } else {
- stdout = output
- }
- stderr = stderrBuffer.Bytes()
- return
-}
-
-func (b *ProxyServer) handleRequest(conn net.Conn) error {
- defer conn.Close()
-
- dec := gob.NewDecoder(conn)
- var req CmdRequest
- if err := dec.Decode(&req); err != nil {
- return fmt.Errorf("Error decoding request: %s", err)
- }
-
- if len(b.bazeliskVersion) > 0 {
- req.Env = append(req.Env, "USE_BAZEL_VERSION="+b.bazeliskVersion)
- }
- stdout, stderr, cmdErr := ExecBazel("./build/bazel/bin/bazel", b.workspaceDir, req)
- errorString := ""
- if cmdErr != nil {
- errorString = cmdErr.Error()
- }
-
- resp := CmdResponse{string(stdout), string(stderr), errorString}
- enc := gob.NewEncoder(conn)
- if err := enc.Encode(&resp); err != nil {
- return fmt.Errorf("Error encoding response: %s", err)
- }
- return nil
-}
-
-func (b *ProxyServer) listenUntilClosed(listener net.Listener) error {
- for {
- // Check for connections every 1 second. This is a blocking operation, so
- // if the server is closed, the goroutine will not fully close until this
-		// deadline is reached. Thus, this deadline is short (but not so short
-		// that the routine churns).
- listener.(*net.UnixListener).SetDeadline(time.Now().Add(time.Second))
- conn, err := listener.Accept()
-
- select {
- case <-b.done:
- return nil
- default:
- }
-
- if err != nil {
- if opErr, ok := err.(*net.OpError); ok && opErr.Timeout() {
- // Timeout is normal and expected while waiting for client to establish
- // a connection.
- continue
- } else {
- b.logger.Fatalf("Listener error: %s", err)
- }
- }
-
- err = b.handleRequest(conn)
- if err != nil {
- b.logger.Fatal(err)
- }
- }
-}
-
-// Start initializes the server unix socket and (in a separate goroutine)
-// handles requests on the socket until the server is closed. Returns an error
-// if a failure occurs during initialization. Will log any post-initialization
-// errors to the server's logger.
-func (b *ProxyServer) Start() error {
- unixSocketAddr := unixSocketPath(b.outDir)
- if err := os_lib.RemoveAll(unixSocketAddr); err != nil {
- return fmt.Errorf("couldn't remove socket '%s': %s", unixSocketAddr, err)
- }
- listener, err := net.Listen("unix", unixSocketAddr)
-
- if err != nil {
- return fmt.Errorf("error listening on socket '%s': %s", unixSocketAddr, err)
- }
-
- go b.listenUntilClosed(listener)
- return nil
-}
-
-// Close shuts down the server. This will stop the server from listening for
-// additional requests.
-func (b *ProxyServer) Close() {
- b.done <- struct{}{}
-}
diff --git a/bazel/constants.go b/bazel/constants.go
deleted file mode 100644
index b10f256..0000000
--- a/bazel/constants.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package bazel
-
-type RunName string
-
-// Below is a list of bazel execution run names used throughout the
-// Platform Build systems. Each run name represents a unique key
-// to query the bazel metrics.
-const (
- // Perform a bazel build of the phony root to generate symlink forests
- // for dependencies of the bazel build.
- BazelBuildPhonyRootRunName = RunName("bazel-build-phony-root")
-
- // Perform aquery of the bazel build root to retrieve action information.
- AqueryBuildRootRunName = RunName("aquery-buildroot")
-
- // Perform cquery of the Bazel build root and its dependencies.
- CqueryBuildRootRunName = RunName("cquery-buildroot")
-
-	// Run bazel as a ninja executor
- BazelNinjaExecRunName = RunName("bazel-ninja-exec")
-
- SoongInjectionDirName = "soong_injection"
-
- GeneratedBazelFileWarning = "# GENERATED FOR BAZEL FROM SOONG. DO NOT EDIT."
-)
-
-// String returns the name of the run.
-func (c RunName) String() string {
- return string(c)
-}
diff --git a/bazel/cquery/Android.bp b/bazel/cquery/Android.bp
deleted file mode 100644
index 74f7721..0000000
--- a/bazel/cquery/Android.bp
+++ /dev/null
@@ -1,17 +0,0 @@
-package {
- default_applicable_licenses: ["Android-Apache-2.0"],
-}
-
-bootstrap_go_package {
- name: "soong-cquery",
- pkgPath: "android/soong/bazel/cquery",
- srcs: [
- "request_type.go",
- ],
- pluginFor: [
- "soong_build",
- ],
- testSrcs: [
- "request_type_test.go",
- ],
-}
diff --git a/bazel/cquery/request_type.go b/bazel/cquery/request_type.go
deleted file mode 100644
index 791c6bc..0000000
--- a/bazel/cquery/request_type.go
+++ /dev/null
@@ -1,426 +0,0 @@
-package cquery
-
-import (
- "encoding/json"
- "fmt"
- "strings"
-)
-
-var (
- GetOutputFiles = &getOutputFilesRequestType{}
- GetCcInfo = &getCcInfoType{}
- GetApexInfo = &getApexInfoType{}
- GetCcUnstrippedInfo = &getCcUnstrippedInfoType{}
- GetPrebuiltFileInfo = &getPrebuiltFileInfo{}
-)
-
-type CcAndroidMkInfo struct {
- LocalStaticLibs []string
- LocalWholeStaticLibs []string
- LocalSharedLibs []string
-}
-
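-// CcInfo holds the parsed results of a getCcInfo cquery request for a single
-// cc target.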
-type CcInfo struct {
- CcAndroidMkInfo
- OutputFiles []string
- CcObjectFiles []string
- CcSharedLibraryFiles []string
- CcStaticLibraryFiles []string
- Includes []string
- SystemIncludes []string
- Headers []string
- // Archives owned by the current target (not by its dependencies). These will
-	// be a subset of OutputFiles. (For static libraries, this will be equal to OutputFiles,
- // but general cc_library will also have dynamic libraries in output files).
- RootStaticArchives []string
- // Dynamic libraries (.so files) created by the current target. These will
-	// be a subset of OutputFiles. (For shared libraries, this will be equal to OutputFiles,
- // but general cc_library will also have dynamic libraries in output files).
- RootDynamicLibraries []string
- TidyFiles []string
- TocFile string
- UnstrippedOutput string
- AbiDiffFiles []string
-}
-
-type getOutputFilesRequestType struct{}
-
-// Name returns a string name for this request type. Such request type names must be unique,
-// and must only consist of alphanumeric characters.
-func (g getOutputFilesRequestType) Name() string {
- return "getOutputFiles"
-}
-
-// StarlarkFunctionBody returns a starlark function body to process this request type.
-// The returned string is the body of a Starlark function which obtains
-// all request-relevant information about a target and returns a string containing
-// this information.
-// The function should have the following properties:
-// - The arguments are `target` (a configured target) and `id_string` (the label + configuration).
-// - The return value must be a string.
-// - The function body should not be indented outside of its own scope.
-func (g getOutputFilesRequestType) StarlarkFunctionBody() string {
- return "return ', '.join([f.path for f in target.files.to_list()])"
-}
-
-// ParseResult returns a value obtained by parsing the result of the request's Starlark function.
-// The given rawString must correspond to the string output which was created by evaluating the
-// Starlark given in StarlarkFunctionBody.
-func (g getOutputFilesRequestType) ParseResult(rawString string) []string {
- return splitOrEmpty(rawString, ", ")
-}
-
-type getCcInfoType struct{}
-
-// Name returns a string name for this request type. Such request type names must be unique,
-// and must only consist of alphanumeric characters.
-func (g getCcInfoType) Name() string {
- return "getCcInfo"
-}
-
-// StarlarkFunctionBody returns a starlark function body to process this request type.
-// The returned string is the body of a Starlark function which obtains
-// all request-relevant information about a target and returns a string containing
-// this information.
-// The function should have the following properties:
-// - The arguments are `target` (a configured target) and `id_string` (the label + configuration).
-// - The return value must be a string.
-// - The function body should not be indented outside of its own scope.
-func (g getCcInfoType) StarlarkFunctionBody() string {
- return `
-outputFiles = [f.path for f in target.files.to_list()]
-p = providers(target)
-cc_info = p.get("CcInfo")
-if not cc_info:
- fail("%s did not provide CcInfo" % id_string)
-
-includes = cc_info.compilation_context.includes.to_list()
-system_includes = cc_info.compilation_context.system_includes.to_list()
-headers = [f.path for f in cc_info.compilation_context.headers.to_list()]
-
-ccObjectFiles = []
-staticLibraries = []
-rootStaticArchives = []
-linker_inputs = cc_info.linking_context.linker_inputs.to_list()
-
-static_info_tag = "//build/bazel/rules/cc:cc_library_static.bzl%CcStaticLibraryInfo"
-if static_info_tag in p:
- static_info = p[static_info_tag]
- ccObjectFiles = [f.path for f in static_info.objects]
- rootStaticArchives = [static_info.root_static_archive.path]
-else:
- for linker_input in linker_inputs:
- for library in linker_input.libraries:
- for object in library.objects:
- ccObjectFiles += [object.path]
- if library.static_library:
- staticLibraries.append(library.static_library.path)
- if linker_input.owner == target.label:
- rootStaticArchives.append(library.static_library.path)
-
-sharedLibraries = []
-rootSharedLibraries = []
-
-shared_info_tag = "//build/bazel/rules/cc:cc_library_shared.bzl%CcSharedLibraryOutputInfo"
-stubs_tag = "//build/bazel/rules/cc:cc_stub_library.bzl%CcStubInfo"
-unstripped_tag = "//build/bazel/rules/cc:stripped_cc_common.bzl%CcUnstrippedInfo"
-unstripped = ""
-
-if shared_info_tag in p:
- shared_info = p[shared_info_tag]
- path = shared_info.output_file.path
- sharedLibraries.append(path)
- rootSharedLibraries += [path]
- unstripped = path
- if unstripped_tag in p:
- unstripped = p[unstripped_tag].unstripped.path
-elif stubs_tag in p:
- rootSharedLibraries.extend([f.path for f in target.files.to_list()])
-else:
- for linker_input in linker_inputs:
- for library in linker_input.libraries:
- if library.dynamic_library:
- path = library.dynamic_library.path
- sharedLibraries.append(path)
- if linker_input.owner == target.label:
- rootSharedLibraries.append(path)
-
-toc_file = ""
-toc_file_tag = "//build/bazel/rules/cc:generate_toc.bzl%CcTocInfo"
-if toc_file_tag in p:
- toc_file = p[toc_file_tag].toc.path
-else:
- # NOTE: It's OK if there's no ToC, as Soong just uses it for optimization
- pass
-
-tidy_files = []
-clang_tidy_info = p.get("//build/bazel/rules/cc:clang_tidy.bzl%ClangTidyInfo")
-if clang_tidy_info:
- tidy_files = [v.path for v in clang_tidy_info.transitive_tidy_files.to_list()]
-
-abi_diff_files = []
-abi_diff_info = p.get("//build/bazel/rules/abi:abi_dump.bzl%AbiDiffInfo")
-if abi_diff_info:
- abi_diff_files = [f.path for f in abi_diff_info.diff_files.to_list()]
-
-local_static_libs = []
-local_whole_static_libs = []
-local_shared_libs = []
-androidmk_tag = "//build/bazel/rules/cc:cc_library_common.bzl%CcAndroidMkInfo"
-if androidmk_tag in p:
- androidmk_info = p[androidmk_tag]
- local_static_libs = androidmk_info.local_static_libs
- local_whole_static_libs = androidmk_info.local_whole_static_libs
- local_shared_libs = androidmk_info.local_shared_libs
-
-return json.encode({
- "OutputFiles": outputFiles,
- "CcObjectFiles": ccObjectFiles,
- "CcSharedLibraryFiles": sharedLibraries,
- "CcStaticLibraryFiles": staticLibraries,
- "Includes": includes,
- "SystemIncludes": system_includes,
- "Headers": headers,
- "RootStaticArchives": rootStaticArchives,
- "RootDynamicLibraries": rootSharedLibraries,
- "TidyFiles": [t for t in tidy_files],
- "TocFile": toc_file,
- "UnstrippedOutput": unstripped,
- "AbiDiffFiles": abi_diff_files,
- "LocalStaticLibs": [l for l in local_static_libs],
- "LocalWholeStaticLibs": [l for l in local_whole_static_libs],
- "LocalSharedLibs": [l for l in local_shared_libs],
-})`
-
-}
-
-// ParseResult returns a value obtained by parsing the result of the request's Starlark function.
-// The given rawString must correspond to the string output which was created by evaluating the
-// Starlark given in StarlarkFunctionBody.
-func (g getCcInfoType) ParseResult(rawString string) (CcInfo, error) {
- var ccInfo CcInfo
- if err := parseJson(rawString, &ccInfo); err != nil {
- return ccInfo, err
- }
- return ccInfo, nil
-}
-
-// Query Bazel for the artifacts generated by the apex modules.
-type getApexInfoType struct{}
-
-// Name returns a string name for this request type. Such request type names must be unique,
-// and must only consist of alphanumeric characters.
-func (g getApexInfoType) Name() string {
- return "getApexInfo"
-}
-
-// StarlarkFunctionBody returns a starlark function body to process this request type.
-// The returned string is the body of a Starlark function which obtains
-// all request-relevant information about a target and returns a string containing
-// this information. The function should have the following properties:
-// - The arguments are `target` (a configured target) and `id_string` (the label + configuration).
-// - The return value must be a string.
-// - The function body should not be indented outside of its own scope.
-func (g getApexInfoType) StarlarkFunctionBody() string {
- return `
-info = providers(target).get("//build/bazel/rules/apex:apex_info.bzl%ApexInfo")
-if not info:
- fail("%s did not provide ApexInfo" % id_string)
-bundle_key_info = info.bundle_key_info
-container_key_info = info.container_key_info
-
-signed_compressed_output = "" # no .capex if the apex is not compressible, cannot be None as it needs to be json encoded.
-if info.signed_compressed_output:
- signed_compressed_output = info.signed_compressed_output.path
-
-mk_info = providers(target).get("//build/bazel/rules/apex:apex_info.bzl%ApexMkInfo")
-if not mk_info:
- fail("%s did not provide ApexMkInfo" % id_string)
-
-tidy_files = []
-clang_tidy_info = providers(target).get("//build/bazel/rules/cc:clang_tidy.bzl%ClangTidyInfo")
-if clang_tidy_info:
- tidy_files = [v.path for v in clang_tidy_info.transitive_tidy_files.to_list()]
-
-return json.encode({
- "signed_output": info.signed_output.path,
- "signed_compressed_output": signed_compressed_output,
- "unsigned_output": info.unsigned_output.path,
- "provides_native_libs": [str(lib) for lib in info.provides_native_libs],
- "requires_native_libs": [str(lib) for lib in info.requires_native_libs],
- "bundle_key_info": [bundle_key_info.public_key.path, bundle_key_info.private_key.path],
- "container_key_info": [container_key_info.pem.path, container_key_info.pk8.path, container_key_info.key_name],
- "package_name": info.package_name,
- "symbols_used_by_apex": info.symbols_used_by_apex.path,
- "java_symbols_used_by_apex": info.java_symbols_used_by_apex.path,
- "backing_libs": info.backing_libs.path,
- "bundle_file": info.base_with_config_zip.path,
- "installed_files": info.installed_files.path,
- "make_modules_to_install": mk_info.make_modules_to_install,
- "files_info": mk_info.files_info,
- "tidy_files": [t for t in tidy_files],
-})`
-}
-
-type ApexInfo struct {
- // From the ApexInfo provider
- SignedOutput string `json:"signed_output"`
- SignedCompressedOutput string `json:"signed_compressed_output"`
- UnsignedOutput string `json:"unsigned_output"`
- ProvidesLibs []string `json:"provides_native_libs"`
- RequiresLibs []string `json:"requires_native_libs"`
- BundleKeyInfo []string `json:"bundle_key_info"`
- ContainerKeyInfo []string `json:"container_key_info"`
- PackageName string `json:"package_name"`
- SymbolsUsedByApex string `json:"symbols_used_by_apex"`
- JavaSymbolsUsedByApex string `json:"java_symbols_used_by_apex"`
- BackingLibs string `json:"backing_libs"`
- BundleFile string `json:"bundle_file"`
- InstalledFiles string `json:"installed_files"`
- TidyFiles []string `json:"tidy_files"`
-
- // From the ApexMkInfo provider
- MakeModulesToInstall []string `json:"make_modules_to_install"`
- PayloadFilesInfo []map[string]string `json:"files_info"`
-}
-
-// ParseResult returns a value obtained by parsing the result of the request's Starlark function.
-// The given rawString must correspond to the string output which was created by evaluating the
-// Starlark given in StarlarkFunctionBody.
-func (g getApexInfoType) ParseResult(rawString string) (ApexInfo, error) {
- var info ApexInfo
- err := parseJson(rawString, &info)
- return info, err
-}
-
-// getCcUnstrippedInfoType implements cqueryRequest interface. It handles the
-// interaction with `bazel cquery` to retrieve CcUnstrippedInfo provided
-// by the `cc_binary` and `cc_shared_library` rules.
-type getCcUnstrippedInfoType struct{}
-
-func (g getCcUnstrippedInfoType) Name() string {
- return "getCcUnstrippedInfo"
-}
-
-func (g getCcUnstrippedInfoType) StarlarkFunctionBody() string {
- return `
-p = providers(target)
-output_path = target.files.to_list()[0].path
-
-unstripped = output_path
-unstripped_tag = "//build/bazel/rules/cc:stripped_cc_common.bzl%CcUnstrippedInfo"
-if unstripped_tag in p:
- unstripped_info = p[unstripped_tag]
- unstripped = unstripped_info.unstripped[0].files.to_list()[0].path
-
-local_static_libs = []
-local_whole_static_libs = []
-local_shared_libs = []
-androidmk_tag = "//build/bazel/rules/cc:cc_library_common.bzl%CcAndroidMkInfo"
-if androidmk_tag in p:
- androidmk_info = p[androidmk_tag]
- local_static_libs = androidmk_info.local_static_libs
- local_whole_static_libs = androidmk_info.local_whole_static_libs
- local_shared_libs = androidmk_info.local_shared_libs
-
-tidy_files = []
-clang_tidy_info = p.get("//build/bazel/rules/cc:clang_tidy.bzl%ClangTidyInfo")
-if clang_tidy_info:
- tidy_files = [v.path for v in clang_tidy_info.transitive_tidy_files.to_list()]
-
-return json.encode({
- "OutputFile": output_path,
- "UnstrippedOutput": unstripped,
- "LocalStaticLibs": [l for l in local_static_libs],
- "LocalWholeStaticLibs": [l for l in local_whole_static_libs],
- "LocalSharedLibs": [l for l in local_shared_libs],
- "TidyFiles": [t for t in tidy_files],
-})
-`
-}
-
-// ParseResult returns a value obtained by parsing the result of the request's Starlark function.
-// The given rawString must correspond to the string output which was created by evaluating the
-// Starlark given in StarlarkFunctionBody.
-func (g getCcUnstrippedInfoType) ParseResult(rawString string) (CcUnstrippedInfo, error) {
- var info CcUnstrippedInfo
- err := parseJson(rawString, &info)
- return info, err
-}
-
-type CcUnstrippedInfo struct {
- CcAndroidMkInfo
- OutputFile string
- UnstrippedOutput string
- TidyFiles []string
-}
-
-// splitOrEmpty is a modification of strings.Split() that returns an empty list
-// if the given string is empty.
-func splitOrEmpty(s string, sep string) []string {
- if len(s) < 1 {
- return []string{}
- } else {
- return strings.Split(s, sep)
- }
-}
-
-// parseJson decodes json string into the fields of the receiver.
-// Unknown attribute name causes panic.
-func parseJson(jsonString string, info interface{}) error {
- decoder := json.NewDecoder(strings.NewReader(jsonString))
- decoder.DisallowUnknownFields() //useful to detect typos, e.g. in unit tests
- err := decoder.Decode(info)
- if err != nil {
- return fmt.Errorf("cannot parse cquery result '%s': %s", jsonString, err)
- }
- return nil
-}
-
-type getPrebuiltFileInfo struct{}
-
-// Name returns a string name for this request type. Such request type names must be unique,
-// and must only consist of alphanumeric characters.
-func (g getPrebuiltFileInfo) Name() string {
- return "getPrebuiltFileInfo"
-}
-
-// StarlarkFunctionBody returns a starlark function body to process this request type.
-// The returned string is the body of a Starlark function which obtains
-// all request-relevant information about a target and returns a string containing
-// this information.
-// The function should have the following properties:
-// - The arguments are `target` (a configured target) and `id_string` (the label + configuration).
-// - The return value must be a string.
-// - The function body should not be indented outside of its own scope.
-func (g getPrebuiltFileInfo) StarlarkFunctionBody() string {
- return `
-p = providers(target)
-prebuilt_file_info = p.get("//build/bazel/rules:prebuilt_file.bzl%PrebuiltFileInfo")
-if not prebuilt_file_info:
- fail("%s did not provide PrebuiltFileInfo" % id_string)
-
-return json.encode({
- "Src": prebuilt_file_info.src.path,
- "Dir": prebuilt_file_info.dir,
- "Filename": prebuilt_file_info.filename,
- "Installable": prebuilt_file_info.installable,
-})`
-}
-
-type PrebuiltFileInfo struct {
- // TODO: b/207489266 - Fully support all properties in prebuilt_file
- Src string
- Dir string
- Filename string
- Installable bool
-}
-
-// ParseResult returns a value obtained by parsing the result of the request's Starlark function.
-// The given rawString must correspond to the string output which was created by evaluating the
-// Starlark given in StarlarkFunctionBody.
-func (g getPrebuiltFileInfo) ParseResult(rawString string) (PrebuiltFileInfo, error) {
- var info PrebuiltFileInfo
- err := parseJson(rawString, &info)
- return info, err
-}
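
The removed ParseResult helpers all funneled through parseJson, whose DisallowUnknownFields call turned a mismatched provider field into a hard error rather than a silently-zero struct. Below is a minimal, standalone sketch of that decoding pattern; the ccInfoLite struct and its fields are hypothetical and exist only for illustration.

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// ccInfoLite is a hypothetical, trimmed-down stand-in for a cquery result struct.
type ccInfoLite struct {
	OutputFiles []string `json:"OutputFiles"`
	TocFile     string   `json:"TocFile"`
}

// parseStrict mirrors the removed parseJson helper: unknown fields are rejected
// so typos on either side of the Starlark/Go boundary fail loudly.
func parseStrict(raw string, out interface{}) error {
	dec := json.NewDecoder(strings.NewReader(raw))
	dec.DisallowUnknownFields()
	if err := dec.Decode(out); err != nil {
		return fmt.Errorf("cannot parse cquery result '%s': %s", raw, err)
	}
	return nil
}

func main() {
	var info ccInfoLite
	fmt.Println(parseStrict(`{"OutputFiles":["a.o"],"TocFile":"lib.so.toc"}`, &info)) // <nil>
	fmt.Println(parseStrict(`{"toc_file":"lib.so.toc"}`, &info))                      // error: unknown field "toc_file"
}
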
diff --git a/bazel/cquery/request_type_test.go b/bazel/cquery/request_type_test.go
deleted file mode 100644
index e772bb7..0000000
--- a/bazel/cquery/request_type_test.go
+++ /dev/null
@@ -1,281 +0,0 @@
-package cquery
-
-import (
- "encoding/json"
- "reflect"
- "strings"
- "testing"
-)
-
-func TestGetOutputFilesParseResults(t *testing.T) {
- t.Parallel()
- testCases := []struct {
- description string
- input string
- expectedOutput []string
- }{
- {
- description: "no result",
- input: "",
- expectedOutput: []string{},
- },
- {
- description: "one result",
- input: "test",
- expectedOutput: []string{"test"},
- },
- {
- description: "splits on comma with space",
- input: "foo, bar",
- expectedOutput: []string{"foo", "bar"},
- },
- }
- for _, tc := range testCases {
- t.Run(tc.description, func(t *testing.T) {
- actualOutput := GetOutputFiles.ParseResult(tc.input)
- if !reflect.DeepEqual(tc.expectedOutput, actualOutput) {
- t.Errorf("expected %#v != actual %#v", tc.expectedOutput, actualOutput)
- }
- })
- }
-}
-
-func TestGetCcInfoParseResults(t *testing.T) {
- t.Parallel()
- testCases := []struct {
- description string
- inputCcInfo CcInfo
- expectedOutput CcInfo
- }{
- {
- description: "no result",
- inputCcInfo: CcInfo{},
- expectedOutput: CcInfo{},
- },
- {
- description: "all items set",
- inputCcInfo: CcInfo{
- OutputFiles: []string{"out1", "out2"},
- CcObjectFiles: []string{"object1", "object2"},
- CcSharedLibraryFiles: []string{"shared_lib1", "shared_lib2"},
- CcStaticLibraryFiles: []string{"static_lib1", "static_lib2"},
- Includes: []string{".", "dir/subdir"},
- SystemIncludes: []string{"system/dir", "system/other/dir"},
- Headers: []string{"dir/subdir/hdr.h"},
- RootStaticArchives: []string{"rootstaticarchive1"},
- RootDynamicLibraries: []string{"rootdynamiclibrary1"},
- TocFile: "lib.so.toc",
- },
- expectedOutput: CcInfo{
- OutputFiles: []string{"out1", "out2"},
- CcObjectFiles: []string{"object1", "object2"},
- CcSharedLibraryFiles: []string{"shared_lib1", "shared_lib2"},
- CcStaticLibraryFiles: []string{"static_lib1", "static_lib2"},
- Includes: []string{".", "dir/subdir"},
- SystemIncludes: []string{"system/dir", "system/other/dir"},
- Headers: []string{"dir/subdir/hdr.h"},
- RootStaticArchives: []string{"rootstaticarchive1"},
- RootDynamicLibraries: []string{"rootdynamiclibrary1"},
- TocFile: "lib.so.toc",
- },
- },
- }
- for _, tc := range testCases {
- t.Run(tc.description, func(t *testing.T) {
- jsonInput, _ := json.Marshal(tc.inputCcInfo)
- actualOutput, err := GetCcInfo.ParseResult(string(jsonInput))
- if err != nil {
- t.Errorf("error parsing result: %q", err)
- } else if err == nil && !reflect.DeepEqual(tc.expectedOutput, actualOutput) {
- t.Errorf("expected %#v\n!= actual %#v", tc.expectedOutput, actualOutput)
- }
- })
- }
-}
-
-func TestGetCcInfoParseResultsError(t *testing.T) {
- t.Parallel()
- testCases := []struct {
- description string
- input string
- expectedError string
- }{
- {
- description: "not json",
- input: ``,
- expectedError: `cannot parse cquery result '': EOF`,
- },
- {
- description: "invalid field",
- input: `{
- "toc_file": "dir/file.so.toc"
-}`,
- expectedError: `json: unknown field "toc_file"`,
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.description, func(t *testing.T) {
- _, err := GetCcInfo.ParseResult(tc.input)
- if !strings.Contains(err.Error(), tc.expectedError) {
- t.Errorf("expected string %q in error message, got %q", tc.expectedError, err)
- }
- })
- }
-}
-
-func TestGetApexInfoParseResults(t *testing.T) {
- t.Parallel()
- testCases := []struct {
- description string
- input string
- expectedOutput ApexInfo
- }{
- {
- description: "no result",
- input: "{}",
- expectedOutput: ApexInfo{},
- },
- {
- description: "one result",
- input: `{
- "signed_output":"my.apex",
- "unsigned_output":"my.apex.unsigned",
- "requires_native_libs":["//bionic/libc:libc","//bionic/libdl:libdl"],
- "bundle_key_info":["foo.pem", "foo.privkey"],
- "container_key_info":["foo.x509.pem", "foo.pk8", "foo"],
- "package_name":"package.name",
- "symbols_used_by_apex": "path/to/my.apex_using.txt",
- "backing_libs":"path/to/backing.txt",
- "bundle_file": "dir/bundlefile.zip",
- "installed_files":"path/to/installed-files.txt",
- "provides_native_libs":[],
- "make_modules_to_install": ["foo","bar"]
-}`,
- expectedOutput: ApexInfo{
- // ApexInfo
- SignedOutput: "my.apex",
- UnsignedOutput: "my.apex.unsigned",
- RequiresLibs: []string{"//bionic/libc:libc", "//bionic/libdl:libdl"},
- ProvidesLibs: []string{},
- BundleKeyInfo: []string{"foo.pem", "foo.privkey"},
- ContainerKeyInfo: []string{"foo.x509.pem", "foo.pk8", "foo"},
- PackageName: "package.name",
- SymbolsUsedByApex: "path/to/my.apex_using.txt",
- BackingLibs: "path/to/backing.txt",
- BundleFile: "dir/bundlefile.zip",
- InstalledFiles: "path/to/installed-files.txt",
-
- // ApexMkInfo
- MakeModulesToInstall: []string{"foo", "bar"},
- },
- },
- }
- for _, tc := range testCases {
- t.Run(tc.description, func(t *testing.T) {
- actualOutput, err := GetApexInfo.ParseResult(tc.input)
- if err != nil {
- t.Errorf("Unexpected error %q", err)
- }
- if !reflect.DeepEqual(tc.expectedOutput, actualOutput) {
- t.Errorf("expected %#v != actual %#v", tc.expectedOutput, actualOutput)
- }
- })
- }
-}
-
-func TestGetApexInfoParseResultsError(t *testing.T) {
- t.Parallel()
- testCases := []struct {
- description string
- input string
- expectedError string
- }{
- {
- description: "not json",
- input: ``,
- expectedError: `cannot parse cquery result '': EOF`,
- },
- {
- description: "invalid field",
- input: `{
- "fake_field": "path/to/file"
-}`,
- expectedError: `json: unknown field "fake_field"`,
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.description, func(t *testing.T) {
- _, err := GetApexInfo.ParseResult(tc.input)
- if !strings.Contains(err.Error(), tc.expectedError) {
- t.Errorf("expected string %q in error message, got %q", tc.expectedError, err)
- }
- })
- }
-}
-
-func TestGetCcUnstrippedParseResults(t *testing.T) {
- t.Parallel()
- testCases := []struct {
- description string
- input string
- expectedOutput CcUnstrippedInfo
- }{
- {
- description: "no result",
- input: "{}",
- expectedOutput: CcUnstrippedInfo{},
- },
- {
- description: "one result",
- input: `{"OutputFile":"myapp", "UnstrippedOutput":"myapp_unstripped"}`,
- expectedOutput: CcUnstrippedInfo{
- OutputFile: "myapp",
- UnstrippedOutput: "myapp_unstripped",
- },
- },
- }
- for _, tc := range testCases {
- t.Run(tc.description, func(t *testing.T) {
- actualOutput, err := GetCcUnstrippedInfo.ParseResult(tc.input)
- if err != nil {
- t.Errorf("Unexpected error %q", err)
- }
- if !reflect.DeepEqual(tc.expectedOutput, actualOutput) {
- t.Errorf("expected %#v != actual %#v", tc.expectedOutput, actualOutput)
- }
- })
- }
-}
-
-func TestGetCcUnstrippedParseResultsErrors(t *testing.T) {
- t.Parallel()
- testCases := []struct {
- description string
- input string
- expectedError string
- }{
- {
- description: "not json",
- input: ``,
- expectedError: `cannot parse cquery result '': EOF`,
- },
- {
- description: "invalid field",
- input: `{
- "fake_field": "path/to/file"
-}`,
- expectedError: `json: unknown field "fake_field"`,
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.description, func(t *testing.T) {
- _, err := GetCcUnstrippedInfo.ParseResult(tc.input)
- if !strings.Contains(err.Error(), tc.expectedError) {
- t.Errorf("expected string %q in error message, got %q", tc.expectedError, err)
- }
- })
- }
-}
diff --git a/cc/cc.go b/cc/cc.go
index 2dfb23a..d8fe319 100644
--- a/cc/cc.go
+++ b/cc/cc.go
@@ -2121,6 +2121,43 @@
}
}
+
+ buildComplianceMetadataInfo(ctx, c, deps)
+}
+
+func buildComplianceMetadataInfo(ctx ModuleContext, c *Module, deps PathDeps) {
+ // Dump metadata that cannot be collected in android/compliance_metadata.go
+ complianceMetadataInfo := ctx.ComplianceMetadataInfo()
+ complianceMetadataInfo.SetStringValue(android.ComplianceMetadataProp.IS_STATIC_LIB, strconv.FormatBool(ctx.static()))
+ complianceMetadataInfo.SetStringValue(android.ComplianceMetadataProp.BUILT_FILES, c.outputFile.String())
+
+ // Static deps
+ staticDeps := ctx.GetDirectDepsWithTag(StaticDepTag(false))
+ staticDepNames := make([]string, 0, len(staticDeps))
+ for _, dep := range staticDeps {
+ staticDepNames = append(staticDepNames, dep.Name())
+ }
+
+ staticDepPaths := make([]string, 0, len(deps.StaticLibs))
+ for _, dep := range deps.StaticLibs {
+ staticDepPaths = append(staticDepPaths, dep.String())
+ }
+ complianceMetadataInfo.SetListValue(android.ComplianceMetadataProp.STATIC_DEPS, android.FirstUniqueStrings(staticDepNames))
+ complianceMetadataInfo.SetListValue(android.ComplianceMetadataProp.STATIC_DEP_FILES, android.FirstUniqueStrings(staticDepPaths))
+
+ // Whole static deps
+ wholeStaticDeps := ctx.GetDirectDepsWithTag(StaticDepTag(true))
+ wholeStaticDepNames := make([]string, 0, len(wholeStaticDeps))
+ for _, dep := range wholeStaticDeps {
+ wholeStaticDepNames = append(wholeStaticDepNames, dep.Name())
+ }
+
+ wholeStaticDepPaths := make([]string, 0, len(deps.WholeStaticLibs))
+ for _, dep := range deps.WholeStaticLibs {
+ wholeStaticDepPaths = append(wholeStaticDepPaths, dep.String())
+ }
+ complianceMetadataInfo.SetListValue(android.ComplianceMetadataProp.WHOLE_STATIC_DEPS, android.FirstUniqueStrings(wholeStaticDepNames))
+ complianceMetadataInfo.SetListValue(android.ComplianceMetadataProp.WHOLE_STATIC_DEP_FILES, android.FirstUniqueStrings(wholeStaticDepPaths))
}
func (c *Module) maybeUnhideFromMake() {
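
The cc hunk above is the shape every module type follows for this metadata: fetch the module's ComplianceMetadataInfo from the context, record scalars with SetStringValue and lists with SetListValue, and dedupe with FirstUniqueStrings. A condensed sketch follows, assuming ComplianceMetadataInfo is exposed on android.ModuleContext as the hunk implies; the recordStaticDeps helper and its parameters are hypothetical.

// Sketch only: records built-file and static-dependency metadata for a module.
func recordStaticDeps(ctx android.ModuleContext, built android.Path, deps []android.Module, depFiles android.Paths) {
	info := ctx.ComplianceMetadataInfo()
	info.SetStringValue(android.ComplianceMetadataProp.BUILT_FILES, built.String())

	names := make([]string, 0, len(deps))
	for _, d := range deps {
		names = append(names, d.Name())
	}
	paths := make([]string, 0, len(depFiles))
	for _, f := range depFiles {
		paths = append(paths, f.String())
	}
	info.SetListValue(android.ComplianceMetadataProp.STATIC_DEPS, android.FirstUniqueStrings(names))
	info.SetListValue(android.ComplianceMetadataProp.STATIC_DEP_FILES, android.FirstUniqueStrings(paths))
}
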
diff --git a/go.mod b/go.mod
index 13834fc..aa43066 100644
--- a/go.mod
+++ b/go.mod
@@ -5,6 +5,5 @@
require (
github.com/google/blueprint v0.0.0
google.golang.org/protobuf v0.0.0
- prebuilts/bazel/common/proto/analysis_v2 v0.0.0
go.starlark.net v0.0.0
)
diff --git a/go.work b/go.work
index 9a7e6db..46a135b 100644
--- a/go.work
+++ b/go.work
@@ -5,8 +5,6 @@
../../external/go-cmp
../../external/golang-protobuf
../../external/starlark-go
- ../../prebuilts/bazel/common/proto/analysis_v2
- ../../prebuilts/bazel/common/proto/build
../blueprint
)
@@ -15,7 +13,5 @@
github.com/google/blueprint v0.0.0 => ../blueprint
github.com/google/go-cmp v0.0.0 => ../../external/go-cmp
google.golang.org/protobuf v0.0.0 => ../../external/golang-protobuf
- prebuilts/bazel/common/proto/analysis_v2 v0.0.0 => ../../prebuilts/bazel/common/proto/analysis_v2
- prebuilts/bazel/common/proto/build v0.0.0 => ../../prebuilts/bazel/common/proto/build
go.starlark.net v0.0.0 => ../../external/starlark-go
)
diff --git a/java/dex.go b/java/dex.go
index 32546d9..c75e774 100644
--- a/java/dex.go
+++ b/java/dex.go
@@ -180,7 +180,7 @@
"$r8Template": &remoteexec.REParams{
Labels: map[string]string{"type": "compile", "compiler": "r8"},
Inputs: []string{"$implicits", "${config.R8Jar}"},
- OutputFiles: []string{"${outUsage}", "${outConfig}", "${outDict}", "${resourcesOutput}"},
+ OutputFiles: []string{"${outUsage}", "${outConfig}", "${outDict}", "${resourcesOutput}", "${outR8ArtProfile}"},
ExecStrategy: "${config.RER8ExecStrategy}",
ToolchainInputs: []string{"${config.JavaCmd}"},
Platform: map[string]string{remoteexec.PoolKey: "${config.REJavaPool}"},
@@ -200,7 +200,7 @@
Platform: map[string]string{remoteexec.PoolKey: "${config.REJavaPool}"},
},
}, []string{"outDir", "outDict", "outConfig", "outUsage", "outUsageZip", "outUsageDir",
- "r8Flags", "zipFlags", "mergeZipsFlags", "resourcesOutput"}, []string{"implicits"})
+ "r8Flags", "zipFlags", "mergeZipsFlags", "resourcesOutput", "outR8ArtProfile"}, []string{"implicits"})
func (d *dexer) dexCommonFlags(ctx android.ModuleContext,
dexParams *compileDexParams) (flags []string, deps android.Paths) {
@@ -463,13 +463,6 @@
proguardConfiguration,
}
r8Flags, r8Deps, r8ArtProfileOutputPath := d.r8Flags(ctx, dexParams)
- if r8ArtProfileOutputPath != nil {
- artProfileOutputPath = r8ArtProfileOutputPath
- implicitOutputs = append(
- implicitOutputs,
- artProfileOutputPath,
- )
- }
rule := r8
args := map[string]string{
"r8Flags": strings.Join(append(commonFlags, r8Flags...), " "),
@@ -482,6 +475,17 @@
"outDir": outDir.String(),
"mergeZipsFlags": mergeZipsFlags,
}
+ if r8ArtProfileOutputPath != nil {
+ artProfileOutputPath = r8ArtProfileOutputPath
+ implicitOutputs = append(
+ implicitOutputs,
+ artProfileOutputPath,
+ )
+ // Add the implicit R8 ART profile output to args so that the r8RE rule
+ // also declares this implicit output.
+ args["outR8ArtProfile"] = artProfileOutputPath.String()
+ }
+
if ctx.Config().UseRBE() && ctx.Config().IsEnvTrue("RBE_R8") {
rule = r8RE
args["implicits"] = strings.Join(r8Deps.Strings(), ",")
diff --git a/java/platform_compat_config.go b/java/platform_compat_config.go
index 7cc6231..67ed84e 100644
--- a/java/platform_compat_config.go
+++ b/java/platform_compat_config.go
@@ -109,23 +109,13 @@
p.installDirPath = android.PathForModuleInstall(ctx, "etc", "compatconfig")
p.installConfigFile = android.PathForModuleInstall(ctx, "etc", "compatconfig", p.configFile.Base())
rule.Build(configFileName, "Extract compat/compat_config.xml and install it")
-}
-
-func (p *platformCompatConfig) FilesToInstall() android.InstallPaths {
- return android.InstallPaths{p.installConfigFile}
+ ctx.InstallFile(p.installDirPath, p.configFile.Base(), p.configFile)
}
func (p *platformCompatConfig) AndroidMkEntries() []android.AndroidMkEntries {
return []android.AndroidMkEntries{android.AndroidMkEntries{
Class: "ETC",
OutputFile: android.OptionalPathForPath(p.configFile),
- Include: "$(BUILD_PREBUILT)",
- ExtraEntries: []android.AndroidMkExtraEntriesFunc{
- func(ctx android.AndroidMkExtraEntriesContext, entries *android.AndroidMkEntries) {
- entries.SetString("LOCAL_MODULE_PATH", p.installDirPath.String())
- entries.SetString("LOCAL_INSTALLED_MODULE_STEM", p.configFile.Base())
- },
- },
}}
}
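
The platform_compat_config change swaps the FilesToInstall override and the hand-written Make install variables (LOCAL_MODULE_PATH, LOCAL_INSTALLED_MODULE_STEM) for a single ctx.InstallFile call, letting Soong own the install rule. In isolation, the added call looks like this sketch; installDirPath and configFile are module fields in the real code.

// Sketch: install a module-generated file during GenerateAndroidBuildActions.
installDirPath := android.PathForModuleInstall(ctx, "etc", "compatconfig")
ctx.InstallFile(installDirPath, configFile.Base(), configFile)
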
diff --git a/rust/rust.go b/rust/rust.go
index 8a053c1..3a3ca4d 100644
--- a/rust/rust.go
+++ b/rust/rust.go
@@ -16,6 +16,7 @@
import (
"fmt"
+ "strconv"
"strings"
"android/soong/bloaty"
@@ -208,27 +209,6 @@
return false
}
-func (mod *Module) OutputFiles(tag string) (android.Paths, error) {
- switch tag {
- case "":
- if mod.sourceProvider != nil && (mod.compiler == nil || mod.compiler.Disabled()) {
- return mod.sourceProvider.Srcs(), nil
- } else {
- if mod.OutputFile().Valid() {
- return android.Paths{mod.OutputFile().Path()}, nil
- }
- return android.Paths{}, nil
- }
- case "unstripped":
- if mod.compiler != nil {
- return android.PathsIfNonNil(mod.compiler.unstrippedOutputFilePath()), nil
- }
- return nil, nil
- default:
- return nil, fmt.Errorf("unsupported module reference tag %q", tag)
- }
-}
-
func (mod *Module) SelectedStl() string {
return ""
}
@@ -988,6 +968,61 @@
if mod.testModule {
android.SetProvider(ctx, testing.TestModuleProviderKey, testing.TestModuleProviderData{})
}
+
+ mod.setOutputFiles(ctx)
+
+ buildComplianceMetadataInfo(ctx, mod, deps)
+}
+
+func (mod *Module) setOutputFiles(ctx ModuleContext) {
+ if mod.sourceProvider != nil && (mod.compiler == nil || mod.compiler.Disabled()) {
+ ctx.SetOutputFiles(mod.sourceProvider.Srcs(), "")
+ } else if mod.OutputFile().Valid() {
+ ctx.SetOutputFiles(android.Paths{mod.OutputFile().Path()}, "")
+ } else {
+ ctx.SetOutputFiles(android.Paths{}, "")
+ }
+ if mod.compiler != nil {
+ ctx.SetOutputFiles(android.PathsIfNonNil(mod.compiler.unstrippedOutputFilePath()), "unstripped")
+ }
+}
+
+func buildComplianceMetadataInfo(ctx *moduleContext, mod *Module, deps PathDeps) {
+ // Dump metadata that cannot be collected in android/compliance_metadata.go
+ metadataInfo := ctx.ComplianceMetadataInfo()
+ metadataInfo.SetStringValue(android.ComplianceMetadataProp.IS_STATIC_LIB, strconv.FormatBool(mod.Static()))
+ metadataInfo.SetStringValue(android.ComplianceMetadataProp.BUILT_FILES, mod.outputFile.String())
+
+ // Static libs
+ staticDeps := ctx.GetDirectDepsWithTag(rlibDepTag)
+ staticDepNames := make([]string, 0, len(staticDeps))
+ for _, dep := range staticDeps {
+ staticDepNames = append(staticDepNames, dep.Name())
+ }
+ ccStaticDeps := ctx.GetDirectDepsWithTag(cc.StaticDepTag(false))
+ for _, dep := range ccStaticDeps {
+ staticDepNames = append(staticDepNames, dep.Name())
+ }
+
+ staticDepPaths := make([]string, 0, len(deps.StaticLibs)+len(deps.RLibs))
+ // C static libraries
+ for _, dep := range deps.StaticLibs {
+ staticDepPaths = append(staticDepPaths, dep.String())
+ }
+ // Rust static libraries
+ for _, dep := range deps.RLibs {
+ staticDepPaths = append(staticDepPaths, dep.Path.String())
+ }
+ metadataInfo.SetListValue(android.ComplianceMetadataProp.STATIC_DEPS, android.FirstUniqueStrings(staticDepNames))
+ metadataInfo.SetListValue(android.ComplianceMetadataProp.STATIC_DEP_FILES, android.FirstUniqueStrings(staticDepPaths))
+
+ // C Whole static libs
+ ccWholeStaticDeps := ctx.GetDirectDepsWithTag(cc.StaticDepTag(true))
+ wholeStaticDepNames := make([]string, 0, len(ccWholeStaticDeps))
+ for _, dep := range ccWholeStaticDeps {
+ wholeStaticDepNames = append(wholeStaticDepNames, dep.Name())
+ }
+ metadataInfo.SetListValue(android.ComplianceMetadataProp.WHOLE_STATIC_DEPS, android.FirstUniqueStrings(wholeStaticDepNames))
}
func (mod *Module) deps(ctx DepsContext) Deps {
@@ -1471,7 +1506,7 @@
var srcProviderDepFiles android.Paths
for _, dep := range directSrcProvidersDeps {
- srcs, _ := dep.OutputFiles("")
+ srcs := android.OutputFilesForModule(ctx, dep, "")
srcProviderDepFiles = append(srcProviderDepFiles, srcs...)
}
for _, dep := range directSrcDeps {
@@ -1858,5 +1893,3 @@
var BoolDefault = proptools.BoolDefault
var String = proptools.String
var StringPtr = proptools.StringPtr
-
-var _ android.OutputFileProducer = (*Module)(nil)
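
The rust changes replace the module's OutputFiles(tag) method and the android.OutputFileProducer assertion with ctx.SetOutputFiles calls made while generating build actions; consumers switch to android.OutputFilesForModule. A sketch of the idiom, assuming a hypothetical exampleModule type:

// Sketch only: register output files per tag while generating build actions.
func (m *exampleModule) GenerateAndroidBuildActions(ctx android.ModuleContext) {
	out := android.PathForModuleOut(ctx, "example.out")
	// ... build rules producing `out` go here ...
	ctx.SetOutputFiles(android.Paths{out}, "")           // default tag
	ctx.SetOutputFiles(android.Paths{out}, "unstripped") // named tag
}

// Consumers no longer type-assert to android.OutputFileProducer:
//   srcs := android.OutputFilesForModule(ctx, dep, "")
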
diff --git a/rust/test_test.go b/rust/test_test.go
index 6d0ebcf..dc796c8 100644
--- a/rust/test_test.go
+++ b/rust/test_test.go
@@ -106,12 +106,9 @@
ctx := testRust(t, bp)
- module := ctx.ModuleForTests("main_test", "android_arm64_armv8-a").Module()
- testBinary := module.(*Module).compiler.(*testDecorator)
- outputFiles, err := module.(android.OutputFileProducer).OutputFiles("")
- if err != nil {
- t.Fatalf("Expected rust_test to produce output files, error: %s", err)
- }
+ testingModule := ctx.ModuleForTests("main_test", "android_arm64_armv8-a")
+ testBinary := testingModule.Module().(*Module).compiler.(*testDecorator)
+ outputFiles := testingModule.OutputFiles(t, "")
if len(outputFiles) != 1 {
t.Fatalf("expected exactly one output file. output files: [%s]", outputFiles)
}
@@ -168,12 +165,10 @@
`
ctx := testRust(t, bp)
- module := ctx.ModuleForTests("main_test", "android_arm64_armv8-a").Module()
+ testingModule := ctx.ModuleForTests("main_test", "android_arm64_armv8-a")
+ module := testingModule.Module()
testBinary := module.(*Module).compiler.(*testDecorator)
- outputFiles, err := module.(android.OutputFileProducer).OutputFiles("")
- if err != nil {
- t.Fatalf("Expected rust_test to produce output files, error: %s", err)
- }
+ outputFiles := testingModule.OutputFiles(t, "")
if len(outputFiles) != 1 {
t.Fatalf("expected exactly one output file. output files: [%s]", outputFiles)
}
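
In tests, the same migration shows up as the TestingModule.OutputFiles(t, tag) helper, which fails the test itself instead of returning an error to unwrap. The updated idiom from the hunk, in brief:

// Updated test idiom: no android.OutputFileProducer assertion, no error check.
testingModule := ctx.ModuleForTests("main_test", "android_arm64_armv8-a")
outputFiles := testingModule.OutputFiles(t, "")
if len(outputFiles) != 1 {
	t.Fatalf("expected exactly one output file. output files: [%s]", outputFiles)
}
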
diff --git a/shared/Android.bp b/shared/Android.bp
index 3c84f55..d5e8614 100644
--- a/shared/Android.bp
+++ b/shared/Android.bp
@@ -15,7 +15,6 @@
"paths_test.go",
],
deps: [
- "soong-bazel",
"golang-protobuf-proto",
],
}
diff --git a/shared/paths.go b/shared/paths.go
index fca8b4c..1ee66d5 100644
--- a/shared/paths.go
+++ b/shared/paths.go
@@ -18,8 +18,6 @@
import (
"path/filepath"
-
- "android/soong/bazel"
)
// A SharedPaths represents a list of paths that are shared between
@@ -49,11 +47,3 @@
func TempDirForOutDir(outDir string) (tempPath string) {
return filepath.Join(outDir, ".temp")
}
-
-// BazelMetricsFilename returns the bazel profile filename based
-// on the action name. This is to help to store a set of bazel
-// profiles since bazel may execute multiple times during a single
-// build.
-func BazelMetricsFilename(s SharedPaths, actionName bazel.RunName) string {
- return filepath.Join(s.BazelMetricsDir(), actionName.String()+"_bazel_profile.gz")
-}
diff --git a/ui/build/config.go b/ui/build/config.go
index 8dddea5..c4a6797 100644
--- a/ui/build/config.go
+++ b/ui/build/config.go
@@ -1246,6 +1246,11 @@
}
func (c *configImpl) canSupportRBE() bool {
+ // RBE is only supported on Linux hosts.
+ if runtime.GOOS != "linux" {
+ return false
+ }
+
// Do not use RBE with prod credentials in scenarios when stubby doesn't exist, since
// its unlikely that we will be able to obtain necessary creds without stubby.
authType, _ := c.rbeAuth()
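
The canSupportRBE change adds a plain host-OS gate in front of the existing credential checks. Standalone, the new check reduces to a runtime.GOOS comparison; the snippet below is an illustration, not Soong code.

package main

import (
	"fmt"
	"runtime"
)

// hostCanSupportRBE mirrors the new early return: only Linux hosts may use RBE.
func hostCanSupportRBE() bool {
	return runtime.GOOS == "linux"
}

func main() {
	fmt.Printf("%s/%s can support RBE: %v\n", runtime.GOOS, runtime.GOARCH, hostCanSupportRBE())
}
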
diff --git a/ui/build/soong.go b/ui/build/soong.go
index 9a4583c..77fee0a 100644
--- a/ui/build/soong.go
+++ b/ui/build/soong.go
@@ -27,7 +27,6 @@
"android/soong/ui/tracer"
- "android/soong/bazel"
"android/soong/ui/metrics"
"android/soong/ui/metrics/metrics_proto"
"android/soong/ui/status"
@@ -603,10 +602,6 @@
checkEnvironmentFile(ctx, soongBuildEnv, config.UsedEnvFile(soongBuildTag))
- // Remove bazel files in the event that bazel is disabled for the build.
- // These files may have been left over from a previous bazel-enabled build.
- cleanBazelFiles(config)
-
if config.JsonModuleGraph() {
checkEnvironmentFile(ctx, soongBuildEnv, config.UsedEnvFile(jsonModuleGraphTag))
}
@@ -758,18 +753,6 @@
}
}
-func cleanBazelFiles(config Config) {
- files := []string{
- shared.JoinPath(config.SoongOutDir(), "bp2build"),
- shared.JoinPath(config.SoongOutDir(), "workspace"),
- shared.JoinPath(config.SoongOutDir(), bazel.SoongInjectionDirName),
- shared.JoinPath(config.OutDir(), "bazel")}
-
- for _, f := range files {
- os.RemoveAll(f)
- }
-}
-
func runMicrofactory(ctx Context, config Config, name string, pkg string, mapping map[string]string) {
ctx.BeginTrace(metrics.RunSoong, name)
defer ctx.EndTrace()