Merge "Read LLVM version from $LLVM_PREBUILTS_VERSION if set."
diff --git a/OWNERS b/OWNERS
index f15bd32..f0ccd82 100644
--- a/OWNERS
+++ b/OWNERS
@@ -3,13 +3,18 @@
# AMER
ahumesky@google.com
+alexmarquez@google.com
asmundak@google.com
ccross@android.com
cparsons@google.com
dwillemsen@google.com
eakammer@google.com
+jobredeaux@google.com
joeo@google.com
+lamontjones@google.com
spandandas@google.com
+weiwli@google.com
+yudiliu@google.com
yuntaoxu@google.com
# APAC
diff --git a/README.md b/README.md
index 10ddd73..a67c393 100644
--- a/README.md
+++ b/README.md
@@ -33,8 +33,9 @@
Every module must have a `name` property, and the value must be unique across
all Android.bp files.
-For a list of valid module types and their properties see
-[$OUT_DIR/soong/docs/soong_build.html](https://ci.android.com/builds/latest/branches/aosp-build-tools/targets/linux/view/soong_build.html).
+The list of valid module types and their properties can be generated by calling
+`m soong_docs`. It will be written to `$OUT_DIR/soong/docs/soong_build.html`.
+A copy generated from the current version of Soong can be found [here](https://ci.android.com/builds/latest/branches/aosp-build-tools/targets/linux/view/soong_build.html).
### File lists
@@ -451,15 +452,10 @@
The values of the variables can be set from a product's `BoardConfig.mk` file:
```
-SOONG_CONFIG_NAMESPACES += acme
-SOONG_CONFIG_acme += \
- board \
- feature \
- width \
-
-SOONG_CONFIG_acme_board := soc_a
-SOONG_CONFIG_acme_feature := true
-SOONG_CONFIG_acme_width := 200
+$(call add_soong_config_namespace, acme)
+$(call add_soong_config_var_value, acme, board, soc_a)
+$(call add_soong_config_var_value, acme, feature, true)
+$(call add_soong_config_var_value, acme, width, 200)
```
The `acme_cc_defaults` module type can be used anywhere after the definition in
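For illustration, the `acme` namespace wired up in `BoardConfig.mk` above pairs with a
`soong_config_module_type` definition earlier in this README. A minimal sketch of that
definition and a module using it follows; the exact split of `board`, `feature`, and
`width` across `variables`, `bool_variables`, and `value_variables` is assumed from the
surrounding example rather than quoted from it.

```
soong_config_string_variable {
    name: "board",
    values: ["soc_a", "soc_b"],
}

soong_config_module_type {
    name: "acme_cc_defaults",
    module_type: "cc_defaults",
    config_namespace: "acme",
    variables: ["board"],
    bool_variables: ["feature"],
    value_variables: ["width"],
    properties: ["cflags"],
}

acme_cc_defaults {
    name: "acme_defaults",
    cflags: ["-DGENERIC"],
    soong_config_variables: {
        board: {
            soc_a: {
                cflags: ["-DSOC_A"],
            },
        },
        feature: {
            cflags: ["-DFEATURE"],
        },
        width: {
            cflags: ["-DWIDTH=%s"],
        },
    },
}
```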
diff --git a/android/Android.bp b/android/Android.bp
index 1bccd7b..f3a3850 100644
--- a/android/Android.bp
+++ b/android/Android.bp
@@ -66,7 +66,6 @@
"prebuilt.go",
"prebuilt_build_tool.go",
"proto.go",
- "queryview.go",
"register.go",
"rule_builder.go",
"sandbox.go",
@@ -81,7 +80,6 @@
"util.go",
"variable.go",
"visibility.go",
- "writedocs.go",
],
testSrcs: [
"android_test.go",
diff --git a/android/androidmk.go b/android/androidmk.go
index f032f1b..9853d2c 100644
--- a/android/androidmk.go
+++ b/android/androidmk.go
@@ -571,7 +571,7 @@
if host {
makeOs := amod.Os().String()
- if amod.Os() == Linux || amod.Os() == LinuxBionic {
+ if amod.Os() == Linux || amod.Os() == LinuxBionic || amod.Os() == LinuxMusl {
makeOs = "linux"
}
a.SetString("LOCAL_MODULE_HOST_OS", makeOs)
diff --git a/android/bazel.go b/android/bazel.go
index fa19e52..373e292 100644
--- a/android/bazel.go
+++ b/android/bazel.go
@@ -120,6 +120,10 @@
// allows modules to opt-out.
Bp2BuildDefaultTrueRecursively BazelConversionConfigEntry = iota + 1
+ // all modules in this package (not recursively) default to bp2build_available: true.
+ // allows modules to opt-out.
+ Bp2BuildDefaultTrue
+
// all modules in this package (not recursively) default to bp2build_available: false.
// allows modules to opt-in.
Bp2BuildDefaultFalse
@@ -137,12 +141,15 @@
// build/bazel explicitly.
"build/bazel":/* recursive = */ false,
"build/bazel/examples/android_app":/* recursive = */ true,
+ "build/bazel/examples/java":/* recursive = */ true,
"build/bazel/bazel_skylib":/* recursive = */ true,
"build/bazel/rules":/* recursive = */ true,
"build/bazel/rules_cc":/* recursive = */ true,
+ "build/bazel/scripts":/* recursive = */ true,
"build/bazel/tests":/* recursive = */ true,
"build/bazel/platforms":/* recursive = */ true,
"build/bazel/product_variables":/* recursive = */ true,
+ "build/bazel_common_rules":/* recursive = */ true,
"build/pesto":/* recursive = */ true,
// external/bazelbuild-rules_android/... is needed by mixed builds, otherwise mixed builds analysis fails
@@ -160,8 +167,11 @@
bp2buildDefaultConfig = Bp2BuildConfig{
"bionic": Bp2BuildDefaultTrueRecursively,
"build/bazel/examples/apex/minimal": Bp2BuildDefaultTrueRecursively,
+ "development/sdk": Bp2BuildDefaultTrueRecursively,
"external/gwp_asan": Bp2BuildDefaultTrueRecursively,
+ "external/brotli": Bp2BuildDefaultTrue,
"system/core/libcutils": Bp2BuildDefaultTrueRecursively,
+ "system/core/libprocessgroup": Bp2BuildDefaultTrue,
"system/core/property_service/libpropertyinfoparser": Bp2BuildDefaultTrueRecursively,
"system/libbase": Bp2BuildDefaultTrueRecursively,
"system/logging/liblog": Bp2BuildDefaultTrueRecursively,
@@ -170,7 +180,9 @@
"external/arm-optimized-routines": Bp2BuildDefaultTrueRecursively,
"external/fmtlib": Bp2BuildDefaultTrueRecursively,
"external/jemalloc_new": Bp2BuildDefaultTrueRecursively,
+ "external/libcxx": Bp2BuildDefaultTrueRecursively,
"external/libcxxabi": Bp2BuildDefaultTrueRecursively,
+ "external/libcap": Bp2BuildDefaultTrueRecursively,
"external/scudo": Bp2BuildDefaultTrueRecursively,
"prebuilts/clang/host/linux-x86": Bp2BuildDefaultTrueRecursively,
}
@@ -214,6 +226,16 @@
"gwp_asan_crash_handler", // cc_library, ld.lld: error: undefined symbol: memset
+ //system/core/libprocessgroup/...
+ "libprocessgroup", // depends on //system/core/libprocessgroup/cgrouprc:libcgrouprc
+
+ //external/brotli/...
+ "brotli-fuzzer-corpus", // "declared output 'external/brotli/c/fuzz/73231c6592f195ffd41100b8706d1138ff6893b9' was not created by genrule"
+
+ // //external/libcap/...
+ "libcap", // http://b/198595332, depends on _makenames, a cc_binary
+ "cap_names.h", // http://b/198596102, depends on _makenames, a cc_binary
+
// Tests. Handle later.
"libbionic_tests_headers_posix", // http://b/186024507, cc_library_static, sched.h, time.h not found
"libjemalloc5_integrationtest",
@@ -234,8 +256,12 @@
// Per-module denylist to opt modules out of mixed builds. Such modules will
// still be generated via bp2build.
mixedBuildsDisabledList = []string{
- "libc++abi", // http://b/195970501, cc_library_static, duplicate symbols because it propagates libc objects.
- "libc++demangle", // http://b/195970501, cc_library_static, duplicate symbols because it propagates libc objects.
+ "libbrotli", // http://b/198585397, ld.lld: error: bionic/libc/arch-arm64/generic/bionic/memmove.S:95:(.text+0x10): relocation R_AARCH64_CONDBR19 out of range: -1404176 is not in [-1048576, 1048575]; references __memcpy
+ "libc++fs", // http://b/198403271, Missing symbols/members in the global namespace when referenced from headers in //external/libcxx/includes
+ "libc++_experimental", // http://b/198403271, Missing symbols/members in the global namespace when referenced from headers in //external/libcxx/includes
+ "libc++_static", // http://b/198403271, Missing symbols/members in the global namespace when referenced from headers in //external/libcxx/includes
+ "libc++abi", // http://b/195970501, cc_library_static, duplicate symbols because it propagates libc objects.
+ "libc++demangle", // http://b/195970501, cc_library_static, duplicate symbols because it propagates libc objects.
}
// Used for quicker lookups
@@ -337,11 +363,10 @@
func bp2buildDefaultTrueRecursively(packagePath string, config Bp2BuildConfig) bool {
ret := false
- // Return exact matches in the config.
- if config[packagePath] == Bp2BuildDefaultTrueRecursively {
+ // Check if the package path has an exact match in the config.
+ if config[packagePath] == Bp2BuildDefaultTrue || config[packagePath] == Bp2BuildDefaultTrueRecursively {
return true
- }
- if config[packagePath] == Bp2BuildDefaultFalse {
+ } else if config[packagePath] == Bp2BuildDefaultFalse {
return false
}
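As a concrete illustration of the opt-in/opt-out comments above (module and file names
hypothetical): a module whose package defaults to `Bp2BuildDefaultTrue` or
`Bp2BuildDefaultTrueRecursively` can still opt out of conversion through the
`bazel_module` property, and a module under a `Bp2BuildDefaultFalse` package can opt in
the same way with `bp2build_available: true`.

```
cc_library {
    name: "libexample",
    srcs: ["example.cpp"],
    bazel_module: {
        // Opt this module out even though its package defaults to bp2build_available: true.
        bp2build_available: false,
    },
}
```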
diff --git a/android/bazel_handler.go b/android/bazel_handler.go
index 312f009..50b79fa 100644
--- a/android/bazel_handler.go
+++ b/android/bazel_handler.go
@@ -27,9 +27,9 @@
"sync"
"android/soong/bazel/cquery"
+ "android/soong/shared"
"android/soong/bazel"
- "android/soong/shared"
)
type cqueryRequest interface {
@@ -106,7 +106,7 @@
bazelPath string
outputBase string
workspaceDir string
- buildDir string
+ soongOutDir string
metricsDir string
}
@@ -254,7 +254,7 @@
func bazelPathsFromConfig(c *config) (*bazelPaths, error) {
p := bazelPaths{
- buildDir: c.buildDir,
+ soongOutDir: c.soongOutDir,
}
missingEnvVars := []string{}
if len(c.Getenv("BAZEL_HOME")) > 1 {
@@ -353,7 +353,16 @@
// the invocation returned an error code.
func (r *builtinBazelRunner) issueBazelCommand(paths *bazelPaths, runName bazel.RunName, command bazelCommand,
extraFlags ...string) (string, string, error) {
- cmdFlags := []string{"--output_base=" + absolutePath(paths.outputBase), command.command}
+ cmdFlags := []string{
+ // --noautodetect_server_javabase has the practical consequence of preventing Bazel from
+ // attempting to download rules_java, which is incompatible with
+ // --experimental_repository_disable_download set further below.
+ // rules_java is also not needed until mixed builds start building java targets.
+ // TODO(b/197958133): Once rules_java is pulled into AOSP, remove this flag.
+ "--noautodetect_server_javabase",
+ "--output_base=" + absolutePath(paths.outputBase),
+ command.command,
+ }
cmdFlags = append(cmdFlags, command.expression)
cmdFlags = append(cmdFlags, "--profile="+shared.BazelMetricsFilename(paths, runName))
@@ -382,7 +391,7 @@
bazelCmd.Env = append(os.Environ(),
"HOME="+paths.homeDir,
pwdPrefix(),
- "BUILD_DIR="+absolutePath(paths.buildDir),
+ "BUILD_DIR="+absolutePath(paths.soongOutDir),
// Make OUT_DIR absolute here so tools/bazel.sh uses the correct
// OUT_DIR at <root>/out, instead of <root>/out/soong/workspace/out.
"OUT_DIR="+absolutePath(paths.outDir()),
@@ -483,6 +492,12 @@
)
`
+ commonArchFilegroupString := `
+filegroup(name = "common",
+ srcs = [%s],
+)
+`
+
configNodesSection := ""
labelsByArch := map[string][]string{}
@@ -492,14 +507,22 @@
labelsByArch[archString] = append(labelsByArch[archString], labelString)
}
- configNodeLabels := []string{}
+ allLabels := []string{}
for archString, labels := range labelsByArch {
- configNodeLabels = append(configNodeLabels, fmt.Sprintf("\":%s\"", archString))
- labelsString := strings.Join(labels, ",\n ")
- configNodesSection += fmt.Sprintf(configNodeFormatString, archString, archString, labelsString)
+ if archString == "common" {
+ // arch-less labels (e.g. filegroups) don't need a config_node
+ allLabels = append(allLabels, "\":common\"")
+ labelsString := strings.Join(labels, ",\n ")
+ configNodesSection += fmt.Sprintf(commonArchFilegroupString, labelsString)
+ } else {
+ // Create a config_node, and add the config_node's label to allLabels
+ allLabels = append(allLabels, fmt.Sprintf("\":%s\"", archString))
+ labelsString := strings.Join(labels, ",\n ")
+ configNodesSection += fmt.Sprintf(configNodeFormatString, archString, archString, labelsString)
+ }
}
- return []byte(fmt.Sprintf(formatString, configNodesSection, strings.Join(configNodeLabels, ",\n ")))
+ return []byte(fmt.Sprintf(formatString, configNodesSection, strings.Join(allLabels, ",\n ")))
}
func indent(original string) string {
@@ -564,6 +587,12 @@
%s
def get_arch(target):
+ # TODO(b/199363072): filegroups and file targets aren't associated with any
+ # specific platform architecture in mixed builds. This is consistent with how
+ # Soong treats filegroups, but it may not be the case with manually-written
+ # filegroup BUILD targets.
+ if target.kind in ["filegroup", ""]:
+ return "common"
buildoptions = build_options(target)
platforms = build_options(target)["//command_line_option:platforms"]
if len(platforms) != 1:
@@ -599,24 +628,24 @@
// Returns a path containing build-related metadata required for interfacing
// with Bazel. Example: out/soong/bazel.
func (p *bazelPaths) intermediatesDir() string {
- return filepath.Join(p.buildDir, "bazel")
+ return filepath.Join(p.soongOutDir, "bazel")
}
// Returns the path where the contents of the @soong_injection repository live.
// It is used by Soong to tell Bazel things it cannot express over the command line.
func (p *bazelPaths) injectedFilesDir() string {
- return filepath.Join(p.buildDir, bazel.SoongInjectionDirName)
+ return filepath.Join(p.soongOutDir, bazel.SoongInjectionDirName)
}
// Returns the path of the synthetic Bazel workspace that contains a symlink
// forest composed of the whole source tree and BUILD files generated by bp2build.
func (p *bazelPaths) syntheticWorkspaceDir() string {
- return filepath.Join(p.buildDir, "workspace")
+ return filepath.Join(p.soongOutDir, "workspace")
}
// Returns the path to the top level out dir ($OUT_DIR).
func (p *bazelPaths) outDir() string {
- return filepath.Dir(p.buildDir)
+ return filepath.Dir(p.soongOutDir)
}
// Issues commands to Bazel to receive results for all cquery requests
@@ -662,11 +691,12 @@
if err != nil {
return err
}
+
buildrootLabel := "@soong_injection//mixed_builds:buildroot"
cqueryOutput, cqueryErr, err = context.issueBazelCommand(
context.paths,
bazel.CqueryBuildRootRunName,
- bazelCommand{"cquery", fmt.Sprintf("kind(rule, deps(%s))", buildrootLabel)},
+ bazelCommand{"cquery", fmt.Sprintf("deps(%s)", buildrootLabel)},
"--output=starlark",
"--starlark:file="+absolutePath(cqueryFileRelpath))
err = ioutil.WriteFile(filepath.Join(soongInjectionPath, "cquery.out"),
diff --git a/android/bazel_handler_test.go b/android/bazel_handler_test.go
index f1fabec..cdf1a63 100644
--- a/android/bazel_handler_test.go
+++ b/android/bazel_handler_test.go
@@ -11,7 +11,7 @@
label := "//foo:bar"
arch := Arm64
bazelContext, _ := testBazelContext(t, map[bazelCommand]string{
- bazelCommand{command: "cquery", expression: "kind(rule, deps(@soong_injection//mixed_builds:buildroot))"}: `//foo:bar|arm64>>out/foo/bar.txt`,
+ bazelCommand{command: "cquery", expression: "deps(@soong_injection//mixed_builds:buildroot)"}: `//foo:bar|arm64>>out/foo/bar.txt`,
})
g, ok := bazelContext.GetOutputFiles(label, arch)
if ok {
@@ -101,7 +101,7 @@
func testBazelContext(t *testing.T, bazelCommandResults map[bazelCommand]string) (*bazelContext, string) {
t.Helper()
p := bazelPaths{
- buildDir: t.TempDir(),
+ soongOutDir: t.TempDir(),
outputBase: "outputbase",
workspaceDir: "workspace_dir",
}
@@ -114,5 +114,5 @@
bazelRunner: runner,
paths: &p,
requests: map[cqueryKey]bool{},
- }, p.buildDir
+ }, p.soongOutDir
}
diff --git a/android/bazel_paths.go b/android/bazel_paths.go
index c09d218..a4bd2ef 100644
--- a/android/bazel_paths.go
+++ b/android/bazel_paths.go
@@ -414,7 +414,7 @@
}
outputPath := OutputPath{basePath{"", ""},
- ctx.Config().buildDir,
+ ctx.Config().soongOutDir,
ctx.Config().BazelContext.OutputBase()}
return BazelOutPath{
diff --git a/android/config.go b/android/config.go
index 35403b8..0767e7b 100644
--- a/android/config.go
+++ b/android/config.go
@@ -66,19 +66,31 @@
*config
}
-// BuildDir returns the build output directory for the configuration.
-func (c Config) BuildDir() string {
- return c.buildDir
+// SoongOutDir returns the build output directory for the configuration.
+func (c Config) SoongOutDir() string {
+ return c.soongOutDir
}
-func (c Config) NinjaBuildDir() string {
- return c.buildDir
+func (c Config) OutDir() string {
+ return c.outDir
+}
+
+func (c Config) RunGoTests() bool {
+ return c.runGoTests
}
func (c Config) DebugCompilation() bool {
return false // Never compile Go code in the main build for debugging
}
+func (c Config) Subninjas() []string {
+ return []string{}
+}
+
+func (c Config) PrimaryBuilderInvocations() []bootstrap.PrimaryBuilderInvocation {
+ return []bootstrap.PrimaryBuilderInvocation{}
+}
+
// A DeviceConfig object represents the configuration for a particular device
// being built. For now there will only be one of these, but in the future there
// may be multiple devices being built.
@@ -122,9 +134,12 @@
deviceConfig *deviceConfig
- buildDir string // the path of the build output directory
+ outDir string // The output directory (usually out/)
+ soongOutDir string
moduleListFile string // the path to the file which lists blueprint files to parse.
+ runGoTests bool
+
env map[string]string
envLock sync.Mutex
envDeps map[string]string
@@ -137,8 +152,6 @@
captureBuild bool // true for tests, saves build parameters for each module
ignoreEnvironment bool // true for tests, returns empty from all Getenv calls
- stopBefore bootstrap.StopBefore
-
fs pathtools.FileSystem
mockBpList string
@@ -283,11 +296,12 @@
// NullConfig returns a mostly empty Config for use by standalone tools like dexpreopt_gen that
// use the android package.
-func NullConfig(buildDir string) Config {
+func NullConfig(outDir, soongOutDir string) Config {
return Config{
config: &config{
- buildDir: buildDir,
- fs: pathtools.OsFs,
+ outDir: outDir,
+ soongOutDir: soongOutDir,
+ fs: pathtools.OsFs,
},
}
}
@@ -319,7 +333,10 @@
ShippingApiLevel: stringPtr("30"),
},
- buildDir: buildDir,
+ outDir: buildDir,
+ // soongOutDir is inconsistent with production (it should be buildDir + "/soong")
+ // but a lot of tests assume this :(
+ soongOutDir: buildDir,
captureBuild: true,
env: envCopy,
@@ -397,7 +414,7 @@
// multiple runs in the same program execution is carried over (such as Bazel
// context or environment deps).
func ConfigForAdditionalRun(c Config) (Config, error) {
- newConfig, err := NewConfig(c.buildDir, c.moduleListFile, c.env)
+ newConfig, err := NewConfig(c.moduleListFile, c.runGoTests, c.outDir, c.soongOutDir, c.env)
if err != nil {
return Config{}, err
}
@@ -408,14 +425,16 @@
// NewConfig creates a new Config object, loading the product configuration file
// from the Soong output directory if it is found.
-func NewConfig(buildDir string, moduleListFile string, availableEnv map[string]string) (Config, error) {
+func NewConfig(moduleListFile string, runGoTests bool, outDir, soongOutDir string, availableEnv map[string]string) (Config, error) {
// Make a config with default options.
config := &config{
- ProductVariablesFileName: filepath.Join(buildDir, productVariablesFileName),
+ ProductVariablesFileName: filepath.Join(soongOutDir, productVariablesFileName),
env: availableEnv,
- buildDir: buildDir,
+ outDir: outDir,
+ soongOutDir: soongOutDir,
+ runGoTests: runGoTests,
multilibConflicts: make(map[ArchType]bool),
moduleListFile: moduleListFile,
@@ -428,7 +447,7 @@
// Soundness check of the build and source directories. This won't catch strange
// configurations with symlinks, but at least checks the obvious case.
- absBuildDir, err := filepath.Abs(buildDir)
+ absBuildDir, err := filepath.Abs(soongOutDir)
if err != nil {
return Config{}, err
}
@@ -448,7 +467,7 @@
return Config{}, err
}
- KatiEnabledMarkerFile := filepath.Join(buildDir, ".soong.kati_enabled")
+ KatiEnabledMarkerFile := filepath.Join(soongOutDir, ".soong.kati_enabled")
if _, err := os.Stat(absolutePath(KatiEnabledMarkerFile)); err == nil {
config.katiEnabled = true
}
@@ -525,7 +544,7 @@
pathsToParse := []string{}
for candidate := range mockFS {
base := filepath.Base(candidate)
- if base == "Blueprints" || base == "Android.bp" {
+ if base == "Android.bp" {
pathsToParse = append(pathsToParse, candidate)
}
}
@@ -538,29 +557,16 @@
c.mockBpList = blueprint.MockModuleListFile
}
-func (c *config) StopBefore() bootstrap.StopBefore {
- return c.stopBefore
-}
-
-// SetStopBefore configures soong_build to exit earlier at a specific point.
-func (c *config) SetStopBefore(stopBefore bootstrap.StopBefore) {
- c.stopBefore = stopBefore
-}
-
func (c *config) SetAllowMissingDependencies() {
c.productVariables.Allow_missing_dependencies = proptools.BoolPtr(true)
}
-var _ bootstrap.ConfigStopBefore = (*config)(nil)
-
// BlueprintToolLocation returns the directory containing build system tools
// from Blueprint, like soong_zip and merge_zips.
-func (c *config) BlueprintToolLocation() string {
- return filepath.Join(c.buildDir, "host", c.PrebuiltOS(), "bin")
+func (c *config) HostToolDir() string {
+ return filepath.Join(c.soongOutDir, "host", c.PrebuiltOS(), "bin")
}
-var _ bootstrap.ConfigBlueprintToolLocation = (*config)(nil)
-
func (c *config) HostToolPath(ctx PathContext, tool string) Path {
return PathForOutput(ctx, "host", c.PrebuiltOS(), "bin", tool)
}
@@ -1665,8 +1671,9 @@
return ConfiguredJarList{apexes, jars}
}
-// Filter keeps the entries if a jar appears in the given list of jars to keep; returns a new list.
-func (l *ConfiguredJarList) Filter(jarsToKeep []string) ConfiguredJarList {
+// Filter keeps the entries if a jar appears in the given list of jars to keep. Returns a new list
+// and any jars from jarsToKeep that were not found in this list.
+func (l *ConfiguredJarList) Filter(jarsToKeep []string) (ConfiguredJarList, []string) {
var apexes []string
var jars []string
@@ -1677,7 +1684,7 @@
}
}
- return ConfiguredJarList{apexes, jars}
+ return ConfiguredJarList{apexes, jars}, RemoveListFromList(jarsToKeep, jars)
}
// CopyOfJars returns a copy of the list of strings containing jar module name
diff --git a/android/filegroup.go b/android/filegroup.go
index 54d01d3..4db165f 100644
--- a/android/filegroup.go
+++ b/android/filegroup.go
@@ -42,6 +42,27 @@
srcs := bazel.MakeLabelListAttribute(
BazelLabelForModuleSrcExcludes(ctx, fg.properties.Srcs, fg.properties.Exclude_srcs))
+
+	// For Bazel compatibility, don't generate the filegroup if there is only one
+	// source file and that source file is named the same as the module itself.
+	// In Bazel, eponymous filegroups like this would be an error.
+	//
+	// Instead, dependents of this single-file filegroup can simply depend on
+	// the file target directly, instead of the rule target.
+	//
+	// You may ask: what if a filegroup has multiple files, and one of them
+	// shares the module's name? The answer: we haven't seen that in the wild,
+	// and we should lock Soong itself down to prevent the behavior. For now,
+	// we raise an error if bp2build sees this problem.
+ for _, f := range srcs.Value.Includes {
+ if f.Label == fg.Name() {
+ if len(srcs.Value.Includes) > 1 {
+ ctx.ModuleErrorf("filegroup '%s' cannot contain a file with the same name", fg.Name())
+ }
+ return
+ }
+ }
+
attrs := &bazelFilegroupAttributes{
Srcs: srcs,
}
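To make the eponymous-filegroup case described in the comment above concrete (module and
file names hypothetical), bp2build skips generating a Bazel filegroup for a module like
the following, and dependents reference the file target directly instead of a rule target:

```
filegroup {
    // Shares its name with its only source file, so no Bazel filegroup is
    // generated; dependents use the protos.proto file target directly.
    name: "protos.proto",
    srcs: ["protos.proto"],
}
```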
@@ -97,7 +118,7 @@
}
bazelCtx := ctx.Config().BazelContext
- filePaths, ok := bazelCtx.GetOutputFiles(fg.GetBazelLabel(ctx, fg), ctx.Arch().ArchType)
+ filePaths, ok := bazelCtx.GetOutputFiles(fg.GetBazelLabel(ctx, fg), Common)
if !ok {
return false
}
diff --git a/android/fixture.go b/android/fixture.go
index fd051a7..728f031 100644
--- a/android/fixture.go
+++ b/android/fixture.go
@@ -834,7 +834,7 @@
func (r *TestResult) NormalizePathForTesting(path Path) string {
pathContext := PathContextForTesting(r.Config)
pathAsString := path.String()
- if rel, isRel := MaybeRel(pathContext, r.Config.BuildDir(), pathAsString); isRel {
+ if rel, isRel := MaybeRel(pathContext, r.Config.SoongOutDir(), pathAsString); isRel {
return rel
}
return pathAsString
diff --git a/android/license_kind_test.go b/android/license_kind_test.go
index 1f09568..7a909a6 100644
--- a/android/license_kind_test.go
+++ b/android/license_kind_test.go
@@ -14,38 +14,38 @@
{
name: "license_kind must not accept licenses property",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
license_kind {
name: "top_license",
licenses: ["other_license"],
}`),
},
expectedErrors: []string{
- `top/Blueprints:4:14: unrecognized property "licenses"`,
+ `top/Android.bp:4:14: unrecognized property "licenses"`,
},
},
{
name: "bad license_kind",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
license_kind {
name: "top_notice",
conditions: ["notice"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
mock_license {
name: "other_notice",
license_kinds: ["notice"],
}`),
},
expectedErrors: []string{
- `other/Blueprints:2:5: "other_notice" depends on undefined module "notice"`,
+ `other/Android.bp:2:5: "other_notice" depends on undefined module "notice"`,
},
},
{
name: "good license kind",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
license_kind {
name: "top_by_exception_only",
conditions: ["by_exception_only"],
@@ -55,7 +55,7 @@
name: "top_proprietary",
license_kinds: ["top_by_exception_only"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
mock_license {
name: "other_proprietary",
license_kinds: ["top_proprietary"],
@@ -65,7 +65,7 @@
{
name: "multiple license kinds",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
license_kind {
name: "top_notice",
conditions: ["notice"],
@@ -85,7 +85,7 @@
name: "top_proprietary",
license_kinds: ["top_by_exception_only"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
mock_license {
name: "other_rule",
license_kinds: ["top_by_exception_only"],
diff --git a/android/license_sdk_member.go b/android/license_sdk_member.go
index cd36ed6..2ce921b 100644
--- a/android/license_sdk_member.go
+++ b/android/license_sdk_member.go
@@ -31,9 +31,9 @@
SdkMemberTypeBase
}
-func (l *licenseSdkMemberType) AddDependencies(mctx BottomUpMutatorContext, dependencyTag blueprint.DependencyTag, names []string) {
+func (l *licenseSdkMemberType) AddDependencies(ctx SdkDependencyContext, dependencyTag blueprint.DependencyTag, names []string) {
// Add dependencies onto the license module from the sdk module.
- mctx.AddDependency(mctx.Module(), dependencyTag, names...)
+ ctx.AddDependency(ctx.Module(), dependencyTag, names...)
}
func (l *licenseSdkMemberType) IsInstance(module Module) bool {
diff --git a/android/license_test.go b/android/license_test.go
index 26b33c3..7222cd7 100644
--- a/android/license_test.go
+++ b/android/license_test.go
@@ -27,7 +27,7 @@
{
name: "license must not accept licenses property",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
license {
name: "top_license",
visibility: ["//visibility:private"],
@@ -35,13 +35,13 @@
}`),
},
expectedErrors: []string{
- `top/Blueprints:5:14: unrecognized property "licenses"`,
+ `top/Android.bp:5:14: unrecognized property "licenses"`,
},
},
{
name: "private license",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
license_kind {
name: "top_notice",
conditions: ["notice"],
@@ -53,27 +53,27 @@
license_kinds: ["top_notice"],
visibility: ["//visibility:private"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
rule {
name: "arule",
licenses: ["top_allowed_as_notice"],
}`),
- "yetmore/Blueprints": []byte(`
+ "yetmore/Android.bp": []byte(`
package {
default_applicable_licenses: ["top_allowed_as_notice"],
}`),
},
expectedErrors: []string{
- `other/Blueprints:2:5: module "arule": depends on //top:top_allowed_as_notice ` +
+ `other/Android.bp:2:5: module "arule": depends on //top:top_allowed_as_notice ` +
`which is not visible to this module`,
- `yetmore/Blueprints:2:5: module "//yetmore": depends on //top:top_allowed_as_notice ` +
+ `yetmore/Android.bp:2:5: module "//yetmore": depends on //top:top_allowed_as_notice ` +
`which is not visible to this module`,
},
},
{
name: "must reference license_kind module",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
rule {
name: "top_by_exception_only",
}
@@ -85,14 +85,14 @@
}`),
},
expectedErrors: []string{
- `top/Blueprints:6:5: module "top_proprietary": license_kinds property ` +
+ `top/Android.bp:6:5: module "top_proprietary": license_kinds property ` +
`"top_by_exception_only" is not a license_kind module`,
},
},
{
name: "license_kind module must exist",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
license {
name: "top_notice_allowed",
license_kinds: ["top_notice"],
@@ -100,13 +100,13 @@
}`),
},
expectedErrors: []string{
- `top/Blueprints:2:5: "top_notice_allowed" depends on undefined module "top_notice"`,
+ `top/Android.bp:2:5: "top_notice_allowed" depends on undefined module "top_notice"`,
},
},
{
name: "public license",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
license_kind {
name: "top_by_exception_only",
conditions: ["by_exception_only"],
@@ -118,12 +118,12 @@
license_kinds: ["top_by_exception_only"],
visibility: ["//visibility:public"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
rule {
name: "arule",
licenses: ["top_proprietary"],
}`),
- "yetmore/Blueprints": []byte(`
+ "yetmore/Android.bp": []byte(`
package {
default_applicable_licenses: ["top_proprietary"],
}`),
@@ -132,7 +132,7 @@
{
name: "multiple licenses",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
package {
default_applicable_licenses: ["top_proprietary"],
}
@@ -162,12 +162,12 @@
name: "myrule",
licenses: ["top_allowed_as_notice", "top_proprietary"]
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
rule {
name: "arule",
licenses: ["top_proprietary"],
}`),
- "yetmore/Blueprints": []byte(`
+ "yetmore/Android.bp": []byte(`
package {
default_applicable_licenses: ["top_proprietary"],
}`),
diff --git a/android/licenses.go b/android/licenses.go
index 464ba49..d54f8f4 100644
--- a/android/licenses.go
+++ b/android/licenses.go
@@ -293,7 +293,7 @@
case "*android.soongConfigModuleTypeModule": // creates aliases for modules with licenses
case "*android.soongConfigModuleTypeImport": // creates aliases for modules with licenses
case "*android.soongConfigStringVariableDummyModule": // used for creating aliases
- case "*android.SoongConfigBoolVariableDummyModule": // used for creating aliases
+ case "*android.soongConfigBoolVariableDummyModule": // used for creating aliases
default:
return false
}
diff --git a/android/licenses_test.go b/android/licenses_test.go
index 8503310..d05b0a3 100644
--- a/android/licenses_test.go
+++ b/android/licenses_test.go
@@ -20,7 +20,7 @@
{
name: "invalid module type without licenses property",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_bad_module {
name: "libexample",
}`),
@@ -30,7 +30,7 @@
{
name: "license must exist",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
licenses: ["notice"],
@@ -41,7 +41,7 @@
{
name: "all good",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
license_kind {
name: "notice",
conditions: ["shownotice"],
@@ -58,12 +58,12 @@
name: "libexample1",
licenses: ["top_Apache2"],
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
mock_library {
name: "libnested",
licenses: ["top_Apache2"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
mock_library {
name: "libother",
licenses: ["top_Apache2"],
@@ -101,7 +101,7 @@
// Check that licenses is the union of the defaults modules.
name: "defaults union, basic",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
license_kind {
name: "top_notice",
conditions: ["notice"],
@@ -125,7 +125,7 @@
name: "libsamepackage",
deps: ["libexample"],
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
license_kind {
name: "nested_notice",
conditions: ["notice"],
@@ -140,7 +140,7 @@
name: "libnested",
deps: ["libexample"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
mock_library {
name: "libother",
deps: ["libexample"],
@@ -174,7 +174,7 @@
{
name: "defaults union, multiple defaults",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
license {
name: "top",
}
@@ -194,7 +194,7 @@
name: "libsamepackage",
deps: ["libexample"],
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
license {
name: "top_nested",
license_text: ["LICENSE.txt"],
@@ -203,7 +203,7 @@
name: "libnested",
deps: ["libexample"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
license {
name: "other",
}
@@ -211,7 +211,7 @@
name: "libother",
deps: ["libexample"],
}`),
- "outsider/Blueprints": []byte(`
+ "outsider/Android.bp": []byte(`
mock_library {
name: "liboutsider",
deps: ["libexample"],
@@ -251,7 +251,7 @@
{
name: "defaults_licenses invalid",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "top_defaults",
licenses: ["notice"],
@@ -262,7 +262,7 @@
{
name: "defaults_licenses overrides package default",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
package {
default_applicable_licenses: ["by_exception_only"],
}
@@ -298,7 +298,7 @@
{
name: "package default_applicable_licenses must exist",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
package {
default_applicable_licenses: ["notice"],
}`),
@@ -309,7 +309,7 @@
// This test relies on the default licenses being legacy_public.
name: "package default_applicable_licenses property used when no licenses specified",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
package {
default_applicable_licenses: ["top_notice"],
}
@@ -320,7 +320,7 @@
mock_library {
name: "libexample",
}`),
- "outsider/Blueprints": []byte(`
+ "outsider/Android.bp": []byte(`
mock_library {
name: "liboutsider",
deps: ["libexample"],
@@ -338,7 +338,7 @@
{
name: "package default_applicable_licenses not inherited to subpackages",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
package {
default_applicable_licenses: ["top_notice"],
}
@@ -348,7 +348,7 @@
mock_library {
name: "libexample",
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
package {
default_applicable_licenses: ["outsider"],
}
@@ -356,11 +356,11 @@
mock_library {
name: "libnested",
}`),
- "top/other/Blueprints": []byte(`
+ "top/other/Android.bp": []byte(`
mock_library {
name: "libother",
}`),
- "outsider/Blueprints": []byte(`
+ "outsider/Android.bp": []byte(`
license {
name: "outsider",
}
@@ -385,7 +385,7 @@
{
name: "verify that prebuilt dependencies are included",
fs: map[string][]byte{
- "prebuilts/Blueprints": []byte(`
+ "prebuilts/Android.bp": []byte(`
license {
name: "prebuilt"
}
@@ -394,7 +394,7 @@
licenses: ["prebuilt"],
}`),
"top/sources/source_file": nil,
- "top/sources/Blueprints": []byte(`
+ "top/sources/Android.bp": []byte(`
license {
name: "top_sources"
}
@@ -403,7 +403,7 @@
licenses: ["top_sources"],
}`),
"top/other/source_file": nil,
- "top/other/Blueprints": []byte(`
+ "top/other/Android.bp": []byte(`
source {
name: "other",
deps: [":module"],
@@ -419,7 +419,7 @@
{
name: "verify that prebuilt dependencies are ignored for licenses reasons (preferred)",
fs: map[string][]byte{
- "prebuilts/Blueprints": []byte(`
+ "prebuilts/Android.bp": []byte(`
license {
name: "prebuilt"
}
@@ -429,7 +429,7 @@
prefer: true,
}`),
"top/sources/source_file": nil,
- "top/sources/Blueprints": []byte(`
+ "top/sources/Android.bp": []byte(`
license {
name: "top_sources"
}
@@ -438,7 +438,7 @@
licenses: ["top_sources"],
}`),
"top/other/source_file": nil,
- "top/other/Blueprints": []byte(`
+ "top/other/Android.bp": []byte(`
source {
name: "other",
deps: [":module"],
diff --git a/android/module.go b/android/module.go
index b571f15..dd6a25a 100644
--- a/android/module.go
+++ b/android/module.go
@@ -405,6 +405,7 @@
PackageFile(installPath InstallPath, name string, srcPath Path) PackagingSpec
CheckbuildFile(srcPath Path)
+ TidyFile(srcPath Path)
InstallInData() bool
InstallInTestcases() bool
@@ -521,62 +522,6 @@
TransitivePackagingSpecs() []PackagingSpec
}
-// BazelTargetModule is a lightweight wrapper interface around Module for
-// bp2build conversion purposes.
-//
-// In bp2build's bootstrap.Main execution, Soong runs an alternate pipeline of
-// mutators that creates BazelTargetModules from regular Module objects,
-// performing the mapping from Soong properties to Bazel rule attributes in the
-// process. This process may optionally create additional BazelTargetModules,
-// resulting in a 1:many mapping.
-//
-// bp2build.Codegen is then responsible for visiting all modules in the graph,
-// filtering for BazelTargetModules, and code-generating BUILD targets from
-// them.
-type BazelTargetModule interface {
- Module
-
- bazelTargetModuleProperties() *bazel.BazelTargetModuleProperties
- SetBazelTargetModuleProperties(props bazel.BazelTargetModuleProperties)
-
- RuleClass() string
- BzlLoadLocation() string
-}
-
-// InitBazelTargetModule is a wrapper function that decorates BazelTargetModule
-// with property structs containing metadata for bp2build conversion.
-func InitBazelTargetModule(module BazelTargetModule) {
- module.AddProperties(module.bazelTargetModuleProperties())
- InitAndroidModule(module)
-}
-
-// BazelTargetModuleBase contains the property structs with metadata for
-// bp2build conversion.
-type BazelTargetModuleBase struct {
- ModuleBase
- Properties bazel.BazelTargetModuleProperties
-}
-
-// bazelTargetModuleProperties getter.
-func (btmb *BazelTargetModuleBase) bazelTargetModuleProperties() *bazel.BazelTargetModuleProperties {
- return &btmb.Properties
-}
-
-// SetBazelTargetModuleProperties setter for BazelTargetModuleProperties
-func (btmb *BazelTargetModuleBase) SetBazelTargetModuleProperties(props bazel.BazelTargetModuleProperties) {
- btmb.Properties = props
-}
-
-// RuleClass returns the rule class for this Bazel target
-func (b *BazelTargetModuleBase) RuleClass() string {
- return b.bazelTargetModuleProperties().Rule_class
-}
-
-// BzlLoadLocation returns the rule class for this Bazel target
-func (b *BazelTargetModuleBase) BzlLoadLocation() string {
- return b.bazelTargetModuleProperties().Bzl_load_location
-}
-
// Qualified id for a module
type qualifiedModuleName struct {
// The package (i.e. directory) in which the module is defined, without trailing /
@@ -986,10 +931,13 @@
// Device is built by default. Host and HostCross are not supported.
DeviceSupported = deviceSupported | deviceDefault
- // Device is built by default. Host and HostCross are supported.
+	// By default, _only_ the device variant is built. The device variant can be disabled with `device_supported: false`.
+	// Host and HostCross are disabled by default and can be enabled with `host_supported: true`.
HostAndDeviceSupported = hostSupported | hostCrossSupported | deviceSupported | deviceDefault
// Host, HostCross, and Device are built by default.
+ // Building Device can be disabled with `device_supported: false`
+ // Building Host and HostCross can be disabled with `host_supported: false`
HostAndDeviceDefault = hostSupported | hostCrossSupported | hostDefault |
deviceSupported | deviceDefault
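A minimal Android.bp sketch of the comments above (module name hypothetical): for a
`HostAndDeviceSupported` module type such as `cc_library`, only the device variant is
built unless host support is requested explicitly.

```
cc_library {
    name: "libexample",
    srcs: ["example.cpp"],
    // Build host (and host-cross) variants too; the device variant is built by
    // default and could instead be disabled with `device_supported: false`.
    host_supported: true,
}
```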
@@ -1187,6 +1135,7 @@
installFiles InstallPaths
installFilesDepSet *installPathsDepSet
checkbuildFiles Paths
+ tidyFiles Paths
packagingSpecs []PackagingSpec
packagingSpecsDepSet *packagingSpecsDepSet
noticeFiles Paths
@@ -1199,6 +1148,7 @@
// Only set on the final variant of each module
installTarget WritablePath
checkbuildTarget WritablePath
+ tidyTarget WritablePath
blueprintDir string
hooks hooks
@@ -1724,10 +1674,12 @@
func (m *ModuleBase) generateModuleTarget(ctx ModuleContext) {
var allInstalledFiles InstallPaths
var allCheckbuildFiles Paths
+ var allTidyFiles Paths
ctx.VisitAllModuleVariants(func(module Module) {
a := module.base()
allInstalledFiles = append(allInstalledFiles, a.installFiles...)
allCheckbuildFiles = append(allCheckbuildFiles, a.checkbuildFiles...)
+ allTidyFiles = append(allTidyFiles, a.tidyFiles...)
})
var deps Paths
@@ -1751,6 +1703,13 @@
deps = append(deps, m.checkbuildTarget)
}
+ if len(allTidyFiles) > 0 {
+ name := namespacePrefix + ctx.ModuleName() + "-tidy"
+ ctx.Phony(name, allTidyFiles...)
+ m.tidyTarget = PathForPhony(ctx, name)
+ deps = append(deps, m.tidyTarget)
+ }
+
if len(deps) > 0 {
suffix := ""
if ctx.Config().KatiEnabled() {
@@ -1959,6 +1918,7 @@
m.installFiles = append(m.installFiles, ctx.installFiles...)
m.checkbuildFiles = append(m.checkbuildFiles, ctx.checkbuildFiles...)
+ m.tidyFiles = append(m.tidyFiles, ctx.tidyFiles...)
m.packagingSpecs = append(m.packagingSpecs, ctx.packagingSpecs...)
for k, v := range ctx.phonies {
m.phonies[k] = append(m.phonies[k], v...)
@@ -2157,6 +2117,7 @@
packagingSpecs []PackagingSpec
installFiles InstallPaths
checkbuildFiles Paths
+ tidyFiles Paths
module Module
phonies map[string]Paths
@@ -2889,6 +2850,10 @@
m.checkbuildFiles = append(m.checkbuildFiles, srcPath)
}
+func (m *moduleContext) TidyFile(srcPath Path) {
+ m.tidyFiles = append(m.tidyFiles, srcPath)
+}
+
func (m *moduleContext) blueprintModuleContext() blueprint.ModuleContext {
return m.bp
}
@@ -3147,19 +3112,49 @@
type buildTargetSingleton struct{}
+func addAncestors(ctx SingletonContext, dirMap map[string]Paths, mmName func(string) string) []string {
+ // Ensure ancestor directories are in dirMap
+ // Make directories build their direct subdirectories
+ dirs := SortedStringKeys(dirMap)
+ for _, dir := range dirs {
+ dir := parentDir(dir)
+ for dir != "." && dir != "/" {
+ if _, exists := dirMap[dir]; exists {
+ break
+ }
+ dirMap[dir] = nil
+ dir = parentDir(dir)
+ }
+ }
+ dirs = SortedStringKeys(dirMap)
+ for _, dir := range dirs {
+ p := parentDir(dir)
+ if p != "." && p != "/" {
+ dirMap[p] = append(dirMap[p], PathForPhony(ctx, mmName(dir)))
+ }
+ }
+ return SortedStringKeys(dirMap)
+}
+
func (c *buildTargetSingleton) GenerateBuildActions(ctx SingletonContext) {
var checkbuildDeps Paths
+ var tidyDeps Paths
mmTarget := func(dir string) string {
return "MODULES-IN-" + strings.Replace(filepath.Clean(dir), "/", "-", -1)
}
+ mmTidyTarget := func(dir string) string {
+ return "tidy-" + strings.Replace(filepath.Clean(dir), "/", "-", -1)
+ }
modulesInDir := make(map[string]Paths)
+ tidyModulesInDir := make(map[string]Paths)
ctx.VisitAllModules(func(module Module) {
blueprintDir := module.base().blueprintDir
installTarget := module.base().installTarget
checkbuildTarget := module.base().checkbuildTarget
+ tidyTarget := module.base().tidyTarget
if checkbuildTarget != nil {
checkbuildDeps = append(checkbuildDeps, checkbuildTarget)
@@ -3169,6 +3164,16 @@
if installTarget != nil {
modulesInDir[blueprintDir] = append(modulesInDir[blueprintDir], installTarget)
}
+
+ if tidyTarget != nil {
+ tidyDeps = append(tidyDeps, tidyTarget)
+ // tidyTarget is in modulesInDir so it will be built with "mm".
+ modulesInDir[blueprintDir] = append(modulesInDir[blueprintDir], tidyTarget)
+ // tidyModulesInDir contains tidyTarget but not checkbuildTarget
+ // or installTarget, so tidy targets in a directory can be built
+ // without other checkbuild or install targets.
+ tidyModulesInDir[blueprintDir] = append(tidyModulesInDir[blueprintDir], tidyTarget)
+ }
})
suffix := ""
@@ -3179,31 +3184,24 @@
// Create a top-level checkbuild target that depends on all modules
ctx.Phony("checkbuild"+suffix, checkbuildDeps...)
+ // Create a top-level tidy target that depends on all modules
+ ctx.Phony("tidy"+suffix, tidyDeps...)
+
+ dirs := addAncestors(ctx, tidyModulesInDir, mmTidyTarget)
+
+ // Kati does not generate tidy-* phony targets yet.
+ // Create a tidy-<directory> target that depends on all subdirectories
+ // and modules in the directory.
+ for _, dir := range dirs {
+ ctx.Phony(mmTidyTarget(dir), tidyModulesInDir[dir]...)
+ }
+
// Make will generate the MODULES-IN-* targets
if ctx.Config().KatiEnabled() {
return
}
- // Ensure ancestor directories are in modulesInDir
- dirs := SortedStringKeys(modulesInDir)
- for _, dir := range dirs {
- dir := parentDir(dir)
- for dir != "." && dir != "/" {
- if _, exists := modulesInDir[dir]; exists {
- break
- }
- modulesInDir[dir] = nil
- dir = parentDir(dir)
- }
- }
-
- // Make directories build their direct subdirectories
- for _, dir := range dirs {
- p := parentDir(dir)
- if p != "." && p != "/" {
- modulesInDir[p] = append(modulesInDir[p], PathForPhony(ctx, mmTarget(dir)))
- }
- }
+ dirs = addAncestors(ctx, modulesInDir, mmTarget)
// Create a MODULES-IN-<directory> target that depends on all modules in a directory, and
// depends on the MODULES-IN-* targets of all of its subdirectories that contain Android.bp
diff --git a/android/override_module.go b/android/override_module.go
index e72cb78..51e74d4 100644
--- a/android/override_module.go
+++ b/android/override_module.go
@@ -295,7 +295,7 @@
}
func overridableModuleDepsMutator(ctx BottomUpMutatorContext) {
- if b, ok := ctx.Module().(OverridableModule); ok {
+ if b, ok := ctx.Module().(OverridableModule); ok && b.Enabled() {
b.OverridablePropertiesDepsMutator(ctx)
}
}
diff --git a/android/package_test.go b/android/package_test.go
index 3bd30cc..7ea10a4 100644
--- a/android/package_test.go
+++ b/android/package_test.go
@@ -13,7 +13,7 @@
{
name: "package must not accept visibility and name properties",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
package {
name: "package",
visibility: ["//visibility:private"],
@@ -21,21 +21,21 @@
}`),
},
expectedErrors: []string{
- `top/Blueprints:5:14: unrecognized property "licenses"`,
- `top/Blueprints:3:10: unrecognized property "name"`,
- `top/Blueprints:4:16: unrecognized property "visibility"`,
+ `top/Android.bp:5:14: unrecognized property "licenses"`,
+ `top/Android.bp:3:10: unrecognized property "name"`,
+ `top/Android.bp:4:16: unrecognized property "visibility"`,
},
},
{
name: "multiple packages in separate directories",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
package {
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
package {
}`),
- "other/nested/Blueprints": []byte(`
+ "other/nested/Android.bp": []byte(`
package {
}`),
},
@@ -43,7 +43,7 @@
{
name: "package must not be specified more than once per package",
fs: map[string][]byte{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
package {
default_visibility: ["//visibility:private"],
default_applicable_licenses: ["license"],
diff --git a/android/paths.go b/android/paths.go
index 71caaab..763cd7c 100644
--- a/android/paths.go
+++ b/android/paths.go
@@ -186,13 +186,13 @@
// A standard build has the following structure:
// ../top/
// out/ - make install files go here.
- // out/soong - this is the buildDir passed to NewTestConfig()
+ // out/soong - this is the soongOutDir passed to NewTestConfig()
// ... - the source files
//
// This function converts a path so that it appears relative to the ../top/ directory, i.e.
- // * Make install paths, which have the pattern "buildDir/../<path>" are converted into the top
+ // * Make install paths, which have the pattern "soongOutDir/../<path>" are converted into the top
// relative path "out/<path>"
- // * Soong install paths and other writable paths, which have the pattern "buildDir/<path>" are
+ // * Soong install paths and other writable paths, which have the pattern "soongOutDir/<path>" are
// converted into the top relative path "out/soong/<path>".
// * Source paths are already relative to the top.
// * Phony paths are not relative to anything.
@@ -211,7 +211,7 @@
Path
// return the path to the build directory.
- getBuildDir() string
+ getSoongOutDir() string
// the writablePath method doesn't directly do anything,
// but it allows a struct to distinguish between whether or not it implements the WritablePath interface
@@ -992,7 +992,7 @@
}
// absolute path already checked by validateSafePath
- if strings.HasPrefix(ret.String(), ctx.Config().buildDir) {
+ if strings.HasPrefix(ret.String(), ctx.Config().soongOutDir) {
return ret, fmt.Errorf("source path %q is in output", ret.String())
}
@@ -1008,7 +1008,7 @@
}
// absolute path already checked by validatePath
- if strings.HasPrefix(ret.String(), ctx.Config().buildDir) {
+ if strings.HasPrefix(ret.String(), ctx.Config().soongOutDir) {
return ret, fmt.Errorf("source path %q is in output", ret.String())
}
@@ -1150,8 +1150,8 @@
type OutputPath struct {
basePath
- // The soong build directory, i.e. Config.BuildDir()
- buildDir string
+ // The soong build directory, i.e. Config.SoongOutDir()
+ soongOutDir string
fullPath string
}
@@ -1167,8 +1167,8 @@
return p
}
-func (p OutputPath) getBuildDir() string {
- return p.buildDir
+func (p OutputPath) getSoongOutDir() string {
+ return p.soongOutDir
}
func (p OutputPath) RelativeToTop() Path {
@@ -1176,8 +1176,8 @@
}
func (p OutputPath) outputPathRelativeToTop() OutputPath {
- p.fullPath = StringPathRelativeToTop(p.buildDir, p.fullPath)
- p.buildDir = OutSoongDir
+ p.fullPath = StringPathRelativeToTop(p.soongOutDir, p.fullPath)
+ p.soongOutDir = OutSoongDir
return p
}
@@ -1218,12 +1218,12 @@
if err != nil {
reportPathError(ctx, err)
}
- fullPath := filepath.Join(ctx.Config().buildDir, path)
+ fullPath := filepath.Join(ctx.Config().soongOutDir, path)
path = fullPath[len(fullPath)-len(path):]
- return OutputPath{basePath{path, ""}, ctx.Config().buildDir, fullPath}
+ return OutputPath{basePath{path, ""}, ctx.Config().soongOutDir, fullPath}
}
-// PathsForOutput returns Paths rooted from buildDir
+// PathsForOutput returns Paths rooted from soongOutDir
func PathsForOutput(ctx PathContext, paths []string) WritablePaths {
ret := make(WritablePaths, len(paths))
for i, path := range paths {
@@ -1544,8 +1544,8 @@
type InstallPath struct {
basePath
- // The soong build directory, i.e. Config.BuildDir()
- buildDir string
+ // The soong build directory, i.e. Config.SoongOutDir()
+ soongOutDir string
// partitionDir is the part of the InstallPath that is automatically determined according to the context.
// For example, it is host/<os>-<arch> for host modules, and target/product/<device>/<partition> for device modules.
@@ -1565,12 +1565,12 @@
func (p InstallPath) RelativeToTop() Path {
ensureTestOnly()
- p.buildDir = OutSoongDir
+ p.soongOutDir = OutSoongDir
return p
}
-func (p InstallPath) getBuildDir() string {
- return p.buildDir
+func (p InstallPath) getSoongOutDir() string {
+ return p.soongOutDir
}
func (p InstallPath) ReplaceExtension(ctx PathContext, ext string) OutputPath {
@@ -1585,9 +1585,9 @@
func (p InstallPath) String() string {
if p.makePath {
// Make path starts with out/ instead of out/soong.
- return filepath.Join(p.buildDir, "../", p.path)
+ return filepath.Join(p.soongOutDir, "../", p.path)
} else {
- return filepath.Join(p.buildDir, p.path)
+ return filepath.Join(p.soongOutDir, p.path)
}
}
@@ -1596,9 +1596,9 @@
// The ./soong is dropped if the install path is for Make.
func (p InstallPath) PartitionDir() string {
if p.makePath {
- return filepath.Join(p.buildDir, "../", p.partitionDir)
+ return filepath.Join(p.soongOutDir, "../", p.partitionDir)
} else {
- return filepath.Join(p.buildDir, p.partitionDir)
+ return filepath.Join(p.soongOutDir, p.partitionDir)
}
}
@@ -1694,7 +1694,7 @@
base := InstallPath{
basePath: basePath{partionPath, ""},
- buildDir: ctx.Config().buildDir,
+ soongOutDir: ctx.Config().soongOutDir,
partitionDir: partionPath,
makePath: false,
}
@@ -1705,7 +1705,7 @@
func pathForNdkOrSdkInstall(ctx PathContext, prefix string, paths []string) InstallPath {
base := InstallPath{
basePath: basePath{prefix, ""},
- buildDir: ctx.Config().buildDir,
+ soongOutDir: ctx.Config().soongOutDir,
partitionDir: prefix,
makePath: false,
}
@@ -1851,7 +1851,7 @@
func (p PhonyPath) writablePath() {}
-func (p PhonyPath) getBuildDir() string {
+func (p PhonyPath) getSoongOutDir() string {
// A phony path cannot contain any / so it cannot be relative to the build directory.
return ""
}
diff --git a/android/queryview.go b/android/queryview.go
deleted file mode 100644
index 224652e..0000000
--- a/android/queryview.go
+++ /dev/null
@@ -1,112 +0,0 @@
-// Copyright 2020 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package android
-
-import (
- "fmt"
- "os"
- "strings"
-
- "github.com/google/blueprint"
-)
-
-// The Bazel QueryView singleton is responsible for generating the Ninja actions
-// for calling the soong_build primary builder in the main build.ninja file.
-func init() {
- RegisterSingletonType("bazel_queryview", BazelQueryViewSingleton)
-}
-
-// BazelQueryViewSingleton is the singleton responsible for registering the
-// soong_build build statement that will convert the Soong module graph after
-// applying *all* mutators, enabing the feature to query the final state of the
-// Soong graph. This mode is meant for querying the build graph state, and not meant
-// for generating BUILD files to be checked in.
-func BazelQueryViewSingleton() Singleton {
- return &bazelQueryViewSingleton{}
-}
-
-// BazelConverterSingleton is the singleton responsible for registering the soong_build
-// build statement that will convert the Soong module graph by applying an alternate
-// pipeline of mutators, with the goal of reaching semantic equivalence between the original
-// Blueprint and final BUILD files. Using this mode, the goal is to be able to
-// build with these BUILD files directly in the source tree.
-func BazelConverterSingleton() Singleton {
- return &bazelConverterSingleton{}
-}
-
-type bazelQueryViewSingleton struct{}
-type bazelConverterSingleton struct{}
-
-func generateBuildActionsForBazelConversion(ctx SingletonContext, converterMode bool) {
- name := "queryview"
- descriptionTemplate := "[EXPERIMENTAL, PRE-PRODUCTION] Creating the Bazel QueryView workspace with %s at $outDir"
-
- // Create a build and rule statement, using the Bazel QueryView's WORKSPACE
- // file as the output file marker.
- var deps Paths
- moduleListFilePath := pathForBuildToolDep(ctx, ctx.Config().moduleListFile)
- deps = append(deps, moduleListFilePath)
- deps = append(deps, pathForBuildToolDep(ctx, ctx.Config().ProductVariablesFileName))
-
- bazelQueryViewDirectory := PathForOutput(ctx, name)
- bazelQueryViewWorkspaceFile := bazelQueryViewDirectory.Join(ctx, "WORKSPACE")
- primaryBuilder := primaryBuilderPath(ctx)
- bazelQueryView := ctx.Rule(pctx, "bazelQueryView",
- blueprint.RuleParams{
- Command: fmt.Sprintf(
- `rm -rf "${outDir}/"* && `+
- `mkdir -p "${outDir}" && `+
- `echo WORKSPACE: $$(cat "%s") > "${outDir}/.queryview-depfile.d" && `+
- `BUILDER="%s" && `+
- `echo BUILDER=$$BUILDER && `+
- `cd "$$(dirname "$$BUILDER")" && `+
- `echo PWD=$$PWD && `+
- `ABSBUILDER="$$PWD/$$(basename "$$BUILDER")" && `+
- `echo ABSBUILDER=$$ABSBUILDER && `+
- `cd / && `+
- `env -i "$$ABSBUILDER" --bazel_queryview_dir "${outDir}" "%s"`,
- moduleListFilePath.String(), // Use the contents of Android.bp.list as the depfile.
- primaryBuilder.String(),
- strings.Join(os.Args[1:], "\" \""),
- ),
- CommandDeps: []string{primaryBuilder.String()},
- Description: fmt.Sprintf(
- descriptionTemplate,
- primaryBuilder.Base()),
- Deps: blueprint.DepsGCC,
- Depfile: "${outDir}/.queryview-depfile.d",
- },
- "outDir")
-
- ctx.Build(pctx, BuildParams{
- Rule: bazelQueryView,
- Output: bazelQueryViewWorkspaceFile,
- Inputs: deps,
- Args: map[string]string{
- "outDir": bazelQueryViewDirectory.String(),
- },
- })
-
- // Add a phony target for generating the workspace
- ctx.Phony(name, bazelQueryViewWorkspaceFile)
-}
-
-func (c *bazelQueryViewSingleton) GenerateBuildActions(ctx SingletonContext) {
- generateBuildActionsForBazelConversion(ctx, false)
-}
-
-func (c *bazelConverterSingleton) GenerateBuildActions(ctx SingletonContext) {
- generateBuildActionsForBazelConversion(ctx, true)
-}
diff --git a/android/sdk.go b/android/sdk.go
index da740f3..b8f76c1 100644
--- a/android/sdk.go
+++ b/android/sdk.go
@@ -401,26 +401,26 @@
ExportMember() bool
}
-var _ SdkMemberTypeDependencyTag = (*sdkMemberDependencyTag)(nil)
-var _ ReplaceSourceWithPrebuilt = (*sdkMemberDependencyTag)(nil)
+var _ SdkMemberTypeDependencyTag = (*sdkMemberTypeDependencyTag)(nil)
+var _ ReplaceSourceWithPrebuilt = (*sdkMemberTypeDependencyTag)(nil)
-type sdkMemberDependencyTag struct {
+type sdkMemberTypeDependencyTag struct {
blueprint.BaseDependencyTag
memberType SdkMemberType
export bool
}
-func (t *sdkMemberDependencyTag) SdkMemberType(_ Module) SdkMemberType {
+func (t *sdkMemberTypeDependencyTag) SdkMemberType(_ Module) SdkMemberType {
return t.memberType
}
-func (t *sdkMemberDependencyTag) ExportMember() bool {
+func (t *sdkMemberTypeDependencyTag) ExportMember() bool {
return t.export
}
// Prevent dependencies from the sdk/module_exports onto their members from being
// replaced with a preferred prebuilt.
-func (t *sdkMemberDependencyTag) ReplaceSourceWithPrebuilt() bool {
+func (t *sdkMemberTypeDependencyTag) ReplaceSourceWithPrebuilt() bool {
return false
}
@@ -428,7 +428,7 @@
// dependencies added by the tag to be added to the sdk as the specified SdkMemberType and exported
// (or not) as specified by the export parameter.
func DependencyTagForSdkMemberType(memberType SdkMemberType, export bool) SdkMemberTypeDependencyTag {
- return &sdkMemberDependencyTag{memberType: memberType, export: export}
+ return &sdkMemberTypeDependencyTag{memberType: memberType, export: export}
}
// Interface that must be implemented for every type that can be a member of an
@@ -475,7 +475,7 @@
// properties. The dependencies must be added with the supplied tag.
//
// The BottomUpMutatorContext provided is for the SDK module.
- AddDependencies(mctx BottomUpMutatorContext, dependencyTag blueprint.DependencyTag, names []string)
+ AddDependencies(ctx SdkDependencyContext, dependencyTag blueprint.DependencyTag, names []string)
// Return true if the supplied module is an instance of this member type.
//
@@ -529,6 +529,12 @@
CreateVariantPropertiesStruct() SdkMemberProperties
}
+// SdkDependencyContext provides access to information needed by the SdkMemberType.AddDependencies()
+// implementations.
+type SdkDependencyContext interface {
+ BottomUpMutatorContext
+}
+
// Base type for SdkMemberType implementations.
type SdkMemberTypeBase struct {
PropertyName string
@@ -570,9 +576,6 @@
type SdkMemberTypesRegistry struct {
// The list of types sorted by property name.
list []SdkMemberType
-
- // The key that uniquely identifies this registry instance.
- key OnceKey
}
func (r *SdkMemberTypesRegistry) copyAndAppend(memberType SdkMemberType) *SdkMemberTypesRegistry {
@@ -592,18 +595,9 @@
return t1.SdkPropertyName() < t2.SdkPropertyName()
})
- // Generate a key that identifies the slice of SdkMemberTypes by joining the property names
- // from all the SdkMemberType .
- var properties []string
- for _, t := range list {
- properties = append(properties, t.SdkPropertyName())
- }
- key := NewOnceKey(strings.Join(properties, "|"))
-
// Create a new registry so the pointer uniquely identifies the set of registered types.
return &SdkMemberTypesRegistry{
list: list,
- key: key,
}
}
@@ -616,8 +610,10 @@
return NewCustomOnceKey(r)
}
-// The set of registered SdkMemberTypes, one for sdk module and one for module_exports.
+// The set of registered SdkMemberTypes for module_exports modules.
var ModuleExportsMemberTypes = &SdkMemberTypesRegistry{}
+
+// The set of registered SdkMemberTypes for sdk modules.
var SdkMemberTypes = &SdkMemberTypesRegistry{}
// Register an SdkMemberType object to allow them to be used in the sdk and sdk_snapshot module
diff --git a/android/singleton.go b/android/singleton.go
index bb6614d..7ff96c9 100644
--- a/android/singleton.go
+++ b/android/singleton.go
@@ -54,10 +54,10 @@
RequireNinjaVersion(major, minor, micro int)
- // SetNinjaBuildDir sets the value of the top-level "builddir" Ninja variable
+ // SetOutDir sets the value of the top-level "builddir" Ninja variable
// that controls where Ninja stores its build log files. This value can be
	// set at most once for a single build; later calls are ignored.
- SetNinjaBuildDir(pctx PackageContext, value string)
+ SetOutDir(pctx PackageContext, value string)
// Eval takes a string with embedded ninja variables, and returns a string
	// with all of the variables recursively expanded. Any variable references
@@ -180,8 +180,8 @@
addPhony(s.Config(), name, deps...)
}
-func (s *singletonContextAdaptor) SetNinjaBuildDir(pctx PackageContext, value string) {
- s.SingletonContext.SetNinjaBuildDir(pctx.PackageContext, value)
+func (s *singletonContextAdaptor) SetOutDir(pctx PackageContext, value string) {
+ s.SingletonContext.SetOutDir(pctx.PackageContext, value)
}
func (s *singletonContextAdaptor) Eval(pctx PackageContext, ninjaStr string) (string, error) {
diff --git a/android/soong_config_modules.go b/android/soong_config_modules.go
index 289e910..17f6d66 100644
--- a/android/soong_config_modules.go
+++ b/android/soong_config_modules.go
@@ -122,15 +122,10 @@
// }
//
// If an acme BoardConfig.mk file contained:
-//
-// SOONG_CONFIG_NAMESPACES += acme
-// SOONG_CONFIG_acme += \
-// board \
-// feature \
-//
-// SOONG_CONFIG_acme_board := soc_a
-// SOONG_CONFIG_acme_feature := true
-// SOONG_CONFIG_acme_width := 200
+//     $(call add_soong_config_namespace, acme)
+// $(call add_soong_config_var_value, acme, board, soc_a)
+// $(call add_soong_config_var_value, acme, feature, true)
+// $(call add_soong_config_var_value, acme, width, 200)
//
// Then libacme_foo would build with cflags "-DGENERIC -DSOC_A -DFEATURE -DWIDTH=200".
//
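For context, the comment above assumes an Android.bp setup defined earlier in this file's documentation (not shown in this hunk). A minimal, illustrative sketch of that setup is below; the `acme`, `board`, `feature`, `width`, and `libacme_foo` names follow the comment, while the remaining values are placeholders:

```
soong_config_module_type {
    name: "acme_cc_defaults",
    module_type: "cc_defaults",
    config_namespace: "acme",
    variables: ["board"],
    bool_variables: ["feature"],
    value_variables: ["width"],
    properties: ["cflags"],
}

soong_config_string_variable {
    name: "board",
    values: ["soc_a", "soc_b"],
}

acme_cc_defaults {
    name: "acme_defaults",
    cflags: ["-DGENERIC"],
    soong_config_variables: {
        board: {
            soc_a: { cflags: ["-DSOC_A"] },
            soc_b: { cflags: ["-DSOC_B"] },
        },
        feature: { cflags: ["-DFEATURE"] },
        width: { cflags: ["-DWIDTH=%s"] },
    },
}

cc_library {
    name: "libacme_foo",
    defaults: ["acme_defaults"],
}
```

With the BoardConfig.mk values set as in the comment, `libacme_foo` would then pick up the cflags listed there.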
diff --git a/android/test_asserts.go b/android/test_asserts.go
index edeb408..064f656 100644
--- a/android/test_asserts.go
+++ b/android/test_asserts.go
@@ -77,14 +77,14 @@
// StringPathRelativeToTop on the actual string path.
func AssertStringPathRelativeToTopEquals(t *testing.T, message string, config Config, expected string, actual string) {
t.Helper()
- AssertStringEquals(t, message, expected, StringPathRelativeToTop(config.buildDir, actual))
+ AssertStringEquals(t, message, expected, StringPathRelativeToTop(config.soongOutDir, actual))
}
// AssertStringPathsRelativeToTopEquals checks if the expected value is equal to the result of
// calling StringPathsRelativeToTop on the actual string paths.
func AssertStringPathsRelativeToTopEquals(t *testing.T, message string, config Config, expected []string, actual []string) {
t.Helper()
- AssertDeepEquals(t, message, expected, StringPathsRelativeToTop(config.buildDir, actual))
+ AssertDeepEquals(t, message, expected, StringPathsRelativeToTop(config.soongOutDir, actual))
}
// AssertErrorMessageEquals checks if the error is not nil and has the expected message. If it does
diff --git a/android/testing.go b/android/testing.go
index 6ba8e3c..e25e5c5 100644
--- a/android/testing.go
+++ b/android/testing.go
@@ -664,15 +664,15 @@
// containing at most one instance of the temporary build directory at the start of the path while
// this assumes that there can be any number at any position.
func normalizeStringRelativeToTop(config Config, s string) string {
- // The buildDir usually looks something like: /tmp/testFoo2345/001
+ // The soongOutDir usually looks something like: /tmp/testFoo2345/001
//
- // Replace any usage of the buildDir with out/soong, e.g. replace "/tmp/testFoo2345/001" with
+ // Replace any usage of the soongOutDir with out/soong, e.g. replace "/tmp/testFoo2345/001" with
// "out/soong".
- outSoongDir := filepath.Clean(config.buildDir)
+ outSoongDir := filepath.Clean(config.soongOutDir)
re := regexp.MustCompile(`\Q` + outSoongDir + `\E\b`)
s = re.ReplaceAllString(s, "out/soong")
- // Replace any usage of the buildDir/.. with out, e.g. replace "/tmp/testFoo2345" with
+ // Replace any usage of the soongOutDir/.. with out, e.g. replace "/tmp/testFoo2345" with
// "out". This must come after the previous replacement otherwise this would replace
// "/tmp/testFoo2345/001" with "out/001" instead of "out/soong".
outDir := filepath.Dir(outSoongDir)
@@ -991,7 +991,7 @@
}
p := path.String()
if w, ok := path.(WritablePath); ok {
- rel, err := filepath.Rel(w.getBuildDir(), p)
+ rel, err := filepath.Rel(w.getSoongOutDir(), p)
if err != nil {
panic(err)
}
diff --git a/android/variable.go b/android/variable.go
index 5cf9aa8..a1af527 100644
--- a/android/variable.go
+++ b/android/variable.go
@@ -46,6 +46,10 @@
Java_resource_dirs []string
}
+ Platform_sdk_extension_version struct {
+ Cmd *string
+ }
+
// unbundled_build is a catch-all property to annotate modules that don't build in one or
// more unbundled branches, usually due to dependencies missing from the manifest.
Unbundled_build struct {
@@ -108,6 +112,8 @@
Static_libs []string
Whole_static_libs []string
Shared_libs []string
+
+ Cmdline []string
}
	// eng is true for -eng builds, and can be used to turn on additional heavyweight debugging
@@ -170,6 +176,7 @@
Platform_sdk_codename *string `json:",omitempty"`
Platform_sdk_version_or_codename *string `json:",omitempty"`
Platform_sdk_final *bool `json:",omitempty"`
+ Platform_sdk_extension_version *int `json:",omitempty"`
Platform_version_active_codenames []string `json:",omitempty"`
Platform_vndk_version *string `json:",omitempty"`
Platform_systemsdk_versions []string `json:",omitempty"`
diff --git a/android/visibility_test.go b/android/visibility_test.go
index ffd7909..714c92a 100644
--- a/android/visibility_test.go
+++ b/android/visibility_test.go
@@ -16,7 +16,7 @@
{
name: "invalid visibility: empty list",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: [],
@@ -27,7 +27,7 @@
{
name: "invalid visibility: empty rule",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: [""],
@@ -38,7 +38,7 @@
{
name: "invalid visibility: unqualified",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: ["target"],
@@ -49,7 +49,7 @@
{
name: "invalid visibility: empty namespace",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: ["//"],
@@ -60,7 +60,7 @@
{
name: "invalid visibility: empty module",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: [":"],
@@ -71,7 +71,7 @@
{
name: "invalid visibility: empty namespace and module",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: ["//:"],
@@ -82,7 +82,7 @@
{
name: "//visibility:unknown",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: ["//visibility:unknown"],
@@ -93,7 +93,7 @@
{
name: "//visibility:xxx mixed",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: ["//visibility:public", "//namespace"],
@@ -114,7 +114,7 @@
{
name: "//visibility:legacy_public",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: ["//visibility:legacy_public"],
@@ -130,7 +130,7 @@
// the current directory, a nested directory and a directory in a separate tree.
name: "//visibility:public",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: ["//visibility:public"],
@@ -140,12 +140,12 @@
name: "libsamepackage",
deps: ["libexample"],
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
mock_library {
name: "libnested",
deps: ["libexample"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
mock_library {
name: "libother",
deps: ["libexample"],
@@ -157,7 +157,7 @@
// directory only.
name: "//visibility:private",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: ["//visibility:private"],
@@ -167,12 +167,12 @@
name: "libsamepackage",
deps: ["libexample"],
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
mock_library {
name: "libnested",
deps: ["libexample"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
mock_library {
name: "libother",
deps: ["libexample"],
@@ -189,7 +189,7 @@
// Verify that :__pkg__ allows the module to be referenced from the current directory only.
name: ":__pkg__",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: [":__pkg__"],
@@ -199,12 +199,12 @@
name: "libsamepackage",
deps: ["libexample"],
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
mock_library {
name: "libnested",
deps: ["libexample"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
mock_library {
name: "libother",
deps: ["libexample"],
@@ -222,7 +222,7 @@
		// the top/nested directory only, not a subdirectory of top/nested and not the peak directory.
name: "//top/nested",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: ["//top/nested"],
@@ -232,17 +232,17 @@
name: "libsamepackage",
deps: ["libexample"],
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
mock_library {
name: "libnested",
deps: ["libexample"],
}`),
- "top/nested/again/Blueprints": []byte(`
+ "top/nested/again/Android.bp": []byte(`
mock_library {
name: "libnestedagain",
deps: ["libexample"],
}`),
- "peak/Blueprints": []byte(`
+ "peak/Android.bp": []byte(`
mock_library {
name: "libother",
deps: ["libexample"],
@@ -260,7 +260,7 @@
		// and subdirectories but nowhere else.
name: ":__subpackages__",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: [":__subpackages__"],
@@ -270,12 +270,12 @@
name: "libsamepackage",
deps: ["libexample"],
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
mock_library {
name: "libnested",
deps: ["libexample"],
}`),
- "peak/other/Blueprints": []byte(`
+ "peak/other/Android.bp": []byte(`
mock_library {
name: "libother",
deps: ["libexample"],
@@ -291,7 +291,7 @@
		// directory and subdirectories but nowhere else.
name: "//top/nested:__subpackages__",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: ["//top/nested:__subpackages__", "//other"],
@@ -301,12 +301,12 @@
name: "libsamepackage",
deps: ["libexample"],
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
mock_library {
name: "libnested",
deps: ["libexample"],
}`),
- "top/other/Blueprints": []byte(`
+ "top/other/Android.bp": []byte(`
mock_library {
name: "libother",
deps: ["libexample"],
@@ -322,7 +322,7 @@
// the current directory, top/nested and peak and all its subpackages.
name: `["//top/nested", "//peak:__subpackages__"]`,
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: ["//top/nested", "//peak:__subpackages__"],
@@ -332,12 +332,12 @@
name: "libsamepackage",
deps: ["libexample"],
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
mock_library {
name: "libnested",
deps: ["libexample"],
}`),
- "peak/other/Blueprints": []byte(`
+ "peak/other/Android.bp": []byte(`
mock_library {
name: "libother",
deps: ["libexample"],
@@ -348,7 +348,7 @@
// Verify that //vendor... cannot be used outside vendor apart from //vendor:__subpackages__
name: `//vendor`,
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: ["//vendor:__subpackages__"],
@@ -358,13 +358,13 @@
name: "libsamepackage",
visibility: ["//vendor/apps/AcmeSettings"],
}`),
- "vendor/Blueprints": []byte(`
+ "vendor/Android.bp": []byte(`
mock_library {
name: "libvendorexample",
deps: ["libexample"],
visibility: ["//vendor/nested"],
}`),
- "vendor/nested/Blueprints": []byte(`
+ "vendor/nested/Android.bp": []byte(`
mock_library {
name: "libvendornested",
deps: ["libexample", "libvendorexample"],
@@ -382,7 +382,7 @@
// Check that visibility is the union of the defaults modules.
name: "defaults union, basic",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults",
visibility: ["//other"],
@@ -396,17 +396,17 @@
name: "libsamepackage",
deps: ["libexample"],
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
mock_library {
name: "libnested",
deps: ["libexample"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
mock_library {
name: "libother",
deps: ["libexample"],
}`),
- "outsider/Blueprints": []byte(`
+ "outsider/Android.bp": []byte(`
mock_library {
name: "liboutsider",
deps: ["libexample"],
@@ -420,7 +420,7 @@
{
name: "defaults union, multiple defaults",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults_1",
visibility: ["//other"],
@@ -437,17 +437,17 @@
name: "libsamepackage",
deps: ["libexample"],
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
mock_library {
name: "libnested",
deps: ["libexample"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
mock_library {
name: "libother",
deps: ["libexample"],
}`),
- "outsider/Blueprints": []byte(`
+ "outsider/Android.bp": []byte(`
mock_library {
name: "liboutsider",
deps: ["libexample"],
@@ -461,7 +461,7 @@
{
name: "//visibility:public mixed with other in defaults",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults",
visibility: ["//visibility:public", "//namespace"],
@@ -479,7 +479,7 @@
{
name: "//visibility:public overriding defaults",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults",
visibility: ["//namespace"],
@@ -489,7 +489,7 @@
visibility: ["//visibility:public"],
defaults: ["libexample_defaults"],
}`),
- "outsider/Blueprints": []byte(`
+ "outsider/Android.bp": []byte(`
mock_library {
name: "liboutsider",
deps: ["libexample"],
@@ -502,7 +502,7 @@
{
name: "//visibility:public mixed with other from different defaults 1",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults_1",
visibility: ["//namespace"],
@@ -515,7 +515,7 @@
name: "libexample",
defaults: ["libexample_defaults_1", "libexample_defaults_2"],
}`),
- "outsider/Blueprints": []byte(`
+ "outsider/Android.bp": []byte(`
mock_library {
name: "liboutsider",
deps: ["libexample"],
@@ -525,7 +525,7 @@
{
name: "//visibility:public mixed with other from different defaults 2",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults_1",
visibility: ["//visibility:public"],
@@ -538,7 +538,7 @@
name: "libexample",
defaults: ["libexample_defaults_1", "libexample_defaults_2"],
}`),
- "outsider/Blueprints": []byte(`
+ "outsider/Android.bp": []byte(`
mock_library {
name: "liboutsider",
deps: ["libexample"],
@@ -548,7 +548,7 @@
{
name: "//visibility:private in defaults",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults",
visibility: ["//visibility:private"],
@@ -561,12 +561,12 @@
name: "libsamepackage",
deps: ["libexample"],
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
mock_library {
name: "libnested",
deps: ["libexample"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
mock_library {
name: "libother",
deps: ["libexample"],
@@ -582,7 +582,7 @@
{
name: "//visibility:private mixed with other in defaults",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults",
visibility: ["//visibility:private", "//namespace"],
@@ -600,7 +600,7 @@
{
name: "//visibility:private overriding defaults",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults",
visibility: ["//namespace"],
@@ -619,7 +619,7 @@
{
name: "//visibility:private in defaults overridden",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults",
visibility: ["//visibility:private"],
@@ -638,7 +638,7 @@
{
name: "//visibility:private override //visibility:public",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults",
visibility: ["//visibility:public"],
@@ -656,7 +656,7 @@
{
name: "//visibility:public override //visibility:private",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults",
visibility: ["//visibility:private"],
@@ -674,7 +674,7 @@
{
name: "//visibility:override must be first in the list",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_library {
name: "libexample",
visibility: ["//other", "//visibility:override", "//namespace"],
@@ -687,7 +687,7 @@
{
name: "//visibility:override discards //visibility:private",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults",
visibility: ["//visibility:private"],
@@ -698,7 +698,7 @@
visibility: ["//visibility:override", "//other"],
defaults: ["libexample_defaults"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
mock_library {
name: "libother",
deps: ["libexample"],
@@ -708,7 +708,7 @@
{
name: "//visibility:override discards //visibility:public",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults",
visibility: ["//visibility:public"],
@@ -719,12 +719,12 @@
visibility: ["//visibility:override", "//other"],
defaults: ["libexample_defaults"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
mock_library {
name: "libother",
deps: ["libexample"],
}`),
- "namespace/Blueprints": []byte(`
+ "namespace/Android.bp": []byte(`
mock_library {
name: "libnamespace",
deps: ["libexample"],
@@ -737,7 +737,7 @@
{
name: "//visibility:override discards defaults supplied rules",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults",
visibility: ["//namespace"],
@@ -748,12 +748,12 @@
visibility: ["//visibility:override", "//other"],
defaults: ["libexample_defaults"],
}`),
- "other/Blueprints": []byte(`
+ "other/Android.bp": []byte(`
mock_library {
name: "libother",
deps: ["libexample"],
}`),
- "namespace/Blueprints": []byte(`
+ "namespace/Android.bp": []byte(`
mock_library {
name: "libnamespace",
deps: ["libexample"],
@@ -766,7 +766,7 @@
{
name: "//visibility:override can override //visibility:public with //visibility:private",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults",
visibility: ["//visibility:public"],
@@ -776,7 +776,7 @@
visibility: ["//visibility:override", "//visibility:private"],
defaults: ["libexample_defaults"],
}`),
- "namespace/Blueprints": []byte(`
+ "namespace/Android.bp": []byte(`
mock_library {
name: "libnamespace",
deps: ["libexample"],
@@ -789,7 +789,7 @@
{
name: "//visibility:override can override //visibility:private with //visibility:public",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults",
visibility: ["//visibility:private"],
@@ -799,7 +799,7 @@
visibility: ["//visibility:override", "//visibility:public"],
defaults: ["libexample_defaults"],
}`),
- "namespace/Blueprints": []byte(`
+ "namespace/Android.bp": []byte(`
mock_library {
name: "libnamespace",
deps: ["libexample"],
@@ -809,7 +809,7 @@
{
name: "//visibility:private mixed with itself",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "libexample_defaults_1",
visibility: ["//visibility:private"],
@@ -823,7 +823,7 @@
visibility: ["//visibility:private"],
defaults: ["libexample_defaults_1", "libexample_defaults_2"],
}`),
- "outsider/Blueprints": []byte(`
+ "outsider/Android.bp": []byte(`
mock_library {
name: "liboutsider",
deps: ["libexample"],
@@ -839,7 +839,7 @@
{
name: "defaults_visibility invalid",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_defaults {
name: "top_defaults",
defaults_visibility: ["//visibility:invalid"],
@@ -852,7 +852,7 @@
{
name: "defaults_visibility overrides package default",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
package {
default_visibility: ["//visibility:private"],
}
@@ -860,7 +860,7 @@
name: "top_defaults",
defaults_visibility: ["//visibility:public"],
}`),
- "outsider/Blueprints": []byte(`
+ "outsider/Android.bp": []byte(`
mock_library {
name: "liboutsider",
defaults: ["top_defaults"],
@@ -872,7 +872,7 @@
{
name: "package default_visibility property is checked",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
package {
default_visibility: ["//visibility:invalid"],
}`),
@@ -883,7 +883,7 @@
// This test relies on the default visibility being legacy_public.
name: "package default_visibility property used when no visibility specified",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
package {
default_visibility: ["//visibility:private"],
}
@@ -891,7 +891,7 @@
mock_library {
name: "libexample",
}`),
- "outsider/Blueprints": []byte(`
+ "outsider/Android.bp": []byte(`
mock_library {
name: "liboutsider",
deps: ["libexample"],
@@ -905,7 +905,7 @@
{
name: "package default_visibility public does not override visibility private",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
package {
default_visibility: ["//visibility:public"],
}
@@ -914,7 +914,7 @@
name: "libexample",
visibility: ["//visibility:private"],
}`),
- "outsider/Blueprints": []byte(`
+ "outsider/Android.bp": []byte(`
mock_library {
name: "liboutsider",
deps: ["libexample"],
@@ -928,7 +928,7 @@
{
name: "package default_visibility private does not override visibility public",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
package {
default_visibility: ["//visibility:private"],
}
@@ -937,7 +937,7 @@
name: "libexample",
visibility: ["//visibility:public"],
}`),
- "outsider/Blueprints": []byte(`
+ "outsider/Android.bp": []byte(`
mock_library {
name: "liboutsider",
deps: ["libexample"],
@@ -947,7 +947,7 @@
{
name: "package default_visibility :__subpackages__",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
package {
default_visibility: [":__subpackages__"],
}
@@ -955,12 +955,12 @@
mock_library {
name: "libexample",
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
mock_library {
name: "libnested",
deps: ["libexample"],
}`),
- "outsider/Blueprints": []byte(`
+ "outsider/Android.bp": []byte(`
mock_library {
name: "liboutsider",
deps: ["libexample"],
@@ -974,7 +974,7 @@
{
name: "package default_visibility inherited to subpackages",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
package {
default_visibility: ["//outsider"],
}
@@ -983,12 +983,12 @@
name: "libexample",
visibility: [":__subpackages__"],
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
mock_library {
name: "libnested",
deps: ["libexample"],
}`),
- "outsider/Blueprints": []byte(`
+ "outsider/Android.bp": []byte(`
mock_library {
name: "liboutsider",
deps: ["libexample", "libnested"],
@@ -1002,11 +1002,11 @@
{
name: "package default_visibility inherited to subpackages",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
package {
default_visibility: ["//visibility:private"],
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
package {
default_visibility: ["//outsider"],
}
@@ -1014,11 +1014,11 @@
mock_library {
name: "libnested",
}`),
- "top/other/Blueprints": []byte(`
+ "top/other/Android.bp": []byte(`
mock_library {
name: "libother",
}`),
- "outsider/Blueprints": []byte(`
+ "outsider/Android.bp": []byte(`
mock_library {
name: "liboutsider",
deps: ["libother", "libnested"],
@@ -1032,19 +1032,19 @@
{
name: "verify that prebuilt dependencies are ignored for visibility reasons (not preferred)",
fs: MockFS{
- "prebuilts/Blueprints": []byte(`
+ "prebuilts/Android.bp": []byte(`
prebuilt {
name: "module",
visibility: ["//top/other"],
}`),
"top/sources/source_file": nil,
- "top/sources/Blueprints": []byte(`
+ "top/sources/Android.bp": []byte(`
source {
name: "module",
visibility: ["//top/other"],
}`),
"top/other/source_file": nil,
- "top/other/Blueprints": []byte(`
+ "top/other/Android.bp": []byte(`
source {
name: "other",
deps: [":module"],
@@ -1054,20 +1054,20 @@
{
name: "verify that prebuilt dependencies are ignored for visibility reasons (preferred)",
fs: MockFS{
- "prebuilts/Blueprints": []byte(`
+ "prebuilts/Android.bp": []byte(`
prebuilt {
name: "module",
visibility: ["//top/other"],
prefer: true,
}`),
"top/sources/source_file": nil,
- "top/sources/Blueprints": []byte(`
+ "top/sources/Android.bp": []byte(`
source {
name: "module",
visibility: ["//top/other"],
}`),
"top/other/source_file": nil,
- "top/other/Blueprints": []byte(`
+ "top/other/Android.bp": []byte(`
source {
name: "other",
deps: [":module"],
@@ -1077,7 +1077,7 @@
{
name: "ensure visibility properties are checked for correctness",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_parent {
name: "parent",
visibility: ["//top/nested"],
@@ -1094,7 +1094,7 @@
{
name: "invalid visibility added to child detected during gather phase",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_parent {
name: "parent",
visibility: ["//top/nested"],
@@ -1116,7 +1116,7 @@
{
name: "automatic visibility inheritance enabled",
fs: MockFS{
- "top/Blueprints": []byte(`
+ "top/Android.bp": []byte(`
mock_parent {
name: "parent",
visibility: ["//top/nested"],
@@ -1125,12 +1125,12 @@
visibility: ["//top/other"],
},
}`),
- "top/nested/Blueprints": []byte(`
+ "top/nested/Android.bp": []byte(`
mock_library {
name: "libnested",
deps: ["libchild"],
}`),
- "top/other/Blueprints": []byte(`
+ "top/other/Android.bp": []byte(`
mock_library {
name: "libother",
deps: ["libchild"],
diff --git a/android/writedocs.go b/android/writedocs.go
deleted file mode 100644
index 67b9aa3..0000000
--- a/android/writedocs.go
+++ /dev/null
@@ -1,89 +0,0 @@
-// Copyright 2015 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package android
-
-import (
- "fmt"
- "os"
- "path/filepath"
- "strings"
-
- "github.com/google/blueprint"
-)
-
-func init() {
- RegisterSingletonType("writedocs", DocsSingleton)
-}
-
-func DocsSingleton() Singleton {
- return &docsSingleton{}
-}
-
-type docsSingleton struct{}
-
-func primaryBuilderPath(ctx SingletonContext) Path {
- buildDir := absolutePath(ctx.Config().BuildDir())
- binary := absolutePath(os.Args[0])
- primaryBuilder, err := filepath.Rel(buildDir, binary)
- if err != nil {
- ctx.Errorf("path to primary builder %q is not in build dir %q (%q)",
- os.Args[0], ctx.Config().BuildDir(), err)
- }
-
- return PathForOutput(ctx, primaryBuilder)
-}
-
-func (c *docsSingleton) GenerateBuildActions(ctx SingletonContext) {
- var deps Paths
- deps = append(deps, pathForBuildToolDep(ctx, ctx.Config().moduleListFile))
- deps = append(deps, pathForBuildToolDep(ctx, ctx.Config().ProductVariablesFileName))
-
- // The dexpreopt configuration may not exist, but if it does, it's a dependency
- // of soong_build.
- dexpreoptConfigPath := ctx.Config().DexpreoptGlobalConfigPath(ctx)
- if dexpreoptConfigPath.Valid() {
- deps = append(deps, dexpreoptConfigPath.Path())
- }
-
- // Generate build system docs for the primary builder. Generating docs reads the source
- // files used to build the primary builder, but that dependency will be picked up through
- // the dependency on the primary builder itself. There are no dependencies on the
- // Blueprints files, as any relevant changes to the Blueprints files would have caused
- // a rebuild of the primary builder.
- docsFile := PathForOutput(ctx, "docs", "soong_build.html")
- primaryBuilder := primaryBuilderPath(ctx)
- soongDocs := ctx.Rule(pctx, "soongDocs",
- blueprint.RuleParams{
- Command: fmt.Sprintf("rm -f ${outDir}/* && %s --soong_docs %s %s",
- primaryBuilder.String(),
- docsFile.String(),
- "\""+strings.Join(os.Args[1:], "\" \"")+"\""),
- CommandDeps: []string{primaryBuilder.String()},
- Description: fmt.Sprintf("%s docs $out", primaryBuilder.Base()),
- },
- "outDir")
-
- ctx.Build(pctx, BuildParams{
- Rule: soongDocs,
- Output: docsFile,
- Inputs: deps,
- Args: map[string]string{
- "outDir": PathForOutput(ctx, "docs").String(),
- },
- })
-
- // Add a phony target for building the documentation
- ctx.Phony("soong_docs", docsFile)
-}
diff --git a/androidmk/androidmk/android.go b/androidmk/androidmk/android.go
index 08616a9..80801b2 100644
--- a/androidmk/androidmk/android.go
+++ b/androidmk/androidmk/android.go
@@ -201,6 +201,7 @@
"LOCAL_VENDOR_MODULE": "vendor",
"LOCAL_ODM_MODULE": "device_specific",
"LOCAL_PRODUCT_MODULE": "product_specific",
+ "LOCAL_PRODUCT_SERVICES_MODULE": "product_specific",
"LOCAL_SYSTEM_EXT_MODULE": "system_ext_specific",
"LOCAL_EXPORT_PACKAGE_RESOURCES": "export_package_resources",
"LOCAL_PRIVILEGED_MODULE": "privileged",
diff --git a/apex/androidmk.go b/apex/androidmk.go
index ebf0833..94b8116 100644
--- a/apex/androidmk.go
+++ b/apex/androidmk.go
@@ -212,7 +212,7 @@
}
if host {
makeOs := fi.module.Target().Os.String()
- if fi.module.Target().Os == android.Linux || fi.module.Target().Os == android.LinuxBionic {
+ if fi.module.Target().Os == android.Linux || fi.module.Target().Os == android.LinuxBionic || fi.module.Target().Os == android.LinuxMusl {
makeOs = "linux"
}
fmt.Fprintln(w, "LOCAL_MODULE_HOST_OS :=", makeOs)
@@ -382,7 +382,7 @@
fmt.Fprintln(w, "LOCAL_MODULE_PATH :=", a.installDir.ToMakePath().String())
stemSuffix := apexType.suffix()
if a.isCompressed {
- stemSuffix = ".capex"
+ stemSuffix = imageCapexSuffix
}
fmt.Fprintln(w, "LOCAL_MODULE_STEM :=", name+stemSuffix)
fmt.Fprintln(w, "LOCAL_UNINSTALLABLE_MODULE :=", !a.installable())
diff --git a/apex/apex.go b/apex/apex.go
index e1fca67..e3edc68 100644
--- a/apex/apex.go
+++ b/apex/apex.go
@@ -111,15 +111,9 @@
// List of java libraries that are embedded inside this APEX bundle.
Java_libs []string
- // List of prebuilt files that are embedded inside this APEX bundle.
- Prebuilts []string
-
// List of platform_compat_config files that are embedded inside this APEX bundle.
Compat_configs []string
- // List of BPF programs inside this APEX bundle.
- Bpfs []string
-
// List of filesystem images that are embedded inside this APEX bundle.
Filesystems []string
@@ -294,9 +288,15 @@
// List of APKs that are embedded inside this APEX.
Apps []string
+ // List of prebuilt files that are embedded inside this APEX bundle.
+ Prebuilts []string
+
// List of runtime resource overlays (RROs) that are embedded inside this APEX.
Rros []string
+ // List of BPF programs inside this APEX bundle.
+ Bpfs []string
+
// Names of modules to be overridden. Listed modules can only be other binaries (in Make or
// Soong). This does not completely prevent installation of the overridden binaries, but if
// both binaries would be installed by default (in PRODUCT_PACKAGES) the other binary will
@@ -684,7 +684,6 @@
// each target os/architectures, appropriate dependencies are selected by their
// target.<os>.multilib.<type> groups and are added as (direct) dependencies.
targets := ctx.MultiTargets()
- config := ctx.DeviceConfig()
imageVariation := a.getImageVariation(ctx)
a.combineProperties(ctx)
@@ -758,29 +757,11 @@
}
}
- if prebuilts := a.properties.Prebuilts; len(prebuilts) > 0 {
- // For prebuilt_etc, use the first variant (64 on 64/32bit device, 32 on 32bit device)
- // regardless of the TARGET_PREFER_* setting. See b/144532908
- archForPrebuiltEtc := config.Arches()[0]
- for _, arch := range config.Arches() {
- // Prefer 64-bit arch if there is any
- if arch.ArchType.Multilib == "lib64" {
- archForPrebuiltEtc = arch
- break
- }
- }
- ctx.AddFarVariationDependencies([]blueprint.Variation{
- {Mutator: "os", Variation: ctx.Os().String()},
- {Mutator: "arch", Variation: archForPrebuiltEtc.String()},
- }, prebuiltTag, prebuilts...)
- }
-
// Common-arch dependencies come next
commonVariation := ctx.Config().AndroidCommonTarget.Variations()
ctx.AddFarVariationDependencies(commonVariation, bcpfTag, a.properties.Bootclasspath_fragments...)
ctx.AddFarVariationDependencies(commonVariation, sscpfTag, a.properties.Systemserverclasspath_fragments...)
ctx.AddFarVariationDependencies(commonVariation, javaLibTag, a.properties.Java_libs...)
- ctx.AddFarVariationDependencies(commonVariation, bpfTag, a.properties.Bpfs...)
ctx.AddFarVariationDependencies(commonVariation, fsTag, a.properties.Filesystems...)
ctx.AddFarVariationDependencies(commonVariation, compatConfigTag, a.properties.Compat_configs...)
@@ -813,7 +794,27 @@
commonVariation := ctx.Config().AndroidCommonTarget.Variations()
ctx.AddFarVariationDependencies(commonVariation, androidAppTag, a.overridableProperties.Apps...)
+ ctx.AddFarVariationDependencies(commonVariation, bpfTag, a.overridableProperties.Bpfs...)
ctx.AddFarVariationDependencies(commonVariation, rroTag, a.overridableProperties.Rros...)
+ if prebuilts := a.overridableProperties.Prebuilts; len(prebuilts) > 0 {
+ // For prebuilt_etc, use the first variant (64 on 64/32bit device, 32 on 32bit device)
+ // regardless of the TARGET_PREFER_* setting. See b/144532908
+ arches := ctx.DeviceConfig().Arches()
+ if len(arches) != 0 {
+ archForPrebuiltEtc := arches[0]
+ for _, arch := range arches {
+ // Prefer 64-bit arch if there is any
+ if arch.ArchType.Multilib == "lib64" {
+ archForPrebuiltEtc = arch
+ break
+ }
+ }
+ ctx.AddFarVariationDependencies([]blueprint.Variation{
+ {Mutator: "os", Variation: ctx.Os().String()},
+ {Mutator: "arch", Variation: archForPrebuiltEtc.String()},
+ }, prebuiltTag, prebuilts...)
+ }
+ }
// Dependencies for signing
if String(a.overridableProperties.Key) == "" {
@@ -1151,9 +1152,10 @@
const (
// File extensions of an APEX for different packaging methods
- imageApexSuffix = ".apex"
- zipApexSuffix = ".zipapex"
- flattenedSuffix = ".flattened"
+ imageApexSuffix = ".apex"
+ imageCapexSuffix = ".capex"
+ zipApexSuffix = ".zipapex"
+ flattenedSuffix = ".flattened"
// variant names each of which is for a packaging method
imageApexType = "image"
@@ -3282,7 +3284,7 @@
nativeSharedLibsLabelList := android.BazelLabelForModuleDeps(ctx, nativeSharedLibs)
nativeSharedLibsLabelListAttribute := bazel.MakeLabelListAttribute(nativeSharedLibsLabelList)
- prebuilts := module.properties.Prebuilts
+ prebuilts := module.overridableProperties.Prebuilts
prebuiltsLabelList := android.BazelLabelForModuleDeps(ctx, prebuilts)
prebuiltsLabelListAttribute := bazel.MakeLabelListAttribute(prebuiltsLabelList)
diff --git a/apex/apex_test.go b/apex/apex_test.go
index f07bf63..6027f9b 100644
--- a/apex/apex_test.go
+++ b/apex/apex_test.go
@@ -1874,6 +1874,45 @@
expectNoLink("libx", "shared_apex10000", "libz", "shared")
}
+func TestApexMinSdkVersion_crtobjectInVendorApex(t *testing.T) {
+ ctx := testApex(t, `
+ apex {
+ name: "myapex",
+ key: "myapex.key",
+ native_shared_libs: ["mylib"],
+ updatable: false,
+ vendor: true,
+ min_sdk_version: "29",
+ }
+
+ apex_key {
+ name: "myapex.key",
+ public_key: "testkey.avbpubkey",
+ private_key: "testkey.pem",
+ }
+
+ cc_library {
+ name: "mylib",
+ vendor_available: true,
+ system_shared_libs: [],
+ stl: "none",
+ apex_available: [ "myapex" ],
+ min_sdk_version: "29",
+ }
+ `)
+
+ vendorVariant := "android_vendor.29_arm64_armv8-a"
+
+ // First check that the correct variant of crtbegin_so is used.
+ ldRule := ctx.ModuleForTests("mylib", vendorVariant+"_shared_apex29").Rule("ld")
+ crtBegin := names(ldRule.Args["crtBegin"])
+ ensureListContains(t, crtBegin, "out/soong/.intermediates/"+cc.DefaultCcCommonTestModulesDir+"crtbegin_so/"+vendorVariant+"_apex29/crtbegin_so.o")
+
+ // Ensure that the crtbegin_so used by the APEX is targeting 29
+ cflags := ctx.ModuleForTests("crtbegin_so", vendorVariant+"_apex29").Rule("cc").Args["cFlags"]
+ android.AssertStringDoesContain(t, "cflags", cflags, "-target aarch64-linux-android29")
+}
+
func TestPlatformUsesLatestStubsFromApexes(t *testing.T) {
ctx := testApex(t, `
apex {
@@ -4601,6 +4640,35 @@
}
}
+func TestApexSetFilenameOverride(t *testing.T) {
+ testApex(t, `
+ apex_set {
+ name: "com.company.android.myapex",
+ apex_name: "com.android.myapex",
+ set: "company-myapex.apks",
+ filename: "com.company.android.myapex.apex"
+ }
+ `).ModuleForTests("com.company.android.myapex", "android_common_com.android.myapex")
+
+ testApex(t, `
+ apex_set {
+ name: "com.company.android.myapex",
+ apex_name: "com.android.myapex",
+ set: "company-myapex.apks",
+ filename: "com.company.android.myapex.capex"
+ }
+ `).ModuleForTests("com.company.android.myapex", "android_common_com.android.myapex")
+
+ testApexError(t, `filename should end in .apex or .capex for apex_set`, `
+ apex_set {
+ name: "com.company.android.myapex",
+ apex_name: "com.android.myapex",
+ set: "company-myapex.apks",
+ filename: "some-random-suffix"
+ }
+ `)
+}
+
func TestPrebuiltOverrides(t *testing.T) {
ctx := testApex(t, `
prebuilt_apex {
@@ -4891,7 +4959,7 @@
}
}
if !foundLibfooJar {
- t.Errorf("Rule for libfoo.jar missing in dex_bootjars singleton outputs %q", android.StringPathsRelativeToTop(ctx.Config().BuildDir(), s.AllOutputs()))
+ t.Errorf("Rule for libfoo.jar missing in dex_bootjars singleton outputs %q", android.StringPathsRelativeToTop(ctx.Config().SoongOutDir(), s.AllOutputs()))
}
}
@@ -5090,6 +5158,12 @@
// find the dex boot jar in it. We either need to disable the source libfoo
// or make the prebuilt libfoo preferred.
testDexpreoptWithApexes(t, bp, "module libfoo does not provide a dex boot jar", preparer, fragment)
+ // dexbootjar check is skipped if AllowMissingDependencies is true
+ preparerAllowMissingDeps := android.GroupFixturePreparers(
+ preparer,
+ android.PrepareForTestWithAllowMissingDependencies,
+ )
+ testDexpreoptWithApexes(t, bp, "", preparerAllowMissingDeps, fragment)
})
t.Run("prebuilt library preferred with source", func(t *testing.T) {
@@ -6034,6 +6108,8 @@
name: "myapex",
key: "myapex.key",
apps: ["app"],
+ bpfs: ["bpf"],
+ prebuilts: ["myetc"],
overrides: ["oldapex"],
updatable: false,
}
@@ -6042,6 +6118,8 @@
name: "override_myapex",
base: "myapex",
apps: ["override_app"],
+ bpfs: ["override_bpf"],
+ prebuilts: ["override_myetc"],
overrides: ["unknownapex"],
logging_parent: "com.foo.bar",
package_name: "test.overridden.package",
@@ -6080,6 +6158,26 @@
base: "app",
package_name: "bar",
}
+
+ bpf {
+ name: "bpf",
+ srcs: ["bpf.c"],
+ }
+
+ bpf {
+ name: "override_bpf",
+ srcs: ["override_bpf.c"],
+ }
+
+ prebuilt_etc {
+ name: "myetc",
+ src: "myprebuilt",
+ }
+
+ prebuilt_etc {
+ name: "override_myetc",
+ src: "override_myprebuilt",
+ }
`, withManifestPackageNameOverrides([]string{"myapex:com.android.myapex"}))
originalVariant := ctx.ModuleForTests("myapex", "android_common_myapex_image").Module().(android.OverridableModule)
@@ -6098,6 +6196,12 @@
ensureNotContains(t, copyCmds, "image.apex/app/app/app.apk")
ensureContains(t, copyCmds, "image.apex/app/override_app/override_app.apk")
+ ensureNotContains(t, copyCmds, "image.apex/etc/bpf/bpf.o")
+ ensureContains(t, copyCmds, "image.apex/etc/bpf/override_bpf.o")
+
+ ensureNotContains(t, copyCmds, "image.apex/etc/myetc")
+ ensureContains(t, copyCmds, "image.apex/etc/override_myetc")
+
apexBundle := module.Module().(*apexBundle)
name := apexBundle.Name()
if name != "override_myapex" {
@@ -6120,10 +6224,12 @@
data.Custom(&builder, name, "TARGET_", "", data)
androidMk := builder.String()
ensureContains(t, androidMk, "LOCAL_MODULE := override_app.override_myapex")
+ ensureContains(t, androidMk, "LOCAL_MODULE := override_bpf.o.override_myapex")
ensureContains(t, androidMk, "LOCAL_MODULE := apex_manifest.pb.override_myapex")
ensureContains(t, androidMk, "LOCAL_MODULE_STEM := override_myapex.apex")
ensureContains(t, androidMk, "LOCAL_OVERRIDES_MODULES := unknownapex myapex")
ensureNotContains(t, androidMk, "LOCAL_MODULE := app.myapex")
+ ensureNotContains(t, androidMk, "LOCAL_MODULE := bpf.myapex")
ensureNotContains(t, androidMk, "LOCAL_MODULE := override_app.myapex")
ensureNotContains(t, androidMk, "LOCAL_MODULE := apex_manifest.pb.myapex")
ensureNotContains(t, androidMk, "LOCAL_MODULE_STEM := myapex.apex")
diff --git a/apex/builder.go b/apex/builder.go
index 5baa5c0..3177ee0 100644
--- a/apex/builder.go
+++ b/apex/builder.go
@@ -786,7 +786,7 @@
if apexType == imageApex && (compressionEnabled || a.testOnlyShouldForceCompression()) {
a.isCompressed = true
- unsignedCompressedOutputFile := android.PathForModuleOut(ctx, a.Name()+".capex.unsigned")
+ unsignedCompressedOutputFile := android.PathForModuleOut(ctx, a.Name()+imageCapexSuffix+".unsigned")
compressRule := android.NewRuleBuilder(pctx, ctx)
compressRule.Command().
@@ -800,7 +800,7 @@
FlagWithOutput("--output ", unsignedCompressedOutputFile)
compressRule.Build("compressRule", "Generate unsigned compressed APEX file")
- signedCompressedOutputFile := android.PathForModuleOut(ctx, a.Name()+".capex")
+ signedCompressedOutputFile := android.PathForModuleOut(ctx, a.Name()+imageCapexSuffix)
if ctx.Config().UseRBE() && ctx.Config().IsEnvTrue("RBE_SIGNAPK") {
args["outCommaList"] = signedCompressedOutputFile.String()
}
diff --git a/apex/classpath_element_test.go b/apex/classpath_element_test.go
index e2d8465..60f18bd 100644
--- a/apex/classpath_element_test.go
+++ b/apex/classpath_element_test.go
@@ -159,11 +159,6 @@
],
}
- bootclasspath_fragment {
- name: "non-apex-fragment",
- contents: ["othersdklibrary"],
- }
-
apex {
name: "otherapex",
key: "otherapex.key",
@@ -213,7 +208,6 @@
myFragment := result.Module("mybootclasspath-fragment", "android_common_apex10000")
myBar := result.Module("bar", "android_common_apex10000")
- nonApexFragment := result.Module("non-apex-fragment", "android_common")
other := result.Module("othersdklibrary", "android_common_apex10000")
otherApexLibrary := result.Module("otherapexlibrary", "android_common_apex10000")
@@ -253,15 +247,6 @@
assertElementsEquals(t, "elements", expectedElements, elements)
})
- // Verify that CreateClasspathElements detects when a fragment does not have an associated apex.
- t.Run("non apex fragment", func(t *testing.T) {
- ctx := newCtx()
- elements := java.CreateClasspathElements(ctx, []android.Module{}, []android.Module{nonApexFragment})
- android.FailIfNoMatchingErrors(t, "fragment non-apex-fragment{.*} is not part of an apex", ctx.errs)
- expectedElements := java.ClasspathElements{}
- assertElementsEquals(t, "elements", expectedElements, elements)
- })
-
// Verify that CreateClasspathElements detects when an apex has multiple fragments.
t.Run("multiple fragments for same apex", func(t *testing.T) {
ctx := newCtx()
diff --git a/apex/platform_bootclasspath_test.go b/apex/platform_bootclasspath_test.go
index e0421f6..513ddc0 100644
--- a/apex/platform_bootclasspath_test.go
+++ b/apex/platform_bootclasspath_test.go
@@ -543,3 +543,140 @@
"out/soong/target/product/test_device/system/etc/classpaths",
)
}
+
+func TestBootJarNotInApex(t *testing.T) {
+ android.GroupFixturePreparers(
+ prepareForTestWithPlatformBootclasspath,
+ PrepareForTestWithApexBuildComponents,
+ prepareForTestWithMyapex,
+ java.FixtureConfigureApexBootJars("myapex:foo"),
+ ).ExtendWithErrorHandler(android.FixtureExpectsAtLeastOneErrorMatchingPattern(
+ `dependency "foo" of "myplatform-bootclasspath" missing variant`)).
+ RunTestWithBp(t, `
+ apex {
+ name: "myapex",
+ key: "myapex.key",
+ updatable: false,
+ }
+
+ apex_key {
+ name: "myapex.key",
+ public_key: "testkey.avbpubkey",
+ private_key: "testkey.pem",
+ }
+
+ java_library {
+ name: "foo",
+ srcs: ["b.java"],
+ installable: true,
+ apex_available: [
+ "myapex",
+ ],
+ }
+
+ bootclasspath_fragment {
+ name: "not-in-apex-fragment",
+ contents: [
+ "foo",
+ ],
+ }
+
+ platform_bootclasspath {
+ name: "myplatform-bootclasspath",
+ }
+ `)
+}
+
+func TestBootFragmentNotInApex(t *testing.T) {
+ android.GroupFixturePreparers(
+ prepareForTestWithPlatformBootclasspath,
+ PrepareForTestWithApexBuildComponents,
+ prepareForTestWithMyapex,
+ java.FixtureConfigureApexBootJars("myapex:foo"),
+ ).ExtendWithErrorHandler(android.FixtureExpectsAtLeastOneErrorMatchingPattern(
+ `library foo.*have no corresponding fragment.*`)).RunTestWithBp(t, `
+ apex {
+ name: "myapex",
+ key: "myapex.key",
+ java_libs: ["foo"],
+ updatable: false,
+ }
+
+ apex_key {
+ name: "myapex.key",
+ public_key: "testkey.avbpubkey",
+ private_key: "testkey.pem",
+ }
+
+ java_library {
+ name: "foo",
+ srcs: ["b.java"],
+ installable: true,
+ apex_available: ["myapex"],
+ permitted_packages: ["foo"],
+ }
+
+ bootclasspath_fragment {
+ name: "not-in-apex-fragment",
+ contents: ["foo"],
+ }
+
+ platform_bootclasspath {
+ name: "myplatform-bootclasspath",
+ }
+ `)
+}
+
+func TestNonBootJarInFragment(t *testing.T) {
+ android.GroupFixturePreparers(
+ prepareForTestWithPlatformBootclasspath,
+ PrepareForTestWithApexBuildComponents,
+ prepareForTestWithMyapex,
+ java.FixtureConfigureApexBootJars("myapex:foo"),
+ ).ExtendWithErrorHandler(android.FixtureExpectsAtLeastOneErrorMatchingPattern(
+ `in contents must also be declared in PRODUCT_APEX_BOOT_JARS`)).
+ RunTestWithBp(t, `
+ apex {
+ name: "myapex",
+ key: "myapex.key",
+ bootclasspath_fragments: ["apex-fragment"],
+ updatable: false,
+ }
+
+ apex_key {
+ name: "myapex.key",
+ public_key: "testkey.avbpubkey",
+ private_key: "testkey.pem",
+ }
+
+ java_library {
+ name: "foo",
+ srcs: ["b.java"],
+ installable: true,
+ apex_available: ["myapex"],
+ permitted_packages: ["foo"],
+ }
+
+ java_library {
+ name: "bar",
+ srcs: ["b.java"],
+ installable: true,
+ apex_available: ["myapex"],
+ permitted_packages: ["bar"],
+ }
+
+ bootclasspath_fragment {
+ name: "apex-fragment",
+ contents: ["foo", "bar"],
+ apex_available:[ "myapex" ],
+ }
+
+ platform_bootclasspath {
+ name: "myplatform-bootclasspath",
+ fragments: [{
+ apex: "myapex",
+ module:"apex-fragment",
+ }],
+ }
+ `)
+}
diff --git a/apex/prebuilt.go b/apex/prebuilt.go
index 1bb0fb5..c4794dc 100644
--- a/apex/prebuilt.go
+++ b/apex/prebuilt.go
@@ -924,8 +924,8 @@
func (a *ApexSet) GenerateAndroidBuildActions(ctx android.ModuleContext) {
a.installFilename = a.InstallFilename()
- if !strings.HasSuffix(a.installFilename, imageApexSuffix) {
- ctx.ModuleErrorf("filename should end in %s for apex_set", imageApexSuffix)
+ if !strings.HasSuffix(a.installFilename, imageApexSuffix) && !strings.HasSuffix(a.installFilename, imageCapexSuffix) {
+ ctx.ModuleErrorf("filename should end in %s or %s for apex_set", imageApexSuffix, imageCapexSuffix)
}
inputApex := android.OptionalPathForModuleSrc(ctx, a.prebuiltCommonProperties.Selected_apex).Path()
diff --git a/apex/systemserver_classpath_fragment_test.go b/apex/systemserver_classpath_fragment_test.go
index a64c6f4..a8d5931 100644
--- a/apex/systemserver_classpath_fragment_test.go
+++ b/apex/systemserver_classpath_fragment_test.go
@@ -130,3 +130,54 @@
`mysystemserverclasspathfragment`,
})
}
+
+func TestSystemServerClasspathFragmentWithContentNotInMake(t *testing.T) {
+ android.GroupFixturePreparers(
+ prepareForTestWithSystemserverclasspathFragment,
+ prepareForTestWithMyapex,
+ dexpreopt.FixtureSetApexSystemServerJars("myapex:foo"),
+ ).
+ ExtendWithErrorHandler(android.FixtureExpectsAtLeastOneErrorMatchingPattern(
+ `in contents must also be declared in PRODUCT_UPDATABLE_SYSTEM_SERVER_JARS`)).
+ RunTestWithBp(t, `
+ apex {
+ name: "myapex",
+ key: "myapex.key",
+ systemserverclasspath_fragments: [
+ "mysystemserverclasspathfragment",
+ ],
+ updatable: false,
+ }
+
+ apex_key {
+ name: "myapex.key",
+ public_key: "testkey.avbpubkey",
+ private_key: "testkey.pem",
+ }
+
+ java_library {
+ name: "foo",
+ srcs: ["b.java"],
+ installable: true,
+ apex_available: ["myapex"],
+ }
+
+ java_library {
+ name: "bar",
+ srcs: ["b.java"],
+ installable: true,
+ apex_available: ["myapex"],
+ }
+
+ systemserverclasspath_fragment {
+ name: "mysystemserverclasspathfragment",
+ contents: [
+ "foo",
+ "bar",
+ ],
+ apex_available: [
+ "myapex",
+ ],
+ }
+ `)
+}
diff --git a/bloaty/bloaty_merger.py b/bloaty/bloaty_merger.py
index 1034462..46ce57f 100644
--- a/bloaty/bloaty_merger.py
+++ b/bloaty/bloaty_merger.py
@@ -24,58 +24,63 @@
import csv
import gzip
+# pylint: disable=import-error
import ninja_rsp
import file_sections_pb2
BLOATY_EXTENSION = ".bloaty.csv"
+
def parse_csv(path):
- """Parses a Bloaty-generated CSV file into a protobuf.
+ """Parses a Bloaty-generated CSV file into a protobuf.
- Args:
- path: The filepath to the CSV file, relative to $ANDROID_TOP.
+ Args:
+ path: The filepath to the CSV file, relative to $ANDROID_TOP.
- Returns:
- A file_sections_pb2.File if the file was found; None otherwise.
- """
- file_proto = None
- with open(path, newline='') as csv_file:
- file_proto = file_sections_pb2.File()
- if path.endswith(BLOATY_EXTENSION):
- file_proto.path = path[:-len(BLOATY_EXTENSION)]
- section_reader = csv.DictReader(csv_file)
- for row in section_reader:
- section = file_proto.sections.add()
- section.name = row["sections"]
- section.vm_size = int(row["vmsize"])
- section.file_size = int(row["filesize"])
- return file_proto
+ Returns:
+ A file_sections_pb2.File if the file was found; None otherwise.
+ """
+ file_proto = None
+ with open(path, newline='') as csv_file:
+ file_proto = file_sections_pb2.File()
+ if path.endswith(BLOATY_EXTENSION):
+ file_proto.path = path[: -len(BLOATY_EXTENSION)]
+ section_reader = csv.DictReader(csv_file)
+ for row in section_reader:
+ section = file_proto.sections.add()
+ section.name = row["sections"]
+ section.vm_size = int(row["vmsize"])
+ section.file_size = int(row["filesize"])
+ return file_proto
+
def create_file_size_metrics(input_list, output_proto):
- """Creates a FileSizeMetrics proto from a list of CSV files.
+ """Creates a FileSizeMetrics proto from a list of CSV files.
- Args:
- input_list: The path to the file which contains the list of CSV files. Each
- filepath is separated by a space.
- output_proto: The path for the output protobuf. It will be compressed using
- gzip.
- """
- metrics = file_sections_pb2.FileSizeMetrics()
- reader = ninja_rsp.NinjaRspFileReader(input_list)
- for csv_path in reader:
- file_proto = parse_csv(csv_path)
- if file_proto:
- metrics.files.append(file_proto)
- with gzip.open(output_proto, "wb") as output:
- output.write(metrics.SerializeToString())
+ Args:
+ input_list: The path to the file which contains the list of CSV files.
+ Each filepath is separated by a space.
+ output_proto: The path for the output protobuf. It will be compressed
+ using gzip.
+ """
+ metrics = file_sections_pb2.FileSizeMetrics()
+ reader = ninja_rsp.NinjaRspFileReader(input_list)
+ for csv_path in reader:
+ file_proto = parse_csv(csv_path)
+ if file_proto:
+ metrics.files.append(file_proto)
+ with gzip.open(output_proto, "wb") as output:
+ output.write(metrics.SerializeToString())
+
def main():
- parser = argparse.ArgumentParser()
- parser.add_argument("input_list_file", help="List of bloaty csv files.")
- parser.add_argument("output_proto", help="Output proto.")
- args = parser.parse_args()
- create_file_size_metrics(args.input_list_file, args.output_proto)
+ parser = argparse.ArgumentParser()
+ parser.add_argument("input_list_file", help="List of bloaty csv files.")
+ parser.add_argument("output_proto", help="Output proto.")
+ args = parser.parse_args()
+ create_file_size_metrics(args.input_list_file, args.output_proto)
+
if __name__ == '__main__':
- main()
+ main()
diff --git a/bloaty/bloaty_merger_test.py b/bloaty/bloaty_merger_test.py
index 9de049a..83680b9 100644
--- a/bloaty/bloaty_merger_test.py
+++ b/bloaty/bloaty_merger_test.py
@@ -14,6 +14,7 @@
import gzip
import unittest
+# pylint: disable=import-error
from pyfakefs import fake_filesystem_unittest
import bloaty_merger
@@ -21,46 +22,46 @@
class BloatyMergerTestCase(fake_filesystem_unittest.TestCase):
- def setUp(self):
- self.setUpPyfakefs()
+ def setUp(self):
+ self.setUpPyfakefs()
- def test_parse_csv(self):
- csv_content = "sections,vmsize,filesize\nsection1,2,3\n"
- self.fs.create_file("file1.bloaty.csv", contents=csv_content)
- pb = bloaty_merger.parse_csv("file1.bloaty.csv")
- self.assertEqual(pb.path, "file1")
- self.assertEqual(len(pb.sections), 1)
- s = pb.sections[0]
- self.assertEqual(s.name, "section1")
- self.assertEqual(s.vm_size, 2)
- self.assertEqual(s.file_size, 3)
+ def test_parse_csv(self):
+ csv_content = "sections,vmsize,filesize\nsection1,2,3\n"
+ self.fs.create_file("file1.bloaty.csv", contents=csv_content)
+ pb = bloaty_merger.parse_csv("file1.bloaty.csv")
+ self.assertEqual(pb.path, "file1")
+ self.assertEqual(len(pb.sections), 1)
+ s = pb.sections[0]
+ self.assertEqual(s.name, "section1")
+ self.assertEqual(s.vm_size, 2)
+ self.assertEqual(s.file_size, 3)
- def test_missing_file(self):
- with self.assertRaises(FileNotFoundError):
- bloaty_merger.parse_csv("missing.bloaty.csv")
+ def test_missing_file(self):
+ with self.assertRaises(FileNotFoundError):
+ bloaty_merger.parse_csv("missing.bloaty.csv")
- def test_malformed_csv(self):
- csv_content = "header1,heaVder2,header3\n4,5,6\n"
- self.fs.create_file("file1.bloaty.csv", contents=csv_content)
- with self.assertRaises(KeyError):
- bloaty_merger.parse_csv("file1.bloaty.csv")
+ def test_malformed_csv(self):
+ csv_content = "header1,heaVder2,header3\n4,5,6\n"
+ self.fs.create_file("file1.bloaty.csv", contents=csv_content)
+ with self.assertRaises(KeyError):
+ bloaty_merger.parse_csv("file1.bloaty.csv")
- def test_create_file_metrics(self):
- file_list = "file1.bloaty.csv file2.bloaty.csv"
- file1_content = "sections,vmsize,filesize\nsection1,2,3\nsection2,7,8"
- file2_content = "sections,vmsize,filesize\nsection1,4,5\n"
+ def test_create_file_metrics(self):
+ file_list = "file1.bloaty.csv file2.bloaty.csv"
+ file1_content = "sections,vmsize,filesize\nsection1,2,3\nsection2,7,8"
+ file2_content = "sections,vmsize,filesize\nsection1,4,5\n"
- self.fs.create_file("files.lst", contents=file_list)
- self.fs.create_file("file1.bloaty.csv", contents=file1_content)
- self.fs.create_file("file2.bloaty.csv", contents=file2_content)
+ self.fs.create_file("files.lst", contents=file_list)
+ self.fs.create_file("file1.bloaty.csv", contents=file1_content)
+ self.fs.create_file("file2.bloaty.csv", contents=file2_content)
- bloaty_merger.create_file_size_metrics("files.lst", "output.pb.gz")
+ bloaty_merger.create_file_size_metrics("files.lst", "output.pb.gz")
- metrics = file_sections_pb2.FileSizeMetrics()
- with gzip.open("output.pb.gz", "rb") as output:
- metrics.ParseFromString(output.read())
+ metrics = file_sections_pb2.FileSizeMetrics()
+ with gzip.open("output.pb.gz", "rb") as output:
+ metrics.ParseFromString(output.read())
if __name__ == '__main__':
- suite = unittest.TestLoader().loadTestsFromTestCase(BloatyMergerTestCase)
- unittest.TextTestRunner(verbosity=2).run(suite)
+ suite = unittest.TestLoader().loadTestsFromTestCase(BloatyMergerTestCase)
+ unittest.TextTestRunner(verbosity=2).run(suite)
diff --git a/bp2build/Android.bp b/bp2build/Android.bp
index 78e3a74..5ee04f9 100644
--- a/bp2build/Android.bp
+++ b/bp2build/Android.bp
@@ -39,9 +39,12 @@
"cc_library_static_conversion_test.go",
"cc_object_conversion_test.go",
"conversion_test.go",
+ "filegroup_conversion_test.go",
+ "genrule_conversion_test.go",
"performance_test.go",
"prebuilt_etc_conversion_test.go",
"python_binary_conversion_test.go",
+ "python_library_conversion_test.go",
"sh_conversion_test.go",
"testing.go",
],
diff --git a/bp2build/build_conversion.go b/bp2build/build_conversion.go
index a64d474..f652a35 100644
--- a/bp2build/build_conversion.go
+++ b/bp2build/build_conversion.go
@@ -14,14 +14,20 @@
package bp2build
+/*
+Shared functionality for converting Soong modules to build files, used by both
+queryview and bp2build.
+*/
+
import (
- "android/soong/android"
- "android/soong/bazel"
"fmt"
"reflect"
"sort"
"strings"
+ "android/soong/android"
+ "android/soong/bazel"
+
"github.com/google/blueprint"
"github.com/google/blueprint/proptools"
)
diff --git a/bp2build/build_conversion_test.go b/bp2build/build_conversion_test.go
index 0d9106c..ecea6b2 100644
--- a/bp2build/build_conversion_test.go
+++ b/bp2build/build_conversion_test.go
@@ -16,7 +16,6 @@
import (
"android/soong/android"
- "android/soong/genrule"
"strings"
"testing"
)
@@ -218,13 +217,9 @@
}
func TestGenerateBazelTargetModules(t *testing.T) {
- testCases := []struct {
- name string
- bp string
- expectedBazelTargets []string
- }{
+ testCases := []bp2buildTestCase{
{
- bp: `custom {
+ blueprint: `custom {
name: "foo",
string_list_prop: ["a", "b"],
string_prop: "a",
@@ -241,7 +236,7 @@
},
},
{
- bp: `custom {
+ blueprint: `custom {
name: "control_characters",
string_list_prop: ["\t", "\n"],
string_prop: "a\t\n\r",
@@ -258,7 +253,7 @@
},
},
{
- bp: `custom {
+ blueprint: `custom {
name: "has_dep",
arch_paths: [":dep"],
bazel_module: { bp2build_available: true },
@@ -280,7 +275,7 @@
},
},
{
- bp: `custom {
+ blueprint: `custom {
name: "arch_paths",
arch: {
x86: {
@@ -299,7 +294,7 @@
},
},
{
- bp: `custom {
+ blueprint: `custom {
name: "has_dep",
arch: {
x86: {
@@ -331,17 +326,17 @@
dir := "."
for _, testCase := range testCases {
- config := android.TestConfig(buildDir, nil, testCase.bp, nil)
+ config := android.TestConfig(buildDir, nil, testCase.blueprint, nil)
ctx := android.NewTestContext(config)
registerCustomModuleForBp2buildConversion(ctx)
_, errs := ctx.ParseFileList(dir, []string{"Android.bp"})
- if errored(t, "", errs) {
+ if errored(t, testCase, errs) {
continue
}
_, errs = ctx.ResolveDependencies(config)
- if errored(t, "", errs) {
+ if errored(t, testCase, errs) {
continue
}
@@ -533,38 +528,13 @@
}
func TestModuleTypeBp2Build(t *testing.T) {
- otherGenruleBp := map[string]string{
- "other/Android.bp": `genrule {
- name: "foo.tool",
- out: ["foo_tool.out"],
- srcs: ["foo_tool.in"],
- cmd: "cp $(in) $(out)",
-}
-genrule {
- name: "other.tool",
- out: ["other_tool.out"],
- srcs: ["other_tool.in"],
- cmd: "cp $(in) $(out)",
-}`,
- }
-
- testCases := []struct {
- description string
- moduleTypeUnderTest string
- moduleTypeUnderTestFactory android.ModuleFactory
- moduleTypeUnderTestBp2BuildMutator func(android.TopDownMutatorContext)
- preArchMutators []android.RegisterMutatorFunc
- bp string
- expectedBazelTargets []string
- fs map[string]string
- dir string
- }{
+ testCases := []bp2buildTestCase{
{
description: "filegroup with does not specify srcs",
moduleTypeUnderTest: "filegroup",
moduleTypeUnderTestFactory: android.FileGroupFactory,
moduleTypeUnderTestBp2BuildMutator: android.FilegroupBp2Build,
- bp: `filegroup {
+ blueprint: `filegroup {
name: "fg_foo",
bazel_module: { bp2build_available: true },
}`,
@@ -579,7 +549,7 @@
moduleTypeUnderTest: "filegroup",
moduleTypeUnderTestFactory: android.FileGroupFactory,
moduleTypeUnderTestBp2BuildMutator: android.FilegroupBp2Build,
- bp: `filegroup {
+ blueprint: `filegroup {
name: "fg_foo",
srcs: [],
bazel_module: { bp2build_available: true },
@@ -595,7 +565,7 @@
moduleTypeUnderTest: "filegroup",
moduleTypeUnderTestFactory: android.FileGroupFactory,
moduleTypeUnderTestBp2BuildMutator: android.FilegroupBp2Build,
- bp: `filegroup {
+ blueprint: `filegroup {
name: "fg_foo",
srcs: ["a", "b"],
bazel_module: { bp2build_available: true },
@@ -614,7 +584,7 @@
moduleTypeUnderTest: "filegroup",
moduleTypeUnderTestFactory: android.FileGroupFactory,
moduleTypeUnderTestBp2BuildMutator: android.FilegroupBp2Build,
- bp: `filegroup {
+ blueprint: `filegroup {
name: "fg_foo",
srcs: ["a", "b"],
exclude_srcs: ["a"],
@@ -631,7 +601,7 @@
moduleTypeUnderTest: "filegroup",
moduleTypeUnderTestFactory: android.FileGroupFactory,
moduleTypeUnderTestBp2BuildMutator: android.FilegroupBp2Build,
- bp: `filegroup {
+ blueprint: `filegroup {
name: "foo",
srcs: ["**/*.txt"],
bazel_module: { bp2build_available: true },
@@ -645,7 +615,7 @@
],
)`,
},
- fs: map[string]string{
+ filesystem: map[string]string{
"other/a.txt": "",
"other/b.txt": "",
"other/subdir/a.txt": "",
@@ -657,7 +627,7 @@
moduleTypeUnderTest: "filegroup",
moduleTypeUnderTestFactory: android.FileGroupFactory,
moduleTypeUnderTestBp2BuildMutator: android.FilegroupBp2Build,
- bp: `filegroup {
+ blueprint: `filegroup {
name: "foo",
srcs: ["a.txt"],
bazel_module: { bp2build_available: true },
@@ -672,7 +642,7 @@
],
)`,
},
- fs: map[string]string{
+ filesystem: map[string]string{
"other/Android.bp": `filegroup {
name: "fg_foo",
srcs: ["**/*.txt"],
@@ -689,7 +659,7 @@
moduleTypeUnderTest: "filegroup",
moduleTypeUnderTestFactory: android.FileGroupFactory,
moduleTypeUnderTestBp2BuildMutator: android.FilegroupBp2Build,
- bp: `filegroup {
+ blueprint: `filegroup {
name: "foobar",
srcs: [
":foo",
@@ -705,207 +675,13 @@
],
)`,
},
- fs: map[string]string{
+ filesystem: map[string]string{
"other/Android.bp": `filegroup {
name: "foo",
srcs: ["a", "b"],
}`,
},
},
- {
- description: "genrule with command line variable replacements",
- moduleTypeUnderTest: "genrule",
- moduleTypeUnderTestFactory: genrule.GenRuleFactory,
- moduleTypeUnderTestBp2BuildMutator: genrule.GenruleBp2Build,
- bp: `genrule {
- name: "foo.tool",
- out: ["foo_tool.out"],
- srcs: ["foo_tool.in"],
- cmd: "cp $(in) $(out)",
- bazel_module: { bp2build_available: true },
-}
-
-genrule {
- name: "foo",
- out: ["foo.out"],
- srcs: ["foo.in"],
- tools: [":foo.tool"],
- cmd: "$(location :foo.tool) --genDir=$(genDir) arg $(in) $(out)",
- bazel_module: { bp2build_available: true },
-}`,
- expectedBazelTargets: []string{
- `genrule(
- name = "foo",
- cmd = "$(location :foo.tool) --genDir=$(GENDIR) arg $(SRCS) $(OUTS)",
- outs = ["foo.out"],
- srcs = ["foo.in"],
- tools = [":foo.tool"],
-)`,
- `genrule(
- name = "foo.tool",
- cmd = "cp $(SRCS) $(OUTS)",
- outs = ["foo_tool.out"],
- srcs = ["foo_tool.in"],
-)`,
- },
- },
- {
- description: "genrule using $(locations :label)",
- moduleTypeUnderTest: "genrule",
- moduleTypeUnderTestFactory: genrule.GenRuleFactory,
- moduleTypeUnderTestBp2BuildMutator: genrule.GenruleBp2Build,
- bp: `genrule {
- name: "foo.tools",
- out: ["foo_tool.out", "foo_tool2.out"],
- srcs: ["foo_tool.in"],
- cmd: "cp $(in) $(out)",
- bazel_module: { bp2build_available: true },
-}
-
-genrule {
- name: "foo",
- out: ["foo.out"],
- srcs: ["foo.in"],
- tools: [":foo.tools"],
- cmd: "$(locations :foo.tools) -s $(out) $(in)",
- bazel_module: { bp2build_available: true },
-}`,
- expectedBazelTargets: []string{`genrule(
- name = "foo",
- cmd = "$(locations :foo.tools) -s $(OUTS) $(SRCS)",
- outs = ["foo.out"],
- srcs = ["foo.in"],
- tools = [":foo.tools"],
-)`,
- `genrule(
- name = "foo.tools",
- cmd = "cp $(SRCS) $(OUTS)",
- outs = [
- "foo_tool.out",
- "foo_tool2.out",
- ],
- srcs = ["foo_tool.in"],
-)`,
- },
- },
- {
- description: "genrule using $(locations //absolute:label)",
- moduleTypeUnderTest: "genrule",
- moduleTypeUnderTestFactory: genrule.GenRuleFactory,
- moduleTypeUnderTestBp2BuildMutator: genrule.GenruleBp2Build,
- bp: `genrule {
- name: "foo",
- out: ["foo.out"],
- srcs: ["foo.in"],
- tool_files: [":foo.tool"],
- cmd: "$(locations :foo.tool) -s $(out) $(in)",
- bazel_module: { bp2build_available: true },
-}`,
- expectedBazelTargets: []string{`genrule(
- name = "foo",
- cmd = "$(locations //other:foo.tool) -s $(OUTS) $(SRCS)",
- outs = ["foo.out"],
- srcs = ["foo.in"],
- tools = ["//other:foo.tool"],
-)`,
- },
- fs: otherGenruleBp,
- },
- {
- description: "genrule srcs using $(locations //absolute:label)",
- moduleTypeUnderTest: "genrule",
- moduleTypeUnderTestFactory: genrule.GenRuleFactory,
- moduleTypeUnderTestBp2BuildMutator: genrule.GenruleBp2Build,
- bp: `genrule {
- name: "foo",
- out: ["foo.out"],
- srcs: [":other.tool"],
- tool_files: [":foo.tool"],
- cmd: "$(locations :foo.tool) -s $(out) $(location :other.tool)",
- bazel_module: { bp2build_available: true },
-}`,
- expectedBazelTargets: []string{`genrule(
- name = "foo",
- cmd = "$(locations //other:foo.tool) -s $(OUTS) $(location //other:other.tool)",
- outs = ["foo.out"],
- srcs = ["//other:other.tool"],
- tools = ["//other:foo.tool"],
-)`,
- },
- fs: otherGenruleBp,
- },
- {
- description: "genrule using $(location) label should substitute first tool label automatically",
- moduleTypeUnderTest: "genrule",
- moduleTypeUnderTestFactory: genrule.GenRuleFactory,
- moduleTypeUnderTestBp2BuildMutator: genrule.GenruleBp2Build,
- bp: `genrule {
- name: "foo",
- out: ["foo.out"],
- srcs: ["foo.in"],
- tool_files: [":foo.tool", ":other.tool"],
- cmd: "$(location) -s $(out) $(in)",
- bazel_module: { bp2build_available: true },
-}`,
- expectedBazelTargets: []string{`genrule(
- name = "foo",
- cmd = "$(location //other:foo.tool) -s $(OUTS) $(SRCS)",
- outs = ["foo.out"],
- srcs = ["foo.in"],
- tools = [
- "//other:foo.tool",
- "//other:other.tool",
- ],
-)`,
- },
- fs: otherGenruleBp,
- },
- {
- description: "genrule using $(locations) label should substitute first tool label automatically",
- moduleTypeUnderTest: "genrule",
- moduleTypeUnderTestFactory: genrule.GenRuleFactory,
- moduleTypeUnderTestBp2BuildMutator: genrule.GenruleBp2Build,
- bp: `genrule {
- name: "foo",
- out: ["foo.out"],
- srcs: ["foo.in"],
- tools: [":foo.tool", ":other.tool"],
- cmd: "$(locations) -s $(out) $(in)",
- bazel_module: { bp2build_available: true },
-}`,
- expectedBazelTargets: []string{`genrule(
- name = "foo",
- cmd = "$(locations //other:foo.tool) -s $(OUTS) $(SRCS)",
- outs = ["foo.out"],
- srcs = ["foo.in"],
- tools = [
- "//other:foo.tool",
- "//other:other.tool",
- ],
-)`,
- },
- fs: otherGenruleBp,
- },
- {
- description: "genrule without tools or tool_files can convert successfully",
- moduleTypeUnderTest: "genrule",
- moduleTypeUnderTestFactory: genrule.GenRuleFactory,
- moduleTypeUnderTestBp2BuildMutator: genrule.GenruleBp2Build,
- bp: `genrule {
- name: "foo",
- out: ["foo.out"],
- srcs: ["foo.in"],
- cmd: "cp $(in) $(out)",
- bazel_module: { bp2build_available: true },
-}`,
- expectedBazelTargets: []string{`genrule(
- name = "foo",
- cmd = "cp $(SRCS) $(OUTS)",
- outs = ["foo.out"],
- srcs = ["foo.in"],
-)`,
- },
- },
}
dir := "."
@@ -914,24 +690,24 @@
toParse := []string{
"Android.bp",
}
- for f, content := range testCase.fs {
+ for f, content := range testCase.filesystem {
if strings.HasSuffix(f, "Android.bp") {
toParse = append(toParse, f)
}
fs[f] = []byte(content)
}
- config := android.TestConfig(buildDir, nil, testCase.bp, fs)
+ config := android.TestConfig(buildDir, nil, testCase.blueprint, fs)
ctx := android.NewTestContext(config)
ctx.RegisterModuleType(testCase.moduleTypeUnderTest, testCase.moduleTypeUnderTestFactory)
ctx.RegisterBp2BuildMutator(testCase.moduleTypeUnderTest, testCase.moduleTypeUnderTestBp2BuildMutator)
ctx.RegisterForBazelConversion()
_, errs := ctx.ParseFileList(dir, toParse)
- if errored(t, testCase.description, errs) {
+ if errored(t, testCase, errs) {
continue
}
_, errs = ctx.ResolveDependencies(config)
- if errored(t, testCase.description, errs) {
+ if errored(t, testCase, errs) {
continue
}
@@ -961,199 +737,6 @@
type bp2buildMutator = func(android.TopDownMutatorContext)
-func TestBp2BuildInlinesDefaults(t *testing.T) {
- testCases := []struct {
- moduleTypesUnderTest map[string]android.ModuleFactory
- bp2buildMutatorsUnderTest map[string]bp2buildMutator
- bp string
- expectedBazelTarget string
- description string
- }{
- {
- moduleTypesUnderTest: map[string]android.ModuleFactory{
- "genrule": genrule.GenRuleFactory,
- "genrule_defaults": func() android.Module { return genrule.DefaultsFactory() },
- },
- bp2buildMutatorsUnderTest: map[string]bp2buildMutator{
- "genrule": genrule.GenruleBp2Build,
- },
- bp: `genrule_defaults {
- name: "gen_defaults",
- cmd: "do-something $(in) $(out)",
-}
-genrule {
- name: "gen",
- out: ["out"],
- srcs: ["in1"],
- defaults: ["gen_defaults"],
- bazel_module: { bp2build_available: true },
-}
-`,
- expectedBazelTarget: `genrule(
- name = "gen",
- cmd = "do-something $(SRCS) $(OUTS)",
- outs = ["out"],
- srcs = ["in1"],
-)`,
- description: "genrule applies properties from a genrule_defaults dependency if not specified",
- },
- {
- moduleTypesUnderTest: map[string]android.ModuleFactory{
- "genrule": genrule.GenRuleFactory,
- "genrule_defaults": func() android.Module { return genrule.DefaultsFactory() },
- },
- bp2buildMutatorsUnderTest: map[string]bp2buildMutator{
- "genrule": genrule.GenruleBp2Build,
- },
- bp: `genrule_defaults {
- name: "gen_defaults",
- out: ["out-from-defaults"],
- srcs: ["in-from-defaults"],
- cmd: "cmd-from-defaults",
-}
-genrule {
- name: "gen",
- out: ["out"],
- srcs: ["in1"],
- defaults: ["gen_defaults"],
- cmd: "do-something $(in) $(out)",
- bazel_module: { bp2build_available: true },
-}
-`,
- expectedBazelTarget: `genrule(
- name = "gen",
- cmd = "do-something $(SRCS) $(OUTS)",
- outs = [
- "out-from-defaults",
- "out",
- ],
- srcs = [
- "in-from-defaults",
- "in1",
- ],
-)`,
- description: "genrule does merges properties from a genrule_defaults dependency, latest-first",
- },
- {
- moduleTypesUnderTest: map[string]android.ModuleFactory{
- "genrule": genrule.GenRuleFactory,
- "genrule_defaults": func() android.Module { return genrule.DefaultsFactory() },
- },
- bp2buildMutatorsUnderTest: map[string]bp2buildMutator{
- "genrule": genrule.GenruleBp2Build,
- },
- bp: `genrule_defaults {
- name: "gen_defaults1",
- cmd: "cp $(in) $(out)",
-}
-
-genrule_defaults {
- name: "gen_defaults2",
- srcs: ["in1"],
-}
-
-genrule {
- name: "gen",
- out: ["out"],
- defaults: ["gen_defaults1", "gen_defaults2"],
- bazel_module: { bp2build_available: true },
-}
-`,
- expectedBazelTarget: `genrule(
- name = "gen",
- cmd = "cp $(SRCS) $(OUTS)",
- outs = ["out"],
- srcs = ["in1"],
-)`,
- description: "genrule applies properties from list of genrule_defaults",
- },
- {
- moduleTypesUnderTest: map[string]android.ModuleFactory{
- "genrule": genrule.GenRuleFactory,
- "genrule_defaults": func() android.Module { return genrule.DefaultsFactory() },
- },
- bp2buildMutatorsUnderTest: map[string]bp2buildMutator{
- "genrule": genrule.GenruleBp2Build,
- },
- bp: `genrule_defaults {
- name: "gen_defaults1",
- defaults: ["gen_defaults2"],
- cmd: "cmd1 $(in) $(out)", // overrides gen_defaults2's cmd property value.
-}
-
-genrule_defaults {
- name: "gen_defaults2",
- defaults: ["gen_defaults3"],
- cmd: "cmd2 $(in) $(out)",
- out: ["out-from-2"],
- srcs: ["in1"],
-}
-
-genrule_defaults {
- name: "gen_defaults3",
- out: ["out-from-3"],
- srcs: ["srcs-from-3"],
-}
-
-genrule {
- name: "gen",
- out: ["out"],
- defaults: ["gen_defaults1"],
- bazel_module: { bp2build_available: true },
-}
-`,
- expectedBazelTarget: `genrule(
- name = "gen",
- cmd = "cmd1 $(SRCS) $(OUTS)",
- outs = [
- "out-from-3",
- "out-from-2",
- "out",
- ],
- srcs = [
- "srcs-from-3",
- "in1",
- ],
-)`,
- description: "genrule applies properties from genrule_defaults transitively",
- },
- }
-
- dir := "."
- for _, testCase := range testCases {
- config := android.TestConfig(buildDir, nil, testCase.bp, nil)
- ctx := android.NewTestContext(config)
- for m, factory := range testCase.moduleTypesUnderTest {
- ctx.RegisterModuleType(m, factory)
- }
- for mutator, f := range testCase.bp2buildMutatorsUnderTest {
- ctx.RegisterBp2BuildMutator(mutator, f)
- }
- ctx.RegisterForBazelConversion()
-
- _, errs := ctx.ParseFileList(dir, []string{"Android.bp"})
- android.FailIfErrored(t, errs)
- _, errs = ctx.ResolveDependencies(config)
- android.FailIfErrored(t, errs)
-
- codegenCtx := NewCodegenContext(config, *ctx.Context, Bp2Build)
- bazelTargets := generateBazelTargetsForDir(codegenCtx, dir)
- if actualCount := len(bazelTargets); actualCount != 1 {
- t.Fatalf("%s: Expected 1 bazel target, got %d", testCase.description, actualCount)
- }
-
- actualBazelTarget := bazelTargets[0]
- if actualBazelTarget.content != testCase.expectedBazelTarget {
- t.Errorf(
- "%s: Expected generated Bazel target to be '%s', got '%s'",
- testCase.description,
- testCase.expectedBazelTarget,
- actualBazelTarget.content,
- )
- }
- }
-}
-
func TestAllowlistingBp2buildTargetsExplicitly(t *testing.T) {
testCases := []struct {
moduleTypeUnderTest string
@@ -1353,30 +936,20 @@
}
func TestCombineBuildFilesBp2buildTargets(t *testing.T) {
- testCases := []struct {
- description string
- moduleTypeUnderTest string
- moduleTypeUnderTestFactory android.ModuleFactory
- moduleTypeUnderTestBp2BuildMutator func(android.TopDownMutatorContext)
- preArchMutators []android.RegisterMutatorFunc
- bp string
- expectedBazelTargets []string
- fs map[string]string
- dir string
- }{
+ testCases := []bp2buildTestCase{
{
description: "filegroup bazel_module.label",
moduleTypeUnderTest: "filegroup",
moduleTypeUnderTestFactory: android.FileGroupFactory,
moduleTypeUnderTestBp2BuildMutator: android.FilegroupBp2Build,
- bp: `filegroup {
+ blueprint: `filegroup {
name: "fg_foo",
bazel_module: { label: "//other:fg_foo" },
}`,
expectedBazelTargets: []string{
`// BUILD file`,
},
- fs: map[string]string{
+ filesystem: map[string]string{
"other/BUILD.bazel": `// BUILD file`,
},
},
@@ -1385,7 +958,7 @@
moduleTypeUnderTest: "filegroup",
moduleTypeUnderTestFactory: android.FileGroupFactory,
moduleTypeUnderTestBp2BuildMutator: android.FilegroupBp2Build,
- bp: `filegroup {
+ blueprint: `filegroup {
name: "fg_foo",
bazel_module: { label: "//other:fg_foo" },
}
@@ -1397,7 +970,7 @@
expectedBazelTargets: []string{
`// BUILD file`,
},
- fs: map[string]string{
+ filesystem: map[string]string{
"other/BUILD.bazel": `// BUILD file`,
},
},
@@ -1407,8 +980,8 @@
moduleTypeUnderTestFactory: android.FileGroupFactory,
moduleTypeUnderTestBp2BuildMutator: android.FilegroupBp2Build,
dir: "other",
- bp: ``,
- fs: map[string]string{
+ blueprint: ``,
+ filesystem: map[string]string{
"other/Android.bp": `filegroup {
name: "fg_foo",
bazel_module: {
@@ -1434,7 +1007,7 @@
moduleTypeUnderTest: "filegroup",
moduleTypeUnderTestFactory: android.FileGroupFactory,
moduleTypeUnderTestBp2BuildMutator: android.FilegroupBp2Build,
- bp: `filegroup {
+ blueprint: `filegroup {
name: "fg_foo",
bazel_module: {
label: "//other:fg_foo",
@@ -1453,7 +1026,7 @@
)`,
`// BUILD file`,
},
- fs: map[string]string{
+ filesystem: map[string]string{
"other/BUILD.bazel": `// BUILD file`,
},
},
@@ -1466,24 +1039,24 @@
toParse := []string{
"Android.bp",
}
- for f, content := range testCase.fs {
+ for f, content := range testCase.filesystem {
if strings.HasSuffix(f, "Android.bp") {
toParse = append(toParse, f)
}
fs[f] = []byte(content)
}
- config := android.TestConfig(buildDir, nil, testCase.bp, fs)
+ config := android.TestConfig(buildDir, nil, testCase.blueprint, fs)
ctx := android.NewTestContext(config)
ctx.RegisterModuleType(testCase.moduleTypeUnderTest, testCase.moduleTypeUnderTestFactory)
ctx.RegisterBp2BuildMutator(testCase.moduleTypeUnderTest, testCase.moduleTypeUnderTestBp2BuildMutator)
ctx.RegisterForBazelConversion()
_, errs := ctx.ParseFileList(dir, toParse)
- if errored(t, testCase.description, errs) {
+ if errored(t, testCase, errs) {
return
}
_, errs = ctx.ResolveDependencies(config)
- if errored(t, testCase.description, errs) {
+ if errored(t, testCase, errs) {
return
}
@@ -1517,22 +1090,13 @@
}
func TestGlobExcludeSrcs(t *testing.T) {
- testCases := []struct {
- description string
- moduleTypeUnderTest string
- moduleTypeUnderTestFactory android.ModuleFactory
- moduleTypeUnderTestBp2BuildMutator func(android.TopDownMutatorContext)
- bp string
- expectedBazelTargets []string
- fs map[string]string
- dir string
- }{
+ testCases := []bp2buildTestCase{
{
description: "filegroup top level exclude_srcs",
moduleTypeUnderTest: "filegroup",
moduleTypeUnderTestFactory: android.FileGroupFactory,
moduleTypeUnderTestBp2BuildMutator: android.FilegroupBp2Build,
- bp: `filegroup {
+ blueprint: `filegroup {
name: "fg_foo",
srcs: ["**/*.txt"],
exclude_srcs: ["c.txt"],
@@ -1548,7 +1112,7 @@
],
)`,
},
- fs: map[string]string{
+ filesystem: map[string]string{
"a.txt": "",
"b.txt": "",
"c.txt": "",
@@ -1562,9 +1126,9 @@
moduleTypeUnderTest: "filegroup",
moduleTypeUnderTestFactory: android.FileGroupFactory,
moduleTypeUnderTestBp2BuildMutator: android.FilegroupBp2Build,
- bp: "",
+ blueprint: "",
dir: "dir",
- fs: map[string]string{
+ filesystem: map[string]string{
"dir/Android.bp": `filegroup {
name: "fg_foo",
srcs: ["**/*.txt"],
@@ -1596,24 +1160,24 @@
toParse := []string{
"Android.bp",
}
- for f, content := range testCase.fs {
+ for f, content := range testCase.filesystem {
if strings.HasSuffix(f, "Android.bp") {
toParse = append(toParse, f)
}
fs[f] = []byte(content)
}
- config := android.TestConfig(buildDir, nil, testCase.bp, fs)
+ config := android.TestConfig(buildDir, nil, testCase.blueprint, fs)
ctx := android.NewTestContext(config)
ctx.RegisterModuleType(testCase.moduleTypeUnderTest, testCase.moduleTypeUnderTestFactory)
ctx.RegisterBp2BuildMutator(testCase.moduleTypeUnderTest, testCase.moduleTypeUnderTestBp2BuildMutator)
ctx.RegisterForBazelConversion()
_, errs := ctx.ParseFileList(dir, toParse)
- if errored(t, testCase.description, errs) {
+ if errored(t, testCase, errs) {
continue
}
_, errs = ctx.ResolveDependencies(config)
- if errored(t, testCase.description, errs) {
+ if errored(t, testCase, errs) {
continue
}
diff --git a/bp2build/cc_library_conversion_test.go b/bp2build/cc_library_conversion_test.go
index bff192f..c840016 100644
--- a/bp2build/cc_library_conversion_test.go
+++ b/bp2build/cc_library_conversion_test.go
@@ -15,10 +15,10 @@
package bp2build
import (
+ "testing"
+
"android/soong/android"
"android/soong/cc"
- "strings"
- "testing"
)
const (
@@ -54,59 +54,6 @@
ctx.RegisterModuleType("cc_library_headers", cc.LibraryHeaderFactory)
}
-func runBp2BuildTestCase(t *testing.T, registerModuleTypes func(ctx android.RegistrationContext), tc bp2buildTestCase) {
- t.Helper()
- dir := "."
- filesystem := make(map[string][]byte)
- toParse := []string{
- "Android.bp",
- }
- for f, content := range tc.filesystem {
- if strings.HasSuffix(f, "Android.bp") {
- toParse = append(toParse, f)
- }
- filesystem[f] = []byte(content)
- }
- config := android.TestConfig(buildDir, nil, tc.blueprint, filesystem)
- ctx := android.NewTestContext(config)
-
- registerModuleTypes(ctx)
- ctx.RegisterModuleType(tc.moduleTypeUnderTest, tc.moduleTypeUnderTestFactory)
- ctx.RegisterBp2BuildConfig(bp2buildConfig)
- ctx.RegisterBp2BuildMutator(tc.moduleTypeUnderTest, tc.moduleTypeUnderTestBp2BuildMutator)
- ctx.RegisterForBazelConversion()
-
- _, errs := ctx.ParseFileList(dir, toParse)
- if errored(t, tc.description, errs) {
- return
- }
- _, errs = ctx.ResolveDependencies(config)
- if errored(t, tc.description, errs) {
- return
- }
-
- checkDir := dir
- if tc.dir != "" {
- checkDir = tc.dir
- }
- codegenCtx := NewCodegenContext(config, *ctx.Context, Bp2Build)
- bazelTargets := generateBazelTargetsForDir(codegenCtx, checkDir)
- if actualCount, expectedCount := len(bazelTargets), len(tc.expectedBazelTargets); actualCount != expectedCount {
- t.Errorf("%s: Expected %d bazel target, got %d", tc.description, expectedCount, actualCount)
- } else {
- for i, target := range bazelTargets {
- if w, g := tc.expectedBazelTargets[i], target.content; w != g {
- t.Errorf(
- "%s: Expected generated Bazel target to be '%s', got '%s'",
- tc.description,
- w,
- g,
- )
- }
- }
- }
-}
-
func TestCcLibrarySimple(t *testing.T) {
runCcLibraryTestCase(t, bp2buildTestCase{
description: "cc_library - simple example",
diff --git a/bp2build/cc_library_headers_conversion_test.go b/bp2build/cc_library_headers_conversion_test.go
index 712d0bd..ea2c10a 100644
--- a/bp2build/cc_library_headers_conversion_test.go
+++ b/bp2build/cc_library_headers_conversion_test.go
@@ -40,17 +40,6 @@
}`
)
-type bp2buildTestCase struct {
- description string
- moduleTypeUnderTest string
- moduleTypeUnderTestFactory android.ModuleFactory
- moduleTypeUnderTestBp2BuildMutator func(android.TopDownMutatorContext)
- blueprint string
- expectedBazelTargets []string
- filesystem map[string]string
- dir string
-}
-
func TestCcLibraryHeadersLoadStatement(t *testing.T) {
testCases := []struct {
bazelTargets BazelTargets
diff --git a/bp2build/filegroup_conversion_test.go b/bp2build/filegroup_conversion_test.go
new file mode 100644
index 0000000..ad99236
--- /dev/null
+++ b/bp2build/filegroup_conversion_test.go
@@ -0,0 +1,62 @@
+// Copyright 2021 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bp2build
+
+import (
+ "android/soong/android"
+ "fmt"
+
+ "testing"
+)
+
+func runFilegroupTestCase(t *testing.T, tc bp2buildTestCase) {
+ t.Helper()
+ runBp2BuildTestCase(t, registerFilegroupModuleTypes, tc)
+}
+
+func registerFilegroupModuleTypes(ctx android.RegistrationContext) {}
+
+func TestFilegroupSameNameAsFile_OneFile(t *testing.T) {
+ runFilegroupTestCase(t, bp2buildTestCase{
+ description: "filegroup - same name as file, with one file",
+ moduleTypeUnderTest: "filegroup",
+ moduleTypeUnderTestFactory: android.FileGroupFactory,
+ moduleTypeUnderTestBp2BuildMutator: android.FilegroupBp2Build,
+ filesystem: map[string]string{},
+ blueprint: `
+filegroup {
+ name: "foo",
+ srcs: ["foo"],
+}
+`,
+ expectedBazelTargets: []string{}})
+}
+
+func TestFilegroupSameNameAsFile_MultipleFiles(t *testing.T) {
+ runFilegroupTestCase(t, bp2buildTestCase{
+ description: "filegroup - same name as file, with multiple files",
+ moduleTypeUnderTest: "filegroup",
+ moduleTypeUnderTestFactory: android.FileGroupFactory,
+ moduleTypeUnderTestBp2BuildMutator: android.FilegroupBp2Build,
+ filesystem: map[string]string{},
+ blueprint: `
+filegroup {
+ name: "foo",
+ srcs: ["foo", "bar"],
+}
+`,
+ expectedErr: fmt.Errorf("filegroup 'foo' cannot contain a file with the same name"),
+ })
+}
diff --git a/bp2build/genrule_conversion_test.go b/bp2build/genrule_conversion_test.go
new file mode 100644
index 0000000..a991180
--- /dev/null
+++ b/bp2build/genrule_conversion_test.go
@@ -0,0 +1,479 @@
+// Copyright 2021 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bp2build
+
+import (
+ "android/soong/android"
+ "android/soong/genrule"
+ "strings"
+ "testing"
+)
+
+func TestGenruleBp2Build(t *testing.T) {
+ otherGenruleBp := map[string]string{
+ "other/Android.bp": `genrule {
+ name: "foo.tool",
+ out: ["foo_tool.out"],
+ srcs: ["foo_tool.in"],
+ cmd: "cp $(in) $(out)",
+}
+genrule {
+ name: "other.tool",
+ out: ["other_tool.out"],
+ srcs: ["other_tool.in"],
+ cmd: "cp $(in) $(out)",
+}`,
+ }
+
+ testCases := []bp2buildTestCase{
+ {
+ description: "genrule with command line variable replacements",
+ moduleTypeUnderTest: "genrule",
+ moduleTypeUnderTestFactory: genrule.GenRuleFactory,
+ moduleTypeUnderTestBp2BuildMutator: genrule.GenruleBp2Build,
+ blueprint: `genrule {
+ name: "foo.tool",
+ out: ["foo_tool.out"],
+ srcs: ["foo_tool.in"],
+ cmd: "cp $(in) $(out)",
+ bazel_module: { bp2build_available: true },
+}
+
+genrule {
+ name: "foo",
+ out: ["foo.out"],
+ srcs: ["foo.in"],
+ tools: [":foo.tool"],
+ cmd: "$(location :foo.tool) --genDir=$(genDir) arg $(in) $(out)",
+ bazel_module: { bp2build_available: true },
+}`,
+ expectedBazelTargets: []string{
+ `genrule(
+ name = "foo",
+ cmd = "$(location :foo.tool) --genDir=$(GENDIR) arg $(SRCS) $(OUTS)",
+ outs = ["foo.out"],
+ srcs = ["foo.in"],
+ tools = [":foo.tool"],
+)`,
+ `genrule(
+ name = "foo.tool",
+ cmd = "cp $(SRCS) $(OUTS)",
+ outs = ["foo_tool.out"],
+ srcs = ["foo_tool.in"],
+)`,
+ },
+ },
+ {
+ description: "genrule using $(locations :label)",
+ moduleTypeUnderTest: "genrule",
+ moduleTypeUnderTestFactory: genrule.GenRuleFactory,
+ moduleTypeUnderTestBp2BuildMutator: genrule.GenruleBp2Build,
+ blueprint: `genrule {
+ name: "foo.tools",
+ out: ["foo_tool.out", "foo_tool2.out"],
+ srcs: ["foo_tool.in"],
+ cmd: "cp $(in) $(out)",
+ bazel_module: { bp2build_available: true },
+}
+
+genrule {
+ name: "foo",
+ out: ["foo.out"],
+ srcs: ["foo.in"],
+ tools: [":foo.tools"],
+ cmd: "$(locations :foo.tools) -s $(out) $(in)",
+ bazel_module: { bp2build_available: true },
+}`,
+ expectedBazelTargets: []string{`genrule(
+ name = "foo",
+ cmd = "$(locations :foo.tools) -s $(OUTS) $(SRCS)",
+ outs = ["foo.out"],
+ srcs = ["foo.in"],
+ tools = [":foo.tools"],
+)`,
+ `genrule(
+ name = "foo.tools",
+ cmd = "cp $(SRCS) $(OUTS)",
+ outs = [
+ "foo_tool.out",
+ "foo_tool2.out",
+ ],
+ srcs = ["foo_tool.in"],
+)`,
+ },
+ },
+ {
+ description: "genrule using $(locations //absolute:label)",
+ moduleTypeUnderTest: "genrule",
+ moduleTypeUnderTestFactory: genrule.GenRuleFactory,
+ moduleTypeUnderTestBp2BuildMutator: genrule.GenruleBp2Build,
+ blueprint: `genrule {
+ name: "foo",
+ out: ["foo.out"],
+ srcs: ["foo.in"],
+ tool_files: [":foo.tool"],
+ cmd: "$(locations :foo.tool) -s $(out) $(in)",
+ bazel_module: { bp2build_available: true },
+}`,
+ expectedBazelTargets: []string{`genrule(
+ name = "foo",
+ cmd = "$(locations //other:foo.tool) -s $(OUTS) $(SRCS)",
+ outs = ["foo.out"],
+ srcs = ["foo.in"],
+ tools = ["//other:foo.tool"],
+)`,
+ },
+ filesystem: otherGenruleBp,
+ },
+ {
+ description: "genrule srcs using $(locations //absolute:label)",
+ moduleTypeUnderTest: "genrule",
+ moduleTypeUnderTestFactory: genrule.GenRuleFactory,
+ moduleTypeUnderTestBp2BuildMutator: genrule.GenruleBp2Build,
+ blueprint: `genrule {
+ name: "foo",
+ out: ["foo.out"],
+ srcs: [":other.tool"],
+ tool_files: [":foo.tool"],
+ cmd: "$(locations :foo.tool) -s $(out) $(location :other.tool)",
+ bazel_module: { bp2build_available: true },
+}`,
+ expectedBazelTargets: []string{`genrule(
+ name = "foo",
+ cmd = "$(locations //other:foo.tool) -s $(OUTS) $(location //other:other.tool)",
+ outs = ["foo.out"],
+ srcs = ["//other:other.tool"],
+ tools = ["//other:foo.tool"],
+)`,
+ },
+ filesystem: otherGenruleBp,
+ },
+ {
+ description: "genrule using $(location) label should substitute first tool label automatically",
+ moduleTypeUnderTest: "genrule",
+ moduleTypeUnderTestFactory: genrule.GenRuleFactory,
+ moduleTypeUnderTestBp2BuildMutator: genrule.GenruleBp2Build,
+ blueprint: `genrule {
+ name: "foo",
+ out: ["foo.out"],
+ srcs: ["foo.in"],
+ tool_files: [":foo.tool", ":other.tool"],
+ cmd: "$(location) -s $(out) $(in)",
+ bazel_module: { bp2build_available: true },
+}`,
+ expectedBazelTargets: []string{`genrule(
+ name = "foo",
+ cmd = "$(location //other:foo.tool) -s $(OUTS) $(SRCS)",
+ outs = ["foo.out"],
+ srcs = ["foo.in"],
+ tools = [
+ "//other:foo.tool",
+ "//other:other.tool",
+ ],
+)`,
+ },
+ filesystem: otherGenruleBp,
+ },
+ {
+ description: "genrule using $(locations) label should substitute first tool label automatically",
+ moduleTypeUnderTest: "genrule",
+ moduleTypeUnderTestFactory: genrule.GenRuleFactory,
+ moduleTypeUnderTestBp2BuildMutator: genrule.GenruleBp2Build,
+ blueprint: `genrule {
+ name: "foo",
+ out: ["foo.out"],
+ srcs: ["foo.in"],
+ tools: [":foo.tool", ":other.tool"],
+ cmd: "$(locations) -s $(out) $(in)",
+ bazel_module: { bp2build_available: true },
+}`,
+ expectedBazelTargets: []string{`genrule(
+ name = "foo",
+ cmd = "$(locations //other:foo.tool) -s $(OUTS) $(SRCS)",
+ outs = ["foo.out"],
+ srcs = ["foo.in"],
+ tools = [
+ "//other:foo.tool",
+ "//other:other.tool",
+ ],
+)`,
+ },
+ filesystem: otherGenruleBp,
+ },
+ {
+ description: "genrule without tools or tool_files can convert successfully",
+ moduleTypeUnderTest: "genrule",
+ moduleTypeUnderTestFactory: genrule.GenRuleFactory,
+ moduleTypeUnderTestBp2BuildMutator: genrule.GenruleBp2Build,
+ blueprint: `genrule {
+ name: "foo",
+ out: ["foo.out"],
+ srcs: ["foo.in"],
+ cmd: "cp $(in) $(out)",
+ bazel_module: { bp2build_available: true },
+}`,
+ expectedBazelTargets: []string{`genrule(
+ name = "foo",
+ cmd = "cp $(SRCS) $(OUTS)",
+ outs = ["foo.out"],
+ srcs = ["foo.in"],
+)`,
+ },
+ },
+ }
+
+ dir := "."
+ for _, testCase := range testCases {
+ fs := make(map[string][]byte)
+ toParse := []string{
+ "Android.bp",
+ }
+ for f, content := range testCase.filesystem {
+ if strings.HasSuffix(f, "Android.bp") {
+ toParse = append(toParse, f)
+ }
+ fs[f] = []byte(content)
+ }
+ config := android.TestConfig(buildDir, nil, testCase.blueprint, fs)
+ ctx := android.NewTestContext(config)
+ ctx.RegisterModuleType(testCase.moduleTypeUnderTest, testCase.moduleTypeUnderTestFactory)
+ ctx.RegisterBp2BuildMutator(testCase.moduleTypeUnderTest, testCase.moduleTypeUnderTestBp2BuildMutator)
+ ctx.RegisterForBazelConversion()
+
+ _, errs := ctx.ParseFileList(dir, toParse)
+ if errored(t, testCase, errs) {
+ continue
+ }
+ _, errs = ctx.ResolveDependencies(config)
+ if errored(t, testCase, errs) {
+ continue
+ }
+
+ checkDir := dir
+ if testCase.dir != "" {
+ checkDir = testCase.dir
+ }
+
+ codegenCtx := NewCodegenContext(config, *ctx.Context, Bp2Build)
+ bazelTargets := generateBazelTargetsForDir(codegenCtx, checkDir)
+ if actualCount, expectedCount := len(bazelTargets), len(testCase.expectedBazelTargets); actualCount != expectedCount {
+ t.Errorf("%s: Expected %d bazel target, got %d", testCase.description, expectedCount, actualCount)
+ } else {
+ for i, target := range bazelTargets {
+ if w, g := testCase.expectedBazelTargets[i], target.content; w != g {
+ t.Errorf(
+ "%s: Expected generated Bazel target to be '%s', got '%s'",
+ testCase.description,
+ w,
+ g,
+ )
+ }
+ }
+ }
+ }
+}
+
+func TestBp2BuildInlinesDefaults(t *testing.T) {
+ testCases := []struct {
+ moduleTypesUnderTest map[string]android.ModuleFactory
+ bp2buildMutatorsUnderTest map[string]bp2buildMutator
+ bp string
+ expectedBazelTarget string
+ description string
+ }{
+ {
+ moduleTypesUnderTest: map[string]android.ModuleFactory{
+ "genrule": genrule.GenRuleFactory,
+ "genrule_defaults": func() android.Module { return genrule.DefaultsFactory() },
+ },
+ bp2buildMutatorsUnderTest: map[string]bp2buildMutator{
+ "genrule": genrule.GenruleBp2Build,
+ },
+ bp: `genrule_defaults {
+ name: "gen_defaults",
+ cmd: "do-something $(in) $(out)",
+}
+genrule {
+ name: "gen",
+ out: ["out"],
+ srcs: ["in1"],
+ defaults: ["gen_defaults"],
+ bazel_module: { bp2build_available: true },
+}
+`,
+ expectedBazelTarget: `genrule(
+ name = "gen",
+ cmd = "do-something $(SRCS) $(OUTS)",
+ outs = ["out"],
+ srcs = ["in1"],
+)`,
+ description: "genrule applies properties from a genrule_defaults dependency if not specified",
+ },
+ {
+ moduleTypesUnderTest: map[string]android.ModuleFactory{
+ "genrule": genrule.GenRuleFactory,
+ "genrule_defaults": func() android.Module { return genrule.DefaultsFactory() },
+ },
+ bp2buildMutatorsUnderTest: map[string]bp2buildMutator{
+ "genrule": genrule.GenruleBp2Build,
+ },
+ bp: `genrule_defaults {
+ name: "gen_defaults",
+ out: ["out-from-defaults"],
+ srcs: ["in-from-defaults"],
+ cmd: "cmd-from-defaults",
+}
+genrule {
+ name: "gen",
+ out: ["out"],
+ srcs: ["in1"],
+ defaults: ["gen_defaults"],
+ cmd: "do-something $(in) $(out)",
+ bazel_module: { bp2build_available: true },
+}
+`,
+ expectedBazelTarget: `genrule(
+ name = "gen",
+ cmd = "do-something $(SRCS) $(OUTS)",
+ outs = [
+ "out-from-defaults",
+ "out",
+ ],
+ srcs = [
+ "in-from-defaults",
+ "in1",
+ ],
+)`,
+			description: "genrule merges properties from a genrule_defaults dependency, latest-first",
+ },
+ {
+ moduleTypesUnderTest: map[string]android.ModuleFactory{
+ "genrule": genrule.GenRuleFactory,
+ "genrule_defaults": func() android.Module { return genrule.DefaultsFactory() },
+ },
+ bp2buildMutatorsUnderTest: map[string]bp2buildMutator{
+ "genrule": genrule.GenruleBp2Build,
+ },
+ bp: `genrule_defaults {
+ name: "gen_defaults1",
+ cmd: "cp $(in) $(out)",
+}
+
+genrule_defaults {
+ name: "gen_defaults2",
+ srcs: ["in1"],
+}
+
+genrule {
+ name: "gen",
+ out: ["out"],
+ defaults: ["gen_defaults1", "gen_defaults2"],
+ bazel_module: { bp2build_available: true },
+}
+`,
+ expectedBazelTarget: `genrule(
+ name = "gen",
+ cmd = "cp $(SRCS) $(OUTS)",
+ outs = ["out"],
+ srcs = ["in1"],
+)`,
+ description: "genrule applies properties from list of genrule_defaults",
+ },
+ {
+ moduleTypesUnderTest: map[string]android.ModuleFactory{
+ "genrule": genrule.GenRuleFactory,
+ "genrule_defaults": func() android.Module { return genrule.DefaultsFactory() },
+ },
+ bp2buildMutatorsUnderTest: map[string]bp2buildMutator{
+ "genrule": genrule.GenruleBp2Build,
+ },
+ bp: `genrule_defaults {
+ name: "gen_defaults1",
+ defaults: ["gen_defaults2"],
+ cmd: "cmd1 $(in) $(out)", // overrides gen_defaults2's cmd property value.
+}
+
+genrule_defaults {
+ name: "gen_defaults2",
+ defaults: ["gen_defaults3"],
+ cmd: "cmd2 $(in) $(out)",
+ out: ["out-from-2"],
+ srcs: ["in1"],
+}
+
+genrule_defaults {
+ name: "gen_defaults3",
+ out: ["out-from-3"],
+ srcs: ["srcs-from-3"],
+}
+
+genrule {
+ name: "gen",
+ out: ["out"],
+ defaults: ["gen_defaults1"],
+ bazel_module: { bp2build_available: true },
+}
+`,
+ expectedBazelTarget: `genrule(
+ name = "gen",
+ cmd = "cmd1 $(SRCS) $(OUTS)",
+ outs = [
+ "out-from-3",
+ "out-from-2",
+ "out",
+ ],
+ srcs = [
+ "srcs-from-3",
+ "in1",
+ ],
+)`,
+ description: "genrule applies properties from genrule_defaults transitively",
+ },
+ }
+
+ dir := "."
+ for _, testCase := range testCases {
+ config := android.TestConfig(buildDir, nil, testCase.bp, nil)
+ ctx := android.NewTestContext(config)
+ for m, factory := range testCase.moduleTypesUnderTest {
+ ctx.RegisterModuleType(m, factory)
+ }
+ for mutator, f := range testCase.bp2buildMutatorsUnderTest {
+ ctx.RegisterBp2BuildMutator(mutator, f)
+ }
+ ctx.RegisterForBazelConversion()
+
+ _, errs := ctx.ParseFileList(dir, []string{"Android.bp"})
+ android.FailIfErrored(t, errs)
+ _, errs = ctx.ResolveDependencies(config)
+ android.FailIfErrored(t, errs)
+
+ codegenCtx := NewCodegenContext(config, *ctx.Context, Bp2Build)
+ bazelTargets := generateBazelTargetsForDir(codegenCtx, dir)
+ if actualCount := len(bazelTargets); actualCount != 1 {
+ t.Fatalf("%s: Expected 1 bazel target, got %d", testCase.description, actualCount)
+ }
+
+ actualBazelTarget := bazelTargets[0]
+ if actualBazelTarget.content != testCase.expectedBazelTarget {
+ t.Errorf(
+ "%s: Expected generated Bazel target to be '%s', got '%s'",
+ testCase.description,
+ testCase.expectedBazelTarget,
+ actualBazelTarget.content,
+ )
+ }
+ }
+}
diff --git a/bp2build/python_binary_conversion_test.go b/bp2build/python_binary_conversion_test.go
index 7bedf71..6f6fc11 100644
--- a/bp2build/python_binary_conversion_test.go
+++ b/bp2build/python_binary_conversion_test.go
@@ -7,13 +7,15 @@
"android/soong/python"
)
-func runPythonTestCase(t *testing.T, tc bp2buildTestCase) {
- t.Helper()
- runBp2BuildTestCase(t, func(ctx android.RegistrationContext) {}, tc)
+func runBp2BuildTestCaseWithLibs(t *testing.T, tc bp2buildTestCase) {
+ runBp2BuildTestCase(t, func(ctx android.RegistrationContext) {
+ ctx.RegisterModuleType("python_library", python.PythonLibraryFactory)
+ ctx.RegisterModuleType("python_library_host", python.PythonLibraryHostFactory)
+ }, tc)
}
func TestPythonBinaryHostSimple(t *testing.T) {
- runPythonTestCase(t, bp2buildTestCase{
+ runBp2BuildTestCaseWithLibs(t, bp2buildTestCase{
description: "simple python_binary_host converts to a native py_binary",
moduleTypeUnderTest: "python_binary_host",
moduleTypeUnderTestFactory: python.PythonBinaryHostFactory,
@@ -31,12 +33,18 @@
srcs: ["**/*.py"],
exclude_srcs: ["b/e.py"],
data: ["files/data.txt",],
+ libs: ["bar"],
bazel_module: { bp2build_available: true },
}
-`,
+ python_library_host {
+ name: "bar",
+ srcs: ["b/e.py"],
+ bazel_module: { bp2build_available: true },
+ }`,
expectedBazelTargets: []string{`py_binary(
name = "foo",
data = ["files/data.txt"],
+ deps = [":bar"],
main = "a.py",
srcs = [
"a.py",
@@ -49,7 +57,7 @@
}
func TestPythonBinaryHostPy2(t *testing.T) {
- runPythonTestCase(t, bp2buildTestCase{
+ runBp2BuildTestCaseSimple(t, bp2buildTestCase{
description: "py2 python_binary_host",
moduleTypeUnderTest: "python_binary_host",
moduleTypeUnderTestFactory: python.PythonBinaryHostFactory,
@@ -79,7 +87,7 @@
}
func TestPythonBinaryHostPy3(t *testing.T) {
- runPythonTestCase(t, bp2buildTestCase{
+ runBp2BuildTestCaseSimple(t, bp2buildTestCase{
description: "py3 python_binary_host",
moduleTypeUnderTest: "python_binary_host",
moduleTypeUnderTestFactory: python.PythonBinaryHostFactory,
diff --git a/bp2build/python_library_conversion_test.go b/bp2build/python_library_conversion_test.go
new file mode 100644
index 0000000..b6f45e5
--- /dev/null
+++ b/bp2build/python_library_conversion_test.go
@@ -0,0 +1,156 @@
+package bp2build
+
+import (
+ "fmt"
+ "testing"
+
+ "android/soong/android"
+ "android/soong/python"
+)
+
+// TODO(alexmarquez): Should be lifted into a generic Bp2Build file
+type PythonLibBp2Build func(ctx android.TopDownMutatorContext)
+
+func TestPythonLibrary(t *testing.T) {
+ testPythonLib(t, "python_library",
+ python.PythonLibraryFactory, python.PythonLibraryBp2Build,
+ func(ctx android.RegistrationContext) {})
+}
+
+func TestPythonLibraryHost(t *testing.T) {
+ testPythonLib(t, "python_library_host",
+ python.PythonLibraryHostFactory, python.PythonLibraryHostBp2Build,
+ func(ctx android.RegistrationContext) {
+ ctx.RegisterModuleType("python_library", python.PythonLibraryFactory)
+ })
+}
+
+func testPythonLib(t *testing.T, modType string,
+ factory android.ModuleFactory, mutator PythonLibBp2Build,
+ registration func(ctx android.RegistrationContext)) {
+ t.Helper()
+ // Simple
+ runBp2BuildTestCase(t, registration, bp2buildTestCase{
+ description: fmt.Sprintf("simple %s converts to a native py_library", modType),
+ moduleTypeUnderTest: modType,
+ moduleTypeUnderTestFactory: factory,
+ moduleTypeUnderTestBp2BuildMutator: mutator,
+ filesystem: map[string]string{
+ "a.py": "",
+ "b/c.py": "",
+ "b/d.py": "",
+ "b/e.py": "",
+ "files/data.txt": "",
+ },
+ blueprint: fmt.Sprintf(`%s {
+ name: "foo",
+ srcs: ["**/*.py"],
+ exclude_srcs: ["b/e.py"],
+ data: ["files/data.txt",],
+ libs: ["bar"],
+ bazel_module: { bp2build_available: true },
+}
+ python_library {
+ name: "bar",
+ srcs: ["b/e.py"],
+ bazel_module: { bp2build_available: false },
+ }`, modType),
+ expectedBazelTargets: []string{`py_library(
+ name = "foo",
+ data = ["files/data.txt"],
+ deps = [":bar"],
+ srcs = [
+ "a.py",
+ "b/c.py",
+ "b/d.py",
+ ],
+ srcs_version = "PY3",
+)`,
+ },
+ })
+
+ // PY2
+ runBp2BuildTestCaseSimple(t, bp2buildTestCase{
+ description: fmt.Sprintf("py2 %s converts to a native py_library", modType),
+ moduleTypeUnderTest: modType,
+ moduleTypeUnderTestFactory: factory,
+ moduleTypeUnderTestBp2BuildMutator: mutator,
+ blueprint: fmt.Sprintf(`%s {
+ name: "foo",
+ srcs: ["a.py"],
+ version: {
+ py2: {
+ enabled: true,
+ },
+ py3: {
+ enabled: false,
+ },
+ },
+
+ bazel_module: { bp2build_available: true },
+}`, modType),
+ expectedBazelTargets: []string{`py_library(
+ name = "foo",
+ srcs = ["a.py"],
+ srcs_version = "PY2",
+)`,
+ },
+ })
+
+ // PY3
+ runBp2BuildTestCaseSimple(t, bp2buildTestCase{
+ description: fmt.Sprintf("py3 %s converts to a native py_library", modType),
+ moduleTypeUnderTest: modType,
+ moduleTypeUnderTestFactory: factory,
+ moduleTypeUnderTestBp2BuildMutator: mutator,
+ blueprint: fmt.Sprintf(`%s {
+ name: "foo",
+ srcs: ["a.py"],
+ version: {
+ py2: {
+ enabled: false,
+ },
+ py3: {
+ enabled: true,
+ },
+ },
+
+ bazel_module: { bp2build_available: true },
+}`, modType),
+ expectedBazelTargets: []string{`py_library(
+ name = "foo",
+ srcs = ["a.py"],
+ srcs_version = "PY3",
+)`,
+ },
+ })
+
+ // Both
+ runBp2BuildTestCaseSimple(t, bp2buildTestCase{
+ description: fmt.Sprintf("py2&3 %s converts to a native py_library", modType),
+ moduleTypeUnderTest: modType,
+ moduleTypeUnderTestFactory: factory,
+ moduleTypeUnderTestBp2BuildMutator: mutator,
+ blueprint: fmt.Sprintf(`%s {
+ name: "foo",
+ srcs: ["a.py"],
+ version: {
+ py2: {
+ enabled: true,
+ },
+ py3: {
+ enabled: true,
+ },
+ },
+
+ bazel_module: { bp2build_available: true },
+}`, modType),
+ expectedBazelTargets: []string{
+ // srcs_version is PY2ANDPY3 by default.
+ `py_library(
+ name = "foo",
+ srcs = ["a.py"],
+)`,
+ },
+ })
+}
diff --git a/bp2build/testing.go b/bp2build/testing.go
index 266b817..3ebe63d 100644
--- a/bp2build/testing.go
+++ b/bp2build/testing.go
@@ -1,6 +1,26 @@
+// Copyright 2021 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
package bp2build
+/*
+Shared bp2build testing functionality, and a home for helpers that are specific
+to particular tests but reused across test files in this package.
+*/
+
import (
+ "strings"
"testing"
"android/soong/android"
@@ -16,6 +36,114 @@
buildDir string
)
+func checkError(t *testing.T, errs []error, expectedErr error) bool {
+ t.Helper()
+
+	// expectedErr is not nil; it is expected to be the only error reported.
+	if len(errs) != 1 {
+		t.Errorf("Expected only 1 error, got %d: %q", len(errs), errs)
+		return false
+	}
+ if errs[0].Error() == expectedErr.Error() {
+ return true
+ }
+
+ return false
+}
+
+func errored(t *testing.T, tc bp2buildTestCase, errs []error) bool {
+ t.Helper()
+ if tc.expectedErr != nil {
+		// Rely on checkError, as this test case is expected to have an error.
+ return false
+ }
+
+ if len(errs) > 0 {
+ for _, err := range errs {
+ t.Errorf("%s: %s", tc.description, err)
+ }
+ return true
+ }
+
+ // All good, continue execution.
+ return false
+}
+
+func runBp2BuildTestCaseSimple(t *testing.T, tc bp2buildTestCase) {
+ t.Helper()
+ runBp2BuildTestCase(t, func(ctx android.RegistrationContext) {}, tc)
+}
+
+type bp2buildTestCase struct {
+ description string
+ moduleTypeUnderTest string
+ moduleTypeUnderTestFactory android.ModuleFactory
+ moduleTypeUnderTestBp2BuildMutator func(android.TopDownMutatorContext)
+ blueprint string
+ expectedBazelTargets []string
+ filesystem map[string]string
+ dir string
+ expectedErr error
+}
+
+func runBp2BuildTestCase(t *testing.T, registerModuleTypes func(ctx android.RegistrationContext), tc bp2buildTestCase) {
+ t.Helper()
+ dir := "."
+ filesystem := make(map[string][]byte)
+ toParse := []string{
+ "Android.bp",
+ }
+ for f, content := range tc.filesystem {
+ if strings.HasSuffix(f, "Android.bp") {
+ toParse = append(toParse, f)
+ }
+ filesystem[f] = []byte(content)
+ }
+ config := android.TestConfig(buildDir, nil, tc.blueprint, filesystem)
+ ctx := android.NewTestContext(config)
+
+ registerModuleTypes(ctx)
+ ctx.RegisterModuleType(tc.moduleTypeUnderTest, tc.moduleTypeUnderTestFactory)
+ ctx.RegisterBp2BuildConfig(bp2buildConfig)
+ ctx.RegisterBp2BuildMutator(tc.moduleTypeUnderTest, tc.moduleTypeUnderTestBp2BuildMutator)
+ ctx.RegisterForBazelConversion()
+
+ _, parseErrs := ctx.ParseFileList(dir, toParse)
+ if errored(t, tc, parseErrs) {
+ return
+ }
+ _, resolveDepsErrs := ctx.ResolveDependencies(config)
+ if errored(t, tc, resolveDepsErrs) {
+ return
+ }
+
+ errs := append(parseErrs, resolveDepsErrs...)
+ if tc.expectedErr != nil && checkError(t, errs, tc.expectedErr) {
+ return
+ }
+
+ checkDir := dir
+ if tc.dir != "" {
+ checkDir = tc.dir
+ }
+ codegenCtx := NewCodegenContext(config, *ctx.Context, Bp2Build)
+ bazelTargets := generateBazelTargetsForDir(codegenCtx, checkDir)
+ if actualCount, expectedCount := len(bazelTargets), len(tc.expectedBazelTargets); actualCount != expectedCount {
+ t.Errorf("%s: Expected %d bazel target, got %d; %v",
+ tc.description, expectedCount, actualCount, bazelTargets)
+ } else {
+ for i, target := range bazelTargets {
+ if w, g := tc.expectedBazelTargets[i], target.content; w != g {
+ t.Errorf(
+ "%s: Expected generated Bazel target to be '%s', got '%s'",
+ tc.description,
+ w,
+ g,
+ )
+ }
+ }
+ }
+}
+
type nestedProps struct {
Nested_prop string
}
@@ -44,17 +172,6 @@
props customProps
}
-func errored(t *testing.T, desc string, errs []error) bool {
- t.Helper()
- if len(errs) > 0 {
- for _, err := range errs {
- t.Errorf("%s: %s", desc, err)
- }
- return true
- }
- return false
-}
-
// OutputFiles is needed because some instances of this module use dist with a
// tag property which requires the module implements OutputFileProducer.
func (m *customModule) OutputFiles(tag string) (android.Paths, error) {
@@ -135,11 +252,6 @@
Arch_paths bazel.LabelListAttribute
}
-type customBazelModule struct {
- android.BazelTargetModuleBase
- customBazelModuleAttributes
-}
-
func customBp2BuildMutator(ctx android.TopDownMutatorContext) {
if m, ok := ctx.Module().(*customModule); ok {
if !m.ConvertWithBp2build(ctx) {
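For reference, a minimal sketch of how a conversion test is meant to use the consolidated bp2buildTestCase and runBp2BuildTestCaseSimple helpers introduced in bp2build/testing.go above. The test name, blueprint contents, and expected BUILD text below are illustrative placeholders, not part of this change.

```go
// Illustrative sketch only: a conversion test written against the shared
// harness in bp2build/testing.go. Module contents and expected output are
// placeholders.
package bp2build

import (
	"testing"

	"android/soong/android"
)

func TestFilegroupHarnessSketch(t *testing.T) {
	runBp2BuildTestCaseSimple(t, bp2buildTestCase{
		description:                        "filegroup - shared harness usage sketch",
		moduleTypeUnderTest:                "filegroup",
		moduleTypeUnderTestFactory:         android.FileGroupFactory,
		moduleTypeUnderTestBp2BuildMutator: android.FilegroupBp2Build,
		blueprint: `filegroup {
    name: "fg_sketch",
    srcs: ["x.txt"],
    bazel_module: { bp2build_available: true },
}`,
		expectedBazelTargets: []string{`filegroup(
    name = "fg_sketch",
    srcs = ["x.txt"],
)`,
		},
	})
}
```

Negative cases set expectedErr instead of expectedBazelTargets, as the new filegroup_conversion_test.go above does.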
diff --git a/bpfix/cmd_lib/bpfix.go b/bpfix/cmd_lib/bpfix.go
index f90f65b..1106d4a 100644
--- a/bpfix/cmd_lib/bpfix.go
+++ b/bpfix/cmd_lib/bpfix.go
@@ -114,7 +114,7 @@
func makeFileVisitor(fixRequest bpfix.FixRequest) func(string, os.FileInfo, error) error {
return func(path string, f os.FileInfo, err error) error {
- if err == nil && (f.Name() == "Blueprints" || f.Name() == "Android.bp") {
+ if err == nil && f.Name() == "Android.bp" {
err = openAndProcess(path, os.Stdout, fixRequest)
}
if err != nil {
diff --git a/cc/androidmk.go b/cc/androidmk.go
index bda1006..cd52363 100644
--- a/cc/androidmk.go
+++ b/cc/androidmk.go
@@ -24,12 +24,12 @@
)
var (
- nativeBridgeSuffix = ".native_bridge"
- productSuffix = ".product"
+ NativeBridgeSuffix = ".native_bridge"
+ ProductSuffix = ".product"
VendorSuffix = ".vendor"
- ramdiskSuffix = ".ramdisk"
+ RamdiskSuffix = ".ramdisk"
VendorRamdiskSuffix = ".vendor_ramdisk"
- recoverySuffix = ".recovery"
+ RecoverySuffix = ".recovery"
sdkSuffix = ".sdk"
)
@@ -182,7 +182,7 @@
if ctx.Target().NativeBridge == android.NativeBridgeEnabled {
var result []string
for _, override := range overrides {
- result = append(result, override+nativeBridgeSuffix)
+ result = append(result, override+NativeBridgeSuffix)
}
return result
}
@@ -294,6 +294,9 @@
if library.buildStubs() {
entries.SetBool("LOCAL_NO_NOTICE_FILE", true)
}
+ if library.apiListCoverageXmlPath.String() != "" {
+ entries.SetString("SOONG_CC_API_XML", "$(SOONG_CC_API_XML) "+library.apiListCoverageXmlPath.String())
+ }
})
}
// If a library providing a stub is included in an APEX, the private APIs of the library
diff --git a/cc/binary.go b/cc/binary.go
index 763d2b9..b423c50 100644
--- a/cc/binary.go
+++ b/cc/binary.go
@@ -156,6 +156,10 @@
}
}
+ if binary.static() {
+ deps.StaticLibs = append(deps.StaticLibs, deps.SystemSharedLibs...)
+ }
+
if ctx.toolchain().Bionic() {
if binary.static() {
if ctx.selectedStl() == "libc++_static" {
@@ -208,7 +212,7 @@
func (binary *binaryDecorator) linkerInit(ctx BaseModuleContext) {
binary.baseLinker.linkerInit(ctx)
- if !ctx.toolchain().Bionic() {
+ if !ctx.toolchain().Bionic() && !ctx.toolchain().Musl() {
if ctx.Os() == android.Linux {
// Unless explicitly specified otherwise, host static binaries are built with -static
// if HostStaticBinaries is true for the product configuration.
diff --git a/cc/binary_sdk_member.go b/cc/binary_sdk_member.go
index ebf89ea..71e0cd8 100644
--- a/cc/binary_sdk_member.go
+++ b/cc/binary_sdk_member.go
@@ -38,16 +38,16 @@
android.SdkMemberTypeBase
}
-func (mt *binarySdkMemberType) AddDependencies(mctx android.BottomUpMutatorContext, dependencyTag blueprint.DependencyTag, names []string) {
- targets := mctx.MultiTargets()
+func (mt *binarySdkMemberType) AddDependencies(ctx android.SdkDependencyContext, dependencyTag blueprint.DependencyTag, names []string) {
+ targets := ctx.MultiTargets()
for _, bin := range names {
for _, target := range targets {
variations := target.Variations()
- if mctx.Device() {
+ if ctx.Device() {
variations = append(variations,
blueprint.Variation{Mutator: "image", Variation: android.CoreVariation})
}
- mctx.AddFarVariationDependencies(variations, dependencyTag, bin)
+ ctx.AddFarVariationDependencies(variations, dependencyTag, bin)
}
}
}
diff --git a/cc/builder.go b/cc/builder.go
index 842ce85..a8219d7 100644
--- a/cc/builder.go
+++ b/cc/builder.go
@@ -205,11 +205,13 @@
Labels: map[string]string{"type": "lint", "tool": "clang-tidy", "lang": "cpp"},
ExecStrategy: "${config.REClangTidyExecStrategy}",
Inputs: []string{"$in"},
- // OutputFile here is $in for remote-execution since its possible that
- // clang-tidy modifies the given input file itself and $out refers to the
- // ".tidy" file generated for ninja-dependency reasons.
- OutputFiles: []string{"$in"},
- Platform: map[string]string{remoteexec.PoolKey: "${config.REClangTidyPool}"},
+		// Although clang-tidy has an option to "fix" source files, that feature is hardly usable
+		// under parallel compilation and RBE, so we set no OutputFiles here.
+		// The clang-tidy fix option is best run locally, in a single thread.
+		// Copying the source file back to the local tree caused two problems:
+		// (1) New timestamps triggered clang and clang-tidy compilations again.
+		// (2) Changing source files caused concurrent clang or clang-tidy jobs to crash.
+ Platform: map[string]string{remoteexec.PoolKey: "${config.REClangTidyPool}"},
}, []string{"cFlags", "tidyFlags"}, []string{})
_ = pctx.SourcePathVariable("yasmCmd", "prebuilts/misc/${config.HostPrebuiltTag}/yasm/yasm")
@@ -384,9 +386,6 @@
systemIncludeFlags string
- // True if static libraries should be grouped (using `-Wl,--start-group` and `-Wl,--end-group`).
- groupStaticLibs bool
-
proto android.ProtoFlags
protoC bool // If true, compile protos as `.c` files. Otherwise, output as `.cc`.
protoOptionsFile bool // If true, output a proto options file.
@@ -634,6 +633,7 @@
rule = clangTidyRE
}
+ ctx.TidyFile(tidyFile)
ctx.Build(pctx, android.BuildParams{
Rule: rule,
Description: "clang-tidy " + srcFile.Rel(),
@@ -646,7 +646,7 @@
OrderOnly: pathDeps,
Args: map[string]string{
"cFlags": moduleToolingFlags,
- "tidyFlags": flags.tidyFlags,
+ "tidyFlags": config.TidyFlagsForSrcFile(srcFile, flags.tidyFlags),
},
})
}
@@ -752,13 +752,7 @@
}
}
- if flags.groupStaticLibs && !ctx.Darwin() && len(staticLibs) > 0 {
- libFlagsList = append(libFlagsList, "-Wl,--start-group")
- }
libFlagsList = append(libFlagsList, staticLibs.Strings()...)
- if flags.groupStaticLibs && !ctx.Darwin() && len(staticLibs) > 0 {
- libFlagsList = append(libFlagsList, "-Wl,--end-group")
- }
if groupLate && !ctx.Darwin() && len(lateStaticLibs) > 0 {
libFlagsList = append(libFlagsList, "-Wl,--start-group")
diff --git a/cc/cc.go b/cc/cc.go
index f65af30..b0c0299 100644
--- a/cc/cc.go
+++ b/cc/cc.go
@@ -217,8 +217,6 @@
// True if .s files should be processed with the c preprocessor.
AssemblerWithCpp bool
- // True if static libraries should be grouped (using `-Wl,--start-group` and `-Wl,--end-group`).
- GroupStaticLibs bool
proto android.ProtoFlags
protoC bool // Whether to use C instead of C++
@@ -822,6 +820,16 @@
}
func (c *Module) AddJSONData(d *map[string]interface{}) {
+ var hasAidl, hasLex, hasProto, hasRenderscript, hasSysprop, hasWinMsg, hasYacc bool
+ if b, ok := c.compiler.(*baseCompiler); ok {
+ hasAidl = b.hasSrcExt(".aidl")
+ hasLex = b.hasSrcExt(".l") || b.hasSrcExt(".ll")
+ hasProto = b.hasSrcExt(".proto")
+ hasRenderscript = b.hasSrcExt(".rscript") || b.hasSrcExt(".fs")
+ hasSysprop = b.hasSrcExt(".sysprop")
+ hasWinMsg = b.hasSrcExt(".mc")
+ hasYacc = b.hasSrcExt(".y") || b.hasSrcExt(".yy")
+ }
c.AndroidModuleBase().AddJSONData(d)
(*d)["Cc"] = map[string]interface{}{
"SdkVersion": c.SdkVersion(),
@@ -858,6 +866,14 @@
"IsVendorPublicLibrary": c.IsVendorPublicLibrary(),
"ApexSdkVersion": c.apexSdkVersion,
"TestFor": c.TestFor(),
+ "AidlSrcs": hasAidl,
+ "LexSrcs": hasLex,
+ "ProtoSrcs": hasProto,
+ "RenderscriptSrcs": hasRenderscript,
+ "SyspropSrcs": hasSysprop,
+ "WinMsgSrcs": hasWinMsg,
+ "YaccSrsc": hasYacc,
+ "OnlyCSrcs": !(hasAidl || hasLex || hasProto || hasRenderscript || hasSysprop || hasWinMsg || hasYacc),
}
}
@@ -957,16 +973,17 @@
return String(c.Properties.Min_sdk_version)
}
-func (c *Module) SplitPerApiLevel() bool {
- if !c.canUseSdk() {
- return false
- }
+func (c *Module) isCrt() bool {
if linker, ok := c.linker.(*objectLinker); ok {
return linker.isCrt()
}
return false
}
+func (c *Module) SplitPerApiLevel() bool {
+ return c.canUseSdk() && c.isCrt()
+}
+
func (c *Module) AlwaysSdk() bool {
return c.Properties.AlwaysSdk || Bool(c.Properties.Sdk_variant_only)
}
@@ -1428,12 +1445,20 @@
// create versioned variants for. For example, if min_sdk_version is 16, then sdk variant of
// the crt object has local variants of 16, 17, ..., up to the latest version. sdk_version
// and min_sdk_version properties of the variants are set to the corresponding version
- // numbers. However, the platform (non-sdk) variant of the crt object is left untouched.
- // min_sdk_version: 16 doesn't actually mean that the platform variant has to support such
- // an old version. Since the variant is for the platform, it's preferred to target the
- // latest version.
- if ctx.mod.SplitPerApiLevel() && !ctx.isSdkVariant() {
- ver = strconv.Itoa(android.FutureApiLevelInt)
+ // numbers. However, the non-sdk variant (for apex or platform) of the crt object is left
+ // untouched. min_sdk_version: 16 doesn't actually mean that the non-sdk variant has to
+ // support such an old version. Instead, the version is set to the latest version when the
+ // non-sdk variant is for the platform, or to the min_sdk_version of the containing APEX
+ // when it is for an APEX.
+ if ctx.mod.isCrt() && !ctx.isSdkVariant() {
+ if ctx.isForPlatform() {
+ ver = strconv.Itoa(android.FutureApiLevelInt)
+ } else { // for apex
+ ver = ctx.apexSdkVersion().String()
+ if ver == "" { // in case when min_sdk_version was not set by the APEX
+ ver = ctx.sdkVersion()
+ }
+ }
}
// Also make sure that minSdkVersion is not greater than sdkVersion, if they are both numbers
@@ -1632,7 +1657,7 @@
return ""
}
vndkVersion = ctx.DeviceConfig().ProductVndkVersion()
- nameSuffix = productSuffix
+ nameSuffix = ProductSuffix
} else {
vndkVersion = ctx.DeviceConfig().VndkVersion()
nameSuffix = VendorSuffix
@@ -1652,7 +1677,7 @@
c.Properties.SubName = ""
if c.Target().NativeBridge == android.NativeBridgeEnabled {
- c.Properties.SubName += nativeBridgeSuffix
+ c.Properties.SubName += NativeBridgeSuffix
}
llndk := c.IsLlndk()
@@ -1668,11 +1693,11 @@
// such suffixes are already hard-coded in prebuilts/vndk/.../Android.bp.
c.Properties.SubName += VendorSuffix
} else if c.InRamdisk() && !c.OnlyInRamdisk() {
- c.Properties.SubName += ramdiskSuffix
+ c.Properties.SubName += RamdiskSuffix
} else if c.InVendorRamdisk() && !c.OnlyInVendorRamdisk() {
c.Properties.SubName += VendorRamdiskSuffix
} else if c.InRecovery() && !c.OnlyInRecovery() {
- c.Properties.SubName += recoverySuffix
+ c.Properties.SubName += RecoverySuffix
} else if c.IsSdkVariant() && (c.Properties.SdkAndPlatformVariantVisibleToMake || c.SplitPerApiLevel()) {
c.Properties.SubName += sdkSuffix
if c.SplitPerApiLevel() {
@@ -3029,13 +3054,13 @@
// core module, so update the dependency name here accordingly.
return libName + ccDep.SubName()
} else if ccDep.InRamdisk() && !ccDep.OnlyInRamdisk() {
- return libName + ramdiskSuffix
+ return libName + RamdiskSuffix
} else if ccDep.InVendorRamdisk() && !ccDep.OnlyInVendorRamdisk() {
return libName + VendorRamdiskSuffix
} else if ccDep.InRecovery() && !ccDep.OnlyInRecovery() {
- return libName + recoverySuffix
+ return libName + RecoverySuffix
} else if ccDep.Target().NativeBridge == android.NativeBridgeEnabled {
- return libName + nativeBridgeSuffix
+ return libName + NativeBridgeSuffix
} else {
return libName
}
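
To make the crt version-selection comment above concrete, the following is a minimal, self-contained Go sketch of how the target version falls out for the different variants of a crt object with min_sdk_version: 16. It is not Soong code; the function name, parameter list, and the empty-string fallback are a hypothetical distillation of the hunk above.

package main

import "fmt"

const futureApiLevel = "10000"

// chooseCrtVersion is a hypothetical distillation of the logic above: sdk
// variants keep their own per-API-level version, the platform variant targets
// the future API level, and an APEX variant targets the APEX's
// min_sdk_version, falling back to the module's sdk_version if unset.
func chooseCrtVersion(isSdkVariant, isForPlatform bool, variantVer, apexMinSdk, sdkVersion string) string {
	if isSdkVariant {
		return variantVer
	}
	if isForPlatform {
		return futureApiLevel
	}
	if apexMinSdk != "" {
		return apexMinSdk
	}
	return sdkVersion
}

func main() {
	fmt.Println(chooseCrtVersion(true, false, "16", "", "16"))  // sdk variant -> "16"
	fmt.Println(chooseCrtVersion(false, true, "", "", "16"))    // platform variant -> "10000"
	fmt.Println(chooseCrtVersion(false, false, "", "29", "16")) // apex with min_sdk_version 29 -> "29"
}
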
diff --git a/cc/cmakelists.go b/cc/cmakelists.go
index 04536fc..ad130ba 100644
--- a/cc/cmakelists.go
+++ b/cc/cmakelists.go
@@ -316,7 +316,7 @@
if strings.HasPrefix(parameter, "--sysroot") {
return systemRoot
}
- if strings.HasPrefix(parameter, "-fsanitize-blacklist") {
+ if strings.HasPrefix(parameter, "-fsanitize-ignorelist") {
return relativeFilePathFlag
}
if strings.HasPrefix(parameter, "-fprofile-sample-use") {
diff --git a/cc/config/OWNERS b/cc/config/OWNERS
index 701db92..580f215 100644
--- a/cc/config/OWNERS
+++ b/cc/config/OWNERS
@@ -1,3 +1,3 @@
per-file vndk.go = smoreland@google.com, victoryang@google.com
-per-file clang.go,global.go = srhines@google.com, chh@google.com, pirama@google.com, yikong@google.com
+per-file clang.go,global.go,tidy.go = srhines@google.com, chh@google.com, pirama@google.com, yikong@google.com
diff --git a/cc/config/tidy.go b/cc/config/tidy.go
index c4563e2..8682502 100644
--- a/cc/config/tidy.go
+++ b/cc/config/tidy.go
@@ -106,6 +106,7 @@
const tidyDefault = "${config.TidyDefaultGlobalChecks}"
const tidyExternalVendor = "${config.TidyExternalVendorChecks}"
+const tidyDefaultNoAnalyzer = "${config.TidyDefaultGlobalChecks},-clang-analyzer-*"
// This is a map of local path prefixes to the set of default clang-tidy checks
// to be used.
@@ -114,6 +115,7 @@
{"external/", tidyExternalVendor},
{"external/google", tidyDefault},
{"external/webrtc", tidyDefault},
+ {"external/googletest/", tidyExternalVendor},
{"frameworks/compile/mclinker/", tidyExternalVendor},
{"hardware/qcom", tidyExternalVendor},
{"vendor/", tidyExternalVendor},
@@ -132,6 +134,7 @@
}
func TidyChecksForDir(dir string) string {
+ dir = dir + "/"
for _, pathCheck := range reversedDefaultLocalTidyChecks {
if strings.HasPrefix(dir, pathCheck.PathPrefix) {
return pathCheck.Checks
@@ -139,3 +142,17 @@
}
return tidyDefault
}
+
+func TidyFlagsForSrcFile(srcFile android.Path, flags string) string {
+ // Disable clang-analyzer-* checks globally for generated source files
+ // because some of them are too huge. Local .bp files can add wanted
+ // clang-analyzer checks through the tidy_checks property.
+ // Need to do this patch per source file, because some modules
+ // have both generated and organic source files.
+ if _, ok := srcFile.(android.WritablePath); ok {
+ if strings.Contains(flags, tidyDefault) {
+ return strings.ReplaceAll(flags, tidyDefault, tidyDefaultNoAnalyzer)
+ }
+ }
+ return flags
+}
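
The `dir = dir + "/"` change in TidyChecksForDir above exists so that prefix entries ending in "/" (like the new "external/googletest/" row) match the directory itself without also matching sibling directories that merely share the name as a prefix. Below is a standalone sketch of that matching behaviour; the table is an illustrative subset and the check-set names stand in for the real flag strings.

package main

import (
	"fmt"
	"strings"
)

// A small subset of the prefix table, most specific entries first, mirroring
// the shape of reversedDefaultLocalTidyChecks.
var checksByPrefix = []struct{ prefix, checks string }{
	{"external/googletest/", "tidyExternalVendor"},
	{"external/google", "tidyDefault"},
	{"external/", "tidyExternalVendor"},
}

func tidyChecksForDir(dir string) string {
	dir = dir + "/" // terminate the directory so prefixes ending in "/" can match it exactly
	for _, pc := range checksByPrefix {
		if strings.HasPrefix(dir, pc.prefix) {
			return pc.checks
		}
	}
	return "tidyDefault"
}

func main() {
	fmt.Println(tidyChecksForDir("external/googletest"))       // tidyExternalVendor (matches the trailing-slash entry)
	fmt.Println(tidyChecksForDir("external/googletest_extra")) // tidyDefault (falls through to "external/google")
	fmt.Println(tidyChecksForDir("external/zlib"))              // tidyExternalVendor (generic "external/" entry)
}
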
diff --git a/cc/config/vndk.go b/cc/config/vndk.go
index 24e8fa4..8c678a1 100644
--- a/cc/config/vndk.go
+++ b/cc/config/vndk.go
@@ -53,6 +53,8 @@
"android.hardware.power.stats-V1-ndk_platform",
"android.hardware.power.stats-ndk_platform",
"android.hardware.power.stats-unstable-ndk_platform",
+ "android.hardware.radio-V1-ndk",
+ "android.hardware.radio-V1-ndk_platform",
"android.hardware.rebootescrow-ndk_platform",
"android.hardware.security.keymint-V1-ndk",
"android.hardware.security.keymint-V1-ndk_platform",
@@ -74,6 +76,8 @@
"android.hardware.weaver-ndk_platform",
"android.hardware.weaver-unstable-ndk_platform",
"android.system.keystore2-V1-ndk",
+ "android.hardware.wifi.hostapd-V1-ndk",
+ "android.hardware.wifi.hostapd-V1-ndk_platform",
"android.system.keystore2-V1-ndk_platform",
"android.system.keystore2-ndk_platform",
"android.system.keystore2-unstable-ndk_platform",
diff --git a/cc/config/x86_linux_host.go b/cc/config/x86_linux_host.go
index e7fcfed..ac5d5f7 100644
--- a/cc/config/x86_linux_host.go
+++ b/cc/config/x86_linux_host.go
@@ -45,6 +45,7 @@
linuxMuslCflags = []string{
"-D_LIBCPP_HAS_MUSL_LIBC",
+ "-DANDROID_HOST_MUSL",
"-nostdlibinc",
}
@@ -106,7 +107,7 @@
"util",
}, "-l")
- muslCrtBeginStaticBinary, muslCrtEndStaticBinary = []string{"libc_musl_crtbegin_static"}, []string{"crtend_android"}
+ muslCrtBeginStaticBinary, muslCrtEndStaticBinary = []string{"libc_musl_crtbegin_static"}, []string{"libc_musl_crtend"}
muslCrtBeginSharedBinary, muslCrtEndSharedBinary = []string{"libc_musl_crtbegin_dynamic", "musl_linker_script"}, []string{"libc_musl_crtend"}
muslCrtBeginSharedLibrary, muslCrtEndSharedLibrary = []string{"libc_musl_crtbegin_so"}, []string{"libc_musl_crtend_so"}
diff --git a/cc/coverage.go b/cc/coverage.go
index baf4226..8dd2db1 100644
--- a/cc/coverage.go
+++ b/cc/coverage.go
@@ -244,3 +244,19 @@
m[1].(Coverage).EnableCoverageIfNeeded()
}
}
+
+func parseSymbolFileForAPICoverage(ctx ModuleContext, symbolFile string) android.ModuleOutPath {
+ apiLevelsJson := android.GetApiLevelsJson(ctx)
+ symbolFilePath := android.PathForModuleSrc(ctx, symbolFile)
+ outputFile := ctx.baseModuleName() + ".xml"
+ parsedApiCoveragePath := android.PathForModuleOut(ctx, outputFile)
+ rule := android.NewRuleBuilder(pctx, ctx)
+ rule.Command().
+ BuiltTool("ndk_api_coverage_parser").
+ Input(symbolFilePath).
+ Output(parsedApiCoveragePath).
+ Implicit(apiLevelsJson).
+ FlagWithArg("--api-map ", apiLevelsJson.String())
+ rule.Build("native_library_api_list", "Generate native API list based on symbol files for coverage measurement")
+ return parsedApiCoveragePath
+}
diff --git a/cc/fuzz.go b/cc/fuzz.go
index fbef12b..83f0037 100644
--- a/cc/fuzz.go
+++ b/cc/fuzz.go
@@ -123,7 +123,7 @@
// that should be installed in the fuzz target output directories. This function
// returns true, unless:
// - The module is not an installable shared library, or
-// - The module is a header, stub, or vendor-linked library, or
+// - The module is a header or stub, or
// - The module is a prebuilt and its source is available, or
// - The module is a versioned member of an SDK snapshot.
func isValidSharedDependency(dependency android.Module) bool {
@@ -141,11 +141,6 @@
return false
}
- if linkable.UseVndk() {
- // Discard vendor linked libraries.
- return false
- }
-
if lib := moduleLibraryInterface(dependency); lib != nil && lib.buildStubs() && linkable.CcLibrary() {
// Discard stubs libs (only CCLibrary variants). Prebuilt libraries should not
// be excluded on the basis of they're not CCLibrary()'s.
diff --git a/cc/library.go b/cc/library.go
index 1526f81..92d9771 100644
--- a/cc/library.go
+++ b/cc/library.go
@@ -536,6 +536,8 @@
*baseInstaller
collectedSnapshotHeaders android.Paths
+
+ apiListCoverageXmlPath android.ModuleOutPath
}
type ccLibraryBazelHandler struct {
@@ -951,6 +953,12 @@
objs := compileStubLibrary(ctx, flags, nativeAbiResult.stubSrc)
library.versionScriptPath = android.OptionalPathForPath(
nativeAbiResult.versionScript)
+
+ // Parse symbol file to get API list for coverage
+ if library.stubsVersion() == "current" && ctx.PrimaryArch() {
+ library.apiListCoverageXmlPath = parseSymbolFileForAPICoverage(ctx, symbolFile)
+ }
+
return objs
}
@@ -2333,18 +2341,6 @@
Static staticOrSharedAttributes
}
-type bazelCcLibraryStatic struct {
- android.BazelTargetModuleBase
- bazelCcLibraryStaticAttributes
-}
-
-func BazelCcLibraryStaticFactory() android.Module {
- module := &bazelCcLibraryStatic{}
- module.AddProperties(&module.bazelCcLibraryStaticAttributes)
- android.InitBazelTargetModule(module)
- return module
-}
-
func ccLibraryStaticBp2BuildInternal(ctx android.TopDownMutatorContext, module *Module) {
compilerAttrs := bp2BuildParseCompilerProps(ctx, module)
linkerAttrs := bp2BuildParseLinkerProps(ctx, module)
@@ -2415,9 +2411,3 @@
ccLibraryStaticBp2BuildInternal(ctx, module)
}
-
-func (m *bazelCcLibraryStatic) Name() string {
- return m.BaseModuleName()
-}
-
-func (m *bazelCcLibraryStatic) GenerateAndroidBuildActions(ctx android.ModuleContext) {}
diff --git a/cc/library_sdk_member.go b/cc/library_sdk_member.go
index 9010a1a..1866ff3 100644
--- a/cc/library_sdk_member.go
+++ b/cc/library_sdk_member.go
@@ -74,8 +74,8 @@
linkTypes []string
}
-func (mt *librarySdkMemberType) AddDependencies(mctx android.BottomUpMutatorContext, dependencyTag blueprint.DependencyTag, names []string) {
- targets := mctx.MultiTargets()
+func (mt *librarySdkMemberType) AddDependencies(ctx android.SdkDependencyContext, dependencyTag blueprint.DependencyTag, names []string) {
+ targets := ctx.MultiTargets()
for _, lib := range names {
for _, target := range targets {
name, version := StubsLibNameAndVersion(lib)
@@ -83,21 +83,21 @@
version = "latest"
}
variations := target.Variations()
- if mctx.Device() {
+ if ctx.Device() {
variations = append(variations,
blueprint.Variation{Mutator: "image", Variation: android.CoreVariation})
}
if mt.linkTypes == nil {
- mctx.AddFarVariationDependencies(variations, dependencyTag, name)
+ ctx.AddFarVariationDependencies(variations, dependencyTag, name)
} else {
for _, linkType := range mt.linkTypes {
libVariations := append(variations,
blueprint.Variation{Mutator: "link", Variation: linkType})
- if mctx.Device() && linkType == "shared" {
+ if ctx.Device() && linkType == "shared" {
libVariations = append(libVariations,
blueprint.Variation{Mutator: "version", Variation: version})
}
- mctx.AddFarVariationDependencies(libVariations, dependencyTag, name)
+ ctx.AddFarVariationDependencies(libVariations, dependencyTag, name)
}
}
}
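
As a rough illustration of how the variation list is assembled in AddDependencies above, the sketch below walks one device target through the "shared" link type. The Variation struct merely mirrors the shape of blueprint.Variation, and the target's own os/arch variations are made-up values.

package main

import "fmt"

// Variation mirrors the shape of blueprint.Variation (a mutator name plus the
// chosen variation).
type Variation struct {
	Mutator   string
	Variation string
}

func main() {
	// Variations contributed by the target itself (illustrative values).
	variations := []Variation{{"os", "android"}, {"arch", "arm64_armv8-a"}}

	// Device modules additionally select the core image variant.
	device := true
	if device {
		variations = append(variations, Variation{"image", "core"})
	}

	// For each link type, the link (and, for device shared libs, version)
	// variations are appended before the far-variation dependency is added.
	linkType := "shared"
	libVariations := append(variations, Variation{"link", linkType})
	if device && linkType == "shared" {
		libVariations = append(libVariations, Variation{"version", "latest"})
	}

	fmt.Println(libVariations)
}
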
diff --git a/cc/linker.go b/cc/linker.go
index 7c710d7..0d612b5 100644
--- a/cc/linker.go
+++ b/cc/linker.go
@@ -86,8 +86,7 @@
// compiling crt or libc.
Nocrt *bool `android:"arch_variant"`
- // group static libraries. This can resolve missing symbols issues with interdependencies
- // between static libraries, but it is generally better to order them correctly instead.
+ // deprecated and ignored because lld makes it unnecessary. See b/189475744.
Group_static_libs *bool `android:"arch_variant"`
// list of modules that should be installed with this module. This is similar to 'required'
@@ -95,6 +94,9 @@
// vendor variants and this module uses VNDK.
Runtime_libs []string `android:"arch_variant"`
+ // list of runtime libs that should not be installed along with this module.
+ Exclude_runtime_libs []string `android:"arch_variant"`
+
Target struct {
Vendor, Product struct {
// list of shared libs that only should be used to build vendor or
@@ -121,8 +123,8 @@
// product variant of the C/C++ module.
Exclude_header_libs []string
- // list of runtime libs that should not be installed along with
- // vendor or variant of the C/C++ module.
+ // list of runtime libs that should not be installed along with the
+ // vendor or product variant of the C/C++ module.
Exclude_runtime_libs []string
// version script for vendor or product variant
@@ -148,6 +150,10 @@
// list of header libs that should not be used to build the recovery variant
// of the C/C++ module.
Exclude_header_libs []string
+
+ // list of runtime libs that should not be installed along with the
+ // recovery variant of the C/C++ module.
+ Exclude_runtime_libs []string
}
Ramdisk struct {
// list of static libs that only should be used to build the recovery
@@ -161,6 +167,10 @@
// list of static libs that should not be used to build
// the ramdisk variant of the C/C++ module.
Exclude_static_libs []string
+
+ // list of runtime libs that should not be installed along with the
+ // ramdisk variant of the C/C++ module.
+ Exclude_runtime_libs []string
}
Vendor_ramdisk struct {
// list of shared libs that should not be used to build
@@ -170,6 +180,10 @@
// list of static libs that should not be used to build
// the vendor ramdisk variant of the C/C++ module.
Exclude_static_libs []string
+
+ // list of runtime libs that should not be installed along with the
+ // vendor ramdisk variant of the C/C++ module.
+ Exclude_runtime_libs []string
}
Platform struct {
// list of shared libs that should be use to build the platform variant
@@ -191,7 +205,7 @@
// the C/C++ module.
Exclude_shared_libs []string
- // list of static libs that should not be used to build the apex ramdisk
+ // list of static libs that should not be used to build the apex
// variant of the C/C++ module.
Exclude_static_libs []string
}
@@ -275,6 +289,7 @@
deps.SharedLibs = removeListFromList(deps.SharedLibs, linker.Properties.Exclude_shared_libs)
deps.StaticLibs = removeListFromList(deps.StaticLibs, linker.Properties.Exclude_static_libs)
deps.WholeStaticLibs = removeListFromList(deps.WholeStaticLibs, linker.Properties.Exclude_static_libs)
+ deps.RuntimeLibs = removeListFromList(deps.RuntimeLibs, linker.Properties.Exclude_runtime_libs)
// Record the libraries that need to be excluded when building for APEX. Unlike other
// target.*.exclude_* properties, SharedLibs and StaticLibs are not modified here because
@@ -325,6 +340,7 @@
deps.ReexportHeaderLibHeaders = removeListFromList(deps.ReexportHeaderLibHeaders, linker.Properties.Target.Recovery.Exclude_header_libs)
deps.ReexportStaticLibHeaders = removeListFromList(deps.ReexportStaticLibHeaders, linker.Properties.Target.Recovery.Exclude_static_libs)
deps.WholeStaticLibs = removeListFromList(deps.WholeStaticLibs, linker.Properties.Target.Recovery.Exclude_static_libs)
+ deps.RuntimeLibs = removeListFromList(deps.RuntimeLibs, linker.Properties.Target.Recovery.Exclude_runtime_libs)
}
if ctx.inRamdisk() {
@@ -334,6 +350,7 @@
deps.StaticLibs = removeListFromList(deps.StaticLibs, linker.Properties.Target.Ramdisk.Exclude_static_libs)
deps.ReexportStaticLibHeaders = removeListFromList(deps.ReexportStaticLibHeaders, linker.Properties.Target.Ramdisk.Exclude_static_libs)
deps.WholeStaticLibs = removeListFromList(deps.WholeStaticLibs, linker.Properties.Target.Ramdisk.Exclude_static_libs)
+ deps.RuntimeLibs = removeListFromList(deps.RuntimeLibs, linker.Properties.Target.Ramdisk.Exclude_runtime_libs)
}
if ctx.inVendorRamdisk() {
@@ -342,6 +359,7 @@
deps.StaticLibs = removeListFromList(deps.StaticLibs, linker.Properties.Target.Vendor_ramdisk.Exclude_static_libs)
deps.ReexportStaticLibHeaders = removeListFromList(deps.ReexportStaticLibHeaders, linker.Properties.Target.Vendor_ramdisk.Exclude_static_libs)
deps.WholeStaticLibs = removeListFromList(deps.WholeStaticLibs, linker.Properties.Target.Vendor_ramdisk.Exclude_static_libs)
+ deps.RuntimeLibs = removeListFromList(deps.RuntimeLibs, linker.Properties.Target.Vendor_ramdisk.Exclude_runtime_libs)
}
if !ctx.useSdk() {
@@ -524,10 +542,6 @@
flags.Global.LdFlags = append(flags.Global.LdFlags, toolchain.ToolchainLdflags())
- if Bool(linker.Properties.Group_static_libs) {
- flags.GroupStaticLibs = true
- }
-
// Version_script is not needed when linking stubs lib where the version
// script is created from the symbol map file.
if !linker.dynamicProperties.BuildStubs {
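
The new exclude_runtime_libs handling above reuses the same list-subtraction helper as the other exclude_* properties. The sketch below illustrates the semantics assumed for removeListFromList (it is not the actual implementation): every excluded entry is dropped and the order of the remaining entries is preserved.

package main

import "fmt"

// excludeFromList returns list with every element of exclude removed,
// preserving the order of the remaining entries.
func excludeFromList(list, exclude []string) []string {
	drop := make(map[string]bool, len(exclude))
	for _, e := range exclude {
		drop[e] = true
	}
	var out []string
	for _, l := range list {
		if !drop[l] {
			out = append(out, l)
		}
	}
	return out
}

func main() {
	runtimeLibs := []string{"liblog", "libbase", "libutils"}
	excludes := []string{"libbase"}
	fmt.Println(excludeFromList(runtimeLibs, excludes)) // [liblog libutils]
}
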
diff --git a/cc/ndk_api_coverage_parser/__init__.py b/cc/ndk_api_coverage_parser/__init__.py
index 7817c78..8b9cd66 100755
--- a/cc/ndk_api_coverage_parser/__init__.py
+++ b/cc/ndk_api_coverage_parser/__init__.py
@@ -21,7 +21,12 @@
import sys
from xml.etree.ElementTree import Element, SubElement, tostring
-from symbolfile import ALL_ARCHITECTURES, FUTURE_API_LEVEL, MultiplyDefinedSymbolError, SymbolFileParser
+from symbolfile import (
+ ALL_ARCHITECTURES,
+ FUTURE_API_LEVEL,
+ MultiplyDefinedSymbolError,
+ SymbolFileParser,
+)
ROOT_ELEMENT_TAG = 'ndk-library'
@@ -63,6 +68,7 @@
class XmlGenerator(object):
"""Output generator that writes parsed symbol file to a xml file."""
+
def __init__(self, output_file):
self.output_file = output_file
@@ -74,10 +80,14 @@
continue
version_attributes = parse_tags(version.tags)
_, _, postfix = version.name.partition('_')
- is_platform = postfix == 'PRIVATE' or postfix == 'PLATFORM'
+ is_platform = postfix in ('PRIVATE', 'PLATFORM')
is_deprecated = postfix == 'DEPRECATED'
- version_attributes.update({PLATFORM_ATTRIBUTE_KEY: str(is_platform)})
- version_attributes.update({DEPRECATED_ATTRIBUTE_KEY: str(is_deprecated)})
+ version_attributes.update(
+ {PLATFORM_ATTRIBUTE_KEY: str(is_platform)}
+ )
+ version_attributes.update(
+ {DEPRECATED_ATTRIBUTE_KEY: str(is_deprecated)}
+ )
for symbol in version.symbols:
if VARIABLE_TAG in symbol.tags:
continue
@@ -103,13 +113,20 @@
"""Parses and returns command line arguments."""
parser = argparse.ArgumentParser()
- parser.add_argument('symbol_file', type=os.path.realpath, help='Path to symbol file.')
parser.add_argument(
- 'output_file', type=os.path.realpath,
- help='The output parsed api coverage file.')
+ 'symbol_file', type=os.path.realpath, help='Path to symbol file.'
+ )
parser.add_argument(
- '--api-map', type=os.path.realpath, required=True,
- help='Path to the API level map JSON file.')
+ 'output_file',
+ type=os.path.realpath,
+ help='The output parsed api coverage file.',
+ )
+ parser.add_argument(
+ '--api-map',
+ type=os.path.realpath,
+ required=True,
+ help='Path to the API level map JSON file.',
+ )
return parser.parse_args()
@@ -122,13 +139,15 @@
with open(args.symbol_file) as symbol_file:
try:
- versions = SymbolFileParser(symbol_file, api_map, "", FUTURE_API_LEVEL,
- True, True).parse()
+ versions = SymbolFileParser(
+ symbol_file, api_map, "", FUTURE_API_LEVEL, True, True
+ ).parse()
except MultiplyDefinedSymbolError as ex:
sys.exit('{}: error: {}'.format(args.symbol_file, ex))
generator = XmlGenerator(args.output_file)
generator.write(versions)
+
if __name__ == '__main__':
main()
diff --git a/cc/ndk_api_coverage_parser/test_ndk_api_coverage_parser.py b/cc/ndk_api_coverage_parser/test_ndk_api_coverage_parser.py
index 3ec14c1..141059c 100644
--- a/cc/ndk_api_coverage_parser/test_ndk_api_coverage_parser.py
+++ b/cc/ndk_api_coverage_parser/test_ndk_api_coverage_parser.py
@@ -50,10 +50,12 @@
return False
return all(etree_equal(c1, c2) for c1, c2 in zip(elem1, elem2))
-
+# pylint: disable=line-too-long
class ApiCoverageSymbolFileParserTest(unittest.TestCase):
def test_parse(self):
- input_file = io.StringIO(textwrap.dedent(u"""\
+ input_file = io.StringIO(
+ textwrap.dedent(
+ u"""\
LIBLOG { # introduced-arm64=24 introduced-x86=24 introduced-x86_64=24
global:
android_name_to_log_id; # apex llndk introduced=23
@@ -64,22 +66,28 @@
local:
*;
};
-
+
LIBLOG_PLATFORM {
android_fdtrack; # llndk
android_net; # introduced=23
};
-
+
LIBLOG_FOO { # var
android_var;
};
- """))
- parser = SymbolFileParser(input_file, {}, "", FUTURE_API_LEVEL, True, True)
+ """
+ )
+ )
+ parser = SymbolFileParser(
+ input_file, {}, "", FUTURE_API_LEVEL, True, True
+ )
generator = nparser.XmlGenerator(io.StringIO())
result = generator.convertToXml(parser.parse())
- expected = fromstring('<ndk-library><symbol apex="True" arch="" introduced="23" introduced-arm64="24" introduced-x86="24" introduced-x86_64="24" is_deprecated="False" is_platform="False" llndk="True" name="android_name_to_log_id" /><symbol arch="arm" introduced-arm64="24" introduced-x86="24" introduced-x86_64="24" is_deprecated="False" is_platform="False" llndk="True" name="android_log_id_to_name" /><symbol arch="" introduced-arm64="24" introduced-x86="23" introduced-x86_64="24" is_deprecated="False" is_platform="False" name="__android_log_assert" /><symbol arch="" introduced-arm64="24" introduced-x86="24" introduced-x86_64="24" is_deprecated="False" is_platform="False" name="__android_log_buf_write" /><symbol arch="" is_deprecated="False" is_platform="True" llndk="True" name="android_fdtrack" /><symbol arch="" introduced="23" is_deprecated="False" is_platform="True" name="android_net" /></ndk-library>')
+ expected = fromstring(
+ '<ndk-library><symbol apex="True" arch="" introduced="23" introduced-arm64="24" introduced-x86="24" introduced-x86_64="24" is_deprecated="False" is_platform="False" llndk="True" name="android_name_to_log_id" /><symbol arch="arm" introduced-arm64="24" introduced-x86="24" introduced-x86_64="24" is_deprecated="False" is_platform="False" llndk="True" name="android_log_id_to_name" /><symbol arch="" introduced-arm64="24" introduced-x86="23" introduced-x86_64="24" is_deprecated="False" is_platform="False" name="__android_log_assert" /><symbol arch="" introduced-arm64="24" introduced-x86="24" introduced-x86_64="24" is_deprecated="False" is_platform="False" name="__android_log_buf_write" /><symbol arch="" is_deprecated="False" is_platform="True" llndk="True" name="android_fdtrack" /><symbol arch="" introduced="23" is_deprecated="False" is_platform="True" name="android_net" /></ndk-library>'
+ )
self.assertTrue(etree_equal(expected, result))
-
+# pylint: enable=line-too-long
def main():
suite = unittest.TestLoader().loadTestsFromName(__name__)
diff --git a/cc/ndk_library.go b/cc/ndk_library.go
index 63e8261..704b03a 100644
--- a/cc/ndk_library.go
+++ b/cc/ndk_library.go
@@ -29,7 +29,6 @@
func init() {
pctx.HostBinToolVariable("ndkStubGenerator", "ndkstubgen")
- pctx.HostBinToolVariable("ndk_api_coverage_parser", "ndk_api_coverage_parser")
pctx.HostBinToolVariable("abidiff", "abidiff")
pctx.HostBinToolVariable("abitidy", "abitidy")
pctx.HostBinToolVariable("abidw", "abidw")
@@ -43,20 +42,20 @@
CommandDeps: []string{"$ndkStubGenerator"},
}, "arch", "apiLevel", "apiMap", "flags")
- parseNdkApiRule = pctx.AndroidStaticRule("parseNdkApiRule",
- blueprint.RuleParams{
- Command: "$ndk_api_coverage_parser $in $out --api-map $apiMap",
- CommandDeps: []string{"$ndk_api_coverage_parser"},
- }, "apiMap")
-
abidw = pctx.AndroidStaticRule("abidw",
blueprint.RuleParams{
Command: "$abidw --type-id-style hash --no-corpus-path " +
- "--no-show-locs --no-comp-dir-path -w $symbolList $in | " +
- "$abitidy --all -o $out",
- CommandDeps: []string{"$abitidy", "$abidw"},
+ "--no-show-locs --no-comp-dir-path -w $symbolList " +
+ "$in --out-file $out",
+ CommandDeps: []string{"$abidw"},
}, "symbolList")
+ abitidy = pctx.AndroidStaticRule("abitidy",
+ blueprint.RuleParams{
+ Command: "$abitidy --all -i $in -o $out",
+ CommandDeps: []string{"$abitidy"},
+ })
+
abidiff = pctx.AndroidStaticRule("abidiff",
blueprint.RuleParams{
// Need to create *some* output for ninja. We don't want to use tee
@@ -276,24 +275,6 @@
android.Paths{src}, nil, nil)
}
-func parseSymbolFileForCoverage(ctx ModuleContext, symbolFile string) android.ModuleOutPath {
- apiLevelsJson := android.GetApiLevelsJson(ctx)
- symbolFilePath := android.PathForModuleSrc(ctx, symbolFile)
- outputFileName := strings.Split(symbolFilePath.Base(), ".")[0]
- parsedApiCoveragePath := android.PathForModuleOut(ctx, outputFileName+".xml")
- ctx.Build(pctx, android.BuildParams{
- Rule: parseNdkApiRule,
- Description: "parse ndk api symbol file for api coverage: " + symbolFilePath.Rel(),
- Outputs: []android.WritablePath{parsedApiCoveragePath},
- Input: symbolFilePath,
- Implicits: []android.Path{apiLevelsJson},
- Args: map[string]string{
- "apiMap": apiLevelsJson.String(),
- },
- })
- return parsedApiCoveragePath
-}
-
func (this *stubDecorator) findImplementationLibrary(ctx ModuleContext) android.Path {
dep := ctx.GetDirectDepWithTag(strings.TrimSuffix(ctx.ModuleName(), ndkLibrarySuffix),
stubImplementation)
@@ -338,19 +319,28 @@
func (this *stubDecorator) dumpAbi(ctx ModuleContext, symbolList android.Path) {
implementationLibrary := this.findImplementationLibrary(ctx)
- this.abiDumpPath = getNdkAbiDumpInstallBase(ctx).Join(ctx,
+ abiRawPath := getNdkAbiDumpInstallBase(ctx).Join(ctx,
this.apiLevel.String(), ctx.Arch().ArchType.String(),
- this.libraryName(ctx), "abi.xml")
+ this.libraryName(ctx), "abi.raw.xml")
ctx.Build(pctx, android.BuildParams{
Rule: abidw,
Description: fmt.Sprintf("abidw %s", implementationLibrary),
- Output: this.abiDumpPath,
Input: implementationLibrary,
+ Output: abiRawPath,
Implicit: symbolList,
Args: map[string]string{
"symbolList": symbolList.String(),
},
})
+ this.abiDumpPath = getNdkAbiDumpInstallBase(ctx).Join(ctx,
+ this.apiLevel.String(), ctx.Arch().ArchType.String(),
+ this.libraryName(ctx), "abi.xml")
+ ctx.Build(pctx, android.BuildParams{
+ Rule: abitidy,
+ Description: fmt.Sprintf("abitidy %s", implementationLibrary),
+ Input: abiRawPath,
+ Output: this.abiDumpPath,
+ })
}
func findNextApiLevel(ctx ModuleContext, apiLevel android.ApiLevel) *android.ApiLevel {
@@ -454,7 +444,7 @@
}
}
if c.apiLevel.IsCurrent() && ctx.PrimaryArch() {
- c.parsedCoverageXmlPath = parseSymbolFileForCoverage(ctx, symbolFile)
+ c.parsedCoverageXmlPath = parseSymbolFileForAPICoverage(ctx, symbolFile)
}
return objs
}
diff --git a/cc/object_test.go b/cc/object_test.go
index 0e5508a..259a892 100644
--- a/cc/object_test.go
+++ b/cc/object_test.go
@@ -15,8 +15,9 @@
package cc
import (
- "android/soong/android"
"testing"
+
+ "android/soong/android"
)
func TestMinSdkVersionsOfCrtObjects(t *testing.T) {
@@ -27,24 +28,23 @@
crt: true,
stl: "none",
min_sdk_version: "28",
-
+ vendor_available: true,
}`)
- arch := "android_arm64_armv8-a"
- for _, v := range []string{"", "28", "29", "30", "current"} {
- var variant string
- // platform variant
- if v == "" {
- variant = arch
- } else {
- variant = arch + "_sdk_" + v
- }
- cflags := ctx.ModuleForTests("crt_foo", variant).Rule("cc").Args["cFlags"]
- vNum := v
- if v == "current" || v == "" {
- vNum = "10000"
- }
- expected := "-target aarch64-linux-android" + vNum + " "
+ variants := []struct {
+ variant string
+ num string
+ }{
+ {"android_arm64_armv8-a", "10000"},
+ {"android_arm64_armv8-a_sdk_28", "28"},
+ {"android_arm64_armv8-a_sdk_29", "29"},
+ {"android_arm64_armv8-a_sdk_30", "30"},
+ {"android_arm64_armv8-a_sdk_current", "10000"},
+ {"android_vendor.29_arm64_armv8-a", "29"},
+ }
+ for _, v := range variants {
+ cflags := ctx.ModuleForTests("crt_foo", v.variant).Rule("cc").Args["cFlags"]
+ expected := "-target aarch64-linux-android" + v.num + " "
android.AssertStringDoesContain(t, "cflag", cflags, expected)
}
}
diff --git a/cc/sanitize.go b/cc/sanitize.go
index dd15ae1..f6a9d5b 100644
--- a/cc/sanitize.go
+++ b/cc/sanitize.go
@@ -41,7 +41,6 @@
hwasanCflags = []string{"-fno-omit-frame-pointer", "-Wno-frame-larger-than=",
"-fsanitize-hwaddress-abi=platform",
- "-fno-experimental-new-pass-manager",
// The following improves debug location information
// availability at the cost of its accuracy. It increases
// the likelihood of a stack variable's frame offset
@@ -57,7 +56,7 @@
}
cfiCflags = []string{"-flto", "-fsanitize-cfi-cross-dso",
- "-fsanitize-blacklist=external/compiler-rt/lib/cfi/cfi_blocklist.txt"}
+ "-fsanitize-ignorelist=external/compiler-rt/lib/cfi/cfi_blocklist.txt"}
// -flto and -fvisibility are required by clang when -fsanitize=cfi is
// used, but have no effect on assembly files
cfiAsflags = []string{"-flto", "-fvisibility=default"}
@@ -65,7 +64,7 @@
"-Wl,-plugin-opt,O1"}
cfiExportsMapPath = "build/soong/cc/config/cfi_exports.map"
- intOverflowCflags = []string{"-fsanitize-blacklist=build/soong/cc/config/integer_overflow_blocklist.txt"}
+ intOverflowCflags = []string{"-fsanitize-ignorelist=build/soong/cc/config/integer_overflow_blocklist.txt"}
minimalRuntimeFlags = []string{"-fsanitize-minimal-runtime", "-fno-sanitize-trap=integer,undefined",
"-fno-sanitize-recover=integer,undefined"}
@@ -261,7 +260,7 @@
// the first one
Recover []string
- // value to pass to -fsanitize-blacklist
+ // value to pass to -fsanitize-ignorelist
Blocklist *string
}
@@ -757,7 +756,7 @@
blocklist := android.OptionalPathForModuleSrc(ctx, sanitize.Properties.Sanitize.Blocklist)
if blocklist.Valid() {
- flags.Local.CFlags = append(flags.Local.CFlags, "-fsanitize-blacklist="+blocklist.String())
+ flags.Local.CFlags = append(flags.Local.CFlags, "-fsanitize-ignorelist="+blocklist.String())
flags.CFlagsDeps = append(flags.CFlagsDeps, blocklist.Path())
}
diff --git a/cc/snapshot_prebuilt.go b/cc/snapshot_prebuilt.go
index 9672c0f..9570664 100644
--- a/cc/snapshot_prebuilt.go
+++ b/cc/snapshot_prebuilt.go
@@ -61,7 +61,7 @@
}
func (recoverySnapshotImage) moduleNameSuffix() string {
- return recoverySuffix
+ return RecoverySuffix
}
// Override existing vendor and recovery snapshot for cc module specific extra functions
diff --git a/cc/test_data_test.go b/cc/test_data_test.go
index 426dfc5..a621166 100644
--- a/cc/test_data_test.go
+++ b/cc/test_data_test.go
@@ -127,7 +127,7 @@
ctx.RegisterModuleType("test", newTest)
ctx.Register()
- _, errs := ctx.ParseBlueprintsFiles("Blueprints")
+ _, errs := ctx.ParseBlueprintsFiles("Android.bp")
android.FailIfErrored(t, errs)
_, errs = ctx.PrepareBuildActions(config)
android.FailIfErrored(t, errs)
diff --git a/cc/testing.go b/cc/testing.go
index 071f1ec..d0dca6b 100644
--- a/cc/testing.go
+++ b/cc/testing.go
@@ -375,26 +375,42 @@
cc_object {
name: "crtbegin_so",
defaults: ["crt_defaults"],
+ srcs: ["crtbegin_so.c"],
+ objs: ["crtbrand"],
}
cc_object {
name: "crtbegin_dynamic",
defaults: ["crt_defaults"],
+ srcs: ["crtbegin.c"],
+ objs: ["crtbrand"],
}
cc_object {
name: "crtbegin_static",
defaults: ["crt_defaults"],
+ srcs: ["crtbegin.c"],
+ objs: ["crtbrand"],
}
cc_object {
name: "crtend_so",
defaults: ["crt_defaults"],
+ srcs: ["crtend_so.c"],
+ objs: ["crtbrand"],
}
cc_object {
name: "crtend_android",
defaults: ["crt_defaults"],
+ srcs: ["crtend.c"],
+ objs: ["crtbrand"],
+ }
+
+ cc_object {
+ name: "crtbrand",
+ defaults: ["crt_defaults"],
+ srcs: ["crtbrand.c"],
}
cc_library {
@@ -585,6 +601,11 @@
"defaults/cc/common/libm.map.txt": nil,
"defaults/cc/common/ndk_libandroid_support": nil,
"defaults/cc/common/ndk_libc++_shared": nil,
+ "defaults/cc/common/crtbegin_so.c": nil,
+ "defaults/cc/common/crtbegin.c": nil,
+ "defaults/cc/common/crtend_so.c": nil,
+ "defaults/cc/common/crtend.c": nil,
+ "defaults/cc/common/crtbrand.c": nil,
}.AddToFixture(),
// Place the default cc test modules that are common to all platforms in a location that will not
diff --git a/cc/util.go b/cc/util.go
index 9bba876..88b0aba 100644
--- a/cc/util.go
+++ b/cc/util.go
@@ -91,7 +91,6 @@
systemIncludeFlags: strings.Join(in.SystemIncludeFlags, " "),
assemblerWithCpp: in.AssemblerWithCpp,
- groupStaticLibs: in.GroupStaticLibs,
proto: in.proto,
protoC: in.protoC,
diff --git a/cmd/multiproduct_kati/main.go b/cmd/multiproduct_kati/main.go
index 2846387..fa63b46 100644
--- a/cmd/multiproduct_kati/main.go
+++ b/cmd/multiproduct_kati/main.go
@@ -20,6 +20,7 @@
"flag"
"fmt"
"io"
+ "io/ioutil"
"log"
"os"
"os/exec"
@@ -76,6 +77,36 @@
return nil
}
+const errorLeadingLines = 20
+const errorTrailingLines = 20
+
+func errMsgFromLog(filename string) string {
+ if filename == "" {
+ return ""
+ }
+
+ data, err := ioutil.ReadFile(filename)
+ if err != nil {
+ return ""
+ }
+
+ lines := strings.Split(strings.TrimSpace(string(data)), "\n")
+ if len(lines) > errorLeadingLines+errorTrailingLines+1 {
+ lines[errorLeadingLines] = fmt.Sprintf("... skipping %d lines ...",
+ len(lines)-errorLeadingLines-errorTrailingLines)
+
+ lines = append(lines[:errorLeadingLines+1],
+ lines[len(lines)-errorTrailingLines:]...)
+ }
+ var buf strings.Builder
+ for _, line := range lines {
+ buf.WriteString("> ")
+ buf.WriteString(line)
+ buf.WriteString("\n")
+ }
+ return buf.String()
+}
+
// TODO(b/70370883): This tool uses a lot of open files -- over the default
// soft limit of 1024 on some systems. So bump up to the hard limit until I fix
// the algorithm.
@@ -170,6 +201,23 @@
}
}
+func outDirBase() string {
+ outDirBase := os.Getenv("OUT_DIR")
+ if outDirBase == "" {
+ return "out"
+ } else {
+ return outDirBase
+ }
+}
+
+func distDir(outDir string) string {
+ if distDir := os.Getenv("DIST_DIR"); distDir != "" {
+ return filepath.Clean(distDir)
+ } else {
+ return filepath.Join(outDir, "dist")
+ }
+}
+
func main() {
stdio := terminal.StdioImpl{}
@@ -177,6 +225,12 @@
log := logger.New(output)
defer log.Cleanup()
+ for _, v := range os.Environ() {
+ log.Println("Environment: " + v)
+ }
+
+ log.Printf("Argv: %v\n", os.Args)
+
flag.Parse()
_, cancel := context.WithCancel(context.Background())
@@ -208,13 +262,7 @@
if !*incremental {
name += "-" + time.Now().Format("20060102150405")
}
-
- outDirBase := os.Getenv("OUT_DIR")
- if outDirBase == "" {
- outDirBase = "out"
- }
-
- outputDir = filepath.Join(outDirBase, name)
+ outputDir = filepath.Join(outDirBase(), name)
}
log.Println("Output directory:", outputDir)
@@ -231,11 +279,13 @@
var configLogsDir string
if *alternateResultDir {
- configLogsDir = filepath.Join(outputDir, "dist/logs")
+ configLogsDir = filepath.Join(distDir(outDirBase()), "logs")
} else {
configLogsDir = outputDir
}
+ log.Println("Logs dir: " + configLogsDir)
+
os.MkdirAll(configLogsDir, 0777)
log.SetOutput(filepath.Join(configLogsDir, "soong.log"))
trace.SetOutput(filepath.Join(configLogsDir, "build.trace"))
@@ -348,10 +398,11 @@
FileArgs: []zip.FileArg{
{GlobDir: logsDir, SourcePrefixToStrip: logsDir},
},
- OutputFilePath: filepath.Join(outputDir, "dist/logs.zip"),
+ OutputFilePath: filepath.Join(distDir(outDirBase()), "logs.zip"),
NumParallelJobs: runtime.NumCPU(),
CompressionLevel: 5,
}
+ log.Printf("Logs zip: %v\n", args.OutputFilePath)
if err := zip.Zip(args); err != nil {
log.Fatalf("Error zipping logs: %v", err)
}
@@ -424,10 +475,6 @@
args = append(args, "--soong-only")
}
- if *alternateResultDir {
- args = append(args, "dist")
- }
-
cmd := exec.Command(mpctx.SoongUi, args...)
cmd.Stdout = consoleLogWriter
cmd.Stderr = consoleLogWriter
@@ -439,6 +486,11 @@
"TARGET_BUILD_APPS=",
"TARGET_BUILD_UNBUNDLED=")
+ if *alternateResultDir {
+ cmd.Env = append(cmd.Env,
+ "DIST_DIR="+filepath.Join(distDir(outDirBase()), "products/"+product))
+ }
+
action := &status.Action{
Description: product,
Outputs: []string{product},
@@ -459,9 +511,17 @@
}
}
}
+ var errOutput string
+ if err == nil {
+ errOutput = ""
+ } else {
+ errOutput = errMsgFromLog(consoleLogPath)
+ }
+
mpctx.Status.FinishAction(status.ActionResult{
Action: action,
Error: err,
+ Output: errOutput,
})
}
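
To show what errMsgFromLog's truncation produces, the standalone sketch below applies the same leading/trailing window to an in-memory log instead of reading a file; the file handling and surrounding build context are omitted.

package main

import (
	"fmt"
	"strings"
)

const errorLeadingLines = 20
const errorTrailingLines = 20

// truncateLog keeps the first and last N lines of a log, replacing the middle
// with a "... skipping ..." marker and prefixing each kept line with "> ",
// mirroring errMsgFromLog above.
func truncateLog(log string) string {
	lines := strings.Split(strings.TrimSpace(log), "\n")
	if len(lines) > errorLeadingLines+errorTrailingLines+1 {
		lines[errorLeadingLines] = fmt.Sprintf("... skipping %d lines ...",
			len(lines)-errorLeadingLines-errorTrailingLines)
		lines = append(lines[:errorLeadingLines+1],
			lines[len(lines)-errorTrailingLines:]...)
	}
	var buf strings.Builder
	for _, line := range lines {
		buf.WriteString("> ")
		buf.WriteString(line)
		buf.WriteString("\n")
	}
	return buf.String()
}

func main() {
	var log strings.Builder
	for i := 1; i <= 100; i++ {
		fmt.Fprintf(&log, "line %d\n", i)
	}
	// Prints lines 1-20, "... skipping 60 lines ...", then lines 81-100.
	fmt.Print(truncateLog(log.String()))
}
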
diff --git a/cmd/soong_build/Android.bp b/cmd/soong_build/Android.bp
index 9f09224..703a875 100644
--- a/cmd/soong_build/Android.bp
+++ b/cmd/soong_build/Android.bp
@@ -16,7 +16,7 @@
default_applicable_licenses: ["Android-Apache-2.0"],
}
-bootstrap_go_binary {
+blueprint_go_binary {
name: "soong_build",
deps: [
"blueprint",
diff --git a/cmd/soong_build/main.go b/cmd/soong_build/main.go
index af935db..09a2234 100644
--- a/cmd/soong_build/main.go
+++ b/cmd/soong_build/main.go
@@ -28,6 +28,7 @@
"github.com/google/blueprint/bootstrap"
"github.com/google/blueprint/deptools"
+ "github.com/google/blueprint/pathtools"
"android/soong/android"
)
@@ -35,12 +36,18 @@
var (
topDir string
outDir string
+ soongOutDir string
availableEnvFile string
usedEnvFile string
+ runGoTests bool
+
+ globFile string
+ globListDir string
delveListen string
delvePath string
+ moduleGraphFile string
docFile string
bazelQueryViewDir string
bp2buildMarker string
@@ -51,33 +58,33 @@
func init() {
// Flags that make sense in every mode
flag.StringVar(&topDir, "top", "", "Top directory of the Android source tree")
- flag.StringVar(&outDir, "out", "", "Soong output directory (usually $TOP/out/soong)")
+ flag.StringVar(&soongOutDir, "soong_out", "", "Soong output directory (usually $TOP/out/soong)")
flag.StringVar(&availableEnvFile, "available_env", "", "File containing available environment variables")
flag.StringVar(&usedEnvFile, "used_env", "", "File containing used environment variables")
+ flag.StringVar(&globFile, "globFile", "build-globs.ninja", "the Ninja file of globs to output")
+ flag.StringVar(&globListDir, "globListDir", "", "the directory containing the glob list files")
+ flag.StringVar(&outDir, "out", "", "the ninja builddir directory")
+ flag.StringVar(&cmdlineArgs.ModuleListFile, "l", "", "file that lists filepaths to parse")
// Debug flags
flag.StringVar(&delveListen, "delve_listen", "", "Delve port to listen on for debugging")
flag.StringVar(&delvePath, "delve_path", "", "Path to Delve. Only used if --delve_listen is set")
-
- // Flags representing various modes soong_build can run in
- flag.StringVar(&docFile, "soong_docs", "", "build documentation file to output")
- flag.StringVar(&bazelQueryViewDir, "bazel_queryview_dir", "", "path to the bazel queryview directory relative to --top")
- flag.StringVar(&bp2buildMarker, "bp2build_marker", "", "If set, run bp2build, touch the specified marker file then exit")
-
- flag.StringVar(&cmdlineArgs.OutFile, "o", "build.ninja", "the Ninja file to output")
- flag.StringVar(&cmdlineArgs.GlobFile, "globFile", "build-globs.ninja", "the Ninja file of globs to output")
- flag.StringVar(&cmdlineArgs.GlobListDir, "globListDir", "", "the directory containing the glob list files")
- flag.StringVar(&cmdlineArgs.BuildDir, "b", ".", "the build output directory")
- flag.StringVar(&cmdlineArgs.NinjaBuildDir, "n", "", "the ninja builddir directory")
- flag.StringVar(&cmdlineArgs.DepFile, "d", "", "the dependency file to output")
flag.StringVar(&cmdlineArgs.Cpuprofile, "cpuprofile", "", "write cpu profile to file")
flag.StringVar(&cmdlineArgs.TraceFile, "trace", "", "write trace to file")
flag.StringVar(&cmdlineArgs.Memprofile, "memprofile", "", "write memory profile to file")
flag.BoolVar(&cmdlineArgs.NoGC, "nogc", false, "turn off GC for debugging")
- flag.BoolVar(&cmdlineArgs.RunGoTests, "t", false, "build and run go tests during bootstrap")
- flag.BoolVar(&cmdlineArgs.UseValidations, "use-validations", false, "use validations to depend on go tests")
- flag.StringVar(&cmdlineArgs.ModuleListFile, "l", "", "file that lists filepaths to parse")
+
+ // Flags representing various modes soong_build can run in
+ flag.StringVar(&moduleGraphFile, "module_graph_file", "", "JSON module graph file to output")
+ flag.StringVar(&docFile, "soong_docs", "", "build documentation file to output")
+ flag.StringVar(&bazelQueryViewDir, "bazel_queryview_dir", "", "path to the bazel queryview directory relative to --top")
+ flag.StringVar(&bp2buildMarker, "bp2build_marker", "", "If set, run bp2build, touch the specified marker file then exit")
+ flag.StringVar(&cmdlineArgs.OutFile, "o", "build.ninja", "the Ninja file to output")
flag.BoolVar(&cmdlineArgs.EmptyNinjaFile, "empty-ninja-file", false, "write out a 0-byte ninja file")
+
+ // Flags that probably shouldn't be flags of soong_build but we haven't found
+ // the time to remove them yet
+ flag.BoolVar(&runGoTests, "t", false, "build and run go tests during bootstrap")
}
func newNameResolver(config android.Config) *android.NameResolver {
@@ -96,19 +103,16 @@
return android.NewNameResolver(exportFilter)
}
-func newContext(configuration android.Config, prepareBuildActions bool) *android.Context {
+func newContext(configuration android.Config) *android.Context {
ctx := android.NewContext(configuration)
ctx.Register()
- if !prepareBuildActions {
- configuration.SetStopBefore(bootstrap.StopBeforePrepareBuildActions)
- }
ctx.SetNameInterface(newNameResolver(configuration))
ctx.SetAllowMissingDependencies(configuration.AllowMissingDependencies())
return ctx
}
-func newConfig(outDir string, availableEnv map[string]string) android.Config {
- configuration, err := android.NewConfig(outDir, cmdlineArgs.ModuleListFile, availableEnv)
+func newConfig(availableEnv map[string]string) android.Config {
+ configuration, err := android.NewConfig(cmdlineArgs.ModuleListFile, runGoTests, outDir, soongOutDir, availableEnv)
if err != nil {
fmt.Fprintf(os.Stderr, "%s", err)
os.Exit(1)
@@ -122,11 +126,7 @@
// TODO(cparsons): Don't output any ninja file, as the second pass will overwrite
// the incorrect results from the first pass, and file I/O is expensive.
func runMixedModeBuild(configuration android.Config, firstCtx *android.Context, extraNinjaDeps []string) {
- var firstArgs, secondArgs bootstrap.Args
-
- firstArgs = cmdlineArgs
- configuration.SetStopBefore(bootstrap.StopBeforeWriteNinja)
- bootstrap.RunBlueprint(firstArgs, firstCtx.Context, configuration)
+ bootstrap.RunBlueprint(cmdlineArgs, bootstrap.StopBeforeWriteNinja, firstCtx.Context, configuration)
// Invoke bazel commands and save results for second pass.
if err := configuration.BazelContext.InvokeBazel(); err != nil {
@@ -139,39 +139,30 @@
fmt.Fprintf(os.Stderr, "%s", err)
os.Exit(1)
}
- secondCtx := newContext(secondConfig, true)
- secondArgs = cmdlineArgs
- ninjaDeps := bootstrap.RunBlueprint(secondArgs, secondCtx.Context, secondConfig)
+ secondCtx := newContext(secondConfig)
+ ninjaDeps := bootstrap.RunBlueprint(cmdlineArgs, bootstrap.DoEverything, secondCtx.Context, secondConfig)
ninjaDeps = append(ninjaDeps, extraNinjaDeps...)
- err = deptools.WriteDepFile(shared.JoinPath(topDir, secondArgs.DepFile), secondArgs.OutFile, ninjaDeps)
- if err != nil {
- fmt.Fprintf(os.Stderr, "Error writing depfile '%s': %s\n", secondArgs.DepFile, err)
- os.Exit(1)
- }
+
+ globListFiles := writeBuildGlobsNinjaFile(secondCtx.SrcDir(), configuration.SoongOutDir(), secondCtx.Globs, configuration)
+ ninjaDeps = append(ninjaDeps, globListFiles...)
+
+ writeDepFile(cmdlineArgs.OutFile, ninjaDeps)
}
// Run the code-generation phase to convert BazelTargetModules to BUILD files.
-func runQueryView(configuration android.Config, ctx *android.Context) {
+func runQueryView(queryviewDir, queryviewMarker string, configuration android.Config, ctx *android.Context) {
codegenContext := bp2build.NewCodegenContext(configuration, *ctx, bp2build.QueryView)
- absoluteQueryViewDir := shared.JoinPath(topDir, bazelQueryViewDir)
+ absoluteQueryViewDir := shared.JoinPath(topDir, queryviewDir)
if err := createBazelQueryView(codegenContext, absoluteQueryViewDir); err != nil {
fmt.Fprintf(os.Stderr, "%s", err)
os.Exit(1)
}
-}
-func runSoongDocs(configuration android.Config) {
- ctx := newContext(configuration, false)
- soongDocsArgs := cmdlineArgs
- bootstrap.RunBlueprint(soongDocsArgs, ctx.Context, configuration)
- if err := writeDocs(ctx, configuration, docFile); err != nil {
- fmt.Fprintf(os.Stderr, "%s", err)
- os.Exit(1)
- }
+ touch(shared.JoinPath(topDir, queryviewMarker))
}
func writeMetrics(configuration android.Config) {
- metricsFile := filepath.Join(configuration.BuildDir(), "soong_build_metrics.pb")
+ metricsFile := filepath.Join(configuration.SoongOutDir(), "soong_build_metrics.pb")
err := android.WriteMetrics(configuration, metricsFile)
if err != nil {
fmt.Fprintf(os.Stderr, "error writing soong_build metrics %s: %s", metricsFile, err)
@@ -179,8 +170,8 @@
}
}
-func writeJsonModuleGraph(configuration android.Config, ctx *android.Context, path string, extraNinjaDeps []string) {
- f, err := os.Create(path)
+func writeJsonModuleGraph(ctx *android.Context, path string) {
+ f, err := os.Create(shared.JoinPath(topDir, path))
if err != nil {
fmt.Fprintf(os.Stderr, "%s", err)
os.Exit(1)
@@ -188,51 +179,91 @@
defer f.Close()
ctx.Context.PrintJSONGraph(f)
- writeFakeNinjaFile(extraNinjaDeps, configuration.BuildDir())
+}
+
+func writeBuildGlobsNinjaFile(srcDir, buildDir string, globs func() pathtools.MultipleGlobResults, config interface{}) []string {
+ globDir := bootstrap.GlobDirectory(buildDir, globListDir)
+ bootstrap.WriteBuildGlobsNinjaFile(&bootstrap.GlobSingleton{
+ GlobLister: globs,
+ GlobFile: globFile,
+ GlobDir: globDir,
+ SrcDir: srcDir,
+ }, config)
+ return bootstrap.GlobFileListFiles(globDir)
+}
+
+func writeDepFile(outputFile string, ninjaDeps []string) {
+ depFile := shared.JoinPath(topDir, outputFile+".d")
+ err := deptools.WriteDepFile(depFile, outputFile, ninjaDeps)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "Error writing depfile '%s': %s\n", depFile, err)
+ os.Exit(1)
+ }
}
// doChosenActivity runs Soong for a specific activity, like bp2build, queryview
// or the actual Soong build for the build.ninja file. Returns the top level
// output file of the specific activity.
func doChosenActivity(configuration android.Config, extraNinjaDeps []string) string {
- bazelConversionRequested := bp2buildMarker != ""
mixedModeBuild := configuration.BazelContext.BazelEnabled()
+ generateBazelWorkspace := bp2buildMarker != ""
generateQueryView := bazelQueryViewDir != ""
- jsonModuleFile := configuration.Getenv("SOONG_DUMP_JSON_MODULE_GRAPH")
+ generateModuleGraphFile := moduleGraphFile != ""
+ generateDocFile := docFile != ""
blueprintArgs := cmdlineArgs
- prepareBuildActions := !generateQueryView && jsonModuleFile == ""
- if bazelConversionRequested {
+
+ var stopBefore bootstrap.StopBefore
+ if !generateModuleGraphFile && !generateQueryView && !generateDocFile {
+ stopBefore = bootstrap.DoEverything
+ } else {
+ stopBefore = bootstrap.StopBeforePrepareBuildActions
+ }
+
+ if generateBazelWorkspace {
// Run the alternate pipeline of bp2build mutators and singleton to convert
// Blueprint to BUILD files before everything else.
runBp2Build(configuration, extraNinjaDeps)
return bp2buildMarker
}
- ctx := newContext(configuration, prepareBuildActions)
+ ctx := newContext(configuration)
if mixedModeBuild {
runMixedModeBuild(configuration, ctx, extraNinjaDeps)
} else {
- ninjaDeps := bootstrap.RunBlueprint(blueprintArgs, ctx.Context, configuration)
+ ninjaDeps := bootstrap.RunBlueprint(blueprintArgs, stopBefore, ctx.Context, configuration)
ninjaDeps = append(ninjaDeps, extraNinjaDeps...)
- err := deptools.WriteDepFile(shared.JoinPath(topDir, blueprintArgs.DepFile), blueprintArgs.OutFile, ninjaDeps)
- if err != nil {
- fmt.Fprintf(os.Stderr, "Error writing depfile '%s': %s\n", blueprintArgs.DepFile, err)
- os.Exit(1)
+
+ globListFiles := writeBuildGlobsNinjaFile(ctx.SrcDir(), configuration.SoongOutDir(), ctx.Globs, configuration)
+ ninjaDeps = append(ninjaDeps, globListFiles...)
+
+ // Convert the Soong module graph into Bazel BUILD files.
+ if generateQueryView {
+ queryviewMarkerFile := bazelQueryViewDir + ".marker"
+ runQueryView(bazelQueryViewDir, queryviewMarkerFile, configuration, ctx)
+ writeDepFile(queryviewMarkerFile, ninjaDeps)
+ return queryviewMarkerFile
+ } else if generateModuleGraphFile {
+ writeJsonModuleGraph(ctx, moduleGraphFile)
+ writeDepFile(moduleGraphFile, ninjaDeps)
+ return moduleGraphFile
+ } else if generateDocFile {
+ // TODO: we could make writeDocs() return the list of documentation files
+ // written and add them to the .d file. Then soong_docs would be re-run
+ // whenever one is deleted.
+ if err := writeDocs(ctx, shared.JoinPath(topDir, docFile)); err != nil {
+ fmt.Fprintf(os.Stderr, "error building Soong documentation: %s\n", err)
+ os.Exit(1)
+ }
+ writeDepFile(docFile, ninjaDeps)
+ return docFile
+ } else {
+ // The actual output (build.ninja) was written in the RunBlueprint() call
+ // above
+ writeDepFile(cmdlineArgs.OutFile, ninjaDeps)
}
}
- // Convert the Soong module graph into Bazel BUILD files.
- if generateQueryView {
- runQueryView(configuration, ctx)
- return cmdlineArgs.OutFile // TODO: This is a lie
- }
-
- if jsonModuleFile != "" {
- writeJsonModuleGraph(configuration, ctx, jsonModuleFile, extraNinjaDeps)
- return cmdlineArgs.OutFile // TODO: This is a lie
- }
-
writeMetrics(configuration)
return cmdlineArgs.OutFile
}
@@ -273,7 +304,7 @@
availableEnv := parseAvailableEnv()
- configuration := newConfig(outDir, availableEnv)
+ configuration := newConfig(availableEnv)
extraNinjaDeps := []string{
configuration.ProductVariablesFileName,
usedEnvFile,
@@ -286,17 +317,7 @@
if shared.IsDebugging() {
// Add a non-existent file to the dependencies so that soong_build will rerun when the debugger is
// enabled even if it completed successfully.
- extraNinjaDeps = append(extraNinjaDeps, filepath.Join(configuration.BuildDir(), "always_rerun_for_delve"))
- }
-
- if docFile != "" {
- // We don't write an used variables file when generating documentation
- // because that is done from within the actual builds as a Ninja action and
- // thus it would overwrite the actual used variables file so this is
- // special-cased.
- // TODO: Fix this by not passing --used_env to the soong_docs invocation
- runSoongDocs(configuration)
- return
+ extraNinjaDeps = append(extraNinjaDeps, filepath.Join(configuration.SoongOutDir(), "always_rerun_for_delve"))
}
finalOutputFile := doChosenActivity(configuration, extraNinjaDeps)
@@ -327,29 +348,6 @@
touch(shared.JoinPath(topDir, finalOutputFile))
}
-// Workarounds to support running bp2build in a clean AOSP checkout with no
-// prior builds, and exiting early as soon as the BUILD files get generated,
-// therefore not creating build.ninja files that soong_ui and callers of
-// soong_build expects.
-//
-// These files are: build.ninja and build.ninja.d. Since Kati hasn't been
-// ran as well, and `nothing` is defined in a .mk file, there isn't a ninja
-// target called `nothing`, so we manually create it here.
-func writeFakeNinjaFile(extraNinjaDeps []string, buildDir string) {
- extraNinjaDepsString := strings.Join(extraNinjaDeps, " \\\n ")
-
- ninjaFileName := "build.ninja"
- ninjaFile := shared.JoinPath(topDir, buildDir, ninjaFileName)
- ninjaFileD := shared.JoinPath(topDir, buildDir, ninjaFileName+".d")
- // A workaround to create the 'nothing' ninja target so `m nothing` works,
- // since bp2build runs without Kati, and the 'nothing' target is declared in
- // a Makefile.
- ioutil.WriteFile(ninjaFile, []byte("build nothing: phony\n phony_output = true\n"), 0666)
- ioutil.WriteFile(ninjaFileD,
- []byte(fmt.Sprintf("%s: \\\n %s\n", ninjaFile, extraNinjaDepsString)),
- 0666)
-}
-
func touch(path string) {
f, err := os.OpenFile(path, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0666)
if err != nil {
@@ -474,31 +472,23 @@
extraNinjaDeps = append(extraNinjaDeps, modulePaths...)
- // No need to generate Ninja build rules/statements from Modules and Singletons.
- configuration.SetStopBefore(bootstrap.StopBeforePrepareBuildActions)
-
// Run the loading and analysis pipeline to prepare the graph of regular
// Modules parsed from Android.bp files, and the BazelTargetModules mapped
// from the regular Modules.
blueprintArgs := cmdlineArgs
- ninjaDeps := bootstrap.RunBlueprint(blueprintArgs, bp2buildCtx.Context, configuration)
+ ninjaDeps := bootstrap.RunBlueprint(blueprintArgs, bootstrap.StopBeforePrepareBuildActions, bp2buildCtx.Context, configuration)
ninjaDeps = append(ninjaDeps, extraNinjaDeps...)
- // Generate out/soong/.bootstrap/build-globs.ninja with the actions to generate flattened globfiles
- // containing the globs seen during bp2build conversion
- if blueprintArgs.GlobFile != "" {
- bootstrap.WriteBuildGlobsNinjaFile(blueprintArgs.GlobListDir, bp2buildCtx.Context, blueprintArgs, configuration)
- }
- // Add the depfile on the expanded globs in out/soong/.primary/globs
- ninjaDeps = append(ninjaDeps, bootstrap.GlobFileListFiles(configuration, blueprintArgs.GlobListDir)...)
+ globListFiles := writeBuildGlobsNinjaFile(bp2buildCtx.SrcDir(), configuration.SoongOutDir(), bp2buildCtx.Globs, configuration)
+ ninjaDeps = append(ninjaDeps, globListFiles...)
// Run the code-generation phase to convert BazelTargetModules to BUILD files
// and print conversion metrics to the user.
codegenContext := bp2build.NewCodegenContext(configuration, *bp2buildCtx, bp2build.Bp2Build)
metrics := bp2build.Codegen(codegenContext)
- generatedRoot := shared.JoinPath(configuration.BuildDir(), "bp2build")
- workspaceRoot := shared.JoinPath(configuration.BuildDir(), "workspace")
+ generatedRoot := shared.JoinPath(configuration.SoongOutDir(), "bp2build")
+ workspaceRoot := shared.JoinPath(configuration.SoongOutDir(), "workspace")
excludes := []string{
"bazel-bin",
@@ -508,8 +498,8 @@
"bazel-" + filepath.Base(topDir),
}
- if cmdlineArgs.NinjaBuildDir[0] != '/' {
- excludes = append(excludes, cmdlineArgs.NinjaBuildDir)
+ if outDir[0] != '/' {
+ excludes = append(excludes, outDir)
}
existingBazelRelatedFiles, err := getExistingBazelRelatedFiles(topDir)
@@ -534,12 +524,7 @@
ninjaDeps = append(ninjaDeps, codegenContext.AdditionalNinjaDeps()...)
ninjaDeps = append(ninjaDeps, symlinkForestDeps...)
- depFile := bp2buildMarker + ".d"
- err = deptools.WriteDepFile(shared.JoinPath(topDir, depFile), bp2buildMarker, ninjaDeps)
- if err != nil {
- fmt.Fprintf(os.Stderr, "Cannot write depfile '%s': %s\n", depFile, err)
- os.Exit(1)
- }
+ writeDepFile(bp2buildMarker, ninjaDeps)
// Create an empty bp2build marker file.
touch(shared.JoinPath(topDir, bp2buildMarker))
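
The writeDepFile helper called above is defined elsewhere in this change and is not shown in this hunk. A minimal sketch of such a helper, reconstructed from the inline block that this hunk removes (it assumes cmd/soong_build's existing imports, namely deptools, shared, fmt and os, plus the package-level topDir):

func writeDepFile(outputFile string, ninjaDeps []string) {
	// Write <outputFile>.d listing every input whose change should cause
	// outputFile to be regenerated by Ninja.
	depFile := outputFile + ".d"
	err := deptools.WriteDepFile(shared.JoinPath(topDir, depFile), outputFile, ninjaDeps)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Cannot write depfile '%s': %s\n", depFile, err)
		os.Exit(1)
	}
}
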
diff --git a/cmd/soong_build/queryview.go b/cmd/soong_build/queryview.go
index a8602de..98e27c6 100644
--- a/cmd/soong_build/queryview.go
+++ b/cmd/soong_build/queryview.go
@@ -15,14 +15,16 @@
package main
import (
- "android/soong/android"
- "android/soong/bp2build"
"io/ioutil"
"os"
"path/filepath"
+
+ "android/soong/android"
+ "android/soong/bp2build"
)
func createBazelQueryView(ctx *bp2build.CodegenContext, bazelQueryViewDir string) error {
+ os.RemoveAll(bazelQueryViewDir)
ruleShims := bp2build.CreateRuleShims(android.ModuleTypeFactories())
// Ignore metrics reporting and compat layers for queryview, since queryview
diff --git a/cmd/soong_build/writedocs.go b/cmd/soong_build/writedocs.go
index b7c260c..8d8f37f 100644
--- a/cmd/soong_build/writedocs.go
+++ b/cmd/soong_build/writedocs.go
@@ -15,13 +15,14 @@
package main
import (
- "android/soong/android"
"bytes"
"html/template"
"io/ioutil"
"path/filepath"
"sort"
+ "android/soong/android"
+
"github.com/google/blueprint/bootstrap"
"github.com/google/blueprint/bootstrap/bpdoc"
)
@@ -95,13 +96,13 @@
return result
}
-func getPackages(ctx *android.Context, config interface{}) ([]*bpdoc.Package, error) {
+func getPackages(ctx *android.Context) ([]*bpdoc.Package, error) {
moduleTypeFactories := android.ModuleTypeFactoriesForDocs()
- return bootstrap.ModuleTypeDocs(ctx.Context, config, moduleTypeFactories)
+ return bootstrap.ModuleTypeDocs(ctx.Context, moduleTypeFactories)
}
-func writeDocs(ctx *android.Context, config interface{}, filename string) error {
- packages, err := getPackages(ctx, config)
+func writeDocs(ctx *android.Context, filename string) error {
+ packages, err := getPackages(ctx)
if err != nil {
return err
}
diff --git a/dexpreopt/class_loader_context.go b/dexpreopt/class_loader_context.go
index ebb8959..1bdd040 100644
--- a/dexpreopt/class_loader_context.go
+++ b/dexpreopt/class_loader_context.go
@@ -196,6 +196,10 @@
// If the library is optional or required.
Optional bool
+ // If the library is implicitly inferred by Soong (as opposed to explicitly added via `uses_libs`
+ // or `optional_uses_libs`).
+ Implicit bool
+
// On-host build path to the library dex file (used in dex2oat argument --class-loader-context).
Host android.Path
@@ -258,8 +262,9 @@
const AnySdkVersion int = android.FutureApiLevelInt
// Add class loader context for the given library to the map entry for the given SDK version.
-func (clcMap ClassLoaderContextMap) addContext(ctx android.ModuleInstallPathContext, sdkVer int, lib string,
- optional bool, hostPath, installPath android.Path, nestedClcMap ClassLoaderContextMap) error {
+func (clcMap ClassLoaderContextMap) addContext(ctx android.ModuleInstallPathContext, sdkVer int,
+ lib string, optional, implicit bool, hostPath, installPath android.Path,
+ nestedClcMap ClassLoaderContextMap) error {
// For prebuilts, library should have the same name as the source module.
lib = android.RemoveOptionalPrebuiltPrefix(lib)
@@ -308,6 +313,7 @@
clcMap[sdkVer] = append(clcMap[sdkVer], &ClassLoaderContext{
Name: lib,
Optional: optional,
+ Implicit: implicit,
Host: hostPath,
Device: devicePath,
Subcontexts: subcontexts,
@@ -320,9 +326,10 @@
// about paths). For the subset of libraries that are used in dexpreopt, their build/install paths
// are validated later before CLC is used (in validateClassLoaderContext).
func (clcMap ClassLoaderContextMap) AddContext(ctx android.ModuleInstallPathContext, sdkVer int,
- lib string, optional bool, hostPath, installPath android.Path, nestedClcMap ClassLoaderContextMap) {
+ lib string, optional, implicit bool, hostPath, installPath android.Path,
+ nestedClcMap ClassLoaderContextMap) {
- err := clcMap.addContext(ctx, sdkVer, lib, optional, hostPath, installPath, nestedClcMap)
+ err := clcMap.addContext(ctx, sdkVer, lib, optional, implicit, hostPath, installPath, nestedClcMap)
if err != nil {
ctx.ModuleErrorf(err.Error())
}
@@ -366,13 +373,15 @@
// included). This is the list of libraries that should be in the <uses-library> tags in the
// manifest. Some of them may be present in the source manifest, others are added by manifest_fixer.
// Required and optional libraries are in separate lists.
-func (clcMap ClassLoaderContextMap) UsesLibs() (required []string, optional []string) {
+func (clcMap ClassLoaderContextMap) usesLibs(implicit bool) (required []string, optional []string) {
if clcMap != nil {
clcs := clcMap[AnySdkVersion]
required = make([]string, 0, len(clcs))
optional = make([]string, 0, len(clcs))
for _, clc := range clcs {
- if clc.Optional {
+ if implicit && !clc.Implicit {
+ // Skip, this is an explicit library and we need only the implicit ones.
+ } else if clc.Optional {
optional = append(optional, clc.Name)
} else {
required = append(required, clc.Name)
@@ -382,6 +391,14 @@
return required, optional
}
+func (clcMap ClassLoaderContextMap) UsesLibs() ([]string, []string) {
+ return clcMap.usesLibs(false)
+}
+
+func (clcMap ClassLoaderContextMap) ImplicitUsesLibs() ([]string, []string) {
+ return clcMap.usesLibs(true)
+}
+
func (clcMap ClassLoaderContextMap) Dump() string {
jsonCLC := toJsonClassLoaderContext(clcMap)
bytes, err := json.MarshalIndent(jsonCLC, "", " ")
@@ -524,6 +541,8 @@
// the same as Soong representation except that SDK versions and paths are represented with strings.
type jsonClassLoaderContext struct {
Name string
+ Optional bool
+ Implicit bool
Host string
Device string
Subcontexts []*jsonClassLoaderContext
@@ -555,6 +574,8 @@
for _, clc := range jClcs {
clcs = append(clcs, &ClassLoaderContext{
Name: clc.Name,
+ Optional: clc.Optional,
+ Implicit: clc.Implicit,
Host: constructPath(ctx, clc.Host),
Device: clc.Device,
Subcontexts: fromJsonClassLoaderContextRec(ctx, clc.Subcontexts),
@@ -579,6 +600,8 @@
for i, clc := range clcs {
jClcs[i] = &jsonClassLoaderContext{
Name: clc.Name,
+ Optional: clc.Optional,
+ Implicit: clc.Implicit,
Host: clc.Host.String(),
Device: clc.Device,
Subcontexts: toJsonClassLoaderContextRec(clc.Subcontexts),
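
A short usage sketch of the new Implicit flag, written in the style of the tests that follow (testContext, buildPath and installPath are the helpers used there; the library names are made up). Explicit `uses_libs` entries are added with implicit=false, Soong-inferred ones with implicit=true, and the two accessors then return different subsets:

ctx := testContext()
clcMap := make(ClassLoaderContextMap)
// Explicit <uses-library> coming from `uses_libs` (implicit = false).
clcMap.AddContext(ctx, AnySdkVersion, "explicit-lib", false /*optional*/, false /*implicit*/,
	buildPath(ctx, "explicit-lib"), installPath(ctx, "explicit-lib"), nil)
// Library inferred by Soong, e.g. an SDK library dependency (implicit = true).
clcMap.AddContext(ctx, AnySdkVersion, "inferred-lib", false /*optional*/, true /*implicit*/,
	buildPath(ctx, "inferred-lib"), installPath(ctx, "inferred-lib"), nil)

required, _ := clcMap.UsesLibs()             // ["explicit-lib", "inferred-lib"]
implicitOnly, _ := clcMap.ImplicitUsesLibs() // ["inferred-lib"]: only these go to manifest_fixer
_, _ = required, implicitOnly
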
diff --git a/dexpreopt/class_loader_context_test.go b/dexpreopt/class_loader_context_test.go
index 0b7b546..d81ac2c 100644
--- a/dexpreopt/class_loader_context_test.go
+++ b/dexpreopt/class_loader_context_test.go
@@ -50,33 +50,34 @@
ctx := testContext()
optional := false
+ implicit := true
m := make(ClassLoaderContextMap)
- m.AddContext(ctx, AnySdkVersion, "a", optional, buildPath(ctx, "a"), installPath(ctx, "a"), nil)
- m.AddContext(ctx, AnySdkVersion, "b", optional, buildPath(ctx, "b"), installPath(ctx, "b"), nil)
- m.AddContext(ctx, AnySdkVersion, "c", optional, buildPath(ctx, "c"), installPath(ctx, "c"), nil)
+ m.AddContext(ctx, AnySdkVersion, "a", optional, implicit, buildPath(ctx, "a"), installPath(ctx, "a"), nil)
+ m.AddContext(ctx, AnySdkVersion, "b", optional, implicit, buildPath(ctx, "b"), installPath(ctx, "b"), nil)
+ m.AddContext(ctx, AnySdkVersion, "c", optional, implicit, buildPath(ctx, "c"), installPath(ctx, "c"), nil)
// Add some libraries with nested subcontexts.
m1 := make(ClassLoaderContextMap)
- m1.AddContext(ctx, AnySdkVersion, "a1", optional, buildPath(ctx, "a1"), installPath(ctx, "a1"), nil)
- m1.AddContext(ctx, AnySdkVersion, "b1", optional, buildPath(ctx, "b1"), installPath(ctx, "b1"), nil)
+ m1.AddContext(ctx, AnySdkVersion, "a1", optional, implicit, buildPath(ctx, "a1"), installPath(ctx, "a1"), nil)
+ m1.AddContext(ctx, AnySdkVersion, "b1", optional, implicit, buildPath(ctx, "b1"), installPath(ctx, "b1"), nil)
m2 := make(ClassLoaderContextMap)
- m2.AddContext(ctx, AnySdkVersion, "a2", optional, buildPath(ctx, "a2"), installPath(ctx, "a2"), nil)
- m2.AddContext(ctx, AnySdkVersion, "b2", optional, buildPath(ctx, "b2"), installPath(ctx, "b2"), nil)
- m2.AddContext(ctx, AnySdkVersion, "c2", optional, buildPath(ctx, "c2"), installPath(ctx, "c2"), m1)
+ m2.AddContext(ctx, AnySdkVersion, "a2", optional, implicit, buildPath(ctx, "a2"), installPath(ctx, "a2"), nil)
+ m2.AddContext(ctx, AnySdkVersion, "b2", optional, implicit, buildPath(ctx, "b2"), installPath(ctx, "b2"), nil)
+ m2.AddContext(ctx, AnySdkVersion, "c2", optional, implicit, buildPath(ctx, "c2"), installPath(ctx, "c2"), m1)
m3 := make(ClassLoaderContextMap)
- m3.AddContext(ctx, AnySdkVersion, "a3", optional, buildPath(ctx, "a3"), installPath(ctx, "a3"), nil)
- m3.AddContext(ctx, AnySdkVersion, "b3", optional, buildPath(ctx, "b3"), installPath(ctx, "b3"), nil)
+ m3.AddContext(ctx, AnySdkVersion, "a3", optional, implicit, buildPath(ctx, "a3"), installPath(ctx, "a3"), nil)
+ m3.AddContext(ctx, AnySdkVersion, "b3", optional, implicit, buildPath(ctx, "b3"), installPath(ctx, "b3"), nil)
- m.AddContext(ctx, AnySdkVersion, "d", optional, buildPath(ctx, "d"), installPath(ctx, "d"), m2)
+ m.AddContext(ctx, AnySdkVersion, "d", optional, implicit, buildPath(ctx, "d"), installPath(ctx, "d"), m2)
// When the same library is both in conditional and unconditional context, it should be removed
// from conditional context.
- m.AddContext(ctx, 42, "f", optional, buildPath(ctx, "f"), installPath(ctx, "f"), nil)
- m.AddContext(ctx, AnySdkVersion, "f", optional, buildPath(ctx, "f"), installPath(ctx, "f"), nil)
+ m.AddContext(ctx, 42, "f", optional, implicit, buildPath(ctx, "f"), installPath(ctx, "f"), nil)
+ m.AddContext(ctx, AnySdkVersion, "f", optional, implicit, buildPath(ctx, "f"), installPath(ctx, "f"), nil)
// Merge map with implicit root library that is among toplevel contexts => does nothing.
m.AddContextMap(m1, "c")
@@ -85,12 +86,12 @@
m.AddContextMap(m3, "m_g")
// Compatibility libraries with unknown install paths get default paths.
- m.AddContext(ctx, 29, AndroidHidlManager, optional, buildPath(ctx, AndroidHidlManager), nil, nil)
- m.AddContext(ctx, 29, AndroidHidlBase, optional, buildPath(ctx, AndroidHidlBase), nil, nil)
+ m.AddContext(ctx, 29, AndroidHidlManager, optional, implicit, buildPath(ctx, AndroidHidlManager), nil, nil)
+ m.AddContext(ctx, 29, AndroidHidlBase, optional, implicit, buildPath(ctx, AndroidHidlBase), nil, nil)
// Add "android.test.mock" to conditional CLC, observe that is gets removed because it is only
// needed as a compatibility library if "android.test.runner" is in CLC as well.
- m.AddContext(ctx, 30, AndroidTestMock, optional, buildPath(ctx, AndroidTestMock), nil, nil)
+ m.AddContext(ctx, 30, AndroidTestMock, optional, implicit, buildPath(ctx, AndroidTestMock), nil, nil)
valid, validationError := validateClassLoaderContext(m)
@@ -164,11 +165,12 @@
func TestCLCJson(t *testing.T) {
ctx := testContext()
optional := false
+ implicit := true
m := make(ClassLoaderContextMap)
- m.AddContext(ctx, 28, "a", optional, buildPath(ctx, "a"), installPath(ctx, "a"), nil)
- m.AddContext(ctx, 29, "b", optional, buildPath(ctx, "b"), installPath(ctx, "b"), nil)
- m.AddContext(ctx, 30, "c", optional, buildPath(ctx, "c"), installPath(ctx, "c"), nil)
- m.AddContext(ctx, AnySdkVersion, "d", optional, buildPath(ctx, "d"), installPath(ctx, "d"), nil)
+ m.AddContext(ctx, 28, "a", optional, implicit, buildPath(ctx, "a"), installPath(ctx, "a"), nil)
+ m.AddContext(ctx, 29, "b", optional, implicit, buildPath(ctx, "b"), installPath(ctx, "b"), nil)
+ m.AddContext(ctx, 30, "c", optional, implicit, buildPath(ctx, "c"), installPath(ctx, "c"), nil)
+ m.AddContext(ctx, AnySdkVersion, "d", optional, implicit, buildPath(ctx, "d"), installPath(ctx, "d"), nil)
jsonCLC := toJsonClassLoaderContext(m)
restored := fromJsonClassLoaderContext(ctx, jsonCLC)
android.AssertIntEquals(t, "The size of the maps should be the same.", len(m), len(restored))
@@ -189,12 +191,13 @@
func testCLCUnknownPath(t *testing.T, whichPath string) {
ctx := testContext()
optional := false
+ implicit := true
m := make(ClassLoaderContextMap)
if whichPath == "build" {
- m.AddContext(ctx, AnySdkVersion, "a", optional, nil, nil, nil)
+ m.AddContext(ctx, AnySdkVersion, "a", optional, implicit, nil, nil, nil)
} else {
- m.AddContext(ctx, AnySdkVersion, "a", optional, buildPath(ctx, "a"), nil, nil)
+ m.AddContext(ctx, AnySdkVersion, "a", optional, implicit, buildPath(ctx, "a"), nil, nil)
}
// The library should be added to <uses-library> tags by the manifest_fixer.
@@ -229,10 +232,11 @@
func TestCLCNestedConditional(t *testing.T) {
ctx := testContext()
optional := false
+ implicit := true
m1 := make(ClassLoaderContextMap)
- m1.AddContext(ctx, 42, "a", optional, buildPath(ctx, "a"), installPath(ctx, "a"), nil)
+ m1.AddContext(ctx, 42, "a", optional, implicit, buildPath(ctx, "a"), installPath(ctx, "a"), nil)
m := make(ClassLoaderContextMap)
- err := m.addContext(ctx, AnySdkVersion, "b", optional, buildPath(ctx, "b"), installPath(ctx, "b"), m1)
+ err := m.addContext(ctx, AnySdkVersion, "b", optional, implicit, buildPath(ctx, "b"), installPath(ctx, "b"), m1)
checkError(t, err, "nested class loader context shouldn't have conditional part")
}
@@ -241,11 +245,12 @@
func TestCLCSdkVersionOrder(t *testing.T) {
ctx := testContext()
optional := false
+ implicit := true
m := make(ClassLoaderContextMap)
- m.AddContext(ctx, 28, "a", optional, buildPath(ctx, "a"), installPath(ctx, "a"), nil)
- m.AddContext(ctx, 29, "b", optional, buildPath(ctx, "b"), installPath(ctx, "b"), nil)
- m.AddContext(ctx, 30, "c", optional, buildPath(ctx, "c"), installPath(ctx, "c"), nil)
- m.AddContext(ctx, AnySdkVersion, "d", optional, buildPath(ctx, "d"), installPath(ctx, "d"), nil)
+ m.AddContext(ctx, 28, "a", optional, implicit, buildPath(ctx, "a"), installPath(ctx, "a"), nil)
+ m.AddContext(ctx, 29, "b", optional, implicit, buildPath(ctx, "b"), installPath(ctx, "b"), nil)
+ m.AddContext(ctx, 30, "c", optional, implicit, buildPath(ctx, "c"), installPath(ctx, "c"), nil)
+ m.AddContext(ctx, AnySdkVersion, "d", optional, implicit, buildPath(ctx, "d"), installPath(ctx, "d"), nil)
valid, validationError := validateClassLoaderContext(m)
diff --git a/dexpreopt/config.go b/dexpreopt/config.go
index 7a74506..de3666a 100644
--- a/dexpreopt/config.go
+++ b/dexpreopt/config.go
@@ -159,7 +159,7 @@
}
func constructPath(ctx android.PathContext, path string) android.Path {
- buildDirPrefix := ctx.Config().BuildDir() + "/"
+ buildDirPrefix := ctx.Config().SoongOutDir() + "/"
if path == "" {
return nil
} else if strings.HasPrefix(path, buildDirPrefix) {
diff --git a/dexpreopt/dexpreopt.go b/dexpreopt/dexpreopt.go
index 4c6ae82..1401c75 100644
--- a/dexpreopt/dexpreopt.go
+++ b/dexpreopt/dexpreopt.go
@@ -430,11 +430,6 @@
}
}
- // Never enable on eng.
- if global.IsEng {
- debugInfo = false
- }
-
if debugInfo {
cmd.Flag("--generate-mini-debug-info")
} else {
diff --git a/dexpreopt/dexpreopt_gen/dexpreopt_gen.go b/dexpreopt/dexpreopt_gen/dexpreopt_gen.go
index 7dbe74c..ba05d94 100644
--- a/dexpreopt/dexpreopt_gen/dexpreopt_gen.go
+++ b/dexpreopt/dexpreopt_gen/dexpreopt_gen.go
@@ -87,7 +87,9 @@
usage("--module configuration file is required")
}
- ctx := &builderContext{android.NullConfig(*outDir)}
+ // NOTE: duplicating --out_dir here is incorrect (one should be the other
+ // plus "/soong"), but doing so apparently breaks dexpreopt.
+ ctx := &builderContext{android.NullConfig(*outDir, *outDir)}
globalSoongConfigData, err := ioutil.ReadFile(*globalSoongConfigPath)
if err != nil {
diff --git a/filesystem/bootimg.go b/filesystem/bootimg.go
index 29a8a39..73d807d 100644
--- a/filesystem/bootimg.go
+++ b/filesystem/bootimg.go
@@ -60,8 +60,8 @@
// https://source.android.com/devices/bootloader/partitions/vendor-boot-partitions
Vendor_boot *bool
- // Optional kernel commandline
- Cmdline *string `android:"arch_variant"`
+ // Optional kernel commandline arguments
+ Cmdline []string `android:"arch_variant"`
// File that contains bootconfig parameters. This can be set only when `vendor_boot` is true
// and `header_version` is greater than or equal to 4.
@@ -152,7 +152,7 @@
dtb := android.PathForModuleSrc(ctx, dtbName)
cmd.FlagWithInput("--dtb ", dtb)
- cmdline := proptools.String(b.properties.Cmdline)
+ cmdline := strings.Join(b.properties.Cmdline, " ")
if cmdline != "" {
flag := "--cmdline "
if vendor {
diff --git a/java/android_manifest.go b/java/android_manifest.go
index 1f7234d..38065f1 100644
--- a/java/android_manifest.go
+++ b/java/android_manifest.go
@@ -71,7 +71,9 @@
args = append(args, "--use-embedded-dex")
}
- requiredUsesLibs, optionalUsesLibs := classLoaderContexts.UsesLibs()
+ // manifest_fixer should add only the implicit SDK libraries inferred by Soong, not those added
+ // explicitly via `uses_libs`/`optional_uses_libs`.
+ requiredUsesLibs, optionalUsesLibs := classLoaderContexts.ImplicitUsesLibs()
for _, usesLib := range requiredUsesLibs {
args = append(args, "--uses-library", usesLib)
}
diff --git a/java/app.go b/java/app.go
index e7661df..5104f07 100755
--- a/java/app.go
+++ b/java/app.go
@@ -1224,17 +1224,28 @@
func (u *usesLibrary) deps(ctx android.BottomUpMutatorContext, hasFrameworkLibs bool) {
if !ctx.Config().UnbundledBuild() || ctx.Config().UnbundledBuildImage() {
- ctx.AddVariationDependencies(nil, usesLibReqTag, u.usesLibraryProperties.Uses_libs...)
- ctx.AddVariationDependencies(nil, usesLibOptTag, u.presentOptionalUsesLibs(ctx)...)
+ reqTag := makeUsesLibraryDependencyTag(dexpreopt.AnySdkVersion, false, false)
+ ctx.AddVariationDependencies(nil, reqTag, u.usesLibraryProperties.Uses_libs...)
+
+ optTag := makeUsesLibraryDependencyTag(dexpreopt.AnySdkVersion, true, false)
+ ctx.AddVariationDependencies(nil, optTag, u.presentOptionalUsesLibs(ctx)...)
+
// Only add these extra dependencies if the module depends on framework libs. This avoids
// creating a cyclic dependency:
// e.g. framework-res -> org.apache.http.legacy -> ... -> framework-res.
if hasFrameworkLibs {
- // Dexpreopt needs paths to the dex jars of these libraries in order to construct
- // class loader context for dex2oat. Add them as a dependency with a special tag.
- ctx.AddVariationDependencies(nil, usesLibCompat29ReqTag, dexpreopt.CompatUsesLibs29...)
- ctx.AddVariationDependencies(nil, usesLibCompat28OptTag, dexpreopt.OptionalCompatUsesLibs28...)
- ctx.AddVariationDependencies(nil, usesLibCompat30OptTag, dexpreopt.OptionalCompatUsesLibs30...)
+ // Add implicit <uses-library> dependencies on compatibility libraries. Some of them are
+ // optional, and some required; this depends on the most common usage of the library
+ // and may be wrong for some apps (they need explicit `uses_libs`/`optional_uses_libs`).
+
+ compat28OptTag := makeUsesLibraryDependencyTag(28, true, true)
+ ctx.AddVariationDependencies(nil, compat28OptTag, dexpreopt.OptionalCompatUsesLibs28...)
+
+ compat29ReqTag := makeUsesLibraryDependencyTag(29, false, true)
+ ctx.AddVariationDependencies(nil, compat29ReqTag, dexpreopt.CompatUsesLibs29...)
+
+ compat30OptTag := makeUsesLibraryDependencyTag(30, true, true)
+ ctx.AddVariationDependencies(nil, compat30OptTag, dexpreopt.OptionalCompatUsesLibs30...)
}
}
}
@@ -1293,7 +1304,7 @@
replaceInList(u.usesLibraryProperties.Uses_libs, dep, libName)
replaceInList(u.usesLibraryProperties.Optional_uses_libs, dep, libName)
}
- clcMap.AddContext(ctx, tag.sdkVersion, libName, tag.optional,
+ clcMap.AddContext(ctx, tag.sdkVersion, libName, tag.optional, tag.implicit,
lib.DexJarBuildPath(), lib.DexJarInstallPath(), lib.ClassLoaderContexts())
} else if ctx.Config().AllowMissingDependencies() {
ctx.AddMissingDependencies([]string{dep})
diff --git a/java/app_import.go b/java/app_import.go
index b5a6084..3e5f972 100644
--- a/java/app_import.go
+++ b/java/app_import.go
@@ -204,9 +204,9 @@
return false
}
- // Uncompress dex in APKs of privileged apps
- if ctx.Config().UncompressPrivAppDex() && a.Privileged() {
- return true
+ // Uncompress dex in APKs of priv-apps if and only if DONT_UNCOMPRESS_PRIV_APPS_DEXS is false.
+ if a.Privileged() {
+ return ctx.Config().UncompressPrivAppDex()
}
return shouldUncompressDex(ctx, &a.dexpreopter)
diff --git a/java/app_import_test.go b/java/app_import_test.go
index 024a3df..efa52c1 100644
--- a/java/app_import_test.go
+++ b/java/app_import_test.go
@@ -15,6 +15,7 @@
package java
import (
+ "fmt"
"reflect"
"regexp"
"strings"
@@ -656,3 +657,74 @@
}
}
}
+
+func TestAndroidTestImport_UncompressDex(t *testing.T) {
+ testCases := []struct {
+ name string
+ bp string
+ }{
+ {
+ name: "normal",
+ bp: `
+ android_app_import {
+ name: "foo",
+ presigned: true,
+ apk: "prebuilts/apk/app.apk",
+ }
+ `,
+ },
+ {
+ name: "privileged",
+ bp: `
+ android_app_import {
+ name: "foo",
+ presigned: true,
+ privileged: true,
+ apk: "prebuilts/apk/app.apk",
+ }
+ `,
+ },
+ }
+
+ test := func(t *testing.T, bp string, unbundled bool, dontUncompressPrivAppDexs bool) {
+ t.Helper()
+
+ result := android.GroupFixturePreparers(
+ prepareForJavaTest,
+ android.FixtureModifyProductVariables(func(variables android.FixtureProductVariables) {
+ if unbundled {
+ variables.Unbundled_build = proptools.BoolPtr(true)
+ }
+ variables.UncompressPrivAppDex = proptools.BoolPtr(!dontUncompressPrivAppDexs)
+ }),
+ ).RunTestWithBp(t, bp)
+
+ foo := result.ModuleForTests("foo", "android_common")
+ actual := foo.MaybeRule("uncompress-dex").Rule != nil
+
+ expect := !unbundled
+ if strings.Contains(bp, "privileged: true") {
+ if dontUncompressPrivAppDexs {
+ expect = false
+ } else {
+ // TODO(b/194504107): shouldn't priv-apps be always uncompressed unless
+ // DONT_UNCOMPRESS_PRIV_APPS_DEXS is true (regardless of unbundling)?
+ // expect = true
+ }
+ }
+
+ android.AssertBoolEquals(t, "uncompress dex", expect, actual)
+ }
+
+ for _, unbundled := range []bool{false, true} {
+ for _, dontUncompressPrivAppDexs := range []bool{false, true} {
+ for _, tt := range testCases {
+ name := fmt.Sprintf("%s,unbundled:%t,dontUncompressPrivAppDexs:%t",
+ tt.name, unbundled, dontUncompressPrivAppDexs)
+ t.Run(name, func(t *testing.T) {
+ test(t, tt.bp, unbundled, dontUncompressPrivAppDexs)
+ })
+ }
+ }
+ }
+}
diff --git a/java/app_test.go b/java/app_test.go
index 8de6691..07439fc 100644
--- a/java/app_test.go
+++ b/java/app_test.go
@@ -1737,7 +1737,7 @@
foo := result.ModuleForTests("foo", "android_common")
- outSoongDir := result.Config.BuildDir()
+ outSoongDir := result.Config.SoongOutDir()
outputs := foo.AllOutputs()
outputMap := make(map[string]bool)
@@ -2285,6 +2285,49 @@
sdk_version: "current",
}
+ java_library {
+ name: "runtime-required-x",
+ srcs: ["a.java"],
+ installable: true,
+ sdk_version: "current",
+ }
+
+ java_library {
+ name: "runtime-optional-x",
+ srcs: ["a.java"],
+ installable: true,
+ sdk_version: "current",
+ }
+
+ android_library {
+ name: "static-x",
+ uses_libs: ["runtime-required-x"],
+ optional_uses_libs: ["runtime-optional-x"],
+ sdk_version: "current",
+ }
+
+ java_library {
+ name: "runtime-required-y",
+ srcs: ["a.java"],
+ installable: true,
+ sdk_version: "current",
+ }
+
+ java_library {
+ name: "runtime-optional-y",
+ srcs: ["a.java"],
+ installable: true,
+ sdk_version: "current",
+ }
+
+ java_library {
+ name: "static-y",
+ srcs: ["a.java"],
+ uses_libs: ["runtime-required-y"],
+ optional_uses_libs: ["runtime-optional-y"],
+ sdk_version: "current",
+ }
+
// A library that has to use "provides_uses_lib", because:
// - it is not an SDK library
// - its library name is different from its module name
@@ -2307,6 +2350,8 @@
// statically linked component libraries should not pull their SDK libraries,
// so "fred" should not be added to class loader context
"fred.stubs",
+ "static-x",
+ "static-y",
],
uses_libs: [
"foo",
@@ -2353,10 +2398,7 @@
expectManifestFixerArgs := `--extract-native-libs=true ` +
`--uses-library qux ` +
`--uses-library quuz ` +
- `--uses-library foo ` + // TODO(b/132357300): "foo" should not be passed to manifest_fixer
- `--uses-library com.non.sdk.lib ` + // TODO(b/132357300): "com.non.sdk.lib" should not be passed to manifest_fixer
- `--uses-library runtime-library ` +
- `--optional-uses-library bar` // TODO(b/132357300): "bar" should not be passed to manifest_fixer
+ `--uses-library runtime-library`
android.AssertStringEquals(t, "manifest_fixer args", expectManifestFixerArgs, actualManifestFixerArgs)
// Test that all libraries are verified (library order matters).
@@ -2366,8 +2408,12 @@
`--uses-library qux ` +
`--uses-library quuz ` +
`--uses-library runtime-library ` +
+ `--uses-library runtime-required-x ` +
+ `--uses-library runtime-required-y ` +
`--optional-uses-library bar ` +
- `--optional-uses-library baz `
+ `--optional-uses-library baz ` +
+ `--optional-uses-library runtime-optional-x ` +
+ `--optional-uses-library runtime-optional-y `
android.AssertStringDoesContain(t, "verify cmd args", verifyCmd, verifyArgs)
// Test that all libraries are verified for an APK (library order matters).
@@ -2387,7 +2433,11 @@
`PCL[/system/framework/foo.jar]#` +
`PCL[/system/framework/non-sdk-lib.jar]#` +
`PCL[/system/framework/bar.jar]#` +
- `PCL[/system/framework/runtime-library.jar]`
+ `PCL[/system/framework/runtime-library.jar]#` +
+ `PCL[/system/framework/runtime-required-x.jar]#` +
+ `PCL[/system/framework/runtime-optional-x.jar]#` +
+ `PCL[/system/framework/runtime-required-y.jar]#` +
+ `PCL[/system/framework/runtime-optional-y.jar] `
android.AssertStringDoesContain(t, "dexpreopt app cmd args", cmd, w)
// Test conditional context for target SDK version 28.
diff --git a/java/base.go b/java/base.go
index ea5b137..86022c3 100644
--- a/java/base.go
+++ b/java/base.go
@@ -606,10 +606,8 @@
if component, ok := dep.(SdkLibraryComponentDependency); ok {
if lib := component.OptionalSdkLibraryImplementation(); lib != nil {
// Add library as optional if it's one of the optional compatibility libs.
- tag := usesLibReqTag
- if android.InList(*lib, dexpreopt.OptionalCompatUsesLibs) {
- tag = usesLibOptTag
- }
+ optional := android.InList(*lib, dexpreopt.OptionalCompatUsesLibs)
+ tag := makeUsesLibraryDependencyTag(dexpreopt.AnySdkVersion, optional, true)
ctx.AddVariationDependencies(nil, tag, *lib)
}
}
@@ -793,7 +791,7 @@
// Manually specify build directory in case it is not under the repo root.
// (javac doesn't seem to expand into symbolic links when searching for patch-module targets, so
// just adding a symlink under the root doesn't help.)
- patchPaths := []string{".", ctx.Config().BuildDir()}
+ patchPaths := []string{".", ctx.Config().SoongOutDir()}
// b/150878007
//
diff --git a/java/bootclasspath_fragment.go b/java/bootclasspath_fragment.go
index bb542c4..f7561b4 100644
--- a/java/bootclasspath_fragment.go
+++ b/java/bootclasspath_fragment.go
@@ -538,7 +538,7 @@
global := dexpreopt.GetGlobalConfig(ctx)
possibleUpdatableModules := gatherPossibleApexModuleNamesAndStems(ctx, b.properties.Contents, bootclasspathFragmentContentDepTag)
- jars := global.ApexBootJars.Filter(possibleUpdatableModules)
+ jars, unknown := global.ApexBootJars.Filter(possibleUpdatableModules)
// TODO(satayev): for apex_test we want to include all contents unconditionally to classpaths
// config. However, any test specific jars would not be present in ApexBootJars. Instead,
@@ -546,6 +546,12 @@
// This is an exception to support end-to-end test for SdkExtensions, until such support exists.
if android.InList("test_framework-sdkextensions", possibleUpdatableModules) {
jars = jars.Append("com.android.sdkext", "test_framework-sdkextensions")
+ } else if global.ApexBootJars.Len() != 0 && !android.IsModuleInVersionedSdk(ctx.Module()) {
+ unknown = android.RemoveListFromList(unknown, b.properties.Coverage.Contents)
+ _, unknown = android.RemoveFromList("core-icu4j", unknown)
+ if len(unknown) > 0 {
+ ctx.ModuleErrorf("%s in contents must also be declared in PRODUCT_APEX_BOOT_JARS", unknown)
+ }
}
return jars
}
@@ -712,8 +718,8 @@
android.SdkMemberTypeBase
}
-func (b *bootclasspathFragmentMemberType) AddDependencies(mctx android.BottomUpMutatorContext, dependencyTag blueprint.DependencyTag, names []string) {
- mctx.AddVariationDependencies(nil, dependencyTag, names...)
+func (b *bootclasspathFragmentMemberType) AddDependencies(ctx android.SdkDependencyContext, dependencyTag blueprint.DependencyTag, names []string) {
+ ctx.AddVariationDependencies(nil, dependencyTag, names...)
}
func (b *bootclasspathFragmentMemberType) IsInstance(module android.Module) bool {
diff --git a/java/core-libraries/Android.bp b/java/core-libraries/Android.bp
index 51d998a..b198c24 100644
--- a/java/core-libraries/Android.bp
+++ b/java/core-libraries/Android.bp
@@ -24,6 +24,10 @@
// core libraries.
//
// Don't use this directly, use "sdk_version: core_current".
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
java_library {
name: "core.current.stubs",
visibility: ["//visibility:public"],
diff --git a/java/dexpreopt_bootjars.go b/java/dexpreopt_bootjars.go
index 1019b4c..946092c 100644
--- a/java/dexpreopt_bootjars.go
+++ b/java/dexpreopt_bootjars.go
@@ -500,7 +500,11 @@
dst := dstBootJarsByModule[name]
if src == nil {
- ctx.ModuleErrorf("module %s does not provide a dex boot jar", name)
+ if !ctx.Config().AllowMissingDependencies() {
+ ctx.ModuleErrorf("module %s does not provide a dex boot jar", name)
+ } else {
+ ctx.AddMissingDependencies([]string{name})
+ }
} else if dst == nil {
ctx.ModuleErrorf("module %s is not part of the boot configuration", name)
} else {
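
The fallback above follows a common Soong pattern for builds that set ALLOW_MISSING_DEPENDENCIES (for example unbundled branches): record the missing module instead of failing analysis outright, so the error only surfaces if the missing output is actually needed. A small sketch of the same pattern factored into a helper; the function name is hypothetical and not part of this change:

func reportMissingBootJar(ctx android.ModuleContext, name string) {
	if ctx.Config().AllowMissingDependencies() {
		// Defer the failure: an error is reported only if the jar is actually used.
		ctx.AddMissingDependencies([]string{name})
	} else {
		ctx.ModuleErrorf("module %s does not provide a dex boot jar", name)
	}
}
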
diff --git a/java/java.go b/java/java.go
index 5bf3d79..1a052b4 100644
--- a/java/java.go
+++ b/java/java.go
@@ -248,15 +248,24 @@
type usesLibraryDependencyTag struct {
dependencyTag
- sdkVersion int // SDK version in which the library appeared as a standalone library.
- optional bool // If the dependency is optional or required.
+
+ // SDK version in which the library appeared as a standalone library.
+ sdkVersion int
+
+ // If the dependency is optional or required.
+ optional bool
+
+ // Whether this is an implicit dependency inferred by Soong, or an explicit one added via
+ // `uses_libs`/`optional_uses_libs` properties.
+ implicit bool
}
-func makeUsesLibraryDependencyTag(sdkVersion int, optional bool) usesLibraryDependencyTag {
+func makeUsesLibraryDependencyTag(sdkVersion int, optional bool, implicit bool) usesLibraryDependencyTag {
return usesLibraryDependencyTag{
dependencyTag: dependencyTag{name: fmt.Sprintf("uses-library-%d", sdkVersion)},
sdkVersion: sdkVersion,
optional: optional,
+ implicit: implicit,
}
}
@@ -285,11 +294,6 @@
syspropPublicStubDepTag = dependencyTag{name: "sysprop public stub"}
jniInstallTag = installDependencyTag{name: "jni install"}
binaryInstallTag = installDependencyTag{name: "binary install"}
- usesLibReqTag = makeUsesLibraryDependencyTag(dexpreopt.AnySdkVersion, false)
- usesLibOptTag = makeUsesLibraryDependencyTag(dexpreopt.AnySdkVersion, true)
- usesLibCompat28OptTag = makeUsesLibraryDependencyTag(28, true)
- usesLibCompat29ReqTag = makeUsesLibraryDependencyTag(29, false)
- usesLibCompat30OptTag = makeUsesLibraryDependencyTag(30, true)
)
func IsLibDepTag(depTag blueprint.DependencyTag) bool {
@@ -511,7 +515,7 @@
j.dexProperties.Uncompress_dex = proptools.BoolPtr(shouldUncompressDex(ctx, &j.dexpreopter))
}
j.dexpreopter.uncompressedDex = *j.dexProperties.Uncompress_dex
- j.classLoaderContexts = make(dexpreopt.ClassLoaderContextMap)
+ j.classLoaderContexts = j.usesLibrary.classLoaderContextForUsesLibDeps(ctx)
j.compile(ctx, nil)
// Collect the module directory for IDE info in java/jdeps.go.
@@ -530,6 +534,7 @@
func (j *Library) DepsMutator(ctx android.BottomUpMutatorContext) {
j.deps(ctx)
+ j.usesLibrary.deps(ctx, false)
}
const (
@@ -569,8 +574,8 @@
copyEverythingToSnapshot = false
)
-func (mt *librarySdkMemberType) AddDependencies(mctx android.BottomUpMutatorContext, dependencyTag blueprint.DependencyTag, names []string) {
- mctx.AddVariationDependencies(nil, dependencyTag, names...)
+func (mt *librarySdkMemberType) AddDependencies(ctx android.SdkDependencyContext, dependencyTag blueprint.DependencyTag, names []string) {
+ ctx.AddVariationDependencies(nil, dependencyTag, names...)
}
func (mt *librarySdkMemberType) IsInstance(module android.Module) bool {
@@ -870,8 +875,8 @@
android.SdkMemberTypeBase
}
-func (mt *testSdkMemberType) AddDependencies(mctx android.BottomUpMutatorContext, dependencyTag blueprint.DependencyTag, names []string) {
- mctx.AddVariationDependencies(nil, dependencyTag, names...)
+func (mt *testSdkMemberType) AddDependencies(ctx android.SdkDependencyContext, dependencyTag blueprint.DependencyTag, names []string) {
+ ctx.AddVariationDependencies(nil, dependencyTag, names...)
}
func (mt *testSdkMemberType) IsInstance(module android.Module) bool {
@@ -1812,8 +1817,10 @@
depTag := ctx.OtherModuleDependencyTag(depModule)
if depTag == libTag {
// Ok, propagate <uses-library> through non-static library dependencies.
- } else if tag, ok := depTag.(usesLibraryDependencyTag); ok && tag.sdkVersion == dexpreopt.AnySdkVersion {
- // Ok, propagate <uses-library> through non-compatibility <uses-library> dependencies.
+ } else if tag, ok := depTag.(usesLibraryDependencyTag); ok &&
+ tag.sdkVersion == dexpreopt.AnySdkVersion && tag.implicit {
+ // Ok, propagate <uses-library> through non-compatibility implicit <uses-library>
+ // dependencies.
} else if depTag == staticLibTag {
// Propagate <uses-library> through static library dependencies, unless it is a component
// library (such as stubs). Component libraries have a dependency on their SDK library,
@@ -1831,7 +1838,7 @@
// <uses_library> and should not be added to CLC, but the transitive <uses-library> dependencies
// from its CLC should be added to the current CLC.
if sdkLib != nil {
- clcMap.AddContext(ctx, dexpreopt.AnySdkVersion, *sdkLib, false,
+ clcMap.AddContext(ctx, dexpreopt.AnySdkVersion, *sdkLib, false, true,
dep.DexJarBuildPath(), dep.DexJarInstallPath(), dep.ClassLoaderContexts())
} else {
clcMap.AddContextMap(dep.ClassLoaderContexts(), depName)
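
For orientation, a sketch of how the three tag fields interact with the propagation rule above and with manifest_fixer; only identifiers from this change are used, and the variable names are illustrative:

// Explicit `uses_libs` entry: not implicit, so it is not passed to
// manifest_fixer and the rule above does not propagate it transitively.
explicitTag := makeUsesLibraryDependencyTag(dexpreopt.AnySdkVersion, false /*optional*/, false /*implicit*/)

// Compatibility library needed only up to API 29: implicit, but its
// sdkVersion is not AnySdkVersion, so it is still not propagated.
compat29Tag := makeUsesLibraryDependencyTag(29, false /*optional*/, true /*implicit*/)

// SDK library inferred by Soong: implicit and AnySdkVersion, so it reaches
// manifest_fixer and propagates through non-static library dependencies.
sdkLibTag := makeUsesLibraryDependencyTag(dexpreopt.AnySdkVersion, false /*optional*/, true /*implicit*/)

_, _, _ = explicitTag, compat29Tag, sdkLibTag
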
diff --git a/java/java_test.go b/java/java_test.go
index b6780c2..8bb017f 100644
--- a/java/java_test.go
+++ b/java/java_test.go
@@ -1183,7 +1183,7 @@
break
}
}
- if expected != android.StringPathRelativeToTop(ctx.Config().BuildDir(), got) {
+ if expected != android.StringPathRelativeToTop(ctx.Config().SoongOutDir(), got) {
t.Errorf("Unexpected patch-module flag for module %q - expected %q, but got %q", moduleName, expected, got)
}
}
diff --git a/java/platform_compat_config.go b/java/platform_compat_config.go
index 712c2a2..0d8ebac 100644
--- a/java/platform_compat_config.go
+++ b/java/platform_compat_config.go
@@ -134,8 +134,8 @@
android.SdkMemberTypeBase
}
-func (b *compatConfigMemberType) AddDependencies(mctx android.BottomUpMutatorContext, dependencyTag blueprint.DependencyTag, names []string) {
- mctx.AddVariationDependencies(nil, dependencyTag, names...)
+func (b *compatConfigMemberType) AddDependencies(ctx android.SdkDependencyContext, dependencyTag blueprint.DependencyTag, names []string) {
+ ctx.AddVariationDependencies(nil, dependencyTag, names...)
}
func (b *compatConfigMemberType) IsInstance(module android.Module) bool {
diff --git a/java/sdk_library.go b/java/sdk_library.go
index c50e077..ce8f179 100644
--- a/java/sdk_library.go
+++ b/java/sdk_library.go
@@ -2471,8 +2471,8 @@
android.SdkMemberTypeBase
}
-func (s *sdkLibrarySdkMemberType) AddDependencies(mctx android.BottomUpMutatorContext, dependencyTag blueprint.DependencyTag, names []string) {
- mctx.AddVariationDependencies(nil, dependencyTag, names...)
+func (s *sdkLibrarySdkMemberType) AddDependencies(ctx android.SdkDependencyContext, dependencyTag blueprint.DependencyTag, names []string) {
+ ctx.AddVariationDependencies(nil, dependencyTag, names...)
}
func (s *sdkLibrarySdkMemberType) IsInstance(module android.Module) bool {
diff --git a/java/sdk_library_test.go b/java/sdk_library_test.go
index eeec504..938bb28 100644
--- a/java/sdk_library_test.go
+++ b/java/sdk_library_test.go
@@ -157,7 +157,7 @@
qux := result.ModuleForTests("qux", "android_common")
if quxLib, ok := qux.Module().(*Library); ok {
requiredSdkLibs, optionalSdkLibs := quxLib.ClassLoaderContexts().UsesLibs()
- android.AssertDeepEquals(t, "qux exports (required)", []string{"foo", "bar", "fred", "quuz"}, requiredSdkLibs)
+ android.AssertDeepEquals(t, "qux exports (required)", []string{"fred", "quuz", "foo", "bar"}, requiredSdkLibs)
android.AssertDeepEquals(t, "qux exports (optional)", []string{}, optionalSdkLibs)
}
}
diff --git a/java/system_modules.go b/java/system_modules.go
index d0dc74a..fec8eba 100644
--- a/java/system_modules.go
+++ b/java/system_modules.go
@@ -245,8 +245,8 @@
android.SdkMemberTypeBase
}
-func (mt *systemModulesSdkMemberType) AddDependencies(mctx android.BottomUpMutatorContext, dependencyTag blueprint.DependencyTag, names []string) {
- mctx.AddVariationDependencies(nil, dependencyTag, names...)
+func (mt *systemModulesSdkMemberType) AddDependencies(ctx android.SdkDependencyContext, dependencyTag blueprint.DependencyTag, names []string) {
+ ctx.AddVariationDependencies(nil, dependencyTag, names...)
}
func (mt *systemModulesSdkMemberType) IsInstance(module android.Module) bool {
diff --git a/java/systemserver_classpath_fragment.go b/java/systemserver_classpath_fragment.go
index 6c2a5b5..5311f62 100644
--- a/java/systemserver_classpath_fragment.go
+++ b/java/systemserver_classpath_fragment.go
@@ -107,7 +107,16 @@
global := dexpreopt.GetGlobalConfig(ctx)
possibleUpdatableModules := gatherPossibleApexModuleNamesAndStems(ctx, s.properties.Contents, systemServerClasspathFragmentContentDepTag)
- return global.ApexSystemServerJars.Filter(possibleUpdatableModules)
+ jars, unknown := global.ApexSystemServerJars.Filter(possibleUpdatableModules)
+ // TODO(satayev): remove geotz ssc_fragment, since geotz is not part of SSCP anymore.
+ _, unknown = android.RemoveFromList("geotz", unknown)
+
+ // For non test apexes, make sure that all contents are actually declared in make.
+ if global.ApexSystemServerJars.Len() > 0 && len(unknown) > 0 {
+ ctx.ModuleErrorf("%s in contents must also be declared in PRODUCT_UPDATABLE_SYSTEM_SERVER_JARS", unknown)
+ }
+
+ return jars
}
type systemServerClasspathFragmentContentDependencyTag struct {
diff --git a/mk2rbc/Android.bp b/mk2rbc/Android.bp
index 3ea3f7f..4fa3eb6 100644
--- a/mk2rbc/Android.bp
+++ b/mk2rbc/Android.bp
@@ -13,6 +13,10 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
blueprint_go_binary {
name: "mk2rbc",
srcs: ["cmd/mk2rbc.go"],
diff --git a/mk2rbc/cmd/mk2rbc.go b/mk2rbc/cmd/mk2rbc.go
index 72525c4..209e82b 100644
--- a/mk2rbc/cmd/mk2rbc.go
+++ b/mk2rbc/cmd/mk2rbc.go
@@ -202,8 +202,7 @@
func buildProductConfigMap() map[string]string {
const androidProductsMk = "AndroidProducts.mk"
// Build the list of AndroidProducts.mk files: it's
- // build/make/target/product/AndroidProducts.mk plus
- // device/**/AndroidProducts.mk
+ // build/make/target/product/AndroidProducts.mk plus device/**/AndroidProducts.mk plus vendor/**/AndroidProducts.mk
targetAndroidProductsFile := filepath.Join(*rootDir, "build", "make", "target", "product", androidProductsMk)
if _, err := os.Stat(targetAndroidProductsFile); err != nil {
fmt.Fprintf(os.Stderr, "%s: %s\n(hint: %s is not a source tree root)\n",
@@ -213,17 +212,19 @@
if err := mk2rbc.UpdateProductConfigMap(productConfigMap, targetAndroidProductsFile); err != nil {
fmt.Fprintf(os.Stderr, "%s: %s\n", targetAndroidProductsFile, err)
}
- _ = filepath.Walk(filepath.Join(*rootDir, "device"),
- func(path string, info os.FileInfo, err error) error {
- if info.IsDir() || filepath.Base(path) != androidProductsMk {
+ for _, t := range []string{"device", "vendor"} {
+ _ = filepath.WalkDir(filepath.Join(*rootDir, t),
+ func(path string, d os.DirEntry, err error) error {
+ if err != nil || d.IsDir() || filepath.Base(path) != androidProductsMk {
+ return nil
+ }
+ if err2 := mk2rbc.UpdateProductConfigMap(productConfigMap, path); err2 != nil {
+ fmt.Fprintf(os.Stderr, "%s: %s\n", path, err2)
+ // Keep going, we want to find all such errors in a single run
+ }
return nil
- }
- if err2 := mk2rbc.UpdateProductConfigMap(productConfigMap, path); err2 != nil {
- fmt.Fprintf(os.Stderr, "%s: %s\n", path, err)
- // Keep going, we want to find all such errors in a single run
- }
- return nil
- })
+ })
+ }
return productConfigMap
}
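
A standalone sketch of the multi-root walk added above, runnable outside Soong; the root list and the use of ANDROID_BUILD_TOP are illustrative only:

package main

import (
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
)

// findAndroidProductsMk walks several source-tree roots and collects every
// AndroidProducts.mk, mirroring the device/ + vendor/ loop above. Missing
// roots and unreadable directories are silently skipped.
func findAndroidProductsMk(rootDir string, roots []string) []string {
	var found []string
	for _, t := range roots {
		_ = filepath.WalkDir(filepath.Join(rootDir, t),
			func(path string, d fs.DirEntry, err error) error {
				if err != nil || d.IsDir() || filepath.Base(path) != "AndroidProducts.mk" {
					return nil // keep going
				}
				found = append(found, path)
				return nil
			})
	}
	return found
}

func main() {
	for _, p := range findAndroidProductsMk(os.Getenv("ANDROID_BUILD_TOP"), []string{"device", "vendor"}) {
		fmt.Println(p)
	}
}
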
diff --git a/mk2rbc/mk2rbc_test.go b/mk2rbc/mk2rbc_test.go
index 46212ee..ca7fe6f 100644
--- a/mk2rbc/mk2rbc_test.go
+++ b/mk2rbc/mk2rbc_test.go
@@ -414,7 +414,7 @@
def init(g, handle):
cfg = rblf.cfg(handle)
- if rblf.filter(g.get("PRODUCT_LIST", ""), g["TARGET_PRODUCT"]):
+ if rblf.filter(g.get("PRODUCT_LIST", []), g["TARGET_PRODUCT"]):
pass
`,
},
diff --git a/mk2rbc/variable.go b/mk2rbc/variable.go
index 88d63c9..4bb9ed5 100644
--- a/mk2rbc/variable.go
+++ b/mk2rbc/variable.go
@@ -299,6 +299,10 @@
vt = vi.valueType
}
}
+ if strings.HasSuffix(name, "_LIST") && vt == starlarkTypeUnknown {
+ // Heuristics: Variables with "_LIST" suffix are lists
+ vt = starlarkTypeList
+ }
v = &otherGlobalVariable{baseVariable{nam: name, typ: vt}}
}
ctx.variables[name] = v
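
A tiny standalone illustration of the "_LIST" suffix heuristic; the function is hypothetical, only the suffix rule itself comes from the change above:

package main

import (
	"fmt"
	"strings"
)

// inferIsList mirrors the heuristic above: a variable whose type is otherwise
// unknown but whose name ends in "_LIST" is assumed to be a list.
func inferIsList(name string, typeKnown bool) bool {
	return !typeKnown && strings.HasSuffix(name, "_LIST")
}

func main() {
	fmt.Println(inferIsList("PRODUCT_LIST", false)) // true
	fmt.Println(inferIsList("PRODUCT_NAME", false)) // false
}

This is why the mk2rbc test expectation earlier in this change switches the default for PRODUCT_LIST from "" to [].
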
diff --git a/python/binary.go b/python/binary.go
index b106536..bc2768c 100644
--- a/python/binary.go
+++ b/python/binary.go
@@ -38,6 +38,7 @@
Main string
Srcs bazel.LabelListAttribute
Data bazel.LabelListAttribute
+ Deps bazel.LabelListAttribute
Python_version string
}
@@ -81,11 +82,13 @@
srcs := android.BazelLabelForModuleSrcExcludes(ctx, m.properties.Srcs, m.properties.Exclude_srcs)
data := android.BazelLabelForModuleSrc(ctx, m.properties.Data)
+ deps := android.BazelLabelForModuleDeps(ctx, m.properties.Libs)
attrs := &bazelPythonBinaryAttributes{
Main: main,
Srcs: bazel.MakeLabelListAttribute(srcs),
Data: bazel.MakeLabelListAttribute(data),
+ Deps: bazel.MakeLabelListAttribute(deps),
Python_version: python_version,
}
diff --git a/python/library.go b/python/library.go
index 9663b3c..a132216 100644
--- a/python/library.go
+++ b/python/library.go
@@ -17,11 +17,17 @@
// This file contains the module types for building Python library.
import (
+ "fmt"
+
"android/soong/android"
+ "android/soong/bazel"
+ "github.com/google/blueprint/proptools"
)
func init() {
registerPythonLibraryComponents(android.InitRegistrationContext)
+ android.RegisterBp2BuildMutator("python_library_host", PythonLibraryHostBp2Build)
+ android.RegisterBp2BuildMutator("python_library", PythonLibraryBp2Build)
}
func registerPythonLibraryComponents(ctx android.RegistrationContext) {
@@ -32,11 +38,79 @@
func PythonLibraryHostFactory() android.Module {
module := newModule(android.HostSupported, android.MultilibFirst)
+ android.InitBazelModule(module)
+
return module.init()
}
+type bazelPythonLibraryAttributes struct {
+ Srcs bazel.LabelListAttribute
+ Data bazel.LabelListAttribute
+ Deps bazel.LabelListAttribute
+ Srcs_version string
+}
+
+func PythonLibraryHostBp2Build(ctx android.TopDownMutatorContext) {
+ pythonLibBp2Build(ctx, "python_library_host")
+}
+
+func PythonLibraryBp2Build(ctx android.TopDownMutatorContext) {
+ pythonLibBp2Build(ctx, "python_library")
+}
+
+func pythonLibBp2Build(ctx android.TopDownMutatorContext, modType string) {
+ m, ok := ctx.Module().(*Module)
+ if !ok || !m.ConvertWithBp2build(ctx) {
+ return
+ }
+
+ // a Module can be something other than a `modType`
+ if ctx.ModuleType() != modType {
+ return
+ }
+
+ // TODO(b/182306917): this doesn't fully handle all nested props versioned
+ // by the python version, which would have been handled by the version split
+ // mutator. This is sufficient for very simple python_library modules under
+ // Bionic.
+ py3Enabled := proptools.BoolDefault(m.properties.Version.Py3.Enabled, true)
+ py2Enabled := proptools.BoolDefault(m.properties.Version.Py2.Enabled, false)
+ var python_version string
+ if py2Enabled && !py3Enabled {
+ python_version = "PY2"
+ } else if !py2Enabled && py3Enabled {
+ python_version = "PY3"
+ } else if !py2Enabled && !py3Enabled {
+ panic(fmt.Errorf(
+ "error for '%s' module: bp2build's %s converter doesn't understand having "+
+ "neither py2 nor py3 enabled", m.Name(), modType))
+ } else {
+ // do nothing, since python_version defaults to PY2ANDPY3
+ }
+
+ srcs := android.BazelLabelForModuleSrcExcludes(ctx, m.properties.Srcs, m.properties.Exclude_srcs)
+ data := android.BazelLabelForModuleSrc(ctx, m.properties.Data)
+ deps := android.BazelLabelForModuleDeps(ctx, m.properties.Libs)
+
+ attrs := &bazelPythonLibraryAttributes{
+ Srcs: bazel.MakeLabelListAttribute(srcs),
+ Data: bazel.MakeLabelListAttribute(data),
+ Deps: bazel.MakeLabelListAttribute(deps),
+ Srcs_version: python_version,
+ }
+
+ props := bazel.BazelTargetModuleProperties{
+ // Use the native py_library rule.
+ Rule_class: "py_library",
+ }
+
+ ctx.CreateBazelTargetModule(m.Name(), props, attrs)
+}
+
func PythonLibraryFactory() android.Module {
module := newModule(android.HostAndDeviceSupported, android.MultilibBoth)
+ android.InitBazelModule(module)
+
return module.init()
}
diff --git a/python/python.go b/python/python.go
index 0f5b788..83844e6 100644
--- a/python/python.go
+++ b/python/python.go
@@ -675,7 +675,7 @@
if !isPythonLibModule(child) {
ctx.PropertyErrorf("libs",
"the dependency %q of module %q is not Python library!",
- ctx.ModuleName(), ctx.OtherModuleName(child))
+ ctx.OtherModuleName(child), ctx.ModuleName())
}
// collect source and data paths, checking that there are no duplicate output file conflicts
if dep, ok := child.(pythonDependency); ok {
diff --git a/rust/compiler.go b/rust/compiler.go
index d9e21ff..7bd9af4 100644
--- a/rust/compiler.go
+++ b/rust/compiler.go
@@ -397,8 +397,15 @@
}
if compiler.location == InstallInData && ctx.RustModule().UseVndk() {
- dir = filepath.Join(dir, "vendor")
+ if ctx.RustModule().InProduct() {
+ dir = filepath.Join(dir, "product")
+ } else if ctx.RustModule().InVendor() {
+ dir = filepath.Join(dir, "vendor")
+ } else {
+ ctx.ModuleErrorf("Unknown data+VNDK installation kind")
+ }
}
+
return android.PathForModuleInstall(ctx, dir, compiler.subDir,
compiler.relativeInstallPath(), compiler.relative)
}
diff --git a/rust/config/allowed_list.go b/rust/config/allowed_list.go
index 926d2ac..63a8f04 100644
--- a/rust/config/allowed_list.go
+++ b/rust/config/allowed_list.go
@@ -13,6 +13,7 @@
"external/minijail",
"external/rust",
"external/selinux/libselinux",
+ "external/uwb",
"external/vm_tools/p9",
"frameworks/native/libs/binder/rust",
"frameworks/proto_logging/stats",
@@ -25,6 +26,7 @@
"system/extras/simpleperf",
"system/hardware/interfaces/keystore2",
"system/librustutils",
+ "system/logging/liblog",
"system/logging/rust",
"system/security",
"system/tools/aidl",
diff --git a/rust/doc.go b/rust/doc.go
index e7f1371..fe3581b 100644
--- a/rust/doc.go
+++ b/rust/doc.go
@@ -29,6 +29,14 @@
type rustdocSingleton struct{}
func (n *rustdocSingleton) GenerateBuildActions(ctx android.SingletonContext) {
+ docDir := android.PathForOutput(ctx, "rustdoc")
+ docZip := android.PathForOutput(ctx, "rustdoc.zip")
+ rule := android.NewRuleBuilder(pctx, ctx)
+ zipCmd := rule.Command().BuiltTool("soong_zip").
+ FlagWithOutput("-o ", docZip).
+ FlagWithArg("-C ", docDir.String()).
+ FlagWithArg("-D ", docDir.String())
+
ctx.VisitAllModules(func(module android.Module) {
if !module.Enabled() {
return
@@ -36,8 +44,10 @@
if m, ok := module.(*Module); ok {
if m.docTimestampFile.Valid() {
- ctx.Phony("rustdoc", m.docTimestampFile.Path())
+ zipCmd.Implicit(m.docTimestampFile.Path())
}
}
})
+ rule.Build("rustdoc-zip", "Zipping all built Rust documentation...")
+ ctx.Phony("rustdoc", docZip)
}
diff --git a/rust/image.go b/rust/image.go
index 3b54f12..5d57f15 100644
--- a/rust/image.go
+++ b/rust/image.go
@@ -34,11 +34,11 @@
}
func (mod *Module) ProductAvailable() bool {
- return false
+ return Bool(mod.VendorProperties.Product_available)
}
func (mod *Module) RamdiskAvailable() bool {
- return false
+ return Bool(mod.Properties.Ramdisk_available)
}
func (mod *Module) VendorRamdiskAvailable() bool {
@@ -50,7 +50,7 @@
}
func (mod *Module) RecoveryAvailable() bool {
- return false
+ return Bool(mod.Properties.Recovery_available)
}
func (mod *Module) ExtraVariants() []string {
@@ -62,9 +62,7 @@
}
func (mod *Module) SetRamdiskVariantNeeded(b bool) {
- if b {
- panic("Setting ramdisk variant needed for Rust module is unsupported: " + mod.BaseModuleName())
- }
+ mod.Properties.RamdiskVariantNeeded = b
}
func (mod *Module) SetVendorRamdiskVariantNeeded(b bool) {
@@ -72,9 +70,7 @@
}
func (mod *Module) SetRecoveryVariantNeeded(b bool) {
- if b {
- panic("Setting recovery variant needed for Rust module is unsupported: " + mod.BaseModuleName())
- }
+ mod.Properties.RecoveryVariantNeeded = b
}
func (mod *Module) SetCoreVariantNeeded(b bool) {
@@ -99,7 +95,7 @@
}
func (mod *Module) RamdiskVariantNeeded(android.BaseModuleContext) bool {
- return mod.InRamdisk()
+ return mod.Properties.RamdiskVariantNeeded
}
func (mod *Module) DebugRamdiskVariantNeeded(ctx android.BaseModuleContext) bool {
@@ -107,7 +103,7 @@
}
func (mod *Module) RecoveryVariantNeeded(android.BaseModuleContext) bool {
- return mod.InRecovery()
+ return mod.Properties.RecoveryVariantNeeded
}
func (mod *Module) ExtraImageVariations(android.BaseModuleContext) []string {
@@ -140,12 +136,17 @@
}
func (ctx *moduleContext) ProductSpecific() bool {
- return false
+ return ctx.ModuleContext.ProductSpecific() || ctx.RustModule().productSpecificModuleContext()
+}
+
+func (c *Module) productSpecificModuleContext() bool {
+ // Additionally check whether this module is InProduct(), which means it is a "product" variant
+ // of a module. Like product-specific modules, product variants must be installed to /product.
+ return c.InProduct()
}
func (mod *Module) InRecovery() bool {
- // TODO(b/165791368)
- return false
+ return mod.ModuleBase.InRecovery() || mod.ModuleBase.InstallInRecovery()
}
func (mod *Module) InVendorRamdisk() bool {
@@ -166,6 +167,11 @@
return false
}
+func (mod *Module) OnlyInProduct() bool {
+ //TODO(b/165791368)
+ return false
+}
+
// Returns true when this module is configured to have core and vendor variants.
func (mod *Module) HasVendorVariant() bool {
return Bool(mod.VendorProperties.Vendor_available) || Bool(mod.VendorProperties.Odm_available)
@@ -181,7 +187,7 @@
}
func (mod *Module) InProduct() bool {
- return false
+ return mod.Properties.ImageVariationPrefix == cc.ProductVariationPrefix
}
// Returns true if the module is "vendor" variant. Usually these modules are installed in /vendor
@@ -193,6 +199,8 @@
m := module.(*Module)
if variant == android.VendorRamdiskVariation {
m.MakeAsPlatform()
+ } else if variant == android.RecoveryVariation {
+ m.MakeAsPlatform()
} else if strings.HasPrefix(variant, cc.VendorVariationPrefix) {
m.Properties.ImageVariationPrefix = cc.VendorVariationPrefix
m.Properties.VndkVersion = strings.TrimPrefix(variant, cc.VendorVariationPrefix)
@@ -204,6 +212,9 @@
m.Properties.HideFromMake = true
m.HideFromMake()
}
+ } else if strings.HasPrefix(variant, cc.ProductVariationPrefix) {
+ m.Properties.ImageVariationPrefix = cc.ProductVariationPrefix
+ m.Properties.VndkVersion = strings.TrimPrefix(variant, cc.ProductVariationPrefix)
}
}
@@ -211,10 +222,7 @@
// Rust does not support installing to the product image yet.
vendorSpecific := mctx.SocSpecific() || mctx.DeviceSpecific()
- if Bool(mod.VendorProperties.Product_available) {
- mctx.PropertyErrorf("product_available",
- "Rust modules do not yet support being available to the product image")
- } else if mctx.ProductSpecific() {
+ if mctx.ProductSpecific() {
mctx.PropertyErrorf("product_specific",
"Rust modules do not yet support installing to the product image.")
} else if Bool(mod.VendorProperties.Double_loadable) {
diff --git a/rust/project_json_test.go b/rust/project_json_test.go
index bdd54c5..f7b6681 100644
--- a/rust/project_json_test.go
+++ b/rust/project_json_test.go
@@ -36,7 +36,7 @@
// The JSON file is generated via WriteFileToOutputDir. Therefore, it
// won't appear in the Output of the TestingSingleton. Manually verify
// it exists.
- content, err := ioutil.ReadFile(filepath.Join(result.Config.BuildDir(), rustProjectJsonFileName))
+ content, err := ioutil.ReadFile(filepath.Join(result.Config.SoongOutDir(), rustProjectJsonFileName))
if err != nil {
t.Errorf("rust-project.json has not been generated")
}
diff --git a/rust/rust.go b/rust/rust.go
index 4ceeef1..0cd299d 100644
--- a/rust/rust.go
+++ b/rust/rust.go
@@ -84,6 +84,8 @@
// Set by imageMutator
CoreVariantNeeded bool `blueprint:"mutated"`
VendorRamdiskVariantNeeded bool `blueprint:"mutated"`
+ RamdiskVariantNeeded bool `blueprint:"mutated"`
+ RecoveryVariantNeeded bool `blueprint:"mutated"`
ExtraVariants []string `blueprint:"mutated"`
// Allows this module to use non-APEX version of libraries. Useful
@@ -94,11 +96,18 @@
SnapshotSharedLibs []string `blueprint:"mutated"`
SnapshotStaticLibs []string `blueprint:"mutated"`
+ // Make this module available when building for ramdisk.
+ // On device without a dedicated recovery partition, the module is only
+ // available after switching root into
+ // /first_stage_ramdisk. To expose the module before switching root, install
+ // the recovery variant instead.
+ Ramdisk_available *bool
+
// Make this module available when building for vendor ramdisk.
// On device without a dedicated recovery partition, the module is only
// available after switching root into
// /first_stage_ramdisk. To expose the module before switching root, install
- // the recovery variant instead (TODO(b/165791368) recovery not yet supported)
+ // the recovery variant instead
Vendor_ramdisk_available *bool
// Normally Soong uses the directory structure to decide which modules
@@ -115,6 +124,9 @@
// framework module from the recovery snapshot.
Exclude_from_recovery_snapshot *bool
+ // Make this module available when building for recovery
+ Recovery_available *bool
+
// Minimum sdk version that the artifact should support when it runs as part of mainline modules(APEX).
Min_sdk_version *string
@@ -762,6 +774,10 @@
}
func (mod *Module) nativeCoverage() bool {
+ // Bug: http://b/137883967 - native-bridge modules do not currently work with coverage
+ if mod.Target().NativeBridge == android.NativeBridgeEnabled {
+ return false
+ }
return mod.compiler != nil && mod.compiler.nativeCoverage()
}
@@ -804,9 +820,21 @@
// Differentiate static libraries that are vendor available
if mod.UseVndk() {
- mod.Properties.SubName += cc.VendorSuffix
+ if mod.InProduct() && !mod.OnlyInProduct() {
+ mod.Properties.SubName += cc.ProductSuffix
+ } else {
+ mod.Properties.SubName += cc.VendorSuffix
+ }
+ } else if mod.InRamdisk() && !mod.OnlyInRamdisk() {
+ mod.Properties.SubName += cc.RamdiskSuffix
} else if mod.InVendorRamdisk() && !mod.OnlyInVendorRamdisk() {
mod.Properties.SubName += cc.VendorRamdiskSuffix
+ } else if mod.InRecovery() && !mod.OnlyInRecovery() {
+ mod.Properties.SubName += cc.RecoverySuffix
+ }
+
+ if mod.Target().NativeBridge == android.NativeBridgeEnabled {
+ mod.Properties.SubName += cc.NativeBridgeSuffix
}
if !toolchain.Supported() {
@@ -1205,6 +1233,18 @@
return mod.compiler.inData()
}
+func (mod *Module) InstallInRamdisk() bool {
+ return mod.InRamdisk()
+}
+
+func (mod *Module) InstallInVendorRamdisk() bool {
+ return mod.InVendorRamdisk()
+}
+
+func (mod *Module) InstallInRecovery() bool {
+ return mod.InRecovery()
+}
+
func linkPathFromFilePath(filepath android.Path) string {
return strings.Split(filepath.String(), filepath.Base())[0]
}
diff --git a/scripts/build-rustdocs.sh b/scripts/build-rustdocs.sh
index ad8ba16..fda9688 100755
--- a/scripts/build-rustdocs.sh
+++ b/scripts/build-rustdocs.sh
@@ -27,5 +27,5 @@
if [ -n "${DIST_DIR}" ]; then
mkdir -p ${DIST_DIR}
- cp -r ${OUT_DIR}/soong/rustdoc $DIST_DIR/rustdoc
+ cp ${OUT_DIR}/soong/rustdoc.zip $DIST_DIR
fi
diff --git a/scripts/check_boot_jars/check_boot_jars.py b/scripts/check_boot_jars/check_boot_jars.py
index c271211..b711f9d 100755
--- a/scripts/check_boot_jars/check_boot_jars.py
+++ b/scripts/check_boot_jars/check_boot_jars.py
@@ -1,101 +1,102 @@
#!/usr/bin/env python
+"""Check boot jars.
+Usage: check_boot_jars.py <dexdump_path> <package_allow_list_file> <jar1> \
+<jar2> ...
"""
-Check boot jars.
-
-Usage: check_boot_jars.py <dexdump_path> <package_allow_list_file> <jar1> <jar2> ...
-"""
+from __future__ import print_function
import logging
-import os.path
import re
import subprocess
import sys
import xml.etree.ElementTree
-
# The compiled allow list RE.
allow_list_re = None
def LoadAllowList(filename):
- """ Load and compile allow list regular expressions from filename.
- """
- lines = []
- with open(filename, 'r') as f:
- for line in f:
- line = line.strip()
- if not line or line.startswith('#'):
- continue
- lines.append(line)
- combined_re = r'^(%s)$' % '|'.join(lines)
- global allow_list_re
- try:
- allow_list_re = re.compile(combined_re)
- except re.error:
- logging.exception(
- 'Cannot compile package allow list regular expression: %r',
- combined_re)
- allow_list_re = None
- return False
- return True
+ """ Load and compile allow list regular expressions from filename."""
+ lines = []
+ with open(filename, 'r') as f:
+ for line in f:
+ line = line.strip()
+ if not line or line.startswith('#'):
+ continue
+ lines.append(line)
+ combined_re = r'^(%s)$' % '|'.join(lines)
+ global allow_list_re #pylint: disable=global-statement
+ try:
+ allow_list_re = re.compile(combined_re)
+ except re.error:
+ logging.exception(
+ 'Cannot compile package allow list regular expression: %r',
+ combined_re)
+ allow_list_re = None
+ return False
+ return True
def CheckDexJar(dexdump_path, allow_list_path, jar):
- """Check a dex jar file.
- """
- # Use dexdump to generate the XML representation of the dex jar file.
- p = subprocess.Popen(args='%s -l xml %s' % (dexdump_path, jar),
- stdout=subprocess.PIPE, shell=True)
- stdout, _ = p.communicate()
- if p.returncode != 0:
- return False
+ """Check a dex jar file."""
+ # Use dexdump to generate the XML representation of the dex jar file.
+ p = subprocess.Popen(
+ args='%s -l xml %s' % (dexdump_path, jar),
+ stdout=subprocess.PIPE,
+ shell=True)
+ stdout, _ = p.communicate()
+ if p.returncode != 0:
+ return False
- packages = 0
- try:
- # TODO(b/172063475) - improve performance
- root = xml.etree.ElementTree.fromstring(stdout)
- except xml.etree.ElementTree.ParseError as e:
- print >> sys.stderr, 'Error processing jar %s - %s' % (jar, e)
- print >> sys.stderr, stdout
- return False
- for package_elt in root.iterfind('package'):
- packages += 1
- package_name = package_elt.get('name')
- if not package_name or not allow_list_re.match(package_name):
- # Report the name of a class in the package as it is easier to navigate to
- # the source of a concrete class than to a package which is often required
- # to investigate this failure.
- class_name = package_elt[0].get('name')
- if package_name != "":
- class_name = package_name + "." + class_name
- print >> sys.stderr, ('Error: %s contains class file %s, whose package name "%s" is empty or'
- ' not in the allow list %s of packages allowed on the bootclasspath.'
- % (jar, class_name, package_name, allow_list_path))
- return False
- if packages == 0:
- print >> sys.stderr, ('Error: %s does not contain any packages.' % jar)
- return False
- return True
-
+ packages = 0
+ try:
+ # TODO(b/172063475) - improve performance
+ root = xml.etree.ElementTree.fromstring(stdout)
+ except xml.etree.ElementTree.ParseError as e:
+ print('Error processing jar %s - %s' % (jar, e), file=sys.stderr)
+ print(stdout, file=sys.stderr)
+ return False
+ for package_elt in root.iterfind('package'):
+ packages += 1
+ package_name = package_elt.get('name')
+ if not package_name or not allow_list_re.match(package_name):
+ # Report the name of a class in the package as it is easier to
+ # navigate to the source of a concrete class than to a package
+ # which is often required to investigate this failure.
+ class_name = package_elt[0].get('name')
+ if package_name:
+ class_name = package_name + '.' + class_name
+ print((
+ 'Error: %s contains class file %s, whose package name "%s" is '
+ 'empty or not in the allow list %s of packages allowed on the '
+ 'bootclasspath.'
+ % (jar, class_name, package_name, allow_list_path)),
+ file=sys.stderr)
+ return False
+ if packages == 0:
+ print(('Error: %s does not contain any packages.' % jar),
+ file=sys.stderr)
+ return False
+ return True
def main(argv):
- if len(argv) < 3:
- print __doc__
- return 1
- dexdump_path = argv[0]
- allow_list_path = argv[1]
+ if len(argv) < 3:
+ print(__doc__)
+ return 1
+ dexdump_path = argv[0]
+ allow_list_path = argv[1]
- if not LoadAllowList(allow_list_path):
- return 1
+ if not LoadAllowList(allow_list_path):
+ return 1
- passed = True
- for jar in argv[2:]:
- if not CheckDexJar(dexdump_path, allow_list_path, jar):
- passed = False
- if not passed:
- return 1
+ passed = True
+ for jar in argv[2:]:
+ if not CheckDexJar(dexdump_path, allow_list_path, jar):
+ passed = False
+ if not passed:
+ return 1
- return 0
+ return 0
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/scripts/check_boot_jars/package_allowed_list.txt b/scripts/check_boot_jars/package_allowed_list.txt
index 18ab427..942f26a 100644
--- a/scripts/check_boot_jars/package_allowed_list.txt
+++ b/scripts/check_boot_jars/package_allowed_list.txt
@@ -69,6 +69,7 @@
javax\.xml\.transform\.stream
javax\.xml\.validation
javax\.xml\.xpath
+jdk\.internal\.math
jdk\.internal\.util
jdk\.internal\.vm\.annotation
jdk\.net
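
Note (editor): as context for the check_boot_jars.py rewrite above and the new `jdk\.internal\.math` entry, each non-comment allow-list line is a full-match regular expression, and LoadAllowList() joins them into a single anchored pattern. Below is a minimal, self-contained sketch of how that combined pattern behaves; the entries and package names are made up for illustration and are not the real list.

```
# Sketch of the allow-list matching performed by check_boot_jars.py.
# The entries and package names below are examples only.
import re

allow_list_lines = [
    r'java\..*',            # one full-match regex per non-comment line
    r'jdk\.internal\.math',
]

# Mirrors LoadAllowList(): anchor the alternation so a package name must
# match one entry exactly.
allow_list_re = re.compile(r'^(%s)$' % '|'.join(allow_list_lines))

for package in ('java.lang', 'jdk.internal.math', 'com.example.secret'):
    verdict = 'allowed' if allow_list_re.match(package) else 'NOT allowed'
    print('%-22s %s' % (package, verdict))
```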
diff --git a/scripts/construct_context.py b/scripts/construct_context.py
index f0658ba..3f601c3 100755
--- a/scripts/construct_context.py
+++ b/scripts/construct_context.py
@@ -25,57 +25,78 @@
def parse_args(args):
- """Parse commandline arguments."""
- parser = argparse.ArgumentParser()
- parser.add_argument('--target-sdk-version', default='', dest='sdk',
- help='specify target SDK version (as it appears in the manifest)')
- parser.add_argument('--host-context-for-sdk', dest='host_contexts',
- action='append', nargs=2, metavar=('sdk','context'),
- help='specify context on host for a given SDK version or "any" version')
- parser.add_argument('--target-context-for-sdk', dest='target_contexts',
- action='append', nargs=2, metavar=('sdk','context'),
- help='specify context on target for a given SDK version or "any" version')
- return parser.parse_args(args)
+ """Parse commandline arguments."""
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--target-sdk-version',
+ default='',
+ dest='sdk',
+ help='specify target SDK version (as it appears in the manifest)')
+ parser.add_argument(
+ '--host-context-for-sdk',
+ dest='host_contexts',
+ action='append',
+ nargs=2,
+ metavar=('sdk', 'context'),
+ help='specify context on host for a given SDK version or "any" version')
+ parser.add_argument(
+ '--target-context-for-sdk',
+ dest='target_contexts',
+ action='append',
+ nargs=2,
+ metavar=('sdk', 'context'),
+ help='specify context on target for a given SDK version or "any" '
+ 'version'
+ )
+ return parser.parse_args(args)
+
# Special keyword that means that the context should be added to class loader
# context regardless of the target SDK version.
any_sdk = 'any'
+
# We assume that the order of context arguments passed to this script is
# correct (matches the order computed by package manager). It is possible to
# sort them here, but Soong needs to use deterministic order anyway, so it can
# as well use the correct order.
def construct_context(versioned_contexts, target_sdk):
- context = []
- for [sdk, ctx] in versioned_contexts:
- if sdk == any_sdk or compare_version_gt(sdk, target_sdk):
- context.append(ctx)
- return context
+ context = []
+ for [sdk, ctx] in versioned_contexts:
+ if sdk == any_sdk or compare_version_gt(sdk, target_sdk):
+ context.append(ctx)
+ return context
+
def construct_contexts(args):
- host_context = construct_context(args.host_contexts, args.sdk)
- target_context = construct_context(args.target_contexts, args.sdk)
- context_sep = '#'
- return ('class_loader_context_arg=--class-loader-context=PCL[]{%s} ; ' % context_sep.join(host_context) +
- 'stored_class_loader_context_arg=--stored-class-loader-context=PCL[]{%s}' % context_sep.join(target_context))
+ host_context = construct_context(args.host_contexts, args.sdk)
+ target_context = construct_context(args.target_contexts, args.sdk)
+ context_sep = '#'
+ return (
+ 'class_loader_context_arg=--class-loader-context=PCL[]{%s} ; ' %
+ context_sep.join(host_context) +
+ 'stored_class_loader_context_arg=--stored-class-loader-context=PCL[]{%s}' #pylint: disable=line-too-long
+ % context_sep.join(target_context))
+
def main():
- """Program entry point."""
- try:
- args = parse_args(sys.argv[1:])
- if not args.sdk:
- raise SystemExit('target sdk version is not set')
- if not args.host_contexts:
- args.host_contexts = []
- if not args.target_contexts:
- args.target_contexts = []
+ """Program entry point."""
+ try:
+ args = parse_args(sys.argv[1:])
+ if not args.sdk:
+ raise SystemExit('target sdk version is not set')
+ if not args.host_contexts:
+ args.host_contexts = []
+ if not args.target_contexts:
+ args.target_contexts = []
- print(construct_contexts(args))
+ print(construct_contexts(args))
- # pylint: disable=broad-except
- except Exception as err:
- print('error: ' + str(err), file=sys.stderr)
- sys.exit(-1)
+ # pylint: disable=broad-except
+ except Exception as err:
+ print('error: ' + str(err), file=sys.stderr)
+ sys.exit(-1)
+
if __name__ == '__main__':
- main()
+ main()
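
Note (editor): the reformatted construct_context() above keeps the same selection rule: a class loader context entry is used when its SDK tag is `any` or strictly greater than the app's target SDK (the tests below exercise exactly this). A rough, self-contained restatement of that rule follows; it uses plain integer comparison in place of the script's compare_version_gt(), which also handles codenames and dotted versions.

```
# Illustration of the context-selection rule in construct_context();
# compare_version_gt() in the real script is more general than int().
def include(context_sdk, target_sdk):
    return context_sdk == 'any' or int(context_sdk) > int(target_sdk)

versioned = [('28', 'PCL[z.jar]'), ('29', 'PCL[x.jar]'), ('any', 'PCL[a.jar]')]
print([ctx for sdk, ctx in versioned if include(sdk, '28')])
# -> ['PCL[x.jar]', 'PCL[a.jar]']
```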
diff --git a/scripts/construct_context_test.py b/scripts/construct_context_test.py
index 3b05f90..2ff5ac5 100755
--- a/scripts/construct_context_test.py
+++ b/scripts/construct_context_test.py
@@ -23,53 +23,63 @@
sys.dont_write_bytecode = True
+
def construct_contexts(arglist):
- args = cc.parse_args(arglist)
- return cc.construct_contexts(args)
+ args = cc.parse_args(arglist)
+ return cc.construct_contexts(args)
+
contexts = [
- '--host-context-for-sdk', '28', 'PCL[out/zdir/z.jar]',
- '--target-context-for-sdk', '28', 'PCL[/system/z.jar]',
- '--host-context-for-sdk', '29', 'PCL[out/xdir/x.jar]#PCL[out/ydir/y.jar]',
- '--target-context-for-sdk', '29', 'PCL[/system/x.jar]#PCL[/product/y.jar]',
- '--host-context-for-sdk', 'any', 'PCL[out/adir/a.jar]#PCL[out/bdir/b.jar]',
- '--target-context-for-sdk', 'any', 'PCL[/system/a.jar]#PCL[/product/b.jar]',
+ '--host-context-for-sdk',
+ '28',
+ 'PCL[out/zdir/z.jar]',
+ '--target-context-for-sdk',
+ '28',
+ 'PCL[/system/z.jar]',
+ '--host-context-for-sdk',
+ '29',
+ 'PCL[out/xdir/x.jar]#PCL[out/ydir/y.jar]',
+ '--target-context-for-sdk',
+ '29',
+ 'PCL[/system/x.jar]#PCL[/product/y.jar]',
+ '--host-context-for-sdk',
+ 'any',
+ 'PCL[out/adir/a.jar]#PCL[out/bdir/b.jar]',
+ '--target-context-for-sdk',
+ 'any',
+ 'PCL[/system/a.jar]#PCL[/product/b.jar]',
]
+#pylint: disable=line-too-long
class ConstructContextTest(unittest.TestCase):
- def test_construct_context_28(self):
- args = ['--target-sdk-version', '28'] + contexts
- result = construct_contexts(args)
- expect = ('class_loader_context_arg=--class-loader-context=PCL[]{PCL[out/xdir/x.jar]'
- '#PCL[out/ydir/y.jar]'
- '#PCL[out/adir/a.jar]'
- '#PCL[out/bdir/b.jar]}'
- ' ; '
- 'stored_class_loader_context_arg=--stored-class-loader-context=PCL[]{PCL[/system/x.jar]'
- '#PCL[/product/y.jar]'
- '#PCL[/system/a.jar]'
- '#PCL[/product/b.jar]}')
- self.assertEqual(result, expect)
- def test_construct_context_29(self):
- args = ['--target-sdk-version', '29'] + contexts
- result = construct_contexts(args)
- expect = ('class_loader_context_arg=--class-loader-context=PCL[]{PCL[out/adir/a.jar]'
- '#PCL[out/bdir/b.jar]}'
- ' ; '
- 'stored_class_loader_context_arg=--stored-class-loader-context=PCL[]{PCL[/system/a.jar]'
- '#PCL[/product/b.jar]}')
- self.assertEqual(result, expect)
+ def test_construct_context_28(self):
+ args = ['--target-sdk-version', '28'] + contexts
+ result = construct_contexts(args)
+ expect = (
+ 'class_loader_context_arg=--class-loader-context=PCL[]{PCL[out/xdir/x.jar]#PCL[out/ydir/y.jar]#PCL[out/adir/a.jar]#PCL[out/bdir/b.jar]}'
+ ' ; '
+ 'stored_class_loader_context_arg=--stored-class-loader-context=PCL[]{PCL[/system/x.jar]#PCL[/product/y.jar]#PCL[/system/a.jar]#PCL[/product/b.jar]}')
+ self.assertEqual(result, expect)
- def test_construct_context_S(self):
- args = ['--target-sdk-version', 'S'] + contexts
- result = construct_contexts(args)
- expect = ('class_loader_context_arg=--class-loader-context=PCL[]{PCL[out/adir/a.jar]'
- '#PCL[out/bdir/b.jar]}'
- ' ; '
- 'stored_class_loader_context_arg=--stored-class-loader-context=PCL[]{PCL[/system/a.jar]'
- '#PCL[/product/b.jar]}')
- self.assertEqual(result, expect)
+ def test_construct_context_29(self):
+ args = ['--target-sdk-version', '29'] + contexts
+ result = construct_contexts(args)
+ expect = (
+ 'class_loader_context_arg=--class-loader-context=PCL[]{PCL[out/adir/a.jar]#PCL[out/bdir/b.jar]}'
+ ' ; '
+ 'stored_class_loader_context_arg=--stored-class-loader-context=PCL[]{PCL[/system/a.jar]#PCL[/product/b.jar]}')
+ self.assertEqual(result, expect)
+
+ def test_construct_context_S(self):
+ args = ['--target-sdk-version', 'S'] + contexts
+ result = construct_contexts(args)
+ expect = (
+ 'class_loader_context_arg=--class-loader-context=PCL[]{PCL[out/adir/a.jar]#PCL[out/bdir/b.jar]}'
+ ' ; '
+ 'stored_class_loader_context_arg=--stored-class-loader-context=PCL[]{PCL[/system/a.jar]#PCL[/product/b.jar]}')
+ self.assertEqual(result, expect)
+#pylint: enable=line-too-long
if __name__ == '__main__':
- unittest.main(verbosity=2)
+ unittest.main(verbosity=2)
diff --git a/scripts/conv_linker_config.py b/scripts/conv_linker_config.py
index 92f79da..e46efe4 100644
--- a/scripts/conv_linker_config.py
+++ b/scripts/conv_linker_config.py
@@ -20,178 +20,181 @@
import json
import os
-import linker_config_pb2
+import linker_config_pb2 #pylint: disable=import-error
from google.protobuf.descriptor import FieldDescriptor
from google.protobuf.json_format import ParseDict
from google.protobuf.text_format import MessageToString
def Proto(args):
- json_content = ''
- with open(args.source) as f:
- for line in f:
- if not line.lstrip().startswith('//'):
- json_content += line
- obj = json.loads(json_content, object_pairs_hook=collections.OrderedDict)
- pb = ParseDict(obj, linker_config_pb2.LinkerConfig())
- with open(args.output, 'wb') as f:
- f.write(pb.SerializeToString())
+ json_content = ''
+ with open(args.source) as f:
+ for line in f:
+ if not line.lstrip().startswith('//'):
+ json_content += line
+ obj = json.loads(json_content, object_pairs_hook=collections.OrderedDict)
+ pb = ParseDict(obj, linker_config_pb2.LinkerConfig())
+ with open(args.output, 'wb') as f:
+ f.write(pb.SerializeToString())
def Print(args):
- with open(args.source, 'rb') as f:
- pb = linker_config_pb2.LinkerConfig()
- pb.ParseFromString(f.read())
- print(MessageToString(pb))
+ with open(args.source, 'rb') as f:
+ pb = linker_config_pb2.LinkerConfig()
+ pb.ParseFromString(f.read())
+ print(MessageToString(pb))
def SystemProvide(args):
- pb = linker_config_pb2.LinkerConfig()
- with open(args.source, 'rb') as f:
- pb.ParseFromString(f.read())
- libraries = args.value.split()
+ pb = linker_config_pb2.LinkerConfig()
+ with open(args.source, 'rb') as f:
+ pb.ParseFromString(f.read())
+ libraries = args.value.split()
- def IsInLibPath(lib_name):
- lib_path = os.path.join(args.system, 'lib', lib_name)
- lib64_path = os.path.join(args.system, 'lib64', lib_name)
- return os.path.exists(lib_path) or os.path.islink(lib_path) or os.path.exists(lib64_path) or os.path.islink(lib64_path)
+ def IsInLibPath(lib_name):
+ lib_path = os.path.join(args.system, 'lib', lib_name)
+ lib64_path = os.path.join(args.system, 'lib64', lib_name)
+ return os.path.exists(lib_path) or os.path.islink(
+ lib_path) or os.path.exists(lib64_path) or os.path.islink(
+ lib64_path)
- installed_libraries = list(filter(IsInLibPath, libraries))
- for item in installed_libraries:
- if item not in getattr(pb, 'provideLibs'):
- getattr(pb, 'provideLibs').append(item)
- with open(args.output, 'wb') as f:
- f.write(pb.SerializeToString())
+ installed_libraries = [lib for lib in libraries if IsInLibPath(lib)]
+ for item in installed_libraries:
+ if item not in getattr(pb, 'provideLibs'):
+ getattr(pb, 'provideLibs').append(item)
+ with open(args.output, 'wb') as f:
+ f.write(pb.SerializeToString())
def Append(args):
- pb = linker_config_pb2.LinkerConfig()
- with open(args.source, 'rb') as f:
- pb.ParseFromString(f.read())
+ pb = linker_config_pb2.LinkerConfig()
+ with open(args.source, 'rb') as f:
+ pb.ParseFromString(f.read())
- if getattr(type(pb), args.key).DESCRIPTOR.label == FieldDescriptor.LABEL_REPEATED:
- for value in args.value.split():
- getattr(pb, args.key).append(value)
- else:
- setattr(pb, args.key, args.value)
+ if getattr(type(pb),
+ args.key).DESCRIPTOR.label == FieldDescriptor.LABEL_REPEATED:
+ for value in args.value.split():
+ getattr(pb, args.key).append(value)
+ else:
+ setattr(pb, args.key, args.value)
- with open(args.output, 'wb') as f:
- f.write(pb.SerializeToString())
+ with open(args.output, 'wb') as f:
+ f.write(pb.SerializeToString())
+
def Merge(args):
- pb = linker_config_pb2.LinkerConfig()
- for other in args.input:
- with open(other, 'rb') as f:
- pb.MergeFromString(f.read())
+ pb = linker_config_pb2.LinkerConfig()
+ for other in args.input:
+ with open(other, 'rb') as f:
+ pb.MergeFromString(f.read())
- with open(args.out, 'wb') as f:
- f.write(pb.SerializeToString())
+ with open(args.out, 'wb') as f:
+ f.write(pb.SerializeToString())
+
def GetArgParser():
- parser = argparse.ArgumentParser()
- subparsers = parser.add_subparsers()
+ parser = argparse.ArgumentParser()
+ subparsers = parser.add_subparsers()
- parser_proto = subparsers.add_parser(
- 'proto', help='Convert the input JSON configuration file into protobuf.')
- parser_proto.add_argument(
- '-s',
- '--source',
- required=True,
- type=str,
- help='Source linker configuration file in JSON.')
- parser_proto.add_argument(
- '-o',
- '--output',
- required=True,
- type=str,
- help='Target path to create protobuf file.')
- parser_proto.set_defaults(func=Proto)
+ parser_proto = subparsers.add_parser(
+ 'proto',
+ help='Convert the input JSON configuration file into protobuf.')
+ parser_proto.add_argument(
+ '-s',
+ '--source',
+ required=True,
+ type=str,
+ help='Source linker configuration file in JSON.')
+ parser_proto.add_argument(
+ '-o',
+ '--output',
+ required=True,
+ type=str,
+ help='Target path to create protobuf file.')
+ parser_proto.set_defaults(func=Proto)
- print_proto = subparsers.add_parser(
- 'print', help='Print configuration in human-readable text format.')
- print_proto.add_argument(
- '-s',
- '--source',
- required=True,
- type=str,
- help='Source linker configuration file in protobuf.')
- print_proto.set_defaults(func=Print)
+ print_proto = subparsers.add_parser(
+ 'print', help='Print configuration in human-readable text format.')
+ print_proto.add_argument(
+ '-s',
+ '--source',
+ required=True,
+ type=str,
+ help='Source linker configuration file in protobuf.')
+ print_proto.set_defaults(func=Print)
- system_provide_libs = subparsers.add_parser(
- 'systemprovide', help='Append system provide libraries into the configuration.')
- system_provide_libs.add_argument(
- '-s',
- '--source',
- required=True,
- type=str,
- help='Source linker configuration file in protobuf.')
- system_provide_libs.add_argument(
- '-o',
- '--output',
- required=True,
- type=str,
- help='Target linker configuration file to write in protobuf.')
- system_provide_libs.add_argument(
- '--value',
- required=True,
- type=str,
- help='Values of the libraries to append. If there are more than one it should be separated by empty space')
- system_provide_libs.add_argument(
- '--system',
- required=True,
- type=str,
- help='Path of the system image.')
- system_provide_libs.set_defaults(func=SystemProvide)
+ system_provide_libs = subparsers.add_parser(
+ 'systemprovide',
+ help='Append system provide libraries into the configuration.')
+ system_provide_libs.add_argument(
+ '-s',
+ '--source',
+ required=True,
+ type=str,
+ help='Source linker configuration file in protobuf.')
+ system_provide_libs.add_argument(
+ '-o',
+ '--output',
+ required=True,
+ type=str,
+ help='Target linker configuration file to write in protobuf.')
+ system_provide_libs.add_argument(
+ '--value',
+ required=True,
+ type=str,
+ help='Values of the libraries to append. If there is more than one, '
+ 'they should be separated by spaces.'

+ )
+ system_provide_libs.add_argument(
+ '--system', required=True, type=str, help='Path of the system image.')
+ system_provide_libs.set_defaults(func=SystemProvide)
- append = subparsers.add_parser(
- 'append', help='Append value(s) to given key.')
- append.add_argument(
- '-s',
- '--source',
- required=True,
- type=str,
- help='Source linker configuration file in protobuf.')
- append.add_argument(
- '-o',
- '--output',
- required=True,
- type=str,
- help='Target linker configuration file to write in protobuf.')
- append.add_argument(
- '--key',
- required=True,
- type=str,
- help='.')
- append.add_argument(
- '--value',
- required=True,
- type=str,
- help='Values of the libraries to append. If there are more than one it should be separated by empty space')
- append.set_defaults(func=Append)
+ append = subparsers.add_parser(
+ 'append', help='Append value(s) to given key.')
+ append.add_argument(
+ '-s',
+ '--source',
+ required=True,
+ type=str,
+ help='Source linker configuration file in protobuf.')
+ append.add_argument(
+ '-o',
+ '--output',
+ required=True,
+ type=str,
+ help='Target linker configuration file to write in protobuf.')
+ append.add_argument('--key', required=True, type=str, help='.')
+ append.add_argument(
+ '--value',
+ required=True,
+ type=str,
+ help='Values of the libraries to append. If there is more than one, '
+ 'they should be separated by spaces.'
+ )
+ append.set_defaults(func=Append)
- append = subparsers.add_parser(
- 'merge', help='Merge configurations')
- append.add_argument(
- '-o',
- '--out',
- required=True,
- type=str,
- help='Ouptut linker configuration file to write in protobuf.')
- append.add_argument(
- '-i',
- '--input',
- nargs='+',
- type=str,
- help='Linker configuration files to merge.')
- append.set_defaults(func=Merge)
+ append = subparsers.add_parser('merge', help='Merge configurations')
+ append.add_argument(
+ '-o',
+ '--out',
+ required=True,
+ type=str,
+ help='Output linker configuration file to write in protobuf.')
+ append.add_argument(
+ '-i',
+ '--input',
+ nargs='+',
+ type=str,
+ help='Linker configuration files to merge.')
+ append.set_defaults(func=Merge)
- return parser
+ return parser
def main():
- args = GetArgParser().parse_args()
- args.func(args)
+ args = GetArgParser().parse_args()
+ args.func(args)
if __name__ == '__main__':
- main()
+ main()
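
Note (editor): the conv_linker_config.py reformat keeps the existing argparse subcommand dispatch, where each subparser registers its handler via set_defaults(func=...) and main() simply calls args.func(args). A minimal stand-alone sketch of that pattern is shown below; the `merge` handler here only prints and does not depend on the real linker_config_pb2 module.

```
# Stand-alone sketch of the subparser + set_defaults(func=...) dispatch
# used by conv_linker_config.py; the handler is illustrative only.
import argparse

def merge(args):
    print('would merge %s into %s' % (args.input, args.out))

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
merge_cmd = subparsers.add_parser('merge', help='Merge configurations')
merge_cmd.add_argument('-o', '--out', required=True, type=str)
merge_cmd.add_argument('-i', '--input', nargs='+', type=str)
merge_cmd.set_defaults(func=merge)

args = parser.parse_args(['merge', '-o', 'out.pb', '-i', 'a.pb', 'b.pb'])
args.func(args)
```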
diff --git a/scripts/get_clang_version.py b/scripts/get_clang_version.py
index 74bc9de..64d922a 100755
--- a/scripts/get_clang_version.py
+++ b/scripts/get_clang_version.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright (C) 2021 The Android Open Source Project
#
diff --git a/scripts/hiddenapi/generate_hiddenapi_lists.py b/scripts/hiddenapi/generate_hiddenapi_lists.py
index 5ab93d1..35e0948 100755
--- a/scripts/hiddenapi/generate_hiddenapi_lists.py
+++ b/scripts/hiddenapi/generate_hiddenapi_lists.py
@@ -16,8 +16,6 @@
"""Generate API lists for non-SDK API enforcement."""
import argparse
from collections import defaultdict, namedtuple
-import functools
-import os
import re
import sys
@@ -60,15 +58,15 @@
# For example, the max-target-P list is checked in as it was in P,
# but signatures have changes since then. The flag instructs this
# script to skip any entries which do not exist any more.
-FLAG_IGNORE_CONFLICTS = "ignore-conflicts"
+FLAG_IGNORE_CONFLICTS = 'ignore-conflicts'
# Option specified after one of FLAGS_API_LIST to express that all
# APIs within a given set of packages should be assigned the given flag.
-FLAG_PACKAGES = "packages"
+FLAG_PACKAGES = 'packages'
# Option specified after one of FLAGS_API_LIST to indicate an extra
# tag that should be added to the matching APIs.
-FLAG_TAG = "tag"
+FLAG_TAG = 'tag'
# Regex patterns of fields/methods used in serialization. These are
# considered public API despite being hidden.
@@ -84,24 +82,30 @@
# Single regex used to match serialization API. It combines all the
# SERIALIZATION_PATTERNS into a single regular expression.
-SERIALIZATION_REGEX = re.compile(r'.*->(' + '|'.join(SERIALIZATION_PATTERNS) + r')$')
+SERIALIZATION_REGEX = re.compile(r'.*->(' + '|'.join(SERIALIZATION_PATTERNS) +
+ r')$')
# Predicates to be used with filter_apis.
-HAS_NO_API_LIST_ASSIGNED = lambda api, flags: not FLAGS_API_LIST_SET.intersection(flags)
+HAS_NO_API_LIST_ASSIGNED = \
+ lambda api,flags: not FLAGS_API_LIST_SET.intersection(flags)
+
IS_SERIALIZATION = lambda api, flags: SERIALIZATION_REGEX.match(api)
class StoreOrderedOptions(argparse.Action):
- """An argparse action that stores a number of option arguments in the order that
- they were specified.
+ """An argparse action that stores a number of option arguments in the order
+
+ that they were specified.
"""
- def __call__(self, parser, args, values, option_string = None):
+
+ def __call__(self, parser, args, values, option_string=None):
items = getattr(args, self.dest, None)
if items is None:
items = []
items.append([option_string.lstrip('-'), values])
setattr(args, self.dest, items)
+
def get_args():
"""Parses command line arguments.
@@ -110,22 +114,43 @@
"""
parser = argparse.ArgumentParser()
parser.add_argument('--output', required=True)
- parser.add_argument('--csv', nargs='*', default=[], metavar='CSV_FILE',
+ parser.add_argument(
+ '--csv',
+ nargs='*',
+ default=[],
+ metavar='CSV_FILE',
help='CSV files to be merged into output')
for flag in ALL_FLAGS:
- parser.add_argument('--' + flag, dest='ordered_flags', metavar='TXT_FILE',
- action=StoreOrderedOptions, help='lists of entries with flag "' + flag + '"')
- parser.add_argument('--' + FLAG_IGNORE_CONFLICTS, dest='ordered_flags', nargs=0,
- action=StoreOrderedOptions, help='Indicates that only known and otherwise unassigned '
- 'entries should be assign the given flag. Must follow a list of entries and applies '
- 'to the preceding such list.')
- parser.add_argument('--' + FLAG_PACKAGES, dest='ordered_flags', nargs=0,
- action=StoreOrderedOptions, help='Indicates that the previous list of entries '
- 'is a list of packages. All members in those packages will be given the flag. '
- 'Must follow a list of entries and applies to the preceding such list.')
- parser.add_argument('--' + FLAG_TAG, dest='ordered_flags', nargs=1,
- action=StoreOrderedOptions, help='Adds an extra tag to the previous list of entries. '
+ parser.add_argument(
+ '--' + flag,
+ dest='ordered_flags',
+ metavar='TXT_FILE',
+ action=StoreOrderedOptions,
+ help='lists of entries with flag "' + flag + '"')
+ parser.add_argument(
+ '--' + FLAG_IGNORE_CONFLICTS,
+ dest='ordered_flags',
+ nargs=0,
+ action=StoreOrderedOptions,
+ help='Indicates that only known and otherwise unassigned '
+ 'entries should be assigned the given flag. Must follow a list of '
+ 'entries and applies to the preceding such list.')
+ parser.add_argument(
+ '--' + FLAG_PACKAGES,
+ dest='ordered_flags',
+ nargs=0,
+ action=StoreOrderedOptions,
+ help='Indicates that the previous list of entries '
+ 'is a list of packages. All members in those packages will be given '
+ 'the flag. Must follow a list of entries and applies to the preceding '
+ 'such list.')
+ parser.add_argument(
+ '--' + FLAG_TAG,
+ dest='ordered_flags',
+ nargs=1,
+ action=StoreOrderedOptions,
+ help='Adds an extra tag to the previous list of entries. '
'Must follow a list of entries and applies to the preceding such list.')
return parser.parse_args()
@@ -143,9 +168,9 @@
Lines of the file as a list of string.
"""
with open(filename, 'r') as f:
- lines = f.readlines();
- lines = filter(lambda line: not line.startswith('#'), lines)
- lines = map(lambda line: line.strip(), lines)
+ lines = f.readlines()
+ lines = [line for line in lines if not line.startswith('#')]
+ lines = [line.strip() for line in lines]
return set(lines)
@@ -156,7 +181,7 @@
filename (string): Path to the file to be writing into.
lines (list): List of strings to write into the file.
"""
- lines = map(lambda line: line + '\n', lines)
+ lines = [line + '\n' for line in lines]
with open(filename, 'w') as f:
f.writelines(lines)
@@ -170,17 +195,19 @@
Returns:
The package name of the class containing the field/method.
"""
- full_class_name = signature.split(";->")[0]
+ full_class_name = signature.split(';->')[0]
# Example: Landroid/hardware/radio/V1_2/IRadio$Proxy
- if (full_class_name[0] != "L"):
- raise ValueError("Expected to start with 'L': %s" % full_class_name)
+ if full_class_name[0] != 'L':
+ raise ValueError("Expected to start with 'L': %s"
+ % full_class_name)
full_class_name = full_class_name[1:]
# If full_class_name doesn't contain '/', then package_name will be ''.
- package_name = full_class_name.rpartition("/")[0]
+ package_name = full_class_name.rpartition('/')[0]
return package_name.replace('/', '.')
class FlagsDict:
+
def __init__(self):
self._dict_keyset = set()
self._dict = defaultdict(set)
@@ -188,37 +215,43 @@
def _check_entries_set(self, keys_subset, source):
assert isinstance(keys_subset, set)
assert keys_subset.issubset(self._dict_keyset), (
- "Error: {} specifies signatures not present in code:\n"
- "{}"
- "Please visit go/hiddenapi for more information.").format(
- source, "".join(map(lambda x: " " + str(x) + "\n", keys_subset - self._dict_keyset)))
+ 'Error: {} specifies signatures not present in code:\n'
+ '{}'
+ 'Please visit go/hiddenapi for more information.').format(
+ source, ''.join(
+ [' ' + str(x) + '\n' for x in
+ keys_subset - self._dict_keyset]))
def _check_flags_set(self, flags_subset, source):
assert isinstance(flags_subset, set)
assert flags_subset.issubset(ALL_FLAGS_SET), (
- "Error processing: {}\n"
- "The following flags were not recognized: \n"
- "{}\n"
- "Please visit go/hiddenapi for more information.").format(
- source, "\n".join(flags_subset - ALL_FLAGS_SET))
+ 'Error processing: {}\n'
+ 'The following flags were not recognized: \n'
+ '{}\n'
+ 'Please visit go/hiddenapi for more information.').format(
+ source, '\n'.join(flags_subset - ALL_FLAGS_SET))
def filter_apis(self, filter_fn):
"""Returns APIs which match a given predicate.
- This is a helper function which allows to filter on both signatures (keys) and
- flags (values). The built-in filter() invokes the lambda only with dict's keys.
+ This is a helper function which allows filtering on both signatures
+ (keys) and flags (values). The built-in filter() invokes the lambda
+ only with the dict's keys.
Args:
- filter_fn : Function which takes two arguments (signature/flags) and returns a boolean.
+ filter_fn : Function which takes two arguments (signature/flags) and
+ returns a boolean.
Returns:
A set of APIs which match the predicate.
"""
- return set(filter(lambda x: filter_fn(x, self._dict[x]), self._dict_keyset))
+ return {x for x in self._dict_keyset if filter_fn(x, self._dict[x])}
def get_valid_subset_of_unassigned_apis(self, api_subset):
- """Sanitizes a key set input to only include keys which exist in the dictionary
- and have not been assigned any API list flags.
+ """Sanitizes a key set input to only include keys which exist in the
+
+ dictionary and have not been assigned any API list flags.
Args:
entries_subset (set/list): Key set to be sanitized.
@@ -227,7 +260,8 @@
Sanitized key set.
"""
assert isinstance(api_subset, set)
- return api_subset.intersection(self.filter_apis(HAS_NO_API_LIST_ASSIGNED))
+ return api_subset.intersection(
+ self.filter_apis(HAS_NO_API_LIST_ASSIGNED))
def generate_csv(self):
"""Constructs CSV entries from a dictionary.
@@ -235,15 +269,16 @@
Old versions of flags are used to generate the file.
Returns:
- List of lines comprising a CSV file. See "parse_and_merge_csv" for format description.
+ List of lines comprising a CSV file. See "parse_and_merge_csv" for
+ format description.
"""
lines = []
for api in self._dict:
- flags = sorted(self._dict[api])
- lines.append(",".join([api] + flags))
+ flags = sorted(self._dict[api])
+ lines.append(','.join([api] + flags))
return sorted(lines)
- def parse_and_merge_csv(self, csv_lines, source = "<unknown>"):
+ def parse_and_merge_csv(self, csv_lines, source='<unknown>'):
"""Parses CSV entries and merges them into a given dictionary.
The expected CSV format is:
@@ -251,21 +286,20 @@
Args:
csv_lines (list of strings): Lines read from a CSV file.
- source (string): Origin of `csv_lines`. Will be printed in error messages.
-
- Throws:
- AssertionError if parsed flags are invalid.
+ source (string): Origin of `csv_lines`. Will be printed in error
+ messages.
+ Throws: AssertionError if parsed flags are invalid.
"""
# Split CSV lines into arrays of values.
- csv_values = [ line.split(',') for line in csv_lines ]
+ csv_values = [line.split(',') for line in csv_lines]
# Update the full set of API signatures.
- self._dict_keyset.update([ csv[0] for csv in csv_values ])
+ self._dict_keyset.update([csv[0] for csv in csv_values])
# Check that all flags are known.
csv_flags = set()
for csv in csv_values:
- csv_flags.update(csv[1:])
+ csv_flags.update(csv[1:])
self._check_flags_set(csv_flags, source)
# Iterate over all CSV lines, find entry in dict and append flags to it.
@@ -275,47 +309,53 @@
flags.append(FLAG_SDK)
self._dict[csv[0]].update(flags)
- def assign_flag(self, flag, apis, source="<unknown>", tag = None):
+ def assign_flag(self, flag, apis, source='<unknown>', tag=None):
"""Assigns a flag to given subset of entries.
Args:
flag (string): One of ALL_FLAGS.
apis (set): Subset of APIs to receive the flag.
- source (string): Origin of `entries_subset`. Will be printed in error messages.
-
- Throws:
- AssertionError if parsed API signatures of flags are invalid.
+ source (string): Origin of `entries_subset`. Will be printed in
+ error messages.
+ Throws: AssertionError if parsed API signatures of flags are invalid.
"""
# Check that all APIs exist in the dict.
self._check_entries_set(apis, source)
# Check that the flag is known.
- self._check_flags_set(set([ flag ]), source)
+ self._check_flags_set(set([flag]), source)
- # Iterate over the API subset, find each entry in dict and assign the flag to it.
+ # Iterate over the API subset, find each entry in dict and assign the
+ # flag to it.
for api in apis:
self._dict[api].add(flag)
if tag:
self._dict[api].add(tag)
-FlagFile = namedtuple('FlagFile', ('flag', 'file', 'ignore_conflicts', 'packages', 'tag'))
+FlagFile = namedtuple('FlagFile',
+ ('flag', 'file', 'ignore_conflicts', 'packages', 'tag'))
+
def parse_ordered_flags(ordered_flags):
r = []
- currentflag, file, ignore_conflicts, packages, tag = None, None, False, False, None
+ currentflag, file, ignore_conflicts, packages, tag = None, None, False, \
+ False, None
for flag_value in ordered_flags:
flag, value = flag_value[0], flag_value[1]
if flag in ALL_FLAGS_SET:
if currentflag:
- r.append(FlagFile(currentflag, file, ignore_conflicts, packages, tag))
+ r.append(
+ FlagFile(currentflag, file, ignore_conflicts, packages,
+ tag))
ignore_conflicts, packages, tag = False, False, None
currentflag = flag
file = value
else:
if currentflag is None:
- raise argparse.ArgumentError('--%s is only allowed after one of %s' % (
- flag, ' '.join(['--%s' % f for f in ALL_FLAGS_SET])))
+ raise argparse.ArgumentError( #pylint: disable=no-value-for-parameter
+ '--%s is only allowed after one of %s' %
+ (flag, ' '.join(['--%s' % f for f in ALL_FLAGS_SET])))
if flag == FLAG_IGNORE_CONFLICTS:
ignore_conflicts = True
elif flag == FLAG_PACKAGES:
@@ -323,13 +363,12 @@
elif flag == FLAG_TAG:
tag = value[0]
-
if currentflag:
r.append(FlagFile(currentflag, file, ignore_conflicts, packages, tag))
return r
-def main(argv):
+def main(argv): #pylint: disable=unused-argument
# Parse arguments.
args = vars(get_args())
flagfiles = parse_ordered_flags(args['ordered_flags'] or [])
@@ -342,7 +381,7 @@
# contain the full set of APIs. Subsequent additions from text files
# will be able to detect invalid entries, and/or filter all as-yet
# unassigned entries.
- for filename in args["csv"]:
+ for filename in args['csv']:
flags.parse_and_merge_csv(read_lines(filename), filename)
# Combine inputs which do not require any particular order.
@@ -352,24 +391,28 @@
# (2) Merge text files with a known flag into the dictionary.
for info in flagfiles:
if (not info.ignore_conflicts) and (not info.packages):
- flags.assign_flag(info.flag, read_lines(info.file), info.file, info.tag)
+ flags.assign_flag(info.flag, read_lines(info.file), info.file,
+ info.tag)
# Merge text files where conflicts should be ignored.
# This will only assign the given flag if:
# (a) the entry exists, and
# (b) it has not been assigned any other flag.
- # Because of (b), this must run after all strict assignments have been performed.
+ # Because of (b), this must run after all strict assignments have been
+ # performed.
for info in flagfiles:
if info.ignore_conflicts:
- valid_entries = flags.get_valid_subset_of_unassigned_apis(read_lines(info.file))
- flags.assign_flag(info.flag, valid_entries, filename, info.tag)
+ valid_entries = flags.get_valid_subset_of_unassigned_apis(
+ read_lines(info.file))
+ flags.assign_flag(info.flag, valid_entries, filename, info.tag) #pylint: disable=undefined-loop-variable
- # All members in the specified packages will be assigned the appropriate flag.
+ # All members in the specified packages will be assigned the appropriate
+ # flag.
for info in flagfiles:
if info.packages:
packages_needing_list = set(read_lines(info.file))
- should_add_signature_to_list = lambda sig,lists: extract_package(
- sig) in packages_needing_list and not lists
+ should_add_signature_to_list = lambda sig, lists: extract_package(
+ sig) in packages_needing_list and not lists #pylint: disable=cell-var-from-loop
valid_entries = flags.filter_apis(should_add_signature_to_list)
flags.assign_flag(info.flag, valid_entries, info.file, info.tag)
@@ -377,7 +420,8 @@
flags.assign_flag(FLAG_BLOCKED, flags.filter_apis(HAS_NO_API_LIST_ASSIGNED))
# Write output.
- write_lines(args["output"], flags.generate_csv())
+ write_lines(args['output'], flags.generate_csv())
-if __name__ == "__main__":
+
+if __name__ == '__main__':
main(sys.argv)
diff --git a/scripts/hiddenapi/generate_hiddenapi_lists_test.py b/scripts/hiddenapi/generate_hiddenapi_lists_test.py
index b81424b..204de97 100755
--- a/scripts/hiddenapi/generate_hiddenapi_lists_test.py
+++ b/scripts/hiddenapi/generate_hiddenapi_lists_test.py
@@ -15,34 +15,39 @@
# limitations under the License.
"""Unit tests for Hidden API list generation."""
import unittest
-from generate_hiddenapi_lists import *
+from generate_hiddenapi_lists import * # pylint: disable=wildcard-import,unused-wildcard-import
+
class TestHiddenapiListGeneration(unittest.TestCase):
-
def test_filter_apis(self):
# Initialize flags so that A and B are put on the allow list and
# C, D, E are left unassigned. Try filtering for the unassigned ones.
flags = FlagsDict()
- flags.parse_and_merge_csv(['A,' + FLAG_SDK, 'B,' + FLAG_SDK,
- 'C', 'D', 'E'])
+ flags.parse_and_merge_csv(
+ ['A,' + FLAG_SDK, 'B,' + FLAG_SDK, 'C', 'D', 'E']
+ )
filter_set = flags.filter_apis(lambda api, flags: not flags)
self.assertTrue(isinstance(filter_set, set))
- self.assertEqual(filter_set, set([ 'C', 'D', 'E' ]))
+ self.assertEqual(filter_set, set(['C', 'D', 'E']))
def test_get_valid_subset_of_unassigned_keys(self):
# Create flags where only A is unassigned.
flags = FlagsDict()
flags.parse_and_merge_csv(['A,' + FLAG_SDK, 'B', 'C'])
flags.assign_flag(FLAG_UNSUPPORTED, set(['C']))
- self.assertEqual(flags.generate_csv(),
- [ 'A,' + FLAG_SDK, 'B', 'C,' + FLAG_UNSUPPORTED ])
+ self.assertEqual(
+ flags.generate_csv(),
+ ['A,' + FLAG_SDK, 'B', 'C,' + FLAG_UNSUPPORTED],
+ )
# Check three things:
# (1) B is selected as valid unassigned
# (2) A is not selected because it is assigned to the allow list
# (3) D is not selected because it is not a valid key
self.assertEqual(
- flags.get_valid_subset_of_unassigned_apis(set(['A', 'B', 'D'])), set([ 'B' ]))
+ flags.get_valid_subset_of_unassigned_apis(set(['A', 'B', 'D'])),
+ set(['B']),
+ )
def test_parse_and_merge_csv(self):
flags = FlagsDict()
@@ -51,41 +56,48 @@
self.assertEqual(flags.generate_csv(), [])
# Test new additions.
- flags.parse_and_merge_csv([
- 'A,' + FLAG_UNSUPPORTED,
- 'B,' + FLAG_BLOCKED + ',' + FLAG_MAX_TARGET_O,
- 'C,' + FLAG_SDK + ',' + FLAG_SYSTEM_API,
- 'D,' + FLAG_UNSUPPORTED + ',' + FLAG_TEST_API,
- 'E,' + FLAG_BLOCKED + ',' + FLAG_TEST_API,
- ])
- self.assertEqual(flags.generate_csv(), [
- 'A,' + FLAG_UNSUPPORTED,
- 'B,' + FLAG_BLOCKED + "," + FLAG_MAX_TARGET_O,
- 'C,' + FLAG_SDK + ',' + FLAG_SYSTEM_API,
- 'D,' + FLAG_TEST_API + ',' + FLAG_UNSUPPORTED,
- 'E,' + FLAG_BLOCKED + ',' + FLAG_TEST_API,
- ])
+ flags.parse_and_merge_csv(
+ [
+ 'A,' + FLAG_UNSUPPORTED,
+ 'B,' + FLAG_BLOCKED + ',' + FLAG_MAX_TARGET_O,
+ 'C,' + FLAG_SDK + ',' + FLAG_SYSTEM_API,
+ 'D,' + FLAG_UNSUPPORTED + ',' + FLAG_TEST_API,
+ 'E,' + FLAG_BLOCKED + ',' + FLAG_TEST_API,
+ ]
+ )
+ self.assertEqual(
+ flags.generate_csv(),
+ [
+ 'A,' + FLAG_UNSUPPORTED,
+ 'B,' + FLAG_BLOCKED + "," + FLAG_MAX_TARGET_O,
+ 'C,' + FLAG_SDK + ',' + FLAG_SYSTEM_API,
+ 'D,' + FLAG_TEST_API + ',' + FLAG_UNSUPPORTED,
+ 'E,' + FLAG_BLOCKED + ',' + FLAG_TEST_API,
+ ],
+ )
# Test unknown flag.
with self.assertRaises(AssertionError):
- flags.parse_and_merge_csv([ 'Z,foo' ])
+ flags.parse_and_merge_csv(['Z,foo'])
def test_assign_flag(self):
flags = FlagsDict()
flags.parse_and_merge_csv(['A,' + FLAG_SDK, 'B'])
# Test new additions.
- flags.assign_flag(FLAG_UNSUPPORTED, set([ 'A', 'B' ]))
- self.assertEqual(flags.generate_csv(),
- [ 'A,' + FLAG_SDK + "," + FLAG_UNSUPPORTED, 'B,' + FLAG_UNSUPPORTED ])
+ flags.assign_flag(FLAG_UNSUPPORTED, set(['A', 'B']))
+ self.assertEqual(
+ flags.generate_csv(),
+ ['A,' + FLAG_SDK + "," + FLAG_UNSUPPORTED, 'B,' + FLAG_UNSUPPORTED],
+ )
# Test invalid API signature.
with self.assertRaises(AssertionError):
- flags.assign_flag(FLAG_SDK, set([ 'C' ]))
+ flags.assign_flag(FLAG_SDK, set(['C']))
# Test invalid flag.
with self.assertRaises(AssertionError):
- flags.assign_flag('foo', set([ 'A' ]))
+ flags.assign_flag('foo', set(['A']))
def test_extract_package(self):
signature = 'Lcom/foo/bar/Baz;->method1()Lcom/bar/Baz;'
@@ -100,5 +112,6 @@
expected_package = 'com.foo_bar.baz'
self.assertEqual(extract_package(signature), expected_package)
+
if __name__ == '__main__':
unittest.main(verbosity=2)
diff --git a/scripts/hiddenapi/merge_csv.py b/scripts/hiddenapi/merge_csv.py
index a65326c..c17ec25 100755
--- a/scripts/hiddenapi/merge_csv.py
+++ b/scripts/hiddenapi/merge_csv.py
@@ -13,8 +13,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""
-Merge multiple CSV files, possibly with different columns.
+"""Merge multiple CSV files, possibly with different columns.
"""
import argparse
@@ -26,34 +25,52 @@
from zipfile import ZipFile
-args_parser = argparse.ArgumentParser(description='Merge given CSV files into a single one.')
-args_parser.add_argument('--header', help='Comma separated field names; '
- 'if missing determines the header from input files.')
-args_parser.add_argument('--zip_input', help='Treat files as ZIP archives containing CSV files to merge.',
- action="store_true")
-args_parser.add_argument('--key_field', help='The name of the field by which the rows should be sorted. '
- 'Must be in the field names. '
- 'Will be the first field in the output. '
- 'All input files must be sorted by that field.')
-args_parser.add_argument('--output', help='Output file for merged CSV.',
- default='-', type=argparse.FileType('w'))
+args_parser = argparse.ArgumentParser(
+ description='Merge given CSV files into a single one.'
+)
+args_parser.add_argument(
+ '--header',
+ help='Comma separated field names; '
+ 'if missing determines the header from input files.',
+)
+args_parser.add_argument(
+ '--zip_input',
+ help='Treat files as ZIP archives containing CSV files to merge.',
+ action="store_true",
+)
+args_parser.add_argument(
+ '--key_field',
+ help='The name of the field by which the rows should be sorted. '
+ 'Must be in the field names. '
+ 'Will be the first field in the output. '
+ 'All input files must be sorted by that field.',
+)
+args_parser.add_argument(
+ '--output',
+ help='Output file for merged CSV.',
+ default='-',
+ type=argparse.FileType('w'),
+)
args_parser.add_argument('files', nargs=argparse.REMAINDER)
args = args_parser.parse_args()
-def dict_reader(input):
- return csv.DictReader(input, delimiter=',', quotechar='|')
+def dict_reader(csvfile):
+ return csv.DictReader(csvfile, delimiter=',', quotechar='|')
+
csv_readers = []
-if not(args.zip_input):
+if not args.zip_input:
for file in args.files:
csv_readers.append(dict_reader(open(file, 'r')))
else:
for file in args.files:
- with ZipFile(file) as zip:
- for entry in zip.namelist():
+ with ZipFile(file) as zipfile:
+ for entry in zipfile.namelist():
if entry.endswith('.uau'):
- csv_readers.append(dict_reader(io.TextIOWrapper(zip.open(entry, 'r'))))
+ csv_readers.append(
+ dict_reader(io.TextIOWrapper(zipfile.open(entry, 'r')))
+ )
if args.header:
fieldnames = args.header.split(',')
@@ -73,8 +90,8 @@
keyField = args.key_field
if keyField:
assert keyField in fieldnames, (
- "--key_field {} not found, must be one of {}\n").format(
- keyField, ",".join(fieldnames))
+ "--key_field {} not found, must be one of {}\n"
+ ).format(keyField, ",".join(fieldnames))
# Make the key field the first field in the output
keyFieldIndex = fieldnames.index(args.key_field)
fieldnames.insert(0, fieldnames.pop(keyFieldIndex))
@@ -83,11 +100,17 @@
all_rows = heapq.merge(*csv_readers, key=operator.itemgetter(keyField))
# Write all rows from the input files to the output:
-writer = csv.DictWriter(args.output, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL,
- dialect='unix', fieldnames=fieldnames)
+writer = csv.DictWriter(
+ args.output,
+ delimiter=',',
+ quotechar='|',
+ quoting=csv.QUOTE_MINIMAL,
+ dialect='unix',
+ fieldnames=fieldnames,
+)
writer.writeheader()
# Read all the rows from the input and write them to the output in the correct
# order:
for row in all_rows:
- writer.writerow(row)
+ writer.writerow(row)
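
Note (editor): merge_csv.py relies on every input already being sorted by the key field so that heapq.merge() can interleave the readers lazily instead of loading everything into memory. A self-contained sketch of that approach is shown below, using in-memory CSV data; the columns and values are invented.

```
# Sketch of the sorted-merge strategy in merge_csv.py, using StringIO
# inputs instead of files/ZIP entries; the data is illustrative only.
import csv
import heapq
import io
import operator
import sys

a = io.StringIO('signature,flag\nA,blocked\nC,sdk\n')
b = io.StringIO('signature,flag\nB,unsupported\nD,sdk\n')
readers = [csv.DictReader(f, delimiter=',', quotechar='|') for f in (a, b)]

# Each reader is sorted by 'signature', so the merged stream is too.
rows = heapq.merge(*readers, key=operator.itemgetter('signature'))

writer = csv.DictWriter(sys.stdout, fieldnames=['signature', 'flag'],
                        delimiter=',', quotechar='|',
                        quoting=csv.QUOTE_MINIMAL, dialect='unix')
writer.writeheader()
for row in rows:
    writer.writerow(row)
```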
diff --git a/scripts/hiddenapi/signature_patterns.py b/scripts/hiddenapi/signature_patterns.py
index a7c5bb4..0acb2a0 100755
--- a/scripts/hiddenapi/signature_patterns.py
+++ b/scripts/hiddenapi/signature_patterns.py
@@ -13,22 +13,26 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""
-Generate a set of signature patterns from the modular flags generated by a
+"""Generate a set of signature patterns from the modular flags generated by a
bootclasspath_fragment that can be used to select a subset of monolithic flags
against which the modular flags can be compared.
"""
import argparse
import csv
+import sys
-def dict_reader(input):
- return csv.DictReader(input, delimiter=',', quotechar='|', fieldnames=['signature'])
+def dict_reader(csvfile):
+ return csv.DictReader(
+ csvfile, delimiter=',', quotechar='|', fieldnames=['signature']
+ )
+
def produce_patterns_from_file(file):
with open(file, 'r') as f:
return produce_patterns_from_stream(f)
+
def produce_patterns_from_stream(stream):
# Read in all the signatures into a list and remove member names.
patterns = set()
@@ -38,18 +42,26 @@
# Remove the class specific member signature
pieces = text.split(";->")
qualifiedClassName = pieces[0]
- # Remove inner class names as they cannot be separated from the containing outer class.
+ # Remove inner class names as they cannot be separated
+ # from the containing outer class.
pieces = qualifiedClassName.split("$", maxsplit=1)
pattern = pieces[0]
patterns.add(pattern)
- patterns = list(patterns)
+ patterns = list(patterns) #pylint: disable=redefined-variable-type
patterns.sort()
return patterns
+
def main(args):
- args_parser = argparse.ArgumentParser(description='Generate a set of signature patterns that select a subset of monolithic hidden API files.')
- args_parser.add_argument('--flags', help='The stub flags file which contains an entry for every dex member')
+ args_parser = argparse.ArgumentParser(
+ description='Generate a set of signature patterns '
+ 'that select a subset of monolithic hidden API files.'
+ )
+ args_parser.add_argument(
+ '--flags',
+ help='The stub flags file which contains an entry for every dex member',
+ )
args_parser.add_argument('--output', help='Generated signature prefixes')
args = args_parser.parse_args(args)
@@ -62,5 +74,6 @@
outputFile.write(pattern)
outputFile.write("\n")
+
if __name__ == "__main__":
main(sys.argv[1:])
diff --git a/scripts/hiddenapi/signature_patterns_test.py b/scripts/hiddenapi/signature_patterns_test.py
index 0431f45..3babe54 100755
--- a/scripts/hiddenapi/signature_patterns_test.py
+++ b/scripts/hiddenapi/signature_patterns_test.py
@@ -18,21 +18,25 @@
import io
import unittest
-from signature_patterns import *
+from signature_patterns import * #pylint: disable=unused-wildcard-import,wildcard-import
+
class TestGeneratedPatterns(unittest.TestCase):
-
- def produce_patterns_from_string(self, csv):
- with io.StringIO(csv) as f:
+ def produce_patterns_from_string(self, csvdata):
+ with io.StringIO(csvdata) as f:
return produce_patterns_from_stream(f)
def test_generate(self):
- patterns = self.produce_patterns_from_string('''
+ #pylint: disable=line-too-long
+ patterns = self.produce_patterns_from_string(
+ '''
Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V,blocked
Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;,public-api
Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api
Ljava/lang/Object;->toString()Ljava/lang/String;,blocked
-''')
+'''
+ )
+ #pylint: enable=line-too-long
expected = [
"java/lang/Character",
"java/lang/Object",
@@ -40,5 +44,6 @@
]
self.assertEqual(expected, patterns)
+
if __name__ == '__main__':
unittest.main(verbosity=2)
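
Note (editor): the verify_overlaps.py changes below mostly reformat the trie helpers; the docstring of signatureToElements() already spells out how a signature is split into `package:`, `class:` and `member:` elements. The hypothetical, simplified restatement below illustrates that split and ignores the `*`/`**` wildcard handling the real method also supports.

```
# Simplified illustration of the trie-key split described in
# InteriorNode.signatureToElements(); wildcard patterns are not handled.
from itertools import chain

def to_elements(signature):
    cls, _, member = signature.partition(';->')
    parts = cls[1:].split('/')          # drop the leading 'L'
    packages, class_name = parts[:-1], parts[-1]
    classes = class_name.split('$')     # outermost to innermost class
    return list(chain(('package:' + p for p in packages),
                      ('class:' + c for c in classes),
                      ('member:' + member,) if member else ()))

print(to_elements('Ljava/lang/Character$UnicodeScript;'
                  '->of(I)Ljava/lang/Character$UnicodeScript;'))
```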
diff --git a/scripts/hiddenapi/verify_overlaps.py b/scripts/hiddenapi/verify_overlaps.py
index 6432bf1..4cd7e63 100755
--- a/scripts/hiddenapi/verify_overlaps.py
+++ b/scripts/hiddenapi/verify_overlaps.py
@@ -13,8 +13,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""
-Verify that one set of hidden API flags is a subset of another.
+"""Verify that one set of hidden API flags is a subset of another.
"""
import argparse
@@ -22,9 +21,9 @@
import sys
from itertools import chain
+#pylint: disable=line-too-long
class InteriorNode:
- """
- An interior node in a trie.
+ """An interior node in a trie.
Each interior node has a dict that maps from an element of a signature to
either another interior node or a leaf. Each interior node represents either
@@ -52,19 +51,21 @@
Attributes:
nodes: a dict from an element of the signature to the Node/Leaf
- containing the next element/value.
+ containing the next element/value.
"""
+ #pylint: enable=line-too-long
+
def __init__(self):
self.nodes = {}
+ #pylint: disable=line-too-long
def signatureToElements(self, signature):
- """
- Split a signature or a prefix into a number of elements:
+ """Split a signature or a prefix into a number of elements:
1. The packages (excluding the leading L preceding the first package).
2. The class names, from outermost to innermost.
3. The member signature.
-
- e.g. Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;
+ e.g.
+ Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;
will be broken down into these elements:
1. package:java
2. package:lang
@@ -88,19 +89,21 @@
elements = parts[0].split("/")
packages = elements[0:-1]
className = elements[-1]
- if className == "*" or className == "**":
+ if className in ("*" , "**"): #pylint: disable=no-else-return
# Cannot specify a wildcard and target a specific member
if len(member) != 0:
- raise Exception("Invalid signature %s: contains wildcard %s and member signature %s"
- % (signature, className, member[0]))
+ raise Exception(
+ "Invalid signature %s: contains wildcard %s and member " \
+ "signature %s"
+ % (signature, className, member[0]))
wildcard = [className]
# Assemble the parts into a single list, adding prefixes to identify
# the different parts.
# 0 - package:java
# 1 - package:lang
# 2 - *
- return list(chain(map(lambda x : "package:" + x, packages),
- wildcard))
+ return list(
+ chain(["package:" + x for x in packages], wildcard))
else:
# Split the class name into outer / inner classes
# 0 - Character
@@ -113,13 +116,16 @@
# 2 - class:Character
# 3 - class:UnicodeScript
# 4 - member:of(I)Ljava/lang/Character$UnicodeScript;
- return list(chain(map(lambda x : "package:" + x, packages),
- map(lambda x : "class:" + x, classes),
- map(lambda x : "member:" + x, member)))
+ return list(
+ chain(
+ ["package:" + x for x in packages],
+ ["class:" + x for x in classes],
+ ["member:" + x for x in member]))
+ #pylint: enable=line-too-long
def add(self, signature, value):
- """
- Associate the value with the specific signature.
+ """Associate the value with the specific signature.
+
:param signature: the member signature
:param value: the value to associated with the signature
:return: n/a
@@ -132,21 +138,22 @@
if element in node.nodes:
node = node.nodes[element]
else:
- next = InteriorNode()
- node.nodes[element] = next
- node = next
+ next_node = InteriorNode()
+ node.nodes[element] = next_node
+ node = next_node
# Add a Leaf containing the value and associate it with the member
# signature within the class.
lastElement = elements[-1]
if not lastElement.startswith("member:"):
- raise Exception("Invalid signature: %s, does not identify a specific member" % signature)
+ raise Exception(
+ "Invalid signature: %s, does not identify a specific member" %
+ signature)
if lastElement in node.nodes:
raise Exception("Duplicate signature: %s" % signature)
node.nodes[lastElement] = Leaf(value)
def getMatchingRows(self, pattern):
- """
- Get the values (plural) associated with the pattern.
+ """Get the values (plural) associated with the pattern.
e.g. If the pattern is a full signature then this will return a list
containing the value associated with that signature.
@@ -175,13 +182,13 @@
elements = self.signatureToElements(pattern)
node = self
# Include all values from this node and all its children.
- selector = lambda x : True
+ selector = lambda x: True
lastElement = elements[-1]
- if lastElement == "*" or lastElement == "**":
+ if lastElement in ("*", "**"):
elements = elements[:-1]
if lastElement == "*":
# Do not include values from sub-packages.
- selector = lambda x : not x.startswith("package:")
+ selector = lambda x: not x.startswith("package:")
for element in elements:
if element in node.nodes:
node = node.nodes[element]
@@ -190,19 +197,18 @@
return chain.from_iterable(node.values(selector))
def values(self, selector):
- """
- :param selector: a function that can be applied to a key in the nodes
+ """:param selector: a function that can be applied to a key in the nodes
attribute to determine whether to return its values.
- :return: A list of iterables of all the values associated with this
- node and its children.
+
+ :return: A list of iterables of all the values associated with
+ this node and its children.
"""
values = []
self.appendValues(values, selector)
return values
def appendValues(self, values, selector):
- """
- Append the values associated with this node and its children to the
+ """Append the values associated with this node and its children to the
list.
For each item (key, child) in nodes the child node's values are returned
@@ -216,105 +222,116 @@
"""
for key, node in self.nodes.items():
if selector(key):
- node.appendValues(values, lambda x : True)
+ node.appendValues(values, lambda x: True)
+
class Leaf:
- """
- A leaf of the trie
+ """A leaf of the trie
Attributes:
value: the value associated with this leaf.
"""
+
def __init__(self, value):
self.value = value
- def values(self, selector):
- """
- :return: A list of a list of the value associated with this node.
+ def values(self, selector): #pylint: disable=unused-argument
+ """:return: A list of a list of the value associated with this node.
"""
return [[self.value]]
- def appendValues(self, values, selector):
- """
- Appends a list of the value associated with this node to the list.
+ def appendValues(self, values, selector): #pylint: disable=unused-argument
+ """Appends a list of the value associated with this node to the list.
+
:param values: a list of a iterables of values.
"""
values.append([self.value])
-def dict_reader(input):
- return csv.DictReader(input, delimiter=',', quotechar='|', fieldnames=['signature'])
+
+def dict_reader(csvfile):
+ return csv.DictReader(
+ csvfile, delimiter=",", quotechar="|", fieldnames=["signature"])
+
def read_flag_trie_from_file(file):
- with open(file, 'r') as stream:
+ with open(file, "r") as stream:
return read_flag_trie_from_stream(stream)
+
def read_flag_trie_from_stream(stream):
trie = InteriorNode()
reader = dict_reader(stream)
for row in reader:
- signature = row['signature']
+ signature = row["signature"]
trie.add(signature, row)
return trie
-def extract_subset_from_monolithic_flags_as_dict_from_file(monolithicTrie, patternsFile):
- """
- Extract a subset of flags from the dict containing all the monolithic flags.
+
+def extract_subset_from_monolithic_flags_as_dict_from_file(
+ monolithicTrie, patternsFile):
+ """Extract a subset of flags from the dict containing all the monolithic
+ flags.
:param monolithicFlagsDict: the dict containing all the monolithic flags.
:param patternsFile: a file containing a list of signature patterns that
define the subset.
:return: the dict from signature to row.
"""
- with open(patternsFile, 'r') as stream:
- return extract_subset_from_monolithic_flags_as_dict_from_stream(monolithicTrie, stream)
+ with open(patternsFile, "r") as stream:
+ return extract_subset_from_monolithic_flags_as_dict_from_stream(
+ monolithicTrie, stream)
-def extract_subset_from_monolithic_flags_as_dict_from_stream(monolithicTrie, stream):
- """
- Extract a subset of flags from the trie containing all the monolithic flags.
+
+def extract_subset_from_monolithic_flags_as_dict_from_stream(
+ monolithicTrie, stream):
+ """Extract a subset of flags from the trie containing all the monolithic
+ flags.
:param monolithicTrie: the trie containing all the monolithic flags.
:param stream: a stream containing a list of signature patterns that define
the subset.
:return: the dict from signature to row.
"""
- dict = {}
+ dict_signature_to_row = {}
for pattern in stream:
pattern = pattern.rstrip()
rows = monolithicTrie.getMatchingRows(pattern)
for row in rows:
- signature = row['signature']
- dict[signature] = row
- return dict
+ signature = row["signature"]
+ dict_signature_to_row[signature] = row
+ return dict_signature_to_row
+
def read_signature_csv_from_stream_as_dict(stream):
- """
- Read the csv contents from the stream into a dict. The first column is assumed to be the
- signature and used as the key. The whole row is stored as the value.
+ """Read the csv contents from the stream into a dict. The first column is
+ assumed to be the signature and used as the key.
+ The whole row is stored as the value.
:param stream: the csv contents to read
:return: the dict from signature to row.
"""
- dict = {}
+ dict_signature_to_row = {}
reader = dict_reader(stream)
for row in reader:
- signature = row['signature']
- dict[signature] = row
- return dict
+ signature = row["signature"]
+ dict_signature_to_row[signature] = row
+ return dict_signature_to_row
+
def read_signature_csv_from_file_as_dict(csvFile):
- """
- Read the csvFile into a dict. The first column is assumed to be the
- signature and used as the key. The whole row is stored as the value.
+ """Read the csvFile into a dict. The first column is assumed to be the
+ signature and used as the key.
+ The whole row is stored as the value.
:param csvFile: the csv file to read
:return: the dict from signature to row.
"""
- with open(csvFile, 'r') as f:
+ with open(csvFile, "r") as f:
return read_signature_csv_from_stream_as_dict(f)
+
def compare_signature_flags(monolithicFlagsDict, modularFlagsDict):
- """
- Compare the signature flags between the two dicts.
+ """Compare the signature flags between the two dicts.
:param monolithicFlagsDict: the dict containing the subset of the monolithic
flags that should be equal to the modular flags.
@@ -327,7 +344,8 @@
mismatchingSignatures = []
# Create a sorted set of all the signatures from both the monolithic and
# modular dicts.
- allSignatures = sorted(set(chain(monolithicFlagsDict.keys(), modularFlagsDict.keys())))
+ allSignatures = sorted(
+ set(chain(monolithicFlagsDict.keys(), modularFlagsDict.keys())))
for signature in allSignatures:
monolithicRow = monolithicFlagsDict.get(signature, {})
monolithicFlags = monolithicRow.get(None, [])
@@ -337,13 +355,21 @@
else:
modularFlags = ["blocked"]
if monolithicFlags != modularFlags:
- mismatchingSignatures.append((signature, modularFlags, monolithicFlags))
+ mismatchingSignatures.append(
+ (signature, modularFlags, monolithicFlags))
return mismatchingSignatures
+
def main(argv):
- args_parser = argparse.ArgumentParser(description='Verify that sets of hidden API flags are each a subset of the monolithic flag file.')
- args_parser.add_argument('monolithicFlags', help='The monolithic flag file')
- args_parser.add_argument('modularFlags', nargs=argparse.REMAINDER, help='Flags produced by individual bootclasspath_fragment modules')
+ args_parser = argparse.ArgumentParser(
+ description="Verify that sets of hidden API flags are each a subset of "
+ "the monolithic flag file."
+ )
+ args_parser.add_argument("monolithicFlags", help="The monolithic flag file")
+ args_parser.add_argument(
+ "modularFlags",
+ nargs=argparse.REMAINDER,
+ help="Flags produced by individual bootclasspath_fragment modules")
args = args_parser.parse_args(argv[1:])
# Read in all the flags into the trie
@@ -358,9 +384,13 @@
parts = modularPair.split(":")
modularFlagsPath = parts[0]
modularPatternsPath = parts[1]
- modularFlagsDict = read_signature_csv_from_file_as_dict(modularFlagsPath)
- monolithicFlagsSubsetDict = extract_subset_from_monolithic_flags_as_dict_from_file(monolithicTrie, modularPatternsPath)
- mismatchingSignatures = compare_signature_flags(monolithicFlagsSubsetDict, modularFlagsDict)
+ modularFlagsDict = read_signature_csv_from_file_as_dict(
+ modularFlagsPath)
+ monolithicFlagsSubsetDict = \
+ extract_subset_from_monolithic_flags_as_dict_from_file(
+ monolithicTrie, modularPatternsPath)
+ mismatchingSignatures = compare_signature_flags(
+ monolithicFlagsSubsetDict, modularFlagsDict)
if mismatchingSignatures:
failed = True
print("ERROR: Hidden API flags are inconsistent:")
@@ -369,11 +399,12 @@
for mismatch in mismatchingSignatures:
signature = mismatch[0]
print()
- print("< " + ",".join([signature]+ mismatch[1]))
- print("> " + ",".join([signature]+ mismatch[2]))
+ print("< " + ",".join([signature] + mismatch[1]))
+ print("> " + ",".join([signature] + mismatch[2]))
if failed:
sys.exit(1)
+
if __name__ == "__main__":
main(sys.argv)
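
For reference, a minimal usage sketch of the signature-trie helpers reformatted above (assuming `verify_overlaps.py` is importable, as the unit tests below assume; the CSV rows here mirror the test data and are purely illustrative):

```
import io

from verify_overlaps import (
    extract_subset_from_monolithic_flags_as_dict_from_stream,
    read_flag_trie_from_stream,
)

# Each row is "<signature>,<flag>[,<flag>...]"; the extra columns end up under
# the None key of the row dict produced by dict_reader().
csv_data = """\
Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api
Ljava/lang/Character;->serialVersionUID:J,sdk
"""

with io.StringIO(csv_data) as f:
    trie = read_flag_trie_from_stream(f)

# 'java/lang/*' matches members of classes in java/lang but not sub-packages;
# 'java/**' would include sub-packages as well.
with io.StringIO('java/lang/*\n') as patterns:
    subset = extract_subset_from_monolithic_flags_as_dict_from_stream(
        trie, patterns)

for signature, row in subset.items():
    print(signature, row[None])
```
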
diff --git a/scripts/hiddenapi/verify_overlaps_test.py b/scripts/hiddenapi/verify_overlaps_test.py
index 00c0611..22a1cdf 100755
--- a/scripts/hiddenapi/verify_overlaps_test.py
+++ b/scripts/hiddenapi/verify_overlaps_test.py
@@ -13,12 +13,12 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
"""Unit tests for verify_overlaps_test.py."""
import io
import unittest
-from verify_overlaps import *
+from verify_overlaps import * #pylint: disable=unused-wildcard-import,wildcard-import
+
class TestSignatureToElements(unittest.TestCase):
@@ -34,8 +34,10 @@
'class:1',
'member:<init>()V',
]
- self.assertEqual(expected, self.signatureToElements(
- "Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V"))
+ self.assertEqual(
+ expected,
+ self.signatureToElements(
+ 'Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V'))
def test_signatureToElements_2(self):
expected = [
@@ -44,8 +46,9 @@
'class:Object',
'member:hashCode()I',
]
- self.assertEqual(expected, self.signatureToElements(
- "Ljava/lang/Object;->hashCode()I"))
+ self.assertEqual(
+ expected,
+ self.signatureToElements('Ljava/lang/Object;->hashCode()I'))
def test_signatureToElements_3(self):
expected = [
@@ -56,39 +59,46 @@
'class:ExternalSyntheticLambda0',
'member:<init>(Ljava/lang/CharSequence;)V',
]
- self.assertEqual(expected, self.signatureToElements(
- "Ljava/lang/CharSequence$$ExternalSyntheticLambda0;"
- "-><init>(Ljava/lang/CharSequence;)V"))
+ self.assertEqual(
+ expected,
+ self.signatureToElements(
+ 'Ljava/lang/CharSequence$$ExternalSyntheticLambda0;'
+ '-><init>(Ljava/lang/CharSequence;)V'))
+#pylint: disable=line-too-long
class TestDetectOverlaps(unittest.TestCase):
- def read_flag_trie_from_string(self, csv):
- with io.StringIO(csv) as f:
+ def read_flag_trie_from_string(self, csvdata):
+ with io.StringIO(csvdata) as f:
return read_flag_trie_from_stream(f)
- def read_signature_csv_from_string_as_dict(self, csv):
- with io.StringIO(csv) as f:
+ def read_signature_csv_from_string_as_dict(self, csvdata):
+ with io.StringIO(csvdata) as f:
return read_signature_csv_from_stream_as_dict(f)
- def extract_subset_from_monolithic_flags_as_dict_from_string(self, monolithic, patterns):
+ def extract_subset_from_monolithic_flags_as_dict_from_string(
+ self, monolithic, patterns):
with io.StringIO(patterns) as f:
- return extract_subset_from_monolithic_flags_as_dict_from_stream(monolithic, f)
+ return extract_subset_from_monolithic_flags_as_dict_from_stream(
+ monolithic, f)
- extractInput = '''
+ extractInput = """
Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api
Ljava/lang/Object;->toString()Ljava/lang/String;,blocked
Ljava/util/zip/ZipFile;-><clinit>()V,blocked
Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;,blocked
Ljava/lang/Character;->serialVersionUID:J,sdk
Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V,blocked
-'''
+"""
def test_extract_subset_signature(self):
- monolithic = self.read_flag_trie_from_string(TestDetectOverlaps.extractInput)
+ monolithic = self.read_flag_trie_from_string(
+ TestDetectOverlaps.extractInput)
patterns = 'Ljava/lang/Object;->hashCode()I'
- subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(monolithic, patterns)
+ subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
+ monolithic, patterns)
expected = {
'Ljava/lang/Object;->hashCode()I': {
None: ['public-api', 'system-api', 'test-api'],
@@ -98,11 +108,13 @@
self.assertEqual(expected, subset)
def test_extract_subset_class(self):
- monolithic = self.read_flag_trie_from_string(TestDetectOverlaps.extractInput)
+ monolithic = self.read_flag_trie_from_string(
+ TestDetectOverlaps.extractInput)
patterns = 'java/lang/Object'
- subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(monolithic, patterns)
+ subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
+ monolithic, patterns)
expected = {
'Ljava/lang/Object;->hashCode()I': {
None: ['public-api', 'system-api', 'test-api'],
@@ -116,16 +128,20 @@
self.assertEqual(expected, subset)
def test_extract_subset_outer_class(self):
- monolithic = self.read_flag_trie_from_string(TestDetectOverlaps.extractInput)
+ monolithic = self.read_flag_trie_from_string(
+ TestDetectOverlaps.extractInput)
patterns = 'java/lang/Character'
- subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(monolithic, patterns)
+ subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
+ monolithic, patterns)
expected = {
- 'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;': {
- None: ['blocked'],
- 'signature': 'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;',
- },
+ 'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;':
+ {
+ None: ['blocked'],
+ 'signature':
+ 'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;',
+ },
'Ljava/lang/Character;->serialVersionUID:J': {
None: ['sdk'],
'signature': 'Ljava/lang/Character;->serialVersionUID:J',
@@ -134,30 +150,38 @@
self.assertEqual(expected, subset)
def test_extract_subset_nested_class(self):
- monolithic = self.read_flag_trie_from_string(TestDetectOverlaps.extractInput)
+ monolithic = self.read_flag_trie_from_string(
+ TestDetectOverlaps.extractInput)
patterns = 'java/lang/Character$UnicodeScript'
- subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(monolithic, patterns)
+ subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
+ monolithic, patterns)
expected = {
- 'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;': {
- None: ['blocked'],
- 'signature': 'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;',
- },
+ 'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;':
+ {
+ None: ['blocked'],
+ 'signature':
+ 'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;',
+ },
}
self.assertEqual(expected, subset)
def test_extract_subset_package(self):
- monolithic = self.read_flag_trie_from_string(TestDetectOverlaps.extractInput)
+ monolithic = self.read_flag_trie_from_string(
+ TestDetectOverlaps.extractInput)
patterns = 'java/lang/*'
- subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(monolithic, patterns)
+ subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
+ monolithic, patterns)
expected = {
- 'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;': {
- None: ['blocked'],
- 'signature': 'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;',
- },
+ 'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;':
+ {
+ None: ['blocked'],
+ 'signature':
+ 'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;',
+ },
'Ljava/lang/Character;->serialVersionUID:J': {
None: ['sdk'],
'signature': 'Ljava/lang/Character;->serialVersionUID:J',
@@ -178,16 +202,20 @@
self.assertEqual(expected, subset)
def test_extract_subset_recursive_package(self):
- monolithic = self.read_flag_trie_from_string(TestDetectOverlaps.extractInput)
+ monolithic = self.read_flag_trie_from_string(
+ TestDetectOverlaps.extractInput)
patterns = 'java/**'
- subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(monolithic, patterns)
+ subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
+ monolithic, patterns)
expected = {
- 'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;': {
- None: ['blocked'],
- 'signature': 'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;',
- },
+ 'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;':
+ {
+ None: ['blocked'],
+ 'signature':
+ 'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;',
+ },
'Ljava/lang/Character;->serialVersionUID:J': {
None: ['sdk'],
'signature': 'Ljava/lang/Character;->serialVersionUID:J',
@@ -212,47 +240,53 @@
self.assertEqual(expected, subset)
def test_extract_subset_invalid_pattern_wildcard_and_member(self):
- monolithic = self.read_flag_trie_from_string(TestDetectOverlaps.extractInput)
+ monolithic = self.read_flag_trie_from_string(
+ TestDetectOverlaps.extractInput)
patterns = 'Ljava/lang/*;->hashCode()I'
with self.assertRaises(Exception) as context:
- self.extract_subset_from_monolithic_flags_as_dict_from_string(monolithic, patterns)
- self.assertTrue("contains wildcard * and member signature hashCode()I" in str(context.exception))
+ self.extract_subset_from_monolithic_flags_as_dict_from_string(
+ monolithic, patterns)
+ self.assertTrue('contains wildcard * and member signature hashCode()I'
+ in str(context.exception))
def test_read_trie_duplicate(self):
with self.assertRaises(Exception) as context:
- self.read_flag_trie_from_string('''
+ self.read_flag_trie_from_string("""
Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api
Ljava/lang/Object;->hashCode()I,blocked
-''')
- self.assertTrue("Duplicate signature: Ljava/lang/Object;->hashCode()I" in str(context.exception))
+""")
+ self.assertTrue('Duplicate signature: Ljava/lang/Object;->hashCode()I'
+ in str(context.exception))
def test_read_trie_missing_member(self):
with self.assertRaises(Exception) as context:
- self.read_flag_trie_from_string('''
+ self.read_flag_trie_from_string("""
Ljava/lang/Object,public-api,system-api,test-api
-''')
- self.assertTrue("Invalid signature: Ljava/lang/Object, does not identify a specific member" in str(context.exception))
+""")
+ self.assertTrue(
+ 'Invalid signature: Ljava/lang/Object, does not identify a specific member'
+ in str(context.exception))
def test_match(self):
- monolithic = self.read_signature_csv_from_string_as_dict('''
+ monolithic = self.read_signature_csv_from_string_as_dict("""
Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api
-''')
- modular = self.read_signature_csv_from_string_as_dict('''
+""")
+ modular = self.read_signature_csv_from_string_as_dict("""
Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api
-''')
+""")
mismatches = compare_signature_flags(monolithic, modular)
expected = []
self.assertEqual(expected, mismatches)
def test_mismatch_overlapping_flags(self):
- monolithic = self.read_signature_csv_from_string_as_dict('''
+ monolithic = self.read_signature_csv_from_string_as_dict("""
Ljava/lang/Object;->toString()Ljava/lang/String;,public-api
-''')
- modular = self.read_signature_csv_from_string_as_dict('''
+""")
+ modular = self.read_signature_csv_from_string_as_dict("""
Ljava/lang/Object;->toString()Ljava/lang/String;,public-api,system-api,test-api
-''')
+""")
mismatches = compare_signature_flags(monolithic, modular)
expected = [
(
@@ -263,14 +297,13 @@
]
self.assertEqual(expected, mismatches)
-
def test_mismatch_monolithic_blocked(self):
- monolithic = self.read_signature_csv_from_string_as_dict('''
+ monolithic = self.read_signature_csv_from_string_as_dict("""
Ljava/lang/Object;->toString()Ljava/lang/String;,blocked
-''')
- modular = self.read_signature_csv_from_string_as_dict('''
+""")
+ modular = self.read_signature_csv_from_string_as_dict("""
Ljava/lang/Object;->toString()Ljava/lang/String;,public-api,system-api,test-api
-''')
+""")
mismatches = compare_signature_flags(monolithic, modular)
expected = [
(
@@ -282,12 +315,12 @@
self.assertEqual(expected, mismatches)
def test_mismatch_modular_blocked(self):
- monolithic = self.read_signature_csv_from_string_as_dict('''
+ monolithic = self.read_signature_csv_from_string_as_dict("""
Ljava/lang/Object;->toString()Ljava/lang/String;,public-api,system-api,test-api
-''')
- modular = self.read_signature_csv_from_string_as_dict('''
+""")
+ modular = self.read_signature_csv_from_string_as_dict("""
Ljava/lang/Object;->toString()Ljava/lang/String;,blocked
-''')
+""")
mismatches = compare_signature_flags(monolithic, modular)
expected = [
(
@@ -300,9 +333,9 @@
def test_match_treat_missing_from_modular_as_blocked(self):
monolithic = self.read_signature_csv_from_string_as_dict('')
- modular = self.read_signature_csv_from_string_as_dict('''
+ modular = self.read_signature_csv_from_string_as_dict("""
Ljava/lang/Object;->toString()Ljava/lang/String;,public-api,system-api,test-api
-''')
+""")
mismatches = compare_signature_flags(monolithic, modular)
expected = [
(
@@ -314,9 +347,9 @@
self.assertEqual(expected, mismatches)
def test_mismatch_treat_missing_from_modular_as_blocked(self):
- monolithic = self.read_signature_csv_from_string_as_dict('''
+ monolithic = self.read_signature_csv_from_string_as_dict("""
Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api
-''')
+""")
modular = {}
mismatches = compare_signature_flags(monolithic, modular)
expected = [
@@ -329,13 +362,14 @@
self.assertEqual(expected, mismatches)
def test_blocked_missing_from_modular(self):
- monolithic = self.read_signature_csv_from_string_as_dict('''
+ monolithic = self.read_signature_csv_from_string_as_dict("""
Ljava/lang/Object;->hashCode()I,blocked
-''')
+""")
modular = {}
mismatches = compare_signature_flags(monolithic, modular)
expected = []
self.assertEqual(expected, mismatches)
+#pylint: enable=line-too-long
if __name__ == '__main__':
unittest.main(verbosity=2)
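
The mismatch reporting that these tests drive can also be exercised directly; a small illustrative sketch (the flag values are made up) that prints the same `<`/`>` lines as `main()` does for each mismatch:

```
import io

from verify_overlaps import (
    compare_signature_flags,
    read_signature_csv_from_stream_as_dict,
)

monolithic = read_signature_csv_from_stream_as_dict(io.StringIO(
    'Ljava/lang/Object;->toString()Ljava/lang/String;,blocked\n'))
modular = read_signature_csv_from_stream_as_dict(io.StringIO(
    'Ljava/lang/Object;->toString()Ljava/lang/String;,public-api\n'))

# Each mismatch is a (signature, modular flags, monolithic flags) tuple.
for signature, modular_flags, monolithic_flags in compare_signature_flags(
        monolithic, modular):
    print('< ' + ','.join([signature] + modular_flags))
    print('> ' + ','.join([signature] + monolithic_flags))
```
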
diff --git a/scripts/manifest_check.py b/scripts/manifest_check.py
index 4ef4399..71fe358 100755
--- a/scripts/manifest_check.py
+++ b/scripts/manifest_check.py
@@ -25,7 +25,6 @@
import sys
from xml.dom import minidom
-
from manifest import android_ns
from manifest import get_children_with_tag
from manifest import parse_manifest
@@ -33,49 +32,61 @@
class ManifestMismatchError(Exception):
- pass
+ pass
def parse_args():
- """Parse commandline arguments."""
+ """Parse commandline arguments."""
- parser = argparse.ArgumentParser()
- parser.add_argument('--uses-library', dest='uses_libraries',
- action='append',
- help='specify uses-library entries known to the build system')
- parser.add_argument('--optional-uses-library',
- dest='optional_uses_libraries',
- action='append',
- help='specify uses-library entries known to the build system with required:false')
- parser.add_argument('--enforce-uses-libraries',
- dest='enforce_uses_libraries',
- action='store_true',
- help='check the uses-library entries known to the build system against the manifest')
- parser.add_argument('--enforce-uses-libraries-relax',
- dest='enforce_uses_libraries_relax',
- action='store_true',
- help='do not fail immediately, just save the error message to file')
- parser.add_argument('--enforce-uses-libraries-status',
- dest='enforce_uses_libraries_status',
- help='output file to store check status (error message)')
- parser.add_argument('--extract-target-sdk-version',
- dest='extract_target_sdk_version',
- action='store_true',
- help='print the targetSdkVersion from the manifest')
- parser.add_argument('--dexpreopt-config',
- dest='dexpreopt_configs',
- action='append',
- help='a paths to a dexpreopt.config of some library')
- parser.add_argument('--aapt',
- dest='aapt',
- help='path to aapt executable')
- parser.add_argument('--output', '-o', dest='output', help='output AndroidManifest.xml file')
- parser.add_argument('input', help='input AndroidManifest.xml file')
- return parser.parse_args()
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '--uses-library',
+ dest='uses_libraries',
+ action='append',
+ help='specify uses-library entries known to the build system')
+ parser.add_argument(
+ '--optional-uses-library',
+ dest='optional_uses_libraries',
+ action='append',
+ help='specify uses-library entries known to the build system with '
+ 'required:false'
+ )
+ parser.add_argument(
+ '--enforce-uses-libraries',
+ dest='enforce_uses_libraries',
+ action='store_true',
+ help='check the uses-library entries known to the build system against '
+ 'the manifest'
+ )
+ parser.add_argument(
+ '--enforce-uses-libraries-relax',
+ dest='enforce_uses_libraries_relax',
+ action='store_true',
+ help='do not fail immediately, just save the error message to file')
+ parser.add_argument(
+ '--enforce-uses-libraries-status',
+ dest='enforce_uses_libraries_status',
+ help='output file to store check status (error message)')
+ parser.add_argument(
+ '--extract-target-sdk-version',
+ dest='extract_target_sdk_version',
+ action='store_true',
+ help='print the targetSdkVersion from the manifest')
+ parser.add_argument(
+ '--dexpreopt-config',
+ dest='dexpreopt_configs',
+ action='append',
+        help='a path to a dexpreopt.config of some library')
+ parser.add_argument('--aapt', dest='aapt', help='path to aapt executable')
+ parser.add_argument(
+ '--output', '-o', dest='output', help='output AndroidManifest.xml file')
+ parser.add_argument('input', help='input AndroidManifest.xml file')
+ return parser.parse_args()
def enforce_uses_libraries(manifest, required, optional, relax, is_apk, path):
- """Verify that the <uses-library> tags in the manifest match those provided
+ """Verify that the <uses-library> tags in the manifest match those provided
+
by the build system.
Args:
@@ -84,274 +95,294 @@
optional: optional libs known to the build system
relax: if true, suppress error on mismatch and just write it to file
is_apk: if the manifest comes from an APK or an XML file
- """
- if is_apk:
- manifest_required, manifest_optional, tags = extract_uses_libs_apk(manifest)
- else:
- manifest_required, manifest_optional, tags = extract_uses_libs_xml(manifest)
+ """
+ if is_apk:
+ manifest_required, manifest_optional, tags = extract_uses_libs_apk(
+ manifest)
+ else:
+ manifest_required, manifest_optional, tags = extract_uses_libs_xml(
+ manifest)
- # Trim namespace component. Normally Soong does that automatically when it
- # handles module names specified in Android.bp properties. However not all
- # <uses-library> entries in the manifest correspond to real modules: some of
- # the optional libraries may be missing at build time. Therefor this script
- # accepts raw module names as spelled in Android.bp/Amdroid.mk and trims the
- # optional namespace part manually.
- required = trim_namespace_parts(required)
- optional = trim_namespace_parts(optional)
+ # Trim namespace component. Normally Soong does that automatically when it
+ # handles module names specified in Android.bp properties. However not all
+ # <uses-library> entries in the manifest correspond to real modules: some of
+    # the optional libraries may be missing at build time. Therefore this
+    # script accepts raw module names as spelled in Android.bp/Android.mk and
+    # trims the optional namespace part manually.
+ required = trim_namespace_parts(required)
+ optional = trim_namespace_parts(optional)
- if manifest_required == required and manifest_optional == optional:
- return None
+ if manifest_required == required and manifest_optional == optional:
+ return None
- errmsg = ''.join([
- 'mismatch in the <uses-library> tags between the build system and the '
- 'manifest:\n',
- '\t- required libraries in build system: [%s]\n' % ', '.join(required),
- '\t vs. in the manifest: [%s]\n' % ', '.join(manifest_required),
- '\t- optional libraries in build system: [%s]\n' % ', '.join(optional),
- '\t vs. in the manifest: [%s]\n' % ', '.join(manifest_optional),
- '\t- tags in the manifest (%s):\n' % path,
- '\t\t%s\n' % '\t\t'.join(tags),
- 'note: the following options are available:\n',
- '\t- to temporarily disable the check on command line, rebuild with ',
- 'RELAX_USES_LIBRARY_CHECK=true (this will set compiler filter "verify" ',
- 'and disable AOT-compilation in dexpreopt)\n',
- '\t- to temporarily disable the check for the whole product, set ',
- 'PRODUCT_BROKEN_VERIFY_USES_LIBRARIES := true in the product makefiles\n',
- '\t- to fix the check, make build system properties coherent with the '
- 'manifest\n',
- '\t- see build/make/Changes.md for details\n'])
+ #pylint: disable=line-too-long
+ errmsg = ''.join([
+ 'mismatch in the <uses-library> tags between the build system and the '
+ 'manifest:\n',
+ '\t- required libraries in build system: [%s]\n' % ', '.join(required),
+ '\t vs. in the manifest: [%s]\n' %
+ ', '.join(manifest_required),
+ '\t- optional libraries in build system: [%s]\n' % ', '.join(optional),
+ '\t vs. in the manifest: [%s]\n' %
+ ', '.join(manifest_optional),
+ '\t- tags in the manifest (%s):\n' % path,
+ '\t\t%s\n' % '\t\t'.join(tags),
+ 'note: the following options are available:\n',
+ '\t- to temporarily disable the check on command line, rebuild with ',
+ 'RELAX_USES_LIBRARY_CHECK=true (this will set compiler filter "verify" ',
+ 'and disable AOT-compilation in dexpreopt)\n',
+ '\t- to temporarily disable the check for the whole product, set ',
+ 'PRODUCT_BROKEN_VERIFY_USES_LIBRARIES := true in the product makefiles\n',
+ '\t- to fix the check, make build system properties coherent with the '
+ 'manifest\n', '\t- see build/make/Changes.md for details\n'
+ ])
+ #pylint: enable=line-too-long
- if not relax:
- raise ManifestMismatchError(errmsg)
+ if not relax:
+ raise ManifestMismatchError(errmsg)
- return errmsg
+ return errmsg
-MODULE_NAMESPACE = re.compile("^//[^:]+:")
+MODULE_NAMESPACE = re.compile('^//[^:]+:')
+
def trim_namespace_parts(modules):
- """Trim the namespace part of each module, if present. Leave only the name."""
+ """Trim the namespace part of each module, if present.
- trimmed = []
- for module in modules:
- trimmed.append(MODULE_NAMESPACE.sub('', module))
- return trimmed
+ Leave only the name.
+ """
+
+ trimmed = []
+ for module in modules:
+ trimmed.append(MODULE_NAMESPACE.sub('', module))
+ return trimmed
def extract_uses_libs_apk(badging):
- """Extract <uses-library> tags from the manifest of an APK."""
+ """Extract <uses-library> tags from the manifest of an APK."""
- pattern = re.compile("^uses-library(-not-required)?:'(.*)'$", re.MULTILINE)
+ pattern = re.compile("^uses-library(-not-required)?:'(.*)'$", re.MULTILINE)
- required = []
- optional = []
- lines = []
- for match in re.finditer(pattern, badging):
- lines.append(match.group(0))
- libname = match.group(2)
- if match.group(1) == None:
- required.append(libname)
- else:
- optional.append(libname)
+ required = []
+ optional = []
+ lines = []
+ for match in re.finditer(pattern, badging):
+ lines.append(match.group(0))
+ libname = match.group(2)
+ if match.group(1) is None:
+ required.append(libname)
+ else:
+ optional.append(libname)
- required = first_unique_elements(required)
- optional = first_unique_elements(optional)
- tags = first_unique_elements(lines)
- return required, optional, tags
+ required = first_unique_elements(required)
+ optional = first_unique_elements(optional)
+ tags = first_unique_elements(lines)
+ return required, optional, tags
-def extract_uses_libs_xml(xml):
- """Extract <uses-library> tags from the manifest."""
+def extract_uses_libs_xml(xml): #pylint: disable=inconsistent-return-statements
+ """Extract <uses-library> tags from the manifest."""
- manifest = parse_manifest(xml)
- elems = get_children_with_tag(manifest, 'application')
- application = elems[0] if len(elems) == 1 else None
- if len(elems) > 1:
- raise RuntimeError('found multiple <application> tags')
- elif not elems:
- if uses_libraries or optional_uses_libraries:
- raise ManifestMismatchError('no <application> tag found')
- return
+ manifest = parse_manifest(xml)
+ elems = get_children_with_tag(manifest, 'application')
+ application = elems[0] if len(elems) == 1 else None
+ if len(elems) > 1: #pylint: disable=no-else-raise
+ raise RuntimeError('found multiple <application> tags')
+ elif not elems:
+ if uses_libraries or optional_uses_libraries: #pylint: disable=undefined-variable
+ raise ManifestMismatchError('no <application> tag found')
+ return
- libs = get_children_with_tag(application, 'uses-library')
+ libs = get_children_with_tag(application, 'uses-library')
- required = [uses_library_name(x) for x in libs if uses_library_required(x)]
- optional = [uses_library_name(x) for x in libs if not uses_library_required(x)]
+ required = [uses_library_name(x) for x in libs if uses_library_required(x)]
+ optional = [
+ uses_library_name(x) for x in libs if not uses_library_required(x)
+ ]
- # render <uses-library> tags as XML for a pretty error message
- tags = []
- for lib in libs:
- tags.append(lib.toprettyxml())
+ # render <uses-library> tags as XML for a pretty error message
+ tags = []
+ for lib in libs:
+ tags.append(lib.toprettyxml())
- required = first_unique_elements(required)
- optional = first_unique_elements(optional)
- tags = first_unique_elements(tags)
- return required, optional, tags
+ required = first_unique_elements(required)
+ optional = first_unique_elements(optional)
+ tags = first_unique_elements(tags)
+ return required, optional, tags
def first_unique_elements(l):
- result = []
- [result.append(x) for x in l if x not in result]
- return result
+ result = []
+ for x in l:
+ if x not in result:
+ result.append(x)
+ return result
def uses_library_name(lib):
- """Extract the name attribute of a uses-library tag.
+ """Extract the name attribute of a uses-library tag.
Args:
lib: a <uses-library> tag.
- """
- name = lib.getAttributeNodeNS(android_ns, 'name')
- return name.value if name is not None else ""
+ """
+ name = lib.getAttributeNodeNS(android_ns, 'name')
+ return name.value if name is not None else ''
def uses_library_required(lib):
- """Extract the required attribute of a uses-library tag.
+ """Extract the required attribute of a uses-library tag.
Args:
lib: a <uses-library> tag.
- """
- required = lib.getAttributeNodeNS(android_ns, 'required')
- return (required.value == 'true') if required is not None else True
+ """
+ required = lib.getAttributeNodeNS(android_ns, 'required')
+ return (required.value == 'true') if required is not None else True
-def extract_target_sdk_version(manifest, is_apk = False):
- """Returns the targetSdkVersion from the manifest.
+def extract_target_sdk_version(manifest, is_apk=False):
+ """Returns the targetSdkVersion from the manifest.
Args:
manifest: manifest (either parsed XML or aapt dump of APK)
is_apk: if the manifest comes from an APK or an XML file
- """
- if is_apk:
- return extract_target_sdk_version_apk(manifest)
- else:
- return extract_target_sdk_version_xml(manifest)
+ """
+ if is_apk: #pylint: disable=no-else-return
+ return extract_target_sdk_version_apk(manifest)
+ else:
+ return extract_target_sdk_version_xml(manifest)
def extract_target_sdk_version_apk(badging):
- """Extract targetSdkVersion tags from the manifest of an APK."""
+ """Extract targetSdkVersion tags from the manifest of an APK."""
- pattern = re.compile("^targetSdkVersion?:'(.*)'$", re.MULTILINE)
+ pattern = re.compile("^targetSdkVersion?:'(.*)'$", re.MULTILINE)
- for match in re.finditer(pattern, badging):
- return match.group(1)
+ for match in re.finditer(pattern, badging):
+ return match.group(1)
- raise RuntimeError('cannot find targetSdkVersion in the manifest')
+ raise RuntimeError('cannot find targetSdkVersion in the manifest')
def extract_target_sdk_version_xml(xml):
- """Extract targetSdkVersion tags from the manifest."""
+ """Extract targetSdkVersion tags from the manifest."""
- manifest = parse_manifest(xml)
+ manifest = parse_manifest(xml)
- # Get or insert the uses-sdk element
- uses_sdk = get_children_with_tag(manifest, 'uses-sdk')
- if len(uses_sdk) > 1:
- raise RuntimeError('found multiple uses-sdk elements')
- elif len(uses_sdk) == 0:
- raise RuntimeError('missing uses-sdk element')
+ # Get or insert the uses-sdk element
+ uses_sdk = get_children_with_tag(manifest, 'uses-sdk')
+ if len(uses_sdk) > 1: #pylint: disable=no-else-raise
+ raise RuntimeError('found multiple uses-sdk elements')
+ elif len(uses_sdk) == 0:
+ raise RuntimeError('missing uses-sdk element')
- uses_sdk = uses_sdk[0]
+ uses_sdk = uses_sdk[0]
- min_attr = uses_sdk.getAttributeNodeNS(android_ns, 'minSdkVersion')
- if min_attr is None:
- raise RuntimeError('minSdkVersion is not specified')
+ min_attr = uses_sdk.getAttributeNodeNS(android_ns, 'minSdkVersion')
+ if min_attr is None:
+ raise RuntimeError('minSdkVersion is not specified')
- target_attr = uses_sdk.getAttributeNodeNS(android_ns, 'targetSdkVersion')
- if target_attr is None:
- target_attr = min_attr
+ target_attr = uses_sdk.getAttributeNodeNS(android_ns, 'targetSdkVersion')
+ if target_attr is None:
+ target_attr = min_attr
- return target_attr.value
+ return target_attr.value
def load_dexpreopt_configs(configs):
- """Load dexpreopt.config files and map module names to library names."""
- module_to_libname = {}
+ """Load dexpreopt.config files and map module names to library names."""
+ module_to_libname = {}
- if configs is None:
- configs = []
+ if configs is None:
+ configs = []
- for config in configs:
- with open(config, 'r') as f:
- contents = json.load(f)
- module_to_libname[contents['Name']] = contents['ProvidesUsesLibrary']
+ for config in configs:
+ with open(config, 'r') as f:
+ contents = json.load(f)
+ module_to_libname[contents['Name']] = contents['ProvidesUsesLibrary']
- return module_to_libname
+ return module_to_libname
def translate_libnames(modules, module_to_libname):
- """Translate module names into library names using the mapping."""
- if modules is None:
- modules = []
+ """Translate module names into library names using the mapping."""
+ if modules is None:
+ modules = []
- libnames = []
- for name in modules:
- if name in module_to_libname:
- name = module_to_libname[name]
- libnames.append(name)
+ libnames = []
+ for name in modules:
+ if name in module_to_libname:
+ name = module_to_libname[name]
+ libnames.append(name)
- return libnames
+ return libnames
def main():
- """Program entry point."""
- try:
- args = parse_args()
+ """Program entry point."""
+ try:
+ args = parse_args()
- # The input can be either an XML manifest or an APK, they are parsed and
- # processed in different ways.
- is_apk = args.input.endswith('.apk')
- if is_apk:
- aapt = args.aapt if args.aapt != None else "aapt"
- manifest = subprocess.check_output([aapt, "dump", "badging", args.input])
- else:
- manifest = minidom.parse(args.input)
+ # The input can be either an XML manifest or an APK, they are parsed and
+ # processed in different ways.
+ is_apk = args.input.endswith('.apk')
+ if is_apk:
+ aapt = args.aapt if args.aapt is not None else 'aapt'
+ manifest = subprocess.check_output(
+ [aapt, 'dump', 'badging', args.input])
+ else:
+ manifest = minidom.parse(args.input)
- if args.enforce_uses_libraries:
- # Load dexpreopt.config files and build a mapping from module names to
- # library names. This is necessary because build system addresses
- # libraries by their module name (`uses_libs`, `optional_uses_libs`,
- # `LOCAL_USES_LIBRARIES`, `LOCAL_OPTIONAL_LIBRARY_NAMES` all contain
- # module names), while the manifest addresses libraries by their name.
- mod_to_lib = load_dexpreopt_configs(args.dexpreopt_configs)
- required = translate_libnames(args.uses_libraries, mod_to_lib)
- optional = translate_libnames(args.optional_uses_libraries, mod_to_lib)
+ if args.enforce_uses_libraries:
+ # Load dexpreopt.config files and build a mapping from module
+ # names to library names. This is necessary because build system
+ # addresses libraries by their module name (`uses_libs`,
+ # `optional_uses_libs`, `LOCAL_USES_LIBRARIES`,
+ # `LOCAL_OPTIONAL_LIBRARY_NAMES` all contain module names), while
+ # the manifest addresses libraries by their name.
+ mod_to_lib = load_dexpreopt_configs(args.dexpreopt_configs)
+ required = translate_libnames(args.uses_libraries, mod_to_lib)
+ optional = translate_libnames(args.optional_uses_libraries,
+ mod_to_lib)
- # Check if the <uses-library> lists in the build system agree with those
- # in the manifest. Raise an exception on mismatch, unless the script was
- # passed a special parameter to suppress exceptions.
- errmsg = enforce_uses_libraries(manifest, required, optional,
- args.enforce_uses_libraries_relax, is_apk, args.input)
+ # Check if the <uses-library> lists in the build system agree with
+ # those in the manifest. Raise an exception on mismatch, unless the
+ # script was passed a special parameter to suppress exceptions.
+ errmsg = enforce_uses_libraries(manifest, required, optional,
+ args.enforce_uses_libraries_relax,
+ is_apk, args.input)
- # Create a status file that is empty on success, or contains an error
- # message on failure. When exceptions are suppressed, dexpreopt command
- # command will check file size to determine if the check has failed.
- if args.enforce_uses_libraries_status:
- with open(args.enforce_uses_libraries_status, 'w') as f:
- if not errmsg == None:
- f.write("%s\n" % errmsg)
+ # Create a status file that is empty on success, or contains an
+ # error message on failure. When exceptions are suppressed,
+            # the dexpreopt command will check file size to determine if
+ # the check has failed.
+ if args.enforce_uses_libraries_status:
+ with open(args.enforce_uses_libraries_status, 'w') as f:
+                    if errmsg is not None:
+ f.write('%s\n' % errmsg)
- if args.extract_target_sdk_version:
- try:
- print(extract_target_sdk_version(manifest, is_apk))
- except:
- # Failed; don't crash, return "any" SDK version. This will result in
- # dexpreopt not adding any compatibility libraries.
- print(10000)
+ if args.extract_target_sdk_version:
+ try:
+ print(extract_target_sdk_version(manifest, is_apk))
+ except: #pylint: disable=bare-except
+ # Failed; don't crash, return "any" SDK version. This will
+ # result in dexpreopt not adding any compatibility libraries.
+ print(10000)
- if args.output:
- # XML output is supposed to be written only when this script is invoked
- # with XML input manifest, not with an APK.
- if is_apk:
- raise RuntimeError('cannot save APK manifest as XML')
+ if args.output:
+ # XML output is supposed to be written only when this script is
+ # invoked with XML input manifest, not with an APK.
+ if is_apk:
+ raise RuntimeError('cannot save APK manifest as XML')
- with open(args.output, 'wb') as f:
- write_xml(f, manifest)
+ with open(args.output, 'wb') as f:
+ write_xml(f, manifest)
- # pylint: disable=broad-except
- except Exception as err:
- print('error: ' + str(err), file=sys.stderr)
- sys.exit(-1)
+ # pylint: disable=broad-except
+ except Exception as err:
+ print('error: ' + str(err), file=sys.stderr)
+ sys.exit(-1)
+
if __name__ == '__main__':
- main()
+ main()
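
The module-name-to-library-name translation described in the comments of `main()` can be seen in isolation with a short sketch (the config contents and names below are hypothetical, and `manifest_check.py` plus its `manifest` helper are assumed to be importable):

```
import json
import tempfile

from manifest_check import load_dexpreopt_configs, translate_libnames

# Write a throwaway dexpreopt.config with the two keys the script reads.
with tempfile.NamedTemporaryFile(
        'w', suffix='.config', delete=False) as config:
    json.dump(
        {'Name': 'foo-module', 'ProvidesUsesLibrary': 'com.example.foo'},
        config)

mod_to_lib = load_dexpreopt_configs([config.name])
# Module names (uses_libs / LOCAL_USES_LIBRARIES) are mapped to manifest
# library names; names without a config entry pass through unchanged.
print(translate_libnames(['foo-module', 'bar'], mod_to_lib))
# -> ['com.example.foo', 'bar']
```
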
diff --git a/scripts/manifest_check_test.py b/scripts/manifest_check_test.py
index e3e8ac4..3be7a30 100755
--- a/scripts/manifest_check_test.py
+++ b/scripts/manifest_check_test.py
@@ -26,202 +26,235 @@
def uses_library_xml(name, attr=''):
- return '<uses-library android:name="%s"%s />' % (name, attr)
+ return '<uses-library android:name="%s"%s />' % (name, attr)
def required_xml(value):
- return ' android:required="%s"' % ('true' if value else 'false')
+ return ' android:required="%s"' % ('true' if value else 'false')
def uses_library_apk(name, sfx=''):
- return "uses-library%s:'%s'" % (sfx, name)
+ return "uses-library%s:'%s'" % (sfx, name)
def required_apk(value):
- return '' if value else '-not-required'
+ return '' if value else '-not-required'
class EnforceUsesLibrariesTest(unittest.TestCase):
- """Unit tests for add_extract_native_libs function."""
+ """Unit tests for add_extract_native_libs function."""
- def run_test(self, xml, apk, uses_libraries=[], optional_uses_libraries=[]):
- doc = minidom.parseString(xml)
- try:
- relax = False
- manifest_check.enforce_uses_libraries(doc, uses_libraries,
- optional_uses_libraries, relax, False, 'path/to/X/AndroidManifest.xml')
- manifest_check.enforce_uses_libraries(apk, uses_libraries,
- optional_uses_libraries, relax, True, 'path/to/X/X.apk')
- return True
- except manifest_check.ManifestMismatchError:
- return False
+ def run_test(self, xml, apk, uses_libraries=[], optional_uses_libraries=[]): #pylint: disable=dangerous-default-value
+ doc = minidom.parseString(xml)
+ try:
+ relax = False
+ manifest_check.enforce_uses_libraries(
+ doc, uses_libraries, optional_uses_libraries, relax, False,
+ 'path/to/X/AndroidManifest.xml')
+ manifest_check.enforce_uses_libraries(apk, uses_libraries,
+ optional_uses_libraries,
+ relax, True,
+ 'path/to/X/X.apk')
+ return True
+ except manifest_check.ManifestMismatchError:
+ return False
- xml_tmpl = (
- '<?xml version="1.0" encoding="utf-8"?>\n'
- '<manifest xmlns:android="http://schemas.android.com/apk/res/android">\n'
- ' <application>\n'
- ' %s\n'
- ' </application>\n'
- '</manifest>\n')
+ xml_tmpl = (
+ '<?xml version="1.0" encoding="utf-8"?>\n<manifest '
+ 'xmlns:android="http://schemas.android.com/apk/res/android">\n '
+ '<application>\n %s\n </application>\n</manifest>\n')
- apk_tmpl = (
- "package: name='com.google.android.something' versionCode='100'\n"
- "sdkVersion:'29'\n"
- "targetSdkVersion:'29'\n"
- "uses-permission: name='android.permission.ACCESS_NETWORK_STATE'\n"
- "%s\n"
- "densities: '160' '240' '320' '480' '640' '65534")
+ apk_tmpl = (
+ "package: name='com.google.android.something' versionCode='100'\n"
+ "sdkVersion:'29'\n"
+ "targetSdkVersion:'29'\n"
+ "uses-permission: name='android.permission.ACCESS_NETWORK_STATE'\n"
+ '%s\n'
+ "densities: '160' '240' '320' '480' '640' '65534")
- def test_uses_library(self):
- xml = self.xml_tmpl % (uses_library_xml('foo'))
- apk = self.apk_tmpl % (uses_library_apk('foo'))
- matches = self.run_test(xml, apk, uses_libraries=['foo'])
- self.assertTrue(matches)
+ def test_uses_library(self):
+ xml = self.xml_tmpl % (uses_library_xml('foo'))
+ apk = self.apk_tmpl % (uses_library_apk('foo'))
+ matches = self.run_test(xml, apk, uses_libraries=['foo'])
+ self.assertTrue(matches)
- def test_uses_library_required(self):
- xml = self.xml_tmpl % (uses_library_xml('foo', required_xml(True)))
- apk = self.apk_tmpl % (uses_library_apk('foo', required_apk(True)))
- matches = self.run_test(xml, apk, uses_libraries=['foo'])
- self.assertTrue(matches)
+ def test_uses_library_required(self):
+ xml = self.xml_tmpl % (uses_library_xml('foo', required_xml(True)))
+ apk = self.apk_tmpl % (uses_library_apk('foo', required_apk(True)))
+ matches = self.run_test(xml, apk, uses_libraries=['foo'])
+ self.assertTrue(matches)
- def test_optional_uses_library(self):
- xml = self.xml_tmpl % (uses_library_xml('foo', required_xml(False)))
- apk = self.apk_tmpl % (uses_library_apk('foo', required_apk(False)))
- matches = self.run_test(xml, apk, optional_uses_libraries=['foo'])
- self.assertTrue(matches)
+ def test_optional_uses_library(self):
+ xml = self.xml_tmpl % (uses_library_xml('foo', required_xml(False)))
+ apk = self.apk_tmpl % (uses_library_apk('foo', required_apk(False)))
+ matches = self.run_test(xml, apk, optional_uses_libraries=['foo'])
+ self.assertTrue(matches)
- def test_expected_uses_library(self):
- xml = self.xml_tmpl % (uses_library_xml('foo', required_xml(False)))
- apk = self.apk_tmpl % (uses_library_apk('foo', required_apk(False)))
- matches = self.run_test(xml, apk, uses_libraries=['foo'])
- self.assertFalse(matches)
+ def test_expected_uses_library(self):
+ xml = self.xml_tmpl % (uses_library_xml('foo', required_xml(False)))
+ apk = self.apk_tmpl % (uses_library_apk('foo', required_apk(False)))
+ matches = self.run_test(xml, apk, uses_libraries=['foo'])
+ self.assertFalse(matches)
- def test_expected_optional_uses_library(self):
- xml = self.xml_tmpl % (uses_library_xml('foo'))
- apk = self.apk_tmpl % (uses_library_apk('foo'))
- matches = self.run_test(xml, apk, optional_uses_libraries=['foo'])
- self.assertFalse(matches)
+ def test_expected_optional_uses_library(self):
+ xml = self.xml_tmpl % (uses_library_xml('foo'))
+ apk = self.apk_tmpl % (uses_library_apk('foo'))
+ matches = self.run_test(xml, apk, optional_uses_libraries=['foo'])
+ self.assertFalse(matches)
- def test_missing_uses_library(self):
- xml = self.xml_tmpl % ('')
- apk = self.apk_tmpl % ('')
- matches = self.run_test(xml, apk, uses_libraries=['foo'])
- self.assertFalse(matches)
+ def test_missing_uses_library(self):
+ xml = self.xml_tmpl % ('')
+ apk = self.apk_tmpl % ('')
+ matches = self.run_test(xml, apk, uses_libraries=['foo'])
+ self.assertFalse(matches)
- def test_missing_optional_uses_library(self):
- xml = self.xml_tmpl % ('')
- apk = self.apk_tmpl % ('')
- matches = self.run_test(xml, apk, optional_uses_libraries=['foo'])
- self.assertFalse(matches)
+ def test_missing_optional_uses_library(self):
+ xml = self.xml_tmpl % ('')
+ apk = self.apk_tmpl % ('')
+ matches = self.run_test(xml, apk, optional_uses_libraries=['foo'])
+ self.assertFalse(matches)
- def test_extra_uses_library(self):
- xml = self.xml_tmpl % (uses_library_xml('foo'))
- apk = self.apk_tmpl % (uses_library_xml('foo'))
- matches = self.run_test(xml, apk)
- self.assertFalse(matches)
+ def test_extra_uses_library(self):
+ xml = self.xml_tmpl % (uses_library_xml('foo'))
+        apk = self.apk_tmpl % (uses_library_apk('foo'))
+ matches = self.run_test(xml, apk)
+ self.assertFalse(matches)
- def test_extra_optional_uses_library(self):
- xml = self.xml_tmpl % (uses_library_xml('foo', required_xml(False)))
- apk = self.apk_tmpl % (uses_library_apk('foo', required_apk(False)))
- matches = self.run_test(xml, apk)
- self.assertFalse(matches)
+ def test_extra_optional_uses_library(self):
+ xml = self.xml_tmpl % (uses_library_xml('foo', required_xml(False)))
+ apk = self.apk_tmpl % (uses_library_apk('foo', required_apk(False)))
+ matches = self.run_test(xml, apk)
+ self.assertFalse(matches)
- def test_multiple_uses_library(self):
- xml = self.xml_tmpl % ('\n'.join([uses_library_xml('foo'),
- uses_library_xml('bar')]))
- apk = self.apk_tmpl % ('\n'.join([uses_library_apk('foo'),
- uses_library_apk('bar')]))
- matches = self.run_test(xml, apk, uses_libraries=['foo', 'bar'])
- self.assertTrue(matches)
+ def test_multiple_uses_library(self):
+ xml = self.xml_tmpl % ('\n'.join(
+ [uses_library_xml('foo'),
+ uses_library_xml('bar')]))
+ apk = self.apk_tmpl % ('\n'.join(
+ [uses_library_apk('foo'),
+ uses_library_apk('bar')]))
+ matches = self.run_test(xml, apk, uses_libraries=['foo', 'bar'])
+ self.assertTrue(matches)
- def test_multiple_optional_uses_library(self):
- xml = self.xml_tmpl % ('\n'.join([uses_library_xml('foo', required_xml(False)),
- uses_library_xml('bar', required_xml(False))]))
- apk = self.apk_tmpl % ('\n'.join([uses_library_apk('foo', required_apk(False)),
- uses_library_apk('bar', required_apk(False))]))
- matches = self.run_test(xml, apk, optional_uses_libraries=['foo', 'bar'])
- self.assertTrue(matches)
+ def test_multiple_optional_uses_library(self):
+ xml = self.xml_tmpl % ('\n'.join([
+ uses_library_xml('foo', required_xml(False)),
+ uses_library_xml('bar', required_xml(False))
+ ]))
+ apk = self.apk_tmpl % ('\n'.join([
+ uses_library_apk('foo', required_apk(False)),
+ uses_library_apk('bar', required_apk(False))
+ ]))
+ matches = self.run_test(
+ xml, apk, optional_uses_libraries=['foo', 'bar'])
+ self.assertTrue(matches)
- def test_order_uses_library(self):
- xml = self.xml_tmpl % ('\n'.join([uses_library_xml('foo'),
- uses_library_xml('bar')]))
- apk = self.apk_tmpl % ('\n'.join([uses_library_apk('foo'),
- uses_library_apk('bar')]))
- matches = self.run_test(xml, apk, uses_libraries=['bar', 'foo'])
- self.assertFalse(matches)
+ def test_order_uses_library(self):
+ xml = self.xml_tmpl % ('\n'.join(
+ [uses_library_xml('foo'),
+ uses_library_xml('bar')]))
+ apk = self.apk_tmpl % ('\n'.join(
+ [uses_library_apk('foo'),
+ uses_library_apk('bar')]))
+ matches = self.run_test(xml, apk, uses_libraries=['bar', 'foo'])
+ self.assertFalse(matches)
- def test_order_optional_uses_library(self):
- xml = self.xml_tmpl % ('\n'.join([uses_library_xml('foo', required_xml(False)),
- uses_library_xml('bar', required_xml(False))]))
- apk = self.apk_tmpl % ('\n'.join([uses_library_apk('foo', required_apk(False)),
- uses_library_apk('bar', required_apk(False))]))
- matches = self.run_test(xml, apk, optional_uses_libraries=['bar', 'foo'])
- self.assertFalse(matches)
+ def test_order_optional_uses_library(self):
+ xml = self.xml_tmpl % ('\n'.join([
+ uses_library_xml('foo', required_xml(False)),
+ uses_library_xml('bar', required_xml(False))
+ ]))
+ apk = self.apk_tmpl % ('\n'.join([
+ uses_library_apk('foo', required_apk(False)),
+ uses_library_apk('bar', required_apk(False))
+ ]))
+ matches = self.run_test(
+ xml, apk, optional_uses_libraries=['bar', 'foo'])
+ self.assertFalse(matches)
- def test_duplicate_uses_library(self):
- xml = self.xml_tmpl % ('\n'.join([uses_library_xml('foo'),
- uses_library_xml('foo')]))
- apk = self.apk_tmpl % ('\n'.join([uses_library_apk('foo'),
- uses_library_apk('foo')]))
- matches = self.run_test(xml, apk, uses_libraries=['foo'])
- self.assertTrue(matches)
+ def test_duplicate_uses_library(self):
+ xml = self.xml_tmpl % ('\n'.join(
+ [uses_library_xml('foo'),
+ uses_library_xml('foo')]))
+ apk = self.apk_tmpl % ('\n'.join(
+ [uses_library_apk('foo'),
+ uses_library_apk('foo')]))
+ matches = self.run_test(xml, apk, uses_libraries=['foo'])
+ self.assertTrue(matches)
- def test_duplicate_optional_uses_library(self):
- xml = self.xml_tmpl % ('\n'.join([uses_library_xml('foo', required_xml(False)),
- uses_library_xml('foo', required_xml(False))]))
- apk = self.apk_tmpl % ('\n'.join([uses_library_apk('foo', required_apk(False)),
- uses_library_apk('foo', required_apk(False))]))
- matches = self.run_test(xml, apk, optional_uses_libraries=['foo'])
- self.assertTrue(matches)
+ def test_duplicate_optional_uses_library(self):
+ xml = self.xml_tmpl % ('\n'.join([
+ uses_library_xml('foo', required_xml(False)),
+ uses_library_xml('foo', required_xml(False))
+ ]))
+ apk = self.apk_tmpl % ('\n'.join([
+ uses_library_apk('foo', required_apk(False)),
+ uses_library_apk('foo', required_apk(False))
+ ]))
+ matches = self.run_test(xml, apk, optional_uses_libraries=['foo'])
+ self.assertTrue(matches)
- def test_mixed(self):
- xml = self.xml_tmpl % ('\n'.join([uses_library_xml('foo'),
- uses_library_xml('bar', required_xml(False))]))
- apk = self.apk_tmpl % ('\n'.join([uses_library_apk('foo'),
- uses_library_apk('bar', required_apk(False))]))
- matches = self.run_test(xml, apk, uses_libraries=['foo'],
- optional_uses_libraries=['bar'])
- self.assertTrue(matches)
+ def test_mixed(self):
+ xml = self.xml_tmpl % ('\n'.join([
+ uses_library_xml('foo'),
+ uses_library_xml('bar', required_xml(False))
+ ]))
+ apk = self.apk_tmpl % ('\n'.join([
+ uses_library_apk('foo'),
+ uses_library_apk('bar', required_apk(False))
+ ]))
+ matches = self.run_test(
+ xml, apk, uses_libraries=['foo'], optional_uses_libraries=['bar'])
+ self.assertTrue(matches)
- def test_mixed_with_namespace(self):
- xml = self.xml_tmpl % ('\n'.join([uses_library_xml('foo'),
- uses_library_xml('bar', required_xml(False))]))
- apk = self.apk_tmpl % ('\n'.join([uses_library_apk('foo'),
- uses_library_apk('bar', required_apk(False))]))
- matches = self.run_test(xml, apk, uses_libraries=['//x/y/z:foo'],
- optional_uses_libraries=['//x/y/z:bar'])
- self.assertTrue(matches)
+ def test_mixed_with_namespace(self):
+ xml = self.xml_tmpl % ('\n'.join([
+ uses_library_xml('foo'),
+ uses_library_xml('bar', required_xml(False))
+ ]))
+ apk = self.apk_tmpl % ('\n'.join([
+ uses_library_apk('foo'),
+ uses_library_apk('bar', required_apk(False))
+ ]))
+ matches = self.run_test(
+ xml,
+ apk,
+ uses_libraries=['//x/y/z:foo'],
+ optional_uses_libraries=['//x/y/z:bar'])
+ self.assertTrue(matches)
class ExtractTargetSdkVersionTest(unittest.TestCase):
- def run_test(self, xml, apk, version):
- doc = minidom.parseString(xml)
- v = manifest_check.extract_target_sdk_version(doc, is_apk=False)
- self.assertEqual(v, version)
- v = manifest_check.extract_target_sdk_version(apk, is_apk=True)
- self.assertEqual(v, version)
- xml_tmpl = (
- '<?xml version="1.0" encoding="utf-8"?>\n'
- '<manifest xmlns:android="http://schemas.android.com/apk/res/android">\n'
- ' <uses-sdk android:minSdkVersion="28" android:targetSdkVersion="%s" />\n'
- '</manifest>\n')
+ def run_test(self, xml, apk, version):
+ doc = minidom.parseString(xml)
+ v = manifest_check.extract_target_sdk_version(doc, is_apk=False)
+ self.assertEqual(v, version)
+ v = manifest_check.extract_target_sdk_version(apk, is_apk=True)
+ self.assertEqual(v, version)
- apk_tmpl = (
- "package: name='com.google.android.something' versionCode='100'\n"
- "sdkVersion:'28'\n"
- "targetSdkVersion:'%s'\n"
- "uses-permission: name='android.permission.ACCESS_NETWORK_STATE'\n")
+ xml_tmpl = (
+ '<?xml version="1.0" encoding="utf-8"?>\n<manifest '
+ 'xmlns:android="http://schemas.android.com/apk/res/android">\n '
+ '<uses-sdk android:minSdkVersion="28" android:targetSdkVersion="%s" '
+ '/>\n</manifest>\n')
- def test_targert_sdk_version_28(self):
- xml = self.xml_tmpl % "28"
- apk = self.apk_tmpl % "28"
- self.run_test(xml, apk, "28")
+ apk_tmpl = (
+ "package: name='com.google.android.something' versionCode='100'\n"
+ "sdkVersion:'28'\n"
+ "targetSdkVersion:'%s'\n"
+ "uses-permission: name='android.permission.ACCESS_NETWORK_STATE'\n")
- def test_targert_sdk_version_29(self):
- xml = self.xml_tmpl % "29"
- apk = self.apk_tmpl % "29"
- self.run_test(xml, apk, "29")
+    def test_target_sdk_version_28(self):
+ xml = self.xml_tmpl % '28'
+ apk = self.apk_tmpl % '28'
+ self.run_test(xml, apk, '28')
+
+    def test_target_sdk_version_29(self):
+ xml = self.xml_tmpl % '29'
+ apk = self.apk_tmpl % '29'
+ self.run_test(xml, apk, '29')
+
if __name__ == '__main__':
- unittest.main(verbosity=2)
+ unittest.main(verbosity=2)
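
One path these tests do not cover is the relax mode behind `--enforce-uses-libraries-relax`; an illustrative sketch (the manifest content mirrors the test templates, and the library name and path are made up) of how the relaxed call returns the mismatch message instead of raising:

```
from xml.dom import minidom

import manifest_check

xml = (
    '<?xml version="1.0" encoding="utf-8"?>\n'
    '<manifest xmlns:android="http://schemas.android.com/apk/res/android">\n'
    '  <application>\n'
    '    <uses-library android:name="foo" />\n'
    '  </application>\n'
    '</manifest>\n')

doc = minidom.parseString(xml)
# With relax=True the mismatch is returned as a string instead of raised;
# main() writes it to the --enforce-uses-libraries-status file.
errmsg = manifest_check.enforce_uses_libraries(
    doc, [], [], True, False, 'path/to/AndroidManifest.xml')
print(errmsg is not None)  # True: the manifest lists 'foo', the build does not
```
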
diff --git a/sdk/Android.bp b/sdk/Android.bp
index 368c03a..0c9bf27 100644
--- a/sdk/Android.bp
+++ b/sdk/Android.bp
@@ -16,6 +16,7 @@
srcs: [
"bp.go",
"exports.go",
+ "member_type.go",
"sdk.go",
"update.go",
],
diff --git a/sdk/member_type.go b/sdk/member_type.go
new file mode 100644
index 0000000..ee27c86
--- /dev/null
+++ b/sdk/member_type.go
@@ -0,0 +1,164 @@
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package sdk
+
+import (
+ "reflect"
+
+ "android/soong/android"
+ "github.com/google/blueprint/proptools"
+)
+
+// Contains information about the sdk properties that list sdk members by type, e.g.
+// Java_header_libs.
+type sdkMemberTypeListProperty struct {
+ // getter for the list of member names
+ getter func(properties interface{}) []string
+
+ // setter for the list of member names
+ setter func(properties interface{}, list []string)
+
+ // the type of member referenced in the list
+ memberType android.SdkMemberType
+
+ // the dependency tag used for items in this list that can be used to determine the memberType
+ // for a resolved dependency.
+ dependencyTag android.SdkMemberTypeDependencyTag
+}
+
+func (p *sdkMemberTypeListProperty) propertyName() string {
+ return p.memberType.SdkPropertyName()
+}
+
+// Cache of dynamically generated dynamicSdkMemberTypes objects. The key is the pointer
+// to a slice of SdkMemberType instances held in android.SdkMemberTypes.
+var dynamicSdkMemberTypesMap android.OncePer
+
+// A dynamically generated set of member list properties and associated structure type.
+type dynamicSdkMemberTypes struct {
+ // The dynamically generated structure type.
+ //
+	// Contains one exported []string field for each registered android.SdkMemberType. The name of the field
+ // is the exported form of the value returned by SdkMemberType.SdkPropertyName().
+ propertiesStructType reflect.Type
+
+ // Information about each of the member type specific list properties.
+ memberTypeListProperties []*sdkMemberTypeListProperty
+
+ memberTypeToProperty map[android.SdkMemberType]*sdkMemberTypeListProperty
+}
+
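+// createMemberTypeListProperties returns a pointer to a newly allocated instance of the
+// dynamically generated properties struct described above.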
+func (d *dynamicSdkMemberTypes) createMemberTypeListProperties() interface{} {
+ return reflect.New(d.propertiesStructType).Interface()
+}
+
+func getDynamicSdkMemberTypes(registry *android.SdkMemberTypesRegistry) *dynamicSdkMemberTypes {
+
+ // Get a key that uniquely identifies the registry contents.
+ key := registry.UniqueOnceKey()
+
+ // Get the registered types.
+ registeredTypes := registry.RegisteredTypes()
+
+ // Get the cached value, creating new instance if necessary.
+ return dynamicSdkMemberTypesMap.Once(key, func() interface{} {
+ return createDynamicSdkMemberTypes(registeredTypes)
+ }).(*dynamicSdkMemberTypes)
+}
+
+// Create the dynamicSdkMemberTypes from the list of registered member types.
+//
+// A struct is created which contains one exported field per member type corresponding to
+// the SdkMemberType.SdkPropertyName() value.
+//
+// A list of sdkMemberTypeListProperty instances is created, one per member type, each of which provides:
+// * a reference to the member type.
+// * a getter for the corresponding field in the properties struct.
+// * a dependency tag that identifies the member type of a resolved dependency.
+//
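+// For illustration only, assuming hypothetical member types "java_header_libs" and
+// "native_shared_libs" were registered, the generated struct would look roughly like:
+//
+//     struct {
+//         Java_header_libs   []string `android:"arch_variant"`
+//         Native_shared_libs []string `android:"arch_variant"`
+//     }
+//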
+func createDynamicSdkMemberTypes(sdkMemberTypes []android.SdkMemberType) *dynamicSdkMemberTypes {
+
+ var listProperties []*sdkMemberTypeListProperty
+ memberTypeToProperty := map[android.SdkMemberType]*sdkMemberTypeListProperty{}
+ var fields []reflect.StructField
+
+ // Iterate over the member types creating StructField and sdkMemberTypeListProperty objects.
+ nextFieldIndex := 0
+ for _, memberType := range sdkMemberTypes {
+
+ p := memberType.SdkPropertyName()
+
+ var getter func(properties interface{}) []string
+ var setter func(properties interface{}, list []string)
+ if memberType.RequiresBpProperty() {
+ // Create a dynamic exported field for the member type's property.
+ fields = append(fields, reflect.StructField{
+ Name: proptools.FieldNameForProperty(p),
+ Type: reflect.TypeOf([]string{}),
+ Tag: `android:"arch_variant"`,
+ })
+
+ // Copy the field index for use in the getter func as using the loop variable directly will
+ // cause all funcs to use the last value.
+ fieldIndex := nextFieldIndex
+ nextFieldIndex += 1
+
+ getter = func(properties interface{}) []string {
+				// The properties argument is expected to be of the following form (where
+				// <Module_types> is the name of an SdkMemberType.SdkPropertyName()):
+				//     properties *struct {<Module_types> []string, ....}
+				//
+				// Although it accesses the field by index, the following reflection code is equivalent to:
+ // *properties.<Module_types>
+ //
+ list := reflect.ValueOf(properties).Elem().Field(fieldIndex).Interface().([]string)
+ return list
+ }
+
+ setter = func(properties interface{}, list []string) {
+				// The properties argument is expected to be of the following form (where
+				// <Module_types> is the name of an SdkMemberType.SdkPropertyName()):
+				//     properties *struct {<Module_types> []string, ....}
+				//
+				// Although it accesses the field by index, the following reflection code is equivalent to:
+ // *properties.<Module_types> = list
+ //
+ reflect.ValueOf(properties).Elem().Field(fieldIndex).Set(reflect.ValueOf(list))
+ }
+ }
+
+ // Create an sdkMemberTypeListProperty for the member type.
+ memberListProperty := &sdkMemberTypeListProperty{
+ getter: getter,
+ setter: setter,
+ memberType: memberType,
+
+ // Dependencies added directly from member properties are always exported.
+ dependencyTag: android.DependencyTagForSdkMemberType(memberType, true),
+ }
+
+ memberTypeToProperty[memberType] = memberListProperty
+ listProperties = append(listProperties, memberListProperty)
+ }
+
+ // Create a dynamic struct from the collated fields.
+ propertiesStructType := reflect.StructOf(fields)
+
+ return &dynamicSdkMemberTypes{
+ memberTypeListProperties: listProperties,
+ memberTypeToProperty: memberTypeToProperty,
+ propertiesStructType: propertiesStructType,
+ }
+}
diff --git a/sdk/sdk.go b/sdk/sdk.go
index b1c8aeb..6dea752 100644
--- a/sdk/sdk.go
+++ b/sdk/sdk.go
@@ -17,7 +17,6 @@
import (
"fmt"
"io"
- "reflect"
"strconv"
"github.com/google/blueprint"
@@ -50,7 +49,7 @@
// The dynamically generated information about the registered SdkMemberType
dynamicSdkMemberTypes *dynamicSdkMemberTypes
- // The dynamically created instance of the properties struct containing the sdk member
+ // The dynamically created instance of the properties struct containing the sdk member type
// list properties, e.g. java_libs.
dynamicMemberTypeListProperties interface{}
@@ -95,148 +94,6 @@
Prebuilt_visibility []string
}
-// Contains information about the sdk properties that list sdk members, e.g.
-// Java_header_libs.
-type sdkMemberListProperty struct {
- // getter for the list of member names
- getter func(properties interface{}) []string
-
- // setter for the list of member names
- setter func(properties interface{}, list []string)
-
- // the type of member referenced in the list
- memberType android.SdkMemberType
-
- // the dependency tag used for items in this list that can be used to determine the memberType
- // for a resolved dependency.
- dependencyTag android.SdkMemberTypeDependencyTag
-}
-
-func (p *sdkMemberListProperty) propertyName() string {
- return p.memberType.SdkPropertyName()
-}
-
-// Cache of dynamically generated dynamicSdkMemberTypes objects. The key is the pointer
-// to a slice of SdkMemberType instances held in android.SdkMemberTypes.
-var dynamicSdkMemberTypesMap android.OncePer
-
-// A dynamically generated set of member list properties and associated structure type.
-type dynamicSdkMemberTypes struct {
- // The dynamically generated structure type.
- //
- // Contains one []string exported field for each android.SdkMemberTypes. The name of the field
- // is the exported form of the value returned by SdkMemberType.SdkPropertyName().
- propertiesStructType reflect.Type
-
- // Information about each of the member type specific list properties.
- memberListProperties []*sdkMemberListProperty
-
- memberTypeToProperty map[android.SdkMemberType]*sdkMemberListProperty
-}
-
-func (d *dynamicSdkMemberTypes) createMemberListProperties() interface{} {
- return reflect.New(d.propertiesStructType).Interface()
-}
-
-func getDynamicSdkMemberTypes(registry *android.SdkMemberTypesRegistry) *dynamicSdkMemberTypes {
-
- // Get a key that uniquely identifies the registry contents.
- key := registry.UniqueOnceKey()
-
- // Get the registered types.
- registeredTypes := registry.RegisteredTypes()
-
- // Get the cached value, creating new instance if necessary.
- return dynamicSdkMemberTypesMap.Once(key, func() interface{} {
- return createDynamicSdkMemberTypes(registeredTypes)
- }).(*dynamicSdkMemberTypes)
-}
-
-// Create the dynamicSdkMemberTypes from the list of registered member types.
-//
-// A struct is created which contains one exported field per member type corresponding to
-// the SdkMemberType.SdkPropertyName() value.
-//
-// A list of sdkMemberListProperty instances is created, one per member type that provides:
-// * a reference to the member type.
-// * a getter for the corresponding field in the properties struct.
-// * a dependency tag that identifies the member type of a resolved dependency.
-//
-func createDynamicSdkMemberTypes(sdkMemberTypes []android.SdkMemberType) *dynamicSdkMemberTypes {
-
- var listProperties []*sdkMemberListProperty
- memberTypeToProperty := map[android.SdkMemberType]*sdkMemberListProperty{}
- var fields []reflect.StructField
-
- // Iterate over the member types creating StructField and sdkMemberListProperty objects.
- nextFieldIndex := 0
- for _, memberType := range sdkMemberTypes {
-
- p := memberType.SdkPropertyName()
-
- var getter func(properties interface{}) []string
- var setter func(properties interface{}, list []string)
- if memberType.RequiresBpProperty() {
- // Create a dynamic exported field for the member type's property.
- fields = append(fields, reflect.StructField{
- Name: proptools.FieldNameForProperty(p),
- Type: reflect.TypeOf([]string{}),
- Tag: `android:"arch_variant"`,
- })
-
- // Copy the field index for use in the getter func as using the loop variable directly will
- // cause all funcs to use the last value.
- fieldIndex := nextFieldIndex
- nextFieldIndex += 1
-
- getter = func(properties interface{}) []string {
- // The properties is expected to be of the following form (where
- // <Module_types> is the name of an SdkMemberType.SdkPropertyName().
- // properties *struct {<Module_types> []string, ....}
- //
- // Although it accesses the field by index the following reflection code is equivalent to:
- // *properties.<Module_types>
- //
- list := reflect.ValueOf(properties).Elem().Field(fieldIndex).Interface().([]string)
- return list
- }
-
- setter = func(properties interface{}, list []string) {
- // The properties is expected to be of the following form (where
- // <Module_types> is the name of an SdkMemberType.SdkPropertyName().
- // properties *struct {<Module_types> []string, ....}
- //
- // Although it accesses the field by index the following reflection code is equivalent to:
- // *properties.<Module_types> = list
- //
- reflect.ValueOf(properties).Elem().Field(fieldIndex).Set(reflect.ValueOf(list))
- }
- }
-
- // Create an sdkMemberListProperty for the member type.
- memberListProperty := &sdkMemberListProperty{
- getter: getter,
- setter: setter,
- memberType: memberType,
-
- // Dependencies added directly from member properties are always exported.
- dependencyTag: android.DependencyTagForSdkMemberType(memberType, true),
- }
-
- memberTypeToProperty[memberType] = memberListProperty
- listProperties = append(listProperties, memberListProperty)
- }
-
- // Create a dynamic struct from the collated fields.
- propertiesStructType := reflect.StructOf(fields)
-
- return &dynamicSdkMemberTypes{
- memberListProperties: listProperties,
- memberTypeToProperty: memberTypeToProperty,
- propertiesStructType: propertiesStructType,
- }
-}
-
// sdk defines an SDK which is a logical group of modules (e.g. native libs, headers, java libs, etc.)
// which Mainline modules like APEX can choose to build with.
func SdkModuleFactory() android.Module {
@@ -247,16 +104,16 @@
s := &sdk{}
s.properties.Module_exports = moduleExports
// Get the dynamic sdk member type data for the currently registered sdk member types.
- var registry *android.SdkMemberTypesRegistry
+ var typeRegistry *android.SdkMemberTypesRegistry
if moduleExports {
- registry = android.ModuleExportsMemberTypes
+ typeRegistry = android.ModuleExportsMemberTypes
} else {
- registry = android.SdkMemberTypes
+ typeRegistry = android.SdkMemberTypes
}
- s.dynamicSdkMemberTypes = getDynamicSdkMemberTypes(registry)
+ s.dynamicSdkMemberTypes = getDynamicSdkMemberTypes(typeRegistry)
// Create an instance of the dynamically created struct that contains all the
// properties for the member type specific list properties.
- s.dynamicMemberTypeListProperties = s.dynamicSdkMemberTypes.createMemberListProperties()
+ s.dynamicMemberTypeListProperties = s.dynamicSdkMemberTypes.createMemberTypeListProperties()
s.AddProperties(&s.properties, s.dynamicMemberTypeListProperties)
// Make sure that the prebuilt visibility property is verified for errors.
@@ -280,11 +137,11 @@
return s
}
-func (s *sdk) memberListProperties() []*sdkMemberListProperty {
- return s.dynamicSdkMemberTypes.memberListProperties
+func (s *sdk) memberTypeListProperties() []*sdkMemberTypeListProperty {
+ return s.dynamicSdkMemberTypes.memberTypeListProperties
}
-func (s *sdk) memberListProperty(memberType android.SdkMemberType) *sdkMemberListProperty {
+func (s *sdk) memberTypeListProperty(memberType android.SdkMemberType) *sdkMemberTypeListProperty {
return s.dynamicSdkMemberTypes.memberTypeToProperty[memberType]
}
@@ -341,6 +198,19 @@
}}
}
+// newDependencyContext creates a new SdkDependencyContext for this sdk.
+func (s *sdk) newDependencyContext(mctx android.BottomUpMutatorContext) android.SdkDependencyContext {
+ return &dependencyContext{
+ BottomUpMutatorContext: mctx,
+ }
+}
+
+type dependencyContext struct {
+ android.BottomUpMutatorContext
+}
+
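+// Compile-time check that dependencyContext implements android.SdkDependencyContext.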
+var _ android.SdkDependencyContext = (*dependencyContext)(nil)
+
// RegisterPreDepsMutators registers pre-deps mutators to support modules implementing SdkAware
// interface and the sdk module type. This function has been made public to be called by tests
// outside of the sdk package
@@ -410,14 +280,15 @@
if s, ok := mctx.Module().(*sdk); ok {
// Add dependencies from enabled and non CommonOS variants to the sdk member variants.
if s.Enabled() && !s.IsCommonOSVariant() {
- for _, memberListProperty := range s.memberListProperties() {
+ ctx := s.newDependencyContext(mctx)
+ for _, memberListProperty := range s.memberTypeListProperties() {
if memberListProperty.getter == nil {
continue
}
names := memberListProperty.getter(s.dynamicMemberTypeListProperties)
if len(names) > 0 {
tag := memberListProperty.dependencyTag
- memberListProperty.memberType.AddDependencies(mctx, tag, names)
+ memberListProperty.memberType.AddDependencies(ctx, tag, names)
}
}
}
diff --git a/sdk/update.go b/sdk/update.go
index 1cd8f13..96a6e69 100644
--- a/sdk/update.go
+++ b/sdk/update.go
@@ -251,7 +251,7 @@
}
var members []*sdkMember
- for _, memberListProperty := range s.memberListProperties() {
+ for _, memberListProperty := range s.memberTypeListProperties() {
membersOfType := byType[memberListProperty.memberType]
members = append(members, membersOfType...)
}
@@ -667,7 +667,7 @@
staticProperties := &snapshotModuleStaticProperties{
Compile_multilib: sdkVariant.multilibUsages.String(),
}
- dynamicProperties := s.dynamicSdkMemberTypes.createMemberListProperties()
+ dynamicProperties := s.dynamicSdkMemberTypes.createMemberTypeListProperties()
combinedProperties := &combinedSnapshotModuleProperties{
sdkVariant: sdkVariant,
@@ -687,7 +687,7 @@
}
combined := sdkVariantToCombinedProperties[memberVariantDep.sdkVariant]
- memberListProperty := s.memberListProperty(memberVariantDep.memberType)
+ memberListProperty := s.memberTypeListProperty(memberVariantDep.memberType)
memberName := ctx.OtherModuleName(memberVariantDep.variant)
if memberListProperty.getter == nil {
@@ -717,7 +717,7 @@
}
// Extract the common members, removing them from the original properties.
- commonDynamicProperties := s.dynamicSdkMemberTypes.createMemberListProperties()
+ commonDynamicProperties := s.dynamicSdkMemberTypes.createMemberTypeListProperties()
extractor := newCommonValueExtractor(commonDynamicProperties)
extractCommonProperties(ctx, extractor, commonDynamicProperties, propertyContainers)
@@ -750,7 +750,7 @@
}
dynamicMemberTypeListProperties := combined.dynamicProperties
- for _, memberListProperty := range s.memberListProperties() {
+ for _, memberListProperty := range s.memberTypeListProperties() {
if memberListProperty.getter == nil {
continue
}
diff --git a/tests/bootstrap_test.sh b/tests/bootstrap_test.sh
index 9342d75..a22adc5 100755
--- a/tests/bootstrap_test.sh
+++ b/tests/bootstrap_test.sh
@@ -144,7 +144,7 @@
run_soong
local ninja_mtime1=$(stat -c "%y" out/soong/build.ninja)
- local glob_deps_file=out/soong/.bootstrap/globs/0.d
+ local glob_deps_file=out/soong/globs/build/0.d
if [ -e "$glob_deps_file" ]; then
fail "Glob deps file unexpectedly written on first build"
@@ -472,16 +472,35 @@
fi
}
-function test_null_build_after_docs {
+function test_soong_docs_smoke() {
setup
- run_soong
- local mtime1=$(stat -c "%y" out/soong/build.ninja)
- prebuilts/build-tools/linux-x86/bin/ninja -f out/soong/build.ninja soong_docs
- run_soong
- local mtime2=$(stat -c "%y" out/soong/build.ninja)
+ run_soong soong_docs
- if [[ "$mtime1" != "$mtime2" ]]; then
+ [[ -e "out/soong/docs/soong_build.html" ]] || fail "Documentation for main page not created"
+ [[ -e "out/soong/docs/cc.html" ]] || fail "Documentation for C++ modules not created"
+}
+
+function test_null_build_after_soong_docs() {
+ setup
+
+ run_soong
+ local ninja_mtime1=$(stat -c "%y" out/soong/build.ninja)
+
+ run_soong soong_docs
+ local docs_mtime1=$(stat -c "%y" out/soong/docs/soong_build.html)
+
+ run_soong soong_docs
+ local docs_mtime2=$(stat -c "%y" out/soong/docs/soong_build.html)
+
+ if [[ "$docs_mtime1" != "$docs_mtime2" ]]; then
+ fail "Output Ninja file changed on null build"
+ fi
+
+ run_soong
+ local ninja_mtime2=$(stat -c "%y" out/soong/build.ninja)
+
+ if [[ "$ninja_mtime1" != "$ninja_mtime2" ]]; then
fail "Output Ninja file changed on null build"
fi
}
@@ -521,7 +540,7 @@
function test_bp2build_smoke {
setup
- GENERATE_BAZEL_FILES=1 run_soong
+ run_soong bp2build
[[ -e out/soong/.bootstrap/bp2build_workspace_marker ]] || fail "bp2build marker file not created"
[[ -e out/soong/workspace ]] || fail "Bazel workspace not created"
}
@@ -530,7 +549,7 @@
setup
create_mock_bazel
- run_bp2build
+ run_soong bp2build
if [[ ! -f "./out/soong/.bootstrap/bp2build_workspace_marker" ]]; then
fail "Marker file was not generated"
@@ -550,7 +569,7 @@
}
EOF
- GENERATE_BAZEL_FILES=1 run_soong
+ run_soong bp2build
[[ -e out/soong/bp2build/a/${GENERATED_BUILD_FILE_NAME} ]] || fail "a/${GENERATED_BUILD_FILE_NAME} not created"
[[ -L out/soong/workspace/a/${GENERATED_BUILD_FILE_NAME} ]] || fail "a/${GENERATED_BUILD_FILE_NAME} not symlinked"
@@ -564,7 +583,7 @@
}
EOF
- GENERATE_BAZEL_FILES=1 run_soong
+ run_soong bp2build
[[ -e out/soong/bp2build/b/${GENERATED_BUILD_FILE_NAME} ]] || fail "a/${GENERATED_BUILD_FILE_NAME} not created"
[[ -L out/soong/workspace/b/${GENERATED_BUILD_FILE_NAME} ]] || fail "a/${GENERATED_BUILD_FILE_NAME} not symlinked"
}
@@ -572,10 +591,10 @@
function test_bp2build_null_build {
setup
- GENERATE_BAZEL_FILES=1 run_soong
+ run_soong bp2build
local mtime1=$(stat -c "%y" out/soong/.bootstrap/bp2build_workspace_marker)
- GENERATE_BAZEL_FILES=1 run_soong
+ run_soong bp2build
local mtime2=$(stat -c "%y" out/soong/.bootstrap/bp2build_workspace_marker)
if [[ "$mtime1" != "$mtime2" ]]; then
@@ -596,22 +615,63 @@
}
EOF
- GENERATE_BAZEL_FILES=1 run_soong
+ run_soong bp2build
grep -q a1.txt "out/soong/bp2build/a/${GENERATED_BUILD_FILE_NAME}" || fail "a1.txt not in ${GENERATED_BUILD_FILE_NAME} file"
touch a/a2.txt
- GENERATE_BAZEL_FILES=1 run_soong
+ run_soong bp2build
grep -q a2.txt "out/soong/bp2build/a/${GENERATED_BUILD_FILE_NAME}" || fail "a2.txt not in ${GENERATED_BUILD_FILE_NAME} file"
}
+function test_multiple_soong_build_modes() {
+ setup
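+  # Requesting several special soong_build modes together with an ordinary
+  # target should produce all of their outputs in a single soong invocation.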
+ run_soong json-module-graph bp2build nothing
+ if [[ ! -f "out/soong/.bootstrap/bp2build_workspace_marker" ]]; then
+ fail "bp2build marker file was not generated"
+ fi
+
+ if [[ ! -f "out/soong/module-graph.json" ]]; then
+ fail "JSON file was not created"
+ fi
+
+ if [[ ! -f "out/soong/build.ninja" ]]; then
+ fail "Main build.ninja file was not created"
+ fi
+}
+
function test_dump_json_module_graph() {
setup
- SOONG_DUMP_JSON_MODULE_GRAPH="$MOCK_TOP/modules.json" run_soong
- if [[ ! -r "$MOCK_TOP/modules.json" ]]; then
+ run_soong json-module-graph
+ if [[ ! -r "out/soong/module-graph.json" ]]; then
fail "JSON file was not created"
fi
}
+function test_json_module_graph_back_and_forth_null_build() {
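+  # Alternating between the regular build and json-module-graph must be a
+  # null build for both outputs: neither build.ninja nor module-graph.json
+  # should be rewritten when switching back and forth.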
+ setup
+
+ run_soong
+ local ninja_mtime1=$(stat -c "%y" out/soong/build.ninja)
+
+ run_soong json-module-graph
+ local json_mtime1=$(stat -c "%y" out/soong/module-graph.json)
+
+ run_soong
+ local ninja_mtime2=$(stat -c "%y" out/soong/build.ninja)
+ if [[ "$ninja_mtime1" != "$ninja_mtime2" ]]; then
+ fail "Output Ninja file changed after writing JSON module graph"
+ fi
+
+ run_soong json-module-graph
+ local json_mtime2=$(stat -c "%y" out/soong/module-graph.json)
+ if [[ "$json_mtime1" != "$json_mtime2" ]]; then
+ fail "JSON module graph file changed after writing Ninja file"
+ fi
+}
+
function test_bp2build_bazel_workspace_structure {
setup
@@ -626,7 +686,7 @@
}
EOF
- GENERATE_BAZEL_FILES=1 run_soong
+ run_soong bp2build
[[ -e out/soong/workspace ]] || fail "Bazel workspace not created"
[[ -d out/soong/workspace/a/b ]] || fail "module directory not a directory"
[[ -L "out/soong/workspace/a/b/${GENERATED_BUILD_FILE_NAME}" ]] || fail "${GENERATED_BUILD_FILE_NAME} file not symlinked"
@@ -650,10 +710,10 @@
}
EOF
- GENERATE_BAZEL_FILES=1 run_soong
+ run_soong bp2build
touch a/a2.txt # No reference in the .bp file needed
- GENERATE_BAZEL_FILES=1 run_soong
+ run_soong bp2build
[[ -L out/soong/workspace/a/a2.txt ]] || fail "a/a2.txt not symlinked"
}
@@ -671,7 +731,7 @@
}
EOF
- GENERATE_BAZEL_FILES=1 run_soong
+ run_soong bp2build
[[ -L "out/soong/workspace/a/${GENERATED_BUILD_FILE_NAME}" ]] || fail "${GENERATED_BUILD_FILE_NAME} file not symlinked"
[[ "$(readlink -f out/soong/workspace/a/${GENERATED_BUILD_FILE_NAME})" =~ "bp2build/a/${GENERATED_BUILD_FILE_NAME}"$ ]] \
|| fail "${GENERATED_BUILD_FILE_NAME} files symlinked to the wrong place"
@@ -700,7 +760,7 @@
}
EOF
- if GENERATE_BAZEL_FILES=1 run_soong >& "$MOCK_TOP/errors"; then
+ if run_soong bp2build >& "$MOCK_TOP/errors"; then
fail "Build should have failed"
fi
@@ -714,7 +774,7 @@
run_soong
local output_mtime1=$(stat -c "%y" out/soong/build.ninja)
- GENERATE_BAZEL_FILES=1 run_soong
+ run_soong bp2build
local output_mtime2=$(stat -c "%y" out/soong/build.ninja)
if [[ "$output_mtime1" != "$output_mtime2" ]]; then
fail "Output Ninja file changed when switching to bp2build"
@@ -732,7 +792,7 @@
fail "bp2build marker file changed when switching to regular build from bp2build"
fi
- GENERATE_BAZEL_FILES=1 run_soong
+ run_soong bp2build
local output_mtime4=$(stat -c "%y" out/soong/build.ninja)
local marker_mtime3=$(stat -c "%y" out/soong/.bootstrap/bp2build_workspace_marker)
if [[ "$output_mtime1" != "$output_mtime4" ]]; then
@@ -743,9 +803,32 @@
fi
}
+function test_queryview_smoke() {
+ setup
+
+ run_soong queryview
+ [[ -e out/soong/queryview/WORKSPACE ]] || fail "queryview WORKSPACE file not created"
+}
+
+function test_queryview_null_build() {
+ setup
+
+ run_soong queryview
+ local output_mtime1=$(stat -c "%y" out/soong/queryview.marker)
+
+ run_soong queryview
+ local output_mtime2=$(stat -c "%y" out/soong/queryview.marker)
+
+ if [[ "$output_mtime1" != "$output_mtime2" ]]; then
+ fail "Queryview marker file changed on null build"
+ fi
+}
+
test_smoke
test_null_build
-test_null_build_after_docs
+test_soong_docs_smoke
+test_null_build_after_soong_docs
test_soong_build_rebuilt_if_blueprint_changes
test_glob_noop_incremental
test_add_file_to_glob
@@ -755,8 +838,12 @@
test_add_file_to_soong_build
test_glob_during_bootstrapping
test_soong_build_rerun_iff_environment_changes
+test_multiple_soong_build_modes
test_dump_json_module_graph
+test_json_module_graph_back_and_forth_null_build
test_write_to_source_tree
+test_queryview_smoke
+test_queryview_null_build
test_bp2build_smoke
test_bp2build_generates_marker_file
test_bp2build_null_build
diff --git a/tests/bp2build_bazel_test.sh b/tests/bp2build_bazel_test.sh
index 9bd85a4..379eb65 100755
--- a/tests/bp2build_bazel_test.sh
+++ b/tests/bp2build_bazel_test.sh
@@ -10,10 +10,10 @@
function test_bp2build_null_build() {
setup
- run_bp2build
+ run_soong bp2build
local output_mtime1=$(stat -c "%y" out/soong/.bootstrap/bp2build_workspace_marker)
- run_bp2build
+ run_soong bp2build
local output_mtime2=$(stat -c "%y" out/soong/.bootstrap/bp2build_workspace_marker)
if [[ "$output_mtime1" != "$output_mtime2" ]]; then
@@ -35,10 +35,10 @@
EOF
touch foo/bar/a.txt foo/bar/b.txt
- run_bp2build
+ run_soong bp2build
local output_mtime1=$(stat -c "%y" out/soong/.bootstrap/bp2build_workspace_marker)
- run_bp2build
+ run_soong bp2build
local output_mtime2=$(stat -c "%y" out/soong/.bootstrap/bp2build_workspace_marker)
if [[ "$output_mtime1" != "$output_mtime2" ]]; then
@@ -80,7 +80,7 @@
}
EOF
- run_bp2build
+ run_soong bp2build
if [[ ! -f "./out/soong/workspace/foo/convertible_soong_module/${GENERATED_BUILD_FILE_NAME}" ]]; then
fail "./out/soong/workspace/foo/convertible_soong_module/${GENERATED_BUILD_FILE_NAME} was not generated"
diff --git a/tests/lib.sh b/tests/lib.sh
index 813a9dd..e777820 100644
--- a/tests/lib.sh
+++ b/tests/lib.sh
@@ -124,10 +124,6 @@
tools/bazel "$@"
}
-run_bp2build() {
- GENERATE_BAZEL_FILES=true build/soong/soong_ui.bash --make-mode --skip-ninja --skip-make --skip-soong-tests nothing
-}
-
run_ninja() {
build/soong/soong_ui.bash --make-mode --skip-make --skip-soong-tests "$@"
}
diff --git a/ui/build/build.go b/ui/build/build.go
index d869bf0..2e44aaa 100644
--- a/ui/build/build.go
+++ b/ui/build/build.go
@@ -248,6 +248,16 @@
what = what &^ RunNinja
}
+ if !config.SoongBuildInvocationNeeded() {
+	// This means that the output of soong_build is not needed and it would
+	// otherwise run unnecessarily. In addition, without this check, invocations
+	// with only special-cased target names like "m bp2build" would result in
+	// passing Ninja an empty target list, causing it to build the default
+	// targets, which is not what the user asked for.
+ what = what &^ RunNinja
+ what = what &^ RunKati
+ }
+
if config.StartGoma() {
startGoma(ctx, config)
}
@@ -278,16 +288,6 @@
if what&RunSoong != 0 {
runSoong(ctx, config)
-
- if config.bazelBuildMode() == generateBuildFiles {
- // Return early, if we're using Soong as solely the generator of BUILD files.
- return
- }
-
- if config.bazelBuildMode() == generateJsonModuleGraph {
- // Return early, if we're using Soong as solely the generator of the JSON module graph
- return
- }
}
if what&RunKati != 0 {
diff --git a/ui/build/config.go b/ui/build/config.go
index 956406d..35dacf2 100644
--- a/ui/build/config.go
+++ b/ui/build/config.go
@@ -33,7 +33,8 @@
type Config struct{ *configImpl }
type configImpl struct {
- // From the environment
+	// Some targets that are implemented in soong_build
+	// (bp2build, json-module-graph) are not listed here and have their own fields below.
arguments []string
goma bool
environ *Environment
@@ -41,17 +42,21 @@
buildDateTime string
// From the arguments
- parallel int
- keepGoing int
- verbose bool
- checkbuild bool
- dist bool
- skipConfig bool
- skipKati bool
- skipKatiNinja bool
- skipSoong bool
- skipNinja bool
- skipSoongTests bool
+ parallel int
+ keepGoing int
+ verbose bool
+ checkbuild bool
+ dist bool
+ jsonModuleGraph bool
+ bp2build bool
+ queryview bool
+ soongDocs bool
+ skipConfig bool
+ skipKati bool
+ skipKatiNinja bool
+ skipSoong bool
+ skipNinja bool
+ skipSoongTests bool
// From the product config
katiArgs []string
@@ -106,12 +111,6 @@
// Don't use bazel at all.
noBazel bazelBuildMode = iota
- // Only generate build files (in a subdirectory of the out directory) and exit.
- generateBuildFiles
-
- // Only generate the Soong json module graph for use with jq, and exit.
- generateJsonModuleGraph
-
// Generate synthetic build files and incorporate these files into a build which
// partially uses Bazel. Build metadata may come from Android.bp or BUILD files.
mixedBuild
@@ -639,6 +638,14 @@
c.environ.Set(k, v)
} else if arg == "dist" {
c.dist = true
+ } else if arg == "json-module-graph" {
+ c.jsonModuleGraph = true
+ } else if arg == "bp2build" {
+ c.bp2build = true
+ } else if arg == "queryview" {
+ c.queryview = true
+ } else if arg == "soong_docs" {
+ c.soongDocs = true
} else {
if arg == "checkbuild" {
c.checkbuild = true
@@ -705,6 +712,26 @@
return c.arguments
}
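+// SoongBuildInvocationNeeded reports whether the main soong_build invocation
+// (the one that generates build.ninja) is needed for this build.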
+func (c *configImpl) SoongBuildInvocationNeeded() bool {
+ if c.Dist() {
+ return true
+ }
+
+ if len(c.Arguments()) > 0 {
+		// Explicit targets were requested that are not special targets like bp2build
+		// or the JSON module graph.
+ return true
+ }
+
+ if !c.JsonModuleGraph() && !c.Bp2Build() && !c.Queryview() && !c.SoongDocs() {
+		// The command line was empty, so the default Ninja target is built.
+ return true
+ }
+
+ // build.ninja doesn't need to be generated
+ return false
+}
+
func (c *configImpl) OutDir() string {
if outDir, ok := c.environ.Get("OUT_DIR"); ok {
return outDir
@@ -739,6 +766,28 @@
return filepath.Join(c.OutDir(), "soong")
}
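+// PrebuiltOS returns the name of the prebuilts directory for the current
+// build machine OS, e.g. "linux-x86" on Linux hosts.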
+func (c *configImpl) PrebuiltOS() string {
+ switch runtime.GOOS {
+ case "linux":
+ return "linux-x86"
+ case "darwin":
+ return "darwin-x86"
+ default:
+ panic("Unknown GOOS")
+ }
+}
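+
+// HostToolDir returns the directory into which host tools used during the
+// build (such as bpglob) are built, e.g. out/soong/host/linux-x86/bin.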
+func (c *configImpl) HostToolDir() string {
+ return filepath.Join(c.SoongOutDir(), "host", c.PrebuiltOS(), "bin")
+}
+
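+// NamedGlobFile returns the path of the glob list Ninja file for the named
+// soong_build invocation, e.g. out/soong/.bootstrap/build-globs.bp2build.ninja
+// for the "bp2build" invocation.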
+func (c *configImpl) NamedGlobFile(name string) string {
+ return shared.JoinPath(c.SoongOutDir(), ".bootstrap/build-globs."+name+".ninja")
+}
+
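+// UsedEnvFile returns the path of the file that records the environment
+// variables used by the tagged soong_build invocation, e.g.
+// out/soong/soong.environment.used.bp2build for the "bp2build" tag.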
+func (c *configImpl) UsedEnvFile(tag string) string {
+ return shared.JoinPath(c.SoongOutDir(), usedEnvFile+"."+tag)
+}
+
func (c *configImpl) MainNinjaFile() string {
return shared.JoinPath(c.SoongOutDir(), "build.ninja")
}
@@ -747,6 +796,18 @@
return shared.JoinPath(c.SoongOutDir(), ".bootstrap/bp2build_workspace_marker")
}
+func (c *configImpl) SoongDocsHtml() string {
+ return shared.JoinPath(c.SoongOutDir(), "docs/soong_build.html")
+}
+
+func (c *configImpl) QueryviewMarkerFile() string {
+ return shared.JoinPath(c.SoongOutDir(), "queryview.marker")
+}
+
+func (c *configImpl) ModuleGraphFile() string {
+ return shared.JoinPath(c.SoongOutDir(), "module-graph.json")
+}
+
func (c *configImpl) TempDir() string {
return shared.TempDirForOutDir(c.SoongOutDir())
}
@@ -772,6 +833,22 @@
return c.dist
}
+func (c *configImpl) JsonModuleGraph() bool {
+ return c.jsonModuleGraph
+}
+
+func (c *configImpl) Bp2Build() bool {
+ return c.bp2build
+}
+
+func (c *configImpl) Queryview() bool {
+ return c.queryview
+}
+
+func (c *configImpl) SoongDocs() bool {
+ return c.soongDocs
+}
+
func (c *configImpl) IsVerbose() bool {
return c.verbose
}
@@ -917,10 +994,6 @@
func (c *configImpl) bazelBuildMode() bazelBuildMode {
if c.Environment().IsEnvTrue("USE_BAZEL_ANALYSIS") {
return mixedBuild
- } else if c.Environment().IsEnvTrue("GENERATE_BAZEL_FILES") {
- return generateBuildFiles
- } else if v, ok := c.Environment().Get("SOONG_DUMP_JSON_MODULE_GRAPH"); ok && v != "" {
- return generateJsonModuleGraph
} else {
return noBazel
}
diff --git a/ui/build/finder.go b/ui/build/finder.go
index 09d53cc..8f74969 100644
--- a/ui/build/finder.go
+++ b/ui/build/finder.go
@@ -15,15 +15,16 @@
package build
import (
- "android/soong/finder"
- "android/soong/finder/fs"
- "android/soong/ui/logger"
"bytes"
"io/ioutil"
"os"
"path/filepath"
"strings"
+ "android/soong/finder"
+ "android/soong/finder/fs"
+ "android/soong/ui/logger"
+
"android/soong/ui/metrics"
)
@@ -72,8 +73,6 @@
"AndroidProducts.mk",
// General Soong build definitions, using the Blueprint syntax.
"Android.bp",
- // build/blueprint build definitions, using the Blueprint syntax.
- "Blueprints",
// Bazel build definitions.
"BUILD.bazel",
// Bazel build definitions.
@@ -165,8 +164,6 @@
// Recursively look for all Android.bp files
androidBps := f.FindNamedAt(".", "Android.bp")
- // The files are named "Blueprints" only in the build/blueprint directory.
- androidBps = append(androidBps, f.FindNamedAt("build/blueprint", "Blueprints")...)
if len(androidBps) == 0 {
ctx.Fatalf("No Android.bp found")
}
diff --git a/ui/build/soong.go b/ui/build/soong.go
index 8ef8c74..617d293 100644
--- a/ui/build/soong.go
+++ b/ui/build/soong.go
@@ -37,6 +37,12 @@
const (
availableEnvFile = "soong.environment.available"
usedEnvFile = "soong.environment.used"
+
+ soongBuildTag = "build"
+ bp2buildTag = "bp2build"
+ jsonModuleGraphTag = "modulegraph"
+ queryviewTag = "queryview"
+ soongDocsTag = "soong_docs"
)
func writeEnvironmentFile(ctx Context, envFile string, envDeps map[string]string) error {
@@ -71,27 +77,47 @@
// A tiny struct used to tell Blueprint that it's in bootstrap mode. It would
// probably be nicer to use a flag in bootstrap.Args instead.
type BlueprintConfig struct {
- buildDir string
- ninjaBuildDir string
- debugCompilation bool
+ toolDir string
+ soongOutDir string
+ outDir string
+ runGoTests bool
+ debugCompilation bool
+ subninjas []string
+ primaryBuilderInvocations []bootstrap.PrimaryBuilderInvocation
}
-func (c BlueprintConfig) BuildDir() string {
- return c.buildDir
+func (c BlueprintConfig) HostToolDir() string {
+ return c.toolDir
}
-func (c BlueprintConfig) NinjaBuildDir() string {
- return c.ninjaBuildDir
+func (c BlueprintConfig) SoongOutDir() string {
+ return c.soongOutDir
+}
+
+func (c BlueprintConfig) OutDir() string {
+ return c.outDir
+}
+
+func (c BlueprintConfig) RunGoTests() bool {
+ return c.runGoTests
}
func (c BlueprintConfig) DebugCompilation() bool {
return c.debugCompilation
}
-func environmentArgs(config Config, suffix string) []string {
+func (c BlueprintConfig) Subninjas() []string {
+ return c.subninjas
+}
+
+func (c BlueprintConfig) PrimaryBuilderInvocations() []bootstrap.PrimaryBuilderInvocation {
+ return c.primaryBuilderInvocations
+}
+
+func environmentArgs(config Config, tag string) []string {
return []string{
"--available_env", shared.JoinPath(config.SoongOutDir(), availableEnvFile),
- "--used_env", shared.JoinPath(config.SoongOutDir(), usedEnvFile+suffix),
+ "--used_env", config.UsedEnvFile(tag),
}
}
@@ -109,93 +135,134 @@
}
}
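+// primaryBuilderInvocation constructs the bootstrap.PrimaryBuilderInvocation
+// for one soong_build mode: it combines the mode-specific arguments with the
+// common flags (module list, per-mode glob file and used-environment file)
+// and declares Android.bp as the input and the given file as the output.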
+func primaryBuilderInvocation(config Config, name string, output string, specificArgs []string) bootstrap.PrimaryBuilderInvocation {
+ commonArgs := make([]string, 0, 0)
+
+ if !config.skipSoongTests {
+ commonArgs = append(commonArgs, "-t")
+ }
+
+ commonArgs = append(commonArgs, "-l", filepath.Join(config.FileListDir(), "Android.bp.list"))
+
+ if os.Getenv("SOONG_DELVE") != "" {
+ commonArgs = append(commonArgs, "--delve_listen", os.Getenv("SOONG_DELVE"))
+ commonArgs = append(commonArgs, "--delve_path", shared.ResolveDelveBinary())
+ }
+
+ allArgs := make([]string, 0, 0)
+ allArgs = append(allArgs, specificArgs...)
+ allArgs = append(allArgs,
+ "--globListDir", name,
+ "--globFile", config.NamedGlobFile(name))
+
+ allArgs = append(allArgs, commonArgs...)
+ allArgs = append(allArgs, environmentArgs(config, name)...)
+ allArgs = append(allArgs, "Android.bp")
+
+ return bootstrap.PrimaryBuilderInvocation{
+ Inputs: []string{"Android.bp"},
+ Outputs: []string{output},
+ Args: allArgs,
+ }
+}
+
func bootstrapBlueprint(ctx Context, config Config) {
ctx.BeginTrace(metrics.RunSoong, "blueprint bootstrap")
defer ctx.EndTrace()
- var args bootstrap.Args
-
- bootstrapGlobFile := shared.JoinPath(config.SoongOutDir(), ".bootstrap/build-globs.ninja")
- bp2buildGlobFile := shared.JoinPath(config.SoongOutDir(), ".bootstrap/build-globs.bp2build.ninja")
-
- // The glob .ninja files are subninja'd. However, they are generated during
- // the build itself so we write an empty file so that the subninja doesn't
- // fail on clean builds
- writeEmptyGlobFile(ctx, bootstrapGlobFile)
- writeEmptyGlobFile(ctx, bp2buildGlobFile)
- bootstrapDepFile := shared.JoinPath(config.SoongOutDir(), ".bootstrap/build.ninja.d")
-
- args.RunGoTests = !config.skipSoongTests
- args.UseValidations = true // Use validations to depend on tests
- args.BuildDir = config.SoongOutDir()
- args.NinjaBuildDir = config.OutDir()
- args.TopFile = "Android.bp"
- args.ModuleListFile = filepath.Join(config.FileListDir(), "Android.bp.list")
- args.OutFile = shared.JoinPath(config.SoongOutDir(), ".bootstrap/build.ninja")
- // The primary builder (aka soong_build) will use bootstrapGlobFile as the globFile to generate build.ninja(.d)
- // Building soong_build does not require a glob file
- // Using "" instead of "<soong_build_glob>.ninja" will ensure that an unused glob file is not written to out/soong/.bootstrap during StagePrimary
- args.Subninjas = []string{bootstrapGlobFile, bp2buildGlobFile}
- args.GeneratingPrimaryBuilder = true
- args.EmptyNinjaFile = config.EmptyNinjaFile()
-
- args.DelveListen = os.Getenv("SOONG_DELVE")
- if args.DelveListen != "" {
- args.DelvePath = shared.ResolveDelveBinary()
+ mainSoongBuildExtraArgs := []string{"-o", config.MainNinjaFile()}
+ if config.EmptyNinjaFile() {
+ mainSoongBuildExtraArgs = append(mainSoongBuildExtraArgs, "--empty-ninja-file")
}
- commonArgs := bootstrap.PrimaryBuilderExtraFlags(args, config.MainNinjaFile())
- mainSoongBuildInputs := []string{"Android.bp"}
+ mainSoongBuildInvocation := primaryBuilderInvocation(
+ config,
+ soongBuildTag,
+ config.MainNinjaFile(),
+ mainSoongBuildExtraArgs)
if config.bazelBuildMode() == mixedBuild {
- mainSoongBuildInputs = append(mainSoongBuildInputs, config.Bp2BuildMarkerFile())
+ // Mixed builds call Bazel from soong_build and they therefore need the
+ // Bazel workspace to be available. Make that so by adding a dependency on
+		// the bp2build marker file to the action that invokes soong_build.
+ mainSoongBuildInvocation.Inputs = append(mainSoongBuildInvocation.Inputs,
+ config.Bp2BuildMarkerFile())
}
- soongBuildArgs := []string{
- "--globListDir", "globs",
- "--globFile", bootstrapGlobFile,
+ bp2buildInvocation := primaryBuilderInvocation(
+ config,
+ bp2buildTag,
+ config.Bp2BuildMarkerFile(),
+ []string{
+ "--bp2build_marker", config.Bp2BuildMarkerFile(),
+ })
+
+ jsonModuleGraphInvocation := primaryBuilderInvocation(
+ config,
+ jsonModuleGraphTag,
+ config.ModuleGraphFile(),
+ []string{
+ "--module_graph_file", config.ModuleGraphFile(),
+ })
+
+ queryviewInvocation := primaryBuilderInvocation(
+ config,
+ queryviewTag,
+ config.QueryviewMarkerFile(),
+ []string{
+ "--bazel_queryview_dir", filepath.Join(config.SoongOutDir(), "queryview"),
+ })
+
+ soongDocsInvocation := primaryBuilderInvocation(
+ config,
+ soongDocsTag,
+ config.SoongDocsHtml(),
+ []string{
+ "--soong_docs", config.SoongDocsHtml(),
+ })
+
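+	// One glob list file per soong_build invocation; they are subninja'd into
+	// the bootstrap Ninja file and therefore must exist before the first build.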
+ globFiles := []string{
+ config.NamedGlobFile(soongBuildTag),
+ config.NamedGlobFile(bp2buildTag),
+ config.NamedGlobFile(jsonModuleGraphTag),
+ config.NamedGlobFile(queryviewTag),
+ config.NamedGlobFile(soongDocsTag),
}
- soongBuildArgs = append(soongBuildArgs, commonArgs...)
- soongBuildArgs = append(soongBuildArgs, environmentArgs(config, "")...)
- soongBuildArgs = append(soongBuildArgs, "Android.bp")
-
- mainSoongBuildInvocation := bootstrap.PrimaryBuilderInvocation{
- Inputs: mainSoongBuildInputs,
- Outputs: []string{config.MainNinjaFile()},
- Args: soongBuildArgs,
+	// The glob .ninja files are subninja'd. However, they are generated during
+	// the build itself, so we write an empty file if one does not exist yet,
+	// so that the subninja doesn't fail on clean builds.
+ for _, globFile := range globFiles {
+ writeEmptyGlobFile(ctx, globFile)
}
- bp2buildArgs := []string{
- "--bp2build_marker", config.Bp2BuildMarkerFile(),
- "--globListDir", "globs.bp2build",
- "--globFile", bp2buildGlobFile,
- }
+ var blueprintArgs bootstrap.Args
- bp2buildArgs = append(bp2buildArgs, commonArgs...)
- bp2buildArgs = append(bp2buildArgs, environmentArgs(config, ".bp2build")...)
- bp2buildArgs = append(bp2buildArgs, "Android.bp")
-
- bp2buildInvocation := bootstrap.PrimaryBuilderInvocation{
- Inputs: []string{"Android.bp"},
- Outputs: []string{config.Bp2BuildMarkerFile()},
- Args: bp2buildArgs,
- }
- args.PrimaryBuilderInvocations = []bootstrap.PrimaryBuilderInvocation{
- bp2buildInvocation,
- mainSoongBuildInvocation,
- }
+ blueprintArgs.ModuleListFile = filepath.Join(config.FileListDir(), "Android.bp.list")
+ blueprintArgs.OutFile = shared.JoinPath(config.SoongOutDir(), ".bootstrap/build.ninja")
+ blueprintArgs.EmptyNinjaFile = false
blueprintCtx := blueprint.NewContext()
blueprintCtx.SetIgnoreUnknownModuleTypes(true)
blueprintConfig := BlueprintConfig{
- buildDir: config.SoongOutDir(),
- ninjaBuildDir: config.OutDir(),
+ soongOutDir: config.SoongOutDir(),
+ toolDir: config.HostToolDir(),
+ outDir: config.OutDir(),
+ runGoTests: !config.skipSoongTests,
+ // If we want to debug soong_build, we need to compile it for debugging
debugCompilation: os.Getenv("SOONG_DELVE") != "",
+ subninjas: globFiles,
+ primaryBuilderInvocations: []bootstrap.PrimaryBuilderInvocation{
+ mainSoongBuildInvocation,
+ bp2buildInvocation,
+ jsonModuleGraphInvocation,
+ queryviewInvocation,
+ soongDocsInvocation},
}
- bootstrapDeps := bootstrap.RunBlueprint(args, blueprintCtx, blueprintConfig)
- err := deptools.WriteDepFile(bootstrapDepFile, args.OutFile, bootstrapDeps)
+ bootstrapDeps := bootstrap.RunBlueprint(blueprintArgs, bootstrap.DoEverything, blueprintCtx, blueprintConfig)
+ bootstrapDepFile := shared.JoinPath(config.SoongOutDir(), ".bootstrap/build.ninja.d")
+ err := deptools.WriteDepFile(bootstrapDepFile, blueprintArgs.OutFile, bootstrapDeps)
if err != nil {
ctx.Fatalf("Error writing depfile '%s': %s", bootstrapDepFile, err)
}
@@ -206,6 +273,7 @@
v, _ := currentEnv.Get(k)
return v
}
+
if stale, _ := shared.StaleEnvFile(envFile, getenv); stale {
os.Remove(envFile)
}
@@ -227,7 +295,7 @@
}
buildMode := config.bazelBuildMode()
- integratedBp2Build := (buildMode == mixedBuild) || (buildMode == generateBuildFiles)
+ integratedBp2Build := buildMode == mixedBuild
// This is done unconditionally, but does not take a measurable amount of time
bootstrapBlueprint(ctx, config)
@@ -255,16 +323,26 @@
ctx.BeginTrace(metrics.RunSoong, "environment check")
defer ctx.EndTrace()
- soongBuildEnvFile := filepath.Join(config.SoongOutDir(), usedEnvFile)
- checkEnvironmentFile(soongBuildEnv, soongBuildEnvFile)
+ checkEnvironmentFile(soongBuildEnv, config.UsedEnvFile(soongBuildTag))
- if integratedBp2Build {
- bp2buildEnvFile := filepath.Join(config.SoongOutDir(), usedEnvFile+".bp2build")
- checkEnvironmentFile(soongBuildEnv, bp2buildEnvFile)
+ if integratedBp2Build || config.Bp2Build() {
+ checkEnvironmentFile(soongBuildEnv, config.UsedEnvFile(bp2buildTag))
+ }
+
+ if config.JsonModuleGraph() {
+ checkEnvironmentFile(soongBuildEnv, config.UsedEnvFile(jsonModuleGraphTag))
+ }
+
+ if config.Queryview() {
+ checkEnvironmentFile(soongBuildEnv, config.UsedEnvFile(queryviewTag))
+ }
+
+ if config.SoongDocs() {
+ checkEnvironmentFile(soongBuildEnv, config.UsedEnvFile(soongDocsTag))
}
}()
- runMicrofactory(ctx, config, ".bootstrap/bpglob", "github.com/google/blueprint/bootstrap/bpglob",
+ runMicrofactory(ctx, config, filepath.Join(config.HostToolDir(), "bpglob"), "github.com/google/blueprint/bootstrap/bpglob",
map[string]string{"github.com/google/blueprint": "build/blueprint"})
ninja := func(name, ninjaFile string, targets ...string) {
@@ -303,16 +381,30 @@
cmd.RunAndStreamOrFatal()
}
- var target string
+ targets := make([]string, 0, 0)
- if config.bazelBuildMode() == generateBuildFiles {
- target = config.Bp2BuildMarkerFile()
- } else {
- // This build generates <builddir>/build.ninja, which is used later by build/soong/ui/build/build.go#Build().
- target = config.MainNinjaFile()
+ if config.JsonModuleGraph() {
+ targets = append(targets, config.ModuleGraphFile())
}
- ninja("bootstrap", ".bootstrap/build.ninja", target)
+ if config.Bp2Build() {
+ targets = append(targets, config.Bp2BuildMarkerFile())
+ }
+
+ if config.Queryview() {
+ targets = append(targets, config.QueryviewMarkerFile())
+ }
+
+ if config.SoongDocs() {
+ targets = append(targets, config.SoongDocsHtml())
+ }
+
+ if config.SoongBuildInvocationNeeded() {
+ // This build generates <builddir>/build.ninja, which is used later by build/soong/ui/build/build.go#Build().
+ targets = append(targets, config.MainNinjaFile())
+ }
+
+ ninja("bootstrap", ".bootstrap/build.ninja", targets...)
var soongBuildMetrics *soong_metrics_proto.SoongBuildMetrics
if shouldCollectBuildSoongMetrics(config) {
@@ -354,7 +446,7 @@
func shouldCollectBuildSoongMetrics(config Config) bool {
// Do not collect metrics protobuf if the soong_build binary ran as the
// bp2build converter or the JSON graph dump.
- return config.bazelBuildMode() != generateBuildFiles && config.bazelBuildMode() != generateJsonModuleGraph
+ return config.SoongBuildInvocationNeeded()
}
func loadSoongBuildMetrics(ctx Context, config Config) *soong_metrics_proto.SoongBuildMetrics {
diff --git a/ui/build/test_build.go b/ui/build/test_build.go
index 57ceaba..f9a60b6 100644
--- a/ui/build/test_build.go
+++ b/ui/build/test_build.go
@@ -51,7 +51,6 @@
executable := config.PrebuiltBuildTool("ninja")
commonArgs := []string{}
- commonArgs = append(commonArgs, config.NinjaArgs()...)
commonArgs = append(commonArgs, "-f", config.CombinedNinjaFile())
args := append(commonArgs, "-t", "targets", "rule")