Merge "Soong: Add synopsis to cc_prebuilt_* modules."
diff --git a/android/rule_builder.go b/android/rule_builder.go
index a2a5366..2d0fac1 100644
--- a/android/rule_builder.go
+++ b/android/rule_builder.go
@@ -171,6 +171,20 @@
return outputList
}
+// DepFiles returns the list of paths that were passed to the RuleBuilderCommand methods that take depfile paths, such
+// as RuleBuilderCommand.DepFile or RuleBuilderCommand.FlagWithDepFile.
+func (r *RuleBuilder) DepFiles() WritablePaths {
+ var depFiles WritablePaths
+
+ for _, c := range r.commands {
+ for _, depFile := range c.depFiles {
+ depFiles = append(depFiles, depFile)
+ }
+ }
+
+ return depFiles
+}
+
// Installs returns the list of tuples passed to Install.
func (r *RuleBuilder) Installs() RuleBuilderInstalls {
return append(RuleBuilderInstalls(nil), r.installs...)
@@ -222,9 +236,17 @@
var _ BuilderContext = ModuleContext(nil)
var _ BuilderContext = SingletonContext(nil)
+func (r *RuleBuilder) depFileMergerCmd(ctx PathContext, depFiles WritablePaths) *RuleBuilderCommand {
+ return (&RuleBuilderCommand{}).
+ Tool(ctx.Config().HostToolPath(ctx, "dep_fixer")).
+ Flags(depFiles.Strings())
+}
+
// Build adds the built command line to the build graph, with dependencies on Inputs and Tools, and output files for
// Outputs.
func (r *RuleBuilder) Build(pctx PackageContext, ctx BuilderContext, name string, desc string) {
+ name = ninjaNameEscape(name)
+
if len(r.missingDeps) > 0 {
ctx.Build(pctx, BuildParams{
Rule: ErrorRule,
@@ -237,16 +259,45 @@
return
}
- if len(r.Commands()) > 0 {
+ tools := r.Tools()
+ commands := r.Commands()
+
+ var depFile WritablePath
+ var depFormat blueprint.Deps
+ if depFiles := r.DepFiles(); len(depFiles) > 0 {
+ depFile = depFiles[0]
+ depFormat = blueprint.DepsGCC
+ if len(depFiles) > 1 {
+ // Add a command locally that merges all depfiles together into the first depfile.
+ cmd := r.depFileMergerCmd(ctx, depFiles)
+ commands = append(commands, string(cmd.buf))
+ tools = append(tools, cmd.tools...)
+ }
+ }
+
+ // Ninja doesn't like multiple outputs when depfiles are enabled, move all but the first output to
+ // ImplicitOutputs. RuleBuilder never uses "$out", so the distinction between Outputs and ImplicitOutputs
+ // doesn't matter.
+ var output WritablePath
+ var implicitOutputs WritablePaths
+ if outputs := r.Outputs(); len(outputs) > 0 {
+ output = outputs[0]
+ implicitOutputs = outputs[1:]
+ }
+
+ if len(commands) > 0 {
ctx.Build(pctx, BuildParams{
Rule: ctx.Rule(pctx, name, blueprint.RuleParams{
- Command: strings.Join(proptools.NinjaEscapeList(r.Commands()), " && "),
- CommandDeps: r.Tools().Strings(),
+ Command: strings.Join(proptools.NinjaEscapeList(commands), " && "),
+ CommandDeps: tools.Strings(),
Restat: r.restat,
}),
- Implicits: r.Inputs(),
- Outputs: r.Outputs(),
- Description: desc,
+ Implicits: r.Inputs(),
+ Output: output,
+ ImplicitOutputs: implicitOutputs,
+ Depfile: depFile,
+ Deps: depFormat,
+ Description: desc,
})
}
}
@@ -256,10 +307,11 @@
// RuleBuilderCommand, so they can be used chained or unchained. All methods that add text implicitly add a single
// space as a separator from the previous method.
type RuleBuilderCommand struct {
- buf []byte
- inputs Paths
- outputs WritablePaths
- tools Paths
+ buf []byte
+ inputs Paths
+ outputs WritablePaths
+ depFiles WritablePaths
+ tools Paths
}
// Text adds the specified raw text to the command line. The text should not contain input or output paths or the
@@ -284,6 +336,15 @@
return c.Text(flag)
}
+// Flags adds the specified raw text to the command line. The text should not contain input or output paths or the
+// rule will not have them listed in its dependencies or outputs.
+func (c *RuleBuilderCommand) Flags(flags []string) *RuleBuilderCommand {
+ for _, flag := range flags {
+ c.Text(flag)
+ }
+ return c
+}
+
// FlagWithArg adds the specified flag and argument text to the command line, with no separator between them. The flag
// and argument should not contain input or output paths or the rule will not have them listed in its dependencies or
// outputs.
@@ -360,6 +421,14 @@
return c
}
+// DepFile adds the specified depfile path to the paths returned by RuleBuilder.DepFiles and adds it to the command
+// line, and causes RuleBuilder.Build to set the depfile flag for ninja. If multiple depfiles are added to
+// commands in a single RuleBuilder then RuleBuilder.Build will add an extra command to merge the depfiles together.
+func (c *RuleBuilderCommand) DepFile(path WritablePath) *RuleBuilderCommand {
+ c.depFiles = append(c.depFiles, path)
+ return c.Text(path.String())
+}
+
// ImplicitOutput adds the specified output path to the dependencies returned by RuleBuilder.Outputs without modifying
// the command line.
func (c *RuleBuilderCommand) ImplicitOutput(path WritablePath) *RuleBuilderCommand {
@@ -374,6 +443,15 @@
return c
}
+// ImplicitDepFile adds the specified depfile path to the paths returned by RuleBuilder.DepFiles without modifying
+// the command line, and causes RuleBuilder.Build to set the depfile flag for ninja. If multiple depfiles
+// are added to commands in a single RuleBuilder then RuleBuilder.Build will add an extra command to merge the
+// depfiles together.
+func (c *RuleBuilderCommand) ImplicitDepFile(path WritablePath) *RuleBuilderCommand {
+ c.depFiles = append(c.depFiles, path)
+ return c
+}
+
// FlagWithInput adds the specified flag and input path to the command line, with no separator between them. The path
// will also be added to the dependencies returned by RuleBuilder.Inputs.
func (c *RuleBuilderCommand) FlagWithInput(flag string, path Path) *RuleBuilderCommand {
@@ -406,7 +484,35 @@
return c.Text(flag + path.String())
}
+// FlagWithDepFile adds the specified flag and depfile path to the command line, with no separator between them. The path
+// will also be added to the depfiles returned by RuleBuilder.DepFiles.
+func (c *RuleBuilderCommand) FlagWithDepFile(flag string, path WritablePath) *RuleBuilderCommand {
+ c.depFiles = append(c.depFiles, path)
+ return c.Text(flag + path.String())
+}
+
// String returns the command line.
func (c *RuleBuilderCommand) String() string {
return string(c.buf)
}
+
+func ninjaNameEscape(s string) string {
+ b := []byte(s)
+ escaped := false
+ for i, c := range b {
+ valid := (c >= 'a' && c <= 'z') ||
+ (c >= 'A' && c <= 'Z') ||
+ (c >= '0' && c <= '9') ||
+ (c == '_') ||
+ (c == '-') ||
+ (c == '.')
+ if !valid {
+ b[i] = '_'
+ escaped = true
+ }
+ }
+ if escaped {
+ s = string(b)
+ }
+ return s
+}
diff --git a/android/rule_builder_test.go b/android/rule_builder_test.go
index 01d23e5..7bad025 100644
--- a/android/rule_builder_test.go
+++ b/android/rule_builder_test.go
@@ -171,6 +171,14 @@
// ls -l
}
+func ExampleRuleBuilderCommand_Flags() {
+ ctx := pathContext()
+ fmt.Println(NewRuleBuilder().Command().
+ Tool(PathForSource(ctx, "ls")).Flags([]string{"-l", "-a"}))
+ // Output:
+ // ls -l -a
+}
+
func ExampleRuleBuilderCommand_FlagWithArg() {
ctx := pathContext()
fmt.Println(NewRuleBuilder().Command().
@@ -229,23 +237,27 @@
rule := NewRuleBuilder()
fs := map[string][]byte{
- "input": nil,
- "Implicit": nil,
- "Input": nil,
- "Tool": nil,
- "input2": nil,
- "tool2": nil,
- "input3": nil,
+ "dep_fixer": nil,
+ "input": nil,
+ "Implicit": nil,
+ "Input": nil,
+ "Tool": nil,
+ "input2": nil,
+ "tool2": nil,
+ "input3": nil,
}
ctx := PathContextForTesting(TestConfig("out", nil), fs)
cmd := rule.Command().
+ DepFile(PathForOutput(ctx, "DepFile")).
Flag("Flag").
FlagWithArg("FlagWithArg=", "arg").
+ FlagWithDepFile("FlagWithDepFile=", PathForOutput(ctx, "depfile")).
FlagWithInput("FlagWithInput=", PathForSource(ctx, "input")).
FlagWithOutput("FlagWithOutput=", PathForOutput(ctx, "output")).
Implicit(PathForSource(ctx, "Implicit")).
+ ImplicitDepFile(PathForOutput(ctx, "ImplicitDepFile")).
ImplicitOutput(PathForOutput(ctx, "ImplicitOutput")).
Input(PathForSource(ctx, "Input")).
Output(PathForOutput(ctx, "Output")).
@@ -254,6 +266,7 @@
rule.Command().
Text("command2").
+ DepFile(PathForOutput(ctx, "depfile2")).
Input(PathForSource(ctx, "input2")).
Output(PathForOutput(ctx, "output2")).
Tool(PathForSource(ctx, "tool2"))
@@ -271,25 +284,37 @@
Output(PathForOutput(ctx, "output3"))
wantCommands := []string{
- "Flag FlagWithArg=arg FlagWithInput=input FlagWithOutput=out/output Input out/Output Text Tool after command2 old cmd",
- "command2 input2 out/output2 tool2",
+ "out/DepFile Flag FlagWithArg=arg FlagWithDepFile=out/depfile FlagWithInput=input FlagWithOutput=out/output Input out/Output Text Tool after command2 old cmd",
+ "command2 out/depfile2 input2 out/output2 tool2",
"command3 input3 out/output2 out/output3",
}
+
+ wantDepMergerCommand := "out/host/" + ctx.Config().PrebuiltOS() + "/bin/dep_fixer out/DepFile out/depfile out/ImplicitDepFile out/depfile2"
+
wantInputs := PathsForSource(ctx, []string{"Implicit", "Input", "input", "input2", "input3"})
wantOutputs := PathsForOutput(ctx, []string{"ImplicitOutput", "Output", "output", "output2", "output3"})
+ wantDepFiles := PathsForOutput(ctx, []string{"DepFile", "depfile", "ImplicitDepFile", "depfile2"})
wantTools := PathsForSource(ctx, []string{"Tool", "tool2"})
- if !reflect.DeepEqual(rule.Commands(), wantCommands) {
- t.Errorf("\nwant rule.Commands() = %#v\n got %#v", wantCommands, rule.Commands())
+ if g, w := rule.Commands(), wantCommands; !reflect.DeepEqual(g, w) {
+ t.Errorf("\nwant rule.Commands() = %#v\n got %#v", w, g)
}
- if !reflect.DeepEqual(rule.Inputs(), wantInputs) {
- t.Errorf("\nwant rule.Inputs() = %#v\n got %#v", wantInputs, rule.Inputs())
+
+ if g, w := rule.depFileMergerCmd(ctx, rule.DepFiles()).String(), wantDepMergerCommand; g != w {
+ t.Errorf("\nwant rule.depFileMergerCmd() = %#v\n got %#v", w, g)
}
- if !reflect.DeepEqual(rule.Outputs(), wantOutputs) {
- t.Errorf("\nwant rule.Outputs() = %#v\n got %#v", wantOutputs, rule.Outputs())
+
+ if g, w := rule.Inputs(), wantInputs; !reflect.DeepEqual(w, g) {
+ t.Errorf("\nwant rule.Inputs() = %#v\n got %#v", w, g)
}
- if !reflect.DeepEqual(rule.Tools(), wantTools) {
- t.Errorf("\nwant rule.Tools() = %#v\n got %#v", wantTools, rule.Tools())
+ if g, w := rule.Outputs(), wantOutputs; !reflect.DeepEqual(w, g) {
+ t.Errorf("\nwant rule.Outputs() = %#v\n got %#v", w, g)
+ }
+ if g, w := rule.DepFiles(), wantDepFiles; !reflect.DeepEqual(w, g) {
+ t.Errorf("\nwant rule.DepFiles() = %#v\n got %#v", w, g)
+ }
+ if g, w := rule.Tools(), wantTools; !reflect.DeepEqual(w, g) {
+ t.Errorf("\nwant rule.Tools() = %#v\n got %#v", w, g)
}
}
@@ -375,8 +400,8 @@
t.Errorf("want Implicits = [%q], got %q", "bar", params.Implicits.Strings())
}
- if len(params.Outputs) != 1 || params.Outputs[0].String() != wantOutput {
- t.Errorf("want Outputs = [%q], got %q", wantOutput, params.Outputs.Strings())
+ if params.Output.String() != wantOutput {
+ t.Errorf("want Output = %q, got %q", wantOutput, params.Output)
}
if !params.RuleParams.Restat {
diff --git a/android/testing.go b/android/testing.go
index 7f443a3..0ec5af5 100644
--- a/android/testing.go
+++ b/android/testing.go
@@ -196,6 +196,7 @@
var searchedOutputs []string
for _, p := range provider.BuildParamsForTests() {
outputs := append(WritablePaths(nil), p.Outputs...)
+ outputs = append(outputs, p.ImplicitOutputs...)
if p.Output != nil {
outputs = append(outputs, p.Output)
}
@@ -222,6 +223,7 @@
var outputFullPaths []string
for _, p := range provider.BuildParamsForTests() {
outputs := append(WritablePaths(nil), p.Outputs...)
+ outputs = append(outputs, p.ImplicitOutputs...)
if p.Output != nil {
outputs = append(outputs, p.Output)
}
diff --git a/apex/apex.go b/apex/apex.go
index e07fae0..ce1ed46 100644
--- a/apex/apex.go
+++ b/apex/apex.go
@@ -396,9 +396,8 @@
outputFiles map[apexPackaging]android.WritablePath
installDir android.OutputPath
- public_key_file android.Path
- private_key_file android.Path
- bundle_public_key bool
+ public_key_file android.Path
+ private_key_file android.Path
container_certificate_file android.Path
container_private_key_file android.Path
@@ -746,10 +745,6 @@
if key, ok := child.(*apexKey); ok {
a.private_key_file = key.private_key_file
a.public_key_file = key.public_key_file
- // If the key is not installed, bundled it with the APEX.
- // Note: this bundled key is valid only for non-production builds
- // (eng/userdebug).
- a.bundle_public_key = !key.installable() && ctx.Config().Debuggable()
return false
} else {
ctx.PropertyErrorf("key", "%q is not an apex_key module", depName)
@@ -968,11 +963,8 @@
optFlags := []string{}
// Additional implicit inputs.
- implicitInputs = append(implicitInputs, cannedFsConfig, fileContexts, a.private_key_file)
- if a.bundle_public_key {
- implicitInputs = append(implicitInputs, a.public_key_file)
- optFlags = append(optFlags, "--pubkey "+a.public_key_file.String())
- }
+ implicitInputs = append(implicitInputs, cannedFsConfig, fileContexts, a.private_key_file, a.public_key_file)
+ optFlags = append(optFlags, "--pubkey "+a.public_key_file.String())
manifestPackageName, overridden := ctx.DeviceConfig().OverrideManifestPackageNameFor(ctx.ModuleName())
if overridden {
@@ -1057,7 +1049,7 @@
func (a *apexBundle) buildFlattenedApex(ctx android.ModuleContext) {
if a.installable() {
- // For flattened APEX, do nothing but make sure that apex_manifest.json file is also copied along
+ // For flattened APEX, do nothing but make sure that apex_manifest.json and apex_pubkey are also copied along
// with other ordinary files.
manifest := android.PathForModuleSrc(ctx, proptools.StringDefault(a.properties.Manifest, "apex_manifest.json"))
@@ -1070,6 +1062,15 @@
})
a.filesInfo = append(a.filesInfo, apexFile{copiedManifest, ctx.ModuleName() + ".apex_manifest.json", ".", etc, nil, nil})
+ // rename to apex_pubkey
+ copiedPubkey := android.PathForModuleOut(ctx, "apex_pubkey")
+ ctx.Build(pctx, android.BuildParams{
+ Rule: android.Cp,
+ Input: a.public_key_file,
+ Output: copiedPubkey,
+ })
+ a.filesInfo = append(a.filesInfo, apexFile{copiedPubkey, ctx.ModuleName() + ".apex_pubkey", ".", etc, nil, nil})
+
if ctx.Config().FlattenApex() {
for _, fi := range a.filesInfo {
dir := filepath.Join("apex", ctx.ModuleName(), fi.installDir)
@@ -1215,7 +1216,6 @@
fmt.Fprintln(w, "LOCAL_MODULE_PATH :=", filepath.Join("$(OUT_DIR)", a.installDir.RelPathString()))
fmt.Fprintln(w, "LOCAL_MODULE_STEM :=", name+apexType.suffix())
fmt.Fprintln(w, "LOCAL_UNINSTALLABLE_MODULE :=", !a.installable())
- fmt.Fprintln(w, "LOCAL_REQUIRED_MODULES :=", String(a.properties.Key))
if a.installable() && a.mergedNoticeFile != nil {
fmt.Fprintln(w, "LOCAL_NOTICE_FILE :=", a.mergedNoticeFile.String())
}
@@ -1317,18 +1317,9 @@
Src *string
}
}
-
- // the name of the apex_key module that contains the matching public key to be installed.
- Key *string
}
func (p *Prebuilt) DepsMutator(ctx android.BottomUpMutatorContext) {
- if String(p.properties.Key) == "" {
- ctx.ModuleErrorf("key is missing")
- return
- }
- ctx.AddDependency(ctx.Module(), keyTag, *p.properties.Key)
-
// This is called before prebuilt_select and prebuilt_postdeps mutators
// The mutators requires that src to be set correctly for each arch so that
// arch variants are disabled when src is not provided for the arch.
@@ -1380,7 +1371,6 @@
func(w io.Writer, outputFile android.Path) {
fmt.Fprintln(w, "LOCAL_MODULE_PATH :=", filepath.Join("$(OUT_DIR)", p.installDir.RelPathString()))
fmt.Fprintln(w, "LOCAL_MODULE_STEM :=", p.BaseModuleName()+imageApexSuffix)
- fmt.Fprintln(w, "LOCAL_REQUIRED_MODULES :=", String(p.properties.Key))
},
},
}
diff --git a/apex/apex_test.go b/apex/apex_test.go
index 1e8d5b4..6d101d8 100644
--- a/apex/apex_test.go
+++ b/apex/apex_test.go
@@ -15,8 +15,6 @@
package apex
import (
- "bufio"
- "bytes"
"io/ioutil"
"os"
"strings"
@@ -299,6 +297,10 @@
`)
apexRule := ctx.ModuleForTests("myapex", "android_common_myapex").Rule("apexRule")
+
+ optFlags := apexRule.Args["opt_flags"]
+ ensureContains(t, optFlags, "--pubkey vendor/foo/devkeys/testkey.avbpubkey")
+
copyCmds := apexRule.Args["copy_commands"]
// Ensure that main rule creates an output
@@ -1197,14 +1199,6 @@
if actual != expected {
t.Errorf("wrong install path. expected %q. actual %q", expected, actual)
}
-
- apex_key := ctx.ModuleForTests("myapex.key", "android_common").Module().(*apexKey)
- expected = "target/product/test_device/product/etc/security/apex"
- actual = apex_key.installDir.RelPathString()
- if actual != expected {
- t.Errorf("wrong install path. expected %q. actual %q", expected, actual)
- }
-
}
func TestApexKeyFromOtherModule(t *testing.T) {
@@ -1252,14 +1246,6 @@
src: "myapex-arm.apex",
},
},
- key: "myapex.key"
- }
-
- apex_key {
- name: "myapex.key",
- public_key: "testkey.avbpubkey",
- private_key: "testkey.pem",
- product_specific: true,
}
`)
@@ -1269,24 +1255,4 @@
if prebuilt.inputApex.String() != expectedInput {
t.Errorf("inputApex invalid. expected: %q, actual: %q", expectedInput, prebuilt.inputApex.String())
}
-
- // Check if the key module is added as a required module.
- buf := &bytes.Buffer{}
- prebuilt.AndroidMk().Extra[0](buf, nil)
- found := false
- scanner := bufio.NewScanner(bytes.NewReader(buf.Bytes()))
- expected := "myapex.key"
- for scanner.Scan() {
- line := scanner.Text()
- tok := strings.Split(line, " := ")
- if tok[0] == "LOCAL_REQUIRED_MODULES" {
- found = true
- if tok[1] != "myapex.key" {
- t.Errorf("Unexpected LOCAL_REQUIRED_MODULES '%s', expected '%s'", tok[1], expected)
- }
- }
- }
- if !found {
- t.Errorf("Couldn't find a LOCAL_REQUIRED_MODULES entry")
- }
}
diff --git a/apex/key.go b/apex/key.go
index fbd29bc..a627e4b 100644
--- a/apex/key.go
+++ b/apex/key.go
@@ -16,8 +16,6 @@
import (
"fmt"
- "io"
- "path/filepath"
"strings"
"android/soong/android"
@@ -39,7 +37,6 @@
public_key_file android.Path
private_key_file android.Path
- installDir android.OutputPath
keyName string
}
@@ -64,7 +61,7 @@
}
func (m *apexKey) installable() bool {
- return m.properties.Installable == nil || proptools.Bool(m.properties.Installable)
+ return false
}
func (m *apexKey) GenerateAndroidBuildActions(ctx android.ModuleContext) {
@@ -99,25 +96,6 @@
return
}
m.keyName = pubKeyName
-
- m.installDir = android.PathForModuleInstall(ctx, "etc/security/apex")
- if m.installable() {
- ctx.InstallFile(m.installDir, m.keyName, m.public_key_file)
- }
-}
-
-func (m *apexKey) AndroidMk() android.AndroidMkData {
- return android.AndroidMkData{
- Class: "ETC",
- OutputFile: android.OptionalPathForPath(m.public_key_file),
- Extra: []android.AndroidMkExtraFunc{
- func(w io.Writer, outputFile android.Path) {
- fmt.Fprintln(w, "LOCAL_MODULE_PATH :=", filepath.Join("$(OUT_DIR)", m.installDir.RelPathString()))
- fmt.Fprintln(w, "LOCAL_INSTALLED_MODULE_STEM :=", m.keyName)
- fmt.Fprintln(w, "LOCAL_UNINSTALLABLE_MODULE :=", !m.installable())
- },
- },
- }
}
////////////////////////////////////////////////////////////////////////
diff --git a/cmd/dep_fixer/main.go b/cmd/dep_fixer/main.go
index 0647fb2..f94cf2f 100644
--- a/cmd/dep_fixer/main.go
+++ b/cmd/dep_fixer/main.go
@@ -29,30 +29,42 @@
func main() {
flag.Usage = func() {
- fmt.Fprintf(os.Stderr, "Usage: %s <depfile.d>", os.Args[0])
+ fmt.Fprintf(os.Stderr, "Usage: %s [-o <output>] <depfile.d> [<depfile.d>...]", os.Args[0])
flag.PrintDefaults()
}
output := flag.String("o", "", "Optional output file (defaults to rewriting source if necessary)")
flag.Parse()
- if flag.NArg() != 1 {
- log.Fatal("Expected a single file as an argument")
+ if flag.NArg() < 1 {
+ log.Fatal("Expected at least one input file as an argument")
}
- old, err := ioutil.ReadFile(flag.Arg(0))
- if err != nil {
- log.Fatalf("Error opening %q: %v", flag.Arg(0), err)
+ var mergedDeps *Deps
+ var firstInput []byte
+
+ for i, arg := range flag.Args() {
+ input, err := ioutil.ReadFile(arg)
+ if err != nil {
+ log.Fatalf("Error opening %q: %v", arg, err)
+ }
+
+ deps, err := Parse(arg, bytes.NewBuffer(append([]byte(nil), input...)))
+ if err != nil {
+ log.Fatalf("Failed to parse: %v", err)
+ }
+
+ if i == 0 {
+ mergedDeps = deps
+ firstInput = input
+ } else {
+ mergedDeps.Inputs = append(mergedDeps.Inputs, deps.Inputs...)
+ }
}
- deps, err := Parse(flag.Arg(0), bytes.NewBuffer(append([]byte(nil), old...)))
- if err != nil {
- log.Fatalf("Failed to parse: %v", err)
- }
-
- new := deps.Print()
+ new := mergedDeps.Print()
if *output == "" || *output == flag.Arg(0) {
- if !bytes.Equal(old, new) {
+ if !bytes.Equal(firstInput, new) {
err := ioutil.WriteFile(flag.Arg(0), new, 0666)
if err != nil {
log.Fatalf("Failed to write: %v", err)
diff --git a/cmd/diff_target_files/Android.bp b/cmd/diff_target_files/Android.bp
new file mode 100644
index 0000000..5397f4b
--- /dev/null
+++ b/cmd/diff_target_files/Android.bp
@@ -0,0 +1,16 @@
+blueprint_go_binary {
+ name: "diff_target_files",
+ srcs: [
+ "compare.go",
+ "diff_target_files.go",
+ "glob.go",
+ "target_files.go",
+ "whitelist.go",
+ "zip_artifact.go",
+ ],
+ testSrcs: [
+ "compare_test.go",
+ "glob_test.go",
+ "whitelist_test.go",
+ ],
+}
diff --git a/cmd/diff_target_files/compare.go b/cmd/diff_target_files/compare.go
new file mode 100644
index 0000000..00cd9ca
--- /dev/null
+++ b/cmd/diff_target_files/compare.go
@@ -0,0 +1,133 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bytes"
+ "fmt"
+)
+
+// compareTargetFiles takes two ZipArtifacts and compares the files they contain by examining
+// the path, size, and CRC of each file.
+func compareTargetFiles(priZip, refZip ZipArtifact, artifact string, whitelists []whitelist, filters []string) (zipDiff, error) {
+ priZipFiles, err := priZip.Files()
+ if err != nil {
+ return zipDiff{}, fmt.Errorf("error fetching target file lists from primary zip %v", err)
+ }
+
+ refZipFiles, err := refZip.Files()
+ if err != nil {
+ return zipDiff{}, fmt.Errorf("error fetching target file lists from reference zip %v", err)
+ }
+
+ priZipFiles, err = filterTargetZipFiles(priZipFiles, artifact, filters)
+ if err != nil {
+ return zipDiff{}, err
+ }
+
+ refZipFiles, err = filterTargetZipFiles(refZipFiles, artifact, filters)
+ if err != nil {
+ return zipDiff{}, err
+ }
+
+ // Compare the file lists from both builds
+ diff := diffTargetFilesLists(refZipFiles, priZipFiles)
+
+ return applyWhitelists(diff, whitelists)
+}
+
+// zipDiff contains the list of files that differ between two zip files.
+type zipDiff struct {
+ modified [][2]*ZipArtifactFile
+ onlyInA, onlyInB []*ZipArtifactFile
+}
+
+// String pretty-prints the list of files that differ between two zip files.
+func (d *zipDiff) String() string {
+ buf := &bytes.Buffer{}
+
+ must := func(n int, err error) {
+ if err != nil {
+ panic(err)
+ }
+ }
+
+ var sizeChange int64
+
+ if len(d.modified) > 0 {
+ must(fmt.Fprintln(buf, "files modified:"))
+ for _, f := range d.modified {
+ must(fmt.Fprintf(buf, " %v (%v bytes -> %v bytes)\n", f[0].Name, f[0].UncompressedSize64, f[1].UncompressedSize64))
+ sizeChange += int64(f[1].UncompressedSize64) - int64(f[0].UncompressedSize64)
+ }
+ }
+
+ if len(d.onlyInA) > 0 {
+ must(fmt.Fprintln(buf, "files removed:"))
+ for _, f := range d.onlyInA {
+ must(fmt.Fprintf(buf, " - %v (%v bytes)\n", f.Name, f.UncompressedSize64))
+ sizeChange -= int64(f.UncompressedSize64)
+ }
+ }
+
+ if len(d.onlyInB) > 0 {
+ must(fmt.Fprintln(buf, "files added:"))
+ for _, f := range d.onlyInB {
+ must(fmt.Fprintf(buf, " + %v (%v bytes)\n", f.Name, f.UncompressedSize64))
+ sizeChange += int64(f.UncompressedSize64)
+ }
+ }
+
+ if len(d.modified) > 0 || len(d.onlyInA) > 0 || len(d.onlyInB) > 0 {
+ must(fmt.Fprintf(buf, "total size change: %v bytes\n", sizeChange))
+ }
+
+ return buf.String()
+}
+
+func diffTargetFilesLists(a, b []*ZipArtifactFile) zipDiff {
+ i := 0
+ j := 0
+
+ diff := zipDiff{}
+
+ for i < len(a) && j < len(b) {
+ if a[i].Name == b[j].Name {
+ if a[i].UncompressedSize64 != b[j].UncompressedSize64 || a[i].CRC32 != b[j].CRC32 {
+ diff.modified = append(diff.modified, [2]*ZipArtifactFile{a[i], b[j]})
+ }
+ i++
+ j++
+ } else if a[i].Name < b[j].Name {
+ // a[i] is not present in b
+ diff.onlyInA = append(diff.onlyInA, a[i])
+ i++
+ } else {
+ // b[j] is not present in a
+ diff.onlyInB = append(diff.onlyInB, b[j])
+ j++
+ }
+ }
+ for i < len(a) {
+ diff.onlyInA = append(diff.onlyInA, a[i])
+ i++
+ }
+ for j < len(b) {
+ diff.onlyInB = append(diff.onlyInB, b[j])
+ j++
+ }
+
+ return diff
+}
diff --git a/cmd/diff_target_files/compare_test.go b/cmd/diff_target_files/compare_test.go
new file mode 100644
index 0000000..9d3f8a5
--- /dev/null
+++ b/cmd/diff_target_files/compare_test.go
@@ -0,0 +1,131 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "archive/zip"
+ "reflect"
+ "testing"
+)
+
+func TestDiffTargetFilesLists(t *testing.T) {
+ zipArtifactFile := func(name string, crc32 uint32, size uint64) *ZipArtifactFile {
+ return &ZipArtifactFile{
+ File: &zip.File{
+ FileHeader: zip.FileHeader{
+ Name: name,
+ CRC32: crc32,
+ UncompressedSize64: size,
+ },
+ },
+ }
+ }
+ x0 := zipArtifactFile("x", 0, 0)
+ x1 := zipArtifactFile("x", 1, 0)
+ x2 := zipArtifactFile("x", 0, 2)
+ y0 := zipArtifactFile("y", 0, 0)
+ //y1 := zipArtifactFile("y", 1, 0)
+ //y2 := zipArtifactFile("y", 1, 2)
+ z0 := zipArtifactFile("z", 0, 0)
+ z1 := zipArtifactFile("z", 1, 0)
+ //z2 := zipArtifactFile("z", 1, 2)
+
+ testCases := []struct {
+ name string
+ a, b []*ZipArtifactFile
+ diff zipDiff
+ }{
+ {
+ name: "same",
+ a: []*ZipArtifactFile{x0, y0, z0},
+ b: []*ZipArtifactFile{x0, y0, z0},
+ diff: zipDiff{nil, nil, nil},
+ },
+ {
+ name: "first only in a",
+ a: []*ZipArtifactFile{x0, y0, z0},
+ b: []*ZipArtifactFile{y0, z0},
+ diff: zipDiff{nil, []*ZipArtifactFile{x0}, nil},
+ },
+ {
+ name: "middle only in a",
+ a: []*ZipArtifactFile{x0, y0, z0},
+ b: []*ZipArtifactFile{x0, z0},
+ diff: zipDiff{nil, []*ZipArtifactFile{y0}, nil},
+ },
+ {
+ name: "last only in a",
+ a: []*ZipArtifactFile{x0, y0, z0},
+ b: []*ZipArtifactFile{x0, y0},
+ diff: zipDiff{nil, []*ZipArtifactFile{z0}, nil},
+ },
+
+ {
+ name: "first only in b",
+ a: []*ZipArtifactFile{y0, z0},
+ b: []*ZipArtifactFile{x0, y0, z0},
+ diff: zipDiff{nil, nil, []*ZipArtifactFile{x0}},
+ },
+ {
+ name: "middle only in b",
+ a: []*ZipArtifactFile{x0, z0},
+ b: []*ZipArtifactFile{x0, y0, z0},
+ diff: zipDiff{nil, nil, []*ZipArtifactFile{y0}},
+ },
+ {
+ name: "last only in b",
+ a: []*ZipArtifactFile{x0, y0},
+ b: []*ZipArtifactFile{x0, y0, z0},
+ diff: zipDiff{nil, nil, []*ZipArtifactFile{z0}},
+ },
+
+ {
+ name: "diff",
+ a: []*ZipArtifactFile{x0},
+ b: []*ZipArtifactFile{x1},
+ diff: zipDiff{[][2]*ZipArtifactFile{{x0, x1}}, nil, nil},
+ },
+ {
+ name: "diff plus unique last",
+ a: []*ZipArtifactFile{x0, y0},
+ b: []*ZipArtifactFile{x1, z0},
+ diff: zipDiff{[][2]*ZipArtifactFile{{x0, x1}}, []*ZipArtifactFile{y0}, []*ZipArtifactFile{z0}},
+ },
+ {
+ name: "diff plus unique first",
+ a: []*ZipArtifactFile{x0, z0},
+ b: []*ZipArtifactFile{y0, z1},
+ diff: zipDiff{[][2]*ZipArtifactFile{{z0, z1}}, []*ZipArtifactFile{x0}, []*ZipArtifactFile{y0}},
+ },
+ {
+ name: "diff size",
+ a: []*ZipArtifactFile{x0},
+ b: []*ZipArtifactFile{x2},
+ diff: zipDiff{[][2]*ZipArtifactFile{{x0, x2}}, nil, nil},
+ },
+ }
+
+ for _, test := range testCases {
+ t.Run(test.name, func(t *testing.T) {
+ diff := diffTargetFilesLists(test.a, test.b)
+
+ if !reflect.DeepEqual(diff, test.diff) {
+
+ t.Errorf("diffTargetFilesLists = %v, %v, %v", diff.modified, diff.onlyInA, diff.onlyInB)
+ t.Errorf(" want %v, %v, %v", test.diff.modified, test.diff.onlyInA, test.diff.onlyInB)
+ }
+ })
+ }
+}
diff --git a/cmd/diff_target_files/diff_target_files.go b/cmd/diff_target_files/diff_target_files.go
new file mode 100644
index 0000000..75bc8ee
--- /dev/null
+++ b/cmd/diff_target_files/diff_target_files.go
@@ -0,0 +1,82 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "flag"
+ "fmt"
+ "os"
+ "strings"
+)
+
+var (
+ whitelists = newMultiString("whitelist", "whitelist patterns in the form <pattern>[:<regex of line to ignore>]")
+ whitelistFiles = newMultiString("whitelist_file", "files containing whitelist definitions")
+
+ filters = newMultiString("filter", "filter patterns to apply to files in target-files.zip before comparing")
+)
+
+func newMultiString(name, usage string) *multiString {
+ var f multiString
+ flag.Var(&f, name, usage)
+ return &f
+}
+
+type multiString []string
+
+func (ms *multiString) String() string { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+ flag.Parse()
+
+ if flag.NArg() != 2 {
+ fmt.Fprintf(os.Stderr, "Error, exactly two arguments are required\n")
+ os.Exit(1)
+ }
+
+ whitelists, err := parseWhitelists(*whitelists, *whitelistFiles)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "Error parsing whitelists: %v\n", err)
+ os.Exit(1)
+ }
+
+ priZip, err := NewLocalZipArtifact(flag.Arg(0))
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "Error opening zip file %v: %v\n", flag.Arg(0), err)
+ os.Exit(1)
+ }
+ defer priZip.Close()
+
+ refZip, err := NewLocalZipArtifact(flag.Arg(1))
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "Error opening zip file %v: %v\n", flag.Arg(1), err)
+ os.Exit(1)
+ }
+ defer refZip.Close()
+
+ diff, err := compareTargetFiles(priZip, refZip, targetFilesPattern, whitelists, *filters)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "Error comparing zip files: %v\n", err)
+ os.Exit(1)
+ }
+
+ fmt.Print(diff.String())
+
+ if len(diff.modified) > 0 || len(diff.onlyInA) > 0 || len(diff.onlyInB) > 0 {
+ fmt.Fprintln(os.Stderr, "differences found")
+ os.Exit(1)
+ }
+}
diff --git a/cmd/diff_target_files/glob.go b/cmd/diff_target_files/glob.go
new file mode 100644
index 0000000..ed91af7
--- /dev/null
+++ b/cmd/diff_target_files/glob.go
@@ -0,0 +1,81 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "errors"
+ "path/filepath"
+ "strings"
+)
+
// Match returns true if name matches pattern using the same rules as filepath.Match, but supporting
// recursive globs (**).
//
// A pattern or name with a trailing '/' denotes a directory; a directory pattern matches only a
// directory name.  "**" must stand alone as a path element and must not be the pattern's last
// element.
func Match(pattern, name string) (bool, error) {
	if filepath.Base(pattern) == "**" {
		return false, errors.New("pattern has '**' as last path element")
	}

	// A trailing '/' marks a directory; directory-ness must agree before contents are compared.
	patternDir := pattern[len(pattern)-1] == '/'
	nameDir := name[len(name)-1] == '/'

	if patternDir != nameDir {
		return false, nil
	}

	// Strip the trailing '/' so the per-element loop below sees only path elements.
	if nameDir {
		name = name[:len(name)-1]
		pattern = pattern[:len(pattern)-1]
	}

	// Compare pattern and name one path element at a time, from the last element backwards.
	for {
		var patternFile, nameFile string
		pattern, patternFile = filepath.Dir(pattern), filepath.Base(pattern)

		if patternFile == "**" {
			if strings.Contains(pattern, "**") {
				return false, errors.New("pattern contains multiple '**'")
			}
			// Test if any prefix of name matches the part of the pattern before the '**'.
			for {
				if name == "." || name == "/" {
					// name is exhausted; succeed only if the remaining pattern is
					// exhausted the same way (same relative/absolute root).
					return name == pattern, nil
				}
				if match, err := filepath.Match(pattern, name); err != nil {
					return false, err
				} else if match {
					return true, nil
				}
				name = filepath.Dir(name)
			}
		} else if strings.Contains(patternFile, "**") {
			return false, errors.New("pattern contains other characters between '**' and path separator")
		}

		name, nameFile = filepath.Dir(name), filepath.Base(name)

		// filepath.Dir yields "." (relative) or "/" (absolute) when a path runs out.  Both
		// sides must run out together, with the same kind of root, for a match.
		if nameFile == "." && patternFile == "." {
			return true, nil
		} else if nameFile == "/" && patternFile == "/" {
			return true, nil
		} else if nameFile == "." || patternFile == "." || nameFile == "/" || patternFile == "/" {
			return false, nil
		}

		match, err := filepath.Match(patternFile, nameFile)
		if err != nil || !match {
			return match, err
		}
	}
}
diff --git a/cmd/diff_target_files/glob_test.go b/cmd/diff_target_files/glob_test.go
new file mode 100644
index 0000000..63df68d
--- /dev/null
+++ b/cmd/diff_target_files/glob_test.go
@@ -0,0 +1,158 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "testing"
+)
+
// TestMatch exercises Match over literal patterns, recursive globs (**), escaped
// metacharacters, directory (trailing '/') handling, and absolute paths.
func TestMatch(t *testing.T) {
	testCases := []struct {
		pattern, name string
		match         bool
	}{
		// Non-matching leading directory.
		{"a/*", "b/", false},
		{"a/*", "b/a", false},
		{"a/*", "b/b/", false},
		{"a/*", "b/b/c", false},
		{"a/**/*", "b/", false},
		{"a/**/*", "b/a", false},
		{"a/**/*", "b/b/", false},
		{"a/**/*", "b/b/c", false},

		// Single-level glob: matches files one level down only.
		{"a/*", "a/", false},
		{"a/*", "a/a", true},
		{"a/*", "a/b/", false},
		{"a/*", "a/b/c", false},

		{"a/*/", "a/", false},
		{"a/*/", "a/a", false},
		{"a/*/", "a/b/", true},
		{"a/*/", "a/b/c", false},

		// Recursive glob: matches files at any depth below "a".
		{"a/**/*", "a/", false},
		{"a/**/*", "a/a", true},
		{"a/**/*", "a/b/", false},
		{"a/**/*", "a/b/c", true},

		{"a/**/*/", "a/", false},
		{"a/**/*/", "a/a", false},
		{"a/**/*/", "a/b/", true},
		{"a/**/*/", "a/b/c", false},

		{"**/*", "a/", false},
		{"**/*", "a/a", true},
		{"**/*", "a/b/", false},
		{"**/*", "a/b/c", true},

		{"**/*/", "a/", true},
		{"**/*/", "a/a", false},
		{"**/*/", "a/b/", true},
		{"**/*/", "a/b/c", false},

		// Escaped metacharacters match only their literal selves.
		{`a/\*\*/\*`, `a/**/*`, true},
		{`a/\*\*/\*`, `a/a/*`, false},
		{`a/\*\*/\*`, `a/**/a`, false},
		{`a/\*\*/\*`, `a/a/a`, false},

		{`a/**/\*`, `a/**/*`, true},
		{`a/**/\*`, `a/a/*`, true},
		{`a/**/\*`, `a/**/a`, false},
		{`a/**/\*`, `a/a/a`, false},

		{`a/\*\*/*`, `a/**/*`, true},
		{`a/\*\*/*`, `a/a/*`, false},
		{`a/\*\*/*`, `a/**/a`, true},
		{`a/\*\*/*`, `a/a/a`, false},

		{`*/**/a`, `a/a/a`, true},
		{`*/**/a`, `*/a/a`, true},
		{`*/**/a`, `a/**/a`, true},
		{`*/**/a`, `*/**/a`, true},

		{`\*/\*\*/a`, `a/a/a`, false},
		{`\*/\*\*/a`, `*/a/a`, false},
		{`\*/\*\*/a`, `a/**/a`, false},
		{`\*/\*\*/a`, `*/**/a`, true},

		{`a/?`, `a/?`, true},
		{`a/?`, `a/a`, true},
		{`a/\?`, `a/?`, true},
		{`a/\?`, `a/a`, false},

		// NOTE(review): this group duplicates the a/? cases immediately above.
		{`a/?`, `a/?`, true},
		{`a/?`, `a/a`, true},
		{`a/\?`, `a/?`, true},
		{`a/\?`, `a/a`, false},

		// Character classes, escaped and unescaped.
		{`a/[a-c]`, `a/b`, true},
		{`a/[abc]`, `a/b`, true},

		{`a/\[abc]`, `a/b`, false},
		{`a/\[abc]`, `a/[abc]`, true},

		{`a/\[abc\]`, `a/b`, false},
		{`a/\[abc\]`, `a/[abc]`, true},

		// NOTE(review): this group is a third copy of the a/? cases.
		{`a/?`, `a/?`, true},
		{`a/?`, `a/a`, true},
		{`a/\?`, `a/?`, true},
		{`a/\?`, `a/a`, false},

		// Absolute paths.
		{"/a/*", "/a/", false},
		{"/a/*", "/a/a", true},
		{"/a/*", "/a/b/", false},
		{"/a/*", "/a/b/c", false},

		{"/a/*/", "/a/", false},
		{"/a/*/", "/a/a", false},
		{"/a/*/", "/a/b/", true},
		{"/a/*/", "/a/b/c", false},

		{"/a/**/*", "/a/", false},
		{"/a/**/*", "/a/a", true},
		{"/a/**/*", "/a/b/", false},
		{"/a/**/*", "/a/b/c", true},

		{"/**/*", "/a/", false},
		{"/**/*", "/a/a", true},
		{"/**/*", "/a/b/", false},
		{"/**/*", "/a/b/c", true},

		{"/**/*/", "/a/", true},
		{"/**/*/", "/a/a", false},
		{"/**/*/", "/a/b/", true},
		{"/**/*/", "/a/b/c", false},

		// Absolute patterns never match relative names and vice versa.
		{`a`, `/a`, false},
		{`/a`, `a`, false},
		{`*`, `/a`, false},
		{`/*`, `a`, false},
		{`**/*`, `/a`, false},
		{`/**/*`, `a`, false},
	}

	for _, test := range testCases {
		t.Run(test.pattern+","+test.name, func(t *testing.T) {
			match, err := Match(test.pattern, test.name)
			if err != nil {
				t.Fatal(err)
			}
			if match != test.match {
				t.Errorf("want: %v, got %v", test.match, match)
			}
		})
	}
}
diff --git a/cmd/diff_target_files/known_nondeterminism.whitelist b/cmd/diff_target_files/known_nondeterminism.whitelist
new file mode 100644
index 0000000..6d71403
--- /dev/null
+++ b/cmd/diff_target_files/known_nondeterminism.whitelist
@@ -0,0 +1,10 @@
+// List of files that are known to be non-deterministic, along with the
+// bug number tracking the fix for the non-determinism.
+[
+ {
+ "Paths": [
+ // b/120039850
+ "system/framework/oat/*/services.art"
+ ]
+ }
+]
diff --git a/cmd/diff_target_files/props.whitelist b/cmd/diff_target_files/props.whitelist
new file mode 100644
index 0000000..9245b8b
--- /dev/null
+++ b/cmd/diff_target_files/props.whitelist
@@ -0,0 +1,18 @@
+[
+ // Ignore date, version and hostname properties in build.prop and prop.default files.
+ {
+ "Paths": [
+ "**/build.prop",
+ "**/prop.default"
+ ],
+ "IgnoreMatchingLines": [
+ "ro\\..*build\\.date=.*",
+ "ro\\..*build\\.date\\.utc=.*",
+ "ro\\..*build\\.version\\.incremental=.*",
+ "ro\\..*build\\.fingerprint=.*",
+ "ro\\.build\\.display\\.id=.*",
+ "ro\\.build\\.description=.*",
+ "ro\\.build\\.host=.*"
+ ]
+ }
+]
\ No newline at end of file
diff --git a/cmd/diff_target_files/target_files.go b/cmd/diff_target_files/target_files.go
new file mode 100644
index 0000000..8705ca7
--- /dev/null
+++ b/cmd/diff_target_files/target_files.go
@@ -0,0 +1,86 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "fmt"
+ "strings"
+)
+
// targetFilesPattern matches the file name of a *-target_files-*.zip build artifact.
const targetFilesPattern = "*-target_files-*.zip"

// targetZipPartitions lists the directory prefixes inside a target-files zip that correspond to
// device partitions.  filterTargetZipFiles lowercases these prefixes to produce on-device paths.
// "BOOT/RAMDISK/" is listed before "BOOT/" so the more specific prefix is rewritten first.
var targetZipPartitions = []string{
	"BOOT/RAMDISK/",
	"BOOT/",
	"DATA/",
	"ODM/",
	"OEM/",
	"PRODUCT/",
	"PRODUCT_SERVICES/",
	"ROOT/",
	"SYSTEM/",
	"SYSTEM_OTHER/",
	"VENDOR/",
}

// targetZipFilter lists directory prefixes inside a target-files zip whose contents are excluded
// from comparison.
var targetZipFilter = []string{
	"IMAGES/",
	"OTA/",
	"META/",
	"PREBUILT_IMAGES/",
	"RADIO/",
}
+
+func filterTargetZipFiles(files []*ZipArtifactFile, artifact string, patterns []string) ([]*ZipArtifactFile, error) {
+ var ret []*ZipArtifactFile
+outer:
+ for _, f := range files {
+ if f.FileInfo().IsDir() {
+ continue
+ }
+
+ if artifact == targetFilesPattern {
+ found := false
+ for _, p := range targetZipPartitions {
+ if strings.HasPrefix(f.Name, p) {
+ f.Name = strings.ToLower(p) + strings.TrimPrefix(f.Name, p)
+ found = true
+ }
+ }
+ for _, filter := range targetZipFilter {
+ if strings.HasPrefix(f.Name, filter) {
+ continue outer
+ }
+ }
+
+ if !found {
+ return nil, fmt.Errorf("unmatched prefix for %s", f.Name)
+ }
+ }
+
+ if patterns != nil {
+ for _, pattern := range patterns {
+ match, _ := Match(pattern, f.Name)
+ if match {
+ ret = append(ret, f)
+ }
+ }
+ } else {
+ ret = append(ret, f)
+ }
+ }
+
+ return ret, nil
+}
diff --git a/cmd/diff_target_files/whitelist.go b/cmd/diff_target_files/whitelist.go
new file mode 100644
index 0000000..f00fc1e
--- /dev/null
+++ b/cmd/diff_target_files/whitelist.go
@@ -0,0 +1,251 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "encoding/json"
+ "io"
+ "os"
+ "regexp"
+ "strings"
+ "unicode"
+)
+
// jsonWhitelist is the JSON form of one whitelist entry in a whitelist file.
type jsonWhitelist struct {
	// Paths are the path patterns (Match syntax) this entry applies to.
	Paths []string
	// IgnoreMatchingLines are regexes of lines to ignore when comparing files matching Paths.
	IgnoreMatchingLines []string
}

// whitelist is the parsed, per-path form of a whitelist entry.
type whitelist struct {
	// path is a single path pattern in Match syntax.
	path string
	// ignoreMatchingLines are regexes of lines to ignore when comparing files matching path.
	ignoreMatchingLines []string
}
+
+func parseWhitelists(whitelists []string, whitelistFiles []string) ([]whitelist, error) {
+ var ret []whitelist
+
+ add := func(path string, ignoreMatchingLines []string) {
+ for _, x := range ret {
+ if x.path == path {
+ x.ignoreMatchingLines = append(x.ignoreMatchingLines, ignoreMatchingLines...)
+ return
+ }
+ }
+
+ ret = append(ret, whitelist{
+ path: path,
+ ignoreMatchingLines: ignoreMatchingLines,
+ })
+ }
+
+ for _, file := range whitelistFiles {
+ newWhitelists, err := parseWhitelistFile(file)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, w := range newWhitelists {
+ add(w.path, w.ignoreMatchingLines)
+ }
+ }
+
+ for _, s := range whitelists {
+ colon := strings.IndexRune(s, ':')
+ var ignoreMatchingLines []string
+ if colon >= 0 {
+ ignoreMatchingLines = []string{s[colon+1:]}
+ }
+ add(s, ignoreMatchingLines)
+ }
+
+ return ret, nil
+}
+
+func parseWhitelistFile(file string) ([]whitelist, error) {
+ r, err := os.Open(file)
+ if err != nil {
+ return nil, err
+ }
+ defer r.Close()
+
+ d := json.NewDecoder(newJSONCommentStripper(r))
+
+ var jsonWhitelists []jsonWhitelist
+
+ err = d.Decode(&jsonWhitelists)
+
+ var whitelists []whitelist
+ for _, w := range jsonWhitelists {
+ for _, p := range w.Paths {
+ whitelists = append(whitelists, whitelist{
+ path: p,
+ ignoreMatchingLines: w.IgnoreMatchingLines,
+ })
+ }
+ }
+
+ return whitelists, err
+}
+
// filterModifiedPaths removes from l the modified file pairs covered by a whitelist.  A pair is
// removed when its path matches a whitelist that has no ignoreMatchingLines, or when the two
// files are identical after dropping lines matching the whitelist's regexes.  Returns nil when
// every pair has been filtered out.
func filterModifiedPaths(l [][2]*ZipArtifactFile, whitelists []whitelist) ([][2]*ZipArtifactFile, error) {
outer:
	for i := 0; i < len(l); i++ {
		for _, w := range whitelists {
			if match, err := Match(w.path, l[i][0].Name); err != nil {
				return l, err
			} else if match {
				if match, err := diffIgnoringMatchingLines(l[i][0], l[i][1], w.ignoreMatchingLines); err != nil {
					return l, err
				} else if match || len(w.ignoreMatchingLines) == 0 {
					// Delete the pair in place and step back so the element that
					// shifted into slot i is not skipped.
					l = append(l[:i], l[i+1:]...)
					i--
				}
				// The first whitelist whose path matches decides this pair.
				continue outer
			}
		}
	}

	// Normalize an emptied slice to nil so a fully-filtered result equals the zero value.
	if len(l) == 0 {
		l = nil
	}

	return l, nil
}
+
+func filterNewPaths(l []*ZipArtifactFile, whitelists []whitelist) ([]*ZipArtifactFile, error) {
+outer:
+ for i := 0; i < len(l); i++ {
+ for _, w := range whitelists {
+ if match, err := Match(w.path, l[i].Name); err != nil {
+ return l, err
+ } else if match && len(w.ignoreMatchingLines) == 0 {
+ l = append(l[:i], l[i+1:]...)
+ i--
+ }
+ continue outer
+ }
+ }
+
+ if len(l) == 0 {
+ l = nil
+ }
+
+ return l, nil
+}
+
+func diffIgnoringMatchingLines(a *ZipArtifactFile, b *ZipArtifactFile, ignoreMatchingLines []string) (match bool, err error) {
+ lineMatchesIgnores := func(b []byte) (bool, error) {
+ for _, m := range ignoreMatchingLines {
+ if match, err := regexp.Match(m, b); err != nil {
+ return false, err
+ } else if match {
+ return match, nil
+ }
+ }
+ return false, nil
+ }
+
+ filter := func(z *ZipArtifactFile) ([]byte, error) {
+ var ret []byte
+
+ r, err := z.Open()
+ if err != nil {
+ return nil, err
+ }
+ s := bufio.NewScanner(r)
+
+ for s.Scan() {
+ if match, err := lineMatchesIgnores(s.Bytes()); err != nil {
+ return nil, err
+ } else if !match {
+ ret = append(ret, "\n"...)
+ ret = append(ret, s.Bytes()...)
+ }
+ }
+
+ return ret, nil
+ }
+
+ bufA, err := filter(a)
+ if err != nil {
+ return false, err
+ }
+ bufB, err := filter(b)
+ if err != nil {
+ return false, err
+ }
+
+ return bytes.Compare(bufA, bufB) == 0, nil
+}
+
+func applyWhitelists(diff zipDiff, whitelists []whitelist) (zipDiff, error) {
+ var err error
+
+ diff.modified, err = filterModifiedPaths(diff.modified, whitelists)
+ if err != nil {
+ return diff, err
+ }
+ diff.onlyInA, err = filterNewPaths(diff.onlyInA, whitelists)
+ if err != nil {
+ return diff, err
+ }
+ diff.onlyInB, err = filterNewPaths(diff.onlyInB, whitelists)
+ if err != nil {
+ return diff, err
+ }
+
+ return diff, nil
+}
+
+func newJSONCommentStripper(r io.Reader) *jsonCommentStripper {
+ return &jsonCommentStripper{
+ r: bufio.NewReader(r),
+ }
+}
+
+type jsonCommentStripper struct {
+ r *bufio.Reader
+ b []byte
+ err error
+}
+
+func (j *jsonCommentStripper) Read(buf []byte) (int, error) {
+ for len(j.b) == 0 {
+ if j.err != nil {
+ return 0, j.err
+ }
+
+ j.b, j.err = j.r.ReadBytes('\n')
+
+ if isComment(j.b) {
+ j.b = nil
+ }
+ }
+
+ n := copy(buf, j.b)
+ j.b = j.b[n:]
+ return n, nil
+}
+
+var commentPrefix = []byte("//")
+
+func isComment(b []byte) bool {
+ for len(b) > 0 && unicode.IsSpace(rune(b[0])) {
+ b = b[1:]
+ }
+ return bytes.HasPrefix(b, commentPrefix)
+}
diff --git a/cmd/diff_target_files/whitelist_test.go b/cmd/diff_target_files/whitelist_test.go
new file mode 100644
index 0000000..4b19fdd
--- /dev/null
+++ b/cmd/diff_target_files/whitelist_test.go
@@ -0,0 +1,126 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "archive/zip"
+ "bytes"
+ "reflect"
+ "testing"
+)
+
+func bytesToZipArtifactFile(name string, data []byte) *ZipArtifactFile {
+ buf := &bytes.Buffer{}
+ w := zip.NewWriter(buf)
+ f, err := w.Create(name)
+ if err != nil {
+ panic(err)
+ }
+ _, err = f.Write(data)
+ if err != nil {
+ panic(err)
+ }
+
+ w.Close()
+
+ r, err := zip.NewReader(bytes.NewReader(buf.Bytes()), int64(buf.Len()))
+ if err != nil {
+ panic(err)
+ }
+
+ return &ZipArtifactFile{r.File[0]}
+}
+
// f1a and f1b are zip entries with the same name whose contents differ only on the "foo:" line.
var f1a = bytesToZipArtifactFile("dir/f1", []byte(`
a
foo: bar
c
`))

var f1b = bytesToZipArtifactFile("dir/f1", []byte(`
a
foo: baz
c
`))

// f2 is an empty zip entry with a distinct name.
var f2 = bytesToZipArtifactFile("dir/f2", nil)
+
// Test_applyWhitelists checks that whitelists remove matching entries from each zipDiff section:
// exact paths, glob paths, modified pairs, and pairs equal after ignoring matching lines.
func Test_applyWhitelists(t *testing.T) {
	type args struct {
		diff       zipDiff
		whitelists []whitelist
	}
	tests := []struct {
		name    string
		args    args
		want    zipDiff
		wantErr bool
	}{
		{
			name: "simple",
			args: args{
				diff: zipDiff{
					onlyInA: []*ZipArtifactFile{f1a, f2},
				},
				whitelists: []whitelist{{path: "dir/f1"}},
			},
			want: zipDiff{
				onlyInA: []*ZipArtifactFile{f2},
			},
		},
		{
			name: "glob",
			args: args{
				diff: zipDiff{
					onlyInA: []*ZipArtifactFile{f1a, f2},
				},
				whitelists: []whitelist{{path: "dir/*"}},
			},
			want: zipDiff{},
		},
		{
			name: "modified",
			args: args{
				diff: zipDiff{
					modified: [][2]*ZipArtifactFile{{f1a, f1b}},
				},
				whitelists: []whitelist{{path: "dir/*"}},
			},
			want: zipDiff{},
		},
		{
			// The pair differs only on the "foo:" line, which the regex ignores.
			name: "matching lines",
			args: args{
				diff: zipDiff{
					modified: [][2]*ZipArtifactFile{{f1a, f1b}},
				},
				whitelists: []whitelist{{path: "dir/*", ignoreMatchingLines: []string{"foo: .*"}}},
			},
			want: zipDiff{},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := applyWhitelists(tt.args.diff, tt.args.whitelists)
			if (err != nil) != tt.wantErr {
				t.Errorf("applyWhitelists() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("applyWhitelists() = %v, want %v", got, tt.want)
			}
		})
	}
}
diff --git a/cmd/diff_target_files/zip_artifact.go b/cmd/diff_target_files/zip_artifact.go
new file mode 100644
index 0000000..08ce889
--- /dev/null
+++ b/cmd/diff_target_files/zip_artifact.go
@@ -0,0 +1,174 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "archive/zip"
+ "context"
+ "fmt"
+ "hash/crc32"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+)
+
+// ZipArtifact represents a zip file that may be local or remote.
+type ZipArtifact interface {
+ // Files returns the list of files contained in the zip file.
+ Files() ([]*ZipArtifactFile, error)
+
+ // Close closes the zip file artifact.
+ Close()
+}
+
+// localZipArtifact is a handle to a local zip file artifact.
+type localZipArtifact struct {
+ zr *zip.ReadCloser
+ files []*ZipArtifactFile
+}
+
+// NewLocalZipArtifact returns a ZipArtifact for a local zip file..
+func NewLocalZipArtifact(name string) (ZipArtifact, error) {
+ zr, err := zip.OpenReader(name)
+ if err != nil {
+ return nil, err
+ }
+
+ var files []*ZipArtifactFile
+ for _, zf := range zr.File {
+ files = append(files, &ZipArtifactFile{zf})
+ }
+
+ return &localZipArtifact{
+ zr: zr,
+ files: files,
+ }, nil
+}
+
+// Files returns the list of files contained in the local zip file artifact.
+func (z *localZipArtifact) Files() ([]*ZipArtifactFile, error) {
+ return z.files, nil
+}
+
+// Close closes the buffered reader of the local zip file artifact.
+func (z *localZipArtifact) Close() {
+ z.zr.Close()
+}
+
+// ZipArtifactFile contains a zip.File handle to the data inside the remote *-target_files-*.zip
+// build artifact.
+type ZipArtifactFile struct {
+ *zip.File
+}
+
// Extract begins extracting a file from inside a ZipArtifact into dir on a background
// goroutine.  It returns an ExtractedZipArtifactFile handle whose Path() blocks until the
// extraction finishes.
//
// limiter bounds concurrent extractions: a token is pushed before the work starts and removed
// when it ends.
//
// NOTE(review): ctx is accepted but not currently used to cancel the extraction.
func (zf *ZipArtifactFile) Extract(ctx context.Context, dir string,
	limiter chan bool) *ExtractedZipArtifactFile {

	d := &ExtractedZipArtifactFile{
		initCh: make(chan struct{}),
	}

	go func() {
		// Closing initCh signals Path() that the extraction is complete.
		defer close(d.initCh)
		// Acquire a concurrency token; release it when done.
		limiter <- true
		defer func() { <-limiter }()

		zr, err := zf.Open()
		if err != nil {
			d.err = err
			return
		}
		defer zr.Close()

		// Tee all reads through a CRC32 so the extracted data can be verified against the
		// zip entry's stored checksum below.
		crc := crc32.NewIEEE()
		r := io.TeeReader(zr, crc)

		// Reject non-canonical names (embedded "a/..", doubled or trailing separators)
		// before joining under dir.
		// NOTE(review): a name beginning with "../" survives filepath.Clean unchanged and
		// would escape dir — confirm inputs are trusted.
		if filepath.Clean(zf.Name) != zf.Name {
			d.err = fmt.Errorf("invalid filename %q", zf.Name)
			return
		}
		path := filepath.Join(dir, zf.Name)

		err = os.MkdirAll(filepath.Dir(path), 0777)
		if err != nil {
			d.err = err
			return
		}

		// Remove any stale file first; a missing file is not an error.
		err = os.Remove(path)
		if err != nil && !os.IsNotExist(err) {
			d.err = err
			return
		}

		if zf.Mode().IsRegular() {
			w, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY, zf.Mode())
			if err != nil {
				d.err = err
				return
			}
			defer w.Close()

			_, err = io.Copy(w, r)
			if err != nil {
				d.err = err
				return
			}
		} else if zf.Mode()&os.ModeSymlink != 0 {
			// A symlink entry stores its target path as the file contents.
			target, err := ioutil.ReadAll(r)
			if err != nil {
				d.err = err
				return
			}

			err = os.Symlink(string(target), path)
			if err != nil {
				d.err = err
				return
			}
		} else {
			d.err = fmt.Errorf("unknown mode %q", zf.Mode())
			return
		}

		// Verify the extracted data against the checksum recorded in the zip entry.
		if crc.Sum32() != zf.CRC32 {
			d.err = fmt.Errorf("crc mismatch for %v", zf.Name)
			return
		}

		d.path = path
	}()

	return d
}
+
// ExtractedZipArtifactFile is a handle to a file extracted from a ZipArtifact (for example a
// remoteZipArtifact).  The extraction may still be in progress, and will be complete when
// Path() returns.
type ExtractedZipArtifactFile struct {
	// initCh is closed by the extraction goroutine when it finishes.
	initCh chan struct{}
	// err holds any error encountered during extraction.
	err error

	// path is the extracted file's location on disk; valid only when err is nil.
	path string
}

// Path returns the path to the extracted file and any errors that occurred during the
// extraction.  It will block until the extraction is complete.
func (d *ExtractedZipArtifactFile) Path() (string, error) {
	<-d.initCh
	return d.path, d.err
}
diff --git a/cmd/sbox/sbox.go b/cmd/sbox/sbox.go
index 0af1886..4167edb 100644
--- a/cmd/sbox/sbox.go
+++ b/cmd/sbox/sbox.go
@@ -24,6 +24,7 @@
"path"
"path/filepath"
"strings"
+ "time"
)
var (
@@ -265,6 +266,15 @@
if err != nil {
return err
}
+
+ // Update the timestamp of the output file in case the tool wrote an old timestamp (for example, tar can extract
+ // files with old timestamps).
+ now := time.Now()
+ err = os.Chtimes(tempPath, now, now)
+ if err != nil {
+ return err
+ }
+
err = os.Rename(tempPath, destPath)
if err != nil {
return err
diff --git a/java/dexpreopt_bootjars_test.go b/java/dexpreopt_bootjars_test.go
index 141f7ba..cbb52f1 100644
--- a/java/dexpreopt_bootjars_test.go
+++ b/java/dexpreopt_bootjars_test.go
@@ -103,7 +103,7 @@
expectedOutputs[i] = filepath.Join(buildDir, "test_device", expectedOutputs[i])
}
- outputs := bootArt.Outputs.Strings()
+ outputs := append(android.WritablePaths{bootArt.Output}, bootArt.ImplicitOutputs...).Strings()
sort.Strings(outputs)
sort.Strings(expectedOutputs)
diff --git a/java/hiddenapi_singleton.go b/java/hiddenapi_singleton.go
index 86531eb..139114b 100644
--- a/java/hiddenapi_singleton.go
+++ b/java/hiddenapi_singleton.go
@@ -236,6 +236,8 @@
android.PathForSource(ctx, "frameworks/base/config/hiddenapi-greylist-max-o.txt")).
FlagWithInput("--blacklist ",
android.PathForSource(ctx, "frameworks/base/config/hiddenapi-force-blacklist.txt")).
+ FlagWithInput("--greylist-packages ",
+ android.PathForSource(ctx, "frameworks/base/config/hiddenapi-greylist-packages.txt")).
FlagWithOutput("--output ", tempPath)
commitChangeForRestat(rule, tempPath, outputPath)