Merge changes I7dd4ba7f,I7887a67a
* changes:
Add depfile support to RuleBuilder
Add RuleBuilderCommand.Flags
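Neither of the two RuleBuilder changes listed above appears in the diff hunks below, so the following is only a rough, hypothetical sketch of how a depfile-producing command might be assembled with them. The method names Flags and FlagWithDepFile, the tool name "mytool", and the helper signature are assumptions inferred from the change titles; Tool, FlagWithInput, FlagWithOutput, and Build are used the same way as in the hiddenapi_singleton.go hunk further down.

// Hypothetical sketch, not part of this change. It assumes it lives in a Soong
// package that already defines a package-level pctx (as apex/ and java/ do).
func buildWithDepfile(ctx android.ModuleContext, input android.Path,
	output, depFile android.WritablePath) {

	rule := android.NewRuleBuilder()
	rule.Command().
		Tool(ctx.Config().HostToolPath(ctx, "mytool")). // "mytool" is a made-up tool name
		Flags([]string{"--verbose", "--deterministic"}). // assumed: Flags appends pre-split flags
		FlagWithInput("--input ", input).
		FlagWithOutput("--output ", output).
		FlagWithDepFile("--depfile ", depFile) // assumed: registers a depfile for the rule
	rule.Build(pctx, ctx, "mytool", "run mytool with a depfile")
}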
diff --git a/apex/apex.go b/apex/apex.go
index 685a774..ce1ed46 100644
--- a/apex/apex.go
+++ b/apex/apex.go
@@ -396,9 +396,8 @@
outputFiles map[apexPackaging]android.WritablePath
installDir android.OutputPath
- public_key_file android.Path
- private_key_file android.Path
- bundle_public_key bool
+ public_key_file android.Path
+ private_key_file android.Path
container_certificate_file android.Path
container_private_key_file android.Path
@@ -746,10 +745,6 @@
if key, ok := child.(*apexKey); ok {
a.private_key_file = key.private_key_file
a.public_key_file = key.public_key_file
- // If the key is not installed, bundled it with the APEX.
- // Note: this bundled key is valid only for non-production builds
- // (eng/userdebug).
- a.bundle_public_key = !key.installable() && ctx.Config().Debuggable()
return false
} else {
ctx.PropertyErrorf("key", "%q is not an apex_key module", depName)
@@ -968,11 +963,8 @@
optFlags := []string{}
// Additional implicit inputs.
- implicitInputs = append(implicitInputs, cannedFsConfig, fileContexts, a.private_key_file)
- if a.bundle_public_key {
- implicitInputs = append(implicitInputs, a.public_key_file)
- optFlags = append(optFlags, "--pubkey "+a.public_key_file.String())
- }
+ implicitInputs = append(implicitInputs, cannedFsConfig, fileContexts, a.private_key_file, a.public_key_file)
+ optFlags = append(optFlags, "--pubkey "+a.public_key_file.String())
manifestPackageName, overridden := ctx.DeviceConfig().OverrideManifestPackageNameFor(ctx.ModuleName())
if overridden {
@@ -1057,7 +1049,7 @@
func (a *apexBundle) buildFlattenedApex(ctx android.ModuleContext) {
if a.installable() {
- // For flattened APEX, do nothing but make sure that apex_manifest.json file is also copied along
+ // For flattened APEX, do nothing but make sure that apex_manifest.json and apex_pubkey are also copied along
// with other ordinary files.
manifest := android.PathForModuleSrc(ctx, proptools.StringDefault(a.properties.Manifest, "apex_manifest.json"))
@@ -1070,6 +1062,15 @@
})
a.filesInfo = append(a.filesInfo, apexFile{copiedManifest, ctx.ModuleName() + ".apex_manifest.json", ".", etc, nil, nil})
+ // rename to apex_pubkey
+ copiedPubkey := android.PathForModuleOut(ctx, "apex_pubkey")
+ ctx.Build(pctx, android.BuildParams{
+ Rule: android.Cp,
+ Input: a.public_key_file,
+ Output: copiedPubkey,
+ })
+ a.filesInfo = append(a.filesInfo, apexFile{copiedPubkey, ctx.ModuleName() + ".apex_pubkey", ".", etc, nil, nil})
+
if ctx.Config().FlattenApex() {
for _, fi := range a.filesInfo {
dir := filepath.Join("apex", ctx.ModuleName(), fi.installDir)
@@ -1215,7 +1216,6 @@
fmt.Fprintln(w, "LOCAL_MODULE_PATH :=", filepath.Join("$(OUT_DIR)", a.installDir.RelPathString()))
fmt.Fprintln(w, "LOCAL_MODULE_STEM :=", name+apexType.suffix())
fmt.Fprintln(w, "LOCAL_UNINSTALLABLE_MODULE :=", !a.installable())
- fmt.Fprintln(w, "LOCAL_REQUIRED_MODULES :=", String(a.properties.Key))
if a.installable() && a.mergedNoticeFile != nil {
fmt.Fprintln(w, "LOCAL_NOTICE_FILE :=", a.mergedNoticeFile.String())
}
@@ -1300,23 +1300,56 @@
type PrebuiltProperties struct {
// the path to the prebuilt .apex file to import.
- Src string `android:"arch_variant"`
+ Source string `blueprint:"mutated"`
- // the name of the apex_key module that contains the matching public key to be installed.
- Key *string
+ Src *string
+ Arch struct {
+ Arm struct {
+ Src *string
+ }
+ Arm64 struct {
+ Src *string
+ }
+ X86 struct {
+ Src *string
+ }
+ X86_64 struct {
+ Src *string
+ }
+ }
}
func (p *Prebuilt) DepsMutator(ctx android.BottomUpMutatorContext) {
- if String(p.properties.Key) == "" {
- ctx.ModuleErrorf("key is missing")
+ // This is called before prebuilt_select and prebuilt_postdeps mutators
+ // These mutators require that src be set correctly for each arch so that
+ // arch variants are disabled when src is not provided for the arch.
+ if len(ctx.MultiTargets()) != 1 {
+ ctx.ModuleErrorf("compile_multilib shouldn't be \"both\" for prebuilt_apex")
return
}
- ctx.AddDependency(ctx.Module(), keyTag, *p.properties.Key)
+ var src string
+ switch ctx.MultiTargets()[0].Arch.ArchType {
+ case android.Arm:
+ src = String(p.properties.Arch.Arm.Src)
+ case android.Arm64:
+ src = String(p.properties.Arch.Arm64.Src)
+ case android.X86:
+ src = String(p.properties.Arch.X86.Src)
+ case android.X86_64:
+ src = String(p.properties.Arch.X86_64.Src)
+ default:
+ ctx.ModuleErrorf("prebuilt_apex does not support %q", ctx.MultiTargets()[0].Arch.String())
+ return
+ }
+ if src == "" {
+ src = String(p.properties.Src)
+ }
+ p.properties.Source = src
}
func (p *Prebuilt) GenerateAndroidBuildActions(ctx android.ModuleContext) {
// TODO(jungjw): Check the key validity.
- p.inputApex = p.prebuilt.SingleSourcePath(ctx)
+ p.inputApex = p.Prebuilt().SingleSourcePath(ctx)
p.installDir = android.PathForModuleInstall(ctx, "apex")
ctx.InstallFile(p.installDir, ctx.ModuleName()+imageApexSuffix, p.inputApex)
}
@@ -1338,7 +1371,6 @@
func(w io.Writer, outputFile android.Path) {
fmt.Fprintln(w, "LOCAL_MODULE_PATH :=", filepath.Join("$(OUT_DIR)", p.installDir.RelPathString()))
fmt.Fprintln(w, "LOCAL_MODULE_STEM :=", p.BaseModuleName()+imageApexSuffix)
- fmt.Fprintln(w, "LOCAL_REQUIRED_MODULES :=", String(p.properties.Key))
},
},
}
@@ -1348,7 +1380,7 @@
func PrebuiltFactory() android.Module {
module := &Prebuilt{}
module.AddProperties(&module.properties)
- android.InitSingleSourcePrebuiltModule(module, &module.properties.Src)
- android.InitAndroidArchModule(module, android.DeviceSupported, android.MultilibCommon)
+ android.InitSingleSourcePrebuiltModule(module, &module.properties.Source)
+ android.InitAndroidMultiTargetsArchModule(module, android.DeviceSupported, android.MultilibCommon)
return module
}
diff --git a/apex/apex_test.go b/apex/apex_test.go
index 2d9cca6..6d101d8 100644
--- a/apex/apex_test.go
+++ b/apex/apex_test.go
@@ -15,8 +15,6 @@
package apex
import (
- "bufio"
- "bytes"
"io/ioutil"
"os"
"strings"
@@ -171,7 +169,8 @@
"custom_notice": nil,
"testkey2.avbpubkey": nil,
"testkey2.pem": nil,
- "myapex.apex": nil,
+ "myapex-arm64.apex": nil,
+ "myapex-arm.apex": nil,
})
_, errs := ctx.ParseFileList(".", []string{"Android.bp"})
android.FailIfErrored(t, errs)
@@ -298,6 +297,10 @@
`)
apexRule := ctx.ModuleForTests("myapex", "android_common_myapex").Rule("apexRule")
+
+ optFlags := apexRule.Args["opt_flags"]
+ ensureContains(t, optFlags, "--pubkey vendor/foo/devkeys/testkey.avbpubkey")
+
copyCmds := apexRule.Args["copy_commands"]
// Ensure that main rule creates an output
@@ -1196,14 +1199,6 @@
if actual != expected {
t.Errorf("wrong install path. expected %q. actual %q", expected, actual)
}
-
- apex_key := ctx.ModuleForTests("myapex.key", "android_common").Module().(*apexKey)
- expected = "target/product/test_device/product/etc/security/apex"
- actual = apex_key.installDir.RelPathString()
- if actual != expected {
- t.Errorf("wrong install path. expected %q. actual %q", expected, actual)
- }
-
}
func TestApexKeyFromOtherModule(t *testing.T) {
@@ -1243,37 +1238,21 @@
ctx := testApex(t, `
prebuilt_apex {
name: "myapex",
- src: "myapex.apex",
- key: "myapex.key"
- }
-
- apex_key {
- name: "myapex.key",
- public_key: "testkey.avbpubkey",
- private_key: "testkey.pem",
- product_specific: true,
+ arch: {
+ arm64: {
+ src: "myapex-arm64.apex",
+ },
+ arm: {
+ src: "myapex-arm.apex",
+ },
+ },
}
`)
prebuilt := ctx.ModuleForTests("myapex", "android_common").Module().(*Prebuilt)
- // Check if the key module is added as a required module.
- buf := &bytes.Buffer{}
- prebuilt.AndroidMk().Extra[0](buf, nil)
- found := false
- scanner := bufio.NewScanner(bytes.NewReader(buf.Bytes()))
- expected := "myapex.key"
- for scanner.Scan() {
- line := scanner.Text()
- tok := strings.Split(line, " := ")
- if tok[0] == "LOCAL_REQUIRED_MODULES" {
- found = true
- if tok[1] != "myapex.key" {
- t.Errorf("Unexpected LOCAL_REQUIRED_MODULES '%s', expected '%s'", tok[1], expected)
- }
- }
- }
- if !found {
- t.Errorf("Couldn't find a LOCAL_REQUIRED_MODULES entry")
+ expectedInput := "myapex-arm64.apex"
+ if prebuilt.inputApex.String() != expectedInput {
+ t.Errorf("inputApex invalid. expected: %q, actual: %q", expectedInput, prebuilt.inputApex.String())
}
}
diff --git a/apex/key.go b/apex/key.go
index fbd29bc..a627e4b 100644
--- a/apex/key.go
+++ b/apex/key.go
@@ -16,8 +16,6 @@
import (
"fmt"
- "io"
- "path/filepath"
"strings"
"android/soong/android"
@@ -39,7 +37,6 @@
public_key_file android.Path
private_key_file android.Path
- installDir android.OutputPath
keyName string
}
@@ -64,7 +61,7 @@
}
func (m *apexKey) installable() bool {
- return m.properties.Installable == nil || proptools.Bool(m.properties.Installable)
+ return false
}
func (m *apexKey) GenerateAndroidBuildActions(ctx android.ModuleContext) {
@@ -99,25 +96,6 @@
return
}
m.keyName = pubKeyName
-
- m.installDir = android.PathForModuleInstall(ctx, "etc/security/apex")
- if m.installable() {
- ctx.InstallFile(m.installDir, m.keyName, m.public_key_file)
- }
-}
-
-func (m *apexKey) AndroidMk() android.AndroidMkData {
- return android.AndroidMkData{
- Class: "ETC",
- OutputFile: android.OptionalPathForPath(m.public_key_file),
- Extra: []android.AndroidMkExtraFunc{
- func(w io.Writer, outputFile android.Path) {
- fmt.Fprintln(w, "LOCAL_MODULE_PATH :=", filepath.Join("$(OUT_DIR)", m.installDir.RelPathString()))
- fmt.Fprintln(w, "LOCAL_INSTALLED_MODULE_STEM :=", m.keyName)
- fmt.Fprintln(w, "LOCAL_UNINSTALLABLE_MODULE :=", !m.installable())
- },
- },
- }
}
////////////////////////////////////////////////////////////////////////
diff --git a/cc/builder.go b/cc/builder.go
index dab887c..6dd7c05 100644
--- a/cc/builder.go
+++ b/cc/builder.go
@@ -70,6 +70,8 @@
CommandDeps: []string{"$ldCmd"},
Rspfile: "${out}.rsp",
RspfileContent: "${in}",
+ // clang -Wl,--out-implib doesn't update its output file if it hasn't changed.
+ Restat: true,
},
"ldCmd", "crtBegin", "libFlags", "crtEnd", "ldFlags")
diff --git a/cmd/diff_target_files/Android.bp b/cmd/diff_target_files/Android.bp
new file mode 100644
index 0000000..5397f4b
--- /dev/null
+++ b/cmd/diff_target_files/Android.bp
@@ -0,0 +1,16 @@
+blueprint_go_binary {
+ name: "diff_target_files",
+ srcs: [
+ "compare.go",
+ "diff_target_files.go",
+ "glob.go",
+ "target_files.go",
+ "whitelist.go",
+ "zip_artifact.go",
+ ],
+ testSrcs: [
+ "compare_test.go",
+ "glob_test.go",
+ "whitelist_test.go",
+ ],
+}
diff --git a/cmd/diff_target_files/compare.go b/cmd/diff_target_files/compare.go
new file mode 100644
index 0000000..00cd9ca
--- /dev/null
+++ b/cmd/diff_target_files/compare.go
@@ -0,0 +1,133 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bytes"
+ "fmt"
+)
+
+// compareTargetFiles takes two ZipArtifacts and compares the files they contain by examining
+// the path, size, and CRC of each file.
+func compareTargetFiles(priZip, refZip ZipArtifact, artifact string, whitelists []whitelist, filters []string) (zipDiff, error) {
+ priZipFiles, err := priZip.Files()
+ if err != nil {
+ return zipDiff{}, fmt.Errorf("error fetching target file lists from primary zip %v", err)
+ }
+
+ refZipFiles, err := refZip.Files()
+ if err != nil {
+ return zipDiff{}, fmt.Errorf("error fetching target file lists from reference zip %v", err)
+ }
+
+ priZipFiles, err = filterTargetZipFiles(priZipFiles, artifact, filters)
+ if err != nil {
+ return zipDiff{}, err
+ }
+
+ refZipFiles, err = filterTargetZipFiles(refZipFiles, artifact, filters)
+ if err != nil {
+ return zipDiff{}, err
+ }
+
+ // Compare the file lists from both builds
+ diff := diffTargetFilesLists(refZipFiles, priZipFiles)
+
+ return applyWhitelists(diff, whitelists)
+}
+
+// zipDiff contains the list of files that differ between two zip files.
+type zipDiff struct {
+ modified [][2]*ZipArtifactFile
+ onlyInA, onlyInB []*ZipArtifactFile
+}
+
+// String pretty-prints the list of files that differ between two zip files.
+func (d *zipDiff) String() string {
+ buf := &bytes.Buffer{}
+
+ must := func(n int, err error) {
+ if err != nil {
+ panic(err)
+ }
+ }
+
+ var sizeChange int64
+
+ if len(d.modified) > 0 {
+ must(fmt.Fprintln(buf, "files modified:"))
+ for _, f := range d.modified {
+ must(fmt.Fprintf(buf, " %v (%v bytes -> %v bytes)\n", f[0].Name, f[0].UncompressedSize64, f[1].UncompressedSize64))
+ sizeChange += int64(f[1].UncompressedSize64) - int64(f[0].UncompressedSize64)
+ }
+ }
+
+ if len(d.onlyInA) > 0 {
+ must(fmt.Fprintln(buf, "files removed:"))
+ for _, f := range d.onlyInA {
+ must(fmt.Fprintf(buf, " - %v (%v bytes)\n", f.Name, f.UncompressedSize64))
+ sizeChange -= int64(f.UncompressedSize64)
+ }
+ }
+
+ if len(d.onlyInB) > 0 {
+ must(fmt.Fprintln(buf, "files added:"))
+ for _, f := range d.onlyInB {
+ must(fmt.Fprintf(buf, " + %v (%v bytes)\n", f.Name, f.UncompressedSize64))
+ sizeChange += int64(f.UncompressedSize64)
+ }
+ }
+
+ if len(d.modified) > 0 || len(d.onlyInA) > 0 || len(d.onlyInB) > 0 {
+ must(fmt.Fprintf(buf, "total size change: %v bytes\n", sizeChange))
+ }
+
+ return buf.String()
+}
+
+func diffTargetFilesLists(a, b []*ZipArtifactFile) zipDiff {
+ i := 0
+ j := 0
+
+ diff := zipDiff{}
+
+ for i < len(a) && j < len(b) {
+ if a[i].Name == b[j].Name {
+ if a[i].UncompressedSize64 != b[j].UncompressedSize64 || a[i].CRC32 != b[j].CRC32 {
+ diff.modified = append(diff.modified, [2]*ZipArtifactFile{a[i], b[j]})
+ }
+ i++
+ j++
+ } else if a[i].Name < b[j].Name {
+ // a[i] is not present in b
+ diff.onlyInA = append(diff.onlyInA, a[i])
+ i++
+ } else {
+ // b[j] is not present in a
+ diff.onlyInB = append(diff.onlyInB, b[j])
+ j++
+ }
+ }
+ for i < len(a) {
+ diff.onlyInA = append(diff.onlyInA, a[i])
+ i++
+ }
+ for j < len(b) {
+ diff.onlyInB = append(diff.onlyInB, b[j])
+ j++
+ }
+
+ return diff
+}
diff --git a/cmd/diff_target_files/compare_test.go b/cmd/diff_target_files/compare_test.go
new file mode 100644
index 0000000..9d3f8a5
--- /dev/null
+++ b/cmd/diff_target_files/compare_test.go
@@ -0,0 +1,131 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "archive/zip"
+ "reflect"
+ "testing"
+)
+
+func TestDiffTargetFilesLists(t *testing.T) {
+ zipArtifactFile := func(name string, crc32 uint32, size uint64) *ZipArtifactFile {
+ return &ZipArtifactFile{
+ File: &zip.File{
+ FileHeader: zip.FileHeader{
+ Name: name,
+ CRC32: crc32,
+ UncompressedSize64: size,
+ },
+ },
+ }
+ }
+ x0 := zipArtifactFile("x", 0, 0)
+ x1 := zipArtifactFile("x", 1, 0)
+ x2 := zipArtifactFile("x", 0, 2)
+ y0 := zipArtifactFile("y", 0, 0)
+ //y1 := zipArtifactFile("y", 1, 0)
+ //y2 := zipArtifactFile("y", 1, 2)
+ z0 := zipArtifactFile("z", 0, 0)
+ z1 := zipArtifactFile("z", 1, 0)
+ //z2 := zipArtifactFile("z", 1, 2)
+
+ testCases := []struct {
+ name string
+ a, b []*ZipArtifactFile
+ diff zipDiff
+ }{
+ {
+ name: "same",
+ a: []*ZipArtifactFile{x0, y0, z0},
+ b: []*ZipArtifactFile{x0, y0, z0},
+ diff: zipDiff{nil, nil, nil},
+ },
+ {
+ name: "first only in a",
+ a: []*ZipArtifactFile{x0, y0, z0},
+ b: []*ZipArtifactFile{y0, z0},
+ diff: zipDiff{nil, []*ZipArtifactFile{x0}, nil},
+ },
+ {
+ name: "middle only in a",
+ a: []*ZipArtifactFile{x0, y0, z0},
+ b: []*ZipArtifactFile{x0, z0},
+ diff: zipDiff{nil, []*ZipArtifactFile{y0}, nil},
+ },
+ {
+ name: "last only in a",
+ a: []*ZipArtifactFile{x0, y0, z0},
+ b: []*ZipArtifactFile{x0, y0},
+ diff: zipDiff{nil, []*ZipArtifactFile{z0}, nil},
+ },
+
+ {
+ name: "first only in b",
+ a: []*ZipArtifactFile{y0, z0},
+ b: []*ZipArtifactFile{x0, y0, z0},
+ diff: zipDiff{nil, nil, []*ZipArtifactFile{x0}},
+ },
+ {
+ name: "middle only in b",
+ a: []*ZipArtifactFile{x0, z0},
+ b: []*ZipArtifactFile{x0, y0, z0},
+ diff: zipDiff{nil, nil, []*ZipArtifactFile{y0}},
+ },
+ {
+ name: "last only in b",
+ a: []*ZipArtifactFile{x0, y0},
+ b: []*ZipArtifactFile{x0, y0, z0},
+ diff: zipDiff{nil, nil, []*ZipArtifactFile{z0}},
+ },
+
+ {
+ name: "diff",
+ a: []*ZipArtifactFile{x0},
+ b: []*ZipArtifactFile{x1},
+ diff: zipDiff{[][2]*ZipArtifactFile{{x0, x1}}, nil, nil},
+ },
+ {
+ name: "diff plus unique last",
+ a: []*ZipArtifactFile{x0, y0},
+ b: []*ZipArtifactFile{x1, z0},
+ diff: zipDiff{[][2]*ZipArtifactFile{{x0, x1}}, []*ZipArtifactFile{y0}, []*ZipArtifactFile{z0}},
+ },
+ {
+ name: "diff plus unique first",
+ a: []*ZipArtifactFile{x0, z0},
+ b: []*ZipArtifactFile{y0, z1},
+ diff: zipDiff{[][2]*ZipArtifactFile{{z0, z1}}, []*ZipArtifactFile{x0}, []*ZipArtifactFile{y0}},
+ },
+ {
+ name: "diff size",
+ a: []*ZipArtifactFile{x0},
+ b: []*ZipArtifactFile{x2},
+ diff: zipDiff{[][2]*ZipArtifactFile{{x0, x2}}, nil, nil},
+ },
+ }
+
+ for _, test := range testCases {
+ t.Run(test.name, func(t *testing.T) {
+ diff := diffTargetFilesLists(test.a, test.b)
+
+ if !reflect.DeepEqual(diff, test.diff) {
+
+ t.Errorf("diffTargetFilesLists = %v, %v, %v", diff.modified, diff.onlyInA, diff.onlyInB)
+ t.Errorf(" want %v, %v, %v", test.diff.modified, test.diff.onlyInA, test.diff.onlyInB)
+ }
+ })
+ }
+}
diff --git a/cmd/diff_target_files/diff_target_files.go b/cmd/diff_target_files/diff_target_files.go
new file mode 100644
index 0000000..75bc8ee
--- /dev/null
+++ b/cmd/diff_target_files/diff_target_files.go
@@ -0,0 +1,82 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "flag"
+ "fmt"
+ "os"
+ "strings"
+)
+
+var (
+ whitelists = newMultiString("whitelist", "whitelist patterns in the form <pattern>[:<regex of line to ignore>]")
+ whitelistFiles = newMultiString("whitelist_file", "files containing whitelist definitions")
+
+ filters = newMultiString("filter", "filter patterns to apply to files in target-files.zip before comparing")
+)
+
+func newMultiString(name, usage string) *multiString {
+ var f multiString
+ flag.Var(&f, name, usage)
+ return &f
+}
+
+type multiString []string
+
+func (ms *multiString) String() string { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+ flag.Parse()
+
+ if flag.NArg() != 2 {
+ fmt.Fprintf(os.Stderr, "Error, exactly two arguments are required\n")
+ os.Exit(1)
+ }
+
+ whitelists, err := parseWhitelists(*whitelists, *whitelistFiles)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "Error parsing whitelists: %v\n", err)
+ os.Exit(1)
+ }
+
+ priZip, err := NewLocalZipArtifact(flag.Arg(0))
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "Error opening zip file %v: %v\n", flag.Arg(0), err)
+ os.Exit(1)
+ }
+ defer priZip.Close()
+
+ refZip, err := NewLocalZipArtifact(flag.Arg(1))
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "Error opening zip file %v: %v\n", flag.Arg(1), err)
+ os.Exit(1)
+ }
+ defer refZip.Close()
+
+ diff, err := compareTargetFiles(priZip, refZip, targetFilesPattern, whitelists, *filters)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "Error comparing zip files: %v\n", err)
+ os.Exit(1)
+ }
+
+ fmt.Print(diff.String())
+
+ if len(diff.modified) > 0 || len(diff.onlyInA) > 0 || len(diff.onlyInB) > 0 {
+ fmt.Fprintln(os.Stderr, "differences found")
+ os.Exit(1)
+ }
+}
diff --git a/cmd/diff_target_files/glob.go b/cmd/diff_target_files/glob.go
new file mode 100644
index 0000000..ed91af7
--- /dev/null
+++ b/cmd/diff_target_files/glob.go
@@ -0,0 +1,81 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "errors"
+ "path/filepath"
+ "strings"
+)
+
+// Match returns true if name matches pattern using the same rules as filepath.Match, but supporting
+// recursive globs (**).
+func Match(pattern, name string) (bool, error) {
+ if filepath.Base(pattern) == "**" {
+ return false, errors.New("pattern has '**' as last path element")
+ }
+
+ patternDir := pattern[len(pattern)-1] == '/'
+ nameDir := name[len(name)-1] == '/'
+
+ if patternDir != nameDir {
+ return false, nil
+ }
+
+ if nameDir {
+ name = name[:len(name)-1]
+ pattern = pattern[:len(pattern)-1]
+ }
+
+ for {
+ var patternFile, nameFile string
+ pattern, patternFile = filepath.Dir(pattern), filepath.Base(pattern)
+
+ if patternFile == "**" {
+ if strings.Contains(pattern, "**") {
+ return false, errors.New("pattern contains multiple '**'")
+ }
+ // Test if any prefix of name matches the part of the pattern before **
+ for {
+ if name == "." || name == "/" {
+ return name == pattern, nil
+ }
+ if match, err := filepath.Match(pattern, name); err != nil {
+ return false, err
+ } else if match {
+ return true, nil
+ }
+ name = filepath.Dir(name)
+ }
+ } else if strings.Contains(patternFile, "**") {
+ return false, errors.New("pattern contains other characters between '**' and path separator")
+ }
+
+ name, nameFile = filepath.Dir(name), filepath.Base(name)
+
+ if nameFile == "." && patternFile == "." {
+ return true, nil
+ } else if nameFile == "/" && patternFile == "/" {
+ return true, nil
+ } else if nameFile == "." || patternFile == "." || nameFile == "/" || patternFile == "/" {
+ return false, nil
+ }
+
+ match, err := filepath.Match(patternFile, nameFile)
+ if err != nil || !match {
+ return match, err
+ }
+ }
+}
diff --git a/cmd/diff_target_files/glob_test.go b/cmd/diff_target_files/glob_test.go
new file mode 100644
index 0000000..63df68d
--- /dev/null
+++ b/cmd/diff_target_files/glob_test.go
@@ -0,0 +1,158 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "testing"
+)
+
+func TestMatch(t *testing.T) {
+ testCases := []struct {
+ pattern, name string
+ match bool
+ }{
+ {"a/*", "b/", false},
+ {"a/*", "b/a", false},
+ {"a/*", "b/b/", false},
+ {"a/*", "b/b/c", false},
+ {"a/**/*", "b/", false},
+ {"a/**/*", "b/a", false},
+ {"a/**/*", "b/b/", false},
+ {"a/**/*", "b/b/c", false},
+
+ {"a/*", "a/", false},
+ {"a/*", "a/a", true},
+ {"a/*", "a/b/", false},
+ {"a/*", "a/b/c", false},
+
+ {"a/*/", "a/", false},
+ {"a/*/", "a/a", false},
+ {"a/*/", "a/b/", true},
+ {"a/*/", "a/b/c", false},
+
+ {"a/**/*", "a/", false},
+ {"a/**/*", "a/a", true},
+ {"a/**/*", "a/b/", false},
+ {"a/**/*", "a/b/c", true},
+
+ {"a/**/*/", "a/", false},
+ {"a/**/*/", "a/a", false},
+ {"a/**/*/", "a/b/", true},
+ {"a/**/*/", "a/b/c", false},
+
+ {"**/*", "a/", false},
+ {"**/*", "a/a", true},
+ {"**/*", "a/b/", false},
+ {"**/*", "a/b/c", true},
+
+ {"**/*/", "a/", true},
+ {"**/*/", "a/a", false},
+ {"**/*/", "a/b/", true},
+ {"**/*/", "a/b/c", false},
+
+ {`a/\*\*/\*`, `a/**/*`, true},
+ {`a/\*\*/\*`, `a/a/*`, false},
+ {`a/\*\*/\*`, `a/**/a`, false},
+ {`a/\*\*/\*`, `a/a/a`, false},
+
+ {`a/**/\*`, `a/**/*`, true},
+ {`a/**/\*`, `a/a/*`, true},
+ {`a/**/\*`, `a/**/a`, false},
+ {`a/**/\*`, `a/a/a`, false},
+
+ {`a/\*\*/*`, `a/**/*`, true},
+ {`a/\*\*/*`, `a/a/*`, false},
+ {`a/\*\*/*`, `a/**/a`, true},
+ {`a/\*\*/*`, `a/a/a`, false},
+
+ {`*/**/a`, `a/a/a`, true},
+ {`*/**/a`, `*/a/a`, true},
+ {`*/**/a`, `a/**/a`, true},
+ {`*/**/a`, `*/**/a`, true},
+
+ {`\*/\*\*/a`, `a/a/a`, false},
+ {`\*/\*\*/a`, `*/a/a`, false},
+ {`\*/\*\*/a`, `a/**/a`, false},
+ {`\*/\*\*/a`, `*/**/a`, true},
+
+ {`a/?`, `a/?`, true},
+ {`a/?`, `a/a`, true},
+ {`a/\?`, `a/?`, true},
+ {`a/\?`, `a/a`, false},
+
+ {`a/?`, `a/?`, true},
+ {`a/?`, `a/a`, true},
+ {`a/\?`, `a/?`, true},
+ {`a/\?`, `a/a`, false},
+
+ {`a/[a-c]`, `a/b`, true},
+ {`a/[abc]`, `a/b`, true},
+
+ {`a/\[abc]`, `a/b`, false},
+ {`a/\[abc]`, `a/[abc]`, true},
+
+ {`a/\[abc\]`, `a/b`, false},
+ {`a/\[abc\]`, `a/[abc]`, true},
+
+ {`a/?`, `a/?`, true},
+ {`a/?`, `a/a`, true},
+ {`a/\?`, `a/?`, true},
+ {`a/\?`, `a/a`, false},
+
+ {"/a/*", "/a/", false},
+ {"/a/*", "/a/a", true},
+ {"/a/*", "/a/b/", false},
+ {"/a/*", "/a/b/c", false},
+
+ {"/a/*/", "/a/", false},
+ {"/a/*/", "/a/a", false},
+ {"/a/*/", "/a/b/", true},
+ {"/a/*/", "/a/b/c", false},
+
+ {"/a/**/*", "/a/", false},
+ {"/a/**/*", "/a/a", true},
+ {"/a/**/*", "/a/b/", false},
+ {"/a/**/*", "/a/b/c", true},
+
+ {"/**/*", "/a/", false},
+ {"/**/*", "/a/a", true},
+ {"/**/*", "/a/b/", false},
+ {"/**/*", "/a/b/c", true},
+
+ {"/**/*/", "/a/", true},
+ {"/**/*/", "/a/a", false},
+ {"/**/*/", "/a/b/", true},
+ {"/**/*/", "/a/b/c", false},
+
+ {`a`, `/a`, false},
+ {`/a`, `a`, false},
+ {`*`, `/a`, false},
+ {`/*`, `a`, false},
+ {`**/*`, `/a`, false},
+ {`/**/*`, `a`, false},
+ }
+
+ for _, test := range testCases {
+ t.Run(test.pattern+","+test.name, func(t *testing.T) {
+ match, err := Match(test.pattern, test.name)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if match != test.match {
+ t.Errorf("want: %v, got %v", test.match, match)
+ }
+ })
+ }
+}
diff --git a/cmd/diff_target_files/known_nondeterminism.whitelist b/cmd/diff_target_files/known_nondeterminism.whitelist
new file mode 100644
index 0000000..6d71403
--- /dev/null
+++ b/cmd/diff_target_files/known_nondeterminism.whitelist
@@ -0,0 +1,10 @@
+// List of files that are known to be non-deterministic, along with the
+// bug number tracking the fix for the non-determinism.
+[
+ {
+ "Paths": [
+ // b/120039850
+ "system/framework/oat/*/services.art"
+ ]
+ }
+]
diff --git a/cmd/diff_target_files/props.whitelist b/cmd/diff_target_files/props.whitelist
new file mode 100644
index 0000000..9245b8b
--- /dev/null
+++ b/cmd/diff_target_files/props.whitelist
@@ -0,0 +1,18 @@
+[
+ // Ignore date, version and hostname properties in build.prop and prop.default files.
+ {
+ "Paths": [
+ "**/build.prop",
+ "**/prop.default"
+ ],
+ "IgnoreMatchingLines": [
+ "ro\\..*build\\.date=.*",
+ "ro\\..*build\\.date\\.utc=.*",
+ "ro\\..*build\\.version\\.incremental=.*",
+ "ro\\..*build\\.fingerprint=.*",
+ "ro\\.build\\.display\\.id=.*",
+ "ro\\.build\\.description=.*",
+ "ro\\.build\\.host=.*"
+ ]
+ }
+]
\ No newline at end of file
diff --git a/cmd/diff_target_files/target_files.go b/cmd/diff_target_files/target_files.go
new file mode 100644
index 0000000..8705ca7
--- /dev/null
+++ b/cmd/diff_target_files/target_files.go
@@ -0,0 +1,86 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "fmt"
+ "strings"
+)
+
+const targetFilesPattern = "*-target_files-*.zip"
+
+var targetZipPartitions = []string{
+ "BOOT/RAMDISK/",
+ "BOOT/",
+ "DATA/",
+ "ODM/",
+ "OEM/",
+ "PRODUCT/",
+ "PRODUCT_SERVICES/",
+ "ROOT/",
+ "SYSTEM/",
+ "SYSTEM_OTHER/",
+ "VENDOR/",
+}
+
+var targetZipFilter = []string{
+ "IMAGES/",
+ "OTA/",
+ "META/",
+ "PREBUILT_IMAGES/",
+ "RADIO/",
+}
+
+func filterTargetZipFiles(files []*ZipArtifactFile, artifact string, patterns []string) ([]*ZipArtifactFile, error) {
+ var ret []*ZipArtifactFile
+outer:
+ for _, f := range files {
+ if f.FileInfo().IsDir() {
+ continue
+ }
+
+ if artifact == targetFilesPattern {
+ found := false
+ for _, p := range targetZipPartitions {
+ if strings.HasPrefix(f.Name, p) {
+ f.Name = strings.ToLower(p) + strings.TrimPrefix(f.Name, p)
+ found = true
+ }
+ }
+ for _, filter := range targetZipFilter {
+ if strings.HasPrefix(f.Name, filter) {
+ continue outer
+ }
+ }
+
+ if !found {
+ return nil, fmt.Errorf("unmatched prefix for %s", f.Name)
+ }
+ }
+
+ if patterns != nil {
+ for _, pattern := range patterns {
+ match, _ := Match(pattern, f.Name)
+ if match {
+ ret = append(ret, f)
+ }
+ }
+ } else {
+ ret = append(ret, f)
+ }
+ }
+
+ return ret, nil
+}
diff --git a/cmd/diff_target_files/whitelist.go b/cmd/diff_target_files/whitelist.go
new file mode 100644
index 0000000..f00fc1e
--- /dev/null
+++ b/cmd/diff_target_files/whitelist.go
@@ -0,0 +1,251 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "encoding/json"
+ "io"
+ "os"
+ "regexp"
+ "strings"
+ "unicode"
+)
+
+type jsonWhitelist struct {
+ Paths []string
+ IgnoreMatchingLines []string
+}
+
+type whitelist struct {
+ path string
+ ignoreMatchingLines []string
+}
+
+func parseWhitelists(whitelists []string, whitelistFiles []string) ([]whitelist, error) {
+ var ret []whitelist
+
+ add := func(path string, ignoreMatchingLines []string) {
+ for i := range ret {
+ if ret[i].path == path {
+ ret[i].ignoreMatchingLines = append(ret[i].ignoreMatchingLines, ignoreMatchingLines...)
+ return
+ }
+ }
+
+ ret = append(ret, whitelist{
+ path: path,
+ ignoreMatchingLines: ignoreMatchingLines,
+ })
+ }
+
+ for _, file := range whitelistFiles {
+ newWhitelists, err := parseWhitelistFile(file)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, w := range newWhitelists {
+ add(w.path, w.ignoreMatchingLines)
+ }
+ }
+
+ for _, s := range whitelists {
+ colon := strings.IndexRune(s, ':')
+ var ignoreMatchingLines []string
+ if colon >= 0 {
+ ignoreMatchingLines = []string{s[colon+1:]}
+ }
+ add(s, ignoreMatchingLines)
+ }
+
+ return ret, nil
+}
+
+func parseWhitelistFile(file string) ([]whitelist, error) {
+ r, err := os.Open(file)
+ if err != nil {
+ return nil, err
+ }
+ defer r.Close()
+
+ d := json.NewDecoder(newJSONCommentStripper(r))
+
+ var jsonWhitelists []jsonWhitelist
+
+ err = d.Decode(&jsonWhitelists)
+
+ var whitelists []whitelist
+ for _, w := range jsonWhitelists {
+ for _, p := range w.Paths {
+ whitelists = append(whitelists, whitelist{
+ path: p,
+ ignoreMatchingLines: w.IgnoreMatchingLines,
+ })
+ }
+ }
+
+ return whitelists, err
+}
+
+func filterModifiedPaths(l [][2]*ZipArtifactFile, whitelists []whitelist) ([][2]*ZipArtifactFile, error) {
+outer:
+ for i := 0; i < len(l); i++ {
+ for _, w := range whitelists {
+ if match, err := Match(w.path, l[i][0].Name); err != nil {
+ return l, err
+ } else if match {
+ if match, err := diffIgnoringMatchingLines(l[i][0], l[i][1], w.ignoreMatchingLines); err != nil {
+ return l, err
+ } else if match || len(w.ignoreMatchingLines) == 0 {
+ l = append(l[:i], l[i+1:]...)
+ i--
+ }
+ continue outer
+ }
+ }
+ }
+
+ if len(l) == 0 {
+ l = nil
+ }
+
+ return l, nil
+}
+
+func filterNewPaths(l []*ZipArtifactFile, whitelists []whitelist) ([]*ZipArtifactFile, error) {
+outer:
+ for i := 0; i < len(l); i++ {
+ for _, w := range whitelists {
+ if match, err := Match(w.path, l[i].Name); err != nil {
+ return l, err
+ } else if match && len(w.ignoreMatchingLines) == 0 {
+ l = append(l[:i], l[i+1:]...)
+ i--
+ continue outer
+ }
+ }
+ }
+
+ if len(l) == 0 {
+ l = nil
+ }
+
+ return l, nil
+}
+
+func diffIgnoringMatchingLines(a *ZipArtifactFile, b *ZipArtifactFile, ignoreMatchingLines []string) (match bool, err error) {
+ lineMatchesIgnores := func(b []byte) (bool, error) {
+ for _, m := range ignoreMatchingLines {
+ if match, err := regexp.Match(m, b); err != nil {
+ return false, err
+ } else if match {
+ return match, nil
+ }
+ }
+ return false, nil
+ }
+
+ filter := func(z *ZipArtifactFile) ([]byte, error) {
+ var ret []byte
+
+ r, err := z.Open()
+ if err != nil {
+ return nil, err
+ }
+ s := bufio.NewScanner(r)
+
+ for s.Scan() {
+ if match, err := lineMatchesIgnores(s.Bytes()); err != nil {
+ return nil, err
+ } else if !match {
+ ret = append(ret, "\n"...)
+ ret = append(ret, s.Bytes()...)
+ }
+ }
+
+ return ret, nil
+ }
+
+ bufA, err := filter(a)
+ if err != nil {
+ return false, err
+ }
+ bufB, err := filter(b)
+ if err != nil {
+ return false, err
+ }
+
+ return bytes.Compare(bufA, bufB) == 0, nil
+}
+
+func applyWhitelists(diff zipDiff, whitelists []whitelist) (zipDiff, error) {
+ var err error
+
+ diff.modified, err = filterModifiedPaths(diff.modified, whitelists)
+ if err != nil {
+ return diff, err
+ }
+ diff.onlyInA, err = filterNewPaths(diff.onlyInA, whitelists)
+ if err != nil {
+ return diff, err
+ }
+ diff.onlyInB, err = filterNewPaths(diff.onlyInB, whitelists)
+ if err != nil {
+ return diff, err
+ }
+
+ return diff, nil
+}
+
+func newJSONCommentStripper(r io.Reader) *jsonCommentStripper {
+ return &jsonCommentStripper{
+ r: bufio.NewReader(r),
+ }
+}
+
+type jsonCommentStripper struct {
+ r *bufio.Reader
+ b []byte
+ err error
+}
+
+func (j *jsonCommentStripper) Read(buf []byte) (int, error) {
+ for len(j.b) == 0 {
+ if j.err != nil {
+ return 0, j.err
+ }
+
+ j.b, j.err = j.r.ReadBytes('\n')
+
+ if isComment(j.b) {
+ j.b = nil
+ }
+ }
+
+ n := copy(buf, j.b)
+ j.b = j.b[n:]
+ return n, nil
+}
+
+var commentPrefix = []byte("//")
+
+func isComment(b []byte) bool {
+ for len(b) > 0 && unicode.IsSpace(rune(b[0])) {
+ b = b[1:]
+ }
+ return bytes.HasPrefix(b, commentPrefix)
+}
diff --git a/cmd/diff_target_files/whitelist_test.go b/cmd/diff_target_files/whitelist_test.go
new file mode 100644
index 0000000..4b19fdd
--- /dev/null
+++ b/cmd/diff_target_files/whitelist_test.go
@@ -0,0 +1,126 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "archive/zip"
+ "bytes"
+ "reflect"
+ "testing"
+)
+
+func bytesToZipArtifactFile(name string, data []byte) *ZipArtifactFile {
+ buf := &bytes.Buffer{}
+ w := zip.NewWriter(buf)
+ f, err := w.Create(name)
+ if err != nil {
+ panic(err)
+ }
+ _, err = f.Write(data)
+ if err != nil {
+ panic(err)
+ }
+
+ w.Close()
+
+ r, err := zip.NewReader(bytes.NewReader(buf.Bytes()), int64(buf.Len()))
+ if err != nil {
+ panic(err)
+ }
+
+ return &ZipArtifactFile{r.File[0]}
+}
+
+var f1a = bytesToZipArtifactFile("dir/f1", []byte(`
+a
+foo: bar
+c
+`))
+
+var f1b = bytesToZipArtifactFile("dir/f1", []byte(`
+a
+foo: baz
+c
+`))
+
+var f2 = bytesToZipArtifactFile("dir/f2", nil)
+
+func Test_applyWhitelists(t *testing.T) {
+ type args struct {
+ diff zipDiff
+ whitelists []whitelist
+ }
+ tests := []struct {
+ name string
+ args args
+ want zipDiff
+ wantErr bool
+ }{
+ {
+ name: "simple",
+ args: args{
+ diff: zipDiff{
+ onlyInA: []*ZipArtifactFile{f1a, f2},
+ },
+ whitelists: []whitelist{{path: "dir/f1"}},
+ },
+ want: zipDiff{
+ onlyInA: []*ZipArtifactFile{f2},
+ },
+ },
+ {
+ name: "glob",
+ args: args{
+ diff: zipDiff{
+ onlyInA: []*ZipArtifactFile{f1a, f2},
+ },
+ whitelists: []whitelist{{path: "dir/*"}},
+ },
+ want: zipDiff{},
+ },
+ {
+ name: "modified",
+ args: args{
+ diff: zipDiff{
+ modified: [][2]*ZipArtifactFile{{f1a, f1b}},
+ },
+ whitelists: []whitelist{{path: "dir/*"}},
+ },
+ want: zipDiff{},
+ },
+ {
+ name: "matching lines",
+ args: args{
+ diff: zipDiff{
+ modified: [][2]*ZipArtifactFile{{f1a, f1b}},
+ },
+ whitelists: []whitelist{{path: "dir/*", ignoreMatchingLines: []string{"foo: .*"}}},
+ },
+ want: zipDiff{},
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got, err := applyWhitelists(tt.args.diff, tt.args.whitelists)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("applyWhitelists() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ if !reflect.DeepEqual(got, tt.want) {
+ t.Errorf("applyWhitelists() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
diff --git a/cmd/diff_target_files/zip_artifact.go b/cmd/diff_target_files/zip_artifact.go
new file mode 100644
index 0000000..08ce889
--- /dev/null
+++ b/cmd/diff_target_files/zip_artifact.go
@@ -0,0 +1,174 @@
+// Copyright 2019 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "archive/zip"
+ "context"
+ "fmt"
+ "hash/crc32"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+)
+
+// ZipArtifact represents a zip file that may be local or remote.
+type ZipArtifact interface {
+ // Files returns the list of files contained in the zip file.
+ Files() ([]*ZipArtifactFile, error)
+
+ // Close closes the zip file artifact.
+ Close()
+}
+
+// localZipArtifact is a handle to a local zip file artifact.
+type localZipArtifact struct {
+ zr *zip.ReadCloser
+ files []*ZipArtifactFile
+}
+
+// NewLocalZipArtifact returns a ZipArtifact for a local zip file.
+func NewLocalZipArtifact(name string) (ZipArtifact, error) {
+ zr, err := zip.OpenReader(name)
+ if err != nil {
+ return nil, err
+ }
+
+ var files []*ZipArtifactFile
+ for _, zf := range zr.File {
+ files = append(files, &ZipArtifactFile{zf})
+ }
+
+ return &localZipArtifact{
+ zr: zr,
+ files: files,
+ }, nil
+}
+
+// Files returns the list of files contained in the local zip file artifact.
+func (z *localZipArtifact) Files() ([]*ZipArtifactFile, error) {
+ return z.files, nil
+}
+
+// Close closes the buffered reader of the local zip file artifact.
+func (z *localZipArtifact) Close() {
+ z.zr.Close()
+}
+
+// ZipArtifactFile contains a zip.File handle to the data inside the remote *-target_files-*.zip
+// build artifact.
+type ZipArtifactFile struct {
+ *zip.File
+}
+
+// Extract begins extracting a file from inside a ZipArtifact. It returns an
+// ExtractedZipArtifactFile handle.
+func (zf *ZipArtifactFile) Extract(ctx context.Context, dir string,
+ limiter chan bool) *ExtractedZipArtifactFile {
+
+ d := &ExtractedZipArtifactFile{
+ initCh: make(chan struct{}),
+ }
+
+ go func() {
+ defer close(d.initCh)
+ limiter <- true
+ defer func() { <-limiter }()
+
+ zr, err := zf.Open()
+ if err != nil {
+ d.err = err
+ return
+ }
+ defer zr.Close()
+
+ crc := crc32.NewIEEE()
+ r := io.TeeReader(zr, crc)
+
+ if filepath.Clean(zf.Name) != zf.Name {
+ d.err = fmt.Errorf("invalid filename %q", zf.Name)
+ return
+ }
+ path := filepath.Join(dir, zf.Name)
+
+ err = os.MkdirAll(filepath.Dir(path), 0777)
+ if err != nil {
+ d.err = err
+ return
+ }
+
+ err = os.Remove(path)
+ if err != nil && !os.IsNotExist(err) {
+ d.err = err
+ return
+ }
+
+ if zf.Mode().IsRegular() {
+ w, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY, zf.Mode())
+ if err != nil {
+ d.err = err
+ return
+ }
+ defer w.Close()
+
+ _, err = io.Copy(w, r)
+ if err != nil {
+ d.err = err
+ return
+ }
+ } else if zf.Mode()&os.ModeSymlink != 0 {
+ target, err := ioutil.ReadAll(r)
+ if err != nil {
+ d.err = err
+ return
+ }
+
+ err = os.Symlink(string(target), path)
+ if err != nil {
+ d.err = err
+ return
+ }
+ } else {
+ d.err = fmt.Errorf("unknown mode %q", zf.Mode())
+ return
+ }
+
+ if crc.Sum32() != zf.CRC32 {
+ d.err = fmt.Errorf("crc mismatch for %v", zf.Name)
+ return
+ }
+
+ d.path = path
+ }()
+
+ return d
+}
+
+// ExtractedZipArtifactFile is a handle to a downloaded file from a remoteZipArtifact. The download
+// may still be in progress, and will be complete when Path() returns.
+type ExtractedZipArtifactFile struct {
+ initCh chan struct{}
+ err error
+
+ path string
+}
+
+// Path returns the path to the downloaded file and any errors that occurred during the download.
+// It will block until the download is complete.
+func (d *ExtractedZipArtifactFile) Path() (string, error) {
+ <-d.initCh
+ return d.path, d.err
+}
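The Extract and Path methods above are not exercised anywhere in this change (main only calls Files), so here is a minimal usage sketch based solely on the definitions above; the archive name "out.zip", the destination directory "extracted", and the concurrency limit of 4 are made-up values for illustration.

// Hypothetical usage of NewLocalZipArtifact, Extract, and Path; the names in
// quotes are invented. This would live in the same main package as
// zip_artifact.go, which already imports "context" and "fmt".
func extractAll() error {
	za, err := NewLocalZipArtifact("out.zip")
	if err != nil {
		return err
	}
	defer za.Close()

	files, err := za.Files()
	if err != nil {
		return err
	}

	limiter := make(chan bool, 4) // at most 4 concurrent extractions
	var pending []*ExtractedZipArtifactFile
	for _, f := range files {
		pending = append(pending, f.Extract(context.Background(), "extracted", limiter))
	}
	for _, p := range pending {
		path, err := p.Path() // blocks until the corresponding extraction finishes
		if err != nil {
			return err
		}
		fmt.Println("extracted", path)
	}
	return nil
}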
diff --git a/cmd/sbox/sbox.go b/cmd/sbox/sbox.go
index 0af1886..4167edb 100644
--- a/cmd/sbox/sbox.go
+++ b/cmd/sbox/sbox.go
@@ -24,6 +24,7 @@
"path"
"path/filepath"
"strings"
+ "time"
)
var (
@@ -265,6 +266,15 @@
if err != nil {
return err
}
+
+ // Update the timestamp of the output file in case the tool wrote an old timestamp (for example, tar can extract
+ // files with old timestamps).
+ now := time.Now()
+ err = os.Chtimes(tempPath, now, now)
+ if err != nil {
+ return err
+ }
+
err = os.Rename(tempPath, destPath)
if err != nil {
return err
diff --git a/java/hiddenapi_singleton.go b/java/hiddenapi_singleton.go
index 86531eb..139114b 100644
--- a/java/hiddenapi_singleton.go
+++ b/java/hiddenapi_singleton.go
@@ -236,6 +236,8 @@
android.PathForSource(ctx, "frameworks/base/config/hiddenapi-greylist-max-o.txt")).
FlagWithInput("--blacklist ",
android.PathForSource(ctx, "frameworks/base/config/hiddenapi-force-blacklist.txt")).
+ FlagWithInput("--greylist-packages ",
+ android.PathForSource(ctx, "frameworks/base/config/hiddenapi-greylist-packages.txt")).
FlagWithOutput("--output ", tempPath)
commitChangeForRestat(rule, tempPath, outputPath)
diff --git a/scripts/setup_go_workspace_for_soong.sh b/scripts/setup_go_workspace_for_soong.sh
index e2fb9fa..6374aae 100755
--- a/scripts/setup_go_workspace_for_soong.sh
+++ b/scripts/setup_go_workspace_for_soong.sh
@@ -1,7 +1,7 @@
#!/bin/bash
set -e
-# Copyright 2017 Google Inc. All rights reserved.
+# Copyright 2019 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,23 +15,174 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-#mounts the components of soong into a directory structure that Go tools and editors expect
+# Mounts the components of soong into a directory structure that Go tools
+# and editors expect.
-#move to the script's directory
-cd "$(dirname $0)"
-SCRIPT_PATH="$PWD"
-#find the root of the Repo checkout
-cd "${SCRIPT_PATH}"/../../..
-ANDROID_PATH="${PWD}"
-OUTPUT_PATH="$(echo ${GOPATH} | sed 's/\:.*//')" #if GOPATH contains multiple paths, use the first one
-
-if [ -z "${OUTPUT_PATH}" ]; then
- echo "Error; could not determine the desired location at which to create a Go-compatible workspace. Please update GOPATH to specify the desired destination directory"
+#####################################################################
+# Print the message to stderr with the prefix ERROR and abort this
+# script.
+#####################################################################
+function log_FATAL() {
+ echo "ERROR:" "$*" >&2
exit 1
-fi
+}
-function confirm() {
+#####################################################################
+# Print the message to stderr with the prefix WARN
+#####################################################################
+function log_WARN() {
+ echo "WARN:" "$*" >&2
+}
+
+
+#####################################################################
+# Print the message with the prefix INFO.
+#####################################################################
+function log_INFO() {
+ echo "INFO:" "$*"
+}
+
+
+#####################################################################
+# Find the root project directory of this repo. This is done by
+# finding the directory where this script lives and then going up one
+# directory to check whether the ".repo" directory exists. If not, keep
+# going up until we find the ".repo" directory or we reach the filesystem
+# root. The project root directory is printed to stdout.
+#####################################################################
+function root_dir() (
+ local dir
+ if ! dir="$("${readlink}" -e $(dirname "$0"))"; then
+ log_FATAL "failed to read the script's current directory."
+ fi
+
+ dir=${dir}/../../..
+ if ! dir="$("${readlink}" -e "${dir}")"; then
+ log_FATAL "Cannot find the root project directory"
+ fi
+
+ echo "${dir}"
+)
+
+
+#####################################################################
+# Executes a shell command by printing it to the screen first and
+# then evaluating it.
+#####################################################################
+function execute() {
+ echo "$@"
+ eval "$@"
+}
+
+
+#####################################################################
+# Returns the source directory of a path passed in from the BIND_PATHS
+# array.
+#####################################################################
+function bind_path_src_dir() (
+ local -r bind_path="$1"
+ echo "${bind_path/%|*/}"
+)
+
+
+#####################################################################
+# Returns the destination directory of a path passed in from the
+# BIND_PATHS array.
+#####################################################################
+function bind_path_dst_dir() (
+ local -r bind_path="$1"
+ echo "${bind_path/#*|}"
+)
+
+
+#####################################################################
+# Executes the bindfs command on Linux. Expects $1 to be the source
+# directory and $2 to be the destination directory.
+#####################################################################
+function linux_bind_dir() (
+ execute bindfs "$1" "$2"
+)
+
+#####################################################################
+# Executes the fusermount -u command on Linux. Expects $1 to be the
+# destination directory.
+#####################################################################
+function linux_unbind_dir() (
+ execute fusermount -u "$1"
+)
+
+#####################################################################
+# Executes the bindfs command on Darwin. Expects $1 to be the source
+# directory and $2 to be the destination directory.
+#####################################################################
+function darwin_bind_dir() (
+ execute bindfs -o allow_recursion -n "$1" "$2"
+)
+
+
+#####################################################################
+# Executes the umount command on Darwin to unbind a directory. Expects
+# $1 to be the destination directory.
+#####################################################################
+function darwin_unbind_dir() (
+ execute umount -f "$1"
+)
+
+
+#####################################################################
+# Bind all the paths that are specified in the BIND_PATHS array.
+#####################################################################
+function bind_all() (
+ local src_dir
+ local dst_dir
+
+ for path in ${BIND_PATHS[@]}; do
+ src_dir=$(bind_path_src_dir "${path}")
+
+ dst_dir=$(bind_path_dst_dir "${path}")
+ mkdir -p "${dst_dir}"
+
+ "${bind_dir}" ${src_dir} "${dst_dir}"
+ done
+
+ echo
+ log_INFO "Created GOPATH-compatible directory structure at ${OUTPUT_PATH}."
+)
+
+
+#####################################################################
+# Unbind all the paths that are specified in the BIND_PATHS array.
+#####################################################################
+function unbind_all() (
+ local dst_dir
+ local exit_code=0
+
+ # Need to go in reverse order since several parent directories may have
+ # been bound before their children.
+ for (( i=${#BIND_PATHS[@]}-1; i>=0; i-- )); do
+ dst_dir=$(bind_path_dst_dir "${BIND_PATHS[$i]}")
+
+ # Continue to unmount even if one of them fails.
+ if ! "${unbind_dir}" "${dst_dir}"; then
+ log_WARN "Failed to umount ${dst_dir}."
+ exit_code=1
+ fi
+ done
+
+ if [[ ${exit_code} -ne 0 ]]; then
+ exit ${exit_code}
+ fi
+
+ echo
+ log_INFO "Unmounted the GOPATH-compatible directory structure at ${OUTPUT_PATH}."
+)
+
+
+#####################################################################
+# Asks the user to create the GOPATH-compatible directory structure.
+#####################################################################
+function confirm() (
while true; do
echo "Will create GOPATH-compatible directory structure at ${OUTPUT_PATH}"
echo -n "Ok [Y/n]?"
@@ -42,48 +193,162 @@
if [ "${decision}" == "n" ]; then
return 1
else
- echo "Invalid choice ${decision}; choose either 'y' or 'n'"
+ log_WARN "Invalid choice ${decision}; choose either 'y' or 'n'"
fi
fi
done
+)
+
+
+#####################################################################
+# Help function.
+#####################################################################
+function help() (
+ cat <<EOF
+Mounts the components of soong into a directory structure that Go tools
+and editors expect.
+
+ --help
+ This help
+
+ --bind
+ Create the directory structure that Go tools and editors expect by
+ binding the AOSP source directories into it.
+
+ --unbind
+ Reverse operation of bind.
+
+If no flag is specified, --bind is selected by default.
+EOF
+)
+
+
+#####################################################################
+# Parse the arguments passed in to this script.
+#####################################################################
+function parse_arguments() {
+ while [[ -n "$1" ]]; do
+ case "$1" in
+ --bind)
+ ACTION="bind"
+ shift
+ ;;
+ --unbind)
+ ACTION="unbind"
+ shift
+ ;;
+ --help )
+ help
+ shift
+ exit 0
+ ;;
+ *)
+ log_WARN "Unknown option: $1"
+ help
+ exit 1
+ ;;
+ esac
+ done
+
+ if [[ -z "${ACTION}" ]]; then
+ ACTION=bind
+ fi
}
-function bindAll() {
- bindOne "${ANDROID_PATH}/build/blueprint" "${OUTPUT_PATH}/src/github.com/google/blueprint"
- bindOne "${ANDROID_PATH}/build/soong" "${OUTPUT_PATH}/src/android/soong"
- bindOne "${ANDROID_PATH}/art/build" "${OUTPUT_PATH}/src/android/soong/art"
- bindOne "${ANDROID_PATH}/external/golang-protobuf" "${OUTPUT_PATH}/src/github.com/golang/protobuf"
- bindOne "${ANDROID_PATH}/external/llvm/soong" "${OUTPUT_PATH}/src/android/soong/llvm"
- bindOne "${ANDROID_PATH}/external/clang/soong" "${OUTPUT_PATH}/src/android/soong/clang"
- echo
- echo "Created GOPATH-compatible directory structure at ${OUTPUT_PATH}"
-}
+#####################################################################
+# Verifies that the list of required binaries is installed on the
+# host in order to run this script.
+#####################################################################
+function check_exec_existence() (
+ function check() {
+ if ! hash "$1" &>/dev/null; then
+ log_FATAL "missing $1"
+ fi
+ }
-function bindOne() {
- #causes $newPath to mirror $existingPath
- existingPath="$1"
- newPath="$2"
- mkdir -p "$newPath"
- case $(uname -s) in
+ local bins
+ case "${os_type}" in
Darwin)
- echoAndDo bindfs -o allow_recursion -n "${existingPath}" "${newPath}"
+ bins=("bindfs" "greadlink")
;;
Linux)
- echoAndDo bindfs "${existingPath}" "${newPath}"
+ bins=("bindfs" "fusermount")
;;
+ *)
+ log_FATAL "${os_type} is not a recognized system."
esac
+
+ for bin in "${bins[@]}"; do
+ check "${bin}"
+ done
+)
+
+
+function main() {
+ parse_arguments "$@"
+
+ check_exec_existence
+
+ if [[ "${ACTION}" == "bind" ]]; then
+ if confirm; then
+ echo
+ bind_all
+ else
+ echo "skipping due to user request"
+ exit 1
+ fi
+ else
+ echo
+ unbind_all
+ fi
}
-function echoAndDo() {
- echo "$@"
- eval "$@"
-}
+readonly os_type="$(uname -s)"
+case "${os_type}" in
+ Darwin)
+ bind_dir=darwin_bind_dir
+ unbind_dir=darwin_unbind_dir
+ readlink=greadlink
+ ;;
+ Linux)
+ bind_dir=linux_bind_dir
+ unbind_dir=linux_unbind_dir
+ readlink=readlink
+ ;;
+ *)
+ log_FATAL "${os_type} is not a recognized system."
+esac
+readonly bind_dir
+readonly unbind_dir
+readonly readlink
-if confirm; then
- echo
- bindAll
-else
- echo "skipping due to user request"
- exit 1
+
+if ! ANDROID_PATH="$(root_dir)"; then
+ log_FATAL "failed to find the root of the repo checkout"
fi
+readonly ANDROID_PATH
+
+#if GOPATH contains multiple paths, use the first one
+if ! OUTPUT_PATH="$(echo ${GOPATH} | sed 's/\:.*//')"; then
+ log_FATAL "failed to extract the first GOPATH environment variable"
+fi
+readonly OUTPUT_PATH
+if [ -z "${OUTPUT_PATH}" ]; then
+ log_FATAL "Could not determine the desired location at which to create a" \
+ "Go-compatible workspace. Please update GOPATH to specify the" \
+ "desired destination directory."
+fi
+
+# Below are the paths to bind from src to dst. The paths are separated by |
+# where the left side is the source and the right side is the destination.
+readonly BIND_PATHS=(
+ "${ANDROID_PATH}/build/blueprint|${OUTPUT_PATH}/src/github.com/google/blueprint"
+ "${ANDROID_PATH}/build/soong|${OUTPUT_PATH}/src/android/soong"
+ "${ANDROID_PATH}/art/build|${OUTPUT_PATH}/src/android/soong/art"
+ "${ANDROID_PATH}/external/golang-protobuf|${OUTPUT_PATH}/src/github.com/golang/protobuf"
+ "${ANDROID_PATH}/external/llvm/soong|${OUTPUT_PATH}/src/android/soong/llvm"
+ "${ANDROID_PATH}/external/clang/soong|${OUTPUT_PATH}/src/android/soong/clang"
+)
+
+main "$@"