Merge "Move bazel from being a shell function to being a script at build/bazel/bin/bazel"
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 8fe5214..e325760 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -153,7 +153,6 @@
LOCAL_JAR_PROCESSOR_ARGS:=
LOCAL_JAVACFLAGS:=
LOCAL_JAVA_LANGUAGE_VERSION:=
-LOCAL_JAVA_LAYERS_FILE:=
LOCAL_JAVA_LIBRARIES:=
LOCAL_JAVA_RESOURCE_DIRS:=
LOCAL_JAVA_RESOURCE_FILES:=
diff --git a/core/definitions.mk b/core/definitions.mk
index 98fdd6c..afa7f7b 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2604,8 +2604,6 @@
$(if $(PRIVATE_SRCJARS),\@$(PRIVATE_SRCJAR_LIST_FILE)) \
|| ( rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) ; exit 41 ) \
fi
-$(if $(PRIVATE_JAVA_LAYERS_FILE), $(hide) build/make/tools/java-layers.py \
- $(PRIVATE_JAVA_LAYERS_FILE) @$(PRIVATE_JAVA_SOURCE_LIST),)
$(if $(PRIVATE_JAR_EXCLUDE_FILES), $(hide) find $(PRIVATE_CLASS_INTERMEDIATES_DIR) \
-name $(word 1, $(PRIVATE_JAR_EXCLUDE_FILES)) \
$(addprefix -o -name , $(wordlist 2, 999, $(PRIVATE_JAR_EXCLUDE_FILES))) \
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index 0f95202..89aa53c 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -56,10 +56,6 @@
include $(BUILD_SYSTEM)/java_common.mk
-# The layers file allows you to enforce a layering between java packages.
-# Run build/make/tools/java-layers.py for more details.
-layers_file := $(addprefix $(LOCAL_PATH)/, $(LOCAL_JAVA_LAYERS_FILE))
-
# List of dependencies for anything that needs all java sources in place
java_sources_deps := \
$(java_sources) \
@@ -72,7 +68,6 @@
# TODO(b/143658984): goma can't handle the --system argument to javac.
#$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
-$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
diff --git a/core/java.mk b/core/java.mk
index 01951c0..b13ef4d 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -200,10 +200,6 @@
$(eval $(call copy-one-file,$(full_classes_jar),$(full_classes_stubs_jar)))
ALL_MODULES.$(my_register_name).STUBS := $(full_classes_stubs_jar)
-# The layers file allows you to enforce a layering between java packages.
-# Run build/make/tools/java-layers.py for more details.
-layers_file := $(addprefix $(LOCAL_PATH)/, $(LOCAL_JAVA_LAYERS_FILE))
-$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
$(full_classes_compiled_jar): PRIVATE_WARNINGS_ENABLE := $(LOCAL_WARNINGS_ENABLE)
# Compile the java files to a .jar file.
diff --git a/target/board/Android.mk b/target/board/Android.mk
index baa3d3a..21c0c10 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -19,8 +19,11 @@
ifndef board_info_txt
board_info_txt := $(wildcard $(TARGET_DEVICE_DIR)/board-info.txt)
endif
-$(INSTALLED_ANDROID_INFO_TXT_TARGET): $(board_info_txt) build/make/tools/check_radio_versions.py
- $(hide) build/make/tools/check_radio_versions.py $< $(BOARD_INFO_CHECK)
+CHECK_RADIO_VERSIONS := $(HOST_OUT_EXECUTABLES)/check_radio_versions$(HOST_EXECUTABLE_SUFFIX)
+$(INSTALLED_ANDROID_INFO_TXT_TARGET): $(board_info_txt) $(CHECK_RADIO_VERSIONS)
+ $(hide) $(CHECK_RADIO_VERSIONS) \
+ --board_info_txt $(board_info_txt) \
+ --board_info_check $(BOARD_INFO_CHECK)
$(call pretty,"Generated: ($@)")
ifdef board_info_txt
$(hide) grep -v '#' $< > $@
diff --git a/tools/Android.bp b/tools/Android.bp
index bd326f1..f401058 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -54,3 +54,8 @@
name: "build-runfiles",
srcs: ["build-runfiles.cc"],
}
+
+python_binary_host {
+ name: "check_radio_versions",
+ srcs: ["check_radio_versions.py"],
+}
diff --git a/tools/check_radio_versions.py b/tools/check_radio_versions.py
index ebe621f..d1d50e6 100755
--- a/tools/check_radio_versions.py
+++ b/tools/check_radio_versions.py
@@ -22,11 +22,18 @@
except ImportError:
from sha import sha as sha1
-if len(sys.argv) < 2:
+import argparse
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--board_info_txt", nargs="?", required=True)
+parser.add_argument("--board_info_check", nargs="*", required=True)
+args = parser.parse_args()
+
+if not args.board_info_txt:
sys.exit(0)
build_info = {}
-f = open(sys.argv[1])
+f = open(args.board_info_txt)
for line in f:
line = line.strip()
if line.startswith("require"):
@@ -36,7 +43,7 @@
bad = False
-for item in sys.argv[2:]:
+for item in args.board_info_check:
key, fn = item.split(":", 1)
values = build_info.get(key, None)
@@ -52,8 +59,8 @@
try:
f = open(fn + ".sha1")
except IOError:
- if not bad: print
- print "*** Error opening \"%s.sha1\"; can't verify %s" % (fn, key)
+ if not bad: print()
+ print("*** Error opening \"%s.sha1\"; can't verify %s" % (fn, key))
bad = True
continue
for line in f:
@@ -63,17 +70,17 @@
versions[h] = v
if digest not in versions:
- if not bad: print
- print "*** SHA-1 hash of \"%s\" doesn't appear in \"%s.sha1\"" % (fn, fn)
+ if not bad: print()
+ print("*** SHA-1 hash of \"%s\" doesn't appear in \"%s.sha1\"" % (fn, fn))
bad = True
continue
if versions[digest] not in values:
- if not bad: print
- print "*** \"%s\" is version %s; not any %s allowed by \"%s\"." % (
- fn, versions[digest], key, sys.argv[1])
+ if not bad: print()
+ print("*** \"%s\" is version %s; not any %s allowed by \"%s\"." % (
+ fn, versions[digest], key, args.board_info_txt))
bad = True
if bad:
- print
+ print()
sys.exit(1)
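
For reference, a rough sketch of how the new flag interface parses what the updated Android.mk rule passes; the paths and the version-bootloader key below are hypothetical, only the two flags come from the change above.

    import argparse

    # Same flags as the rewritten script: --board_info_txt takes a single
    # optional value, --board_info_check takes zero or more "key:file" pairs.
    parser = argparse.ArgumentParser()
    parser.add_argument("--board_info_txt", nargs="?", required=True)
    parser.add_argument("--board_info_check", nargs="*", required=True)

    # Roughly what the make rule now invokes (hypothetical example values):
    args = parser.parse_args([
        "--board_info_txt", "device/acme/rocket/board-info.txt",
        "--board_info_check", "version-bootloader:bootloader.img",
    ])
    print(args.board_info_txt)    # device/acme/rocket/board-info.txt
    print(args.board_info_check)  # ['version-bootloader:bootloader.img']
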
diff --git a/tools/compliance/projectmetadata/projectmetadata.go b/tools/compliance/projectmetadata/projectmetadata.go
index b31413d..1861b47 100644
--- a/tools/compliance/projectmetadata/projectmetadata.go
+++ b/tools/compliance/projectmetadata/projectmetadata.go
@@ -40,11 +40,39 @@
project string
}
+// ProjectUrlMap maps url type name to url value
+type ProjectUrlMap map[string]string
+
+// DownloadUrl returns the URL of the first available download location, checking GIT, SVN, HG, and DARCS in that order; it returns "" if none is present.
+func (m ProjectUrlMap) DownloadUrl() string {
+ for _, urlType := range []string{"GIT", "SVN", "HG", "DARCS"} {
+ if url, ok := m[urlType]; ok {
+ return url
+ }
+ }
+ return ""
+}
+
// String returns a string representation of the metadata for error messages.
func (pm *ProjectMetadata) String() string {
return fmt.Sprintf("project: %q\n%s", pm.project, pm.proto.String())
}
+// Name returns the name of the project.
+func (pm *ProjectMetadata) Name() string {
+ return pm.proto.GetName()
+}
+
+// Version returns the version of the project if available.
+func (pm *ProjectMetadata) Version() string {
+ tp := pm.proto.GetThirdParty()
+ if tp != nil {
+ version := tp.GetVersion()
+ return version
+ }
+ return ""
+}
+
// VersionedName returns the name of the project including the version if any.
func (pm *ProjectMetadata) VersionedName() string {
name := pm.proto.GetName()
@@ -65,13 +93,34 @@
return pm.proto.GetDescription()
}
+// UrlsByTypeName returns a map of the project's URLs keyed by type name.
+func (pm *ProjectMetadata) UrlsByTypeName() ProjectUrlMap {
+ tp := pm.proto.GetThirdParty()
+ if tp == nil {
+ return nil
+ }
+ if len(tp.Url) == 0 {
+ return nil
+ }
+ urls := make(ProjectUrlMap)
+
+ for _, url := range tp.Url {
+ uri := url.GetValue()
+ if uri == "" {
+ continue
+ }
+ urls[project_metadata_proto.URL_Type_name[int32(url.GetType())]] = uri
+ }
+ return urls
+}
+
// projectIndex describes a project to be read; after `wg.Wait()`, will contain either
// a `ProjectMetadata`, pm (can be nil even without error), or a non-nil `err`.
type projectIndex struct {
project string
- pm *ProjectMetadata
- err error
- done chan struct{}
+ pm *ProjectMetadata
+ err error
+ done chan struct{}
}
// finish marks the task to read the `projectIndex` completed.
diff --git a/tools/compliance/projectmetadata/projectmetadata_test.go b/tools/compliance/projectmetadata/projectmetadata_test.go
index 1e4256f..0af0cd7 100644
--- a/tools/compliance/projectmetadata/projectmetadata_test.go
+++ b/tools/compliance/projectmetadata/projectmetadata_test.go
@@ -19,6 +19,7 @@
"strings"
"testing"
+ "android/soong/compliance/project_metadata_proto"
"android/soong/tools/compliance/testfs"
)
@@ -40,8 +41,79 @@
// NO_NAME_0_1 represents a METADATA file with a description but no name
NO_NAME_0_1 = `description: "my library" third_party { version: "0.1" }`
+
+ // URL values per type
+ GIT_URL = "http://example.github.com/my_lib"
+ SVN_URL = "http://example.svn.com/my_lib"
+ HG_URL = "http://example.hg.com/my_lib"
+ DARCS_URL = "http://example.darcs.com/my_lib"
+ PIPER_URL = "http://google3/third_party/my/package"
+ HOMEPAGE_URL = "http://example.com/homepage"
+ OTHER_URL = "http://google.com/"
+ ARCHIVE_URL = "http://ftp.example.com/"
+ LOCAL_SOURCE_URL = "https://android.googlesource.com/platform/external/apache-http/"
)
+// libWithUrl returns a METADATA file body with a url entry for each of the given url types.
+func libWithUrl(urlTypes ...string) string {
+ var sb strings.Builder
+
+ fmt.Fprintln(&sb, `name: "mylib" description: "my library"
+ third_party {
+ version: "1.0"`)
+
+ for _, urltype := range urlTypes {
+ var urlValue string
+ switch urltype {
+ case "GIT":
+ urlValue = GIT_URL
+ case "SVN":
+ urlValue = SVN_URL
+ case "HG":
+ urlValue = HG_URL
+ case "DARCS":
+ urlValue = DARCS_URL
+ case "PIPER":
+ urlValue = PIPER_URL
+ case "HOMEPAGE":
+ urlValue = HOMEPAGE_URL
+ case "OTHER":
+ urlValue = OTHER_URL
+ case "ARCHIVE":
+ urlValue = ARCHIVE_URL
+ case "LOCAL_SOURCE":
+ urlValue = LOCAL_SOURCE_URL
+ default:
+ panic(fmt.Errorf("unknown url type: %q. Please update libWithUrl() in build/make/tools/compliance/projectmetadata/projectmetadata_test.go", urltype))
+ }
+ fmt.Fprintf(&sb, " url { type: %s value: %q }\n", urltype, urlValue)
+ }
+ fmt.Fprintln(&sb, `}`)
+
+ return sb.String()
+}
+
+func TestVerifyAllUrlTypes(t *testing.T) {
+ t.Run("verifyAllUrlTypes", func(t *testing.T) {
+ types := make([]string, 0, len(project_metadata_proto.URL_Type_value))
+ for name := range project_metadata_proto.URL_Type_value {
+ types = append(types, name)
+ }
+ libWithUrl(types...)
+ })
+}
+
+func TestUnknownPanics(t *testing.T) {
+ t.Run("Unknown panics", func(t *testing.T) {
+ defer func() {
+ if r := recover(); r == nil {
+ t.Errorf("unexpected success: got no error, want panic")
+ }
+ }()
+ libWithUrl("SOME WILD VALUE THAT DOES NOT EXIST")
+ })
+}
+
func TestReadMetadataForProjects(t *testing.T) {
tests := []struct {
name string
@@ -56,7 +128,13 @@
"/a/METADATA": []byte("name: \"Android\"\n"),
},
projects: []string{"/a"},
- expected: []pmeta{{project: "/a", versionedName: "Android"}},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "Android",
+ name: "Android",
+ version: "",
+ downloadUrl: "",
+ }},
},
{
name: "versioned",
@@ -64,7 +142,237 @@
"/a/METADATA": []byte(MY_LIB_1_0),
},
projects: []string{"/a"},
- expected: []pmeta{{project: "/a", versionedName: "mylib_v_1.0"}},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "lib_with_homepage",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("HOMEPAGE")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "lib_with_git",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("GIT")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: GIT_URL,
+ }},
+ },
+ {
+ name: "lib_with_svn",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("SVN")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: SVN_URL,
+ }},
+ },
+ {
+ name: "lib_with_hg",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("HG")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: HG_URL,
+ }},
+ },
+ {
+ name: "lib_with_darcs",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("DARCS")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: DARCS_URL,
+ }},
+ },
+ {
+ name: "lib_with_piper",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("PIPER")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "lib_with_other",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("OTHER")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "lib_with_local_source",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("LOCAL_SOURCE")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "lib_with_archive",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("ARCHIVE")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "lib_with_all_downloads",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("DARCS", "HG", "SVN", "GIT")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: GIT_URL,
+ }},
+ },
+ {
+ name: "lib_with_all_downloads_in_different_order",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("DARCS", "GIT", "SVN", "HG")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: GIT_URL,
+ }},
+ },
+ {
+ name: "lib_with_all_but_git",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("DARCS", "HG", "SVN")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: SVN_URL,
+ }},
+ },
+ {
+ name: "lib_with_all_but_git_and_svn",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("DARCS", "HG")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: HG_URL,
+ }},
+ },
+ {
+ name: "lib_with_all_nondownloads_and_git",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("HOMEPAGE", "LOCAL_SOURCE", "PIPER", "ARCHIVE", "GIT")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: GIT_URL,
+ }},
+ },
+ {
+ name: "lib_with_all_nondownloads",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl("HOMEPAGE", "LOCAL_SOURCE", "PIPER", "ARCHIVE")),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
+ },
+ {
+ name: "lib_with_no_urls",
+ fs: &testfs.TestFS{
+ "/a/METADATA": []byte(libWithUrl()),
+ },
+ projects: []string{"/a"},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
},
{
name: "versioneddesc",
@@ -72,7 +380,13 @@
"/a/METADATA": []byte(NO_NAME_0_1),
},
projects: []string{"/a"},
- expected: []pmeta{{project: "/a", versionedName: "my library"}},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "my library",
+ name: "",
+ version: "0.1",
+ downloadUrl: "",
+ }},
},
{
name: "unterminated",
@@ -91,9 +405,27 @@
},
projects: []string{"/a", "/b", "/c"},
expected: []pmeta{
- {project: "/a", versionedName: ""},
- {project: "/b", versionedName: "mylib_v_1.0"},
- {project: "/c", versionedName: "my library"},
+ {
+ project: "/a",
+ versionedName: "",
+ name: "",
+ version: "",
+ downloadUrl: "",
+ },
+ {
+ project: "/b",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ },
+ {
+ project: "/c",
+ versionedName: "my library",
+ name: "",
+ version: "0.1",
+ downloadUrl: "",
+ },
},
},
{
@@ -104,8 +436,20 @@
},
projects: []string{"/a", "/b", "/c"},
expected: []pmeta{
- {project: "/a", versionedName: ""},
- {project: "/b", versionedName: "mylib_v_1.0"},
+ {
+ project: "/a",
+ versionedName: "",
+ name: "",
+ version: "",
+ downloadUrl: "",
+ },
+ {
+ project: "/b",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ },
},
},
{
@@ -116,8 +460,20 @@
},
projects: []string{"/a", "/b", "/c"},
expected: []pmeta{
- {project: "/a", versionedName: ""},
- {project: "/c", versionedName: "my library"},
+ {
+ project: "/a",
+ versionedName: "",
+ name: "",
+ version: "",
+ downloadUrl: "",
+ },
+ {
+ project: "/c",
+ versionedName: "my library",
+ name: "",
+ version: "0.1",
+ downloadUrl: "",
+ },
},
},
{
@@ -128,8 +484,20 @@
},
projects: []string{"/a", "/b", "/c"},
expected: []pmeta{
- {project: "/b", versionedName: "mylib_v_1.0"},
- {project: "/c", versionedName: "my library"},
+ {
+ project: "/b",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ },
+ {
+ project: "/c",
+ versionedName: "my library",
+ name: "",
+ version: "0.1",
+ downloadUrl: "",
+ },
},
},
{
@@ -170,7 +538,13 @@
"/a/METADATA": []byte(EMPTY),
},
projects: []string{"/a"},
- expected: []pmeta{{project: "/a", versionedName: ""}},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "",
+ name: "",
+ version: "",
+ downloadUrl: "",
+ }},
},
{
name: "emptyother",
@@ -191,7 +565,13 @@
"/a/METADATA.android": []byte(MY_LIB_1_0),
},
projects: []string{"/a"},
- expected: []pmeta{{project: "/a", versionedName: "mylib_v_1.0"}},
+ expected: []pmeta{{
+ project: "/a",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ }},
},
{
name: "enchilada",
@@ -203,9 +583,27 @@
},
projects: []string{"/a", "/b", "/c"},
expected: []pmeta{
- {project: "/a", versionedName: ""},
- {project: "/b", versionedName: "mylib_v_1.0"},
- {project: "/c", versionedName: "my library"},
+ {
+ project: "/a",
+ versionedName: "",
+ name: "",
+ version: "",
+ downloadUrl: "",
+ },
+ {
+ project: "/b",
+ versionedName: "mylib_v_1.0",
+ name: "mylib",
+ version: "1.0",
+ downloadUrl: "",
+ },
+ {
+ project: "/c",
+ versionedName: "my library",
+ name: "",
+ version: "0.1",
+ downloadUrl: "",
+ },
},
},
}
@@ -255,10 +653,13 @@
type pmeta struct {
project string
versionedName string
+ name string
+ version string
+ downloadUrl string
}
func (pm pmeta) String() string {
- return fmt.Sprintf("project: %q versionedName: %q\n", pm.project, pm.versionedName)
+ return fmt.Sprintf("project: %q versionedName: %q name: %q version: %q downloadUrl: %q\n", pm.project, pm.versionedName, pm.name, pm.version, pm.downloadUrl)
}
func (pm pmeta) equals(other *ProjectMetadata) bool {
@@ -268,6 +669,15 @@
if pm.versionedName != other.VersionedName() {
return false
}
+ if pm.name != other.Name() {
+ return false
+ }
+ if pm.version != other.Version() {
+ return false
+ }
+ if pm.downloadUrl != other.UrlsByTypeName().DownloadUrl() {
+ return false
+ }
return true
}
@@ -283,6 +693,15 @@
if pm.versionedName != other.VersionedName() {
fmt.Fprintf(&sb, " versionedName: %q", other.VersionedName())
}
+ if pm.name != other.Name() {
+ fmt.Fprintf(&sb, " name: %q", other.Name())
+ }
+ if pm.version != other.Version() {
+ fmt.Fprintf(&sb, " version: %q", other.Version())
+ }
+ if pm.downloadUrl != other.UrlsByTypeName().DownloadUrl() {
+ fmt.Fprintf(&sb, " downloadUrl: %q", other.UrlsByTypeName().DownloadUrl())
+ }
fmt.Fprintf(&sb, ", want")
if pm.project != other.project {
fmt.Fprintf(&sb, " project: %q", pm.project)
@@ -290,5 +709,14 @@
if pm.versionedName != other.VersionedName() {
fmt.Fprintf(&sb, " versionedName: %q", pm.versionedName)
}
+ if pm.name != other.Name() {
+ fmt.Fprintf(&sb, " name: %q", pm.name)
+ }
+ if pm.version != other.Version() {
+ fmt.Fprintf(&sb, " version: %q", pm.version)
+ }
+ if pm.downloadUrl != other.UrlsByTypeName().DownloadUrl() {
+ fmt.Fprintf(&sb, " downloadUrl: %q", pm.downloadUrl)
+ }
return sb.String()
}
diff --git a/tools/fileslist_util.py b/tools/fileslist_util.py
index ff40d51..a1b1197 100755
--- a/tools/fileslist_util.py
+++ b/tools/fileslist_util.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright (C) 2016 The Android Open Source Project
#
@@ -15,7 +15,9 @@
# limitations under the License.
#
-import getopt, json, sys
+import argparse
+import json
+import sys
def PrintFileNames(path):
with open(path) as jf:
@@ -27,42 +29,25 @@
with open(path) as jf:
data = json.load(jf)
for line in data:
- print "{0:12d} {1}".format(line["Size"], line["Name"])
+ print(f"{line['Size']:12d} {line['Name']}")
-def PrintUsage(name):
- print("""
-Usage: %s -[nc] json_files_list
- -n produces list of files only
- -c produces classic installed-files.txt
-""" % (name))
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("-n", action="store_true",
+ help="produces list of files only")
+ parser.add_argument("-c", action="store_true",
+ help="produces classic installed-files.txt")
+ parser.add_argument("json_files_list")
+ args = parser.parse_args()
-def main(argv):
- try:
- opts, args = getopt.getopt(argv[1:], "nc", "")
- except getopt.GetoptError, err:
- print(err)
- PrintUsage(argv[0])
- sys.exit(2)
-
- if len(opts) == 0:
- print("No conversion option specified")
- PrintUsage(argv[0])
- sys.exit(2)
-
- if len(args) == 0:
- print("No input file specified")
- PrintUsage(argv[0])
- sys.exit(2)
-
- for o, a in opts:
- if o == ("-n"):
- PrintFileNames(args[0])
- sys.exit()
- elif o == ("-c"):
- PrintCanonicalList(args[0])
- sys.exit()
- else:
- assert False, "Unsupported option"
+ if args.n and args.c:
+ sys.exit("Cannot specify both -n and -c")
+ elif args.n:
+ PrintFileNames(args.json_files_list)
+ elif args.c:
+ PrintCanonicalList(args.json_files_list)
+ else:
+ sys.exit("No conversion option specified")
if __name__ == '__main__':
- main(sys.argv)
+ main()
diff --git a/tools/findleaves.py b/tools/findleaves.py
index 97302e9..86f3f3a 100755
--- a/tools/findleaves.py
+++ b/tools/findleaves.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright (C) 2009 The Android Open Source Project
#
@@ -121,7 +121,7 @@
results = list(set(perform_find(mindepth, prune, dirlist, filenames)))
results.sort()
for r in results:
- print r
+ print(r)
if __name__ == "__main__":
main(sys.argv)
diff --git a/tools/fs_config/Android.bp b/tools/fs_config/Android.bp
index d57893c..55fdca4 100644
--- a/tools/fs_config/Android.bp
+++ b/tools/fs_config/Android.bp
@@ -43,14 +43,6 @@
python_binary_host {
name: "fs_config_generator",
srcs: ["fs_config_generator.py"],
- version: {
- py2: {
- enabled: true,
- },
- py3: {
- enabled: false,
- },
- },
}
python_test_host {
@@ -60,14 +52,6 @@
"test_fs_config_generator.py",
"fs_config_generator.py",
],
- version: {
- py2: {
- enabled: true,
- },
- py3: {
- enabled: false,
- },
- },
}
target_fs_config_gen_filegroup {
diff --git a/tools/fs_config/fs_config_generator.py b/tools/fs_config/fs_config_generator.py
index cb1616a..44480b8 100755
--- a/tools/fs_config/fs_config_generator.py
+++ b/tools/fs_config/fs_config_generator.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
"""Generates config files for Android file system properties.
This script is used for generating configuration files for configuring
@@ -11,7 +11,7 @@
"""
import argparse
-import ConfigParser
+import configparser
import ctypes
import re
import sys
@@ -463,7 +463,7 @@
# No core AIDs should be within any oem range.
for aid in self._aid_value_to_name:
for ranges in self._ranges.values():
- if Utils.in_any_range(aid, ranges):
+ if Utils.in_any_range(int(aid, 0), ranges):
name = self._aid_value_to_name[aid]
raise ValueError(
'AID "%s" value: %u within reserved OEM Range: "%s"' %
@@ -569,7 +569,7 @@
# override previous
# sections.
- config = ConfigParser.ConfigParser()
+ config = configparser.ConfigParser()
config.read(file_name)
for section in config.sections():
@@ -613,7 +613,7 @@
ranges = None
- partitions = self._ranges.keys()
+ partitions = list(self._ranges.keys())
partitions.sort(key=len, reverse=True)
for partition in partitions:
if aid.friendly.startswith(partition):
@@ -1073,7 +1073,7 @@
user_binary = bytearray(ctypes.c_uint16(int(user, 0)))
group_binary = bytearray(ctypes.c_uint16(int(group, 0)))
caps_binary = bytearray(ctypes.c_uint64(caps_value))
- path_binary = ctypes.create_string_buffer(path,
+ path_binary = ctypes.create_string_buffer(path.encode(),
path_length_aligned_64).raw
out_file.write(length_binary)
@@ -1169,21 +1169,21 @@
hdr = AIDHeaderParser(args['hdrfile'])
max_name_length = max(len(aid.friendly) + 1 for aid in hdr.aids)
- print AIDArrayGen._GENERATED
- print
- print AIDArrayGen._INCLUDE
- print
- print AIDArrayGen._STRUCT_FS_CONFIG % max_name_length
- print
- print AIDArrayGen._OPEN_ID_ARRAY
+ print(AIDArrayGen._GENERATED)
+ print()
+ print(AIDArrayGen._INCLUDE)
+ print()
+ print(AIDArrayGen._STRUCT_FS_CONFIG % max_name_length)
+ print()
+ print(AIDArrayGen._OPEN_ID_ARRAY)
for aid in hdr.aids:
- print AIDArrayGen._ID_ENTRY % (aid.friendly, aid.identifier)
+ print(AIDArrayGen._ID_ENTRY % (aid.friendly, aid.identifier))
- print AIDArrayGen._CLOSE_FILE_STRUCT
- print
- print AIDArrayGen._COUNT
- print
+ print(AIDArrayGen._CLOSE_FILE_STRUCT)
+ print()
+ print(AIDArrayGen._COUNT)
+ print()
@generator('oemaid')
@@ -1225,15 +1225,15 @@
parser = FSConfigFileParser(args['fsconfig'], hdr_parser.ranges)
- print OEMAidGen._GENERATED
+ print(OEMAidGen._GENERATED)
- print OEMAidGen._FILE_IFNDEF_DEFINE
+ print(OEMAidGen._FILE_IFNDEF_DEFINE)
for aid in parser.aids:
self._print_aid(aid)
- print
+ print()
- print OEMAidGen._FILE_ENDIF
+ print(OEMAidGen._FILE_ENDIF)
def _print_aid(self, aid):
"""Prints a valid #define AID identifier to stdout.
@@ -1245,10 +1245,10 @@
# print the source file location of the AID
found_file = aid.found
if found_file != self._old_file:
- print OEMAidGen._FILE_COMMENT % found_file
+ print(OEMAidGen._FILE_COMMENT % found_file)
self._old_file = found_file
- print OEMAidGen._GENERIC_DEFINE % (aid.identifier, aid.value)
+ print(OEMAidGen._GENERIC_DEFINE % (aid.identifier, aid.value))
@generator('passwd')
@@ -1292,7 +1292,7 @@
return
aids_by_partition = {}
- partitions = hdr_parser.ranges.keys()
+ partitions = list(hdr_parser.ranges.keys())
partitions.sort(key=len, reverse=True)
for aid in aids:
@@ -1331,7 +1331,7 @@
except ValueError as exception:
sys.exit(exception)
- print "%s::%s:%s::/:%s" % (logon, uid, uid, aid.login_shell)
+ print("%s::%s:%s::/:%s" % (logon, uid, uid, aid.login_shell))
@generator('group')
@@ -1356,7 +1356,7 @@
except ValueError as exception:
sys.exit(exception)
- print "%s::%s:" % (logon, uid)
+ print("%s::%s:" % (logon, uid))
@generator('print')
@@ -1379,7 +1379,7 @@
aids.sort(key=lambda item: int(item.normalized_value))
for aid in aids:
- print '%s %s' % (aid.identifier, aid.normalized_value)
+ print('%s %s' % (aid.identifier, aid.normalized_value))
def main():
@@ -1393,7 +1393,7 @@
gens = generator.get()
# for each gen, instantiate and add them as an option
- for name, gen in gens.iteritems():
+ for name, gen in gens.items():
generator_option_parser = subparser.add_parser(name, help=gen.__doc__)
generator_option_parser.set_defaults(which=name)
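
Two of the mechanical Python 3 fixes above are easy to miss; the standalone sketch below (with made-up data) shows why list() and .encode() are needed, independent of the generator itself.

    import ctypes

    ranges = {"vendor": [(2900, 2999)], "system": [(0, 2899)]}

    # Python 3's dict.keys() returns a view with no .sort(), so the generator
    # now copies the keys into a list before sorting by length.
    partitions = list(ranges.keys())
    partitions.sort(key=len, reverse=True)

    # ctypes.create_string_buffer() only accepts bytes (or an int size) in
    # Python 3, hence the added path.encode() before padding to the aligned
    # length.
    path = "/system/bin/init"
    path_binary = ctypes.create_string_buffer(path.encode(), 32).raw
    assert len(path_binary) == 32
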
diff --git a/tools/fs_config/test_fs_config_generator.py b/tools/fs_config/test_fs_config_generator.py
index 76ed8f4..cbf46a1 100755
--- a/tools/fs_config/test_fs_config_generator.py
+++ b/tools/fs_config/test_fs_config_generator.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
"""Unit test suite for the fs_config_genertor.py tool."""
import tempfile
@@ -64,7 +64,7 @@
def test_aid_header_parser_good(self):
"""Test AID Header Parser good input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_FOO 1000
@@ -91,7 +91,7 @@
def test_aid_header_parser_good_unordered(self):
"""Test AID Header Parser good unordered input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_FOO 1000
@@ -118,7 +118,7 @@
def test_aid_header_parser_bad_aid(self):
"""Test AID Header Parser bad aid input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_FOO "bad"
@@ -131,7 +131,7 @@
def test_aid_header_parser_bad_oem_range(self):
"""Test AID Header Parser bad oem range input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_OEM_RESERVED_START 2900
@@ -145,7 +145,7 @@
def test_aid_header_parser_bad_oem_range_no_end(self):
"""Test AID Header Parser bad oem range (no end) input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_OEM_RESERVED_START 2900
@@ -158,7 +158,7 @@
def test_aid_header_parser_bad_oem_range_no_start(self):
"""Test AID Header Parser bad oem range (no start) input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_OEM_RESERVED_END 2900
@@ -171,7 +171,7 @@
def test_aid_header_parser_bad_oem_range_duplicated(self):
"""Test AID Header Parser bad oem range (no start) input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_OEM_RESERVED_START 2000
@@ -187,7 +187,7 @@
def test_aid_header_parser_bad_oem_range_mismatch_start_end(self):
"""Test AID Header Parser bad oem range mismatched input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_OEM_RESERVED_START 2900
@@ -201,7 +201,7 @@
def test_aid_header_parser_bad_duplicate_ranges(self):
"""Test AID Header Parser exits cleanly on duplicate AIDs"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_FOO 100
@@ -222,7 +222,7 @@
- https://android-review.googlesource.com/#/c/313169
"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
#define AID_APP 10000 /* TODO: switch users over to AID_APP_START */
@@ -257,7 +257,7 @@
def test_fs_config_file_parser_good(self):
"""Test FSConfig Parser good input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
[/system/bin/file]
@@ -305,7 +305,7 @@
def test_fs_config_file_parser_bad(self):
"""Test FSConfig Parser bad input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
[/system/bin/file]
@@ -319,7 +319,7 @@
def test_fs_config_file_parser_bad_aid_range(self):
"""Test FSConfig Parser bad aid range value input file"""
- with tempfile.NamedTemporaryFile() as temp_file:
+ with tempfile.NamedTemporaryFile(mode='w') as temp_file:
temp_file.write(
textwrap.dedent("""
[AID_OEM1]
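
The repeated mode='w' change is needed because tempfile.NamedTemporaryFile defaults to binary mode ('w+b'), where writing the textwrap.dedent(...) strings would raise TypeError under Python 3. A minimal sketch:

    import tempfile
    import textwrap

    with tempfile.NamedTemporaryFile(mode='w') as temp_file:
        # Text mode accepts str; the default 'w+b' mode would fail with
        # "TypeError: a bytes-like object is required, not 'str'".
        temp_file.write(textwrap.dedent("""
            #define AID_FOO 1000
        """))
        temp_file.flush()
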
diff --git a/tools/java-layers.py b/tools/java-layers.py
deleted file mode 100755
index b3aec2b..0000000
--- a/tools/java-layers.py
+++ /dev/null
@@ -1,257 +0,0 @@
-#!/usr/bin/env python
-
-import os
-import re
-import sys
-
-def fail_with_usage():
- sys.stderr.write("usage: java-layers.py DEPENDENCY_FILE SOURCE_DIRECTORIES...\n")
- sys.stderr.write("\n")
- sys.stderr.write("Enforces layering between java packages. Scans\n")
- sys.stderr.write("DIRECTORY and prints errors when the packages violate\n")
- sys.stderr.write("the rules defined in the DEPENDENCY_FILE.\n")
- sys.stderr.write("\n")
- sys.stderr.write("Prints a warning when an unknown package is encountered\n")
- sys.stderr.write("on the assumption that it should fit somewhere into the\n")
- sys.stderr.write("layering.\n")
- sys.stderr.write("\n")
- sys.stderr.write("DEPENDENCY_FILE format\n")
- sys.stderr.write(" - # starts comment\n")
- sys.stderr.write(" - Lines consisting of two java package names: The\n")
- sys.stderr.write(" first package listed must not contain any references\n")
- sys.stderr.write(" to any classes present in the second package, or any\n")
- sys.stderr.write(" of its dependencies.\n")
- sys.stderr.write(" - Lines consisting of one java package name: The\n")
- sys.stderr.write(" packge is assumed to be a high level package and\n")
- sys.stderr.write(" nothing may depend on it.\n")
- sys.stderr.write(" - Lines consisting of a dash (+) followed by one java\n")
- sys.stderr.write(" package name: The package is considered a low level\n")
- sys.stderr.write(" package and may not import any of the other packages\n")
- sys.stderr.write(" listed in the dependency file.\n")
- sys.stderr.write(" - Lines consisting of a plus (-) followed by one java\n")
- sys.stderr.write(" package name: The package is considered \'legacy\'\n")
- sys.stderr.write(" and excluded from errors.\n")
- sys.stderr.write("\n")
- sys.exit(1)
-
-class Dependency:
- def __init__(self, filename, lineno, lower, top, lowlevel, legacy):
- self.filename = filename
- self.lineno = lineno
- self.lower = lower
- self.top = top
- self.lowlevel = lowlevel
- self.legacy = legacy
- self.uppers = []
- self.transitive = set()
-
- def matches(self, imp):
- for d in self.transitive:
- if imp.startswith(d):
- return True
- return False
-
-class Dependencies:
- def __init__(self, deps):
- def recurse(obj, dep, visited):
- global err
- if dep in visited:
- sys.stderr.write("%s:%d: Circular dependency found:\n"
- % (dep.filename, dep.lineno))
- for v in visited:
- sys.stderr.write("%s:%d: Dependency: %s\n"
- % (v.filename, v.lineno, v.lower))
- err = True
- return
- visited.append(dep)
- for upper in dep.uppers:
- obj.transitive.add(upper)
- if upper in deps:
- recurse(obj, deps[upper], visited)
- self.deps = deps
- self.parts = [(dep.lower.split('.'),dep) for dep in deps.itervalues()]
- # transitive closure of dependencies
- for dep in deps.itervalues():
- recurse(dep, dep, [])
- # disallow everything from the low level components
- for dep in deps.itervalues():
- if dep.lowlevel:
- for d in deps.itervalues():
- if dep != d and not d.legacy:
- dep.transitive.add(d.lower)
- # disallow the 'top' components everywhere but in their own package
- for dep in deps.itervalues():
- if dep.top and not dep.legacy:
- for d in deps.itervalues():
- if dep != d and not d.legacy:
- d.transitive.add(dep.lower)
- for dep in deps.itervalues():
- dep.transitive = set([x+"." for x in dep.transitive])
- if False:
- for dep in deps.itervalues():
- print "-->", dep.lower, "-->", dep.transitive
-
- # Lookup the dep object for the given package. If pkg is a subpackage
- # of one with a rule, that one will be returned. If no matches are found,
- # None is returned.
- def lookup(self, pkg):
- # Returns the number of parts that match
- def compare_parts(parts, pkg):
- if len(parts) > len(pkg):
- return 0
- n = 0
- for i in range(0, len(parts)):
- if parts[i] != pkg[i]:
- return 0
- n = n + 1
- return n
- pkg = pkg.split(".")
- matched = 0
- result = None
- for (parts,dep) in self.parts:
- x = compare_parts(parts, pkg)
- if x > matched:
- matched = x
- result = dep
- return result
-
-def parse_dependency_file(filename):
- global err
- f = file(filename)
- lines = f.readlines()
- f.close()
- def lineno(s, i):
- i[0] = i[0] + 1
- return (i[0],s)
- n = [0]
- lines = [lineno(x,n) for x in lines]
- lines = [(n,s.split("#")[0].strip()) for (n,s) in lines]
- lines = [(n,s) for (n,s) in lines if len(s) > 0]
- lines = [(n,s.split()) for (n,s) in lines]
- deps = {}
- for n,words in lines:
- if len(words) == 1:
- lower = words[0]
- top = True
- legacy = False
- lowlevel = False
- if lower[0] == '+':
- lower = lower[1:]
- top = False
- lowlevel = True
- elif lower[0] == '-':
- lower = lower[1:]
- legacy = True
- if lower in deps:
- sys.stderr.write(("%s:%d: Package '%s' already defined on"
- + " line %d.\n") % (filename, n, lower, deps[lower].lineno))
- err = True
- else:
- deps[lower] = Dependency(filename, n, lower, top, lowlevel, legacy)
- elif len(words) == 2:
- lower = words[0]
- upper = words[1]
- if lower in deps:
- dep = deps[lower]
- if dep.top:
- sys.stderr.write(("%s:%d: Can't add dependency to top level package "
- + "'%s'\n") % (filename, n, lower))
- err = True
- else:
- dep = Dependency(filename, n, lower, False, False, False)
- deps[lower] = dep
- dep.uppers.append(upper)
- else:
- sys.stderr.write("%s:%d: Too many words on line starting at \'%s\'\n" % (
- filename, n, words[2]))
- err = True
- return Dependencies(deps)
-
-def find_java_files(srcs):
- result = []
- for d in srcs:
- if d[0] == '@':
- f = file(d[1:])
- result.extend([fn for fn in [s.strip() for s in f.readlines()]
- if len(fn) != 0])
- f.close()
- else:
- for root, dirs, files in os.walk(d):
- result.extend([os.sep.join((root,f)) for f in files
- if f.lower().endswith(".java")])
- return result
-
-COMMENTS = re.compile("//.*?\n|/\*.*?\*/", re.S)
-PACKAGE = re.compile("package\s+(.*)")
-IMPORT = re.compile("import\s+(.*)")
-
-def examine_java_file(deps, filename):
- global err
- # Yes, this is a crappy java parser. Write a better one if you want to.
- f = file(filename)
- text = f.read()
- f.close()
- text = COMMENTS.sub("", text)
- index = text.find("{")
- if index < 0:
- sys.stderr.write(("%s: Error: Unable to parse java. Can't find class "
- + "declaration.\n") % filename)
- err = True
- return
- text = text[0:index]
- statements = [s.strip() for s in text.split(";")]
- # First comes the package declaration. Then iterate while we see import
- # statements. Anything else is either bad syntax that we don't care about
- # because the compiler will fail, or the beginning of the class declaration.
- m = PACKAGE.match(statements[0])
- if not m:
- sys.stderr.write(("%s: Error: Unable to parse java. Missing package "
- + "statement.\n") % filename)
- err = True
- return
- pkg = m.group(1)
- imports = []
- for statement in statements[1:]:
- m = IMPORT.match(statement)
- if not m:
- break
- imports.append(m.group(1))
- # Do the checking
- if False:
- print filename
- print "'%s' --> %s" % (pkg, imports)
- dep = deps.lookup(pkg)
- if not dep:
- sys.stderr.write(("%s: Error: Package does not appear in dependency file: "
- + "%s\n") % (filename, pkg))
- err = True
- return
- for imp in imports:
- if dep.matches(imp):
- sys.stderr.write("%s: Illegal import in package '%s' of '%s'\n"
- % (filename, pkg, imp))
- err = True
-
-err = False
-
-def main(argv):
- if len(argv) < 3:
- fail_with_usage()
- deps = parse_dependency_file(argv[1])
-
- if err:
- sys.exit(1)
-
- java = find_java_files(argv[2:])
- for filename in java:
- examine_java_file(deps, filename)
-
- if err:
- sys.stderr.write("%s: Using this file as dependency file.\n" % argv[1])
- sys.exit(1)
-
- sys.exit(0)
-
-if __name__ == "__main__":
- main(sys.argv)
-
diff --git a/tools/parsedeps.py b/tools/parsedeps.py
deleted file mode 100755
index 32d8ad7..0000000
--- a/tools/parsedeps.py
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/usr/bin/env python
-# vim: ts=2 sw=2
-
-import optparse
-import re
-import sys
-
-
-class Dependency:
- def __init__(self, tgt):
- self.tgt = tgt
- self.pos = ""
- self.prereqs = set()
- self.visit = 0
-
- def add(self, prereq):
- self.prereqs.add(prereq)
-
-
-class Dependencies:
- def __init__(self):
- self.lines = {}
- self.__visit = 0
- self.count = 0
-
- def add(self, tgt, prereq):
- t = self.lines.get(tgt)
- if not t:
- t = Dependency(tgt)
- self.lines[tgt] = t
- p = self.lines.get(prereq)
- if not p:
- p = Dependency(prereq)
- self.lines[prereq] = p
- t.add(p)
- self.count = self.count + 1
-
- def setPos(self, tgt, pos):
- t = self.lines.get(tgt)
- if not t:
- t = Dependency(tgt)
- self.lines[tgt] = t
- t.pos = pos
-
- def get(self, tgt):
- if self.lines.has_key(tgt):
- return self.lines[tgt]
- else:
- return None
-
- def __iter__(self):
- return self.lines.iteritems()
-
- def trace(self, tgt, prereq):
- self.__visit = self.__visit + 1
- d = self.lines.get(tgt)
- if not d:
- return
- return self.__trace(d, prereq)
-
- def __trace(self, d, prereq):
- if d.visit == self.__visit:
- return d.trace
- if d.tgt == prereq:
- return [ [ d ], ]
- d.visit = self.__visit
- result = []
- for pre in d.prereqs:
- recursed = self.__trace(pre, prereq)
- for r in recursed:
- result.append([ d ] + r)
- d.trace = result
- return result
-
-def help():
- print "Commands:"
- print " dep TARGET Print the prerequisites for TARGET"
- print " trace TARGET PREREQ Print the paths from TARGET to PREREQ"
-
-
-def main(argv):
- opts = optparse.OptionParser()
- opts.add_option("-i", "--interactive", action="store_true", dest="interactive",
- help="Interactive mode")
- (options, args) = opts.parse_args()
-
- deps = Dependencies()
-
- filename = args[0]
- print "Reading %s" % filename
-
- if True:
- f = open(filename)
- for line in f:
- line = line.strip()
- if len(line) > 0:
- if line[0] == '#':
- pos,tgt = line.rsplit(":", 1)
- pos = pos[1:].strip()
- tgt = tgt.strip()
- deps.setPos(tgt, pos)
- else:
- (tgt,prereq) = line.split(':', 1)
- tgt = tgt.strip()
- prereq = prereq.strip()
- deps.add(tgt, prereq)
- f.close()
-
- print "Read %d dependencies. %d targets." % (deps.count, len(deps.lines))
- while True:
- line = raw_input("target> ")
- if not line.strip():
- continue
- split = line.split()
- cmd = split[0]
- if len(split) == 2 and cmd == "dep":
- tgt = split[1]
- d = deps.get(tgt)
- if d:
- for prereq in d.prereqs:
- print prereq.tgt
- elif len(split) == 3 and cmd == "trace":
- tgt = split[1]
- prereq = split[2]
- if False:
- print "from %s to %s" % (tgt, prereq)
- trace = deps.trace(tgt, prereq)
- if trace:
- width = 0
- for g in trace:
- for t in g:
- if len(t.tgt) > width:
- width = len(t.tgt)
- for g in trace:
- for t in g:
- if t.pos:
- print t.tgt, " " * (width-len(t.tgt)), " #", t.pos
- else:
- print t.tgt
- print
- else:
- help()
-
-if __name__ == "__main__":
- try:
- main(sys.argv)
- except KeyboardInterrupt:
- print
- except EOFError:
- print
-
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 3c5ba10..715802f 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -723,7 +723,16 @@
with open(tmp_file, 'wb') as f:
f.write(input_file.read(fn))
return tmp_file
+ elif zipfile.is_zipfile(input_file):
+ with zipfile.ZipFile(input_file, "r", allowZip64=True) as zfp:
+ tmp_file = MakeTempFile(os.path.basename(fn))
+ with open(tmp_file, "wb") as fp:
+ fp.write(zfp.read(fn))
+ return tmp_file
else:
+ if not os.path.isdir(input_file):
+ raise ValueError(
+ "Invalid input_file, accepted inputs are ZipFile object, path to .zip file on disk, or path to extracted directory. Actual: " + input_file)
file = os.path.join(input_file, *fn.split("/"))
if not os.path.exists(file):
raise KeyError(fn)
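
The last hunk lets the same helper accept an already-open zip, a path to a .zip on disk, or an extracted directory. A hedged sketch of just the new dispatch on string paths (the helper and variable names below are illustrative, not the ones in common.py):

    import os
    import zipfile

    def classify_input(input_file):
        # Mirrors the added checks: zipfile.is_zipfile() accepts a filename
        # (or file object), and anything else must be an extracted directory.
        if zipfile.is_zipfile(input_file):
            return "zip archive on disk"
        if os.path.isdir(input_file):
            return "extracted target-files directory"
        raise ValueError("Invalid input_file: " + str(input_file))
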