Merge "Teach mgrep to find soong/*.go files."
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 88f9172..b955e25 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -417,6 +417,9 @@
# Soong module variant change, remove obsolete intermediates
$(call add-clean-step, rm -rf $(OUT_DIR)/soong/.intermediates)
+# Version checking moving to Soong
+$(call add-clean-step, rm -rf $(OUT_DIR)/versions_checked.mk)
+
# ************************************************
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
# ************************************************
diff --git a/core/Makefile b/core/Makefile
index e20a5a2..0b63915 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -902,6 +902,7 @@
$(if $(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "system_squashfs_block_size=$(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
$(if $(BOARD_SYSTEMIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "system_squashfs_disable_4k_align=$(BOARD_SYSTEMIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH),$(hide) echo "system_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_HEADROOM),$(hide) echo "system_headroom=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_HEADROOM)" >> $(1))
$(if $(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "userdata_fs_type=$(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
$(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(hide) echo "userdata_size=$(BOARD_USERDATAIMAGE_PARTITION_SIZE)" >> $(1))
$(if $(BOARD_FLASH_LOGICAL_BLOCK_SIZE), $(hide) echo "flash_logical_block_size=$(BOARD_FLASH_LOGICAL_BLOCK_SIZE)" >> $(1))
@@ -2274,6 +2275,10 @@
$(hide) mkdir -p $(zip_root)/IMAGES
$(hide) cp $(INSTALLED_VENDORIMAGE_TARGET) $(zip_root)/IMAGES/
endif
+ifdef BOARD_PREBUILT_DTBOIMAGE
+ $(hide) mkdir -p $(zip_root)/IMAGES
+ $(hide) cp $(INSTALLED_DTBOIMAGE_TARGET) $(zip_root)/IMAGES/
+endif
@# Run fs_config on all the system, vendor, boot ramdisk,
@# and recovery ramdisk files in the zip, and save the output
$(hide) $(call fs_config,$(zip_root)/SYSTEM,system/) > $(zip_root)/META/filesystem_config.txt
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 2157517..c327d2c 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -248,6 +248,8 @@
###########################################################
include $(BUILD_SYSTEM)/configure_module_stem.mk
+LOCAL_BUILT_MODULE := $(intermediates)/$(my_built_module_stem)
+
# OVERRIDE_BUILT_MODULE_PATH is only allowed to be used by the
# internal SHARED_LIBRARIES build files.
OVERRIDE_BUILT_MODULE_PATH := $(strip $(OVERRIDE_BUILT_MODULE_PATH))
@@ -255,11 +257,8 @@
ifneq ($(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
$(error $(LOCAL_PATH): Illegal use of OVERRIDE_BUILT_MODULE_PATH)
endif
- built_module_path := $(OVERRIDE_BUILT_MODULE_PATH)
-else
- built_module_path := $(intermediates)
+ $(eval $(call copy-one-file,$(LOCAL_BUILT_MODULE),$(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem)))
endif
-LOCAL_BUILT_MODULE := $(built_module_path)/$(my_built_module_stem)
ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
# Apk and its attachments reside in its own subdir.
@@ -295,6 +294,11 @@
.KATI_RESTAT: $(LOCAL_BUILT_MODULE).toc
# Build .toc file when using mm, mma, or make $(my_register_name)
$(my_all_targets): $(LOCAL_BUILT_MODULE).toc
+
+ifdef OVERRIDE_BUILT_MODULE_PATH
+$(eval $(call copy-one-file,$(LOCAL_BUILT_MODULE).toc,$(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem).toc))
+$(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem).toc: $(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem)
+endif
endif
###########################################################
diff --git a/core/cleanbuild.mk b/core/cleanbuild.mk
index fa89758..b7109f6 100644
--- a/core/cleanbuild.mk
+++ b/core/cleanbuild.mk
@@ -14,7 +14,7 @@
#
# Don't bother with the cleanspecs if you are running mm/mmm
-ifeq ($(ONE_SHOT_MAKEFILE)$(dont_bother),)
+ifeq ($(ONE_SHOT_MAKEFILE)$(dont_bother)$(NO_ANDROID_CLEANSPEC),)
INTERNAL_CLEAN_STEPS :=
@@ -142,53 +142,7 @@
INTERNAL_CLEAN_STEPS :=
INTERNAL_CLEAN_BUILD_VERSION :=
-endif # if not ONE_SHOT_MAKEFILE dont_bother
-
-# Since products and build variants (unfortunately) share the same
-# PRODUCT_OUT staging directory, things can get out of sync if different
-# build configurations are built in the same tree. The following logic
-# will notice when the configuration has changed and remove the files
-# necessary to keep things consistent.
-
-previous_build_config_file := $(PRODUCT_OUT)/previous_build_config.mk
-current_build_config_file := $(PRODUCT_OUT)/current_build_config.mk
-
-current_build_config := \
- $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)
-force_installclean := false
-
-# Read the current state from the file, if present.
-# Will set PREVIOUS_BUILD_CONFIG.
-#
-PREVIOUS_BUILD_CONFIG :=
--include $(previous_build_config_file)
-PREVIOUS_BUILD_CONFIG := $(strip $(PREVIOUS_BUILD_CONFIG))
-
-ifdef PREVIOUS_BUILD_CONFIG
- ifneq ($(current_build_config),$(PREVIOUS_BUILD_CONFIG))
- $(info *** Build configuration changed: "$(PREVIOUS_BUILD_CONFIG)" -> "$(current_build_config)")
- ifneq ($(DISABLE_AUTO_INSTALLCLEAN),true)
- force_installclean := true
- else
- $(info DISABLE_AUTO_INSTALLCLEAN is set; skipping auto-clean. Your tree may be in an inconsistent state.)
- endif
- endif
-endif # else, this is the first build, so no need to clean.
-
-# Write the new state to the file.
-#
-$(shell \
- mkdir -p $(dir $(current_build_config_file)) && \
- echo "PREVIOUS_BUILD_CONFIG := $(current_build_config)" > \
- $(current_build_config_file) \
- )
-$(shell cmp $(current_build_config_file) $(previous_build_config_file) > /dev/null 2>&1 || \
- mv -f $(current_build_config_file) $(previous_build_config_file))
-
-PREVIOUS_BUILD_CONFIG :=
-previous_build_config_file :=
-current_build_config_file :=
-current_build_config :=
+endif # if not ONE_SHOT_MAKEFILE dont_bother NO_ANDROID_CLEANSPEC
#
# installclean logic
@@ -272,14 +226,6 @@
$(hide) rm -rf $(FILES)
@echo "Deleted images and staging directories."
-ifeq ($(force_installclean),true)
- $(info *** Forcing "make installclean"...)
- $(info *** rm -rf $(dataclean_files) $(installclean_files))
- $(shell rm -rf $(dataclean_files) $(installclean_files))
- $(info *** Done with the cleaning, now starting the real build.)
-endif
-force_installclean :=
-
###########################################################
.PHONY: clean-jack-files
diff --git a/core/config.mk b/core/config.mk
index 583909d..badb9b1 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -492,7 +492,16 @@
BCC_COMPAT := $(HOST_OUT_EXECUTABLES)/bcc_compat
DEPMOD := $(HOST_OUT_EXECUTABLES)/depmod
+#TODO: use a smaller -Xmx value for most libraries;
+# only core.jar and framework.jar need a heap this big.
+ifndef DX_ALT_JAR
DX := $(HOST_OUT_EXECUTABLES)/dx
+DX_COMMAND := $(DX) -JXms16M -JXmx2048M
+else
+DX := $(DX_ALT_JAR)
+DX_COMMAND := java -Xms16M -Xmx2048M -jar $(DX)
+endif
+
MAINDEXCLASSES := $(HOST_OUT_EXECUTABLES)/mainDexClasses
SOONG_ZIP := $(SOONG_HOST_OUT_EXECUTABLES)/soong_zip
@@ -847,7 +856,7 @@
# These goals don't need to collect and include Android.mks/CleanSpec.mks
# in the source tree.
-dont_bother_goals := clean clobber dataclean installclean \
+dont_bother_goals := dataclean installclean \
help out \
snod systemimage-nodeps \
stnod systemtarball-nodeps \
diff --git a/core/definitions.mk b/core/definitions.mk
index 64bdfe6..804f2c3 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2546,14 +2546,11 @@
endef
-#TODO: use a smaller -Xmx value for most libraries;
-# only core.jar and framework.jar need a heap this big.
define transform-classes.jar-to-dex
@echo "target Dex: $(PRIVATE_MODULE)"
@mkdir -p $(dir $@)
$(hide) rm -f $(dir $@)classes*.dex
-$(hide) $(DX) \
- -JXms16M -JXmx2048M \
+$(hide) $(DX_COMMAND) \
--dex --output=$(dir $@) \
--min-sdk-version=$(call codename-or-sdk-to-sdk,$(PRIVATE_DEFAULT_APP_TARGET_SDK)) \
$(if $(NO_OPTIMIZE_DX), \
diff --git a/core/envsetup.mk b/core/envsetup.mk
index ba9d7eb..a734cc7 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -187,6 +187,12 @@
TARGET_COPY_OUT_ODM := odm
TARGET_COPY_OUT_ROOT := root
TARGET_COPY_OUT_RECOVERY := recovery
+
+# Returns the non-sanitized version of the path provided in $1.
+define get_non_asan_path
+$(patsubst $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/%,$(PRODUCT_OUT)/%,$1)
+endef
+
###########################################
# Define TARGET_COPY_OUT_VENDOR to a placeholder, for at this point
# we don't know if the device wants to build a separate vendor.img
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index 35dece4..d30c90d 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -76,9 +76,11 @@
$(proto_java_sources_file_stamp) \
$(annotation_processor_deps) \
$(NORMALIZE_PATH) \
+ $(ZIPTIME) \
$(LOCAL_ADDITIONAL_DEPENDENCIES) \
| $(SOONG_JAVAC_WRAPPER)
$(transform-host-java-to-package)
+ $(remove-timestamps-from-package)
javac-check : $(full_classes_compiled_jar)
javac-check-$(LOCAL_MODULE) : $(full_classes_compiled_jar)
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index 3898dc90..0e92153 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -54,7 +54,8 @@
# The jni libaries will be installed to the system.img.
my_jni_filenames := $(notdir $(my_jni_shared_libraries))
# Make sure the JNI libraries get installed
-my_shared_library_path := $($(my_2nd_arch_prefix)TARGET_OUT$(partition_tag)_SHARED_LIBRARIES)
+my_shared_library_path := $(call get_non_asan_path,\
+ $($(my_2nd_arch_prefix)TARGET_OUT$(partition_tag)_SHARED_LIBRARIES))
# Do not use order-only dependency, because we want to rebuild the image if an jni is updated.
$(LOCAL_INSTALLED_MODULE) : $(addprefix $(my_shared_library_path)/, $(my_jni_filenames))
diff --git a/core/java.mk b/core/java.mk
index ed05484..0a60b69 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -609,7 +609,7 @@
endif
# If not using jack and building against the current SDK version then filter
-# out junit and android.test classes from the application that are to be
+# out the junit, android.test and c.a.i.u.Predicate classes that are to be
# removed from the Android API as part of b/30188076 but which are still
# present in the Android API. This is to allow changes to be made to the
# build to statically include those classes into the application without
@@ -618,7 +618,7 @@
ifndef LOCAL_JACK_ENABLED
ifdef LOCAL_SDK_VERSION
ifeq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
-proguard_injar_filters := (!junit/framework/**,!junit/runner/**,!android/test/**)
+proguard_injar_filters := (!junit/framework/**,!junit/runner/**,!android/test/**,!com/android/internal/util/*)
endif
endif
endif
diff --git a/core/main.mk b/core/main.mk
index 6089cec..f22e63d 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -88,146 +88,6 @@
# Include the google-specific config
-include vendor/google/build/config.mk
-VERSION_CHECK_SEQUENCE_NUMBER := 6
-JAVA_NOT_REQUIRED_CHECKED :=
--include $(OUT_DIR)/versions_checked.mk
-ifneq ($(VERSION_CHECK_SEQUENCE_NUMBER)$(JAVA_NOT_REQUIRED),$(VERSIONS_CHECKED)$(JAVA_NOT_REQUIRED_CHECKED))
-
-$(info Checking build tools versions...)
-
-# check for a case sensitive file system
-ifneq (a,$(shell mkdir -p $(OUT_DIR) ; \
- echo a > $(OUT_DIR)/casecheck.txt; \
- echo B > $(OUT_DIR)/CaseCheck.txt; \
- cat $(OUT_DIR)/casecheck.txt))
-$(warning ************************************************************)
-$(warning You are building on a case-insensitive filesystem.)
-$(warning Please move your source tree to a case-sensitive filesystem.)
-$(warning ************************************************************)
-$(error Case-insensitive filesystems not supported)
-endif
-
-# Make sure that there are no spaces in the absolute path; the
-# build system can't deal with them.
-ifneq ($(words $(shell pwd)),1)
-$(warning ************************************************************)
-$(warning You are building in a directory whose absolute path contains)
-$(warning a space character:)
-$(warning $(space))
-$(warning "$(shell pwd)")
-$(warning $(space))
-$(warning Please move your source tree to a path that does not contain)
-$(warning any spaces.)
-$(warning ************************************************************)
-$(error Directory names containing spaces not supported)
-endif
-
-ifneq ($(JAVA_NOT_REQUIRED),true)
-java_version_str := $(shell unset _JAVA_OPTIONS && java -version 2>&1)
-javac_version_str := $(shell unset _JAVA_OPTIONS && javac -version 2>&1)
-
-# Check for the correct version of java, should be 1.8 by
-# default and only 1.7 if LEGACY_USE_JAVA7 is set.
-ifeq ($(LEGACY_USE_JAVA7),) # if LEGACY_USE_JAVA7 == ''
-required_version := "1.8.x"
-required_javac_version := "1.8"
-java_version := $(shell echo '$(java_version_str)' | grep '[ "]1\.8[\. "$$]')
-javac_version := $(shell echo '$(javac_version_str)' | grep '[ "]1\.8[\. "$$]')
-else
-required_version := "1.7.x"
-required_javac_version := "1.7"
-java_version := $(shell echo '$(java_version_str)' | grep '^java .*[ "]1\.7[\. "$$]')
-javac_version := $(shell echo '$(javac_version_str)' | grep '[ "]1\.7[\. "$$]')
-endif # if LEGACY_USE_JAVA7 == ''
-
-ifeq ($(strip $(java_version)),)
-$(info ************************************************************)
-$(info You are attempting to build with the incorrect version)
-$(info of java.)
-$(info $(space))
-$(info Your version is: $(java_version_str).)
-$(info The required version is: $(required_version))
-$(info $(space))
-$(info Please follow the machine setup instructions at)
-$(info $(space)$(space)$(space)$(space)https://source.android.com/source/initializing.html)
-$(info ************************************************************)
-$(error stop)
-endif
-
-# Check for the current JDK.
-#
-# For Java 1.7/1.8, we require OpenJDK on linux and Oracle JDK on Mac OS.
-requires_openjdk := false
-ifeq ($(BUILD_OS),linux)
-requires_openjdk := true
-endif
-
-
-# Check for the current jdk
-ifeq ($(requires_openjdk), true)
-# The user asked for openjdk, so check that the host
-# java version is really openjdk and not some other JDK.
-ifeq ($(shell echo '$(java_version_str)' | grep -i openjdk),)
-$(info ************************************************************)
-$(info You asked for an OpenJDK based build but your version is)
-$(info $(java_version_str).)
-$(info ************************************************************)
-$(error stop)
-endif # java version is not OpenJdk
-else # if requires_openjdk
-ifneq ($(shell echo '$(java_version_str)' | grep -i openjdk),)
-$(info ************************************************************)
-$(info You are attempting to build with an unsupported JDK.)
-$(info $(space))
-$(info You use OpenJDK but only Sun/Oracle JDK is supported.)
-$(info Please follow the machine setup instructions at)
-$(info $(space)$(space)$(space)$(space)https://source.android.com/source/download.html)
-$(info ************************************************************)
-$(error stop)
-endif # java version is not Sun Oracle JDK
-endif # if requires_openjdk
-
-KNOWN_INCOMPATIBLE_JAVAC_VERSIONS := google
-incompat_javac := $(foreach v,$(KNOWN_INCOMPATIBLE_JAVAC_VERSIONS),$(findstring $(v),$(javac_version_str)))
-ifneq ($(incompat_javac),)
-javac_version :=
-endif
-
-# Check for the correct version of javac
-ifeq ($(strip $(javac_version)),)
-$(info ************************************************************)
-$(info You are attempting to build with the incorrect version)
-$(info of javac.)
-$(info $(space))
-$(info Your version is: $(javac_version_str).)
-ifneq ($(incompat_javac),)
-$(info This '$(incompat_javac)' version is not supported for Android platform builds.)
-$(info Use a publicly available JDK and make sure you have run envsetup.sh / lunch.)
-else
-$(info The required version is: $(required_javac_version))
-endif
-$(info $(space))
-$(info Please follow the machine setup instructions at)
-$(info $(space)$(space)$(space)$(space)https://source.android.com/source/download.html)
-$(info ************************************************************)
-$(error stop)
-endif
-
-endif # if JAVA_NOT_REQUIRED
-
-ifndef BUILD_EMULATOR
- # Emulator binaries are now provided under prebuilts/android-emulator/
- BUILD_EMULATOR := false
-endif
-
-$(shell echo 'VERSIONS_CHECKED := $(VERSION_CHECK_SEQUENCE_NUMBER)' \
- > $(OUT_DIR)/versions_checked.mk)
-$(shell echo 'BUILD_EMULATOR ?= $(BUILD_EMULATOR)' \
- >> $(OUT_DIR)/versions_checked.mk)
-$(shell echo 'JAVA_NOT_REQUIRED_CHECKED := $(JAVA_NOT_REQUIRED)' \
- >> $(OUT_DIR)/versions_checked.mk)
-endif
-
# These are the modifier targets that don't do anything themselves, but
# change the behavior of the build.
# (must be defined before including definitions.make)
@@ -1270,14 +1130,6 @@
.PHONY: findbugs
findbugs: $(INTERNAL_FINDBUGS_HTML_TARGET) $(INTERNAL_FINDBUGS_XML_TARGET)
-.PHONY: clean
-clean:
- @rm -rf $(OUT_DIR)/*
- @echo "Entire build directory removed."
-
-.PHONY: clobber
-clobber: clean
-
# The rules for dataclean and installclean are defined in cleanbuild.mk.
#xxx scrape this from ALL_MODULE_NAME_TAGS
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 6b07be6..6255ef1 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -636,7 +636,7 @@
# LOCAL_PACKAGE_SPLITS is a list of resource labels.
# aapt will convert comma inside resource lable to underscore in the file names.
my_split_suffixes := $(subst $(comma),_,$(LOCAL_PACKAGE_SPLITS))
-built_apk_splits := $(foreach s,$(my_split_suffixes),$(built_module_path)/package_$(s).apk)
+built_apk_splits := $(foreach s,$(my_split_suffixes),$(intermediates)/package_$(s).apk)
installed_apk_splits := $(foreach s,$(my_split_suffixes),$(my_module_path)/$(LOCAL_MODULE)_$(s).apk)
# The splits should have been built in the same command building the base apk.
@@ -646,7 +646,7 @@
# That way the build system will rerun the aapt after the user changes the splitting parameters.
$(built_apk_splits): PRIVATE_PRIVATE_KEY := $(private_key)
$(built_apk_splits): PRIVATE_CERTIFICATE := $(certificate)
-$(built_apk_splits) : $(built_module_path)/%.apk : $(LOCAL_BUILT_MODULE)
+$(built_apk_splits) : $(intermediates)/%.apk : $(LOCAL_BUILT_MODULE)
$(hide) if [ ! -f $@ ]; then \
echo 'No $@ generated, check your apk splitting parameters.' 1>&2; \
rm $<; exit 1; \
@@ -654,14 +654,14 @@
$(sign-package)
# Rules to install the splits
-$(installed_apk_splits) : $(my_module_path)/$(LOCAL_MODULE)_%.apk : $(built_module_path)/package_%.apk
+$(installed_apk_splits) : $(my_module_path)/$(LOCAL_MODULE)_%.apk : $(intermediates)/package_%.apk
@echo "Install: $@"
$(copy-file-to-new-target)
# Register the additional built and installed files.
ALL_MODULES.$(my_register_name).INSTALLED += $(installed_apk_splits)
ALL_MODULES.$(my_register_name).BUILT_INSTALLED += \
- $(foreach s,$(my_split_suffixes),$(built_module_path)/package_$(s).apk:$(my_module_path)/$(LOCAL_MODULE)_$(s).apk)
+ $(foreach s,$(my_split_suffixes),$(intermediates)/package_$(s).apk:$(my_module_path)/$(LOCAL_MODULE)_$(s).apk)
# Make sure to install the splits when you run "make <module_name>".
$(my_all_targets): $(installed_apk_splits)
@@ -671,7 +671,7 @@
$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
$(eval my_compat_dist_$(suite) := $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
$(foreach s,$(my_split_suffixes),\
- $(built_module_path)/package_$(s).apk:$(dir)/$(LOCAL_MODULE)_$(s).apk))))
+ $(intermediates)/package_$(s).apk:$(dir)/$(LOCAL_MODULE)_$(s).apk))))
$(call create-suite-dependencies)
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index 741f0eb..d2b88fa 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -370,7 +370,7 @@
## Install split apks.
ifdef LOCAL_PACKAGE_SPLITS
# LOCAL_PACKAGE_SPLITS is a list of apks to be installed.
-built_apk_splits := $(addprefix $(built_module_path)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
+built_apk_splits := $(addprefix $(intermediates)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
installed_apk_splits := $(addprefix $(my_module_path)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
# Rules to sign the split apks.
@@ -383,19 +383,19 @@
$(built_apk_splits) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem
$(built_apk_splits) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
$(built_apk_splits) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
-$(built_apk_splits) : $(built_module_path)/%.apk : $(my_src_dir)/%.apk
+$(built_apk_splits) : $(intermediates)/%.apk : $(my_src_dir)/%.apk
$(copy-file-to-new-target)
$(sign-package)
# Rules to install the split apks.
-$(installed_apk_splits) : $(my_module_path)/%.apk : $(built_module_path)/%.apk
+$(installed_apk_splits) : $(my_module_path)/%.apk : $(intermediates)/%.apk
@echo "Install: $@"
$(copy-file-to-new-target)
# Register the additional built and installed files.
ALL_MODULES.$(my_register_name).INSTALLED += $(installed_apk_splits)
ALL_MODULES.$(my_register_name).BUILT_INSTALLED += \
- $(foreach s,$(LOCAL_PACKAGE_SPLITS),$(built_module_path)/$(notdir $(s)):$(my_module_path)/$(notdir $(s)))
+ $(foreach s,$(LOCAL_PACKAGE_SPLITS),$(intermediates)/$(notdir $(s)):$(my_module_path)/$(notdir $(s)))
# Make sure to install the splits when you run "make <module_name>".
$(my_all_targets): $(installed_apk_splits)
diff --git a/core/product.mk b/core/product.mk
index 1e5a30e..7742cc3 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -131,6 +131,7 @@
VENDOR_EXCEPTION_PATHS \
PRODUCT_ART_USE_READ_BARRIER \
PRODUCT_IOT \
+ PRODUCT_SYSTEM_HEADROOM \
diff --git a/core/product_config.mk b/core/product_config.mk
index 57b7669..e069ff1 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -429,3 +429,7 @@
# Package list to apply enforcing RRO.
PRODUCT_ENFORCE_RRO_TARGETS := \
$(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ENFORCE_RRO_TARGETS))
+
+# Add reserved headroom to a system image.
+PRODUCT_SYSTEM_HEADROOM := \
+ $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_HEADROOM))
diff --git a/core/tasks/build_custom_images.mk b/core/tasks/build_custom_images.mk
index 8ebf89b..c5f2a96 100644
--- a/core/tasks/build_custom_images.mk
+++ b/core/tasks/build_custom_images.mk
@@ -54,6 +54,8 @@
CUSTOM_IMAGE_SELINUX \
CUSTOM_IMAGE_SUPPORT_VERITY \
CUSTOM_IMAGE_VERITY_BLOCK_DEVICE \
+ CUSTOM_IMAGE_AVB_ENABLE \
+ CUSTOM_IMAGE_AVB_ADD_HASHTREE_FOOTER_ARGS \
# We don't expect product makefile to inherit/override PRODUCT_CUSTOM_IMAGE_MAKEFILES,
# so we don't put it in the _product_var_list.
diff --git a/core/tasks/tools/build_custom_image.mk b/core/tasks/tools/build_custom_image.mk
index f0db476..f916e86 100644
--- a/core/tasks/tools/build_custom_image.mk
+++ b/core/tasks/tools/build_custom_image.mk
@@ -74,6 +74,11 @@
$(my_built_custom_image): PRIVATE_VERITY_KEY := $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY)
$(my_built_custom_image): PRIVATE_VERITY_BLOCK_DEVICE := $(CUSTOM_IMAGE_VERITY_BLOCK_DEVICE)
$(my_built_custom_image): PRIVATE_DICT_FILE := $(CUSTOM_IMAGE_DICT_FILE)
+$(my_built_custom_image): PRIVATE_AVB_ENABLE := $(CUSTOM_IMAGE_AVB_ENABLE)
+$(my_built_custom_image): PRIVATE_AVB_ADD_HASHTREE_FOOTER_ARGS := $(CUSTOM_IMAGE_AVB_ADD_HASHTREE_FOOTER_ARGS)
+ifeq (true,$(CUSTOM_IMAGE_AVB_ENABLE))
+ $(my_built_custom_image): $(AVBTOOL)
+endif
$(my_built_custom_image): $(INTERNAL_USERIMAGES_DEPS) $(my_built_modules) $(my_image_copy_files) \
$(CUSTOM_IMAGE_DICT_FILE)
@echo "Build image $@"
@@ -97,6 +102,11 @@
echo "verity_key=$(PRIVATE_VERITY_KEY)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
echo "verity_signer_cmd=$(VERITY_SIGNER)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
echo "verity_block_device=$(PRIVATE_VERITY_BLOCK_DEVICE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
+ $(if $(PRIVATE_AVB_ENABLE),\
+ $(hide) echo "avb_enable=$(PRIVATE_AVB_ENABLE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
+ echo "avb_avbtool=$(AVBTOOL)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
+ echo "avb_signing_args=$(INTERNAL_AVB_SIGNING_ARGS)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
+ echo "avb_add_hashtree_footer_args=$(PRIVATE_AVB_ADD_HASHTREE_FOOTER_ARGS)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
$(if $(PRIVATE_DICT_FILE),\
$(hide) echo "# Properties from $(PRIVATE_DICT_FILE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
cat $(PRIVATE_DICT_FILE) >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index ffdd904..a8e0dfa 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -63,6 +63,7 @@
# ART/dex helpers.
PRODUCT_PACKAGES += \
ahat \
+ dexdiag \
dexdump \
dexlist \
hprof-conv \
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 7c3679c..82394ca 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -166,6 +166,13 @@
block_list=block_list)
return img.name
+def FindDtboPrebuilt(prefix="IMAGES/"):
+ """Find the prebuilt image of DTBO partition."""
+
+ prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "dtbo.img")
+ if os.path.exists(prebuilt_path):
+ return prebuilt_path
+ return None
def CreateImage(input_dir, info_dict, what, output_file, block_list=None):
print("creating " + what + ".img...")
@@ -286,7 +293,7 @@
def AddVBMeta(output_zip, boot_img_path, system_img_path, vendor_img_path,
- prefix="IMAGES/"):
+ dtbo_img_path, prefix="IMAGES/"):
"""Create a VBMeta image and store it in output_zip."""
img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "vbmeta.img")
avbtool = os.getenv('AVBTOOL') or "avbtool"
@@ -296,6 +303,8 @@
"--include_descriptors_from_image", system_img_path]
if vendor_img_path is not None:
cmd.extend(["--include_descriptors_from_image", vendor_img_path])
+ if dtbo_img_path is not None:
+ cmd.extend(["--include_descriptors_from_image", dtbo_img_path])
if OPTIONS.info_dict.get("system_root_image", None) == "true":
cmd.extend(["--setup_rootfs_from_kernel", system_img_path])
common.AppendAVBSigningArgs(cmd)
@@ -481,7 +490,9 @@
if OPTIONS.info_dict.get("board_avb_enable", None) == "true":
banner("vbmeta")
boot_contents = boot_image.WriteToTemp()
- AddVBMeta(output_zip, boot_contents.name, system_img_path, vendor_img_path)
+ dtbo_img_path = FindDtboPrebuilt()
+ AddVBMeta(output_zip, boot_contents.name, system_img_path,
+ vendor_img_path, dtbo_img_path)
# For devices using A/B update, copy over images from RADIO/ and/or
# VENDOR_IMAGES/ to IMAGES/ and make sure we have all the needed
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 16c8018..3094dca 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -493,11 +493,11 @@
shutil.rmtree(staging_system, ignore_errors=True)
shutil.copytree(origin_in, staging_system, symlinks=True)
- reserved_blocks = prop_dict.get("has_ext4_reserved_blocks") == "true"
+ has_reserved_blocks = prop_dict.get("has_ext4_reserved_blocks") == "true"
ext4fs_output = None
try:
- if reserved_blocks and fs_type.startswith("ext4"):
+ if fs_type.startswith("ext4"):
(ext4fs_output, exit_code) = RunCommand(build_command)
else:
(_, exit_code) = RunCommand(build_command)
@@ -518,7 +518,9 @@
# not writable even with root privilege. It only affects devices using
# file-based OTA and a kernel version of 3.10 or greater (currently just
# sprout).
- if reserved_blocks and fs_type.startswith("ext4"):
+ # Separately, check if there's enough headroom space available. This is useful for
+ # devices with low disk space that have system image variation between builds.
+ if (has_reserved_blocks or "partition_headroom" in prop_dict) and fs_type.startswith("ext4"):
assert ext4fs_output is not None
ext4fs_stats = re.compile(
r'Created filesystem with .* (?P<used_blocks>[0-9]+)/'
@@ -526,14 +528,21 @@
m = ext4fs_stats.match(ext4fs_output.strip().split('\n')[-1])
used_blocks = int(m.groupdict().get('used_blocks'))
total_blocks = int(m.groupdict().get('total_blocks'))
- reserved_blocks = min(4096, int(total_blocks * 0.02))
- adjusted_blocks = total_blocks - reserved_blocks
+ reserved_blocks = 0
+ headroom_blocks = 0
+ adjusted_blocks = total_blocks
+ if has_reserved_blocks:
+ reserved_blocks = min(4096, int(total_blocks * 0.02))
+ adjusted_blocks -= reserved_blocks
+ if "partition_headroom" in prop_dict:
+ headroom_blocks = int(prop_dict.get('partition_headroom')) / BLOCK_SIZE
+ adjusted_blocks -= headroom_blocks
if used_blocks > adjusted_blocks:
mount_point = prop_dict.get("mount_point")
print("Error: Not enough room on %s (total: %d blocks, used: %d blocks, "
- "reserved: %d blocks, available: %d blocks)" % (
+ "reserved: %d blocks, headroom: %d blocks, available: %d blocks)" % (
mount_point, total_blocks, used_blocks, reserved_blocks,
- adjusted_blocks))
+ headroom_blocks, adjusted_blocks))
return False
if not fs_spans_partition:
@@ -614,9 +623,10 @@
d["mount_point"] = mount_point
if mount_point == "system":
copy_prop("fs_type", "fs_type")
- # Copy the generic sysetem fs type first, override with specific one if
+ # Copy the generic system fs type first, override with specific one if
# available.
copy_prop("system_fs_type", "fs_type")
+ copy_prop("system_headroom", "partition_headroom")
copy_prop("system_size", "partition_size")
copy_prop("system_journal_size", "journal_size")
copy_prop("system_verity_block_device", "verity_block_device")
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 1a7e10e..2090400 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -181,14 +181,14 @@
OPTIONS.payload_signer = None
OPTIONS.payload_signer_args = []
OPTIONS.extracted_input = None
+OPTIONS.key_passwords = []
METADATA_NAME = 'META-INF/com/android/metadata'
UNZIP_PATTERN = ['IMAGES/*', 'META/*']
def SignOutput(temp_zip_name, output_zip_name):
- key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
- pw = key_passwords[OPTIONS.package_key]
+ pw = OPTIONS.key_passwords[OPTIONS.package_key]
common.SignFile(temp_zip_name, output_zip_name, OPTIONS.package_key, pw,
whole_file=True)
@@ -1021,21 +1021,17 @@
# The place where the output from the subprocess should go.
log_file = sys.stdout if OPTIONS.verbose else subprocess.PIPE
- # Setup signing keys.
- if OPTIONS.package_key is None:
- OPTIONS.package_key = OPTIONS.info_dict.get(
- "default_system_dev_certificate",
- "build/target/product/security/testkey")
-
# A/B updater expects a signing key in RSA format. Gets the key ready for
# later use in step 3, unless a payload_signer has been specified.
if OPTIONS.payload_signer is None:
cmd = ["openssl", "pkcs8",
"-in", OPTIONS.package_key + OPTIONS.private_key_suffix,
- "-inform", "DER", "-nocrypt"]
+ "-inform", "DER"]
+ pw = OPTIONS.key_passwords[OPTIONS.package_key]
+ cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
rsa_key = common.MakeTempFile(prefix="key-", suffix=".key")
cmd.extend(["-out", rsa_key])
- p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
+ p1 = common.Run(cmd, verbose=False, stdout=log_file, stderr=subprocess.STDOUT)
p1.communicate()
assert p1.returncode == 0, "openssl pkcs8 failed"
@@ -1383,6 +1379,17 @@
ab_update = OPTIONS.info_dict.get("ab_update") == "true"
+ # Use the default key to sign the package if not specified with package_key.
+ # package_keys are needed on ab_updates, so always define them if an
+ # ab_update is getting created.
+ if not OPTIONS.no_signing or ab_update:
+ if OPTIONS.package_key is None:
+ OPTIONS.package_key = OPTIONS.info_dict.get(
+ "default_system_dev_certificate",
+ "build/target/product/security/testkey")
+ # Get signing keys
+ OPTIONS.key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
+
if ab_update:
if OPTIONS.incremental_source is not None:
OPTIONS.target_info_dict = OPTIONS.info_dict
@@ -1448,13 +1455,6 @@
raise common.ExternalError(
"--- target build has specified no recovery ---")
- # Use the default key to sign the package if not specified with package_key.
- if not OPTIONS.no_signing:
- if OPTIONS.package_key is None:
- OPTIONS.package_key = OPTIONS.info_dict.get(
- "default_system_dev_certificate",
- "build/target/product/security/testkey")
-
# Set up the output zip. Create a temporary zip file if signing is needed.
if OPTIONS.no_signing:
if os.path.exists(args[1]):
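
A note on the signing flow above: because the key passphrases are now collected once up front, the openssl pkcs8 step can decrypt a password-protected .pk8 key via -passin and falls back to -nocrypt for unencrypted keys. A small sketch of how that command list is assembled (the second key path and the password are placeholders, not values from this change):

    def pkcs8_cmd(package_key, private_key_suffix, pw, rsa_key):
      # Mirrors the command construction shown in the hunk above.
      cmd = ["openssl", "pkcs8",
             "-in", package_key + private_key_suffix,
             "-inform", "DER"]
      cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
      cmd.extend(["-out", rsa_key])
      return cmd

    print(pkcs8_cmd("build/target/product/security/testkey", ".pk8", "", "out.key"))
    print(pkcs8_cmd("vendor/example/releasekey", ".pk8", "hunter2", "out.key"))
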
diff --git a/tools/releasetools/ota_package_parser.py b/tools/releasetools/ota_package_parser.py
new file mode 100755
index 0000000..331122b
--- /dev/null
+++ b/tools/releasetools/ota_package_parser.py
@@ -0,0 +1,228 @@
+#!/usr/bin/env python
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import logging
+import sys
+import traceback
+import zipfile
+
+from rangelib import RangeSet
+
+class Stash(object):
+ """Build a map to track stashed blocks during update simulation."""
+
+ def __init__(self):
+ self.blocks_stashed = 0
+ self.overlap_blocks_stashed = 0
+ self.max_stash_needed = 0
+ self.current_stash_size = 0
+ self.stash_map = {}
+
+ def StashBlocks(self, SHA1, blocks):
+ if SHA1 in self.stash_map:
+ logging.info("already stashed {}: {}".format(SHA1, blocks))
+ return
+ self.blocks_stashed += blocks.size()
+ self.current_stash_size += blocks.size()
+ self.max_stash_needed = max(self.current_stash_size, self.max_stash_needed)
+ self.stash_map[SHA1] = blocks
+
+ def FreeBlocks(self, SHA1):
+ assert self.stash_map.has_key(SHA1), "stash {} not found".format(SHA1)
+ self.current_stash_size -= self.stash_map[SHA1].size()
+ del self.stash_map[SHA1]
+
+ def HandleOverlapBlocks(self, SHA1, blocks):
+ self.StashBlocks(SHA1, blocks)
+ self.overlap_blocks_stashed += blocks.size()
+ self.FreeBlocks(SHA1)
+
+
+class OtaPackageParser(object):
+ """Parse a block-based OTA package."""
+
+ def __init__(self, package):
+ self.package = package
+ self.new_data_size = 0
+ self.patch_data_size = 0
+ self.block_written = 0
+ self.block_stashed = 0
+
+ @staticmethod
+ def GetSizeString(size):
+ assert size >= 0
+ base = 1024.0
+ if size <= base:
+ return "{} bytes".format(size)
+ for units in ['K', 'M', 'G']:
+ if size <= base * 1024 or units == 'G':
+ return "{:.1f}{}".format(size / base, units)
+ base *= 1024
+
+ def ParseTransferList(self, name):
+ """Simulate the transfer commands and calculate the amout of I/O."""
+
+ logging.info("\nSimulating commands in '{}':".format(name))
+ lines = self.package.read(name).strip().splitlines()
+ assert len(lines) >= 4, "{} is too short; Transfer list expects at least" \
+ "4 lines, it has {}".format(name, len(lines))
+ assert int(lines[0]) >= 3
+ logging.info("(version: {})".format(lines[0]))
+
+ blocks_written = 0
+ my_stash = Stash()
+ for line in lines[4:]:
+ cmd_list = line.strip().split(" ")
+ cmd_name = cmd_list[0]
+ try:
+ if cmd_name == "new" or cmd_name == "zero":
+ assert len(cmd_list) == 2, "command format error: {}".format(line)
+ target_range = RangeSet.parse_raw(cmd_list[1])
+ blocks_written += target_range.size()
+ elif cmd_name == "move":
+ # Example: move <onehash> <tgt_range> <src_blk_count> <src_range>
+ # [<loc_range> <stashed_blocks>]
+ assert len(cmd_list) >= 5, "command format error: {}".format(line)
+ target_range = RangeSet.parse_raw(cmd_list[2])
+ blocks_written += target_range.size()
+ if cmd_list[4] == '-':
+ continue
+ SHA1 = cmd_list[1]
+ source_range = RangeSet.parse_raw(cmd_list[4])
+ if target_range.overlaps(source_range):
+ my_stash.HandleOverlapBlocks(SHA1, source_range)
+ elif cmd_name == "bsdiff" or cmd_name == "imgdiff":
+ # Example: bsdiff <offset> <len> <src_hash> <tgt_hash> <tgt_range>
+ # <src_blk_count> <src_range> [<loc_range> <stashed_blocks>]
+ assert len(cmd_list) >= 8, "command format error: {}".format(line)
+ target_range = RangeSet.parse_raw(cmd_list[5])
+ blocks_written += target_range.size()
+ if cmd_list[7] == '-':
+ continue
+ source_SHA1 = cmd_list[3]
+ source_range = RangeSet.parse_raw(cmd_list[7])
+ if target_range.overlaps(source_range):
+ my_stash.HandleOverlapBlocks(source_SHA1, source_range)
+ elif cmd_name == "stash":
+ assert len(cmd_list) == 3, "command format error: {}".format(line)
+ SHA1 = cmd_list[1]
+ source_range = RangeSet.parse_raw(cmd_list[2])
+ my_stash.StashBlocks(SHA1, source_range)
+ elif cmd_name == "free":
+ assert len(cmd_list) == 2, "command format error: {}".format(line)
+ SHA1 = cmd_list[1]
+ my_stash.FreeBlocks(SHA1)
+ except:
+ logging.error("failed to parse command in: " + line)
+ raise
+
+ self.block_written += blocks_written
+ self.block_stashed += my_stash.blocks_stashed
+
+ logging.info("blocks written: {} (expected: {})".format(
+ blocks_written, lines[1]))
+ logging.info("max blocks stashed simultaneously: {} (expected: {})".
+ format(my_stash.max_stash_needed, lines[3]))
+ logging.info("total blocks stashed: {}".format(my_stash.blocks_stashed))
+ logging.info("blocks stashed implicitly: {}".format(
+ my_stash.overlap_blocks_stashed))
+
+ def PrintDataInfo(self, partition):
+ logging.info("\nReading data info for {} partition:".format(partition))
+ new_data = self.package.getinfo(partition + ".new.dat")
+ patch_data = self.package.getinfo(partition + ".patch.dat")
+ logging.info("{:<40}{:<40}".format(new_data.filename, patch_data.filename))
+ logging.info("{:<40}{:<40}".format(
+ "compress_type: " + str(new_data.compress_type),
+ "compress_type: " + str(patch_data.compress_type)))
+ logging.info("{:<40}{:<40}".format(
+ "compressed_size: " + OtaPackageParser.GetSizeString(
+ new_data.compress_size),
+ "compressed_size: " + OtaPackageParser.GetSizeString(
+ patch_data.compress_size)))
+ logging.info("{:<40}{:<40}".format(
+ "file_size: " + OtaPackageParser.GetSizeString(new_data.file_size),
+ "file_size: " + OtaPackageParser.GetSizeString(patch_data.file_size)))
+
+ self.new_data_size += new_data.file_size
+ self.patch_data_size += patch_data.file_size
+
+ def AnalyzePartition(self, partition):
+ assert partition in ("system", "vendor")
+ assert partition + ".new.dat" in self.package.namelist()
+ assert partition + ".patch.dat" in self.package.namelist()
+ assert partition + ".transfer.list" in self.package.namelist()
+
+ self.PrintDataInfo(partition)
+ self.ParseTransferList(partition + ".transfer.list")
+
+ def PrintMetadata(self):
+ metadata_path = "META-INF/com/android/metadata"
+ logging.info("\nMetadata info:")
+ metadata_info = {}
+ for line in self.package.read(metadata_path).strip().splitlines():
+ index = line.find("=")
+ metadata_info[line[0 : index].strip()] = line[index + 1:].strip()
+ assert metadata_info.get("ota-type") == "BLOCK"
+ assert "pre-device" in metadata_info
+ logging.info("device: {}".format(metadata_info["pre-device"]))
+ if "pre-build" in metadata_info:
+ logging.info("pre-build: {}".format(metadata_info["pre-build"]))
+ assert "post-build" in metadata_info
+ logging.info("post-build: {}".format(metadata_info["post-build"]))
+
+ def Analyze(self):
+ logging.info("Analyzing ota package: " + self.package.filename)
+ self.PrintMetadata()
+ assert "system.new.dat" in self.package.namelist()
+ self.AnalyzePartition("system")
+ if "vendor.new.dat" in self.package.namelist():
+ self.AnalyzePartition("vendor")
+
+    #TODO Add analysis of other partitions (e.g. bootloader, boot, radio)
+
+ BLOCK_SIZE = 4096
+ logging.info("\nOTA package analyzed:")
+ logging.info("new data size (uncompressed): " +
+ OtaPackageParser.GetSizeString(self.new_data_size))
+ logging.info("patch data size (uncompressed): " +
+ OtaPackageParser.GetSizeString(self.patch_data_size))
+ logging.info("total data written: " +
+ OtaPackageParser.GetSizeString(self.block_written * BLOCK_SIZE))
+ logging.info("total data stashed: " +
+ OtaPackageParser.GetSizeString(self.block_stashed * BLOCK_SIZE))
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(description='Analyze an OTA package.')
+ parser.add_argument("ota_package", help='Path of the OTA package.')
+ args = parser.parse_args(argv)
+
+ logging_format = '%(message)s'
+ logging.basicConfig(level=logging.INFO, format=logging_format)
+
+ try:
+ with zipfile.ZipFile(args.ota_package, 'r') as package:
+ package_parser = OtaPackageParser(package)
+ package_parser.Analyze()
+ except:
+ logging.error("Failed to read " + args.ota_package)
+ traceback.print_exc()
+ sys.exit(1)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
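
To try the new analyzer, assuming the releasetools directory is on PYTHONPATH (for rangelib) and "ota.zip" is a block-based OTA package you already have (both are assumptions, not part of the change):

    import logging
    import zipfile

    from ota_package_parser import OtaPackageParser

    logging.basicConfig(level=logging.INFO, format='%(message)s')
    with zipfile.ZipFile("ota.zip", 'r') as package:
      OtaPackageParser(package).Analyze()

Equivalently, the script can be run directly: tools/releasetools/ota_package_parser.py <path-to-ota.zip>.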