Merge "Normalize the 2nd CPU architect of 64 bits arm GSI"
diff --git a/Changes.md b/Changes.md
index 3e48bad..37bbad0 100644
--- a/Changes.md
+++ b/Changes.md
@@ -92,6 +92,11 @@
 attribute to the root element `<manifest>`. If `PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE`
 is 26 or 27, you can add `"target-level"="1"` to your device manifest instead.
 
+### Stop using USE_CLANG_PLATFORM_BUILD {#USE_CLANG_PLATFORM_BUILD}
+
+Clang is the default and only supported Android compiler, so there is no reason
+for this option to exist.
+
 ### Other envsetup.sh variables  {#other_envsetup_variables}
 
 * ANDROID_TOOLCHAIN
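
Note that with `USE_CLANG_PLATFORM_BUILD` now marked obsolete (see the `core/config.mk` hunk below), any board config that still sets it will abort the build. A minimal cleanup sketch, assuming a leftover assignment (file path illustrative; `LOCAL_CLANG` is the per-module switch):

# BoardConfig.mk (illustrative): delete any leftover line like
#   USE_CLANG_PLATFORM_BUILD := true
# Clang is already the default. A module that cannot yet build with clang
# can still opt out per-module in its Android.mk:
LOCAL_CLANG := false
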
diff --git a/core/Makefile b/core/Makefile
index 85ae96e..a3fbe33 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -2312,9 +2312,7 @@
 $(BUILT_TARGET_FILES_PACKAGE): PRIVATE_TOOL_EXTENSION := $(tool_extension)
 
 ifeq ($(AB_OTA_UPDATER),true)
-# Build zlib fingerprint if using the AB Updater.
-updater_dep := $(TARGET_OUT_COMMON_GEN)/zlib_fingerprint
-updater_dep += system/update_engine/update_engine.conf
+updater_dep := system/update_engine/update_engine.conf
 else
 # Build OTA tools if not using the AB Updater.
 updater_dep := $(built_ota_tools)
@@ -2547,7 +2545,6 @@
 ifeq ($(AB_OTA_UPDATER),true)
 	@# When using the A/B updater, include the updater config files in the zip.
 	$(hide) cp $(TOPDIR)system/update_engine/update_engine.conf $(zip_root)/META/update_engine_config.txt
-	$(hide) cp $(TARGET_OUT_COMMON_GEN)/zlib_fingerprint $(zip_root)/META/zlib_fingerprint.txt
 	$(hide) for part in $(AB_OTA_PARTITIONS); do \
 	  echo "$${part}" >> $(zip_root)/META/ab_partitions.txt; \
 	done
diff --git a/core/allowed_ndk_types.mk b/core/allowed_ndk_types.mk
new file mode 100644
index 0000000..b88b9e8
--- /dev/null
+++ b/core/allowed_ndk_types.mk
@@ -0,0 +1,84 @@
+# Determines the types of NDK modules the current module is allowed to link to.
+# Input variables:
+#   LOCAL_MODULE
+#   LOCAL_MODULE_CLASS
+#   LOCAL_NDK_STL_VARIANT
+#   LOCAL_SDK_VERSION
+# Output variables:
+#   my_ndk_stl_family: Family of the NDK STL.
+#   my_ndk_stl_link_type: STL link type, static or shared.
+#   my_allowed_ndk_types: Types of NDK modules that may be linked.
+#   my_warn_ndk_types: Types of NDK modules that shouldn't be linked, but are.
+
+my_allowed_ndk_types :=
+my_warn_ndk_types :=
+my_ndk_stl_family :=
+my_ndk_stl_link_type :=
+
+ifdef LOCAL_SDK_VERSION
+    ifeq ($(LOCAL_NDK_STL_VARIANT),)
+        my_ndk_stl_family := system
+        my_ndk_stl_link_type := shared
+    else ifeq ($(LOCAL_NDK_STL_VARIANT),system)
+        my_ndk_stl_family := system
+        my_ndk_stl_link_type := shared
+    else ifeq ($(LOCAL_NDK_STL_VARIANT),c++_shared)
+        my_ndk_stl_family := libc++
+        my_ndk_stl_link_type := shared
+    else ifeq ($(LOCAL_NDK_STL_VARIANT),c++_static)
+        my_ndk_stl_family := libc++
+        my_ndk_stl_link_type := static
+    else ifeq ($(LOCAL_NDK_STL_VARIANT),none)
+        my_ndk_stl_family := none
+        my_ndk_stl_link_type := none
+    else
+        $(call pretty-error,invalid LOCAL_NDK_STL_VARIANT: $(LOCAL_NDK_STL_VARIANT))
+    endif
+
+    ifeq ($(LOCAL_MODULE_CLASS),STATIC_LIBRARIES)
+        # The "none" link type indicates that nothing is actually linked. Since
+        # this is a static library, it's still up to the final consumer of the
+        # library whether a static or shared STL should be used.
+        my_ndk_stl_link_type := none
+    endif
+
+    # The system STL is only the C++ ABI layer, so it's compatible with any STL.
+    my_allowed_ndk_types += native:ndk:system:shared
+    my_allowed_ndk_types += native:ndk:system:none
+
+    # Libraries that don't use the STL can be linked to anything.
+    my_allowed_ndk_types += native:ndk:none:none
+
+    # And it's always okay to link a static library that uses your own STL
+    # family. Since nothing was actually linked into the static library, the
+    # first linked library in the dependency chain decides which STL is used.
+    my_allowed_ndk_types += native:ndk:$(my_ndk_stl_family):none
+
+    ifeq ($(LOCAL_MODULE_CLASS),APPS)
+        # For an app package, it's actually okay to depend on any set of STLs.
+        # If any of the individual libraries depend on each other they've
+        # already been checked for consistency, and if they don't they'll be
+        # kept isolated by RTLD_LOCAL anyway.
+        my_allowed_ndk_types += \
+            native:ndk:libc++:shared native:ndk:libc++:static
+
+    # The "none" link type used by static libraries is intentionally
+        # omitted here. We should only be dealing with shared libraries in
+        # LOCAL_JNI_SHARED_LIBRARIES.
+    else ifeq ($(my_ndk_stl_link_type),shared)
+        # Modules linked to a shared STL can only use another shared STL.
+        my_allowed_ndk_types += native:ndk:$(my_ndk_stl_family):shared
+    endif
+    # Else we are a non-static library that uses a static STL, and are
+    # incompatible with all other shared libraries that use an STL.
+else
+    my_allowed_ndk_types := \
+        native:ndk:none:none \
+        native:ndk:system:none \
+        native:ndk:system:shared \
+
+    ifeq ($(LOCAL_MODULE_CLASS),APPS)
+        # CTS is bad and it should feel bad: http://b/13249737
+        my_warn_ndk_types += native:ndk:libc++:static
+    endif
+endif
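
A worked example of how these outputs combine, as a minimal sketch (module name hypothetical): a shared library built against the NDK with the shared libc++ STL evaluates as follows.

include $(CLEAR_VARS)
LOCAL_MODULE := libexample                  # hypothetical
LOCAL_MODULE_CLASS := SHARED_LIBRARIES
LOCAL_SDK_VERSION := 27
LOCAL_NDK_STL_VARIANT := c++_shared
# After including allowed_ndk_types.mk:
#   my_ndk_stl_family    = libc++
#   my_ndk_stl_link_type = shared
#   my_allowed_ndk_types = native:ndk:system:shared native:ndk:system:none
#                          native:ndk:none:none native:ndk:libc++:none
#                          native:ndk:libc++:shared
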
diff --git a/core/autogen_test_config.mk b/core/autogen_test_config.mk
index 9f3a2a6..c359bac 100644
--- a/core/autogen_test_config.mk
+++ b/core/autogen_test_config.mk
@@ -40,10 +40,14 @@
   my_android_manifest := $(LOCAL_PATH)/$(LOCAL_MANIFEST_FILE)
 endif
 ifneq (,$(wildcard $(my_android_manifest)))
-$(autogen_test_config_file) : $(my_android_manifest) $(EMPTY_TEST_CONFIG) $(INSTRUMENTATION_TEST_CONFIG_TEMPLATE)
+$(autogen_test_config_file): PRIVATE_AUTOGEN_TEST_CONFIG_SCRIPT := $(AUTOGEN_TEST_CONFIG_SCRIPT)
+$(autogen_test_config_file): PRIVATE_TEST_CONFIG_ANDROID_MANIFEST := $(my_android_manifest)
+$(autogen_test_config_file): PRIVATE_EMPTY_TEST_CONFIG := $(EMPTY_TEST_CONFIG)
+$(autogen_test_config_file): PRIVATE_TEMPLATE := $(INSTRUMENTATION_TEST_CONFIG_TEMPLATE)
+$(autogen_test_config_file) : $(my_android_manifest) $(EMPTY_TEST_CONFIG) $(INSTRUMENTATION_TEST_CONFIG_TEMPLATE) $(AUTOGEN_TEST_CONFIG_SCRIPT)
 	@echo "Auto generating test config $(notdir $@)"
 	@rm -f $@
-	$(hide) $(AUTOGEN_TEST_CONFIG_SCRIPT) $@ $^
+	$(hide) $(PRIVATE_AUTOGEN_TEST_CONFIG_SCRIPT) $@ $(PRIVATE_TEST_CONFIG_ANDROID_MANIFEST) $(PRIVATE_EMPTY_TEST_CONFIG) $(PRIVATE_TEMPLATE)
 my_auto_generate_config := true
 endif # ifeq (,$(wildcard $(my_android_manifest)))
 endif # ifneq (true,$(is_native))
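
The extra `PRIVATE_` plumbing is needed because the script itself is now a prerequisite: the old `$^` would have passed `$(AUTOGEN_TEST_CONFIG_SCRIPT)` to itself as an argument. Snapshotting each input into a target-specific variable keeps the command line explicit and independent of prerequisite order; the general pattern, with hypothetical names:

my_out := $(intermediates)/out.txt             # hypothetical
$(my_out): PRIVATE_TEMPLATE := $(my_template)
$(my_out): $(my_template) $(my_script)
	$(my_script) $@ $(PRIVATE_TEMPLATE)    # not $^, which would include the script
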
diff --git a/core/aux_config.mk b/core/aux_config.mk
index c40b8cc..41c14ae 100644
--- a/core/aux_config.mk
+++ b/core/aux_config.mk
@@ -32,7 +32,7 @@
 
 # setup AUX globals
 AUX_SHLIB_SUFFIX := .so
-AUX_GLOBAL_ARFLAGS := crsPD
+AUX_GLOBAL_ARFLAGS := cqsD
 AUX_STATIC_LIB_SUFFIX := .a
 
 # Load ever-lasting "indexed" version of AUX variant environment; it is treated as READ-ONLY from this
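
For reference, the archiver flags above are standard ar/llvm-ar options; the behavioral change is `r` (insert with replacement) to `q` (quick append), which is safe here because these archives are always built from scratch:

#   c            create the archive if it does not exist
#   r -> q       insert with replacement -> quick append without lookup
#   s            write a symbol index
#   D            deterministic mode (zeroed timestamps, uids/gids)
#   P (dropped)  use full path names when matching members
#   -format=gnu  pin llvm-ar to the GNU archive format (see select.mk below)
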
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 9234abe..313c302 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -499,18 +499,22 @@
 # separate the multiple architectures into subdirectories of the testcase folder.
 arch_dir :=
 is_native :=
+multi_arch :=
 ifeq ($(LOCAL_MODULE_CLASS),NATIVE_TESTS)
   is_native := true
+  multi_arch := true
 endif
 ifeq ($(LOCAL_MODULE_CLASS),NATIVE_BENCHMARK)
   is_native := true
+  multi_arch := true
 endif
 ifdef LOCAL_MULTILIB
-  is_native := true
+  multi_arch := true
 endif
-ifdef is_native
+ifdef multi_arch
   arch_dir := /$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
 endif
+multi_arch :=
 
 # The module itself.
 $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
@@ -537,14 +541,23 @@
     ifeq (true, $(LOCAL_IS_HOST_MODULE))
       is_instrumentation_test := false
     endif
+    # If LOCAL_MODULE_CLASS is not APPS, it's certainly not an instrumentation
+    # test. However, some packages that only carry test data also have
+    # LOCAL_MODULE_CLASS set to APPS. Those must set
+    # LOCAL_DISABLE_AUTO_GENERATE_TEST_CONFIG to disable auto-generating a
+    # test config file.
+    ifneq (APPS, $(LOCAL_MODULE_CLASS))
+      is_instrumentation_test := false
+    endif
   endif
   # CTS modules can be used for test data, so test config files must be
   # explicitly created using AndroidTest.xml
   ifeq (,$(filter cts, $(LOCAL_COMPATIBILITY_SUITE)))
-    ifeq (true, $(filter true,$(is_native) $(is_instrumentation_test)))
-      include $(BUILD_SYSTEM)/autogen_test_config.mk
-      test_config := $(autogen_test_config_file)
-      autogen_test_config_file :=
+    ifneq (true, $(LOCAL_DISABLE_AUTO_GENERATE_TEST_CONFIG))
+      ifeq (true, $(filter true,$(is_native) $(is_instrumentation_test)))
+        include $(BUILD_SYSTEM)/autogen_test_config.mk
+        test_config := $(autogen_test_config_file)
+        autogen_test_config_file :=
+      endif
     endif
   endif
 endif
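
A package that only carries test data can now opt out of the auto-generated config; a minimal sketch (package name hypothetical):

include $(CLEAR_VARS)
LOCAL_PACKAGE_NAME := ExampleTestData            # hypothetical
LOCAL_COMPATIBILITY_SUITE := device-tests
LOCAL_DISABLE_AUTO_GENERATE_TEST_CONFIG := true  # ship no auto AndroidTest.xml
include $(BUILD_PACKAGE)
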
diff --git a/core/binary.mk b/core/binary.mk
index 6920373..e3da7d2 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -189,24 +189,14 @@
   ifeq (,$(LOCAL_NDK_STL_VARIANT))
     LOCAL_NDK_STL_VARIANT := system
   endif
-  ifneq (1,$(words $(filter none system stlport_static stlport_shared c++_static c++_shared gnustl_static, $(LOCAL_NDK_STL_VARIANT))))
+  ifneq (1,$(words $(filter none system c++_static c++_shared, $(LOCAL_NDK_STL_VARIANT))))
     $(error $(LOCAL_PATH): Unknown LOCAL_NDK_STL_VARIANT $(LOCAL_NDK_STL_VARIANT))
   endif
+
   ifeq (system,$(LOCAL_NDK_STL_VARIANT))
     my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/system/include
     my_system_shared_libraries += libstdc++
-  else # LOCAL_NDK_STL_VARIANT is not system
-  ifneq (,$(filter stlport_%, $(LOCAL_NDK_STL_VARIANT)))
-    my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/stlport/stlport
-    my_system_shared_libraries += libstdc++
-    ifeq (stlport_static,$(LOCAL_NDK_STL_VARIANT))
-      my_ndk_stl_static_lib := $(my_ndk_source_root)/cxx-stl/stlport/libs/$(my_cpu_variant)/libstlport_static.a
-      my_ldlibs += -ldl
-    else
-      my_ndk_stl_shared_lib_fullpath := $(my_ndk_source_root)/cxx-stl/stlport/libs/$(my_cpu_variant)/libstlport_shared.so
-    endif
-  else # LOCAL_NDK_STL_VARIANT is not stlport_* either
-  ifneq (,$(filter c++_%, $(LOCAL_NDK_STL_VARIANT)))
+  else ifneq (,$(filter c++_%, $(LOCAL_NDK_STL_VARIANT)))
     my_ndk_stl_include_path := \
       $(my_ndk_source_root)/cxx-stl/llvm-libc++/include
     my_ndk_stl_include_path += \
@@ -232,17 +222,9 @@
     my_ldlibs += -ldl
 
     my_ndk_cpp_std_version := c++11
-  else # LOCAL_NDK_STL_VARIANT is not c++_* either
-  ifneq (,$(filter gnustl_%, $(LOCAL_NDK_STL_VARIANT)))
-    my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/gnu-libstdc++/$($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_GCC_VERSION)/libs/$(my_cpu_variant)/include \
-                               $(my_ndk_source_root)/cxx-stl/gnu-libstdc++/$($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_GCC_VERSION)/include
-    my_ndk_stl_static_lib := $(my_ndk_source_root)/cxx-stl/gnu-libstdc++/$($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_GCC_VERSION)/libs/$(my_cpu_variant)/libgnustl_static.a
   else # LOCAL_NDK_STL_VARIANT must be none
     # Do nothing.
   endif
-  endif
-  endif
-  endif
 endif
 
 ifneq ($(LOCAL_USE_VNDK),)
@@ -372,11 +354,6 @@
             my_clang := true
         endif
     endif
-# Add option to make gcc the default for device build
-else ifeq ($(USE_CLANG_PLATFORM_BUILD),false)
-    ifeq ($(my_clang),)
-        my_clang := false
-    endif
 else ifeq ($(my_clang),)
     my_clang := true
 endif
@@ -1404,10 +1381,12 @@
 ## other NDK-built libraries
 ####################################################
 
+include $(BUILD_SYSTEM)/allowed_ndk_types.mk
+
 ifdef LOCAL_SDK_VERSION
-my_link_type := native:ndk
-my_warn_types :=
-my_allowed_types := native:ndk
+my_link_type := native:ndk:$(my_ndk_stl_family):$(my_ndk_stl_link_type)
+my_warn_types := $(my_warn_ndk_types)
+my_allowed_types := $(my_allowed_ndk_types)
 else ifdef LOCAL_USE_VNDK
     _name := $(patsubst %.vendor,%,$(LOCAL_MODULE))
     ifneq ($(filter $(_name),$(VNDK_CORE_LIBRARIES) $(VNDK_SAMEPROCESS_LIBRARIES) $(LLNDK_LIBRARIES)),)
@@ -1427,8 +1406,8 @@
     endif
 else
 my_link_type := native:platform
-my_warn_types :=
-my_allowed_types := native:ndk native:platform
+my_warn_types := $(my_warn_ndk_types)
+my_allowed_types := $(my_allowed_ndk_types) native:platform
 endif
 
 my_link_deps := $(addprefix STATIC_LIBRARIES:,$(my_whole_static_libraries) $(my_static_libraries))
@@ -1721,13 +1700,13 @@
     endif
     # If clang-tidy is not enabled globally, add the -quiet flag.
     ifeq (,$(filter 1 true,$(WITH_TIDY)))
-      my_tidy_flags += -quiet
+      my_tidy_flags += -quiet -extra-arg-before=-fno-caret-diagnostics
     endif
 
     # We might be using the static analyzer through clang-tidy.
     # https://bugs.llvm.org/show_bug.cgi?id=32914
     ifneq ($(my_tidy_checks),)
-      my_tidy_flags += "-extra-arg-before=-D__clang_analyzer__"
+      my_tidy_flags += -extra-arg-before=-D__clang_analyzer__
     endif
   endif
 endif
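
For reference, the effective clang-tidy invocation after this change looks roughly like the following (source file and trailing compiler flags illustrative):

#   clang-tidy -quiet -extra-arg-before=-fno-caret-diagnostics \
#       -extra-arg-before=-D__clang_analyzer__ foo.cpp -- $(my_cflags)
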
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 09f9be5..b2522ee 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -55,6 +55,7 @@
 LOCAL_DEX_PREOPT_IMAGE_LOCATION:=
 LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING:=
 LOCAL_DEX_PREOPT:= # '',true,false,nostripping
+LOCAL_DISABLE_AUTO_GENERATE_TEST_CONFIG:=
 LOCAL_DONT_CHECK_MODULE:=
 # Don't delete the META_INF dir when merging static Java libraries.
 LOCAL_DONT_DELETE_JAR_META_INF:=
@@ -78,6 +79,7 @@
 LOCAL_EXPORT_C_INCLUDE_DIRS:=
 LOCAL_EXPORT_HEADER_LIBRARY_HEADERS:=
 LOCAL_EXPORT_PACKAGE_RESOURCES:=
+LOCAL_EXPORT_PROGUARD_FLAG_FILES:=
 LOCAL_EXPORT_SHARED_LIBRARY_HEADERS:=
 LOCAL_EXPORT_STATIC_LIBRARY_HEADERS:=
 LOCAL_EXTRACT_APK:=
@@ -193,7 +195,7 @@
 LOCAL_PREBUILT_STATIC_JAVA_LIBRARIES:=
 LOCAL_PREBUILT_STRIP_COMMENTS:=
 LOCAL_PRIVILEGED_MODULE:=
-# '',full,custom,nosystem,disabled,obfuscation,optimization
+# '',full,custom,disabled,obfuscation,optimization
 LOCAL_PROGUARD_ENABLED:=
 LOCAL_PROGUARD_FLAG_FILES:=
 LOCAL_PROGUARD_FLAGS:=
@@ -231,6 +233,7 @@
 LOCAL_SOONG_DEX_JAR :=
 LOCAL_SOONG_HEADER_JAR :=
 LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR :=
+LOCAL_SOONG_PROGUARD_DICT :=
 LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE :=
 LOCAL_SOONG_RRO_DIRS :=
 # '',true
diff --git a/core/combo/TARGET_linux-arm.mk b/core/combo/TARGET_linux-arm.mk
index 73b1c04..01cf3f5 100644
--- a/core/combo/TARGET_linux-arm.mk
+++ b/core/combo/TARGET_linux-arm.mk
@@ -50,7 +50,7 @@
 endif
 
 ifeq ($(strip $(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT)),)
-TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT := armv5te
+$(error TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT must be set)
 endif
 
 TARGET_ARCH_SPECIFIC_MAKEFILE := $(BUILD_COMBOS)/arch/$(TARGET_$(combo_2nd_arch_prefix)ARCH)/$(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT).mk
diff --git a/core/combo/arch/arm/armv5te-vfp.mk b/core/combo/arch/arm/armv5te-vfp.mk
deleted file mode 100644
index 75299ac..0000000
--- a/core/combo/arch/arm/armv5te-vfp.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-# At the moment, use the same settings than the one
-# for armv5te, since TARGET_ARCH_VARIANT := armv5te-vfp
-# will only be used to select an optimized VFP-capable assembly
-# interpreter loop for Dalvik.
-#
-include $(BUILD_COMBOS)/arch/arm/armv5te.mk
-
diff --git a/core/combo/arch/arm/armv5te.mk b/core/combo/arch/arm/armv5te.mk
deleted file mode 100644
index bd75695..0000000
--- a/core/combo/arch/arm/armv5te.mk
+++ /dev/null
@@ -1,4 +0,0 @@
-# Configuration for Linux on ARM.
-# Generating binaries for the ARMv5TE architecture and higher
-#
-
diff --git a/core/combo/select.mk b/core/combo/select.mk
index 5e181b9..eab4c72 100644
--- a/core/combo/select.mk
+++ b/core/combo/select.mk
@@ -28,7 +28,7 @@
 
 # Set reasonable defaults for the various variables
 
-$(combo_var_prefix)GLOBAL_ARFLAGS := crsPD
+$(combo_var_prefix)GLOBAL_ARFLAGS := cqsD -format=gnu
 
 $(combo_var_prefix)STATIC_LIB_SUFFIX := .a
 
diff --git a/core/config.mk b/core/config.mk
index 43eaf01..255c848 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -60,14 +60,14 @@
 
 # Mark variables deprecated/obsolete
 CHANGES_URL := https://android.googlesource.com/platform/build/+/master/Changes.md
-$(KATI_deprecated_var PATH,Do not use PATH directly. See $(CHANGES_URL)#PATH)
+$(KATI_obsolete_var PATH,Do not use PATH directly. See $(CHANGES_URL)#PATH)
 $(KATI_obsolete_var PYTHONPATH,Do not use PYTHONPATH directly. See $(CHANGES_URL)#PYTHONPATH)
 $(KATI_obsolete_var OUT,Use OUT_DIR instead. See $(CHANGES_URL)#OUT)
 $(KATI_obsolete_var ANDROID_HOST_OUT,Use HOST_OUT instead. See $(CHANGES_URL)#ANDROID_HOST_OUT)
-$(KATI_deprecated_var ANDROID_PRODUCT_OUT,Use PRODUCT_OUT instead. See $(CHANGES_URL)#ANDROID_PRODUCT_OUT)
+$(KATI_obsolete_var ANDROID_PRODUCT_OUT,Use PRODUCT_OUT instead. See $(CHANGES_URL)#ANDROID_PRODUCT_OUT)
 $(KATI_obsolete_var ANDROID_HOST_OUT_TESTCASES,Use HOST_OUT_TESTCASES instead. See $(CHANGES_URL)#ANDROID_HOST_OUT_TESTCASES)
 $(KATI_obsolete_var ANDROID_TARGET_OUT_TESTCASES,Use TARGET_OUT_TESTCASES instead. See $(CHANGES_URL)#ANDROID_TARGET_OUT_TESTCASES)
-$(KATI_deprecated_var ANDROID_BUILD_TOP,Use '.' instead. See $(CHANGES_URL)#ANDROID_BUILD_TOP)
+$(KATI_obsolete_var ANDROID_BUILD_TOP,Use '.' instead. See $(CHANGES_URL)#ANDROID_BUILD_TOP)
 $(KATI_obsolete_var \
   ANDROID_TOOLCHAIN \
   ANDROID_TOOLCHAIN_2ND_ARCH \
@@ -76,6 +76,7 @@
   ANDROID_PRE_BUILD_PATHS \
   ,See $(CHANGES_URL)#other_envsetup_variables)
 $(KATI_obsolete_var PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE,Set FCM Version in device manifest instead. See $(CHANGES_URL)#PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE)
+$(KATI_obsolete_var USE_CLANG_PLATFORM_BUILD,Clang is the only supported Android compiler. See $(CHANGES_URL)#USE_CLANG_PLATFORM_BUILD)
 
 CHANGES_URL :=
 
@@ -581,8 +582,6 @@
   ZIPALIGN := $(prebuilt_build_tools_bin)/zipalign
 endif # TARGET_BUILD_APPS || TARGET_BUILD_PDK
 
-R8_COMPAT_PROGUARD := $(HOST_OUT_EXECUTABLES)/r8-compat-proguard
-
 ifeq (,$(TARGET_BUILD_APPS))
   # Use RenderScript prebuilts for unbundled builds but not PDK builds
   LLVM_RS_CC := $(HOST_OUT_EXECUTABLES)/llvm-rs-cc
@@ -611,13 +610,13 @@
 
 LEX := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/flex/flex-2.5.39
 # The default PKGDATADIR built in the prebuilt bison is a relative path
-# external/bison/data.
+# prebuilts/build-tools/common/bison.
 # To run bison from elsewhere you need to set the environment variable
 # BISON_PKGDATADIR.
-BISON_PKGDATADIR := $(PWD)/external/bison/data
-BISON := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/bison/bison
+BISON_PKGDATADIR := $(PWD)/prebuilts/build-tools/common/bison
+BISON := prebuilts/build-tools/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/bin/bison
 YACC := $(BISON) -d
-BISON_DATA := $(wildcard external/bison/data/* external/bison/data/*/*)
+BISON_DATA := $(wildcard $(BISON_PKGDATADIR)/* $(BISON_PKGDATADIR)/*/*)
 
 YASM := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/yasm/yasm
 
@@ -707,7 +706,11 @@
 COLUMN:= column
 
 ifeq ($(EXPERIMENTAL_USE_OPENJDK9),)
+ifeq ($(RUN_ERROR_PRONE),true)
 USE_OPENJDK9 :=
+else
+USE_OPENJDK9 := true
+endif
 TARGET_OPENJDK9 :=
 else ifeq ($(EXPERIMENTAL_USE_OPENJDK9),false)
 USE_OPENJDK9 :=
@@ -823,8 +826,6 @@
 endif
 $(.KATI_obsolete_var DEVICE_FRAMEWORK_MANIFEST_FILE,No one should ever need to use this.)
 
-FRAMEWORK_COMPATIBILITY_MATRIX_FILES := $(wildcard hardware/interfaces/compatibility_matrix.*.xml)
-
 BUILD_NUMBER_FROM_FILE := $$(cat $(OUT_DIR)/build_number.txt)
 BUILD_DATETIME_FROM_FILE := $$(cat $(OUT_DIR)/build_date.txt)
 
diff --git a/core/cxx_stl_setup.mk b/core/cxx_stl_setup.mk
index f07659d..5171b8a 100644
--- a/core/cxx_stl_setup.mk
+++ b/core/cxx_stl_setup.mk
@@ -74,6 +74,16 @@
 ifneq ($(filter $(my_cxx_stl),libc++ libc++_static),)
     my_cflags += -D_USING_LIBCXX
 
+    ifeq ($($(my_prefix)OS),darwin)
+        # libc++'s headers are annotated with availability macros that indicate
+        # which version of Mac OS was the first to ship with a libc++ feature
+        # available in its *system's* libc++.dylib. We do not use the system's
+        # library, but rather ship our own. As such, these availability
+        # attributes are meaningless for us but cause build breaks when we try
+        # to use code that would not be available in the system's dylib.
+        my_cppflags += -D_LIBCPP_DISABLE_AVAILABILITY
+    endif
+
     # Note that the structure of this means that LOCAL_CXX_STL := libc++ will
     # use the static libc++ for static executables.
     ifeq ($(my_link_type),dynamic)
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
index 79e72c1..20d43dc 100644
--- a/core/dex_preopt_libart.mk
+++ b/core/dex_preopt_libart.mk
@@ -93,8 +93,11 @@
 # If we use a boot image profile.
 my_use_profile_for_boot_image := $(PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE)
 ifeq (,$(my_use_profile_for_boot_image))
-# If not set, use the default.
-my_use_profile_for_boot_image := false
+# If not set, default to true unless this is a PDK build. PDK builds can't
+# build the profile since they don't have frameworks/base.
+ifneq (true,$(TARGET_BUILD_PDK))
+my_use_profile_for_boot_image := true
+endif
 endif
 
 ifeq (true,$(my_use_profile_for_boot_image))
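
Products that do not want a profile-guided boot image can still opt out explicitly; a one-line sketch for a product makefile:

PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE := false
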
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 89a39a8..05add60 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -287,7 +287,7 @@
 # Check BOARD_VNDK_VERSION
 define check_vndk_version
   $(eval vndk_path := prebuilts/vndk/v$(1)) \
-  $(if $(wildcard $(vndk_path)/Android.bp),,$(error VNDK version $(1) not found))
+  $(if $(wildcard $(vndk_path)/*/Android.bp),,$(error VNDK version $(1) not found))
 endef
 
 ifdef BOARD_VNDK_VERSION
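
The added `/*/` level means a snapshot is now expected to hold per-variant subdirectories, each with its own Android.bp. Usage is unchanged; for example (version illustrative):

$(call check_vndk_version,27)   # errors out unless prebuilts/vndk/v27/*/Android.bp exists
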
@@ -657,13 +657,3 @@
 ifeq ($(CALLED_FROM_SETUP),true)
 PRINT_BUILD_CONFIG ?= true
 endif
-
-ifeq ($(USE_CLANG_PLATFORM_BUILD),)
-USE_CLANG_PLATFORM_BUILD := true
-endif
-
-ifneq ($(USE_CLANG_PLATFORM_BUILD),true)
-ifneq ($(USE_CLANG_PLATFORM_BUILD),false)
-$(error USE_CLANG_PLATFORM_BUILD must be true or false)
-endif
-endif
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index 265d482..ab5fd2c 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -24,16 +24,11 @@
 ifdef my_embed_jni
 # App explicitly requires the prebuilt NDK STL shared libraries.
 # The NDK STL shared libraries should never go into the system image.
-ifneq ($(filter $(LOCAL_NDK_STL_VARIANT), stlport_shared c++_shared),)
+ifeq ($(LOCAL_NDK_STL_VARIANT),c++_shared)
 ifndef LOCAL_SDK_VERSION
 $(error LOCAL_SDK_VERSION must be defined with LOCAL_NDK_STL_VARIANT, \
     LOCAL_PACKAGE_NAME=$(LOCAL_PACKAGE_NAME))
 endif
-endif
-ifeq (stlport_shared,$(LOCAL_NDK_STL_VARIANT))
-my_jni_shared_libraries += \
-    $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources/cxx-stl/stlport/libs/$(TARGET_$(my_2nd_arch_prefix)CPU_ABI)/libstlport_shared.so
-else ifeq (c++_shared,$(LOCAL_NDK_STL_VARIANT))
 my_jni_shared_libraries += \
     $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources/cxx-stl/llvm-libc++/libs/$(TARGET_$(my_2nd_arch_prefix)CPU_ABI)/libc++_shared.so
 endif
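
With stlport gone, an app that embeds its JNI libraries and uses the shared NDK STL now pulls in only libc++_shared.so from the NDK prebuilts; a minimal sketch (names hypothetical):

include $(CLEAR_VARS)
LOCAL_PACKAGE_NAME := ExampleApp               # hypothetical
LOCAL_SDK_VERSION := current
LOCAL_NDK_STL_VARIANT := c++_shared            # requires LOCAL_SDK_VERSION
LOCAL_JNI_SHARED_LIBRARIES := libexamplejni    # hypothetical
include $(BUILD_PACKAGE)
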
@@ -108,15 +103,16 @@
 endif  # outer my_prebuilt_jni_libs
 
 # Verify that all included libraries are built against the NDK
+include $(BUILD_SYSTEM)/allowed_ndk_types.mk
 ifneq ($(strip $(LOCAL_JNI_SHARED_LIBRARIES)),)
 ifneq ($(LOCAL_SDK_VERSION),)
 my_link_type := app:sdk
-my_warn_types := native:platform
-my_allowed_types := native:ndk
+my_warn_types := native:platform $(my_warn_ndk_types)
+my_allowed_types := $(my_allowed_ndk_types)
 else
 my_link_type := app:platform
-my_warn_types :=
-my_allowed_types := native:ndk native:platform native:vendor native:vndk native:vndk_private
+my_warn_types := $(my_warn_ndk_types)
+my_allowed_types := $(my_allowed_ndk_types) native:platform native:vendor native:vndk native:vndk_private
 endif
 
 my_link_deps := $(addprefix SHARED_LIBRARIES:,$(LOCAL_JNI_SHARED_LIBRARIES))
diff --git a/core/jacoco.mk b/core/jacoco.mk
index f51790d..6406df4 100644
--- a/core/jacoco.mk
+++ b/core/jacoco.mk
@@ -84,8 +84,8 @@
 	mkdir -p $(PRIVATE_INSTRUMENTED_PATH)
 	java -jar $(JACOCO_CLI_JAR) \
 	  instrument \
-	  -quiet \
-	  -dest '$(PRIVATE_INSTRUMENTED_PATH)' \
+	  --quiet \
+	  --dest '$(PRIVATE_INSTRUMENTED_PATH)' \
 	  $(PRIVATE_UNZIPPED_PATH)
 	touch $(PRIVATE_INSTRUMENTED_TIMESTAMP_PATH)
 
diff --git a/core/java.mk b/core/java.mk
index 3e0123b..ee071c9 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -558,6 +558,12 @@
 
 $(eval $(call copy-one-file,$(full_classes_jarjar_jar),$(full_classes_jar)))
 
+LOCAL_FULL_CLASSES_PRE_JACOCO_JAR := $(full_classes_jar)
+
+#######################################
+include $(BUILD_SYSTEM)/jacoco.mk
+#######################################
+
 # Temporarily enable --multi-dex until proguard supports v53 class files
 # ( http://b/67673860 ) or we move away from proguard altogether.
 ifdef TARGET_OPENJDK9
@@ -569,7 +575,7 @@
 ifndef LOCAL_IS_STATIC_JAVA_LIBRARY
 my_desugaring := true
 $(full_classes_desugar_jar): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
-$(full_classes_desugar_jar): $(full_classes_jar) $(full_java_header_libs) $(DESUGAR)
+$(full_classes_desugar_jar): $(LOCAL_FULL_CLASSES_JACOCO_JAR) $(full_java_header_libs) $(DESUGAR)
 	$(desugar-classes-jar)
 endif
 else
@@ -577,23 +583,17 @@
 endif
 
 ifndef my_desugaring
-full_classes_desugar_jar := $(full_classes_jar)
+full_classes_desugar_jar := $(LOCAL_FULL_CLASSES_JACOCO_JAR)
 endif
 
-LOCAL_FULL_CLASSES_PRE_JACOCO_JAR := $(full_classes_desugar_jar)
-
-#######################################
-include $(BUILD_SYSTEM)/jacoco.mk
-#######################################
-
-full_classes_pre_proguard_jar := $(LOCAL_FULL_CLASSES_JACOCO_JAR)
+full_classes_pre_proguard_jar := $(full_classes_desugar_jar)
 
 # Keep a copy of the jar just before proguard processing.
 $(eval $(call copy-one-file,$(full_classes_pre_proguard_jar),$(intermediates.COMMON)/classes-pre-proguard.jar))
 
 # Run proguard if necessary
 ifdef LOCAL_PROGUARD_ENABLED
-ifneq ($(filter-out full custom nosystem obfuscation optimization shrinktests,$(LOCAL_PROGUARD_ENABLED)),)
+ifneq ($(filter-out full custom obfuscation optimization,$(LOCAL_PROGUARD_ENABLED)),)
     $(warning while processing: $(LOCAL_MODULE))
     $(error invalid value for LOCAL_PROGUARD_ENABLED: $(LOCAL_PROGUARD_ENABLED))
 endif
@@ -631,19 +631,16 @@
 
 common_proguard_flags := -forceprocessing
 
-common_proguard_flag_files :=
-ifeq ($(filter nosystem,$(LOCAL_PROGUARD_ENABLED)),)
-common_proguard_flag_files += $(BUILD_SYSTEM)/proguard.flags
-ifeq ($(LOCAL_EMMA_INSTRUMENT),true)
-common_proguard_flags += -include $(BUILD_SYSTEM)/proguard.emma.flags
-endif
-# If this is a test package, add proguard keep flags for tests.
+common_proguard_flag_files := $(BUILD_SYSTEM)/proguard.flags
 ifneq ($(LOCAL_INSTRUMENTATION_FOR)$(filter tests,$(LOCAL_MODULE_TAGS)),)
-common_proguard_flag_files += $(BUILD_SYSTEM)/proguard_tests.flags
-ifeq ($(filter shrinktests,$(LOCAL_PROGUARD_ENABLED)),)
 common_proguard_flags += -dontshrink # don't shrink tests by default
-endif # shrinktests
 endif # test package
+ifneq ($(LOCAL_PROGUARD_ENABLED),custom)
+  ifdef LOCAL_USE_AAPT2
+    common_proguard_flag_files += $(foreach l,$(LOCAL_STATIC_ANDROID_LIBRARIES),\
+        $(call intermediates-dir-for,JAVA_LIBRARIES,$(l),,COMMON)/export_proguard_flags)
+  endif
+endif
 ifneq ($(common_proguard_flag_files),)
 common_proguard_flags += $(addprefix -include , $(common_proguard_flag_files))
 # This is included from $(BUILD_SYSTEM)/proguard.flags
@@ -685,7 +682,6 @@
 
 endif # no obfuscation
 endif # LOCAL_INSTRUMENTATION_FOR
-endif  # LOCAL_PROGUARD_ENABLED is not nosystem
 
 proguard_flag_files := $(addprefix $(LOCAL_PATH)/, $(LOCAL_PROGUARD_FLAG_FILES))
 ifeq ($(USE_R8),true)
diff --git a/core/java_library.mk b/core/java_library.mk
index e4916b8..8cf0074 100644
--- a/core/java_library.mk
+++ b/core/java_library.mk
@@ -42,6 +42,8 @@
 ifeq (true,$(LOCAL_EMMA_INSTRUMENT))
 ifeq (true,$(EMMA_INSTRUMENT_STATIC))
 LOCAL_STATIC_JAVA_LIBRARIES += jacocoagent
+# Exclude jacoco classes from proguard
+LOCAL_PROGUARD_FLAGS += -include $(BUILD_SYSTEM)/proguard.jacoco.flags
 endif # LOCAL_EMMA_INSTRUMENT
 endif # EMMA_INSTRUMENT_STATIC
 else
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 2a63817..e153a8a 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -275,6 +275,8 @@
 ifneq ($(LOCAL_SRC_FILES)$(LOCAL_STATIC_JAVA_LIBRARIES)$(LOCAL_SOURCE_FILES_ALL_GENERATED),)
 # Only add jacocoagent if the package contains some java code
 LOCAL_STATIC_JAVA_LIBRARIES += jacocoagent
+# Exclude jacoco classes from proguard
+LOCAL_PROGUARD_FLAGS += -include $(BUILD_SYSTEM)/proguard.jacoco.flags
 endif # Contains java code
 else
 ifdef LOCAL_SDK_VERSION
@@ -361,6 +363,8 @@
 $(full_classes_compiled_jar): $(data_binding_stamp)
 endif  # LOCAL_DATA_BINDING
 
+resource_export_package :=
+
 ifeq ($(need_compile_res),true)
 
 ###############################
@@ -427,7 +431,6 @@
 
 $(proguard_options_file): $(R_file_stamp)
 
-resource_export_package :=
 ifdef LOCAL_EXPORT_PACKAGE_RESOURCES
 # Put this module's resources into a PRODUCT-agnostic package that
 # other packages can use to build their own PRODUCT-agnostic R.java (etc.)
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index 69df2d1..d934338 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -173,8 +173,10 @@
 endif
 export_cflags :=
 
+include $(BUILD_SYSTEM)/allowed_ndk_types.mk
+
 ifdef LOCAL_SDK_VERSION
-my_link_type := native:ndk
+my_link_type := native:ndk:$(my_ndk_stl_family):$(my_ndk_stl_link_type)
 else ifdef LOCAL_USE_VNDK
     _name := $(patsubst %.vendor,%,$(LOCAL_MODULE))
     ifneq ($(filter $(_name),$(VNDK_CORE_LIBRARIES) $(VNDK_SAMEPROCESS_LIBRARIES) $(LLNDK_LIBRARIES)),)
@@ -566,12 +568,16 @@
 ifneq ($(my_src_aar),)
 # This is .aar file, archive of classes.jar and Android resources.
 my_src_jar := $(intermediates.COMMON)/aar/classes.jar
+my_src_proguard_options := $(intermediates.COMMON)/aar/proguard.txt
 
+$(my_src_jar) : .KATI_IMPLICIT_OUTPUTS := $(my_src_proguard_options)
 $(my_src_jar) : $(my_src_aar)
 	$(hide) rm -rf $(dir $@) && mkdir -p $(dir $@) $(dir $@)/res
 	$(hide) unzip -qo -d $(dir $@) $<
 	# Make sure the extracted classes.jar has a new timestamp.
 	$(hide) touch $@
+	# Make sure the proguard file exists and has a new timestamp.
+	$(hide) touch $(dir $@)/proguard.txt
 
 endif
 
@@ -595,6 +601,10 @@
 
 ifdef LOCAL_USE_AAPT2
 ifneq ($(my_src_aar),)
+
+$(intermediates.COMMON)/export_proguard_flags : $(my_src_proguard_options)
+	$(transform-prebuilt-to-target)
+
 LOCAL_SDK_RES_VERSION:=$(strip $(LOCAL_SDK_RES_VERSION))
 ifeq ($(LOCAL_SDK_RES_VERSION),)
   LOCAL_SDK_RES_VERSION:=$(LOCAL_SDK_VERSION)
diff --git a/core/proguard.emma.flags b/core/proguard.emma.flags
deleted file mode 100644
index bf94086..0000000
--- a/core/proguard.emma.flags
+++ /dev/null
@@ -1,4 +0,0 @@
-# Keep everything for the emma classes
--keep class com.vladium.** {
-  *;
-}
diff --git a/core/proguard_tests.flags b/core/proguard_tests.flags
deleted file mode 100644
index 1f840bc..0000000
--- a/core/proguard_tests.flags
+++ /dev/null
@@ -1,26 +0,0 @@
-# Keep everything for tests
-# This flag has been moved to the makefiles and is set for tests by default.
-#-dontshrink
-
-# But we may want to obfuscate if the main app gets obfuscated.
-# This flag has been moved to the makefiles.
-#-dontobfuscate
-
-#-keep class * extends junit.framework.TestCase {
-#  public void test*();
-#}
-
-#-keepclasseswithmembers class * {
-#  public static void run();
-#  public static junit.framework.Test suite();
-#}
-
-# some AllTests don't include run().
-#-keepclasseswithmembers class * {
-#  public static junit.framework.Test suite();
-#}
-
-#-keep class * extends junit.framework.TestSuite
-#-keep class * extends android.app.Instrumentation
-#-keep class * extends android.test.TestSuiteProvider
-
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index 633ef0c..65aabff 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -13,13 +13,6 @@
 include $(BUILD_SYSTEM)/base_rules.mk
 #######################################
 
-ifdef LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
-  $(eval $(call copy-one-file,$(LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR),\
-    $(intermediates.COMMON)/jacoco-report-classes.jar))
-  $(call add-dependency,$(common_javalib.jar),\
-    $(intermediates.COMMON)/jacoco-report-classes.jar)
-endif
-
 full_classes_jar := $(intermediates.COMMON)/classes.jar
 full_classes_pre_proguard_jar := $(intermediates.COMMON)/classes-pre-proguard.jar
 full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar
@@ -27,6 +20,20 @@
 $(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),$(full_classes_jar)))
 $(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),$(full_classes_pre_proguard_jar)))
 
+ifdef LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
+  $(eval $(call copy-one-file,$(LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR),\
+    $(intermediates.COMMON)/jacoco-report-classes.jar))
+  $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+    $(intermediates.COMMON)/jacoco-report-classes.jar)
+endif
+
+ifdef LOCAL_SOONG_PROGUARD_DICT
+  $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
+    $(intermediates.COMMON)/proguard_dictionary))
+  $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+    $(intermediates.COMMON)/proguard_dictionary)
+endif
+
 ifneq ($(TURBINE_DISABLED),false)
 ifdef LOCAL_SOONG_HEADER_JAR
 $(eval $(call copy-one-file,$(LOCAL_SOONG_HEADER_JAR),$(full_classes_header_jar)))
diff --git a/core/soong_config.mk b/core/soong_config.mk
index bbad4c8..c7eefc9 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -110,7 +110,6 @@
 $(call add_json_bool, Device_uses_hwc2,                  $(filter true,$(TARGET_USES_HWC2)))
 $(call add_json_list, DeviceKernelHeaders,               $(TARGET_PROJECT_SYSTEM_INCLUDES))
 $(call add_json_bool, DevicePrefer32BitExecutables,      $(filter true,$(TARGET_PREFER_32_BIT_EXECUTABLES)))
-$(call add_json_val,  DeviceUsesClang,                   $(if $(USE_CLANG_PLATFORM_BUILD),$(USE_CLANG_PLATFORM_BUILD),false))
 $(call add_json_str,  DeviceVndkVersion,                 $(BOARD_VNDK_VERSION))
 $(call add_json_str,  Platform_vndk_version,             $(PLATFORM_VNDK_VERSION))
 $(call add_json_list, ExtraVndkVersions,                 $(PRODUCT_EXTRA_VNDK_VERSIONS))
diff --git a/core/static_java_library.mk b/core/static_java_library.mk
index 6645af5..8348349 100644
--- a/core/static_java_library.mk
+++ b/core/static_java_library.mk
@@ -118,6 +118,13 @@
 endif
 endif
 
+ifdef LOCAL_USE_AAPT2
+$(intermediates.COMMON)/export_proguard_flags: $(addprefix $(LOCAL_PATH)/,$(LOCAL_EXPORT_PROGUARD_FLAG_FILES))
+	@echo "Export proguard flags: $@"
+	rm -f $@
+	cat $+ >$@
+endif
+
 # add --non-constant-id to prevent inlining constants.
 # AAR needs text symbol file R.txt.
 ifdef LOCAL_USE_AAPT2
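
Together with the java.mk and prebuilt_internal.mk hunks above, this completes the ProGuard flag propagation for AAPT2 static libraries: a library exports its flags, and any consumer listing it in LOCAL_STATIC_ANDROID_LIBRARIES picks up the merged export_proguard_flags automatically. A minimal sketch (names hypothetical):

include $(CLEAR_VARS)
LOCAL_MODULE := example-android-library              # hypothetical
LOCAL_USE_AAPT2 := true
LOCAL_EXPORT_PROGUARD_FLAG_FILES := proguard.flags   # relative to LOCAL_PATH
include $(BUILD_STATIC_JAVA_LIBRARY)
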
diff --git a/core/target_test_internal.mk b/core/target_test_internal.mk
index 59a3a9e..b5c3a7c 100644
--- a/core/target_test_internal.mk
+++ b/core/target_test_internal.mk
@@ -8,15 +8,9 @@
   ifndef LOCAL_SDK_VERSION
     LOCAL_STATIC_LIBRARIES += libgtest_main libgtest
   else
-    ifneq (,$(filter c++_%,$(LOCAL_NDK_STL_VARIANT)))
-        my_ndk_gtest_suffix := _c++
-    else ifneq ($(filter stlport_,$(LOCAL_NDK_STL_VARIANT)),)
-        my_ndk_gtest_suffix := _stlport
-    else ifneq ($(filter gnustl_,$(LOCAL_NDK_STL_VARIANT)),)
-        my_ndk_gtest_suffix := _gnustl
-    else # system STL, use stlport
-        my_ndk_gtest_suffix := _stlport
-    endif
+    # TODO(danalbert): Remove the suffix from the module since we only need the
+    # one variant now.
+    my_ndk_gtest_suffix := _c++
     LOCAL_STATIC_LIBRARIES += \
         libgtest_main_ndk$(my_ndk_gtest_suffix) \
         libgtest_ndk$(my_ndk_gtest_suffix)
diff --git a/core/tasks/check_emu_boot.mk b/core/tasks/check_emu_boot.mk
new file mode 100644
index 0000000..4870677
--- /dev/null
+++ b/core/tasks/check_emu_boot.mk
@@ -0,0 +1,23 @@
+check_emu_boot0 := $(DIST_DIR)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)-emulator-boot-test-result.txt
+$(check_emu_boot0) : PRIVATE_PREFIX := $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)
+$(check_emu_boot0) : PRIVATE_EMULATOR_BOOT_TEST_SH := device/generic/goldfish/tools/emulator_boot_test.sh
+$(check_emu_boot0) : PRIVATE_BOOT_COMPLETE_STRING := "emulator: INFO: boot completed"
+$(check_emu_boot0) : PRIVATE_BOOT_FAIL_STRING := "emulator: ERROR: fail to boot after"
+$(check_emu_boot0) : PRIVATE_SUCCESS_FILE := $(DIST_DIR)/$(PRIVATE_PREFIX)-BOOT-SUCCESS.txt
+$(check_emu_boot0) : PRIVATE_FAIL_FILE := $(DIST_DIR)/$(PRIVATE_PREFIX)-BOOT-FAIL.txt
+$(check_emu_boot0) : $(INSTALLED_QEMU_SYSTEMIMAGE)  $(INSTALLED_QEMU_VENDORIMAGE) \
+                 $(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(PRODUCT_OUT)/userdata.img) \
+                 $(PRODUCT_OUT)/ramdisk.img device/generic/goldfish/tools/emulator_boot_test.sh
+	@mkdir -p $(dir $(check_emu_boot0))
+	$(hide) rm -f $(check_emu_boot0)
+	$(hide) rm -f $(PRIVATE_SUCCESS_FILE)
+	$(hide) rm -f $(PRIVATE_FAIL_FILE)
+	(export ANDROID_PRODUCT_OUT=$$(cd $(PRODUCT_OUT);pwd);\
+		export ANDROID_BUILD_TOP=$$(pwd);\
+		$(PRIVATE_EMULATOR_BOOT_TEST_SH) > $(check_emu_boot0))
+	(if grep -q $(PRIVATE_BOOT_COMPLETE_STRING) $(check_emu_boot0);\
+	then echo boot_succeeded > $(PRIVATE_SUCCESS_FILE); fi)
+	(if grep -q $(PRIVATE_BOOT_FAIL_STRING) $(check_emu_boot0);\
+	then echo boot_failed > $(PRIVATE_FAIL_FILE); fi)
+.PHONY: check_emu_boot
+check_emu_boot: $(check_emu_boot0)
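
A usage sketch, assuming a goldfish-based product and a dist build (invocation illustrative):

#   m dist check_emu_boot
# writes <product>-<variant>-emulator-boot-test-result.txt into DIST_DIR and
# creates ...-BOOT-SUCCESS.txt or ...-BOOT-FAIL.txt based on the emulator log.
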
diff --git a/core/tasks/vndk.mk b/core/tasks/vndk.mk
index 1bbd3b0..3604aed 100644
--- a/core/tasks/vndk.mk
+++ b/core/tasks/vndk.mk
@@ -17,6 +17,12 @@
 # BOARD_VNDK_VERSION must be set to 'current' in order to generate a VNDK snapshot.
 ifeq ($(BOARD_VNDK_VERSION),current)
 
+# PLATFORM_VNDK_VERSION must be set.
+ifneq (,$(PLATFORM_VNDK_VERSION))
+
+# BOARD_VNDK_RUNTIME_DISABLE must not be set to 'true'.
+ifneq ($(BOARD_VNDK_RUNTIME_DISABLE),true)
+
 # Returns arch-specific libclang_rt.ubsan* library name.
 # Because VNDK_CORE_LIBRARIES includes all arch variants for libclang_rt.ubsan*
 # libs, the arch-specific libs are selected separately.
@@ -33,15 +39,17 @@
 # Returns list of file paths of the intermediate objs
 #
 # Args:
-#   $(1): list of obj names (e.g., libfoo.vendor, ld.config.txt, ...)
+#   $(1): list of module and filename pairs (e.g., ld.config.txt:ld.config.27.txt ...)
 #   $(2): target class (e.g., SHARED_LIBRARIES, STATIC_LIBRARIES, ETC)
 #   $(3): if not empty, evaluates for TARGET_2ND_ARCH
 define paths-of-intermediates
 $(strip \
-  $(foreach obj,$(1), \
-    $(eval file_name := $(if $(filter SHARED_LIBRARIES,$(2)),$(patsubst %.so,%,$(obj)).so,$(obj))) \
-    $(eval dir := $(call intermediates-dir-for,$(2),$(obj),,,$(3))) \
-    $(call append-path,$(dir),$(file_name)) \
+  $(foreach pair,$(1), \
+    $(eval split_pair := $(subst :,$(space),$(pair))) \
+    $(eval module := $(word 1,$(split_pair))) \
+    $(eval filename := $(word 2,$(split_pair))) \
+    $(eval dir := $(call intermediates-dir-for,$(2),$(module),,,$(3))) \
+    $(call append-path,$(dir),$(filename)) \
   ) \
 )
 endef
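
A worked expansion of the new module:filename pair syntax (device illustrative, assuming PLATFORM_VNDK_VERSION is 27):

#   $(call paths-of-intermediates,ld.config.txt:ld.config.27.txt,ETC)
#     => out/target/product/<device>/obj/ETC/ld.config.txt_intermediates/ld.config.27.txt
# The first half of each pair names the module (to locate its intermediates
# dir); the second names the file to pick up inside it.
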
@@ -70,13 +78,10 @@
 else
   vndk_core_libs := $(addsuffix .vendor,$(filter-out libclang_rt.ubsan%,$(VNDK_CORE_LIBRARIES)))
 
-  # for TARGET_ARCH
   vndk_core_libs += $(call clang-ubsan-vndk-core)
-
-  # TODO(b/69834489): Package additional arch variants
-  # ifdef TARGET_2ND_ARCH
-  #   vndk_core_libs += $(call clang-ubsan-vndk-core,true)
-  # endif
+  ifdef TARGET_2ND_ARCH
+    vndk_core_libs += $(call clang-ubsan-vndk-core,true)
+  endif
 endif
 
 vndk_sp_libs := $(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES))
@@ -135,39 +140,41 @@
 
 #######################################
 # vndk_snapshot_zip
-vndk_snapshot_arch := $(vndk_snapshot_out)/arch-$(TARGET_ARCH)-$(TARGET_ARCH_VARIANT)
+vndk_snapshot_variant := $(vndk_snapshot_out)/$(TARGET_ARCH)
+vndk_lib_dir := $(vndk_snapshot_variant)/arch-$(TARGET_ARCH)-$(TARGET_ARCH_VARIANT)
+vndk_lib_dir_2nd := $(vndk_snapshot_variant)/arch-$(TARGET_2ND_ARCH)-$(TARGET_2ND_ARCH_VARIANT)
 vndk_snapshot_zip := $(PRODUCT_OUT)/android-vndk-$(TARGET_ARCH).zip
 
 $(vndk_snapshot_zip): PRIVATE_VNDK_SNAPSHOT_OUT := $(vndk_snapshot_out)
 
-$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_OUT := $(vndk_snapshot_arch)/shared/vndk-core
+$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_OUT := $(vndk_lib_dir)/shared/vndk-core
 $(vndk_snapshot_zip): PRIVATE_VNDK_CORE_INTERMEDIATES := \
-  $(call paths-of-intermediates,$(vndk_core_libs),SHARED_LIBRARIES)
+  $(call paths-of-intermediates,$(foreach lib,$(vndk_core_libs),$(lib):$(lib).so),SHARED_LIBRARIES)
 
-$(vndk_snapshot_zip): PRIVATE_VNDK_SP_OUT := $(vndk_snapshot_arch)/shared/vndk-sp
+$(vndk_snapshot_zip): PRIVATE_VNDK_SP_OUT := $(vndk_lib_dir)/shared/vndk-sp
 $(vndk_snapshot_zip): PRIVATE_VNDK_SP_INTERMEDIATES := \
-  $(call paths-of-intermediates,$(vndk_sp_libs),SHARED_LIBRARIES)
+  $(call paths-of-intermediates,$(foreach lib,$(vndk_sp_libs),$(lib):$(lib).so),SHARED_LIBRARIES)
 
-$(vndk_snapshot_zip): PRIVATE_CONFIGS_OUT := $(vndk_snapshot_arch)/configs
+$(vndk_snapshot_zip): PRIVATE_CONFIGS_OUT := $(vndk_snapshot_variant)/configs
 $(vndk_snapshot_zip): PRIVATE_CONFIGS_INTERMEDIATES := \
-  $(call paths-of-intermediates,$(vndk_prebuilt_txts),ETC) \
+  $(call paths-of-intermediates,$(foreach txt,$(vndk_prebuilt_txts), \
+    $(txt):$(patsubst %.txt,%.$(PLATFORM_VNDK_VERSION).txt,$(txt))),ETC) \
   $(vndk_snapshot_configs)
 
-$(vndk_snapshot_zip): PRIVATE_NOTICE_FILES_OUT := $(vndk_snapshot_arch)/NOTICE_FILES
+$(vndk_snapshot_zip): PRIVATE_NOTICE_FILES_OUT := $(vndk_snapshot_variant)/NOTICE_FILES
 $(vndk_snapshot_zip): PRIVATE_NOTICE_FILES_INTERMEDIATES := \
   $(call paths-of-notice-files,$(vndk_core_libs),vndk) \
   $(call paths-of-notice-files,$(vndk_sp_libs),vndk-sp)
 
-# TODO(b/69834489): Package additional arch variants
-# ifdef TARGET_2ND_ARCH
-# vndk_snapshot_arch_2ND := $(vndk_snapshot_out)/arch-$(TARGET_2ND_ARCH)-$(TARGET_2ND_ARCH_VARIANT)
-# $(vndk_snapshot_zip): PRIVATE_VNDK_CORE_OUT_2ND := $(vndk_snapshot_arch_2ND)/shared/vndk-core
-# $(vndk_snapshot_zip): PRIVATE_VNDK_CORE_INTERMEDIATES_2ND := \
-#   $(call paths-of-intermediates,$(vndk_core_libs),SHARED_LIBRARIES,true)
-# $(vndk_snapshot_zip): PRIVATE_VNDK_SP_OUT_2ND := $(vndk_snapshot_arch_2ND)/shared/vndk-sp
-# $(vndk_snapshot_zip): PRIVATE_VNDK_SP_INTERMEDIATES_2ND := \
-#   $(call paths-of-intermediates,$(vndk_sp_libs),SHARED_LIBRARIES,true)
-# endif
+ifdef TARGET_2ND_ARCH
+$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_OUT_2ND := $(vndk_lib_dir_2nd)/shared/vndk-core
+$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_INTERMEDIATES_2ND := \
+  $(call paths-of-intermediates,$(foreach lib,$(vndk_core_libs),$(lib):$(lib).so),SHARED_LIBRARIES,true)
+
+$(vndk_snapshot_zip): PRIVATE_VNDK_SP_OUT_2ND := $(vndk_lib_dir_2nd)/shared/vndk-sp
+$(vndk_snapshot_zip): PRIVATE_VNDK_SP_INTERMEDIATES_2ND := \
+  $(call paths-of-intermediates,$(foreach lib,$(vndk_sp_libs),$(lib):$(lib).so),SHARED_LIBRARIES,true)
+endif
 
 # Args
 #   $(1): destination directory
@@ -200,13 +207,12 @@
 		$(PRIVATE_CONFIGS_OUT),$(PRIVATE_CONFIGS_INTERMEDIATES))
 	$(call private-copy-vndk-intermediates, \
 		$(PRIVATE_NOTICE_FILES_OUT),$(PRIVATE_NOTICE_FILES_INTERMEDIATES))
-# TODO(b/69834489): Package additional arch variants
-# ifdef TARGET_2ND_ARCH
-# 	$(call private-copy-vndk-intermediates, \
-# 		$(PRIVATE_VNDK_CORE_OUT_2ND),$(PRIVATE_VNDK_CORE_INTERMEDIATES_2ND))
-# 	$(call private-copy-vndk-intermediates, \
-# 		$(PRIVATE_VNDK_SP_OUT_2ND),$(PRIVATE_VNDK_SP_INTERMEDIATES_2ND))
-# endif
+ifdef TARGET_2ND_ARCH
+	$(call private-copy-vndk-intermediates, \
+		$(PRIVATE_VNDK_CORE_OUT_2ND),$(PRIVATE_VNDK_CORE_INTERMEDIATES_2ND))
+	$(call private-copy-vndk-intermediates, \
+		$(PRIVATE_VNDK_SP_OUT_2ND),$(PRIVATE_VNDK_SP_INTERMEDIATES_2ND))
+endif
 	$(hide) $(SOONG_ZIP) -o $@ -C $(PRIVATE_VNDK_SNAPSHOT_OUT) -D $(PRIVATE_VNDK_SNAPSHOT_OUT)
 
 .PHONY: vndk
@@ -226,18 +232,28 @@
 vndk_snapshot_top :=
 vndk_snapshot_out :=
 vndk_snapshot_configs_out :=
-vndk_snapshot_arch :=
+vndk_snapshot_variant :=
+vndk_lib_dir :=
+vndk_lib_dir_2nd :=
 vndk_snapshot_dependencies :=
-# TODO(b/69834489): Package additional arch variants
-# ifdef TARGET_2ND_ARCH
-# vndk_snapshot_arch_2ND :=
-# endif
+
+else # BOARD_VNDK_RUNTIME_DISABLE is set to 'true'
+error_msg := "CANNOT generate VNDK snapshot. BOARD_VNDK_RUNTIME_DISABLE must not be set to 'true'."
+endif # BOARD_VNDK_RUNTIME_DISABLE
+
+else # PLATFORM_VNDK_VERSION is NOT set
+error_msg := "CANNOT generate VNDK snapshot. PLATFORM_VNDK_VERSION must be set."
+endif # PLATFORM_VNDK_VERSION
 
 else # BOARD_VNDK_VERSION is NOT set to 'current'
+error_msg := "CANNOT generate VNDK snapshot. BOARD_VNDK_VERSION must be set to 'current'."
+endif # BOARD_VNDK_VERSION
+
+ifneq (,$(error_msg))
 
 .PHONY: vndk
 vndk:
-	$(call echo-error,$(current_makefile),CANNOT generate VNDK snapshot. BOARD_VNDK_VERSION must be set to 'current'.)
+	$(call echo-error,$(current_makefile),$(error_msg))
 	exit 1
 
-endif # BOARD_VNDK_VERSION
+endif
diff --git a/envsetup.sh b/envsetup.sh
index 394df65..372dffb 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -8,7 +8,7 @@
              Selects <product_name> as the product to build, and <build_variant> as the variant to
              build, and stores those selections in the environment to be read by subsequent
              invocations of 'm' etc.
-- tapas:     tapas [<App1> <App2> ...] [arm|x86|mips|armv5|arm64|x86_64|mips64] [eng|userdebug|user]
+- tapas:     tapas [<App1> <App2> ...] [arm|x86|mips|arm64|x86_64|mips64] [eng|userdebug|user]
 - croot:     Changes directory to the top of the tree.
 - m:         Makes from the top of the tree.
 - mm:        Builds all of the modules in the current directory, but not their dependencies.
@@ -51,7 +51,7 @@
     cached_vars=`cat $T/build/envsetup.sh | tr '()' '  ' | awk '{for(i=1;i<=NF;i++) if($i~/get_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`
     cached_abs_vars=`cat $T/build/envsetup.sh | tr '()' '  ' | awk '{for(i=1;i<=NF;i++) if($i~/get_abs_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`
     # Call the build system to dump the "<val>=<value>" pairs as a shell script.
-    build_dicts_script=`\cd $T; build/soong/soong_ui.bash --dumpvars-mode \
+    build_dicts_script=`\builtin cd $T; build/soong/soong_ui.bash --dumpvars-mode \
                         --vars="$cached_vars" \
                         --abs-vars="$cached_abs_vars" \
                         --var-prefix=var_cache_ \
@@ -661,10 +661,10 @@
 function tapas()
 {
     local showHelp="$(echo $* | xargs -n 1 echo | \grep -E '^(help)$' | xargs)"
-    local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|mips|armv5|arm64|x86_64|mips64)$' | xargs)"
+    local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|mips|arm64|x86_64|mips64)$' | xargs)"
     local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
     local density="$(echo $* | xargs -n 1 echo | \grep -E '^(ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
-    local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|armv5|arm64|x86_64|mips64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
+    local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|arm64|x86_64|mips64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
 
     if [ "$showHelp" != "" ]; then
       $(gettop)/build/make/tapasHelp.sh
@@ -688,7 +688,6 @@
     case $arch in
       x86)    product=aosp_x86;;
       mips)   product=aosp_mips;;
-      armv5)  product=generic_armv5;;
       arm64)  product=aosp_arm64;;
       x86_64) product=aosp_x86_64;;
       mips64)  product=aosp_mips64;;
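
For example, `tapas Settings arm64 userdebug` still maps to aosp_arm64, while `tapas <App> armv5` is no longer accepted, matching the removal of the generic_armv5 board and product files below.
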
diff --git a/tapasHelp.sh b/tapasHelp.sh
index 058ac1d..38b3e34 100755
--- a/tapasHelp.sh
+++ b/tapasHelp.sh
@@ -6,7 +6,7 @@
 cd ../..
 TOP="${PWD}"
 
-message='usage: tapas [<App1> <App2> ...] [arm|x86|mips|armv5|arm64|x86_64|mips64] [eng|userdebug|user]
+message='usage: tapas [<App1> <App2> ...] [arm|x86|mips|arm64|x86_64|mips64] [eng|userdebug|user]
 
 tapas selects individual apps to be built by the Android build system. Unlike
 "lunch", "tapas" does not request the building of images for a device.
diff --git a/target/board/Android.mk b/target/board/Android.mk
index f4d6b93..3768ece 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -53,6 +53,13 @@
 BUILT_VENDOR_MANIFEST := $(LOCAL_BUILT_MODULE)
 endif
 
+# VNDK Version in device compatibility matrix and framework manifest
+ifeq ($(BOARD_VNDK_VERSION),current)
+VINTF_VNDK_VERSION := $(PLATFORM_VNDK_VERSION)
+else
+VINTF_VNDK_VERSION := $(BOARD_VNDK_VERSION)
+endif
+
 # Device Compatibility Matrix
 ifdef DEVICE_MATRIX_FILE
 include $(CLEAR_VARS)
@@ -62,9 +69,11 @@
 LOCAL_MODULE_PATH   := $(TARGET_OUT_VENDOR)
 
 GEN := $(local-generated-sources-dir)/compatibility_matrix.xml
+
+$(GEN): PRIVATE_VINTF_VNDK_VERSION := $(VINTF_VNDK_VERSION)
 $(GEN): $(DEVICE_MATRIX_FILE) $(HOST_OUT_EXECUTABLES)/assemble_vintf
-	# TODO(b/37342627): put BOARD_VNDK_VERSION & BOARD_VNDK_LIBRARIES into device matrix.
-	$(HOST_OUT_EXECUTABLES)/assemble_vintf -i $< -o $@
+	REQUIRED_VNDK_VERSION=$(PRIVATE_VINTF_VNDK_VERSION) \
+		$(HOST_OUT_EXECUTABLES)/assemble_vintf -i $< -o $@
 
 LOCAL_PREBUILT_MODULE_FILE := $(GEN)
 include $(BUILD_PREBUILT)
@@ -89,9 +98,12 @@
 endif
 endif
 
+$(GEN): PRIVATE_VINTF_VNDK_VERSION := $(VINTF_VNDK_VERSION)
 $(GEN): PRIVATE_FRAMEWORK_MANIFEST_INPUT_FILES := $(FRAMEWORK_MANIFEST_INPUT_FILES)
 $(GEN): $(FRAMEWORK_MANIFEST_INPUT_FILES) $(HOST_OUT_EXECUTABLES)/assemble_vintf
-	BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) $(HOST_OUT_EXECUTABLES)/assemble_vintf \
+	BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) \
+	PROVIDED_VNDK_VERSIONS="$(PRIVATE_VINTF_VNDK_VERSION) $(PRODUCT_EXTRA_VNDK_VERSIONS)" \
+		$(HOST_OUT_EXECUTABLES)/assemble_vintf \
 		-i $(call normalize-path-list,$(PRIVATE_FRAMEWORK_MANIFEST_INPUT_FILES)) \
 		-o $@ $(PRIVATE_FLAGS)
 
@@ -99,60 +111,4 @@
 include $(BUILD_PREBUILT)
 BUILT_SYSTEM_MANIFEST := $(LOCAL_BUILT_MODULE)
 
-# Framework Compatibility Matrix
-include $(CLEAR_VARS)
-LOCAL_MODULE        := framework_compatibility_matrix.xml
-LOCAL_MODULE_STEM   := compatibility_matrix.xml
-LOCAL_MODULE_CLASS  := ETC
-LOCAL_MODULE_PATH   := $(TARGET_OUT)
-
-GEN := $(local-generated-sources-dir)/compatibility_matrix.xml
-
-$(GEN): PRIVATE_FLAGS :=
-
-ifdef BUILT_VENDOR_MANIFEST
-$(GEN): $(BUILT_VENDOR_MANIFEST)
-$(GEN): PRIVATE_FLAGS += -c "$(BUILT_VENDOR_MANIFEST)"
-endif
-
-ifeq (true,$(BOARD_AVB_ENABLE))
-$(GEN): $(AVBTOOL)
-# INTERNAL_AVB_SYSTEM_SIGNING_ARGS consists of BOARD_AVB_SYSTEM_KEY_PATH and
-# BOARD_AVB_SYSTEM_ALGORITHM. We should add the dependency of key path, which
-# is a file, here.
-$(GEN): $(BOARD_AVB_SYSTEM_KEY_PATH)
-# Use deferred assignment (=) instead of immediate assignment (:=).
-# Otherwise, cannot get INTERNAL_AVB_SYSTEM_SIGNING_ARGS.
-FRAMEWORK_VBMETA_VERSION = $$("$(AVBTOOL)" add_hashtree_footer \
-                              --print_required_libavb_version \
-                              $(INTERNAL_AVB_SYSTEM_SIGNING_ARGS) \
-                              $(BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS))
-else
-FRAMEWORK_VBMETA_VERSION := 0.0
-endif
-
-# All kernel versions that the system image works with.
-KERNEL_VERSIONS := 3.18 4.4 4.9
-KERNEL_CONFIG_DATA := kernel/configs
-
-$(GEN): $(foreach version,$(KERNEL_VERSIONS),\
-	$(wildcard $(KERNEL_CONFIG_DATA)/android-$(version)/android-base*.cfg))
-$(GEN): PRIVATE_FLAGS += $(foreach version,$(KERNEL_VERSIONS),\
-	--kernel=$(version):$(call normalize-path-list,\
-		$(wildcard $(KERNEL_CONFIG_DATA)/android-$(version)/android-base*.cfg)))
-
-KERNEL_VERSIONS :=
-KERNEL_CONFIG_DATA :=
-
-$(GEN): $(FRAMEWORK_COMPATIBILITY_MATRIX_FILES) $(HOST_OUT_EXECUTABLES)/assemble_vintf
-	# TODO(b/37405869) (b/37715375) inject avb versions as well for devices that have avb enabled.
-	POLICYVERS=$(POLICYVERS) \
-		BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) \
-		FRAMEWORK_VBMETA_VERSION=$(FRAMEWORK_VBMETA_VERSION) \
-		PRODUCT_ENFORCE_VINTF_MANIFEST=$(PRODUCT_ENFORCE_VINTF_MANIFEST) \
-		$(HOST_OUT_EXECUTABLES)/assemble_vintf \
-		-i $(call normalize-path-list,$(FRAMEWORK_COMPATIBILITY_MATRIX_FILES)) \
-		-o $@ $(PRIVATE_FLAGS)
-LOCAL_PREBUILT_MODULE_FILE := $(GEN)
-include $(BUILD_PREBUILT)
-BUILT_SYSTEM_COMPATIBILITY_MATRIX := $(LOCAL_BUILT_MODULE)
+VINTF_VNDK_VERSION :=
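
A concrete trace of the new plumbing, assuming BOARD_VNDK_VERSION := current and PLATFORM_VNDK_VERSION := 27 (values illustrative):

#   VINTF_VNDK_VERSION := 27
#   device matrix:      REQUIRED_VNDK_VERSION=27 assemble_vintf ...
#   framework manifest: PROVIDED_VNDK_VERSIONS="27 $(PRODUCT_EXTRA_VNDK_VERSIONS)" \
#                           assemble_vintf ...
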
diff --git a/target/board/generic/sepolicy/bootanim.te b/target/board/generic/sepolicy/bootanim.te
index b23e1ca..e4f7c73 100644
--- a/target/board/generic/sepolicy/bootanim.te
+++ b/target/board/generic/sepolicy/bootanim.te
@@ -3,7 +3,6 @@
 #TODO: This can safely be ignored until b/62954877 is fixed
 dontaudit bootanim system_data_file:dir read;
 
-allow bootanim vendor_file:file { execute getattr open read };
 allow bootanim graphics_device:chr_file { read ioctl open };
 
 set_prop(bootanim, qemu_prop)
diff --git a/target/board/generic_armv5/AndroidBoard.mk b/target/board/generic_armv5/AndroidBoard.mk
deleted file mode 100644
index 7daff27..0000000
--- a/target/board/generic_armv5/AndroidBoard.mk
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
--include build/target/board/generic/AndroidBoard.mk
diff --git a/target/board/generic_armv5/BoardConfig.mk b/target/board/generic_armv5/BoardConfig.mk
deleted file mode 100644
index 016937a..0000000
--- a/target/board/generic_armv5/BoardConfig.mk
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include build/target/board/generic/BoardConfig.mk
-
-TARGET_ARCH_VARIANT := armv5te
-TARGET_CPU_ABI := armeabi
-TARGET_CPU_ABI2 :=
-
-WITH_DEXPREOPT := false
diff --git a/target/board/generic_armv5/README.txt b/target/board/generic_armv5/README.txt
deleted file mode 100644
index 25d590a..0000000
--- a/target/board/generic_armv5/README.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-The "generic_armv5" product defines a non-hardware-specific target
-without a kernel or bootloader.
-
-It is not a product "base class"; no other products inherit
-from it or use it in any way.
diff --git a/target/board/generic_armv5/device.mk b/target/board/generic_armv5/device.mk
deleted file mode 100644
index 7c4aaf2..0000000
--- a/target/board/generic_armv5/device.mk
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include build/target/board/generic/device.mk
diff --git a/target/board/generic_armv5/system.prop b/target/board/generic_armv5/system.prop
deleted file mode 100644
index 137a0f9..0000000
--- a/target/board/generic_armv5/system.prop
+++ /dev/null
@@ -1,6 +0,0 @@
-#
-# system.prop for generic sdk
-#
-
-rild.libpath=/system/lib/libreference-ril.so
-rild.libargs=-d /dev/ttyS0
diff --git a/target/board/generic_x86/BoardConfig.mk b/target/board/generic_x86/BoardConfig.mk
index f8fb88f..a73a31b 100644
--- a/target/board/generic_x86/BoardConfig.mk
+++ b/target/board/generic_x86/BoardConfig.mk
@@ -11,6 +11,10 @@
 TARGET_ARCH_VARIANT := x86
 TARGET_PRELINK_MODULE := false
 
+# The emulator now uses a 64-bit kernel to run the 32-bit x86 image.
+TARGET_USES_64_BIT_BINDER := true
+
 # The IA emulator (qemu) uses the Goldfish devices
 HAVE_HTC_AUDIO_DRIVER := true
 BOARD_USES_GENERIC_AUDIO := true
diff --git a/target/board/treble_common.mk b/target/board/treble_common.mk
index 44f601f..a8c9bc5 100644
--- a/target/board/treble_common.mk
+++ b/target/board/treble_common.mk
@@ -36,26 +36,25 @@
 # Generic AOSP image always requires separate vendor.img
 TARGET_COPY_OUT_VENDOR := vendor
 
-# Enable dex pre-opt to speed up initial boot
-ifeq ($(HOST_OS),linux)
-  ifeq ($(WITH_DEXPREOPT),)
-    WITH_DEXPREOPT := true
-    WITH_DEXPREOPT_PIC := true
-    ifneq ($(TARGET_BUILD_VARIANT),user)
-      # Retain classes.dex in APK's for non-user builds
-      DEX_PREOPT_DEFAULT := nostripping
-    endif
-  endif
-endif
-
 # Generic AOSP image does NOT support HWC1
 TARGET_USES_HWC2 := true
 # Set emulator framebuffer display device buffer count to 3
 NUM_FRAMEBUFFER_SURFACE_BUFFERS := 3
 
-BOARD_FLASH_BLOCK_SIZE := 512
+# Audio
+USE_XML_AUDIO_POLICY_CONF := 1
 
 # b/64700195: add minimum support for odm.img
 # Currently odm.img can only be built by `make custom_images`.
 # Adding /odm mount point under root directory.
 BOARD_ROOT_EXTRA_FOLDERS += odm
+
+# Android Verified Boot (AVB):
+#   Builds a special vbmeta.img that disables AVB verification.
+#   Otherwise, AVB will prevent the device from booting the generic system.img.
+#   Also checks that BOARD_AVB_ENABLE is not set, to prevent adding verity
+#   metadata into system.img.
+ifeq ($(BOARD_AVB_ENABLE),true)
+$(error BOARD_AVB_ENABLE cannot be set for Treble GSI)
+endif
+BOARD_BUILD_DISABLED_VBMETAIMAGE := true
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index 9e2adee..85330b3 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -36,7 +36,6 @@
 PRODUCT_MAKEFILES := \
     $(LOCAL_DIR)/aosp_arm.mk \
     $(LOCAL_DIR)/full.mk \
-    $(LOCAL_DIR)/generic_armv5.mk \
     $(LOCAL_DIR)/aosp_x86.mk \
     $(LOCAL_DIR)/full_x86.mk \
     $(LOCAL_DIR)/aosp_mips.mk \
diff --git a/target/product/aosp_x86.mk b/target/product/aosp_x86.mk
index 03203ce..811c330 100644
--- a/target/product/aosp_x86.mk
+++ b/target/product/aosp_x86.mk
@@ -24,7 +24,7 @@
 PRODUCT_COPY_FILES += \
     development/sys-img/advancedFeatures.ini:advancedFeatures.ini \
     device/generic/goldfish/data/etc/encryptionkey.img:encryptionkey.img \
-    prebuilts/qemu-kernel/x86/3.18/kernel-qemu2:kernel-ranchu
+    prebuilts/qemu-kernel/x86_64/3.18/kernel-qemu2:kernel-ranchu-64
 
 include $(SRC_TARGET_DIR)/product/full_x86.mk
 
diff --git a/target/product/core_minimal.mk b/target/product/core_minimal.mk
index b252349..16599cb 100644
--- a/target/product/core_minimal.mk
+++ b/target/product/core_minimal.mk
@@ -85,6 +85,7 @@
     telephony-common \
     uiautomator \
     uncrypt \
+    vndk_snapshot_package \
     voip-common \
     webview \
     webview_zygote \
diff --git a/target/product/embedded.mk b/target/product/embedded.mk
index 55ee6dc..20f0ebf 100644
--- a/target/product/embedded.mk
+++ b/target/product/embedded.mk
@@ -23,6 +23,7 @@
     android.hardware.configstore@1.0-service \
     android.hidl.allocator@1.0-service \
     android.hidl.memory@1.0-impl \
+    android.hidl.memory@1.0-impl.vendor \
     atrace \
     bootanimation \
     bootstat \
diff --git a/target/product/generic_armv5.mk b/target/product/generic_armv5.mk
deleted file mode 100644
index daa321a..0000000
--- a/target/product/generic_armv5.mk
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is a generic product that isn't specialized for a specific device.
-# It includes the base Android platform.
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/generic.mk)
-
-# Overrides
-PRODUCT_BRAND := generic_armv5
-PRODUCT_DEVICE := generic_armv5
-PRODUCT_NAME := generic_armv5
diff --git a/target/product/sdk_phone_x86.mk b/target/product/sdk_phone_x86.mk
index 1e82773..b9820d3 100644
--- a/target/product/sdk_phone_x86.mk
+++ b/target/product/sdk_phone_x86.mk
@@ -24,7 +24,7 @@
 PRODUCT_COPY_FILES += \
     development/sys-img/advancedFeatures.ini:advancedFeatures.ini \
     device/generic/goldfish/data/etc/encryptionkey.img:encryptionkey.img \
-    prebuilts/qemu-kernel/x86/3.18/kernel-qemu2:kernel-ranchu
+    prebuilts/qemu-kernel/x86_64/3.18/kernel-qemu2:kernel-ranchu-64
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
 
diff --git a/target/product/treble_common.mk b/target/product/treble_common.mk
index c385352..5880bf8 100644
--- a/target/product/treble_common.mk
+++ b/target/product/treble_common.mk
@@ -42,8 +42,6 @@
 PRODUCT_PACKAGES += \
     libvulkan \
 
-# Audio:
-USE_XML_AUDIO_POLICY_CONF := 1
 # The following policy XML files are used as fallback for
 # vendors/devices not using XML to configure audio policy.
 PRODUCT_COPY_FILES += \
@@ -72,16 +70,6 @@
 PRODUCT_COPY_FILES += \
     device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml
 
-# Android Verified Boot (AVB):
-#   Builds a special vbmeta.img that disables AVB verification.
-#   Otherwise, AVB will prevent the device from booting the generic system.img.
-#   Also checks that BOARD_AVB_ENABLE is not set, to prevent adding verity
-#   metadata into system.img.
-ifeq ($(BOARD_AVB_ENABLE),true)
-$(error BOARD_AVB_ENABLE cannot be set for Treble GSI)
-endif
-BOARD_BUILD_DISABLED_VBMETAIMAGE := true
-
 #GSI support for the devices that disable VNDK enforcing
 PRODUCT_COPY_FILES += \
     system/core/rootdir/etc/ld.config.txt:system/etc/ld.config.noenforce.txt \
diff --git a/target/product/vndk/Android.mk b/target/product/vndk/Android.mk
index ea8c95e..93aaf37 100644
--- a/target/product/vndk/Android.mk
+++ b/target/product/vndk/Android.mk
@@ -77,26 +77,19 @@
 	@chmod a+x $@
 
 include $(CLEAR_VARS)
-LOCAL_MODULE := vndk_current
+LOCAL_MODULE := vndk_package
 LOCAL_REQUIRED_MODULES := \
     $(addsuffix .vendor,$(VNDK_CORE_LIBRARIES)) \
     $(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES)) \
     $(LLNDK_LIBRARIES) \
     llndk.libraries.txt \
     vndksp.libraries.txt
-
 include $(BUILD_PHONY_PACKAGE)
 
 include $(CLEAR_VARS)
-LOCAL_MODULE := vndk_package
-ifeq (current,$(BOARD_VNDK_VERSION))
+LOCAL_MODULE := vndk_snapshot_package
 LOCAL_REQUIRED_MODULES := \
-    vndk_current
-else
-LOCAL_REQUIRED_MODULES := \
-    vndk_v$(BOARD_VNDK_VERSION)
-endif
-LOCAL_REQUIRED_MODULES += \
-    $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),vndk_v$(vndk_ver))
+    $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),vndk_v$(vndk_ver)_$(TARGET_ARCH))
 include $(BUILD_PHONY_PACKAGE)
+
 endif # BOARD_VNDK_VERSION is set
diff --git a/tools/adbs b/tools/adbs
deleted file mode 100755
index a8f06c0..0000000
--- a/tools/adbs
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import os.path
-import re
-import string
-import sys
-
-sys.path.insert(0, os.path.dirname(__file__) + "/../../development/scripts")
-import stack_core
-import symbol
-
-if __name__ == '__main__':
-  # pass the options to adb
-  adb_cmd  = "adb " + ' '.join(sys.argv[1:])
-
-  # create tracer for line parsing
-  tracer = stack_core.TraceConverter()
-
-  # invoke the adb command and filter its output
-  stream = os.popen(adb_cmd)
-  while (True):
-    line = stream.readline()
-    if (line == ''):
-      break
-    if(tracer.ProcessLine(line) == False):
-      print(line.strip())
-      sys.stdout.flush()
-
-  # adb itself aborts
-  stream.close()
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 8b55a45..9601d88 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -307,8 +307,7 @@
   if OPTIONS.info_dict.get("userdata_img_with_data") == "true":
     user_dir = os.path.join(OPTIONS.input_tmp, "DATA")
   else:
-    user_dir = tempfile.mkdtemp()
-    OPTIONS.tempfiles.append(user_dir)
+    user_dir = common.MakeTempDir()
 
   fstab = OPTIONS.info_dict["fstab"]
   if fstab:
@@ -363,9 +362,7 @@
   cmd = [avbtool, "make_vbmeta_image", "--output", img.name]
   common.AppendAVBSigningArgs(cmd, "vbmeta")
 
-  public_key_dir = tempfile.mkdtemp(prefix="avbpubkey-")
-  OPTIONS.tempfiles.append(public_key_dir)
-
+  public_key_dir = common.MakeTempDir(prefix="avbpubkey-")
   for partition, path in partitions.items():
     assert partition in common.AVB_PARTITIONS, 'Unknown partition: %s' % (
         partition,)
@@ -453,8 +450,7 @@
   timestamp = (datetime.datetime(2009, 1, 1) - epoch).total_seconds()
   image_props["timestamp"] = int(timestamp)
 
-  user_dir = tempfile.mkdtemp()
-  OPTIONS.tempfiles.append(user_dir)
+  user_dir = common.MakeTempDir()
 
   fstab = OPTIONS.info_dict["fstab"]
   if fstab:
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index 8f06b95..f366853 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -237,15 +237,23 @@
 class HeapItem(object):
   def __init__(self, item):
     self.item = item
-    # Negate the score since python's heap is a min-heap and we want
-    # the maximum score.
+    # Negate the score since python's heap is a min-heap and we want the
+    # maximum score.
     self.score = -item.score
+
   def clear(self):
     self.item = None
+
   def __bool__(self):
-    return self.item is None
+    return self.item is not None
+
+  # Python 2 uses __nonzero__, while Python 3 uses __bool__.
+  __nonzero__ = __bool__
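+  # (Illustrative note: bool(heap_item) thus stays True until clear() is
+  # called, on both Python 2 and Python 3.)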
+
+  # The remaining rich comparison operations are generated by the
+  # functools.total_ordering decorator.
   def __eq__(self, other):
     return self.score == other.score
+
   def __le__(self, other):
     return self.score <= other.score
 
@@ -1377,8 +1385,8 @@
       assert patch_start == patch_size
       return split_info_list
 
-    def AddSplitTransferForLargeApks():
-      """Create split transfers for large apk files.
+    def SplitLargeApks():
+      """Split the large apks files.
 
       Example: Chrome.apk will be split into
         src-0: Chrome.apk-0, tgt-0: Chrome.apk-0
@@ -1444,22 +1452,22 @@
 
           split_src_name = "{}-{}".format(src_name, index)
           split_tgt_name = "{}-{}".format(tgt_name, index)
-          transfer_split = Transfer(split_tgt_name, split_src_name,
-                                    split_tgt_ranges, split_src_ranges,
-                                    self.tgt.RangeSha1(split_tgt_ranges),
-                                    self.src.RangeSha1(split_src_ranges),
-                                    "diff", self.transfers)
-          transfer_split.patch = patch_content
+          split_large_apks.append((split_tgt_name,
+                                   split_src_name,
+                                   split_tgt_ranges,
+                                   split_src_ranges,
+                                   patch_content))
 
     print("Finding transfers...")
 
     large_apks = []
+    split_large_apks = []
     cache_size = common.OPTIONS.cache_size
     split_threshold = 0.125
     max_blocks_per_transfer = int(cache_size * split_threshold /
                                   self.tgt.blocksize)
     empty = RangeSet()
-    for tgt_fn, tgt_ranges in self.tgt.file_map.items():
+    for tgt_fn, tgt_ranges in sorted(self.tgt.file_map.items()):
       if tgt_fn == "__ZERO":
         # the special "__ZERO" domain is all the blocks not contained
         # in any file and that are filled with zeros.  We have a
@@ -1503,13 +1511,23 @@
       AddTransfer(tgt_fn, None, tgt_ranges, empty, "new", self.transfers)
 
     transfer_lock = threading.Lock()
-    threads = [threading.Thread(target=AddSplitTransferForLargeApks)
+    threads = [threading.Thread(target=SplitLargeApks)
                for _ in range(self.threads)]
     for th in threads:
       th.start()
     while threads:
       threads.pop().join()
 
+    # Sort the split transfers for large APKs to generate a deterministic
+    # package.
+    split_large_apks.sort()
+    for (tgt_name, src_name, tgt_ranges, src_ranges,
+         patch) in split_large_apks:
+      transfer_split = Transfer(tgt_name, src_name, tgt_ranges, src_ranges,
+                                self.tgt.RangeSha1(tgt_ranges),
+                                self.src.RangeSha1(src_ranges),
+                                "diff", self.transfers)
+      transfer_split.patch = patch
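+    # (The Transfer constructor appends each new transfer to self.transfers,
+    # so iterating the sorted list here is what fixes the transfer order.)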
+
   def AbbreviateSourceNames(self):
     for k in self.src.file_map.keys():
       b = os.path.basename(k)
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 2a92d86..ed60188 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -15,27 +15,33 @@
 # limitations under the License.
 
 """
-Build image output_image_file from input_directory, properties_file, and target_out_dir
+Builds output_image from the given input_directory and properties_file,
+and writes the image to target_output_directory.
 
-Usage:  build_image input_directory properties_file output_image_file target_out_dir
-
+Usage:  build_image.py input_directory properties_file output_image \\
+            target_output_directory
 """
+
+from __future__ import print_function
+
 import os
 import os.path
 import re
-import subprocess
-import sys
-import common
 import shlex
 import shutil
+import subprocess
+import sys
+
+import common
 import sparse_img
-import tempfile
+
 
 OPTIONS = common.OPTIONS
 
 FIXED_SALT = "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
 BLOCK_SIZE = 4096
 
+
 def RunCommand(cmd, verbose=None):
   """Echo and run the given command.
 
@@ -56,6 +62,7 @@
     print(output.rstrip())
   return (output, p.returncode)
 
+
 def GetVerityFECSize(partition_size):
   cmd = ["fec", "-s", str(partition_size)]
   output, exit_code = RunCommand(cmd, False)
@@ -63,6 +70,7 @@
     return False, 0
   return True, int(output)
 
+
 def GetVerityTreeSize(partition_size):
   cmd = ["build_verity_tree", "-s", str(partition_size)]
   output, exit_code = RunCommand(cmd, False)
@@ -70,6 +78,7 @@
     return False, 0
   return True, int(output)
 
+
 def GetVerityMetadataSize(partition_size):
   cmd = ["system/extras/verity/build_verity_metadata.py", "size",
          str(partition_size)]
@@ -78,6 +87,7 @@
     return False, 0
   return True, int(output)
 
+
 def GetVeritySize(partition_size, fec_supported):
   success, verity_tree_size = GetVerityTreeSize(partition_size)
   if not success:
@@ -93,16 +103,19 @@
     return verity_size + fec_size
   return verity_size
 
+
 def GetSimgSize(image_file):
   simg = sparse_img.SparseImage(image_file, build_map=False)
   return simg.blocksize * simg.total_blocks
 
+
 def ZeroPadSimg(image_file, pad_size):
   blocks = pad_size // BLOCK_SIZE
   print("Padding %d blocks (%d bytes)" % (blocks, pad_size))
   simg = sparse_img.SparseImage(image_file, mode="r+b", build_map=False)
   simg.AppendFillChunk(0, blocks)
 
+
 def AVBCalcMaxImageSize(avbtool, footer_type, partition_size, additional_args):
   """Calculates max image size for a given partition size.
 
@@ -115,8 +128,8 @@
   Returns:
     The maximum image size or 0 if an error occurred.
   """
-  cmd =[avbtool, "add_%s_footer" % footer_type,
-        "--partition_size", partition_size, "--calc_max_image_size"]
+  cmd = [avbtool, "add_%s_footer" % footer_type,
+         "--partition_size", partition_size, "--calc_max_image_size"]
   cmd.extend(shlex.split(additional_args))
 
   (output, exit_code) = RunCommand(cmd)
@@ -125,6 +138,7 @@
   else:
     return int(output)
 
+
 def AVBAddFooter(image_path, avbtool, footer_type, partition_size,
                  partition_name, key_path, algorithm, salt,
                  additional_args):
@@ -140,14 +154,15 @@
     algorithm: Name of algorithm to use or None.
     salt: The salt to use (a hexadecimal string) or None.
     additional_args: Additional arguments to pass to 'avbtool
-      add_hashtree_image'.
+        add_hash_footer' / 'avbtool add_hashtree_footer'.
+
   Returns:
     True if the operation succeeded.
   """
-  cmd =[avbtool, "add_%s_footer" % footer_type,
-        "--partition_size", partition_size,
-        "--partition_name", partition_name,
-        "--image", image_path]
+  cmd = [avbtool, "add_%s_footer" % footer_type,
+         "--partition_size", partition_size,
+         "--partition_name", partition_name,
+         "--image", image_path]
 
   if key_path and algorithm:
     cmd.extend(["--key", key_path, "--algorithm", algorithm])
@@ -159,12 +174,15 @@
   (_, exit_code) = RunCommand(cmd)
   return exit_code == 0
 
+
 def AdjustPartitionSizeForVerity(partition_size, fec_supported):
   """Modifies the provided partition size to account for the verity metadata.
 
   This information is used to size the created image appropriately.
+
   Args:
     partition_size: the size of the partition to be verified.
+
   Returns:
     A tuple of the size of the partition adjusted for verity metadata, and
     the size of verity metadata.
@@ -201,30 +219,34 @@
   AdjustPartitionSizeForVerity.results[key] = (result, verity_size)
   return (result, verity_size)
 
+
 AdjustPartitionSizeForVerity.results = {}
 
+
 def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path,
                    padding_size):
   cmd = ["fec", "-e", "-p", str(padding_size), sparse_image_path,
          verity_path, verity_fec_path]
   output, exit_code = RunCommand(cmd)
   if exit_code != 0:
-    print "Could not build FEC data! Error: %s" % output
+    print("Could not build FEC data! Error: %s" % output)
     return False
   return True
 
+
 def BuildVerityTree(sparse_image_path, verity_image_path, prop_dict):
   cmd = ["build_verity_tree", "-A", FIXED_SALT, sparse_image_path,
          verity_image_path]
   output, exit_code = RunCommand(cmd)
   if exit_code != 0:
-    print "Could not build verity tree! Error: %s" % output
+    print("Could not build verity tree! Error: %s" % output)
     return False
   root, salt = output.split()
   prop_dict["verity_root_hash"] = root
   prop_dict["verity_salt"] = salt
   return True
 
+
 def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
                         block_device, signer_path, key, signer_args,
                         verity_disable):
@@ -237,10 +259,11 @@
     cmd.append("--verity_disable")
   output, exit_code = RunCommand(cmd)
   if exit_code != 0:
-    print "Could not build verity metadata! Error: %s" % output
+    print("Could not build verity metadata! Error: %s" % output)
     return False
   return True
 
+
 def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
   """Appends the unsparse image to the given sparse image.
 
@@ -253,18 +276,23 @@
   cmd = ["append2simg", sparse_image_path, unsparse_image_path]
   output, exit_code = RunCommand(cmd)
   if exit_code != 0:
-    print "%s: %s" % (error_message, output)
+    print("%s: %s" % (error_message, output))
     return False
   return True
 
+
 def Append(target, file_to_append, error_message):
-  # appending file_to_append to target
-  with open(target, "a") as out_file:
-    with open(file_to_append, "r") as input_file:
+  """Appends file_to_append to target."""
+  try:
+    with open(target, "a") as out_file, open(file_to_append, "r") as input_file:
       for line in input_file:
         out_file.write(line)
+  except IOError:
+    print(error_message)
+    return False
   return True
 
+
 def BuildVerifiedImage(data_image_path, verity_image_path,
                        verity_metadata_path, verity_fec_path,
                        padding_size, fec_supported):
@@ -286,6 +314,7 @@
     return False
   return True
 
+
 def UnsparseImage(sparse_image_path, replace=True):
   img_dir = os.path.dirname(sparse_image_path)
   unsparse_image_path = "unsparse_" + os.path.basename(sparse_image_path)
@@ -302,6 +331,7 @@
     return False, None
   return True, unsparse_image_path
 
+
 def MakeVerityEnabledImage(out_file, fec_supported, prop_dict):
   """Creates an image that is verifiable using dm-verity.
 
@@ -323,7 +353,7 @@
   signer_args = OPTIONS.verity_signer_args
 
   # make a tempdir
-  tempdir_name = tempfile.mkdtemp(suffix="_verity_images")
+  tempdir_name = common.MakeTempDir(suffix="_verity_images")
 
   # get partial image paths
   verity_image_path = os.path.join(tempdir_name, "verity.img")
@@ -332,7 +362,6 @@
 
   # build the verity tree and get the root hash and salt
   if not BuildVerityTree(out_file, verity_image_path, prop_dict):
-    shutil.rmtree(tempdir_name, ignore_errors=True)
     return False
 
   # build the metadata blocks
@@ -342,7 +371,6 @@
   if not BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
                              block_dev, signer_path, signer_key, signer_args,
                              verity_disable):
-    shutil.rmtree(tempdir_name, ignore_errors=True)
     return False
 
   # build the full verified image
@@ -358,23 +386,16 @@
                             verity_fec_path,
                             padding_size,
                             fec_supported):
-    shutil.rmtree(tempdir_name, ignore_errors=True)
     return False
 
-  shutil.rmtree(tempdir_name, ignore_errors=True)
   return True
 
-def ConvertBlockMapToBaseFs(block_map_file):
-  fd, base_fs_file = tempfile.mkstemp(prefix="script_gen_",
-                                      suffix=".base_fs")
-  os.close(fd)
 
+def ConvertBlockMapToBaseFs(block_map_file):
+  base_fs_file = common.MakeTempFile(prefix="script_gen_", suffix=".base_fs")
   convert_command = ["blk_alloc_to_base_fs", block_map_file, base_fs_file]
   (_, exit_code) = RunCommand(convert_command)
-  if exit_code != 0:
-    os.remove(base_fs_file)
-    return None
-  return base_fs_file
+  return base_fs_file if exit_code == 0 else None
 
 
 def CheckHeadroom(ext4fs_output, prop_dict):
@@ -391,17 +412,26 @@
 
   Returns:
     The check result.
+
+  Raises:
+    AssertionError: On invalid input.
   """
+  assert ext4fs_output is not None
+  assert prop_dict.get('fs_type', '').startswith('ext4')
+  assert 'partition_headroom' in prop_dict
+  assert 'mount_point' in prop_dict
+
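+  # The last line of the mke2fs output is expected to look like the following
+  # (an illustrative example):
+  #   Created filesystem with 11/1280 inodes and 206/5120 blocks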
   ext4fs_stats = re.compile(
       r'Created filesystem with .* (?P<used_blocks>[0-9]+)/'
       r'(?P<total_blocks>[0-9]+) blocks')
-  m = ext4fs_stats.match(ext4fs_output.strip().split('\n')[-1])
+  last_line = ext4fs_output.strip().split('\n')[-1]
+  m = ext4fs_stats.match(last_line)
   used_blocks = int(m.groupdict().get('used_blocks'))
   total_blocks = int(m.groupdict().get('total_blocks'))
-  headroom_blocks = int(prop_dict.get('partition_headroom')) / BLOCK_SIZE
+  headroom_blocks = int(prop_dict['partition_headroom']) / BLOCK_SIZE
   adjusted_blocks = total_blocks - headroom_blocks
   if used_blocks > adjusted_blocks:
-    mount_point = prop_dict.get("mount_point")
+    mount_point = prop_dict["mount_point"]
     print("Error: Not enough room on %s (total: %d blocks, used: %d blocks, "
           "headroom: %d blocks, available: %d blocks)" % (
               mount_point, total_blocks, used_blocks, headroom_blocks,
@@ -417,7 +447,8 @@
     in_dir: path of input directory.
     prop_dict: property dictionary.
     out_file: path of the output image file.
-    target_out: path of the product out directory to read device specific FS config files.
+    target_out: path of the product out directory to read device specific FS
+        config files.
 
   Returns:
     True iff the image is built successfully.
@@ -426,17 +457,15 @@
   # /system and the ramdisk, and can be mounted at the root of the file system.
   origin_in = in_dir
   fs_config = prop_dict.get("fs_config")
-  base_fs_file = None
-  if (prop_dict.get("system_root_image") == "true"
-      and prop_dict["mount_point"] == "system"):
-    in_dir = tempfile.mkdtemp()
-    # Change the mount point to "/"
+  if (prop_dict.get("system_root_image") == "true" and
+      prop_dict["mount_point"] == "system"):
+    in_dir = common.MakeTempDir()
+    # Change the mount point to "/".
     prop_dict["mount_point"] = "/"
     if fs_config:
       # We need to merge the fs_config files of system and ramdisk.
-      fd, merged_fs_config = tempfile.mkstemp(prefix="root_fs_config",
-                                              suffix=".txt")
-      os.close(fd)
+      merged_fs_config = common.MakeTempFile(prefix="root_fs_config",
+                                             suffix=".txt")
       with open(merged_fs_config, "w") as fw:
         if "ramdisk_fs_config" in prop_dict:
           with open(prop_dict["ramdisk_fs_config"]) as fr:
@@ -447,7 +476,7 @@
 
   build_command = []
   fs_type = prop_dict.get("fs_type", "")
-  run_fsck = False
+  run_e2fsck = False
 
   fs_spans_partition = True
   if fs_type.startswith("squash"):
@@ -461,8 +490,8 @@
   # verified.
   if verity_supported and is_verity_partition:
     partition_size = int(prop_dict.get("partition_size"))
-    (adjusted_size, verity_size) = AdjustPartitionSizeForVerity(partition_size,
-                                                                verity_fec_supported)
+    (adjusted_size, verity_size) = AdjustPartitionSizeForVerity(
+        partition_size, verity_fec_supported)
     if not adjusted_size:
       return False
     prop_dict["partition_size"] = str(adjusted_size)
@@ -481,8 +510,8 @@
     partition_size = prop_dict["partition_size"]
     # avb_add_hash_footer_args or avb_add_hashtree_footer_args.
     additional_args = prop_dict["avb_add_" + avb_footer_type + "_footer_args"]
-    max_image_size = AVBCalcMaxImageSize(avbtool, avb_footer_type, partition_size,
-                                         additional_args)
+    max_image_size = AVBCalcMaxImageSize(avbtool, avb_footer_type,
+                                         partition_size, additional_args)
     if max_image_size == 0:
       return False
     prop_dict["partition_size"] = str(max_image_size)
@@ -492,7 +521,7 @@
     build_command = [prop_dict["ext_mkuserimg"]]
     if "extfs_sparse_flag" in prop_dict:
       build_command.append(prop_dict["extfs_sparse_flag"])
-      run_fsck = True
+      run_e2fsck = True
     build_command.extend([in_dir, out_file, fs_type,
                           prop_dict["mount_point"]])
     build_command.append(prop_dict["partition_size"])
@@ -546,7 +575,7 @@
       build_command.extend(["-zo", prop_dict["squashfs_compressor_opt"]])
     if "squashfs_block_size" in prop_dict:
       build_command.extend(["-b", prop_dict["squashfs_block_size"]])
-    if "squashfs_disable_4k_align" in prop_dict and prop_dict.get("squashfs_disable_4k_align") == "true":
+    if prop_dict.get("squashfs_disable_4k_align") == "true":
       build_command.extend(["-a"])
   elif fs_type.startswith("f2fs"):
     build_command = ["mkf2fsuserimg.sh"]
@@ -576,28 +605,14 @@
     shutil.rmtree(staging_system, ignore_errors=True)
     shutil.copytree(origin_in, staging_system, symlinks=True)
 
-  ext4fs_output = None
-  try:
-    if fs_type.startswith("ext4"):
-      (ext4fs_output, exit_code) = RunCommand(build_command)
-    else:
-      (_, exit_code) = RunCommand(build_command)
-  finally:
-    if in_dir != origin_in:
-      # Clean up temporary directories and files.
-      shutil.rmtree(in_dir, ignore_errors=True)
-      if fs_config:
-        os.remove(fs_config)
-    if base_fs_file is not None:
-      os.remove(base_fs_file)
+  (mkfs_output, exit_code) = RunCommand(build_command)
   if exit_code != 0:
     print("Error: '%s' failed with exit code %d" % (build_command, exit_code))
     return False
 
   # Check if there's enough headroom space available for ext4 image.
   if "partition_headroom" in prop_dict and fs_type.startswith("ext4"):
-    assert ext4fs_output is not None
-    if not CheckHeadroom(ext4fs_output, prop_dict):
+    if not CheckHeadroom(mkfs_output, prop_dict):
       return False
 
   if not fs_spans_partition:
@@ -611,7 +626,7 @@
     if verity_supported and is_verity_partition:
       ZeroPadSimg(out_file, partition_size - image_size)
 
-  # create the verified image if this is to be verified
+  # Create the verified image if this is to be verified.
   if verity_supported and is_verity_partition:
     if not MakeVerityEnabledImage(out_file, verity_fec_supported, prop_dict):
       return False
@@ -627,11 +642,12 @@
     salt = prop_dict.get("avb_salt")
     # avb_add_hash_footer_args or avb_add_hashtree_footer_args
     additional_args = prop_dict["avb_add_" + avb_footer_type + "_footer_args"]
-    if not AVBAddFooter(out_file, avbtool, avb_footer_type, original_partition_size,
-                        partition_name, key_path, algorithm, salt, additional_args):
+    if not AVBAddFooter(out_file, avbtool, avb_footer_type,
+                        original_partition_size, partition_name, key_path,
+                        algorithm, salt, additional_args):
       return False
 
-  if run_fsck and prop_dict.get("skip_fsck") != "true":
+  if run_e2fsck and prop_dict.get("skip_fsck") != "true":
     success, unsparse_image = UnsparseImage(out_file, replace=False)
     if not success:
       return False
@@ -643,7 +659,8 @@
     os.remove(unsparse_image)
 
     if exit_code != 0:
-      print("Error: '%s' failed with exit code %d" % (e2fsck_command, exit_code))
+      print("Error: '%s' failed with exit code %d" % (e2fsck_command,
+                                                      exit_code))
       return False
 
   return True
@@ -710,7 +727,8 @@
     copy_prop("system_base_fs_file", "base_fs_file")
     copy_prop("system_extfs_inode_count", "extfs_inode_count")
   elif mount_point == "system_other":
-    # We inherit the selinux policies of /system since we contain some of its files.
+    # We inherit the selinux policies of /system since we contain some of its
+    # files.
     d["mount_point"] = "system"
     copy_prop("avb_system_hashtree_enable", "avb_hashtree_enable")
     copy_prop("avb_system_add_hashtree_footer_args",
@@ -732,7 +750,7 @@
     copy_prop("fs_type", "fs_type")
     copy_prop("userdata_fs_type", "fs_type")
     copy_prop("userdata_size", "partition_size")
-    copy_prop("flash_logical_block_size","flash_logical_block_size")
+    copy_prop("flash_logical_block_size", "flash_logical_block_size")
     copy_prop("flash_erase_block_size", "flash_erase_block_size")
   elif mount_point == "cache":
     copy_prop("cache_fs_type", "fs_type")
@@ -778,7 +796,7 @@
 
 def main(argv):
   if len(argv) != 4:
-    print __doc__
+    print(__doc__)
     sys.exit(1)
 
   in_dir = argv[0]
@@ -807,16 +825,19 @@
     elif image_filename == "oem.img":
       mount_point = "oem"
     else:
-      print >> sys.stderr, "error: unknown image file name ", image_filename
-      exit(1)
+      print("error: unknown image file name ", image_filename, file=sys.stderr)
+      sys.exit(1)
 
     image_properties = ImagePropFromGlobalDict(glob_dict, mount_point)
 
   if not BuildImage(in_dir, image_properties, out_file, target_out):
-    print >> sys.stderr, "error: failed to build %s from %s" % (out_file,
-                                                                in_dir)
-    exit(1)
+    print("error: failed to build %s from %s" % (out_file, in_dir),
+          file=sys.stderr)
+    sys.exit(1)
 
 
 if __name__ == '__main__':
-  main(sys.argv[1:])
+  try:
+    main(sys.argv[1:])
+  finally:
+    common.Cleanup()
diff --git a/tools/releasetools/check_target_files_signatures.py b/tools/releasetools/check_target_files_signatures.py
index c4877e0..db63fd3 100755
--- a/tools/releasetools/check_target_files_signatures.py
+++ b/tools/releasetools/check_target_files_signatures.py
@@ -53,11 +53,13 @@
 
 import common
 
-# Work around a bug in python's zipfile module that prevents opening
-# of zipfiles if any entry has an extra field of between 1 and 3 bytes
-# (which is common with zipaligned APKs).  This overrides the
-# ZipInfo._decodeExtra() method (which contains the bug) with an empty
-# version (since we don't need to decode the extra field anyway).
+# Work around a bug in Python's zipfile module that prevents opening of zipfiles
+# if any entry has an extra field of between 1 and 3 bytes (which is common with
+# zipaligned APKs). This overrides the ZipInfo._decodeExtra() method (which
+# contains the bug) with an empty version (since we don't need to decode the
+# extra field anyway).
+# Issue #14315: https://bugs.python.org/issue14315, fixed in Python 2.7.8 and
+# Python 3.5.0 alpha 1.
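+# (Illustrative effect: with this override in place, zipfile.ZipFile() opens
+# zipaligned APKs whose extra fields would otherwise trip the buggy parser.)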
 class MyZipInfo(zipfile.ZipInfo):
   def _decodeExtra(self):
     pass
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 12e757d..ebebd63 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -574,18 +574,16 @@
 
 
 def UnzipTemp(filename, pattern=None):
-  """Unzip the given archive into a temporary directory and return the name.
+  """Unzips the given archive into a temporary directory and returns the name.
 
-  If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a
-  temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
+  If filename is of the form "foo.zip+bar.zip", unzip foo.zip into a temp dir,
+  then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
 
-  Returns (tempdir, zipobj) where zipobj is a zipfile.ZipFile (of the
-  main file), open for reading.
+  Returns:
+    (tempdir, zipobj): tempdir is the name of the temporary directory; zipobj is
+        a zipfile.ZipFile (of the main file), open for reading.
   """
 
-  tmp = tempfile.mkdtemp(prefix="targetfiles-")
-  OPTIONS.tempfiles.append(tmp)
-
   def unzip_to_dir(filename, dirname):
     cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
     if pattern is not None:
@@ -596,6 +594,7 @@
       raise ExternalError("failed to unzip input target-files \"%s\"" %
                           (filename,))
 
+  tmp = MakeTempDir(prefix="targetfiles-")
   m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE)
   if m:
     unzip_to_dir(m.group(1), tmp)
@@ -793,11 +792,22 @@
 
 
 def ReadApkCerts(tf_zip):
-  """Given a target_files ZipFile, parse the META/apkcerts.txt file
-  and return a tuple with the following elements: (1) a dictionary that maps
-  packages to certs (based on the "certificate" and "private_key" attributes
-  in the file. (2) A string representing the extension of compressed APKs in
-  the target files (e.g ".gz" ".bro")."""
+  """Parses the APK certs info from a given target-files zip.
+
+  Given a target-files ZipFile, parses the META/apkcerts.txt entry and returns a
+  tuple with the following elements: (1) a dictionary that maps packages to
+  certs (based on the "certificate" and "private_key" attributes in the file);
+  (2) a string representing the extension of compressed APKs in the target files
+  (e.g. ".gz", ".bro").
+
+  Args:
+    tf_zip: The input target_files ZipFile (already open).
+
+  Returns:
+    (certmap, ext): certmap is a dictionary that maps packages to certs; ext is
+        the extension string of compressed APKs (e.g. ".gz"), or None if there's
+        no compressed APKs.
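+
+  A line in META/apkcerts.txt looks like the following (hypothetical values):
+    name="Foo.apk" certificate="certs/app.x509.pem" private_key="certs/app.pk8"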
+  """
   certmap = {}
   compressed_extension = None
 
@@ -813,41 +823,51 @@
     line = line.strip()
     if not line:
       continue
-    m = re.match(r'^name="(?P<NAME>.*)"\s+certificate="(?P<CERT>.*)"\s+'
-                 r'private_key="(?P<PRIVKEY>.*?)"(\s+compressed="(?P<COMPRESSED>.*)")?$',
-                 line)
-    if m:
-      matches = m.groupdict()
-      cert = matches["CERT"]
-      privkey = matches["PRIVKEY"]
-      name = matches["NAME"]
-      this_compressed_extension = matches["COMPRESSED"]
-      public_key_suffix_len = len(OPTIONS.public_key_suffix)
-      private_key_suffix_len = len(OPTIONS.private_key_suffix)
-      if cert in SPECIAL_CERT_STRINGS and not privkey:
-        certmap[name] = cert
-      elif (cert.endswith(OPTIONS.public_key_suffix) and
-            privkey.endswith(OPTIONS.private_key_suffix) and
-            cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]):
-        certmap[name] = cert[:-public_key_suffix_len]
-      else:
-        raise ValueError("failed to parse line from apkcerts.txt:\n" + line)
-      if this_compressed_extension:
-        # Only count the installed files.
-        filename = name + '.' + this_compressed_extension
-        if filename not in installed_files:
-          continue
-        # Make sure that all the values in the compression map have the same
-        # extension. We don't support multiple compression methods in the same
-        # system image.
-        if compressed_extension:
-          if this_compressed_extension != compressed_extension:
-            raise ValueError("multiple compressed extensions : %s vs %s",
-                             (compressed_extension, this_compressed_extension))
-        else:
-          compressed_extension = this_compressed_extension
+    m = re.match(
+        r'^name="(?P<NAME>.*)"\s+certificate="(?P<CERT>.*)"\s+'
+        r'private_key="(?P<PRIVKEY>.*?)"(\s+compressed="(?P<COMPRESSED>.*)")?$',
+        line)
+    if not m:
+      continue
 
-  return (certmap, ("." + compressed_extension) if compressed_extension else None)
+    matches = m.groupdict()
+    cert = matches["CERT"]
+    privkey = matches["PRIVKEY"]
+    name = matches["NAME"]
+    this_compressed_extension = matches["COMPRESSED"]
+
+    public_key_suffix_len = len(OPTIONS.public_key_suffix)
+    private_key_suffix_len = len(OPTIONS.private_key_suffix)
+    if cert in SPECIAL_CERT_STRINGS and not privkey:
+      certmap[name] = cert
+    elif (cert.endswith(OPTIONS.public_key_suffix) and
+          privkey.endswith(OPTIONS.private_key_suffix) and
+          cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]):
+      certmap[name] = cert[:-public_key_suffix_len]
+    else:
+      raise ValueError("Failed to parse line from apkcerts.txt:\n" + line)
+
+    if not this_compressed_extension:
+      continue
+
+    # Only count the installed files.
+    filename = name + '.' + this_compressed_extension
+    if filename not in installed_files:
+      continue
+
+    # Make sure that all the values in the compression map have the same
+    # extension. We don't support multiple compression methods in the same
+    # system image.
+    if compressed_extension:
+      if this_compressed_extension != compressed_extension:
+        raise ValueError(
+            "Multiple compressed extensions: {} vs {}".format(
+                compressed_extension, this_compressed_extension))
+    else:
+      compressed_extension = this_compressed_extension
+
+  return (certmap,
+          ("." + compressed_extension) if compressed_extension else None)
 
 
 COMMON_DOCSTRING = """
@@ -955,12 +975,24 @@
   return fn
 
 
+def MakeTempDir(prefix='tmp', suffix=''):
+  """Makes a temporary dir that will be cleaned up with a call to Cleanup().
+
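+  Args:
+    prefix: The prefix of the basename of the temporary dir, 'tmp' by default.
+    suffix: The suffix of the basename of the temporary dir, empty by default.
+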
+  Returns:
+    The absolute pathname of the new directory.
+  """
+  dir_name = tempfile.mkdtemp(suffix=suffix, prefix=prefix)
+  OPTIONS.tempfiles.append(dir_name)
+  return dir_name
+
+
 def Cleanup():
   for i in OPTIONS.tempfiles:
     if os.path.isdir(i):
-      shutil.rmtree(i)
+      shutil.rmtree(i, ignore_errors=True)
     else:
       os.remove(i)
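+  # Clear the list so that a second call to Cleanup() is a no-op.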
+  del OPTIONS.tempfiles[:]
 
 
 class PasswordManager(object):
diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py
index 0c44faf..7a81928 100644
--- a/tools/releasetools/edify_generator.py
+++ b/tools/releasetools/edify_generator.py
@@ -77,14 +77,14 @@
     with temporary=True) to this one."""
     self.script.extend(other.script)
 
-  def AssertOemProperty(self, name, values):
+  def AssertOemProperty(self, name, values, oem_no_mount):
     """Assert that a property on the OEM paritition matches allowed values."""
     if not name:
       raise ValueError("must specify an OEM property")
     if not values:
       raise ValueError("must specify the OEM value")
-    get_prop_command = None
-    if common.OPTIONS.oem_no_mount:
+
+    if oem_no_mount:
       get_prop_command = 'getprop("%s")' % name
     else:
       get_prop_command = 'file_getprop("/oem/oem.prop", "%s")' % name
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index d5ac922..88cb741 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -49,8 +49,10 @@
 
   -o  (--oem_settings)  <main_file[,additional_files...]>
       Comma-separated list of files used to specify the expected OEM-specific
-      properties on the OEM partition of the intended device.
-      Multiple expected values can be used by providing multiple files.
+      properties on the OEM partition of the intended device. Multiple expected
+      values can be used by providing multiple files. Only the first dict will
+      be used to compute the fingerprint, while the rest will be used to assert
+      OEM-specific properties.
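+      For example (hypothetical file names): -o oem-base.prop,oem-variant.prop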
 
   --oem_no_mount
       For devices with OEM-specific properties but without an OEM partition,
@@ -128,16 +130,11 @@
 
 from __future__ import print_function
 
-import sys
-
-if sys.hexversion < 0x02070000:
-  print("Python 2.7 or newer is required.", file=sys.stderr)
-  sys.exit(1)
-
 import multiprocessing
 import os.path
-import subprocess
 import shlex
+import subprocess
+import sys
 import tempfile
 import zipfile
 
@@ -145,6 +142,11 @@
 import edify_generator
 import sparse_img
 
+if sys.hexversion < 0x02070000:
+  print("Python 2.7 or newer is required.", file=sys.stderr)
+  sys.exit(1)
+
+
 OPTIONS = common.OPTIONS
 OPTIONS.package_key = None
 OPTIONS.incremental_source = None
@@ -163,7 +165,6 @@
 OPTIONS.updater_binary = None
 OPTIONS.oem_source = None
 OPTIONS.oem_no_mount = False
-OPTIONS.fallback_to_full = True
 OPTIONS.full_radio = False
 OPTIONS.full_bootloader = False
 # Stash size cannot exceed cache_size * threshold.
@@ -179,6 +180,136 @@
 UNZIP_PATTERN = ['IMAGES/*', 'META/*']
 
 
+class BuildInfo(object):
+  """A class that holds the information for a given build.
+
+  This class wraps up the property querying for a given source or target build.
+  It abstracts away the logic of handling OEM-specific properties, and caches
+  the commonly used properties such as fingerprint.
+
+  There are two types of info dicts: a) build-time info dict, which is generated
+  at build time (i.e. included in a target_files zip); b) OEM info dict that is
+  specified at package generation time (via command line argument
+  '--oem_settings'). If a build doesn't use OEM-specific properties (i.e. it
+  doesn't have "oem_fingerprint_properties" in its build-time info dict), all
+  the queries are answered based on the build-time info dict only. Otherwise,
+  when using OEM-specific properties, some of them are calculated from the two
+  info dicts.
+
+  Users can query properties in the same way as with a dict (e.g.
+  info['fstab']), or query build properties via GetBuildProp() or
+  GetVendorBuildProp().
+
+  Attributes:
+    info_dict: The build-time info dict.
+    is_ab: Whether it's a build that uses A/B OTA.
+    oem_dicts: A list of OEM dicts.
+    oem_props: A list of OEM properties that should be read from OEM dicts; None
+        if the build doesn't use any OEM-specific property.
+    fingerprint: The fingerprint of the build, which would be calculated based
+        on OEM properties if applicable.
+    device: The device name, which could come from OEM dicts if applicable.
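+
+  A typical use (an illustrative sketch, assuming the info dicts were loaded):
+    target_info = BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
+    print(target_info.fingerprint)
+    print(target_info.GetBuildProp("ro.build.date.utc"))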
+  """
+
+  def __init__(self, info_dict, oem_dicts):
+    """Initializes a BuildInfo instance with the given dicts.
+
+    Arguments:
+      info_dict: The build-time info dict.
+      oem_dicts: A list of OEM dicts (which is parsed from --oem_settings). Note
+          that it always uses the first dict to calculate the fingerprint or the
+          device name. The rest would be used for asserting OEM properties only
+          (e.g. one package can be installed on one of these devices).
+    """
+    self.info_dict = info_dict
+    self.oem_dicts = oem_dicts
+
+    self._is_ab = info_dict.get("ab_update") == "true"
+    self._oem_props = info_dict.get("oem_fingerprint_properties")
+
+    if self._oem_props:
+      assert oem_dicts, "OEM source required for this build"
+
+    # These two should be computed only after setting self._oem_props.
+    self._device = self.GetOemProperty("ro.product.device")
+    self._fingerprint = self.CalculateFingerprint()
+
+  @property
+  def is_ab(self):
+    return self._is_ab
+
+  @property
+  def device(self):
+    return self._device
+
+  @property
+  def fingerprint(self):
+    return self._fingerprint
+
+  @property
+  def oem_props(self):
+    return self._oem_props
+
+  def __getitem__(self, key):
+    return self.info_dict[key]
+
+  def get(self, key, default=None):
+    return self.info_dict.get(key, default)
+
+  def GetBuildProp(self, prop):
+    """Returns the inquired build property."""
+    try:
+      return self.info_dict.get("build.prop", {})[prop]
+    except KeyError:
+      raise common.ExternalError("couldn't find %s in build.prop" % (prop,))
+
+  def GetVendorBuildProp(self, prop):
+    """Returns the inquired vendor build property."""
+    try:
+      return self.info_dict.get("vendor.build.prop", {})[prop]
+    except KeyError:
+      raise common.ExternalError(
+          "couldn't find %s in vendor.build.prop" % (prop,))
+
+  def GetOemProperty(self, key):
+    if self.oem_props is not None and key in self.oem_props:
+      return self.oem_dicts[0][key]
+    return self.GetBuildProp(key)
+
+  def CalculateFingerprint(self):
+    if self.oem_props is None:
+      return self.GetBuildProp("ro.build.fingerprint")
+    return "%s/%s/%s:%s" % (
+        self.GetOemProperty("ro.product.brand"),
+        self.GetOemProperty("ro.product.name"),
+        self.GetOemProperty("ro.product.device"),
+        self.GetBuildProp("ro.build.thumbprint"))
+
+  def WriteMountOemScript(self, script):
+    assert self.oem_props is not None
+    recovery_mount_options = self.info_dict.get("recovery_mount_options")
+    script.Mount("/oem", recovery_mount_options)
+
+  def WriteDeviceAssertions(self, script, oem_no_mount):
+    # Read the property directly if not using OEM properties.
+    if not self.oem_props:
+      script.AssertDevice(self.device)
+      return
+
+    # Otherwise assert OEM properties.
+    if not self.oem_dicts:
+      raise common.ExternalError(
+          "No OEM file provided to answer expected assertions")
+
+    for prop in self.oem_props.split():
+      values = []
+      for oem_dict in self.oem_dicts:
+        if prop in oem_dict:
+          values.append(oem_dict[prop])
+      if not values:
+        raise common.ExternalError(
+            "The OEM file is missing the property %s" % (prop,))
+      script.AssertOemProperty(prop, values, oem_no_mount)
+
+
 def SignOutput(temp_zip_name, output_zip_name):
   pw = OPTIONS.key_passwords[OPTIONS.package_key]
 
@@ -186,37 +317,15 @@
                   whole_file=True)
 
 
-def AppendAssertions(script, info_dict, oem_dicts=None):
-  oem_props = info_dict.get("oem_fingerprint_properties")
-  if not oem_props:
-    device = GetBuildProp("ro.product.device", info_dict)
-    script.AssertDevice(device)
-  else:
-    if not oem_dicts:
-      raise common.ExternalError(
-          "No OEM file provided to answer expected assertions")
-    for prop in oem_props.split():
-      values = []
-      for oem_dict in oem_dicts:
-        if oem_dict.get(prop):
-          values.append(oem_dict[prop])
-      if not values:
-        raise common.ExternalError(
-            "The OEM file is missing the property %s" % prop)
-      script.AssertOemProperty(prop, values)
-
-
-def _LoadOemDicts(script, recovery_mount_options=None):
+def _LoadOemDicts(oem_source):
   """Returns the list of loaded OEM properties dict."""
-  oem_dicts = None
-  if OPTIONS.oem_source is None:
-    raise common.ExternalError("OEM source required for this build")
-  if not OPTIONS.oem_no_mount and script:
-    script.Mount("/oem", recovery_mount_options)
+  if not oem_source:
+    return None
+
   oem_dicts = []
-  for oem_file in OPTIONS.oem_source:
-    oem_dicts.append(common.LoadDictionaryFromLines(
-        open(oem_file).readlines()))
+  for oem_file in oem_source:
+    with open(oem_file) as fp:
+      oem_dicts.append(common.LoadDictionaryFromLines(fp.readlines()))
   return oem_dicts
 
 
@@ -267,25 +376,30 @@
     return False
 
 
-def HasTrebleEnabled(target_files_zip, info_dict):
+def HasTrebleEnabled(target_files_zip, target_info):
   return (HasVendorPartition(target_files_zip) and
-          GetBuildProp("ro.treble.enabled", info_dict) == "true")
+          target_info.GetBuildProp("ro.treble.enabled") == "true")
 
 
-def GetOemProperty(name, oem_props, oem_dict, info_dict):
-  if oem_props is not None and name in oem_props:
-    return oem_dict[name]
-  return GetBuildProp(name, info_dict)
+def WriteFingerprintAssertion(script, target_info, source_info):
+  source_oem_props = source_info.oem_props
+  target_oem_props = target_info.oem_props
 
-
-def CalculateFingerprint(oem_props, oem_dict, info_dict):
-  if oem_props is None:
-    return GetBuildProp("ro.build.fingerprint", info_dict)
-  return "%s/%s/%s:%s" % (
-      GetOemProperty("ro.product.brand", oem_props, oem_dict, info_dict),
-      GetOemProperty("ro.product.name", oem_props, oem_dict, info_dict),
-      GetOemProperty("ro.product.device", oem_props, oem_dict, info_dict),
-      GetBuildProp("ro.build.thumbprint", info_dict))
+  if source_oem_props is None and target_oem_props is None:
+    script.AssertSomeFingerprint(
+        source_info.fingerprint, target_info.fingerprint)
+  elif source_oem_props is not None and target_oem_props is not None:
+    script.AssertSomeThumbprint(
+        target_info.GetBuildProp("ro.build.thumbprint"),
+        source_info.GetBuildProp("ro.build.thumbprint"))
+  elif source_oem_props is None and target_oem_props is not None:
+    script.AssertFingerprintOrThumbprint(
+        source_info.fingerprint,
+        target_info.GetBuildProp("ro.build.thumbprint"))
+  else:
+    script.AssertFingerprintOrThumbprint(
+        target_info.fingerprint,
+        source_info.GetBuildProp("ro.build.thumbprint"))
 
 
 def GetImage(which, tmpdir):
@@ -313,9 +427,8 @@
   return sparse_img.SparseImage(path, mappath, clobbered_blocks)
 
 
-def AddCompatibilityArchiveIfTrebleEnabled(target_zip, output_zip,
-                                           target_info_dict,
-                                           source_info_dict=None):
+def AddCompatibilityArchiveIfTrebleEnabled(target_zip, output_zip, target_info,
+                                           source_info=None):
   """Adds compatibility info into the output zip if it's Treble-enabled target.
 
   Metadata used for on-device compatibility verification is retrieved from
@@ -328,9 +441,9 @@
   Args:
     target_zip: Zip file containing the source files to be included for OTA.
     output_zip: Zip file that will be sent for OTA.
-    target_info_dict: The dict that holds the target build info.
-    source_info_dict: The dict that holds the source build info, if generating
-        an incremental OTA; None otherwise.
+    target_info: The BuildInfo instance that holds the target build info.
+    source_info: The BuildInfo instance that holds the source build info, if
+        generating an incremental OTA; None otherwise.
   """
 
   def AddCompatibilityArchive(system_updated, vendor_updated):
@@ -353,8 +466,8 @@
 
     # Create new archive.
     compatibility_archive = tempfile.NamedTemporaryFile()
-    compatibility_archive_zip = zipfile.ZipFile(compatibility_archive, "w",
-        compression=zipfile.ZIP_DEFLATED)
+    compatibility_archive_zip = zipfile.ZipFile(
+        compatibility_archive, "w", compression=zipfile.ZIP_DEFLATED)
 
     # Add metadata.
     for file_name in compatibility_files:
@@ -375,59 +488,50 @@
 
   # Will only proceed if the target has enabled the Treble support (as well as
   # having a /vendor partition).
-  if not HasTrebleEnabled(target_zip, target_info_dict):
+  if not HasTrebleEnabled(target_zip, target_info):
     return
 
   # We don't support OEM thumbprint in Treble world (which calculates
   # fingerprints in a different way as shown in CalculateFingerprint()).
-  assert not target_info_dict.get("oem_fingerprint_properties")
+  assert not target_info.oem_props
 
   # Full OTA carries the info for system/vendor both.
-  if source_info_dict is None:
+  if source_info is None:
     AddCompatibilityArchive(True, True)
     return
 
-  assert not source_info_dict.get("oem_fingerprint_properties")
+  assert not source_info.oem_props
 
-  source_fp = GetBuildProp("ro.build.fingerprint", source_info_dict)
-  target_fp = GetBuildProp("ro.build.fingerprint", target_info_dict)
+  source_fp = source_info.fingerprint
+  target_fp = target_info.fingerprint
   system_updated = source_fp != target_fp
 
-  source_fp_vendor = GetVendorBuildProp("ro.vendor.build.fingerprint",
-                                        source_info_dict)
-  target_fp_vendor = GetVendorBuildProp("ro.vendor.build.fingerprint",
-                                        target_info_dict)
+  source_fp_vendor = source_info.GetVendorBuildProp(
+      "ro.vendor.build.fingerprint")
+  target_fp_vendor = target_info.GetVendorBuildProp(
+      "ro.vendor.build.fingerprint")
   vendor_updated = source_fp_vendor != target_fp_vendor
 
   AddCompatibilityArchive(system_updated, vendor_updated)
 
 
 def WriteFullOTAPackage(input_zip, output_zip):
-  # TODO: how to determine this?  We don't know what version it will
-  # be installed on top of. For now, we expect the API just won't
-  # change very often. Similarly for fstab, it might have changed
-  # in the target build.
-  script = edify_generator.EdifyGenerator(3, OPTIONS.info_dict)
+  target_info = BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
 
-  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
-  oem_dicts = None
-  if oem_props:
-    recovery_mount_options = OPTIONS.info_dict.get("recovery_mount_options")
-    oem_dicts = _LoadOemDicts(script, recovery_mount_options)
+  # We don't know what version it will be installed on top of. We expect the API
+  # just won't change very often. Similarly for fstab, it might have changed in
+  # the target build.
+  target_api_version = target_info["recovery_api_version"]
+  script = edify_generator.EdifyGenerator(target_api_version, target_info)
 
-  target_fp = CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
-                                   OPTIONS.info_dict)
-  metadata = {
-      "post-build": target_fp,
-      "pre-device": GetOemProperty("ro.product.device", oem_props,
-                                   oem_dicts and oem_dicts[0],
-                                   OPTIONS.info_dict),
-      "post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict),
-  }
+  if target_info.oem_props and not OPTIONS.oem_no_mount:
+    target_info.WriteMountOemScript(script)
+
+  metadata = GetPackageMetadata(target_info)
 
   device_specific = common.DeviceSpecificParams(
       input_zip=input_zip,
-      input_version=OPTIONS.info_dict["recovery_api_version"],
+      input_version=target_api_version,
       output_zip=output_zip,
       script=script,
       input_tmp=OPTIONS.input_tmp,
@@ -436,13 +540,12 @@
 
   assert HasRecoveryPatch(input_zip)
 
-  metadata["ota-type"] = "BLOCK"
-
-  ts = GetBuildProp("ro.build.date.utc", OPTIONS.info_dict)
-  ts_text = GetBuildProp("ro.build.date", OPTIONS.info_dict)
+  # Assertions (e.g. downgrade check, device properties check).
+  ts = target_info.GetBuildProp("ro.build.date.utc")
+  ts_text = target_info.GetBuildProp("ro.build.date")
   script.AssertOlderBuild(ts, ts_text)
 
-  AppendAssertions(script, OPTIONS.info_dict, oem_dicts)
+  target_info.WriteDeviceAssertions(script, OPTIONS.oem_no_mount)
   device_specific.FullOTA_Assertions()
 
   # Two-step package strategy (in chronological order, which is *not*
@@ -468,9 +571,9 @@
   recovery_img = common.GetBootableImage("recovery.img", "recovery.img",
                                          OPTIONS.input_tmp, "RECOVERY")
   if OPTIONS.two_step:
-    if not OPTIONS.info_dict.get("multistage_support", None):
+    if not target_info.get("multistage_support"):
       assert False, "two-step packages not supported by this build"
-    fs = OPTIONS.info_dict["fstab"]["/misc"]
+    fs = target_info["fstab"]["/misc"]
     assert fs.fs_type.upper() == "EMMC", \
         "two-step packages only supported on devices with EMMC /misc partitions"
     bcb_dev = {"bcb_dev": fs.device}
@@ -492,7 +595,7 @@
     script.Comment("Stage 3/3")
 
   # Dump fingerprints
-  script.Print("Target: %s" % target_fp)
+  script.Print("Target: {}".format(target_info.fingerprint))
 
   device_specific.FullOTA_InstallBegin()
 
@@ -525,10 +628,9 @@
     vendor_diff = common.BlockDifference("vendor", vendor_tgt)
     vendor_diff.WriteScript(script, output_zip)
 
-  AddCompatibilityArchiveIfTrebleEnabled(input_zip, output_zip,
-                                         OPTIONS.info_dict)
+  AddCompatibilityArchiveIfTrebleEnabled(input_zip, output_zip, target_info)
 
-  common.CheckSize(boot_img.data, "boot.img", OPTIONS.info_dict)
+  common.CheckSize(boot_img.data, "boot.img", target_info)
   common.ZipWriteStr(output_zip, "boot.img", boot_img.data)
 
   script.ShowProgress(0.05, 5)
@@ -575,29 +677,12 @@
                      compress_type=zipfile.ZIP_STORED)
 
 
-def GetBuildProp(prop, info_dict):
-  """Returns the inquired build property from a given info_dict."""
-  try:
-    return info_dict.get("build.prop", {})[prop]
-  except KeyError:
-    raise common.ExternalError("couldn't find %s in build.prop" % (prop,))
-
-
-def GetVendorBuildProp(prop, info_dict):
-  """Returns the inquired vendor build property from a given info_dict."""
-  try:
-    return info_dict.get("vendor.build.prop", {})[prop]
-  except KeyError:
-    raise common.ExternalError(
-        "couldn't find %s in vendor.build.prop" % (prop,))
-
-
-def HandleDowngradeMetadata(metadata):
+def HandleDowngradeMetadata(metadata, target_info, source_info):
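+  """Updates the metadata dict with downgrade / timestamp-override info.
+
+  Raises a RuntimeError if the build timestamps and the --downgrade /
+  --override_timestamp flags are inconsistent. As a hypothetical example,
+  with pre_timestamp=1500000000 and post_timestamp=1400000000 (a downgrade),
+  passing --override_timestamp writes "post-timestamp" as 1500000001 so that
+  the timestamp check on the device still passes.
+  """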
   # Only incremental OTAs are allowed to reach here.
   assert OPTIONS.incremental_source is not None
 
-  post_timestamp = GetBuildProp("ro.build.date.utc", OPTIONS.target_info_dict)
-  pre_timestamp = GetBuildProp("ro.build.date.utc", OPTIONS.source_info_dict)
+  post_timestamp = target_info.GetBuildProp("ro.build.date.utc")
+  pre_timestamp = source_info.GetBuildProp("ro.build.date.utc")
   is_downgrade = long(post_timestamp) < long(pre_timestamp)
 
   if OPTIONS.downgrade:
@@ -607,72 +692,104 @@
     metadata["ota-downgrade"] = "yes"
   elif OPTIONS.timestamp:
     if not is_downgrade:
-      raise RuntimeError("--timestamp specified but no timestamp hack needed: "
-                         "pre: %s, post: %s" % (pre_timestamp, post_timestamp))
+      raise RuntimeError("--override_timestamp specified but no timestamp hack "
+                         "needed: pre: %s, post: %s" % (pre_timestamp,
+                                                        post_timestamp))
     metadata["post-timestamp"] = str(long(pre_timestamp) + 1)
   else:
     if is_downgrade:
       raise RuntimeError("Downgrade detected based on timestamp check: "
-                         "pre: %s, post: %s. Need to specify --timestamp OR "
-                         "--downgrade to allow building the incremental." % (
-                             pre_timestamp, post_timestamp))
+                         "pre: %s, post: %s. Need to specify "
+                         "--override_timestamp OR --downgrade to allow "
+                         "building the incremental." % (pre_timestamp,
+                                                        post_timestamp))
     metadata["post-timestamp"] = post_timestamp
 
 
-def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip):
-  source_version = OPTIONS.source_info_dict["recovery_api_version"]
-  target_version = OPTIONS.target_info_dict["recovery_api_version"]
+def GetPackageMetadata(target_info, source_info=None):
+  """Generates and returns the metadata dict.
 
-  if source_version == 0:
-    print("WARNING: generating edify script for a source that "
-          "can't install it.")
-  script = edify_generator.EdifyGenerator(
-      source_version, OPTIONS.target_info_dict,
-      fstab=OPTIONS.source_info_dict["fstab"])
+  It generates a dict() that contains the info to be written into an OTA
+  package (META-INF/com/android/metadata). It also handles the detection of
+  downgrade / timestamp override / data wipe based on the global options.
 
-  source_oem_props = OPTIONS.source_info_dict.get("oem_fingerprint_properties")
-  target_oem_props = OPTIONS.target_info_dict.get("oem_fingerprint_properties")
-  oem_dicts = None
-  if source_oem_props or target_oem_props:
-    recovery_mount_options = OPTIONS.source_info_dict.get(
-        "recovery_mount_options")
-    oem_dicts = _LoadOemDicts(script, recovery_mount_options)
+  Args:
+    target_info: The BuildInfo instance that holds the target build info.
+    source_info: The BuildInfo instance that holds the source build info, or
+        None if generating full OTA.
+
+  Returns:
+    A dict to be written into package metadata entry.
+  """
+  assert isinstance(target_info, BuildInfo)
+  assert source_info is None or isinstance(source_info, BuildInfo)
 
   metadata = {
-      "pre-device": GetOemProperty("ro.product.device", source_oem_props,
-                                   oem_dicts and oem_dicts[0],
-                                   OPTIONS.source_info_dict),
-      "ota-type": "BLOCK",
+      'post-build' : target_info.fingerprint,
+      'post-build-incremental' : target_info.GetBuildProp(
+          'ro.build.version.incremental'),
   }
 
-  HandleDowngradeMetadata(metadata)
+  if target_info.is_ab:
+    metadata['ota-type'] = 'AB'
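+    # A/B updates stream to the unused slot and don't stash on /cache.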
+    metadata['ota-required-cache'] = '0'
+  else:
+    metadata['ota-type'] = 'BLOCK'
+
+  if OPTIONS.wipe_user_data:
+    metadata['ota-wipe'] = 'yes'
+
+  is_incremental = source_info is not None
+  if is_incremental:
+    metadata['pre-build'] = source_info.fingerprint
+    metadata['pre-build-incremental'] = source_info.GetBuildProp(
+        'ro.build.version.incremental')
+    metadata['pre-device'] = source_info.device
+  else:
+    metadata['pre-device'] = target_info.device
+
+  # Detect downgrades, or fill in the post-timestamp.
+  if is_incremental:
+    HandleDowngradeMetadata(metadata, target_info, source_info)
+  else:
+    metadata['post-timestamp'] = target_info.GetBuildProp('ro.build.date.utc')
+
+  return metadata
+
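+# As a sketch (hypothetical values), a full-OTA metadata dict looks like:
+#     {
+#         'ota-type' : 'BLOCK',
+#         'pre-device' : 'product-device',
+#         'post-build' : 'brand/product-name/product-device:...',
+#         'post-build-incremental' : '4567890',
+#         'post-timestamp' : '1500000000',
+#     }
+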
+
+def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip):
+  target_info = BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
+  source_info = BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
+
+  target_api_version = target_info["recovery_api_version"]
+  source_api_version = source_info["recovery_api_version"]
+  if source_api_version == 0:
+    print("WARNING: generating edify script for a source that "
+          "can't install it.")
+
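+  # The script is interpreted by the source build's recovery, so generate it
+  # against the source API version and the source fstab.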
+  script = edify_generator.EdifyGenerator(
+      source_api_version, target_info, fstab=source_info["fstab"])
+
+  if target_info.oem_props or source_info.oem_props:
+    if not OPTIONS.oem_no_mount:
+      source_info.WriteMountOemScript(script)
+
+  metadata = GetPackageMetadata(target_info, source_info)
 
   device_specific = common.DeviceSpecificParams(
       source_zip=source_zip,
-      source_version=source_version,
+      source_version=source_api_version,
       target_zip=target_zip,
-      target_version=target_version,
+      target_version=target_api_version,
       output_zip=output_zip,
       script=script,
       metadata=metadata,
-      info_dict=OPTIONS.source_info_dict)
-
-  source_fp = CalculateFingerprint(source_oem_props, oem_dicts and oem_dicts[0],
-                                   OPTIONS.source_info_dict)
-  target_fp = CalculateFingerprint(target_oem_props, oem_dicts and oem_dicts[0],
-                                   OPTIONS.target_info_dict)
-  metadata["pre-build"] = source_fp
-  metadata["post-build"] = target_fp
-  metadata["pre-build-incremental"] = GetBuildProp(
-      "ro.build.version.incremental", OPTIONS.source_info_dict)
-  metadata["post-build-incremental"] = GetBuildProp(
-      "ro.build.version.incremental", OPTIONS.target_info_dict)
+      info_dict=source_info)
 
   source_boot = common.GetBootableImage(
-      "/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT",
-      OPTIONS.source_info_dict)
+      "/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT", source_info)
   target_boot = common.GetBootableImage(
-      "/tmp/boot.img", "boot.img", OPTIONS.target_tmp, "BOOT")
+      "/tmp/boot.img", "boot.img", OPTIONS.target_tmp, "BOOT", target_info)
   updating_boot = (not OPTIONS.two_step and
                    (source_boot.data != target_boot.data))
 
@@ -683,19 +800,18 @@
   system_tgt = GetImage("system", OPTIONS.target_tmp)
 
   blockimgdiff_version = max(
-      int(i) for i in
-      OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
+      int(i) for i in target_info.get("blockimgdiff_versions", "1").split(","))
   assert blockimgdiff_version >= 3
 
   # Check the first block of the source system partition for remount R/W only
   # if the filesystem is ext4.
-  system_src_partition = OPTIONS.source_info_dict["fstab"]["/system"]
+  system_src_partition = source_info["fstab"]["/system"]
   check_first_block = system_src_partition.fs_type == "ext4"
   # Disable using imgdiff for squashfs. 'imgdiff -z' expects input files to be
   # in zip formats. However with squashfs, a) all files are compressed in LZ4;
   # b) the blocks listed in block map may not contain all the bytes for a given
   # file (because they're rounded to be 4K-aligned).
-  system_tgt_partition = OPTIONS.target_info_dict["fstab"]["/system"]
+  system_tgt_partition = target_info["fstab"]["/system"]
   disable_imgdiff = (system_src_partition.fs_type == "squashfs" or
                      system_tgt_partition.fs_type == "squashfs")
   system_diff = common.BlockDifference("system", system_tgt, system_src,
@@ -711,7 +827,7 @@
 
     # Check first block of vendor partition for remount R/W only if
     # disk type is ext4
-    vendor_partition = OPTIONS.source_info_dict["fstab"]["/vendor"]
+    vendor_partition = source_info["fstab"]["/vendor"]
     check_first_block = vendor_partition.fs_type == "ext4"
     disable_imgdiff = vendor_partition.fs_type == "squashfs"
     vendor_diff = common.BlockDifference("vendor", vendor_tgt, vendor_src,
@@ -722,10 +838,10 @@
     vendor_diff = None
 
   AddCompatibilityArchiveIfTrebleEnabled(
-      target_zip, output_zip, OPTIONS.target_info_dict,
-      OPTIONS.source_info_dict)
+      target_zip, output_zip, target_info, source_info)
 
-  AppendAssertions(script, OPTIONS.target_info_dict, oem_dicts)
+  # Assertions (e.g. device properties check).
+  target_info.WriteDeviceAssertions(script, OPTIONS.oem_no_mount)
   device_specific.IncrementalOTA_Assertions()
 
   # Two-step incremental package strategy (in chronological order,
@@ -751,12 +867,12 @@
   #    (allow recovery to mark itself finished and reboot)
 
   if OPTIONS.two_step:
-    if not OPTIONS.source_info_dict.get("multistage_support", None):
+    if not source_info.get("multistage_support"):
       assert False, "two-step packages not supported by this build"
-    fs = OPTIONS.source_info_dict["fstab"]["/misc"]
+    fs = source_info["fstab"]["/misc"]
     assert fs.fs_type.upper() == "EMMC", \
         "two-step packages only supported on devices with EMMC /misc partitions"
-    bcb_dev = {"bcb_dev": fs.device}
+    bcb_dev = {"bcb_dev" : fs.device}
     common.ZipWriteStr(output_zip, "recovery.img", target_recovery.data)
     script.AppendExtra("""
 if get_stage("%(bcb_dev)s") == "2/3" then
@@ -776,27 +892,14 @@
     script.Comment("Stage 1/3")
 
   # Dump fingerprints
-  script.Print("Source: %s" % (source_fp,))
-  script.Print("Target: %s" % (target_fp,))
+  script.Print("Source: {}".format(source_info.fingerprint))
+  script.Print("Target: {}".format(target_info.fingerprint))
 
   script.Print("Verifying current system...")
 
   device_specific.IncrementalOTA_VerifyBegin()
 
-  if source_oem_props is None and target_oem_props is None:
-    script.AssertSomeFingerprint(source_fp, target_fp)
-  elif source_oem_props is not None and target_oem_props is not None:
-    script.AssertSomeThumbprint(
-        GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict),
-        GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
-  elif source_oem_props is None and target_oem_props is not None:
-    script.AssertFingerprintOrThumbprint(
-        source_fp,
-        GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict))
-  else:
-    script.AssertFingerprintOrThumbprint(
-        target_fp,
-        GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
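+  # WriteFingerprintAssertion emits AssertSomeFingerprint,
+  # AssertSomeThumbprint or AssertFingerprintOrThumbprint, depending on
+  # whether the source and/or target build relies on OEM properties.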
+  WriteFingerprintAssertion(script, target_info, source_info)
 
   # Check the required cache size (i.e. stashed blocks).
   size = []
@@ -806,8 +909,7 @@
     size.append(vendor_diff.required_cache)
 
   if updating_boot:
-    boot_type, boot_device = common.GetTypeAndDevice(
-        "/boot", OPTIONS.source_info_dict)
+    boot_type, boot_device = common.GetTypeAndDevice("/boot", source_info)
     d = common.Difference(target_boot, source_boot)
     _, _, d = d.ComputePatch()
     if d is None:
@@ -984,7 +1086,8 @@
     cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
     rsa_key = common.MakeTempFile(prefix="key-", suffix=".key")
     cmd.extend(["-out", rsa_key])
-    p1 = common.Run(cmd, verbose=False, stdout=log_file, stderr=subprocess.STDOUT)
+    p1 = common.Run(cmd, verbose=False, stdout=log_file,
+                    stderr=subprocess.STDOUT)
     p1.communicate()
     assert p1.returncode == 0, "openssl pkcs8 failed"
 
@@ -993,35 +1096,15 @@
   output_zip = zipfile.ZipFile(temp_zip_file, "w",
                                compression=zipfile.ZIP_DEFLATED)
 
-  # Metadata to comply with Android OTA package format.
-  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties", None)
-  oem_dicts = None
-  if oem_props:
-    oem_dicts = _LoadOemDicts(None)
-
-  metadata = {
-      "post-build": CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
-                                         OPTIONS.info_dict),
-      "post-build-incremental" : GetBuildProp("ro.build.version.incremental",
-                                              OPTIONS.info_dict),
-      "pre-device": GetOemProperty("ro.product.device", oem_props,
-                                   oem_dicts and oem_dicts[0],
-                                   OPTIONS.info_dict),
-      "ota-required-cache": "0",
-      "ota-type": "AB",
-  }
-
   if source_file is not None:
-    metadata["pre-build"] = CalculateFingerprint(oem_props,
-                                                 oem_dicts and oem_dicts[0],
-                                                 OPTIONS.source_info_dict)
-    metadata["pre-build-incremental"] = GetBuildProp(
-        "ro.build.version.incremental", OPTIONS.source_info_dict)
-
-    HandleDowngradeMetadata(metadata)
+    target_info = BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
+    source_info = BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
   else:
-    metadata["post-timestamp"] = GetBuildProp(
-        "ro.build.date.utc", OPTIONS.info_dict)
+    target_info = BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
+    source_info = None
+
+  # Metadata to comply with Android OTA package format.
+  metadata = GetPackageMetadata(target_info, source_info)
 
   # 1. Generate payload.
   payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
@@ -1120,23 +1203,23 @@
   # If dm-verity is supported for the device, copy contents of care_map
   # into A/B OTA package.
   target_zip = zipfile.ZipFile(target_file, "r")
-  if (OPTIONS.info_dict.get("verity") == "true" or
-      OPTIONS.info_dict.get("avb_enable") == "true"):
+  if (target_info.get("verity") == "true" or
+      target_info.get("avb_enable") == "true"):
     care_map_path = "META/care_map.txt"
     namelist = target_zip.namelist()
     if care_map_path in namelist:
       care_map_data = target_zip.read(care_map_path)
       common.ZipWriteStr(output_zip, "care_map.txt", care_map_data,
-          compress_type=zipfile.ZIP_STORED)
+                         compress_type=zipfile.ZIP_STORED)
     else:
       print("Warning: cannot find care map file in target_file package")
 
-  # OPTIONS.source_info_dict must be None for incrementals.
+  # source_info must be None for full OTAs.
   if source_file is None:
-    assert OPTIONS.source_info_dict is None
+    assert source_info is None
 
   AddCompatibilityArchiveIfTrebleEnabled(
-      target_zip, output_zip, OPTIONS.info_dict, OPTIONS.source_info_dict)
+      target_zip, output_zip, target_info, source_info)
 
   common.ZipClose(target_zip)
 
@@ -1221,8 +1304,6 @@
       OPTIONS.block_based = True
     elif o in ("-b", "--binary"):
       OPTIONS.updater_binary = a
-    elif o in ("--no_fallback_to_full",):
-      OPTIONS.fallback_to_full = False
     elif o == "--stash_threshold":
       try:
         OPTIONS.stash_threshold = float(a)
@@ -1260,7 +1341,6 @@
                                  "oem_settings=",
                                  "oem_no_mount",
                                  "verify",
-                                 "no_fallback_to_full",
                                  "stash_threshold=",
                                  "log_diff=",
                                  "payload_signer=",
@@ -1286,14 +1366,35 @@
   assert not (OPTIONS.downgrade and OPTIONS.timestamp), \
       "Cannot have --downgrade AND --override_timestamp both"
 
-  # Load the dict file from the zip directly to have a peek at the OTA type.
-  # For packages using A/B update, unzipping is not needed.
+  # Load the build info dicts from the zip directly or from the extracted
+  # input directory. There is no need to unzip the entire target-files zips
+  # here, since brillo_update_payload handles the unzipping on its own for
+  # A/B OTAs. When loading the info dicts, we don't need to provide the
+  # second parameter to common.LoadInfoDict(). Specifying the second parameter
+  # allows replacing some properties with their actual paths, such as
+  # 'selinux_fc' and 'ramdisk_dir', which won't be used during OTA generation.
   if OPTIONS.extracted_input is not None:
-    OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input, OPTIONS.extracted_input)
+    OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input)
   else:
-    input_zip = zipfile.ZipFile(args[0], "r")
-    OPTIONS.info_dict = common.LoadInfoDict(input_zip)
-    common.ZipClose(input_zip)
+    with zipfile.ZipFile(args[0], 'r') as input_zip:
+      OPTIONS.info_dict = common.LoadInfoDict(input_zip)
+
+  if OPTIONS.verbose:
+    print("--- target info ---")
+    common.DumpInfoDict(OPTIONS.info_dict)
+
+  # Load the source build dict if applicable.
+  if OPTIONS.incremental_source is not None:
+    OPTIONS.target_info_dict = OPTIONS.info_dict
+    with zipfile.ZipFile(OPTIONS.incremental_source, 'r') as source_zip:
+      OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
+
+    if OPTIONS.verbose:
+      print("--- source info ---")
+      common.DumpInfoDict(OPTIONS.source_info_dict)
+
+  # Load OEM dicts if provided.
+  OPTIONS.oem_dicts = _LoadOemDicts(OPTIONS.oem_source)
 
   ab_update = OPTIONS.info_dict.get("ab_update") == "true"
 
@@ -1309,20 +1410,6 @@
     OPTIONS.key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
 
   if ab_update:
-    if OPTIONS.incremental_source is not None:
-      OPTIONS.target_info_dict = OPTIONS.info_dict
-      source_zip = zipfile.ZipFile(OPTIONS.incremental_source, "r")
-      OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
-      common.ZipClose(source_zip)
-
-    if OPTIONS.verbose:
-      print("--- target info ---")
-      common.DumpInfoDict(OPTIONS.info_dict)
-
-      if OPTIONS.incremental_source is not None:
-        print("--- source info ---")
-        common.DumpInfoDict(OPTIONS.source_info_dict)
-
     WriteABOTAPackageWithBrilloScript(
         target_file=args[0],
         output_file=args[1],
@@ -1331,48 +1418,45 @@
     print("done.")
     return
 
+  # Sanity check the loaded info dicts first.
+  if OPTIONS.info_dict.get("no_recovery") == "true":
+    raise common.ExternalError(
+        "--- target build has specified no recovery ---")
+
+  # Non-A/B OTAs rely on /cache partition to store temporary files.
+  cache_size = OPTIONS.info_dict.get("cache_size")
+  if cache_size is None:
+    print("--- can't determine the cache partition size ---")
+  OPTIONS.cache_size = cache_size
+
   if OPTIONS.extra_script is not None:
     OPTIONS.extra_script = open(OPTIONS.extra_script).read()
 
   if OPTIONS.extracted_input is not None:
     OPTIONS.input_tmp = OPTIONS.extracted_input
-    OPTIONS.target_tmp = OPTIONS.input_tmp
-    OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.input_tmp, OPTIONS.input_tmp)
     input_zip = zipfile.ZipFile(args[0], "r")
   else:
     print("unzipping target target-files...")
     OPTIONS.input_tmp, input_zip = common.UnzipTemp(
         args[0], UNZIP_PATTERN)
+  OPTIONS.target_tmp = OPTIONS.input_tmp
 
-    OPTIONS.target_tmp = OPTIONS.input_tmp
-    OPTIONS.info_dict = common.LoadInfoDict(input_zip, OPTIONS.target_tmp)
-
-  if OPTIONS.verbose:
-    print("--- target info ---")
-    common.DumpInfoDict(OPTIONS.info_dict)
-
-  # If the caller explicitly specified the device-specific extensions
-  # path via -s/--device_specific, use that.  Otherwise, use
-  # META/releasetools.py if it is present in the target target_files.
-  # Otherwise, take the path of the file from 'tool_extensions' in the
-  # info dict and look for that in the local filesystem, relative to
-  # the current directory.
-
+  # If the caller explicitly specified the device-specific extensions path via
+  # -s / --device_specific, use that. Otherwise, use META/releasetools.py if it
+  # is present in the target target_files. Otherwise, take the path of the file
+  # from 'tool_extensions' in the info dict and look for that in the local
+  # filesystem, relative to the current directory.
   if OPTIONS.device_specific is None:
     from_input = os.path.join(OPTIONS.input_tmp, "META", "releasetools.py")
     if os.path.exists(from_input):
       print("(using device-specific extensions from target_files)")
       OPTIONS.device_specific = from_input
     else:
-      OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions", None)
+      OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions")
 
   if OPTIONS.device_specific is not None:
     OPTIONS.device_specific = os.path.abspath(OPTIONS.device_specific)
 
-  if OPTIONS.info_dict.get("no_recovery") == "true":
-    raise common.ExternalError(
-        "--- target build has specified no recovery ---")
-
   # Set up the output zip. Create a temporary zip file if signing is needed.
   if OPTIONS.no_signing:
     if os.path.exists(args[1]):
@@ -1384,46 +1468,26 @@
     output_zip = zipfile.ZipFile(temp_zip_file, "w",
                                  compression=zipfile.ZIP_DEFLATED)
 
-  # Non A/B OTAs rely on /cache partition to store temporary files.
-  cache_size = OPTIONS.info_dict.get("cache_size", None)
-  if cache_size is None:
-    print("--- can't determine the cache partition size ---")
-  OPTIONS.cache_size = cache_size
-
   # Generate a full OTA.
   if OPTIONS.incremental_source is None:
     WriteFullOTAPackage(input_zip, output_zip)
 
-  # Generate an incremental OTA. It will fall back to generate a full OTA on
-  # failure unless no_fallback_to_full is specified.
+  # Generate an incremental OTA.
   else:
     print("unzipping source target-files...")
     OPTIONS.source_tmp, source_zip = common.UnzipTemp(
         OPTIONS.incremental_source,
         UNZIP_PATTERN)
-    OPTIONS.target_info_dict = OPTIONS.info_dict
-    OPTIONS.source_info_dict = common.LoadInfoDict(source_zip,
-                                                   OPTIONS.source_tmp)
-    if OPTIONS.verbose:
-      print("--- source info ---")
-      common.DumpInfoDict(OPTIONS.source_info_dict)
-    try:
-      WriteBlockIncrementalOTAPackage(input_zip, source_zip, output_zip)
-      if OPTIONS.log_diff:
-        out_file = open(OPTIONS.log_diff, 'w')
-        import target_files_diff
-        target_files_diff.recursiveDiff('',
-                                        OPTIONS.source_tmp,
-                                        OPTIONS.input_tmp,
-                                        out_file)
-        out_file.close()
-    except ValueError:
-      if not OPTIONS.fallback_to_full:
-        raise
-      print("--- failed to build incremental; falling back to full ---")
-      OPTIONS.incremental_source = None
-      WriteFullOTAPackage(input_zip, output_zip)
 
+    WriteBlockIncrementalOTAPackage(input_zip, source_zip, output_zip)
+
+    if OPTIONS.log_diff:
+      with open(OPTIONS.log_diff, 'w') as out_file:
+        import target_files_diff
+        target_files_diff.recursiveDiff(
+            '', OPTIONS.source_tmp, OPTIONS.input_tmp, out_file)
+
+  common.ZipClose(input_zip)
   common.ZipClose(output_zip)
 
   # Sign the generated zip package unless no_signing is specified.
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 7bfc04b..7a1126c 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -90,14 +90,9 @@
       the existing ones in info dict.
 """
 
-import sys
-
-if sys.hexversion < 0x02070000:
-  print >> sys.stderr, "Python 2.7 or newer is required."
-  sys.exit(1)
+from __future__ import print_function
 
 import base64
-import cStringIO
 import copy
 import errno
 import gzip
@@ -106,12 +101,19 @@
 import shutil
 import stat
 import subprocess
+import sys
 import tempfile
 import zipfile
 
 import add_img_to_target_files
 import common
 
+
+if sys.hexversion < 0x02070000:
+  print("Python 2.7 or newer is required.", file=sys.stderr)
+  sys.exit(1)
+
+
 OPTIONS = common.OPTIONS
 
 OPTIONS.extra_apks = {}
@@ -126,6 +128,7 @@
 OPTIONS.avb_algorithms = {}
 OPTIONS.avb_extra_args = {}
 
+
 def GetApkCerts(certmap):
   # apply the key remapping to the contents of the file
   for apk, cert in certmap.iteritems():
@@ -149,17 +152,18 @@
     compressed_apk_extension = ".apk" + compressed_extension
   for info in input_tf_zip.infolist():
     if (info.filename.endswith(".apk") or
-        (compressed_apk_extension and info.filename.endswith(compressed_apk_extension))):
+        (compressed_apk_extension and
+         info.filename.endswith(compressed_apk_extension))):
       name = os.path.basename(info.filename)
       if compressed_apk_extension and name.endswith(compressed_apk_extension):
         name = name[:-len(compressed_extension)]
       if name not in apk_key_map:
         unknown_apks.append(name)
   if unknown_apks:
-    print "ERROR: no key specified for:\n\n ",
-    print "\n  ".join(unknown_apks)
-    print "\nUse '-e <apkname>=' to specify a key (which may be an"
-    print "empty string to not sign this apk)."
+    print("ERROR: no key specified for:\n")
+    print("  " + "\n  ".join(unknown_apks))
+    print("\nUse '-e <apkname>=' to specify a key (which may be an empty "
+          "string to not sign this apk).")
     sys.exit(1)
 
 
@@ -171,7 +175,8 @@
 
   if is_compressed:
     uncompressed = tempfile.NamedTemporaryFile()
-    with gzip.open(unsigned.name, "rb") as in_file, open(uncompressed.name, "wb") as out_file:
+    with gzip.open(unsigned.name, "rb") as in_file, \
+         open(uncompressed.name, "wb") as out_file:
       shutil.copyfileobj(in_file, out_file)
 
     # Finally, close the "unsigned" file (which is gzip compressed), and then
@@ -203,14 +208,15 @@
     min_api_level = 1
 
   common.SignFile(unsigned.name, signed.name, keyname, pw,
-      min_api_level=min_api_level,
-      codename_to_api_level_map=codename_to_api_level_map)
+                  min_api_level=min_api_level,
+                  codename_to_api_level_map=codename_to_api_level_map)
 
-  data = None;
+  data = None
   if is_compressed:
     # Recompress the file after it has been signed.
     compressed = tempfile.NamedTemporaryFile()
-    with open(signed.name, "rb") as in_file, gzip.open(compressed.name, "wb") as out_file:
+    with open(signed.name, "rb") as in_file, \
+         gzip.open(compressed.name, "wb") as out_file:
       shutil.copyfileobj(in_file, out_file)
 
     data = compressed.read()
@@ -233,10 +239,11 @@
   if compressed_extension:
     compressed_apk_extension = ".apk" + compressed_extension
 
-  maxsize = max([len(os.path.basename(i.filename))
-                 for i in input_tf_zip.infolist()
-                 if i.filename.endswith('.apk') or
-                 (compressed_apk_extension and i.filename.endswith(compressed_apk_extension))])
+  maxsize = max(
+      [len(os.path.basename(i.filename)) for i in input_tf_zip.infolist()
+       if (i.filename.endswith('.apk') or
+           (compressed_apk_extension and
+            i.filename.endswith(compressed_apk_extension)))])
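+  # maxsize only pads the "signing: <name>" log lines below.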
   system_root_image = misc_info.get("system_root_image") == "true"
 
   for info in input_tf_zip.infolist():
@@ -248,21 +255,23 @@
 
     # Sign APKs.
     if (info.filename.endswith(".apk") or
-        (compressed_apk_extension and info.filename.endswith(compressed_apk_extension))):
-      is_compressed = compressed_extension and info.filename.endswith(compressed_apk_extension)
+        (compressed_apk_extension and
+         info.filename.endswith(compressed_apk_extension))):
+      is_compressed = (compressed_extension and
+                       info.filename.endswith(compressed_apk_extension))
       name = os.path.basename(info.filename)
       if is_compressed:
         name = name[:-len(compressed_extension)]
 
       key = apk_key_map[name]
       if key not in common.SPECIAL_CERT_STRINGS:
-        print "    signing: %-*s (%s)" % (maxsize, name, key)
+        print("    signing: %-*s (%s)" % (maxsize, name, key))
         signed_data = SignApk(data, key, key_passwords[key], platform_api_level,
-            codename_to_api_level_map, is_compressed)
+                              codename_to_api_level_map, is_compressed)
         common.ZipWriteStr(output_tf_zip, out_info, signed_data)
       else:
         # an APK we're not supposed to sign.
-        print "NOT signing: %s" % (name,)
+        print("NOT signing: %s" % (name,))
         common.ZipWriteStr(output_tf_zip, out_info, data)
 
     # System properties.
@@ -274,7 +283,7 @@
                            "ROOT/default.prop",  # legacy
                            "RECOVERY/RAMDISK/prop.default",
                            "RECOVERY/RAMDISK/default.prop"):  # legacy
-      print "rewriting %s:" % (info.filename,)
+      print("Rewriting %s:" % (info.filename,))
       if stat.S_ISLNK(info.external_attr >> 16):
         new_data = data
       else:
@@ -282,7 +291,7 @@
       common.ZipWriteStr(output_tf_zip, out_info, new_data)
 
     elif info.filename.endswith("mac_permissions.xml"):
-      print "rewriting %s with new keys." % (info.filename,)
+      print("Rewriting %s with new keys." % (info.filename,))
       new_data = ReplaceCerts(data)
       common.ZipWriteStr(output_tf_zip, out_info, new_data)
 
@@ -333,10 +342,7 @@
     ReplaceVerityPrivateKey(misc_info, OPTIONS.replace_verity_private_key[1])
 
   if OPTIONS.replace_verity_public_key:
-    if system_root_image:
-      dest = "ROOT/verity_key"
-    else:
-      dest = "BOOT/RAMDISK/verity_key"
+    dest = "ROOT/verity_key" if system_root_image else "BOOT/RAMDISK/verity_key"
     # We are replacing the one in boot image only, since the one under
     # recovery won't ever be needed.
     ReplaceVerityPublicKey(
@@ -361,7 +367,7 @@
   for old, new in OPTIONS.key_map.iteritems():
     try:
       if OPTIONS.verbose:
-        print "    Replacing %s.x509.pem with %s.x509.pem" % (old, new)
+        print("    Replacing %s.x509.pem with %s.x509.pem" % (old, new))
       f = open(old + ".x509.pem")
       old_cert16 = base64.b16encode(common.ParseCertificate(f.read())).lower()
       f.close()
@@ -369,17 +375,17 @@
       new_cert16 = base64.b16encode(common.ParseCertificate(f.read())).lower()
       f.close()
       # Only match entire certs.
-      pattern = "\\b"+old_cert16+"\\b"
+      pattern = "\\b" + old_cert16 + "\\b"
       (data, num) = re.subn(pattern, new_cert16, data, flags=re.IGNORECASE)
       if OPTIONS.verbose:
-        print "    Replaced %d occurence(s) of %s.x509.pem with " \
-            "%s.x509.pem" % (num, old, new)
+        print("    Replaced %d occurence(s) of %s.x509.pem with "
+              "%s.x509.pem" % (num, old, new))
     except IOError as e:
       if e.errno == errno.ENOENT and not OPTIONS.verbose:
         continue
 
-      print "    Error accessing %s. %s. Skip replacing %s.x509.pem " \
-          "with %s.x509.pem." % (e.filename, e.strerror, old, new)
+      print("    Error accessing %s. %s. Skip replacing %s.x509.pem with "
+            "%s.x509.pem." % (e.filename, e.strerror, old, new))
 
   return data
 
@@ -445,8 +451,8 @@
         value = " ".join(value)
       line = key + "=" + value
     if line != original_line:
-      print "  replace: ", original_line
-      print "     with: ", line
+      print("  replace: ", original_line)
+      print("     with: ", line)
     output.append(line)
   return "\n".join(output) + "\n"
 
@@ -462,7 +468,7 @@
     extra_recovery_keys = [OPTIONS.key_map.get(k, k) + ".x509.pem"
                            for k in extra_recovery_keys.split()]
     if extra_recovery_keys:
-      print "extra recovery-only key(s): " + ", ".join(extra_recovery_keys)
+      print("extra recovery-only key(s): " + ", ".join(extra_recovery_keys))
   else:
     extra_recovery_keys = []
 
@@ -476,8 +482,8 @@
     mapped_keys.append(OPTIONS.key_map.get(k, k) + ".x509.pem")
 
   if mapped_keys:
-    print "using:\n   ", "\n   ".join(mapped_keys)
-    print "for OTA package verification"
+    print("using:\n   ", "\n   ".join(mapped_keys))
+    print("for OTA package verification")
   else:
     devkey = misc_info.get("default_system_dev_certificate",
                            "build/target/product/security/testkey")
@@ -511,7 +517,11 @@
   # put into a zipfile system/etc/security/otacerts.zip.
   # We DO NOT include the extra_recovery_keys (if any) here.
 
-  temp_file = cStringIO.StringIO()
+  try:
+    from StringIO import StringIO
+  except ImportError:
+    # Python 3's zipfile needs a binary stream, so fall back to BytesIO.
+    from io import BytesIO as StringIO
+  temp_file = StringIO()
   certs_zip = zipfile.ZipFile(temp_file, "w")
   for k in mapped_keys:
     common.ZipWrite(certs_zip, k)
@@ -527,7 +537,7 @@
       print("\n  WARNING: Found more than one OTA keys; Using the first one"
             " as payload verification key.\n\n")
 
-    print "Using %s for payload verification." % (mapped_keys[0],)
+    print("Using %s for payload verification." % (mapped_keys[0],))
     cmd = common.Run(
         ["openssl", "x509", "-pubkey", "-noout", "-in", mapped_keys[0]],
         stdout=subprocess.PIPE)
@@ -544,41 +554,62 @@
   return new_recovery_keys
 
 
-def ReplaceVerityPublicKey(targetfile_zip, filename, key_path):
-  print "Replacing verity public key with %s" % (key_path,)
-  common.ZipWrite(targetfile_zip, key_path, arcname=filename)
+def ReplaceVerityPublicKey(output_zip, filename, key_path):
+  """Replaces the verity public key at the given path in the given zip.
+
+  Args:
+    output_zip: The output target_files zip.
+    filename: The archive name in the output zip.
+    key_path: The path to the public key.
+  """
+  print("Replacing verity public key with %s" % (key_path,))
+  common.ZipWrite(output_zip, key_path, arcname=filename)
 
 
 def ReplaceVerityPrivateKey(misc_info, key_path):
-  print "Replacing verity private key with %s" % (key_path,)
+  """Replaces the verity private key in misc_info dict.
+
+  Args:
+    misc_info: The info dict.
+    key_path: The path to the private key in PKCS#8 format.
+  """
+  print("Replacing verity private key with %s" % (key_path,))
   misc_info["verity_key"] = key_path
 
 
-def ReplaceVerityKeyId(targetfile_input_zip, targetfile_output_zip, keypath):
-  in_cmdline = targetfile_input_zip.read("BOOT/cmdline")
-  # copy in_cmdline to output_zip if veritykeyid is not present in in_cmdline
-  if "veritykeyid" not in in_cmdline:
-    common.ZipWriteStr(targetfile_output_zip, "BOOT/cmdline", in_cmdline)
-    return in_cmdline
-  out_cmdline = []
-  for param in in_cmdline.split():
-    if "veritykeyid" in param:
-      # extract keyid using openssl command
-      p = common.Run(
-          ["openssl", "x509", "-in", keypath, "-text"],
-          stdout=subprocess.PIPE)
-      keyid, stderr = p.communicate()
-      keyid = re.search(
-          r'keyid:([0-9a-fA-F:]*)', keyid).group(1).replace(':', '').lower()
-      print "Replacing verity keyid with %s error=%s" % (keyid, stderr)
-      out_cmdline.append("veritykeyid=id:%s" % (keyid,))
-    else:
-      out_cmdline.append(param)
+def ReplaceVerityKeyId(input_zip, output_zip, key_path):
+  """Replaces the veritykeyid parameter in BOOT/cmdline.
 
-  out_cmdline = ' '.join(out_cmdline)
-  out_cmdline = out_cmdline.strip()
-  print "out_cmdline %s" % (out_cmdline)
-  common.ZipWriteStr(targetfile_output_zip, "BOOT/cmdline", out_cmdline)
+  Args:
+    input_zip: The input target_files zip, which should be already open.
+    output_zip: The output target_files zip, which should be already open and
+        writable.
+    key_path: The path to the PEM encoded X.509 certificate.
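+
+  As a hypothetical example, a "veritykeyid=id:deadbeef" parameter in the
+  cmdline gets rewritten with the keyid extracted from the certificate at
+  key_path.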
+  """
+  in_cmdline = input_zip.read("BOOT/cmdline")
+  # Copy in_cmdline to output_zip if veritykeyid is not present.
+  if "veritykeyid" not in in_cmdline:
+    common.ZipWriteStr(output_zip, "BOOT/cmdline", in_cmdline)
+    return
+
+  out_buffer = []
+  for param in in_cmdline.split():
+    if "veritykeyid" not in param:
+      out_buffer.append(param)
+      continue
+
+    # Extract keyid using openssl command.
+    p = common.Run(["openssl", "x509", "-in", key_path, "-text"],
+                   stdout=subprocess.PIPE)
+    keyid, stderr = p.communicate()
+    assert p.returncode == 0, "Failed to dump certificate: {}".format(stderr)
+    keyid = re.search(
+        r'keyid:([0-9a-fA-F:]*)', keyid).group(1).replace(':', '').lower()
+    print("Replacing verity keyid with {}".format(keyid))
+    out_buffer.append("veritykeyid=id:%s" % (keyid,))
+
+  out_cmdline = ' '.join(out_buffer).strip() + '\n'
+  common.ZipWriteStr(output_zip, "BOOT/cmdline", out_cmdline)
 
 
 def ReplaceMiscInfoTxt(input_zip, output_zip, misc_info):
@@ -600,12 +631,12 @@
   """Replaces the AVB signing keys."""
 
   AVB_FOOTER_ARGS_BY_PARTITION = {
-    'boot' : 'avb_boot_add_hash_footer_args',
-    'dtbo' : 'avb_dtbo_add_hash_footer_args',
-    'recovery' : 'avb_recovery_add_hash_footer_args',
-    'system' : 'avb_system_add_hashtree_footer_args',
-    'vendor' : 'avb_vendor_add_hashtree_footer_args',
-    'vbmeta' : 'avb_vbmeta_args',
+      'boot' : 'avb_boot_add_hash_footer_args',
+      'dtbo' : 'avb_dtbo_add_hash_footer_args',
+      'recovery' : 'avb_recovery_add_hash_footer_args',
+      'system' : 'avb_system_add_hashtree_footer_args',
+      'vendor' : 'avb_vendor_add_hashtree_footer_args',
+      'vbmeta' : 'avb_vbmeta_args',
   }
 
   def ReplaceAvbPartitionSigningKey(partition):
@@ -616,15 +647,15 @@
     algorithm = OPTIONS.avb_algorithms.get(partition)
     assert algorithm, 'Missing AVB signing algorithm for %s' % (partition,)
 
-    print 'Replacing AVB signing key for %s with "%s" (%s)' % (
-        partition, key, algorithm)
+    print('Replacing AVB signing key for %s with "%s" (%s)' % (
+        partition, key, algorithm))
     misc_info['avb_' + partition + '_algorithm'] = algorithm
     misc_info['avb_' + partition + '_key_path'] = key
 
     extra_args = OPTIONS.avb_extra_args.get(partition)
     if extra_args:
-      print 'Setting extra AVB signing args for %s to "%s"' % (
-          partition, extra_args)
+      print('Setting extra AVB signing args for %s to "%s"' % (
+          partition, extra_args))
       args_key = AVB_FOOTER_ARGS_BY_PARTITION[partition]
       misc_info[args_key] = (misc_info.get(args_key, '') + ' ' + extra_args)
 
@@ -767,29 +798,29 @@
       argv, __doc__,
       extra_opts="e:d:k:ot:",
       extra_long_opts=[
-        "extra_apks=",
-        "default_key_mappings=",
-        "key_mapping=",
-        "replace_ota_keys",
-        "tag_changes=",
-        "replace_verity_public_key=",
-        "replace_verity_private_key=",
-        "replace_verity_keyid=",
-        "avb_vbmeta_algorithm=",
-        "avb_vbmeta_key=",
-        "avb_vbmeta_extra_args=",
-        "avb_boot_algorithm=",
-        "avb_boot_key=",
-        "avb_boot_extra_args=",
-        "avb_dtbo_algorithm=",
-        "avb_dtbo_key=",
-        "avb_dtbo_extra_args=",
-        "avb_system_algorithm=",
-        "avb_system_key=",
-        "avb_system_extra_args=",
-        "avb_vendor_algorithm=",
-        "avb_vendor_key=",
-        "avb_vendor_extra_args=",
+          "extra_apks=",
+          "default_key_mappings=",
+          "key_mapping=",
+          "replace_ota_keys",
+          "tag_changes=",
+          "replace_verity_public_key=",
+          "replace_verity_private_key=",
+          "replace_verity_keyid=",
+          "avb_vbmeta_algorithm=",
+          "avb_vbmeta_key=",
+          "avb_vbmeta_extra_args=",
+          "avb_boot_algorithm=",
+          "avb_boot_key=",
+          "avb_boot_extra_args=",
+          "avb_dtbo_algorithm=",
+          "avb_dtbo_key=",
+          "avb_dtbo_extra_args=",
+          "avb_system_algorithm=",
+          "avb_system_key=",
+          "avb_system_extra_args=",
+          "avb_vendor_algorithm=",
+          "avb_vendor_key=",
+          "avb_vendor_extra_args=",
       ],
       extra_option_handler=option_handler)
 
@@ -832,16 +863,14 @@
   new_args.append(args[1])
   add_img_to_target_files.main(new_args)
 
-  print "done."
+  print("done.")
 
 
 if __name__ == '__main__':
   try:
     main(sys.argv[1:])
-  except common.ExternalError, e:
-    print
-    print "   ERROR: %s" % (e,)
-    print
+  except common.ExternalError as e:
+    print("\n   ERROR: %s\n" % (e,))
     sys.exit(1)
   finally:
     common.Cleanup()
diff --git a/tools/releasetools/test_blockimgdiff.py b/tools/releasetools/test_blockimgdiff.py
index e5a3694..7084e21 100644
--- a/tools/releasetools/test_blockimgdiff.py
+++ b/tools/releasetools/test_blockimgdiff.py
@@ -16,12 +16,43 @@
 
 from __future__ import print_function
 
-import common
 import unittest
 
-from blockimgdiff import BlockImageDiff, EmptyImage, Transfer
+import common
+from blockimgdiff import BlockImageDiff, EmptyImage, HeapItem, Transfer
 from rangelib import RangeSet
 
+
+class HeapItemTest(unittest.TestCase):
+
+  class Item(object):
+    def __init__(self, score):
+      self.score = score
+
+  def test_init(self):
+    item1 = HeapItem(self.Item(15))
+    item2 = HeapItem(self.Item(20))
+    item3 = HeapItem(self.Item(15))
+    self.assertTrue(item1)
+    self.assertTrue(item2)
+    self.assertTrue(item3)
+
+    self.assertNotEqual(item1, item2)
+    self.assertEqual(item1, item3)
+    # HeapItem uses negated scores: heapq is a min-heap, so negating lets the
+    # item with the largest score be popped first.
+    self.assertGreater(item1, item2)
+    self.assertLessEqual(item1, item3)
+    self.assertTrue(item1 <= item3)
+    self.assertFalse(item2 >= item1)
+
+  def test_clear(self):
+    item = HeapItem(self.Item(15))
+    self.assertTrue(item)
+
+    item.clear()
+    self.assertFalse(item)
+
+
 class BlockImageDiffTest(unittest.TestCase):
 
   def test_GenerateDigraphOrder(self):
diff --git a/tools/releasetools/test_build_image.py b/tools/releasetools/test_build_image.py
index 6566a5a..161faff 100644
--- a/tools/releasetools/test_build_image.py
+++ b/tools/releasetools/test_build_image.py
@@ -14,52 +14,81 @@
 # limitations under the License.
 #
 
-import shutil
-import tempfile
 import unittest
 
+import common
 from build_image import CheckHeadroom, RunCommand
 
 
 class BuildImageTest(unittest.TestCase):
 
+  # Available: 516099 - 515099 = 1000 blocks.
+  EXT4FS_OUTPUT = (
+      "Created filesystem with 2777/129024 inodes and 515099/516099 blocks")
+
   def test_CheckHeadroom_SizeUnderLimit(self):
-    ext4fs_output = ("Created filesystem with 2777/129024 inodes and "
-                     "508140/516099 blocks")
+    # Required headroom: 4096000 bytes / 4096-byte blocks = 1000 blocks.
     prop_dict = {
-        'partition_headroom' : '4194304',
+        'fs_type' : 'ext4',
+        'partition_headroom' : '4096000',
         'mount_point' : 'system',
     }
-    self.assertTrue(CheckHeadroom(ext4fs_output, prop_dict))
+    self.assertTrue(CheckHeadroom(self.EXT4FS_OUTPUT, prop_dict))
 
   def test_CheckHeadroom_InsufficientHeadroom(self):
-    ext4fs_output = ("Created filesystem with 2777/129024 inodes and "
-                     "515099/516099 blocks")
+    # Required headroom: 4100096 bytes / 4096-byte blocks = 1001 blocks.
     prop_dict = {
+        'fs_type' : 'ext4',
         'partition_headroom' : '4100096',
         'mount_point' : 'system',
     }
-    self.assertFalse(CheckHeadroom(ext4fs_output, prop_dict))
+    self.assertFalse(CheckHeadroom(self.EXT4FS_OUTPUT, prop_dict))
+
+  def test_CheckHeadroom_WrongFsType(self):
+    prop_dict = {
+        'fs_type' : 'f2fs',
+        'partition_headroom' : '4100096',
+        'mount_point' : 'system',
+    }
+    self.assertRaises(
+        AssertionError, CheckHeadroom, self.EXT4FS_OUTPUT, prop_dict)
+
+  def test_CheckHeadroom_MissingProperties(self):
+    prop_dict = {
+        'fs_type' : 'ext4',
+        'partition_headroom' : '4100096',
+    }
+    self.assertRaises(
+        AssertionError, CheckHeadroom, self.EXT4FS_OUTPUT, prop_dict)
+
+    prop_dict = {
+        'fs_type' : 'ext4',
+        'mount_point' : 'system',
+    }
+    self.assertRaises(
+        AssertionError, CheckHeadroom, self.EXT4FS_OUTPUT, prop_dict)
 
   def test_CheckHeadroom_WithMke2fsOutput(self):
     """Tests the result parsing from actual call to mke2fs."""
-    input_dir = tempfile.mkdtemp()
-    output_image = tempfile.NamedTemporaryFile(suffix='.img')
-    command = ['mkuserimg_mke2fs.sh', input_dir, output_image.name, 'ext4',
+    input_dir = common.MakeTempDir()
+    output_image = common.MakeTempFile(suffix='.img')
+    command = ['mkuserimg_mke2fs.sh', input_dir, output_image, 'ext4',
                '/system', '409600', '-j', '0']
     ext4fs_output, exit_code = RunCommand(command)
     self.assertEqual(0, exit_code)
 
     prop_dict = {
+        'fs_type' : 'ext4',
         'partition_headroom' : '40960',
         'mount_point' : 'system',
     }
     self.assertTrue(CheckHeadroom(ext4fs_output, prop_dict))
 
     prop_dict = {
+        'fs_type' : 'ext4',
         'partition_headroom' : '413696',
         'mount_point' : 'system',
     }
     self.assertFalse(CheckHeadroom(ext4fs_output, prop_dict))
 
-    shutil.rmtree(input_dir)
+    common.Cleanup()
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index bb93937..8fb4600 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -14,12 +14,10 @@
 # limitations under the License.
 #
 import os
-import shutil
 import tempfile
 import time
 import unittest
 import zipfile
-
 from hashlib import sha1
 
 import common
@@ -29,6 +27,7 @@
 MiB = 1024 * KiB
 GiB = 1024 * MiB
 
+
 def get_2gb_string():
   size = int(2 * GiB + 1)
   block_size = 4 * KiB
@@ -354,18 +353,141 @@
       os.remove(zip_file.name)
 
 
-class InstallRecoveryScriptFormatTest(unittest.TestCase):
-  """Check the format of install-recovery.sh
+class CommonApkUtilsTest(unittest.TestCase):
+  """Tests the APK utils related functions."""
 
-  Its format should match between common.py and validate_target_files.py."""
+  APKCERTS_TXT1 = (
+      'name="RecoveryLocalizer.apk" certificate="certs/devkey.x509.pem"'
+      ' private_key="certs/devkey.pk8"\n'
+      'name="Settings.apk"'
+      ' certificate="build/target/product/security/platform.x509.pem"'
+      ' private_key="build/target/product/security/platform.pk8"\n'
+      'name="TV.apk" certificate="PRESIGNED" private_key=""\n'
+  )
+
+  APKCERTS_CERTMAP1 = {
+      'RecoveryLocalizer.apk' : 'certs/devkey',
+      'Settings.apk' : 'build/target/product/security/platform',
+      'TV.apk' : 'PRESIGNED',
+  }
+
+  APKCERTS_TXT2 = (
+      'name="Compressed1.apk" certificate="certs/compressed1.x509.pem"'
+      ' private_key="certs/compressed1.pk8" compressed="gz"\n'
+      'name="Compressed2a.apk" certificate="certs/compressed2.x509.pem"'
+      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
+      'name="Compressed2b.apk" certificate="certs/compressed2.x509.pem"'
+      ' private_key="certs/compressed2.pk8" compressed="gz"\n'
+      'name="Compressed3.apk" certificate="certs/compressed3.x509.pem"'
+      ' private_key="certs/compressed3.pk8" compressed="gz"\n'
+  )
+
+  APKCERTS_CERTMAP2 = {
+      'Compressed1.apk' : 'certs/compressed1',
+      'Compressed2a.apk' : 'certs/compressed2',
+      'Compressed2b.apk' : 'certs/compressed2',
+      'Compressed3.apk' : 'certs/compressed3',
+  }
+
+  APKCERTS_TXT3 = (
+      'name="Compressed4.apk" certificate="certs/compressed4.x509.pem"'
+      ' private_key="certs/compressed4.pk8" compressed="xz"\n'
+  )
+
+  APKCERTS_CERTMAP3 = {
+      'Compressed4.apk' : 'certs/compressed4',
+  }
+
+  def tearDown(self):
+    common.Cleanup()
+
+  @staticmethod
+  def _write_apkcerts_txt(apkcerts_txt, additional=None):
+    if additional is None:
+      additional = []
+    target_files = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+      target_files_zip.writestr('META/apkcerts.txt', apkcerts_txt)
+      for entry in additional:
+        target_files_zip.writestr(entry, '')
+    return target_files
+
+  def test_ReadApkCerts_NoncompressedApks(self):
+    target_files = self._write_apkcerts_txt(self.APKCERTS_TXT1)
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      certmap, ext = common.ReadApkCerts(input_zip)
+
+    self.assertDictEqual(self.APKCERTS_CERTMAP1, certmap)
+    self.assertIsNone(ext)
+
+  def test_ReadApkCerts_CompressedApks(self):
+    # We have "installed" Compressed1.apk.gz only. Note that Compressed3.apk is
+    # not stored in '.gz' format, so it shouldn't be considered as installed.
+    target_files = self._write_apkcerts_txt(
+        self.APKCERTS_TXT2,
+        ['Compressed1.apk.gz', 'Compressed3.apk'])
+
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      certmap, ext = common.ReadApkCerts(input_zip)
+
+    self.assertDictEqual(self.APKCERTS_CERTMAP2, certmap)
+    self.assertEqual('.gz', ext)
+
+    # Alternative case with '.xz'.
+    target_files = self._write_apkcerts_txt(
+        self.APKCERTS_TXT3, ['Compressed4.apk.xz'])
+
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      certmap, ext = common.ReadApkCerts(input_zip)
+
+    self.assertDictEqual(self.APKCERTS_CERTMAP3, certmap)
+    self.assertEqual('.xz', ext)
+
+  def test_ReadApkCerts_CompressedAndNoncompressedApks(self):
+    target_files = self._write_apkcerts_txt(
+        self.APKCERTS_TXT1 + self.APKCERTS_TXT2,
+        ['Compressed1.apk.gz', 'Compressed3.apk'])
+
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      certmap, ext = common.ReadApkCerts(input_zip)
+
+    certmap_merged = self.APKCERTS_CERTMAP1.copy()
+    certmap_merged.update(self.APKCERTS_CERTMAP2)
+    self.assertDictEqual(certmap_merged, certmap)
+    self.assertEqual('.gz', ext)
+
+  def test_ReadApkCerts_MultipleCompressionMethods(self):
+    target_files = self._write_apkcerts_txt(
+        self.APKCERTS_TXT2 + self.APKCERTS_TXT3,
+        ['Compressed1.apk.gz', 'Compressed4.apk.xz'])
+
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)
+
+  def test_ReadApkCerts_MismatchingKeys(self):
+    malformed_apkcerts_txt = (
+        'name="App1.apk" certificate="certs/cert1.x509.pem"'
+        ' private_key="certs/cert2.pk8"\n'
+    )
+    target_files = self._write_apkcerts_txt(malformed_apkcerts_txt)
+
+    with zipfile.ZipFile(target_files, 'r') as input_zip:
+      self.assertRaises(ValueError, common.ReadApkCerts, input_zip)
+
+
+class InstallRecoveryScriptFormatTest(unittest.TestCase):
+  """Checks the format of install-recovery.sh.
+
+  Its format should match between common.py and validate_target_files.py.
+  """
 
   def setUp(self):
-    self._tempdir = tempfile.mkdtemp()
+    self._tempdir = common.MakeTempDir()
     # Create a dummy dict that contains the fstab info for boot&recovery.
     self._info = {"fstab" : {}}
-    dummy_fstab = \
-        ["/dev/soc.0/by-name/boot /boot emmc defaults defaults",
-         "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
+    dummy_fstab = [
+        "/dev/soc.0/by-name/boot /boot emmc defaults defaults",
+        "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
     self._info["fstab"] = common.LoadRecoveryFSTab("\n".join, 2, dummy_fstab)
     # Construct the gzipped recovery.img and boot.img
     self.recovery_data = bytearray([
@@ -414,4 +536,4 @@
                                                         self._info)
 
   def tearDown(self):
-    shutil.rmtree(self._tempdir)
+    common.Cleanup()
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
new file mode 100644
index 0000000..5f6c5d0
--- /dev/null
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -0,0 +1,478 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import copy
+import unittest
+
+import common
+from ota_from_target_files import (
+    _LoadOemDicts, BuildInfo, GetPackageMetadata, WriteFingerprintAssertion)
+
+
+class MockScriptWriter(object):
+  """A class that mocks edify_generator.EdifyGenerator.
+
+  It simply pushes the incoming arguments onto the script stack, so that
+  tests can assert on the calls made to EdifyGenerator functions.
+  """
+
+  def __init__(self):
+    self.script = []
+
+  def Mount(self, *args):
+    self.script.append(('Mount',) + args)
+
+  def AssertDevice(self, *args):
+    self.script.append(('AssertDevice',) + args)
+
+  def AssertOemProperty(self, *args):
+    self.script.append(('AssertOemProperty',) + args)
+
+  def AssertFingerprintOrThumbprint(self, *args):
+    self.script.append(('AssertFingerprintOrThumbprint',) + args)
+
+  def AssertSomeFingerprint(self, *args):
+    self.script.append(('AssertSomeFingerprint',) + args)
+
+  def AssertSomeThumbprint(self, *args):
+    self.script.append(('AssertSomeThumbprint',) + args)
+
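+# Typical usage: pass a MockScriptWriter wherever an EdifyGenerator is
+# expected, then assert on its .script list, which may e.g. contain a
+# ('AssertDevice', 'product-device') tuple after device assertions run.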
+
+class BuildInfoTest(unittest.TestCase):
+
+  TEST_INFO_DICT = {
+      'build.prop' : {
+          'ro.product.device' : 'product-device',
+          'ro.product.name' : 'product-name',
+          'ro.build.fingerprint' : 'build-fingerprint',
+          'ro.build.foo' : 'build-foo',
+      },
+      'vendor.build.prop' : {
+          'ro.vendor.build.fingerprint' : 'vendor-build-fingerprint',
+      },
+      'property1' : 'value1',
+      'property2' : 4096,
+  }
+
+  TEST_INFO_DICT_USES_OEM_PROPS = {
+      'build.prop' : {
+          'ro.product.name' : 'product-name',
+          'ro.build.thumbprint' : 'build-thumbprint',
+          'ro.build.bar' : 'build-bar',
+      },
+      'vendor.build.prop' : {
+          'ro.vendor.build.fingerprint' : 'vendor-build-fingerprint',
+      },
+      'property1' : 'value1',
+      'property2' : 4096,
+      'oem_fingerprint_properties' : 'ro.product.device ro.product.brand',
+  }
+
+  TEST_OEM_DICTS = [
+      {
+          'ro.product.brand' : 'brand1',
+          'ro.product.device' : 'device1',
+      },
+      {
+          'ro.product.brand' : 'brand2',
+          'ro.product.device' : 'device2',
+      },
+      {
+          'ro.product.brand' : 'brand3',
+          'ro.product.device' : 'device3',
+      },
+  ]
+
+  def test_init(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    self.assertEqual('product-device', target_info.device)
+    self.assertEqual('build-fingerprint', target_info.fingerprint)
+    self.assertFalse(target_info.is_ab)
+    self.assertIsNone(target_info.oem_props)
+
+  def test_init_with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
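+    # The first oem dict wins: the device name comes from oem_dicts[0], and
+    # the fingerprint is composed from the OEM brand/device plus the build
+    # thumbprint.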
+    self.assertEqual('device1', target_info.device)
+    self.assertEqual('brand1/product-name/device1:build-thumbprint',
+                     target_info.fingerprint)
+
+    # Swap the order in oem_dicts, which would lead to a different BuildInfo.
+    oem_dicts = copy.copy(self.TEST_OEM_DICTS)
+    oem_dicts[0], oem_dicts[2] = oem_dicts[2], oem_dicts[0]
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS, oem_dicts)
+    self.assertEqual('device3', target_info.device)
+    self.assertEqual('brand3/product-name/device3:build-thumbprint',
+                     target_info.fingerprint)
+
+    # A missing oem_dicts argument should be rejected.
+    self.assertRaises(AssertionError, BuildInfo,
+                      self.TEST_INFO_DICT_USES_OEM_PROPS, None)
+
+  def test___getitem__(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    self.assertEqual('value1', target_info['property1'])
+    self.assertEqual(4096, target_info['property2'])
+    self.assertEqual('build-foo', target_info['build.prop']['ro.build.foo'])
+
+  def test___getitem__with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    self.assertEqual('value1', target_info['property1'])
+    self.assertEqual(4096, target_info['property2'])
+    self.assertRaises(KeyError,
+                      lambda: target_info['build.prop']['ro.build.foo'])
+
+  def test_get(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    self.assertEqual('value1', target_info.get('property1'))
+    self.assertEqual(4096, target_info.get('property2'))
+    self.assertEqual(4096, target_info.get('property2', 1024))
+    self.assertEqual(1024, target_info.get('property-nonexistent', 1024))
+    self.assertEqual('build-foo', target_info.get('build.prop')['ro.build.foo'])
+
+  def test_get_with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    self.assertEqual('value1', target_info.get('property1'))
+    self.assertEqual(4096, target_info.get('property2'))
+    self.assertEqual(4096, target_info.get('property2', 1024))
+    self.assertEqual(1024, target_info.get('property-nonexistent', 1024))
+    self.assertIsNone(target_info.get('build.prop').get('ro.build.foo'))
+    self.assertRaises(KeyError,
+                      lambda: target_info.get('build.prop')['ro.build.foo'])
+
+  def test_GetBuildProp(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    self.assertEqual('build-foo', target_info.GetBuildProp('ro.build.foo'))
+    self.assertRaises(common.ExternalError, target_info.GetBuildProp,
+                      'ro.build.nonexistent')
+
+  def test_GetBuildProp_with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    self.assertEqual('build-bar', target_info.GetBuildProp('ro.build.bar'))
+    self.assertRaises(common.ExternalError, target_info.GetBuildProp,
+                      'ro.build.nonexistent')
+
+  def test_GetVendorBuildProp(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    self.assertEqual('vendor-build-fingerprint',
+                     target_info.GetVendorBuildProp(
+                         'ro.vendor.build.fingerprint'))
+    self.assertRaises(common.ExternalError, target_info.GetVendorBuildProp,
+                      'ro.build.nonexistent')
+
+  def test_GetVendorBuildProp_with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    self.assertEqual('vendor-build-fingerprint',
+                     target_info.GetVendorBuildProp(
+                         'ro.vendor.build.fingerprint'))
+    self.assertRaises(common.ExternalError, target_info.GetVendorBuildProp,
+                      'ro.build.nonexistent')
+
+  def test_WriteMountOemScript(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    script_writer = MockScriptWriter()
+    target_info.WriteMountOemScript(script_writer)
+    self.assertEqual([('Mount', '/oem', None)], script_writer.script)
+
+  def test_WriteDeviceAssertions(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    script_writer = MockScriptWriter()
+    target_info.WriteDeviceAssertions(script_writer, False)
+    self.assertEqual([('AssertDevice', 'product-device')], script_writer.script)
+
+  def test_WriteDeviceAssertions_with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    script_writer = MockScriptWriter()
+    target_info.WriteDeviceAssertions(script_writer, False)
+    self.assertEqual(
+        [
+            ('AssertOemProperty', 'ro.product.device',
+             ['device1', 'device2', 'device3'], False),
+            ('AssertOemProperty', 'ro.product.brand',
+             ['brand1', 'brand2', 'brand3'], False),
+        ],
+        script_writer.script)
+
+  def test_WriteFingerprintAssertion_without_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    source_info_dict = copy.deepcopy(self.TEST_INFO_DICT)
+    source_info_dict['build.prop']['ro.build.fingerprint'] = (
+        'source-build-fingerprint')
+    source_info = BuildInfo(source_info_dict, None)
+
+    script_writer = MockScriptWriter()
+    WriteFingerprintAssertion(script_writer, target_info, source_info)
+    self.assertEqual(
+        [('AssertSomeFingerprint', 'source-build-fingerprint',
+          'build-fingerprint')],
+        script_writer.script)
+
+  def test_WriteFingerprintAssertion_with_source_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    source_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+
+    script_writer = MockScriptWriter()
+    WriteFingerprintAssertion(script_writer, target_info, source_info)
+    self.assertEqual(
+        [('AssertFingerprintOrThumbprint', 'build-fingerprint',
+          'build-thumbprint')],
+        script_writer.script)
+
+  def test_WriteFingerprintAssertion_with_target_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    source_info = BuildInfo(self.TEST_INFO_DICT, None)
+
+    script_writer = MockScriptWriter()
+    WriteFingerprintAssertion(script_writer, target_info, source_info)
+    self.assertEqual(
+        [('AssertFingerprintOrThumbprint', 'build-fingerprint',
+          'build-thumbprint')],
+        script_writer.script)
+
+  def test_WriteFingerprintAssertion_with_both_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    source_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
+    source_info_dict['build.prop']['ro.build.thumbprint'] = (
+        'source-build-thumbprint')
+    source_info = BuildInfo(source_info_dict, self.TEST_OEM_DICTS)
+
+    script_writer = MockScriptWriter()
+    WriteFingerprintAssertion(script_writer, target_info, source_info)
+    self.assertEqual(
+        [('AssertSomeThumbprint', 'build-thumbprint',
+          'source-build-thumbprint')],
+        script_writer.script)
+
+
+class LoadOemDictsTest(unittest.TestCase):
+
+  def tearDown(self):
+    common.Cleanup()
+
+  def test_NoneDict(self):
+    self.assertIsNone(_LoadOemDicts(None))
+
+  def test_SingleDict(self):
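+    # _LoadOemDicts parses each given file as simple key=value property lines.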
+    dict_file = common.MakeTempFile()
+    with open(dict_file, 'w') as dict_fp:
+      dict_fp.write('abc=1\ndef=2\nxyz=foo\na.b.c=bar\n')
+
+    oem_dicts = _LoadOemDicts([dict_file])
+    self.assertEqual(1, len(oem_dicts))
+    self.assertEqual('foo', oem_dicts[0]['xyz'])
+    self.assertEqual('bar', oem_dicts[0]['a.b.c'])
+
+  def test_MultipleDicts(self):
+    oem_source = []
+    for i in range(3):
+      dict_file = common.MakeTempFile()
+      with open(dict_file, 'w') as dict_fp:
+        dict_fp.write(
+            'ro.build.index={}\ndef=2\nxyz=foo\na.b.c=bar\n'.format(i))
+      oem_source.append(dict_file)
+
+    oem_dicts = _LoadOemDicts(oem_source)
+    self.assertEqual(3, len(oem_dicts))
+    for i, oem_dict in enumerate(oem_dicts):
+      self.assertEqual('2', oem_dict['def'])
+      self.assertEqual('foo', oem_dict['xyz'])
+      self.assertEqual('bar', oem_dict['a.b.c'])
+      self.assertEqual('{}'.format(i), oem_dict['ro.build.index'])
+
+
+class OtaFromTargetFilesTest(unittest.TestCase):
+
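+  # Paired target/source info dicts for the metadata tests below; the source
+  # build carries an older ro.build.date.utc than the target.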
+  TEST_TARGET_INFO_DICT = {
+      'build.prop' : {
+          'ro.product.device' : 'product-device',
+          'ro.build.fingerprint' : 'build-fingerprint-target',
+          'ro.build.version.incremental' : 'build-version-incremental-target',
+          'ro.build.date.utc' : '1500000000',
+      },
+  }
+
+  TEST_SOURCE_INFO_DICT = {
+      'build.prop' : {
+          'ro.product.device' : 'product-device',
+          'ro.build.fingerprint' : 'build-fingerprint-source',
+          'ro.build.version.incremental' : 'build-version-incremental-source',
+          'ro.build.date.utc' : '1400000000',
+      },
+  }
+
+  def setUp(self):
+    # Reset the global options as in ota_from_target_files.py.
+    common.OPTIONS.incremental_source = None
+    common.OPTIONS.downgrade = False
+    common.OPTIONS.timestamp = False
+    common.OPTIONS.wipe_user_data = False
+
+  def test_GetPackageMetadata_abOta_full(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    target_info_dict['ab_update'] = 'true'
+    target_info = BuildInfo(target_info_dict, None)
+    metadata = GetPackageMetadata(target_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'AB',
+            'ota-required-cache' : '0',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_abOta_incremental(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    target_info_dict['ab_update'] = 'true'
+    target_info = BuildInfo(target_info_dict, None)
+    source_info = BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
+    common.OPTIONS.incremental_source = ''
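+    # Any non-None incremental_source marks this as an incremental OTA, which
+    # adds the pre-build and pre-build-incremental fields to the metadata.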
+    metadata = GetPackageMetadata(target_info, source_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'AB',
+            'ota-required-cache' : '0',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+            'pre-build' : 'build-fingerprint-source',
+            'pre-build-incremental' : 'build-version-incremental-source',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_nonAbOta_full(self):
+    target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+    metadata = GetPackageMetadata(target_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'BLOCK',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_nonAbOta_incremental(self):
+    target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+    source_info = BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
+    common.OPTIONS.incremental_source = ''
+    metadata = GetPackageMetadata(target_info, source_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'BLOCK',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+            'pre-build' : 'build-fingerprint-source',
+            'pre-build-incremental' : 'build-version-incremental-source',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_wipe(self):
+    target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+    common.OPTIONS.wipe_user_data = True
+    metadata = GetPackageMetadata(target_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'BLOCK',
+            'ota-wipe' : 'yes',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+        },
+        metadata)
+
+  @staticmethod
+  def _test_GetPackageMetadata_swapBuildTimestamps(target_info, source_info):
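+    """Swaps the build timestamps so the target appears older than the source,
+    simulating a downgrade between the two builds."""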
+    (target_info['build.prop']['ro.build.date.utc'],
+     source_info['build.prop']['ro.build.date.utc']) = (
+         source_info['build.prop']['ro.build.date.utc'],
+         target_info['build.prop']['ro.build.date.utc'])
+
+  def test_GetPackageMetadata_unintentionalDowngradeDetected(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+    self._test_GetPackageMetadata_swapBuildTimestamps(
+        target_info_dict, source_info_dict)
+
+    target_info = BuildInfo(target_info_dict, None)
+    source_info = BuildInfo(source_info_dict, None)
+    common.OPTIONS.incremental_source = ''
+    self.assertRaises(RuntimeError, GetPackageMetadata, target_info,
+                      source_info)
+
+  def test_GetPackageMetadata_downgrade(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+    self._test_GetPackageMetadata_swapBuildTimestamps(
+        target_info_dict, source_info_dict)
+
+    target_info = BuildInfo(target_info_dict, None)
+    source_info = BuildInfo(source_info_dict, None)
+    common.OPTIONS.incremental_source = ''
+    common.OPTIONS.downgrade = True
+    common.OPTIONS.wipe_user_data = True
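+    # A deliberate downgrade also requires wiping user data; the resulting
+    # metadata carries ota-downgrade=yes and omits post-timestamp.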
+    metadata = GetPackageMetadata(target_info, source_info)
+    self.assertDictEqual(
+        {
+            'ota-downgrade' : 'yes',
+            'ota-type' : 'BLOCK',
+            'ota-wipe' : 'yes',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'pre-device' : 'product-device',
+            'pre-build' : 'build-fingerprint-source',
+            'pre-build-incremental' : 'build-version-incremental-source',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_overrideTimestamp(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+    self._test_GetPackageMetadata_swapBuildTimestamps(
+        target_info_dict, source_info_dict)
+
+    target_info = BuildInfo(target_info_dict, None)
+    source_info = BuildInfo(source_info_dict, None)
+    common.OPTIONS.incremental_source = ''
+    common.OPTIONS.timestamp = True
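+    # With OPTIONS.timestamp set, post-timestamp is overridden to the newer
+    # source timestamp plus one (hence 1500000001), so the package is not
+    # treated as a downgrade.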
+    metadata = GetPackageMetadata(target_info, source_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'BLOCK',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-timestamp' : '1500000001',
+            'pre-device' : 'product-device',
+            'pre-build' : 'build-fingerprint-source',
+            'pre-build-incremental' : 'build-version-incremental-source',
+        },
+        metadata)
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 90afdc7..726d6b9 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -16,13 +16,22 @@
 
 from __future__ import print_function
 
+import tempfile
 import unittest
+import zipfile
 
-from sign_target_files_apks import EditTags, RewriteProps
+import common
+from sign_target_files_apks import EditTags, ReplaceVerityKeyId, RewriteProps
 
 
 class SignTargetFilesApksTest(unittest.TestCase):
 
+  def setUp(self):
+    self.tempdir = common.MakeTempDir()
+
+  def tearDown(self):
+    common.Cleanup()
+
   def test_EditTags(self):
     self.assertEqual(EditTags('dev-keys'), ('release-keys'))
     self.assertEqual(EditTags('test-keys'), ('release-keys'))
@@ -59,9 +68,132 @@
     )
 
     # Assert the case for each individual line.
-    for input, output in props:
-      self.assertEqual(RewriteProps(input), output)
+    for prop, output in props:
+      self.assertEqual(RewriteProps(prop), output)
 
     # Concatenate all the input lines.
     self.assertEqual(RewriteProps('\n'.join([prop[0] for prop in props])),
                      ''.join([prop[1] for prop in props]))
+
+  def test_ReplaceVerityKeyId(self):
+    BOOT_CMDLINE1 = (
+        "console=ttyHSL0,115200,n8 androidboot.console=ttyHSL0 "
+        "androidboot.hardware=marlin user_debug=31 ehci-hcd.park=3 "
+        "lpm_levels.sleep_disabled=1 cma=32M@0-0xffffffff loop.max_part=7 "
+        "buildvariant=userdebug "
+        "veritykeyid=id:7e4333f9bba00adfe0ede979e28ed1920492b40f\n")
+
+    BOOT_CMDLINE2 = (
+        "console=ttyHSL0,115200,n8 androidboot.console=ttyHSL0 "
+        "androidboot.hardware=marlin user_debug=31 ehci-hcd.park=3 "
+        "lpm_levels.sleep_disabled=1 cma=32M@0-0xffffffff loop.max_part=7 "
+        "buildvariant=userdebug "
+        "veritykeyid=id:485900563d272c46ae118605a47419ac09ca8c11\n")
+
+    # From build/target/product/security/verity.x509.pem.
+    VERITY_CERTIFICATE1 = """-----BEGIN CERTIFICATE-----
+MIID/TCCAuWgAwIBAgIJAJcPmDkJqolJMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
+VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
+AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0xNDExMDYxOTA3NDBaFw00MjAzMjQxOTA3NDBaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
+A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAOjreE0vTVSRenuzO9vnaWfk0eQzYab0gqpi
+6xAzi6dmD+ugoEKJmbPiuE5Dwf21isZ9uhUUu0dQM46dK4ocKxMRrcnmGxydFn6o
+fs3ODJMXOkv2gKXL/FdbEPdDbxzdu8z3yk+W67udM/fW7WbaQ3DO0knu+izKak/3
+T41c5uoXmQ81UNtAzRGzGchNVXMmWuTGOkg6U+0I2Td7K8yvUMWhAWPPpKLtVH9r
+AL5TzjYNR92izdKcz3AjRsI3CTjtpiVABGeX0TcjRSuZB7K9EK56HV+OFNS6I1NP
+jdD7FIShyGlqqZdUOkAUZYanbpgeT5N7QL6uuqcGpoTOkalu6kkCAwEAAaNQME4w
+HQYDVR0OBBYEFH5DM/m7oArf4O3peeKO0ZIEkrQPMB8GA1UdIwQYMBaAFH5DM/m7
+oArf4O3peeKO0ZIEkrQPMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AHO3NSvDE5jFvMehGGtS8BnFYdFKRIglDMc4niWSzhzOVYRH4WajxdtBWc5fx0ix
+NF/+hVKVhP6AIOQa+++sk+HIi7RvioPPbhjcsVlZe7cUEGrLSSveGouQyc+j0+m6
+JF84kszIl5GGNMTnx0XRPO+g8t6h5LWfnVydgZfpGRRg+WHewk1U2HlvTjIceb0N
+dcoJ8WKJAFWdcuE7VIm4w+vF/DYX/A2Oyzr2+QRhmYSv1cusgAeC1tvH4ap+J1Lg
+UnOu5Kh/FqPLLSwNVQp4Bu7b9QFfqK8Moj84bj88NqRGZgDyqzuTrFxn6FW7dmyA
+yttuAJAEAymk1mipd9+zp38=
+-----END CERTIFICATE-----
+"""
+
+    # From build/target/product/security/testkey.x509.pem.
+    VERITY_CERTIFICATE2 = """-----BEGIN CERTIFICATE-----
+MIIEqDCCA5CgAwIBAgIJAJNurL4H8gHfMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4g
+VmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UE
+AxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0wODAyMjkwMTMzNDZaFw0zNTA3MTcwMTMzNDZaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4G
+A1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASAwDQYJKoZI
+hvcNAQEBBQADggENADCCAQgCggEBANaTGQTexgskse3HYuDZ2CU+Ps1s6x3i/waM
+qOi8qM1r03hupwqnbOYOuw+ZNVn/2T53qUPn6D1LZLjk/qLT5lbx4meoG7+yMLV4
+wgRDvkxyGLhG9SEVhvA4oU6Jwr44f46+z4/Kw9oe4zDJ6pPQp8PcSvNQIg1QCAcy
+4ICXF+5qBTNZ5qaU7Cyz8oSgpGbIepTYOzEJOmc3Li9kEsBubULxWBjf/gOBzAzU
+RNps3cO4JFgZSAGzJWQTT7/emMkod0jb9WdqVA2BVMi7yge54kdVMxHEa5r3b97s
+zI5p58ii0I54JiCUP5lyfTwE/nKZHZnfm644oLIXf6MdW2r+6R8CAQOjgfwwgfkw
+HQYDVR0OBBYEFEhZAFY9JyxGrhGGBaR0GawJyowRMIHJBgNVHSMEgcEwgb6AFEhZ
+AFY9JyxGrhGGBaR0GawJyowRoYGapIGXMIGUMQswCQYDVQQGEwJVUzETMBEGA1UE
+CBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMH
+QW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAG
+CSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbYIJAJNurL4H8gHfMAwGA1Ud
+EwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAHqvlozrUMRBBVEY0NqrrwFbinZa
+J6cVosK0TyIUFf/azgMJWr+kLfcHCHJsIGnlw27drgQAvilFLAhLwn62oX6snb4Y
+LCBOsVMR9FXYJLZW2+TcIkCRLXWG/oiVHQGo/rWuWkJgU134NDEFJCJGjDbiLCpe
++ZTWHdcwauTJ9pUbo8EvHRkU3cYfGmLaLfgn9gP+pWA7LFQNvXwBnDa6sppCccEX
+31I828XzgXpJ4O+mDL1/dBd+ek8ZPUP0IgdyZm5MTYPhvVqGCHzzTy3sIeJFymwr
+sBbmg2OAUNLEMO6nwmocSdN2ClirfxqCzJOLSDE4QyS9BAH6EhY6UFcOaE0=
+-----END CERTIFICATE-----
+"""
+
+    input_file = tempfile.NamedTemporaryFile(
+        delete=False, suffix='.zip', dir=self.tempdir)
+    with zipfile.ZipFile(input_file.name, 'w') as input_zip:
+      input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE1)
+
+    # Test with the first certificate.
+    cert_file = tempfile.NamedTemporaryFile(
+        delete=False, suffix='.x509.pem', dir=self.tempdir)
+    cert_file.write(VERITY_CERTIFICATE1)
+    cert_file.close()
+
+    output_file = tempfile.NamedTemporaryFile(
+        delete=False, suffix='.zip', dir=self.tempdir)
+    with zipfile.ZipFile(input_file.name, 'r') as input_zip, \
+         zipfile.ZipFile(output_file.name, 'w') as output_zip:
+      ReplaceVerityKeyId(input_zip, output_zip, cert_file.name)
+
+    with zipfile.ZipFile(output_file.name) as output_zip:
+      self.assertEqual(BOOT_CMDLINE1, output_zip.read('BOOT/cmdline'))
+
+    # Test with the second certificate.
+    with open(cert_file.name, 'w') as cert_file_fp:
+      cert_file_fp.write(VERITY_CERTIFICATE2)
+
+    with zipfile.ZipFile(input_file.name, 'r') as input_zip, \
+         zipfile.ZipFile(output_file.name, 'w') as output_zip:
+      ReplaceVerityKeyId(input_zip, output_zip, cert_file.name)
+
+    with zipfile.ZipFile(output_file.name) as output_zip:
+      self.assertEqual(BOOT_CMDLINE2, output_zip.read('BOOT/cmdline'))
+
+  def test_ReplaceVerityKeyId_no_veritykeyid(self):
+    BOOT_CMDLINE = (
+        "console=ttyHSL0,115200,n8 androidboot.hardware=bullhead boot_cpus=0-5 "
+        "lpm_levels.sleep_disabled=1 msm_poweroff.download_mode=0 "
+        "loop.max_part=7\n")
+
+    input_file = tempfile.NamedTemporaryFile(
+        delete=False, suffix='.zip', dir=self.tempdir)
+    with zipfile.ZipFile(input_file.name, 'w') as input_zip:
+      input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE)
+
+    output_file = tempfile.NamedTemporaryFile(
+        delete=False, suffix='.zip', dir=self.tempdir)
+    with zipfile.ZipFile(input_file.name, 'r') as input_zip, \
+         zipfile.ZipFile(output_file.name, 'w') as output_zip:
+      ReplaceVerityKeyId(input_zip, output_zip, None)
+
+    with zipfile.ZipFile(output_file.name) as output_zip:
+      self.assertEqual(BOOT_CMDLINE, output_zip.read('BOOT/cmdline'))
diff --git a/tools/warn.py b/tools/warn.py
index 62feac3..f42fb96 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -1010,12 +1010,7 @@
      'severity': Severity.HIGH,
      'description':
          'Java: Checks for unguarded accesses to fields and methods with @GuardedBy annotations',
-     'patterns': [r".*: warning: \[GuardedByChecker\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Invalid @GuardedBy expression',
-     'patterns': [r".*: warning: \[GuardedByValidator\] .+"]},
+     'patterns': [r".*: warning: \[GuardedBy\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':