Merge "Make mksquashfs generate a fs_config-friendly root entry"
diff --git a/Changes.md b/Changes.md
index 3e48bad..37bbad0 100644
--- a/Changes.md
+++ b/Changes.md
@@ -92,6 +92,11 @@
 attribute to the root element `<manifest>`. If `PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE`
 is 26 or 27, you can add `"target-level"="1"` to your device manifest instead.
 
+### Stop using USE_CLANG_PLATFORM_BUILD {#USE_CLANG_PLATFORM_BUILD}
+
+Clang is the default and only supported Android compiler, so there is no reason
+for this option to exist.
+
 ### Other envsetup.sh variables  {#other_envsetup_variables}
 
 * ANDROID_TOOLCHAIN
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 7e23ee0..5ab64b3 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -456,6 +456,13 @@
 $(call add-clean-step, rm -rf $(HOST_OUT_COMMON_INTERMEDIATES)/*/*_intermediates/with-local/)
 $(call add-clean-step, rm -rf $(HOST_OUT_COMMON_INTERMEDIATES)/*/*_intermediates/no-local/)
 
+# Remove legacy VINTF metadata files
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/manifest.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/vendor/manifest.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/manifest.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/vendor/compatibility_matrix.xml)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/compatibility_matrix.xml)
+
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
 # ************************************************
diff --git a/OWNERS b/OWNERS
index 89b446a..7a59f70 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,2 +1,3 @@
 ccross@android.com
 dwillemsen@google.com
+nanzhang@google.com
diff --git a/core/Makefile b/core/Makefile
index 9a582c3..136690f 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -4,15 +4,6 @@
 # intermedites-dir-for
 LOCAL_PATH := $(BUILD_SYSTEM)
 
-# Pick a reasonable string to use to identify files.
-ifneq (,$(filter eng.%,$(BUILD_NUMBER)))
-  # BUILD_NUMBER has a timestamp in it, which means that
-  # it will change every time.  Pick a stable value.
-  FILE_NAME_TAG := eng.$(USER)
-else
-  FILE_NAME_TAG := $(BUILD_NUMBER)
-endif
-
 # -----------------------------------------------------------------
 # Define rules to copy PRODUCT_COPY_FILES defined by the product.
 # PRODUCT_COPY_FILES contains words like <source file>:<dest file>[:<owner>].
@@ -123,6 +114,24 @@
 endif
 
 # -----------------------------------------------------------------
+# FINAL_VENDOR_DEFAULT_PROPERTIES will be installed in vendor/default.prop if
+# property_overrides_split_enabled is true. Otherwise it will be installed in
+# ROOT/default.prop.
+ifdef BOARD_VNDK_VERSION
+  ifeq ($(BOARD_VNDK_VERSION),current)
+    FINAL_VENDOR_DEFAULT_PROPERTIES := ro.vndk.version=$(PLATFORM_VNDK_VERSION)
+  else
+    FINAL_VENDOR_DEFAULT_PROPERTIES := ro.vndk.version=$(BOARD_VNDK_VERSION)
+  endif
+else
+  FINAL_VENDOR_DEFAULT_PROPERTIES :=
+endif
+FINAL_VENDOR_DEFAULT_PROPERTIES += \
+    $(call collapse-pairs, $(PRODUCT_DEFAULT_PROPERTY_OVERRIDES))
+FINAL_VENDOR_DEFAULT_PROPERTIES := $(call uniq-pairs-by-first-component, \
+    $(FINAL_VENDOR_DEFAULT_PROPERTIES),=)
+
+# -----------------------------------------------------------------
 # prop.default
 ifdef property_overrides_split_enabled
 INSTALLED_DEFAULT_PROP_TARGET := $(TARGET_OUT)/etc/prop.default
@@ -139,7 +148,7 @@
     $(call collapse-pairs, $(PRODUCT_SYSTEM_DEFAULT_PROPERTIES))
 ifndef property_overrides_split_enabled
   FINAL_DEFAULT_PROPERTIES += \
-      $(call collapse-pairs, $(PRODUCT_DEFAULT_PROPERTY_OVERRIDES))
+      $(call collapse-pairs, $(FINAL_VENDOR_DEFAULT_PROPERTIES))
 endif
 FINAL_DEFAULT_PROPERTIES := $(call uniq-pairs-by-first-component, \
     $(FINAL_DEFAULT_PROPERTIES),=)
@@ -174,20 +183,6 @@
 INSTALLED_VENDOR_DEFAULT_PROP_TARGET := $(TARGET_OUT_VENDOR)/default.prop
 ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_VENDOR_DEFAULT_PROP_TARGET)
 
-ifdef BOARD_VNDK_VERSION
-  ifeq ($(BOARD_VNDK_VERSION),current)
-    FINAL_VENDOR_DEFAULT_PROPERTIES := ro.vndk.version=$(PLATFORM_VNDK_VERSION)
-  else
-    FINAL_VENDOR_DEFAULT_PROPERTIES := ro.vndk.version=$(BOARD_VNDK_VERSION)
-  endif
-else
-  FINAL_VENDOR_DEFAULT_PROPERTIES :=
-endif
-FINAL_VENDOR_DEFAULT_PROPERTIES += \
-    $(call collapse-pairs, $(PRODUCT_DEFAULT_PROPERTY_OVERRIDES))
-FINAL_VENDOR_DEFAULT_PROPERTIES := $(call uniq-pairs-by-first-component, \
-    $(FINAL_VENDOR_DEFAULT_PROPERTIES),=)
-
 $(INSTALLED_VENDOR_DEFAULT_PROP_TARGET): $(INSTALLED_DEFAULT_PROP_TARGET)
 	@echo Target buildinfo: $@
 	@mkdir -p $(dir $@)
@@ -234,28 +229,37 @@
 
 # The string used to uniquely identify the combined build and product; used by the OTA server.
 ifeq (,$(strip $(BUILD_FINGERPRINT)))
-  ifneq ($(filter eng.%,$(BUILD_NUMBER)),)
-    BF_BUILD_NUMBER := $(USER)$(shell $(DATE) +%m%d%H%M)
+  ifeq ($(strip $(HAS_BUILD_NUMBER)),false)
+    BF_BUILD_NUMBER := $(USER)$$($(DATE_FROM_FILE) +%m%d%H%M)
   else
-    BF_BUILD_NUMBER := $(BUILD_NUMBER)
+    BF_BUILD_NUMBER := $(file <$(BUILD_NUMBER_FILE))
   endif
   BUILD_FINGERPRINT := $(PRODUCT_BRAND)/$(TARGET_PRODUCT)/$(TARGET_DEVICE):$(PLATFORM_VERSION)/$(BUILD_ID)/$(BF_BUILD_NUMBER):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
 endif
-ifneq ($(words $(BUILD_FINGERPRINT)),1)
-  $(error BUILD_FINGERPRINT cannot contain spaces: "$(BUILD_FINGERPRINT)")
-endif
+# unset it for safety.
+BF_BUILD_NUMBER :=
 
-$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_FINGERPRINT) > $(PRODUCT_OUT)/build_fingerprint.txt)
-BUILD_FINGERPRINT_FROM_FILE := $$(cat $(PRODUCT_OUT)/build_fingerprint.txt)
+BUILD_FINGERPRINT_FILE := $(PRODUCT_OUT)/build_fingerprint.txt
+ifneq (,$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_FINGERPRINT) >$(BUILD_FINGERPRINT_FILE) && grep " " $(BUILD_FINGERPRINT_FILE)))
+  $(error BUILD_FINGERPRINT cannot contain spaces: "$(file <$(BUILD_FINGERPRINT_FILE))")
+endif
+BUILD_FINGERPRINT_FROM_FILE := $$(cat $(BUILD_FINGERPRINT_FILE))
+# unset it for safety.
+BUILD_FINGERPRINT :=
 
 # The string used to uniquely identify the system build; used by the OTA server.
 # This purposefully excludes any product-specific variables.
 ifeq (,$(strip $(BUILD_THUMBPRINT)))
-  BUILD_THUMBPRINT := $(PLATFORM_VERSION)/$(BUILD_ID)/$(BUILD_NUMBER):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
+  BUILD_THUMBPRINT := $(PLATFORM_VERSION)/$(BUILD_ID)/$(BUILD_NUMBER_FROM_FILE):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
 endif
-ifneq ($(words $(BUILD_THUMBPRINT)),1)
-  $(error BUILD_THUMBPRINT cannot contain spaces: "$(BUILD_THUMBPRINT)")
+
+BUILD_THUMBPRINT_FILE := $(PRODUCT_OUT)/build_thumbprint.txt
+ifneq (,$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_THUMBPRINT) >$(BUILD_THUMBPRINT_FILE) && grep " " $(BUILD_THUMBPRINT_FILE)))
+  $(error BUILD_THUMBPRINT cannot contain spaces: "$(file <$(BUILD_THUMBPRINT_FILE))")
 endif
+BUILD_THUMBPRINT_FROM_FILE := $$(cat $(BUILD_THUMBPRINT_FILE))
+# unset it for safety.
+BUILD_THUMBPRINT :=
 
 KNOWN_OEM_THUMBPRINT_PROPERTIES := \
     ro.product.brand \
@@ -343,7 +347,7 @@
 			PLATFORM_VERSION_ALL_CODENAMES="$(PLATFORM_VERSION_ALL_CODENAMES)" \
 			BUILD_VERSION_TAGS="$(BUILD_VERSION_TAGS)" \
 			BUILD_FINGERPRINT="$(BUILD_FINGERPRINT_FROM_FILE)" \
-			$(if $(OEM_THUMBPRINT_PROPERTIES),BUILD_THUMBPRINT="$(BUILD_THUMBPRINT)") \
+			$(if $(OEM_THUMBPRINT_PROPERTIES),BUILD_THUMBPRINT="$(BUILD_THUMBPRINT_FROM_FILE)") \
 			TARGET_CPU_ABI_LIST="$(TARGET_CPU_ABI_LIST)" \
 			TARGET_CPU_ABI_LIST_32_BIT="$(TARGET_CPU_ABI_LIST_32_BIT)" \
 			TARGET_CPU_ABI_LIST_64_BIT="$(TARGET_CPU_ABI_LIST_64_BIT)" \
@@ -406,6 +410,9 @@
 	$(hide) echo ro.vendor.build.date=`$(DATE_FROM_FILE)`>>$@
 	$(hide) echo ro.vendor.build.date.utc=`$(DATE_FROM_FILE) +%s`>>$@
 	$(hide) echo ro.vendor.build.fingerprint="$(BUILD_FINGERPRINT_FROM_FILE)">>$@
+	$(hide) echo ro.vendor.product.cpu.abilist="$(TARGET_CPU_ABI_LIST)">>$@
+	$(hide) echo ro.vendor.product.cpu.abilist32="$(TARGET_CPU_ABI_LIST_32_BIT)">>$@
+	$(hide) echo ro.vendor.product.cpu.abilist64="$(TARGET_CPU_ABI_LIST_64_BIT)">>$@
 	$(hide) TARGET_DEVICE="$(TARGET_DEVICE)" \
 			PRODUCT_NAME="$(TARGET_PRODUCT)" \
 			PRODUCT_BRAND="$(PRODUCT_BRAND)" \
@@ -423,6 +430,32 @@
 	$(hide) build/make/tools/post_process_props.py $@
 endif  # property_overrides_split_enabled
 
+# -----------------------------------------------------------------
+# product build.prop
+INSTALLED_PRODUCT_BUILD_PROP_TARGET := $(TARGET_OUT_PRODUCT)/build.prop
+ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_PRODUCT_BUILD_PROP_TARGET)
+
+FINAL_PRODUCT_PROPERTIES += \
+    $(call collapse-pairs, $(PRODUCT_PRODUCT_PROPERTIES))
+FINAL_PRODUCT_PROPERTIES := $(call uniq-pairs-by-first-component, \
+    $(FINAL_PRODUCT_PROPERTIES),=)
+
+$(INSTALLED_PRODUCT_BUILD_PROP_TARGET):
+	@echo Target product buildinfo: $@
+	@mkdir -p $(dir $@)
+	$(hide) echo > $@
+ifdef BOARD_USES_PRODUCTIMAGE
+	$(hide) echo ro.product.build.date=`$(DATE_FROM_FILE)`>>$@
+	$(hide) echo ro.product.build.date.utc=`$(DATE_FROM_FILE) +%s`>>$@
+	$(hide) echo ro.product.build.fingerprint="$(BUILD_FINGERPRINT_FROM_FILE)">>$@
+endif  # BOARD_USES_PRODUCTIMAGE
+	$(hide) echo "#" >> $@; \
+	        echo "# ADDITIONAL PRODUCT PROPERTIES" >> $@; \
+	        echo "#" >> $@;
+	$(hide) $(foreach line,$(FINAL_PRODUCT_PROPERTIES), \
+		echo "$(line)" >> $@;)
+	$(hide) build/make/tools/post_process_props.py $@
+
 # ----------------------------------------------------------------
 
 # -----------------------------------------------------------------
@@ -547,6 +580,16 @@
 $(call dist-for-goals,droidcore,$(WALL_WERROR))
 
 # -----------------------------------------------------------------
+# Modules missing profile files
+PGO_PROFILE_MISSING := $(PRODUCT_OUT)/pgo_profile_file_missing.txt
+$(PGO_PROFILE_MISSING):
+	@rm -f $@
+	echo "# Modules missing PGO profile files" >> $@
+	for m in $(SOONG_MODULES_MISSING_PGO_PROFILE_FILE); do echo $$m >> $@; done
+
+$(call dist-for-goals,droidcore,$(PGO_PROFILE_MISSING))
+
+# -----------------------------------------------------------------
 # The dev key is used to sign this package, and as the key required
 # for future OTA packages installed by this system.  Actual product
 # deliverables will be re-signed by hand.  We expect this file to
@@ -1013,7 +1056,7 @@
 ifneq (true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED))
   INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG := -s
 endif
-ifneq ($(filter $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE) $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),squashfs),)
+ifneq ($(filter $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE) $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE) $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),squashfs),)
 INTERNAL_USERIMAGES_DEPS += $(MAKE_SQUASHFS) $(MKSQUASHFSUSERIMG) $(IMG2SIMG)
 endif
 
@@ -1044,6 +1087,7 @@
 $(if $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "system_fs_type=$(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_EXTFS_INODE_COUNT),$(hide) echo "system_extfs_inode_count=$(BOARD_SYSTEMIMAGE_EXTFS_INODE_COUNT)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_JOURNAL_SIZE),$(hide) echo "system_journal_size=$(BOARD_SYSTEMIMAGE_JOURNAL_SIZE)" >> $(1))
+$(if $(BOARD_EXT4_SHARE_DUP_BLOCKS),$(hide) echo "ext4_share_dup_blocks=$(BOARD_EXT4_SHARE_DUP_BLOCKS)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "system_squashfs_compressor=$(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "system_squashfs_compressor_opt=$(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "system_squashfs_block_size=$(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
@@ -1065,6 +1109,15 @@
 $(if $(BOARD_VENDORIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "vendor_squashfs_block_size=$(BOARD_VENDORIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
 $(if $(BOARD_VENDORIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "vendor_squashfs_disable_4k_align=$(BOARD_VENDORIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH),$(hide) echo "vendor_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "product_fs_type=$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_EXTFS_INODE_COUNT),$(hide) echo "product_extfs_inode_count=$(BOARD_PRODUCTIMAGE_EXTFS_INODE_COUNT)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_PARTITION_SIZE),$(hide) echo "product_size=$(BOARD_PRODUCTIMAGE_PARTITION_SIZE)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_JOURNAL_SIZE),$(hide) echo "product_journal_size=$(BOARD_PRODUCTIMAGE_JOURNAL_SIZE)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "product_squashfs_compressor=$(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "product_squashfs_compressor_opt=$(BOARD_PRODUCTIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "product_squashfs_block_size=$(BOARD_PRODUCTIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
+$(if $(BOARD_PRODUCTIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "product_squashfs_disable_4k_align=$(BOARD_PRODUCTIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH),$(hide) echo "product_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH)" >> $(1))
 $(if $(BOARD_OEMIMAGE_PARTITION_SIZE),$(hide) echo "oem_size=$(BOARD_OEMIMAGE_PARTITION_SIZE)" >> $(1))
 $(if $(BOARD_OEMIMAGE_JOURNAL_SIZE),$(hide) echo "oem_journal_size=$(BOARD_OEMIMAGE_JOURNAL_SIZE)" >> $(1))
 $(if $(BOARD_OEMIMAGE_EXTFS_INODE_COUNT),$(hide) echo "oem_extfs_inode_count=$(BOARD_OEMIMAGE_EXTFS_INODE_COUNT)" >> $(1))
@@ -1079,6 +1132,7 @@
 $(if $(filter eng, $(TARGET_BUILD_VARIANT)),$(hide) echo "verity_disable=true" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_VERITY_PARTITION),$(hide) echo "system_verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_VERITY_PARTITION)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_VERITY_PARTITION),$(hide) echo "vendor_verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_VERITY_PARTITION)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_VERITY_PARTITION),$(hide) echo "product_verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_VERITY_PARTITION)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_key=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_subkey=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_SUBKEY)" >> $(1))
@@ -1099,6 +1153,13 @@
         $(hide) echo "avb_vendor_key_path=$(BOARD_AVB_VENDOR_KEY_PATH)" >> $(1)
         $(hide) echo "avb_vendor_algorithm=$(BOARD_AVB_VENDOR_ALGORITHM)" >> $(1)
         $(hide) echo "avb_vendor_rollback_index_location=$(BOARD_AVB_VENDOR_ROLLBACK_INDEX_LOCATION)" >> $(1)))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_product_hashtree_enable=$(BOARD_AVB_ENABLE)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_product_add_hashtree_footer_args=$(BOARD_AVB_PRODUCT_ADD_HASHTREE_FOOTER_ARGS)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),\
+    $(if $(BOARD_AVB_PRODUCT_KEY_PATH),\
+        $(hide) echo "avb_product_key_path=$(BOARD_AVB_PRODUCT_KEY_PATH)" >> $(1)
+        $(hide) echo "avb_product_algorithm=$(BOARD_AVB_PRODUCT_ALGORITHM)" >> $(1)
+        $(hide) echo "avb_product_rollback_index_location=$(BOARD_AVB_PRODUCT_ROLLBACK_INDEX_LOCATION)" >> $(1)))
 $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
     $(hide) echo "recovery_as_boot=true" >> $(1))
 $(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)),\
@@ -1137,9 +1198,10 @@
 recovery_sepolicy := \
     $(TARGET_RECOVERY_ROOT_OUT)/sepolicy \
     $(TARGET_RECOVERY_ROOT_OUT)/plat_file_contexts \
-    $(TARGET_RECOVERY_ROOT_OUT)/nonplat_file_contexts \
+    $(TARGET_RECOVERY_ROOT_OUT)/vendor_file_contexts \
     $(TARGET_RECOVERY_ROOT_OUT)/plat_property_contexts \
-    $(TARGET_RECOVERY_ROOT_OUT)/nonplat_property_contexts
+    $(TARGET_RECOVERY_ROOT_OUT)/vendor_property_contexts
+
 # Passed into rsync from non-recovery root to recovery root, to avoid overwriting recovery-specific
 # SELinux files
 IGNORE_RECOVERY_SEPOLICY := $(patsubst $(TARGET_RECOVERY_OUT)/%,--exclude=/%,$(recovery_sepolicy))
@@ -1150,6 +1212,9 @@
 ifdef property_overrides_split_enabled
 recovery_build_props += $(INSTALLED_VENDOR_BUILD_PROP_TARGET)
 endif
+ifdef BOARD_USES_PRODUCTIMAGE
+recovery_build_props += $(INSTALLED_PRODUCT_BUILD_PROP_TARGET)
+endif
 recovery_resources_common := $(call include-path-for, recovery)/res
 
 # Set recovery_density to the density bucket of the device.
@@ -1442,10 +1507,26 @@
 endef
 endif
 
+# Create symlink /system/product to /product if necessary.
+ifdef BOARD_USES_PRODUCTIMAGE
+define create-system-product-symlink
+$(hide) if [ -d $(TARGET_OUT)/product ] && [ ! -h $(TARGET_OUT)/product ]; then \
+  echo 'Non-symlink $(TARGET_OUT)/product detected!' 1>&2; \
+  echo 'You cannot install files to $(TARGET_OUT)/product while building a separate product.img!' 1>&2; \
+  exit 1; \
+fi
+$(hide) ln -sf /product $(TARGET_OUT)/product
+endef
+else
+define create-system-product-symlink
+endef
+endif
+
 # $(1): output file
 define build-systemimage-target
   @echo "Target system fs image: $(1)"
   $(call create-system-vendor-symlink)
+  $(call create-system-product-symlink)
   @mkdir -p $(dir $(1)) $(systemimage_intermediates) && rm -rf $(systemimage_intermediates)/system_image_info.txt
   $(call generate-userimage-prop-dictionary, $(systemimage_intermediates)/system_image_info.txt, \
       skip_fsck=true)
@@ -1520,6 +1601,7 @@
 define build-systemtarball-target
   $(call pretty,"Target system fs tarball: $(INSTALLED_SYSTEMTARBALL_TARGET)")
   $(call create-system-vendor-symlink)
+  $(call create-system-product-symlink)
   $(MKTARBALL) $(FS_GET_STATS) \
     $(PRODUCT_OUT) system $(PRIVATE_SYSTEM_TAR) \
     $(INSTALLED_SYSTEMTARBALL_TARGET) $(TARGET_OUT)
@@ -1600,6 +1682,10 @@
 	$(hide) cd $(dir $@) && zip -qryX $(notdir $@) \
 		$(TARGET_COPY_OUT_VENDOR)
 endif
+ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+	$(hide) cd $(dir $@) && zip -qryX $(notdir $@) \
+		$(TARGET_COPY_OUT_PRODUCT)
+endif
 ifneq ($(PDK_PLATFORM_JAVA_ZIP_CONTENTS),)
 	$(hide) cd $(OUT_DIR) && zip -qryX $(patsubst $(OUT_DIR)/%,%,$@) $(PDK_PLATFORM_JAVA_ZIP_CONTENTS)
 endif
@@ -1908,6 +1994,55 @@
 endif
 
 # -----------------------------------------------------------------
+# product partition image
+ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+INTERNAL_PRODUCTIMAGE_FILES := \
+    $(filter $(TARGET_OUT_PRODUCT)/%,\
+      $(ALL_DEFAULT_INSTALLED_MODULES)\
+      $(ALL_PDK_FUSION_FILES))
+
+# platform.zip depends on $(INTERNAL_PRODUCTIMAGE_FILES).
+$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_PRODUCTIMAGE_FILES)
+
+INSTALLED_FILES_FILE_PRODUCT := $(PRODUCT_OUT)/installed-files-product.txt
+$(INSTALLED_FILES_FILE_PRODUCT) : $(INTERNAL_PRODUCTIMAGE_FILES) $(FILESLIST)
+	@echo Installed file list: $@
+	@mkdir -p $(dir $@)
+	@rm -f $@
+	$(hide) $(FILESLIST) $(TARGET_OUT_PRODUCT) > $(@:.txt=.json)
+	$(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+
+productimage_intermediates := \
+    $(call intermediates-dir-for,PACKAGING,product)
+BUILT_PRODUCTIMAGE_TARGET := $(PRODUCT_OUT)/product.img
+define build-productimage-target
+  $(call pretty,"Target product fs image: $(INSTALLED_PRODUCTIMAGE_TARGET)")
+  @mkdir -p $(TARGET_OUT_PRODUCT)
+  @mkdir -p $(productimage_intermediates) && rm -rf $(productimage_intermediates)/product_image_info.txt
+  $(call generate-userimage-prop-dictionary, $(productimage_intermediates)/product_image_info.txt, skip_fsck=true)
+  $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+      ./build/tools/releasetools/build_image.py \
+      $(TARGET_OUT_PRODUCT) $(productimage_intermediates)/product_image_info.txt $(INSTALLED_PRODUCTIMAGE_TARGET) $(TARGET_OUT)
+  $(hide) $(call assert-max-image-size,$(INSTALLED_PRODUCTIMAGE_TARGET),$(BOARD_PRODUCTIMAGE_PARTITION_SIZE))
+endef
+
+# We just build this directly to the install location.
+INSTALLED_PRODUCTIMAGE_TARGET := $(BUILT_PRODUCTIMAGE_TARGET)
+$(INSTALLED_PRODUCTIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_PRODUCTIMAGE_FILES) $(INSTALLED_FILES_FILE_PRODUCT) $(BUILD_IMAGE_SRCS)
+	$(build-productimage-target)
+
+.PHONY: productimage-nodeps pnod
+productimage-nodeps pnod: | $(INTERNAL_USERIMAGES_DEPS)
+	$(build-productimage-target)
+
+sync: $(INTERNAL_PRODUCTIMAGE_FILES)
+
+else ifdef BOARD_PREBUILT_PRODUCTIMAGE
+INSTALLED_PRODUCTIMAGE_TARGET := $(PRODUCT_OUT)/product.img
+$(eval $(call copy-one-file,$(BOARD_PREBUILT_PRODUCTIMAGE),$(INSTALLED_PRODUCTIMAGE_TARGET)))
+endif
+
+# -----------------------------------------------------------------
 # dtbo image
 ifdef BOARD_PREBUILT_DTBOIMAGE
 INSTALLED_DTBOIMAGE_TARGET := $(PRODUCT_OUT)/dtbo.img
@@ -1927,17 +2062,6 @@
 
 endif
 
-# Convert to lower case without requiring a shell, which isn't cacheable.
-to-lower = $(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,\
-$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,\
-$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,\
-$(subst X,x,$(subst Y,y,$(subst Z,z,$1))))))))))))))))))))))))))
-# Convert to upper case without requiring a shell, which isn't cacheable.
-to-upper=$(subst a,A,$(subst b,B,$(subst c,C,$(subst d,D,$(subst e,E,$(subst f,F,$(subst g,G,\
-$(subst h,H,$(subst i,I,$(subst j,J,$(subst k,K,$(subst l,L,$(subst m,M,$(subst n,N,$(subst o,O,\
-$(subst p,P,$(subst q,Q,$(subst r,R,$(subst s,S,$(subst t,T,$(subst u,U,$(subst v,V,$(subst w,W,\
-$(subst x,X,$(subst y,Y,$(subst z,Z,$1))))))))))))))))))))))))))
-
 # -----------------------------------------------------------------
 # vbmeta image
 ifeq ($(BOARD_AVB_ENABLE),true)
@@ -1961,6 +2085,7 @@
 SYSTEM_FOOTER_ARGS := BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS
 VENDOR_FOOTER_ARGS := BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS
 RECOVERY_FOOTER_ARGS := BOARD_AVB_RECOVERY_ADD_HASH_FOOTER_ARGS
+PRODUCT_FOOTER_ARGS := BOARD_AVB_PRODUCT_ADD_HASHTREE_FOOTER_ARGS
 
 # Check and set required build variables for a chain partition.
 # $(1): the partition to enable AVB chain, e.g., BOOT or SYSTEM.
@@ -2013,6 +2138,15 @@
 endif
 endif
 
+ifdef INSTALLED_PRODUCTIMAGE_TARGET
+ifdef BOARD_AVB_PRODUCT_KEY_PATH
+$(eval $(call check-and-set-avb-chain-args,PRODUCT))
+else
+INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
+    --include_descriptors_from_image $(INSTALLED_PRODUCTIMAGE_TARGET)
+endif
+endif
+
 ifdef INSTALLED_DTBOIMAGE_TARGET
 ifdef BOARD_AVB_DTBO_KEY_PATH
 $(eval $(call check-and-set-avb-chain-args,DTBO))
@@ -2066,6 +2200,9 @@
   $(if $(BOARD_AVB_VENDOR_KEY_PATH),\
     $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_VENDOR_KEY_PATH) \
       --output $(1)/vendor.avbpubkey)
+  $(if $(BOARD_AVB_PRODUCT_KEY_PATH),\
+    $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_PRODUCT_KEY_PATH) \
+      --output $(1)/product.avbpubkey)
   $(if $(BOARD_AVB_DTBO_KEY_PATH),\
     $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_DTBO_KEY_PATH) \
       --output $(1)/dtbo.avbpubkey)
@@ -2092,6 +2229,7 @@
 		$(INSTALLED_BOOTIMAGE_TARGET) \
 		$(INSTALLED_SYSTEMIMAGE) \
 		$(INSTALLED_VENDORIMAGE_TARGET) \
+		$(INSTALLED_PRODUCTIMAGE_TARGET) \
 		$(INSTALLED_DTBOIMAGE_TARGET) \
 		$(INSTALLED_RECOVERYIMAGE_TARGET) \
 		$(BOARD_AVB_KEY_PATH)
@@ -2232,18 +2370,18 @@
   system/extras/verity/build_verity_metadata.py \
   system/extras/ext4_utils/mke2fs.conf \
   external/avb/test/data/testkey_rsa4096.pem \
-  $(shell find system/update_engine/scripts -name \*.pyc -prune -o -type f -print | sort) \
-  $(shell find build/target/product/security -type f -name \*.x509.pem -o -name \*.pk8 -o \
-      -name verity_key | sort) \
-  $(shell find device $(wildcard vendor) -type f -name \*.pk8 -o -name verifiedboot\* -o \
-      -name \*.x509.pem -o -name oem\*.prop | sort)
+  $(sort $(shell find system/update_engine/scripts -name \*.pyc -prune -o -type f -print)) \
+  $(sort $(shell find build/target/product/security -type f -name \*.x509.pem -o -name \*.pk8 -o \
+      -name verity_key)) \
+  $(sort $(shell find device $(wildcard vendor) -type f -name \*.pk8 -o -name verifiedboot\* -o \
+      -name \*.x509.pem -o -name oem\*.prop))
 
 OTATOOLS_RELEASETOOLS := \
-  $(shell find build/make/tools/releasetools -name \*.pyc -prune -o -type f | sort)
+  $(sort $(shell find build/make/tools/releasetools -name \*.pyc -prune -o -type f))
 
 ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
 OTATOOLS_DEPS += \
-  $(shell find external/vboot_reference/tests/devkeys -type f | sort)
+  $(sort $(shell find external/vboot_reference/tests/devkeys -type f))
 endif
 
 $(BUILT_OTATOOLS_PACKAGE): $(OTATOOLS) $(OTATOOLS_DEPS) $(OTATOOLS_RELEASETOOLS) | $(ACP)
@@ -2355,6 +2493,7 @@
 		$(INSTALLED_USERDATAIMAGE_TARGET) \
 		$(INSTALLED_CACHEIMAGE_TARGET) \
 		$(INSTALLED_VENDORIMAGE_TARGET) \
+		$(INSTALLED_PRODUCTIMAGE_TARGET) \
 		$(INSTALLED_VBMETAIMAGE_TARGET) \
 		$(INSTALLED_DTBOIMAGE_TARGET) \
 		$(INTERNAL_SYSTEMOTHERIMAGE_FILES) \
@@ -2363,6 +2502,7 @@
 		$(INSTALLED_2NDBOOTLOADER_TARGET) \
 		$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH) \
 		$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH) \
+		$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH) \
 		$(SELINUX_FC) \
 		$(APKCERTS_FILE) \
 		$(SOONG_ZIP) \
@@ -2370,11 +2510,12 @@
 		$(HOST_OUT_EXECUTABLES)/imgdiff \
 		$(HOST_OUT_EXECUTABLES)/bsdiff \
 		$(BUILD_IMAGE_SRCS) \
-		$(INSTALLED_VENDOR_MANIFEST) \
-		$(INSTALLED_VENDOR_MATRIX) \
+		$(BUILT_VENDOR_MANIFEST) \
+		$(BUILT_VENDOR_MATRIX) \
 		| $(ACP)
 	@echo "Package target files: $@"
 	$(call create-system-vendor-symlink)
+	$(call create-system-product-symlink)
 	$(hide) rm -rf $@ $@.list $(zip_root)
 	$(hide) mkdir -p $(dir $@) $(zip_root)
 ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
@@ -2440,6 +2581,11 @@
 	$(hide) $(call package_files-copy-root, \
 		$(TARGET_OUT_VENDOR),$(zip_root)/VENDOR)
 endif
+ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+	@# Contents of the product image
+	$(hide) $(call package_files-copy-root, \
+		$(TARGET_OUT_PRODUCT),$(zip_root)/PRODUCT)
+endif
 ifdef INSTALLED_SYSTEMOTHERIMAGE_TARGET
 	@# Contents of the system_other image
 	$(hide) $(call package_files-copy-root, \
@@ -2504,6 +2650,10 @@
 	$(hide) cp $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH) \
 	  $(zip_root)/META/$(notdir $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH))
 endif
+ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH),)
+	$(hide) cp $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH) \
+	  $(zip_root)/META/$(notdir $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_BASE_FS_PATH))
+endif
 ifneq ($(strip $(SANITIZE_TARGET)),)
 	# We need to create userdata.img with real data because the instrumented libraries are in userdata.img.
 	$(hide) echo "userdata_img_with_data=true" >> $(zip_root)/META/misc_info.txt
@@ -2587,6 +2737,10 @@
 	$(hide) mkdir -p $(zip_root)/IMAGES
 	$(hide) cp $(INSTALLED_VENDORIMAGE_TARGET) $(zip_root)/IMAGES/
 endif
+ifdef BOARD_PREBUILT_PRODUCTIMAGE
+	$(hide) mkdir -p $(zip_root)/IMAGES
+	$(hide) cp $(INSTALLED_PRODUCTIMAGE_TARGET) $(zip_root)/IMAGES/
+endif
 ifdef BOARD_PREBUILT_BOOTIMAGE
 	$(hide) mkdir -p $(zip_root)/IMAGES
 	$(hide) cp $(INSTALLED_BOOTIMAGE_TARGET) $(zip_root)/IMAGES/
@@ -2617,6 +2771,9 @@
 ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
 	$(hide) $(call fs_config,$(zip_root)/VENDOR,vendor/) > $(zip_root)/META/vendor_filesystem_config.txt
 endif
+ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+	$(hide) $(call fs_config,$(zip_root)/PRODUCT,product/) > $(zip_root)/META/product_filesystem_config.txt
+endif
 ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
 	$(hide) $(call fs_config,$(zip_root)/ROOT,) > $(zip_root)/META/root_filesystem_config.txt
 endif
@@ -2734,6 +2891,7 @@
 		$(INSTALLED_BOOTIMAGE_TARGET) \
 		$(INSTALLED_USERDATAIMAGE_TARGET) \
 		$(INSTALLED_VENDORIMAGE_TARGET) \
+		$(INSTALLED_PRODUCTIMAGE_TARGET) \
 		$(updater_dep)
 endif
 $(SYMBOLS_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,symbols)/filelist
@@ -2756,7 +2914,8 @@
 $(COVERAGE_ZIP): $(INSTALLED_SYSTEMIMAGE) \
 		$(INSTALLED_BOOTIMAGE_TARGET) \
 		$(INSTALLED_USERDATAIMAGE_TARGET) \
-		$(INSTALLED_VENDORIMAGE_TARGET)
+		$(INSTALLED_VENDORIMAGE_TARGET) \
+		$(INSTALLED_PRODUCTIMAGE_TARGET)
 endif
 $(COVERAGE_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,coverage)/filelist
 $(COVERAGE_ZIP): $(SOONG_ZIP)
@@ -2856,6 +3015,15 @@
 vendorimage: $(INSTALLED_QEMU_VENDORIMAGE)
 droidcore: $(INSTALLED_QEMU_VENDORIMAGE)
 endif
+ifeq ($(BOARD_USES_PRODUCTIMAGE),true)
+INSTALLED_QEMU_PRODUCTIMAGE := $(PRODUCT_OUT)/product-qemu.img
+$(INSTALLED_QEMU_PRODUCTIMAGE): $(INSTALLED_PRODUCTIMAGE_TARGET) $(MK_QEMU_IMAGE_SH) $(SGDISK_HOST)
+	@echo Create product-qemu.img
+	(export SGDISK=$(SGDISK_HOST); $(MK_QEMU_IMAGE_SH) ${PRODUCT_OUT}/product.img)
+
+productimage: $(INSTALLED_QEMU_PRODUCTIMAGE)
+droidcore: $(INSTALLED_QEMU_PRODUCTIMAGE)
+endif
 endif
 # -----------------------------------------------------------------
 # The emulator package
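
The core/Makefile hunks above thread a new product partition through property generation, image building, AVB signing, target-files packaging and the QEMU image path. As a rough sketch (not part of this change), a device opting into a dedicated product.img would set board variables along these lines in its BoardConfig.mk; every concrete value below is a placeholder:

    # Hypothetical BoardConfig.mk fragment exercising the new product.img support.
    BOARD_USES_PRODUCTIMAGE := true
    BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE := ext4
    # Placeholder size (256 MiB); checked by assert-max-image-size above.
    BOARD_PRODUCTIMAGE_PARTITION_SIZE := 268435456
    # Optional: chain-sign product.img with AVB instead of merging its
    # descriptors into vbmeta (see check-and-set-avb-chain-args above).
    BOARD_AVB_PRODUCT_KEY_PATH := external/avb/test/data/testkey_rsa4096.pem
    BOARD_AVB_PRODUCT_ALGORITHM := SHA256_RSA4096
    BOARD_AVB_PRODUCT_ROLLBACK_INDEX_LOCATION := 2

With those set, files installed under TARGET_OUT_PRODUCT land in product.img, and the pnod/productimage-nodeps goals added above rebuild just that image without its dependencies.
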
diff --git a/core/allowed_ndk_types.mk b/core/allowed_ndk_types.mk
index b5a85ca..b88b9e8 100644
--- a/core/allowed_ndk_types.mk
+++ b/core/allowed_ndk_types.mk
@@ -28,12 +28,6 @@
     else ifeq ($(LOCAL_NDK_STL_VARIANT),c++_static)
         my_ndk_stl_family := libc++
         my_ndk_stl_link_type := static
-    else ifeq ($(LOCAL_NDK_STL_VARIANT),stlport_shared)
-        my_ndk_stl_family := stlport
-        my_ndk_stl_link_type := shared
-    else ifeq ($(LOCAL_NDK_STL_VARIANT),stlport_static)
-        my_ndk_stl_family := stlport
-        my_ndk_stl_link_type := static
     else ifeq ($(LOCAL_NDK_STL_VARIANT),none)
         my_ndk_stl_family := none
         my_ndk_stl_link_type := none
@@ -66,8 +60,7 @@
         # already been checked for consistency, and if they don't they'll be
         # kept isolated by RTLD_LOCAL anyway.
         my_allowed_ndk_types += \
-            native:ndk:libc++:shared native:ndk:libc++:static \
-            native:ndk:stlport:shared native:ndk:stlport:static \
+            native:ndk:libc++:shared native:ndk:libc++:static
 
         # The "none" link type that used by static libraries is intentionally
         # omitted here. We should only be dealing with shared libraries in
@@ -79,7 +72,11 @@
     # Else we are a non-static library that uses a static STL, and are
     # incompatible with all other shared libraries that use an STL.
 else
-    my_allowed_ndk_types := native:ndk:none:none native:ndk:system:shared
+    my_allowed_ndk_types := \
+        native:ndk:none:none \
+        native:ndk:system:none \
+        native:ndk:system:shared \
+
     ifeq ($(LOCAL_MODULE_CLASS),APPS)
         # CTS is bad and it should feel bad: http://b/13249737
         my_warn_ndk_types += native:ndk:libc++:static
diff --git a/core/aux_config.mk b/core/aux_config.mk
index c40b8cc..6a5cd63 100644
--- a/core/aux_config.mk
+++ b/core/aux_config.mk
@@ -32,7 +32,7 @@
 
 # setup AUX globals
 AUX_SHLIB_SUFFIX := .so
-AUX_GLOBAL_ARFLAGS := crsPD
+AUX_GLOBAL_ARFLAGS := cqsD
 AUX_STATIC_LIB_SUFFIX := .a
 
 # Load ever-lasting "indexed" version of AUX variant environment; it is treated as READ-ONLY from this
@@ -102,10 +102,10 @@
 $(eval AUX_OS_VARIANT_LIST_$(AUX_OS_$(1)):=) \
 $(call aux-variant-setup-paths,$(_name)) \
 $(eval AUX_ALL_VARIANTS += $(_name)) \
-$(eval AUX_ALL_OSES := $(filterout $(AUX_OS_$(_name)),$(AUX_ALL_OSES)) $(AUX_OS_$(_name))) \
-$(eval AUX_ALL_CPUS := $(filterout $(AUX_CPU_$(_name)),$(AUX_ALL_CPUS)) $(AUX_CPU_$(_name))) \
-$(eval AUX_ALL_ARCHS := $(filterout $(AUX_ARCH_$(_name)),$(AUX_ALL_ARCHS)) $(AUX_ARCH_$(_name))) \
-$(eval AUX_ALL_SUBARCHS := $(filterout $(AUX_SUBARCH_$(_name)),$(AUX_ALL_SUBARCHS)) $(AUX_SUBARCH_$(_name)))
+$(eval AUX_ALL_OSES := $(filter-out $(AUX_OS_$(_name)),$(AUX_ALL_OSES)) $(AUX_OS_$(_name))) \
+$(eval AUX_ALL_CPUS := $(filter-out $(AUX_CPU_$(_name)),$(AUX_ALL_CPUS)) $(AUX_CPU_$(_name))) \
+$(eval AUX_ALL_ARCHS := $(filter-out $(AUX_ARCH_$(_name)),$(AUX_ALL_ARCHS)) $(AUX_ARCH_$(_name))) \
+$(eval AUX_ALL_SUBARCHS := $(filter-out $(AUX_SUBARCH_$(_name)),$(AUX_ALL_SUBARCHS)) $(AUX_SUBARCH_$(_name)))
 endef
 
 # Load system configuration referenced by AUX variant config;
@@ -138,7 +138,7 @@
 $(eval _all:=) \
 $(eval _req:=$(addsuffix _$(1),$(aux_env))) \
 $(foreach var,$(_req),$(eval _all += $(var))) \
-$(eval _missing := $(filterout $(_all),$(_req))) \
+$(eval _missing := $(filter-out $(_all),$(_req))) \
 $(if $(_missing),$(error AUX variant $(1) must define vars: $(_missing)))
 endef
 
@@ -154,7 +154,7 @@
 config_roots := $(wildcard device vendor)
 all_configs :=
 ifdef config_roots
-all_configs := $(shell find $(config_roots) -maxdepth 4 -name '*$(variant_sfx)' -o -name '*$(os_sfx)' | sort)
+all_configs := $(sort $(shell find $(config_roots) -maxdepth 4 -name '*$(variant_sfx)' -o -name '*$(os_sfx)'))
 endif
 all_os_configs := $(filter %$(os_sfx),$(all_configs))
 all_variant_configs := $(filter %$(variant_sfx),$(all_configs))
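
The aux_config.mk hunks above are a straight bug fix: $(filterout ...) is not a GNU Make function, so Make silently resolves the whole expression as an undefined variable and both the AUX_ALL_* de-duplication and the missing-variable check were no-ops. A minimal standalone makefile (illustration only, not part of this change) shows the difference:

    # "filterout" is looked up as an (undefined) variable and expands to nothing;
    # "filter-out" is the real function.
    A := foo bar
    $(info broken='$(filterout foo,$(A))')
    $(info fixed='$(filter-out foo,$(A))')
    # Parsing this makefile prints:
    #   broken=''
    #   fixed='bar'
    all: ;
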
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 9234abe..ff48930 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -72,6 +72,8 @@
 LOCAL_OEM_MODULE := true
 else ifneq ($(filter $(TARGET_OUT_ODM)/%,$(_path)),)
 LOCAL_ODM_MODULE := true
+else ifneq ($(filter $(TARGET_OUT_PRODUCT)/%,$(_path)),)
+LOCAL_PRODUCT_MODULE := true
 endif
 _path :=
 
@@ -86,7 +88,7 @@
 endif
 
 include $(BUILD_SYSTEM)/local_vndk.mk
-include $(BUILD_SYSTEM)/local_vsdk.mk
+include $(BUILD_SYSTEM)/local_systemsdk.mk
 
 my_module_tags := $(LOCAL_MODULE_TAGS)
 ifeq ($(my_host_cross),true)
@@ -200,6 +202,8 @@
   partition_tag := _OEM
 else ifeq (true,$(LOCAL_ODM_MODULE))
   partition_tag := _ODM
+else ifeq (true,$(LOCAL_PRODUCT_MODULE))
+  partition_tag := _PRODUCT
 else ifeq (NATIVE_TESTS,$(LOCAL_MODULE_CLASS))
   partition_tag := _DATA
 else
@@ -490,6 +494,17 @@
 endif
 endif
 
+# For modules tagged as tests but lacking a suite tag, set null-suite as the default.
+# We only support adding a default suite to native tests, native benchmarks, and instrumentation tests.
+# This is because they are the only tests we currently auto-generate test configs for.
+ifneq ($(filter $(my_module_tags),tests),)
+ifndef LOCAL_COMPATIBILITY_SUITE
+ifneq ($(filter NATIVE_TESTS NATIVE_BENCHMARK APPS, $(LOCAL_MODULE_CLASS)),)
+LOCAL_COMPATIBILITY_SUITE := null-suite
+endif
+endif
+endif
+
 ###########################################################
 ## Compatibility suite files.
 ###########################################################
@@ -499,18 +514,22 @@
 # separate the multiple architectures into subdirectories of the testcase folder.
 arch_dir :=
 is_native :=
+multi_arch :=
 ifeq ($(LOCAL_MODULE_CLASS),NATIVE_TESTS)
   is_native := true
+  multi_arch := true
 endif
 ifeq ($(LOCAL_MODULE_CLASS),NATIVE_BENCHMARK)
   is_native := true
+  multi_arch := true
 endif
 ifdef LOCAL_MULTILIB
-  is_native := true
+  multi_arch := true
 endif
-ifdef is_native
+ifdef multi_arch
   arch_dir := /$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
 endif
+multi_arch :=
 
 # The module itself.
 $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
@@ -537,14 +556,23 @@
     ifeq (true, $(LOCAL_IS_HOST_MODULE))
       is_instrumentation_test := false
     endif
+    # If LOCAL_MODULE_CLASS is not APPS, it's certainly not an instrumentation
+    # test. However, some packages for test data also have LOCAL_MODULE_CLASS
+    # set to APPS. These will require flag LOCAL_DISABLE_AUTO_GENERATE_TEST_CONFIG
+    # to disable auto-generating test config file.
+    ifneq (APPS, $(LOCAL_MODULE_CLASS))
+      is_instrumentation_test := false
+    endif
   endif
   # CTS modules can be used for test data, so test config files must be
   # explicitly created using AndroidTest.xml
   ifeq (,$(filter cts, $(LOCAL_COMPATIBILITY_SUITE)))
-    ifeq (true, $(filter true,$(is_native) $(is_instrumentation_test)))
-      include $(BUILD_SYSTEM)/autogen_test_config.mk
-      test_config := $(autogen_test_config_file)
-      autogen_test_config_file :=
+    ifneq (true, $(LOCAL_DISABLE_AUTO_GENERATE_TEST_CONFIG))
+      ifeq (true, $(filter true,$(is_native) $(is_instrumentation_test)))
+        include $(BUILD_SYSTEM)/autogen_test_config.mk
+        test_config := $(autogen_test_config_file)
+        autogen_test_config_file :=
+      endif
     endif
   endif
 endif
@@ -573,9 +601,6 @@
 endif
 endif # $(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_compat_files
 
-arch_dir :=
-is_native :=
-
 ifneq ($(my_test_data_file_pairs),)
 $(foreach pair, $(my_test_data_file_pairs), \
   $(eval parts := $(subst :,$(space),$(pair))) \
@@ -586,6 +611,9 @@
       $(src_path):$(call append-path,$(dir),$(file))))))
 endif
 
+arch_dir :=
+is_native :=
+
 $(call create-suite-dependencies)
 
 endif  # LOCAL_COMPATIBILITY_SUITE
@@ -637,6 +665,9 @@
 ALL_MODULES.$(my_register_name).TARGET_REQUIRED := \
     $(strip $(ALL_MODULES.$(my_register_name).TARGET_REQUIRED)\
         $(LOCAL_TARGET_REQUIRED_MODULES))
+ALL_MODULES.$(my_register_name).HOST_REQUIRED := \
+    $(strip $(ALL_MODULES.$(my_register_name).HOST_REQUIRED)\
+        $(LOCAL_HOST_REQUIRED_MODULES))
 ALL_MODULES.$(my_register_name).EVENT_LOG_TAGS := \
     $(ALL_MODULES.$(my_register_name).EVENT_LOG_TAGS) $(event_log_tags)
 ALL_MODULES.$(my_register_name).MAKEFILE := \
diff --git a/core/binary.mk b/core/binary.mk
index 06b0d0e..c2fa27c 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -180,7 +180,6 @@
   my_ndk_stl_include_path :=
   my_ndk_stl_shared_lib_fullpath :=
   my_ndk_stl_static_lib :=
-  my_ndk_cpp_std_version :=
   my_cpu_variant := $(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)CPU_ABI)
   ifeq (mips32r6,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH_VARIANT))
     my_cpu_variant := mips32r6
@@ -189,24 +188,14 @@
   ifeq (,$(LOCAL_NDK_STL_VARIANT))
     LOCAL_NDK_STL_VARIANT := system
   endif
-  ifneq (1,$(words $(filter none system stlport_static stlport_shared c++_static c++_shared, $(LOCAL_NDK_STL_VARIANT))))
+  ifneq (1,$(words $(filter none system c++_static c++_shared, $(LOCAL_NDK_STL_VARIANT))))
     $(error $(LOCAL_PATH): Unknown LOCAL_NDK_STL_VARIANT $(LOCAL_NDK_STL_VARIANT))
   endif
+
   ifeq (system,$(LOCAL_NDK_STL_VARIANT))
     my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/system/include
     my_system_shared_libraries += libstdc++
-  else # LOCAL_NDK_STL_VARIANT is not system
-  ifneq (,$(filter stlport_%, $(LOCAL_NDK_STL_VARIANT)))
-    my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/stlport/stlport
-    my_system_shared_libraries += libstdc++
-    ifeq (stlport_static,$(LOCAL_NDK_STL_VARIANT))
-      my_ndk_stl_static_lib := $(my_ndk_source_root)/cxx-stl/stlport/libs/$(my_cpu_variant)/libstlport_static.a
-      my_ldlibs += -ldl
-    else
-      my_ndk_stl_shared_lib_fullpath := $(my_ndk_source_root)/cxx-stl/stlport/libs/$(my_cpu_variant)/libstlport_shared.so
-    endif
-  else # LOCAL_NDK_STL_VARIANT is not stlport_* either
-  ifneq (,$(filter c++_%, $(LOCAL_NDK_STL_VARIANT)))
+  else ifneq (,$(filter c++_%, $(LOCAL_NDK_STL_VARIANT)))
     my_ndk_stl_include_path := \
       $(my_ndk_source_root)/cxx-stl/llvm-libc++/include
     my_ndk_stl_include_path += \
@@ -230,13 +219,9 @@
     endif
 
     my_ldlibs += -ldl
-
-    my_ndk_cpp_std_version := c++11
   else # LOCAL_NDK_STL_VARIANT must be none
     # Do nothing.
   endif
-  endif
-  endif
 endif
 
 ifneq ($(LOCAL_USE_VNDK),)
@@ -366,11 +351,6 @@
             my_clang := true
         endif
     endif
-# Add option to make gcc the default for device build
-else ifeq ($(USE_CLANG_PLATFORM_BUILD),false)
-    ifeq ($(my_clang),)
-        my_clang := false
-    endif
 else ifeq ($(my_clang),)
     my_clang := true
 endif
@@ -398,11 +378,6 @@
     my_cpp_std_version := $(DEFAULT_GCC_CPP_STD_VERSION)
 endif
 
-ifdef LOCAL_SDK_VERSION
-    # The NDK handles this itself.
-    my_cpp_std_version := $(my_ndk_cpp_std_version)
-endif
-
 ifdef LOCAL_IS_HOST_MODULE
     ifneq ($(my_clang),true)
         # The host GCC doesn't support C++14 (and is deprecated, so likely
@@ -634,6 +609,9 @@
   my_cc := $(my_cc_wrapper) $(my_cc)
 endif
 
+SYNTAX_TOOLS_PREFIX := \
+    $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/libexec
+
 ifneq ($(LOCAL_NO_STATIC_ANALYZER),true)
   my_cc := CCC_CC=$(CLANG) CLANG=$(CLANG) \
            $(SYNTAX_TOOLS_PREFIX)/ccc-analyzer
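
With stlport gone from binary.mk and allowed_ndk_types.mk above, the only values LOCAL_NDK_STL_VARIANT accepts are none, system (still the default), c++_static and c++_shared; anything else now trips the "Unknown LOCAL_NDK_STL_VARIANT" error. A hypothetical Android.mk fragment for an NDK-built library would therefore look like this (module name and sources are placeholders):

    include $(CLEAR_VARS)
    LOCAL_MODULE := libexample_ndk
    LOCAL_SDK_VERSION := 27
    LOCAL_SRC_FILES := example.cpp
    # stlport_static / stlport_shared are no longer recognized.
    LOCAL_NDK_STL_VARIANT := c++_static
    include $(BUILD_SHARED_LIBRARY)
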
diff --git a/core/build-system.html b/core/build-system.html
index c7938cc..3d86e24 100644
--- a/core/build-system.html
+++ b/core/build-system.html
@@ -762,6 +762,19 @@
 Dialer, Contacts, etc.  This will probably change or go away when we switch
 to an ant-based build system for the apps.</p>
 
+<h4>LOCAL_PATCH_MODULE (experimental option)</h4>
+<p>As of January 2018, you almost certainly don't need this option, so please
+ask and only use it if you understand what you're doing. This feature is
+experimental and may go away in future.</p>
+<p>
+When compiling language level 9+ .java code in packages that are part of a
+system module, <code>LOCAL_PATCH_MODULE</code> names the module that your
+sources and dependencies should be patched into. The Android runtime currently
+(Jan 2018) doesn't implement the JEP 261 module system so this option is only
+supported at compile time. It should only be needed to compile tests in packages
+that exist in libcore and which are inconvenient to move elsewhere.
+</p>
+
 <h4>LOCAL_PATH</h4>
 <p>The directory your Android.mk file is in. You can set it by putting the
 following as the first line in your Android.mk:</p>
diff --git a/core/clang/HOST_CROSS_x86.mk b/core/clang/HOST_CROSS_x86.mk
index bf48f95..ffd7811 100644
--- a/core/clang/HOST_CROSS_x86.mk
+++ b/core/clang/HOST_CROSS_x86.mk
@@ -1 +1 @@
-$(clang_2nd_arch_prefix)HOST_CROSS_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-i686.a
+$(clang_2nd_arch_prefix)HOST_CROSS_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-i386.a
diff --git a/core/clang/HOST_x86.mk b/core/clang/HOST_x86.mk
index 0722b2a..2803517 100644
--- a/core/clang/HOST_x86.mk
+++ b/core/clang/HOST_x86.mk
@@ -1 +1 @@
-$(clang_2nd_arch_prefix)HOST_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-i686.a
+$(clang_2nd_arch_prefix)HOST_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-i386.a
diff --git a/core/clang/versions.mk b/core/clang/versions.mk
deleted file mode 100644
index f3a206a..0000000
--- a/core/clang/versions.mk
+++ /dev/null
@@ -1,4 +0,0 @@
-## Clang/LLVM release versions.
-
-LLVM_PREBUILTS_VERSION ?= clang-4393122
-LLVM_PREBUILTS_BASE ?= prebuilts/clang/host
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index b4a03ea..baa2344 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -55,6 +55,7 @@
 LOCAL_DEX_PREOPT_IMAGE_LOCATION:=
 LOCAL_DEX_PREOPT_PROFILE_CLASS_LISTING:=
 LOCAL_DEX_PREOPT:= # '',true,false,nostripping
+LOCAL_DISABLE_AUTO_GENERATE_TEST_CONFIG:=
 LOCAL_DONT_CHECK_MODULE:=
 # Don't delete the META_INF dir when merging static Java libraries.
 LOCAL_DONT_DELETE_JAR_META_INF:=
@@ -97,6 +98,7 @@
 LOCAL_GTEST:=true
 LOCAL_HAL_STATIC_LIBRARIES:=
 LOCAL_HEADER_LIBRARIES:=
+LOCAL_HOST_REQUIRED_MODULES:=
 LOCAL_INIT_RC:=
 LOCAL_INSTALLED_MODULE:=
 LOCAL_INSTALLED_MODULE_STEM:=
@@ -181,6 +183,7 @@
 LOCAL_PACKAGE_NAME:=
 LOCAL_PACKAGE_SPLITS:=
 LOCAL_PACK_MODULE_RELOCATIONS:=
+LOCAL_PATCH_MODULE:=
 LOCAL_PICKUP_FILES:=
 LOCAL_POST_INSTALL_CMD:=
 LOCAL_POST_LINK_CMD:=
@@ -193,8 +196,10 @@
 LOCAL_PREBUILT_OBJ_FILES:=
 LOCAL_PREBUILT_STATIC_JAVA_LIBRARIES:=
 LOCAL_PREBUILT_STRIP_COMMENTS:=
+LOCAL_PRIVATE_PLATFORM_APIS:=
 LOCAL_PRIVILEGED_MODULE:=
 # '',full,custom,disabled,obfuscation,optimization
+LOCAL_PRODUCT_MODULE:=
 LOCAL_PROGUARD_ENABLED:=
 LOCAL_PROGUARD_FLAG_FILES:=
 LOCAL_PROGUARD_FLAGS:=
@@ -235,6 +240,8 @@
 LOCAL_SOONG_PROGUARD_DICT :=
 LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE :=
 LOCAL_SOONG_RRO_DIRS :=
+LOCAL_DROIDDOC_STUBS_JAR :=
+LOCAL_DROIDDOC_DOC_ZIP :=
 # '',true
 LOCAL_SOURCE_FILES_ALL_GENERATED:=
 LOCAL_SRC_FILES:=
@@ -432,6 +439,13 @@
 LOCAL_WHOLE_STATIC_LIBRARIES_32:=
 LOCAL_WHOLE_STATIC_LIBRARIES_64:=
 
+# Robolectric variables
+LOCAL_INSTRUMENT_SOURCE_DIRS :=
+LOCAL_ROBOTEST_FAILURE_FATAL :=
+LOCAL_ROBOTEST_FILES :=
+LOCAL_ROBOTEST_TIMEOUT :=
+LOCAL_TEST_PACKAGE :=
+
 # Aux specific variables
 LOCAL_AUX_ARCH :=
 LOCAL_AUX_CPU :=
diff --git a/core/combo/TARGET_linux-arm.mk b/core/combo/TARGET_linux-arm.mk
index 73b1c04..01cf3f5 100644
--- a/core/combo/TARGET_linux-arm.mk
+++ b/core/combo/TARGET_linux-arm.mk
@@ -50,7 +50,7 @@
 endif
 
 ifeq ($(strip $(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT)),)
-TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT := armv5te
+$(error TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT must be set)
 endif
 
 TARGET_ARCH_SPECIFIC_MAKEFILE := $(BUILD_COMBOS)/arch/$(TARGET_$(combo_2nd_arch_prefix)ARCH)/$(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT).mk
diff --git a/core/combo/arch/arm/armv5te-vfp.mk b/core/combo/arch/arm/armv5te-vfp.mk
deleted file mode 100644
index 75299ac..0000000
--- a/core/combo/arch/arm/armv5te-vfp.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-# At the moment, use the same settings than the one
-# for armv5te, since TARGET_ARCH_VARIANT := armv5te-vfp
-# will only be used to select an optimized VFP-capable assembly
-# interpreter loop for Dalvik.
-#
-include $(BUILD_COMBOS)/arch/arm/armv5te.mk
-
diff --git a/core/combo/arch/arm/armv5te.mk b/core/combo/arch/arm/armv5te.mk
deleted file mode 100644
index bd75695..0000000
--- a/core/combo/arch/arm/armv5te.mk
+++ /dev/null
@@ -1,4 +0,0 @@
-# Configuration for Linux on ARM.
-# Generating binaries for the ARMv5TE architecture and higher
-#
-
diff --git a/core/combo/select.mk b/core/combo/select.mk
index 5e181b9..eab4c72 100644
--- a/core/combo/select.mk
+++ b/core/combo/select.mk
@@ -28,7 +28,7 @@
 
 # Set reasonable defaults for the various variables
 
-$(combo_var_prefix)GLOBAL_ARFLAGS := crsPD
+$(combo_var_prefix)GLOBAL_ARFLAGS := cqsD -format=gnu
 
 $(combo_var_prefix)STATIC_LIB_SUFFIX := .a
 
diff --git a/core/config.mk b/core/config.mk
index b03f21f..295644c 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -76,6 +76,7 @@
   ANDROID_PRE_BUILD_PATHS \
   ,See $(CHANGES_URL)#other_envsetup_variables)
 $(KATI_obsolete_var PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE,Set FCM Version in device manifest instead. See $(CHANGES_URL)#PRODUCT_COMPATIBILITY_MATRIX_LEVEL_OVERRIDE)
+$(KATI_obsolete_var USE_CLANG_PLATFORM_BUILD,Clang is the only supported Android compiler. See $(CHANGES_URL)#USE_CLANG_PLATFORM_BUILD)
 
 CHANGES_URL :=
 
@@ -412,33 +413,11 @@
   WITH_STATIC_ANALYZER :=
 endif
 
-# define clang/llvm versions and base directory.
-include $(BUILD_SYSTEM)/clang/versions.mk
-
 # Unset WITH_TIDY_ONLY if global WITH_TIDY_ONLY is not true nor 1.
 ifeq (,$(filter 1 true,$(WITH_TIDY_ONLY)))
   WITH_TIDY_ONLY :=
 endif
 
-PATH_TO_CLANG_TIDY := \
-    $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/bin/clang-tidy
-ifeq ($(wildcard $(PATH_TO_CLANG_TIDY)),)
-  ifneq (,$(filter 1 true,$(WITH_TIDY)))
-    $(warning *** Disable WITH_TIDY because $(PATH_TO_CLANG_TIDY) does not exist)
-  endif
-  PATH_TO_CLANG_TIDY :=
-endif
-
-# Disable WITH_STATIC_ANALYZER if tool can't be found
-SYNTAX_TOOLS_PREFIX := \
-    $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/tools/scan-build/libexec
-ifneq ($(strip $(WITH_STATIC_ANALYZER)),)
-  ifeq ($(wildcard $(SYNTAX_TOOLS_PREFIX)/ccc-analyzer),)
-    $(warning *** Disable WITH_STATIC_ANALYZER because $(SYNTAX_TOOLS_PREFIX)/ccc-analyzer does not exist)
-    WITH_STATIC_ANALYZER :=
-  endif
-endif
-
 # Pick a Java compiler.
 include $(BUILD_SYSTEM)/combo/javac.mk
 
@@ -601,6 +580,7 @@
 SOONG_JAVAC_WRAPPER := $(SOONG_HOST_OUT_EXECUTABLES)/soong_javac_wrapper
 SOONG_ZIP := $(SOONG_HOST_OUT_EXECUTABLES)/soong_zip
 MERGE_ZIPS := $(SOONG_HOST_OUT_EXECUTABLES)/merge_zips
+XMLLINT := $(SOONG_HOST_OUT_EXECUTABLES)/xmllint
 ZIP2ZIP := $(SOONG_HOST_OUT_EXECUTABLES)/zip2zip
 ZIPTIME := $(prebuilt_build_tools_bin)/ziptime
 
@@ -609,13 +589,13 @@
 
 LEX := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/flex/flex-2.5.39
 # The default PKGDATADIR built in the prebuilt bison is a relative path
-# external/bison/data.
+# prebuilts/build-tools/common/bison.
 # To run bison from elsewhere you need to set up enviromental variable
 # BISON_PKGDATADIR.
-BISON_PKGDATADIR := $(PWD)/external/bison/data
-BISON := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/bison/bison
+BISON_PKGDATADIR := $(PWD)/prebuilts/build-tools/common/bison
+BISON := prebuilts/build-tools/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/bin/bison
 YACC := $(BISON) -d
-BISON_DATA := $(wildcard external/bison/data/* external/bison/data/*/*)
+BISON_DATA := $(wildcard $(BISON_PKGDATADIR)/* $(BISON_PKGDATADIR)/*/*)
 
 YASM := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/yasm/yasm
 
@@ -685,6 +665,7 @@
 
 DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump2$(BUILD_EXECUTABLE_SUFFIX)
 PROFMAN := $(HOST_OUT_EXECUTABLES)/profman
+HIDDENAPI := $(HOST_OUT_EXECUTABLES)/hiddenapi
 
 # relocation packer
 RELOCATION_PACKER := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/relocation_packer/relocation_packer
@@ -741,6 +722,19 @@
 
 APICHECK_COMMAND := $(APICHECK) -JXmx1024m -J"classpath $(APICHECK_CLASSPATH)"
 
+# Boolean variable determining if the whitelist for compatible properties is enabled
+PRODUCT_COMPATIBLE_PROPERTY := false
+ifneq ($(PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE),)
+  PRODUCT_COMPATIBLE_PROPERTY := $(PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE)
+else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
+  #$(warning no product shipping level defined)
+else ifneq ($(call math_lt,27,$(PRODUCT_SHIPPING_API_LEVEL)),)
+  PRODUCT_COMPATIBLE_PROPERTY := true
+endif
+
+.KATI_READONLY := \
+    PRODUCT_COMPATIBLE_PROPERTY
+
 # Boolean variable determining if Treble is fully enabled
 PRODUCT_FULL_TREBLE := false
 ifneq ($(PRODUCT_FULL_TREBLE_OVERRIDE),)
@@ -782,6 +776,14 @@
 
 requirements :=
 
+# BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED can be true only if early-mount of
+# partitions is supported. But the early-mount must be supported for full
+# treble products, and so BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED should be set
+# by default for full treble products.
+ifeq ($(PRODUCT_FULL_TREBLE),true)
+  BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED ?= true
+endif
+
 # If PRODUCT_USE_VNDK is true and BOARD_VNDK_VERSION is not defined yet,
 # BOARD_VNDK_VERSION will be set to "current" as default.
 # PRODUCT_USE_VNDK will be true in Android-P or later launching devices.
@@ -810,6 +812,9 @@
       $(error When PRODUCT_SHIPPING_API_LEVEL >= 27, TARGET_USES_MKE2FS must be true)
     endif
   endif
+  ifneq ($(call numbers_less_than,$(PRODUCT_SHIPPING_API_LEVEL),$(BOARD_SYSTEMSDK_VERSIONS)),)
+    $(error BOARD_SYSTEMSDK_VERSIONS ($(BOARD_SYSTEMSDK_VERSIONS)) must all be greater than or equal to PRODUCT_SHIPPING_API_LEVEL ($(PRODUCT_SHIPPING_API_LEVEL)))
+  endif
 endif
 
 # The default key if not set as LOCAL_CERTIFICATE
@@ -819,16 +824,39 @@
   DEFAULT_SYSTEM_DEV_CERTIFICATE := build/target/product/security/testkey
 endif
 
-FRAMEWORK_MANIFEST_INPUT_FILES := system/libhidl/manifest.xml
-ifdef DEVICE_FRAMEWORK_MANIFEST_FILE
-  FRAMEWORK_MANIFEST_INPUT_FILES += $(DEVICE_FRAMEWORK_MANIFEST_FILE)
-endif
-$(.KATI_obsolete_var DEVICE_FRAMEWORK_MANIFEST_FILE,No one should ever need to use this.)
-
-FRAMEWORK_COMPATIBILITY_MATRIX_FILES := $(wildcard hardware/interfaces/compatibility_matrix.*.xml)
-
 BUILD_NUMBER_FROM_FILE := $$(cat $(OUT_DIR)/build_number.txt)
-BUILD_DATETIME_FROM_FILE := $$(cat $(OUT_DIR)/build_date.txt)
+BUILD_DATETIME_FROM_FILE := $$(cat $(BUILD_DATETIME_FILE))
+
+# SEPolicy versions
+
+# PLATFORM_SEPOLICY_VERSION is a number of the form "NN.m" with "NN" mapping to
+# PLATFORM_SDK_VERSION and "m" as a minor number which allows for SELinux
+# changes independent of PLATFORM_SDK_VERSION.  This value will be set to
+# 10000.0 to represent tip-of-tree development that is inherently unstable and
+# thus designed not to work with any shipping vendor policy.  This is similar in
+# spirit to how DEFAULT_APP_TARGET_SDK is set.
+# The minor version ('m' component) must be updated every time a platform release
+# is made which breaks compatibility with the previous platform sepolicy version,
+# not just on every increase in PLATFORM_SDK_VERSION.  The minor version should
+# be reset to 0 on every bump of the PLATFORM_SDK_VERSION.
+sepolicy_major_vers := 27
+sepolicy_minor_vers := 0
+
+ifneq ($(sepolicy_major_vers), $(PLATFORM_SDK_VERSION))
+$(error sepolicy_major_version does not match PLATFORM_SDK_VERSION, please update.)
+endif
+ifneq (REL,$(PLATFORM_VERSION_CODENAME))
+    sepolicy_major_vers := 10000
+    sepolicy_minor_vers := 0
+endif
+PLATFORM_SEPOLICY_VERSION := $(join $(addsuffix .,$(sepolicy_major_vers)), $(sepolicy_minor_vers))
+sepolicy_major_vers :=
+sepolicy_minor_vers :=
+
+# A list of SEPolicy versions, besides PLATFORM_SEPOLICY_VERSION, that the framework supports.
+PLATFORM_SEPOLICY_COMPAT_VERSIONS := \
+    26.0 \
+    27.0
 
 # ###############################################################
 # Set up final options.
@@ -912,21 +940,29 @@
     $(wildcard $(HISTORICAL_SDK_VERSIONS_ROOT)/*/android_system.jar)))) \
     $(TARGET_AVAILABLE_SDK_VERSIONS)
 
-# We don't have prebuilt test_current SDK yet.
-TARGET_AVAILABLE_SDK_VERSIONS := test_current $(TARGET_AVAILABLE_SDK_VERSIONS)
+# We don't have prebuilt test_current and core_current SDK yet.
+TARGET_AVAILABLE_SDK_VERSIONS := test_current core_current $(TARGET_AVAILABLE_SDK_VERSIONS)
 
 TARGET_SDK_VERSIONS_WITHOUT_JAVA_18_SUPPORT := $(call numbers_less_than,24,$(TARGET_AVAILABLE_SDK_VERSIONS))
 TARGET_SDK_VERSIONS_WITHOUT_JAVA_19_SUPPORT := $(call numbers_less_than,27,$(TARGET_AVAILABLE_SDK_VERSIONS))
 
 INTERNAL_PLATFORM_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/public_api.txt
+INTERNAL_PLATFORM_PRIVATE_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/private.txt
+INTERNAL_PLATFORM_PRIVATE_DEX_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/private-dex.txt
 INTERNAL_PLATFORM_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/removed.txt
 INTERNAL_PLATFORM_SYSTEM_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-api.txt
+INTERNAL_PLATFORM_SYSTEM_PRIVATE_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-private.txt
+INTERNAL_PLATFORM_SYSTEM_PRIVATE_DEX_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-private-dex.txt
 INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-removed.txt
 INTERNAL_PLATFORM_SYSTEM_EXACT_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-exact.txt
 INTERNAL_PLATFORM_TEST_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/test-api.txt
 INTERNAL_PLATFORM_TEST_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/test-removed.txt
 INTERNAL_PLATFORM_TEST_EXACT_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/test-exact.txt
 
+INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/hiddenapi-light-greylist.txt
+INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/hiddenapi-dark-greylist.txt
+INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/hiddenapi-blacklist.txt
+
 # This is the standard way to name a directory containing prebuilt target
 # objects. E.g., prebuilt/$(TARGET_PREBUILT_TAG)/libc.so
 TARGET_PREBUILT_TAG := android-$(TARGET_ARCH)
@@ -968,6 +1004,7 @@
     cacheimage-nodeps \
     bptimage-nodeps \
     vnod vendorimage-nodeps \
+    pnod productimage-nodeps \
     systemotherimage-nodeps \
     ramdisk-nodeps \
     bootimage-nodeps \
diff --git a/core/cxx_stl_setup.mk b/core/cxx_stl_setup.mk
index f07659d..5171b8a 100644
--- a/core/cxx_stl_setup.mk
+++ b/core/cxx_stl_setup.mk
@@ -74,6 +74,16 @@
 ifneq ($(filter $(my_cxx_stl),libc++ libc++_static),)
     my_cflags += -D_USING_LIBCXX
 
+    ifeq ($($(my_prefix)OS),darwin)
+        # libc++'s headers are annotated with availability macros that indicate
+        # which version of Mac OS was the first to ship with a libc++ feature
+        # available in its *system's* libc++.dylib. We do not use the system's
+        # library, but rather ship our own. As such, these availability
+        # attributes are meaningless for us but cause build breaks when we try
+        # to use code that would not be available in the system's dylib.
+        my_cppflags += -D_LIBCPP_DISABLE_AVAILABILITY
+    endif
+
     # Note that the structure of this means that LOCAL_CXX_STL := libc++ will
     # use the static libc++ for static executables.
     ifeq ($(my_link_type),dynamic)
diff --git a/core/definitions.mk b/core/definitions.mk
index a20bf44..f6f5840 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -1723,6 +1723,10 @@
 $(if $(PRIVATE_ALL_OBJECTS),,$(hide) $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)AR) d $(1) $(dir $(1))dummy.o \
   && rm -f $(dir $(1))dummy.o)
 endef
+else
+create-dummy.o-if-no-objs =
+get-dummy.o-if-no-objs =
+delete-dummy.o-if-no-objs =
 endif  # HOST_OS is darwin
 
 # Explicitly delete the archive first so that ar doesn't
@@ -2241,12 +2245,17 @@
     $(SOONG_JAVAC_WRAPPER) $(JAVAC_WRAPPER) $(1) -encoding UTF-8 \
     $(if $(findstring true,$(PRIVATE_WARNINGS_ENABLE)),$(xlint_unchecked),) \
     $(if $(PRIVATE_USE_SYSTEM_MODULES), \
-      $(addprefix --system=,$(PRIVATE_SYSTEM_MODULES)), \
+      $(addprefix --system=,$(PRIVATE_SYSTEM_MODULES_DIR)), \
       $(addprefix -bootclasspath ,$(strip \
           $(call normalize-path-list,$(PRIVATE_BOOTCLASSPATH)) \
           $(PRIVATE_EMPTY_BOOTCLASSPATH)))) \
-    $(addprefix -classpath ,$(strip \
-        $(call normalize-path-list,$(2)))) \
+    $(if $(PRIVATE_USE_SYSTEM_MODULES), \
+      $(if $(PRIVATE_PATCH_MODULE), \
+        --patch-module=$(PRIVATE_PATCH_MODULE)=$(call normalize-path-list,. $(2)))) \
+    $(addprefix -classpath ,$(call normalize-path-list,$(strip \
+      $(if $(PRIVATE_USE_SYSTEM_MODULES), \
+        $(filter-out $(PRIVATE_SYSTEM_MODULES_LIBS),$(PRIVATE_BOOTCLASSPATH))) \
+      $(2)))) \
     $(if $(findstring true,$(PRIVATE_WARNINGS_ENABLE)),$(xlint_unchecked),) \
     -d $(PRIVATE_CLASS_INTERMEDIATES_DIR) -s $(PRIVATE_ANNO_INTERMEDIATES_DIR) \
     $(PRIVATE_JAVACFLAGS) \
@@ -2592,11 +2601,12 @@
 #
 define uncompress-dexs
 $(hide) if (zipinfo $@ '*.dex' 2>/dev/null | grep -v ' stor ' >/dev/null) ; then \
-  rm -rf $(dir $@)uncompresseddexs && mkdir $(dir $@)uncompresseddexs; \
-  unzip -q $@ '*.dex' -d $(dir $@)uncompresseddexs && \
+  tmpdir=$@.tmpdir; \
+  rm -rf $$tmpdir && mkdir $$tmpdir; \
+  unzip -q $@ '*.dex' -d $$tmpdir && \
   zip -qd $@ '*.dex' && \
-  ( cd $(dir $@)uncompresseddexs && find . -type f | sort | zip -qD -X -0 ../$(notdir $@) -@ ) && \
-  rm -rf $(dir $@)uncompresseddexs; \
+  ( cd $$tmpdir && find . -type f | sort | zip -qD -X -0 ../$(notdir $@) -@ ) && \
+  rm -rf $$tmpdir; \
   fi
 endef
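
Note that uncompress-dexs operates on the enclosing rule's $@ rather than taking the archive as an argument. A hedged usage sketch, assuming this fragment is evaluated after definitions.mk so that uncompress-dexs and the align-package helper (used the same way elsewhere in this change) are defined; paths are placeholders and the recipe lines need a leading tab:

    # Copy a prebuilt APK and re-store any compressed classes*.dex entries.
    out/example/MyApp.apk: prebuilts/MyApp.apk
    	mkdir -p $(dir $@)
    	cp $< $@
    	$(uncompress-dexs)
    	$(align-package)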
 
@@ -2676,9 +2686,9 @@
 # $(1): source file
 # $(2): destination file, must end with .xml.
 define copy-xml-file-checked
-$(2): $(1)
+$(2): $(1) $(XMLLINT)
 	@echo "Copy xml: $$@"
-	$(hide) xmllint $$< >/dev/null  # Don't print the xml file to stdout.
+	$(hide) $(XMLLINT) $$< >/dev/null  # Don't print the xml file to stdout.
 	$$(copy-file-to-target)
 endef
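
copy-xml-file-checked expands to a complete rule, so call sites $(eval) it. A hedged usage sketch with placeholder paths:

    # The emitted rule runs $(XMLLINT) on the source first, so a malformed XML
    # file fails the build instead of being installed silently.
    my_src  := device/acme/example/manifest.xml
    my_dest := $(TARGET_OUT_VENDOR)/etc/manifest.xml
    $(eval $(call copy-xml-file-checked,$(my_src),$(my_dest)))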
 
@@ -2781,18 +2791,53 @@
 # $(3): LOCAL_DEX_PREOPT, if nostripping then leave classes*.dex
 define dexpreopt-copy-jar
 $(2): $(1)
-	@echo $(if $(filter nostripping,$(3)),"Copy: $$@","Copy without dex: $$@")
+	@echo "Copy: $$@"
 	$$(copy-file-to-target)
 	$(if $(filter nostripping,$(3)),,$$(call dexpreopt-remove-classes.dex,$$@))
 endef
 
-# $(1): the .jar or .apk to remove classes.dex
+# $(1): the .jar or .apk from which to remove classes.dex. Note that if all dex files
+# are uncompressed in the archive, then dexopt will not do a copy of the dex
+# files and we should not strip.
 define dexpreopt-remove-classes.dex
-$(hide) zip --quiet --delete $(1) classes.dex; \
+$(hide) if (zipinfo $1 '*.dex' 2>/dev/null | grep -v ' stor ' >/dev/null) ; then \
+zip --quiet --delete $(1) classes.dex; \
 dex_index=2; \
 while zip --quiet --delete $(1) classes$${dex_index}.dex > /dev/null; do \
   let dex_index=dex_index+1; \
-done
+done; \
+fi
+endef
+
+define hiddenapi-copy-dex-files
+$(2): $(1) $(HIDDENAPI) $(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
+      $(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) $(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
+	@rm -rf $(dir $(2))
+	@mkdir -p $(dir $(2))
+	find $(dir $(1)) -maxdepth 1 -name "classes*.dex" | sort | \
+		xargs -I{} cp -f {} $(dir $(2))
+	find $(dir $(2)) -name "classes*.dex" | sort | sed 's/^/--dex=/' | \
+		xargs $(HIDDENAPI) --light-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
+		                   --dark-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) \
+		                   --blacklist=$(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
+endef
+
+define hiddenapi-copy-soong-jar
+$(2): PRIVATE_FOLDER := $(dir $(2))dex-hiddenapi
+$(2): $(1) $(HIDDENAPI) $(SOONG_ZIP) $(MERGE_ZIPS) $(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
+      $(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) $(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
+	@echo "Hidden API: $$@"
+	$$(copy-file-to-target)
+	@rm -rf $${PRIVATE_FOLDER}
+	@mkdir -p $${PRIVATE_FOLDER}
+	unzip -q $(2) 'classes*.dex' -d $${PRIVATE_FOLDER}
+	find $${PRIVATE_FOLDER} -name "classes*.dex" | sort | sed 's/^/--dex=/' | \
+		xargs $(HIDDENAPI) --light-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
+		                   --dark-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) \
+		                   --blacklist=$(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
+	$(SOONG_ZIP) -o $${PRIVATE_FOLDER}/classes.dex.jar -C $${PRIVATE_FOLDER} -D $${PRIVATE_FOLDER}
+	$(MERGE_ZIPS) -D -zipToNotStrip $${PRIVATE_FOLDER}/classes.dex.jar -stripFile "classes*.dex" \
+		$(2) $${PRIVATE_FOLDER}/classes.dex.jar $(1)
 endef
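
Both hiddenapi macros likewise expand to rules and are consumed via $(eval). A sketch of the dex-file variant, mirroring how java.mk later in this change wires it up for boot jars; the intermediate paths are invented:

    # Route a boot jar's dex output through the hiddenapi tool before packaging.
    # The emitted rule copies classes*.dex next to the target and rewrites the
    # copies in place, marking members per the light-grey/dark-grey/black lists.
    built_dex_intermediate := out/obj/JAVA_LIBRARIES/foo_intermediates/dex/classes.dex
    built_dex_hiddenapi    := out/obj/JAVA_LIBRARIES/foo_intermediates/dex-hiddenapi/classes.dex
    $(eval $(call hiddenapi-copy-dex-files,$(built_dex_intermediate),$(built_dex_hiddenapi)))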
 
 ###########################################################
@@ -3431,3 +3476,24 @@
 $(filter-out current,\
   $(if $(call has-system-sdk-version,$(1)),$(patsubst system_%,%,$(1)),$(1)))
 endef
+
+# Convert to lower case without requiring a shell, which isn't cacheable.
+to-lower=$(subst A,a,$(subst B,b,$(subst C,c,$(subst D,d,$(subst E,e,$(subst F,f,$(subst G,g,$(subst H,h,$(subst I,i,$(subst J,j,$(subst K,k,$(subst L,l,$(subst M,m,$(subst N,n,$(subst O,o,$(subst P,p,$(subst Q,q,$(subst R,r,$(subst S,s,$(subst T,t,$(subst U,u,$(subst V,v,$(subst W,w,$(subst X,x,$(subst Y,y,$(subst Z,z,$1))))))))))))))))))))))))))
+
+# Convert to upper case without requiring a shell, which isn't cacheable.
+to-upper=$(subst a,A,$(subst b,B,$(subst c,C,$(subst d,D,$(subst e,E,$(subst f,F,$(subst g,G,$(subst h,H,$(subst i,I,$(subst j,J,$(subst k,K,$(subst l,L,$(subst m,M,$(subst n,N,$(subst o,O,$(subst p,P,$(subst q,Q,$(subst r,R,$(subst s,S,$(subst t,T,$(subst u,U,$(subst v,V,$(subst w,W,$(subst x,X,$(subst y,Y,$(subst z,Z,$1))))))))))))))))))))))))))
+
+# Sanity-check to-lower and to-upper
+lower := abcdefghijklmnopqrstuvwxyz-_
+upper := ABCDEFGHIJKLMNOPQRSTUVWXYZ-_
+
+ifneq ($(lower),$(call to-lower,$(upper)))
+  $(error to-lower sanity check failure)
+endif
+
+ifneq ($(upper),$(call to-upper,$(lower)))
+  $(error to-upper sanity check failure)
+endif
+
+lower :=
+upper :=
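
Because to-lower and to-upper are plain $(subst) chains, they work in immediate (:=) assignments without forking a shell. A small usage sketch with invented values:

    arch := ARM64
    # -> out/target/arm64 (digits pass through untouched)
    arch_out := out/target/$(call to-lower,$(arch))
    # prints VERITY
    $(info $(call to-upper,verity))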
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 0dcb07f..83c4a95 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -35,12 +35,14 @@
 # Conditional to building on linux, as dex2oat currently does not work on darwin.
 ifeq ($(HOST_OS),linux)
   WITH_DEXPREOPT ?= true
-# For an eng build only pre-opt the boot image and system server. This gives reasonable performance
-# and still allows a simple workflow: building in frameworks/base and syncing.
   ifeq (eng,$(TARGET_BUILD_VARIANT))
+    # Don't strip for quick development turnarounds.
+    DEX_PREOPT_DEFAULT := nostripping
+    # For an eng build only pre-opt the boot image and system server. This gives reasonable performance
+    # and still allows a simple workflow: building in frameworks/base and syncing.
     WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY ?= true
   endif
-# Add mini-debug-info to the boot classpath unless explicitly asked not to.
+  # Add mini-debug-info to the boot classpath unless explicitly asked not to.
   ifneq (false,$(WITH_DEXPREOPT_DEBUG_INFO))
     PRODUCT_DEX_PREOPT_BOOT_FLAGS += --generate-mini-debug-info
   endif
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
index 79e72c1..f289c22 100644
--- a/core/dex_preopt_libart.mk
+++ b/core/dex_preopt_libart.mk
@@ -7,11 +7,14 @@
 # Set USE_DEX2OAT_DEBUG to false for only building non-debug versions.
 ifeq ($(USE_DEX2OAT_DEBUG),false)
 DEX2OAT := $(HOST_OUT_EXECUTABLES)/dex2oat$(HOST_EXECUTABLE_SUFFIX)
+PATCHOAT := $(HOST_OUT_EXECUTABLES)/patchoat$(HOST_EXECUTABLE_SUFFIX)
 else
 DEX2OAT := $(HOST_OUT_EXECUTABLES)/dex2oatd$(HOST_EXECUTABLE_SUFFIX)
+PATCHOAT := $(HOST_OUT_EXECUTABLES)/patchoatd$(HOST_EXECUTABLE_SUFFIX)
 endif
 
 DEX2OAT_DEPENDENCY += $(DEX2OAT)
+PATCHOAT_DEPENDENCY += $(PATCHOAT)
 
 # Use the first preloaded-classes file in PRODUCT_COPY_FILES.
 PRELOADED_CLASSES := $(call word-colon,1,$(firstword \
@@ -87,14 +90,19 @@
 # is converted to boot.art (to match the legacy assumption that boot.art
 # exists), and the rest are converted to boot-<name>.art.
 # In addition, each .art file has an associated .oat file.
-LIBART_TARGET_BOOT_ART_EXTRA_FILES := $(foreach jar,$(wordlist 2,999,$(LIBART_TARGET_BOOT_JARS)),boot-$(jar).art boot-$(jar).oat boot-$(jar).vdex)
-LIBART_TARGET_BOOT_ART_EXTRA_FILES += boot.oat boot.vdex
+LIBART_TARGET_BOOT_ART_EXTRA_FILES := $(foreach jar,$(wordlist 2,999,$(LIBART_TARGET_BOOT_JARS)),boot-$(jar).art boot-$(jar).art.rel boot-$(jar).oat)
+LIBART_TARGET_BOOT_ART_EXTRA_FILES += boot.art.rel boot.oat
+LIBART_TARGET_BOOT_ART_VDEX_FILES := $(foreach jar,$(wordlist 2,999,$(LIBART_TARGET_BOOT_JARS)),boot-$(jar).vdex)
+LIBART_TARGET_BOOT_ART_VDEX_FILES += boot.vdex
 
 # If we use a boot image profile.
 my_use_profile_for_boot_image := $(PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE)
 ifeq (,$(my_use_profile_for_boot_image))
-# If not set, use the default.
-my_use_profile_for_boot_image := false
+# If not set, set the default to true if we are not a PDK build. PDK builds
+# can't build the profile since they don't have frameworks/base.
+ifneq (true,$(TARGET_BUILD_PDK))
+my_use_profile_for_boot_image := true
+endif
 endif
 
 ifeq (true,$(my_use_profile_for_boot_image))
@@ -114,6 +122,7 @@
 	@mkdir -p $(dir $@)
 	ANDROID_LOG_TAGS="*:e" $(PROFMAN) \
 		--create-profile-from=$(PRIVATE_PROFILE_INPUT_LOCATION) \
+		--skip-apk-verification \
 		$(addprefix --apk=,$(LIBART_TARGET_BOOT_DEX_FILES)) \
 		$(addprefix --dex-location=,$(LIBART_TARGET_BOOT_DEX_LOCATIONS)) \
 		--reference-profile-file=$@
@@ -126,6 +135,8 @@
 
 endif
 
+LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_SHARED_FILES := $(addprefix $(PRODUCT_OUT)/$(DEXPREOPT_BOOT_JAR_DIR)/,$(LIBART_TARGET_BOOT_ART_VDEX_FILES))
+
 my_2nd_arch_prefix :=
 include $(BUILD_SYSTEM)/dex_preopt_libart_boot.mk
 
@@ -133,10 +144,24 @@
 ifdef TARGET_2ND_ARCH
 my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
 include $(BUILD_SYSTEM)/dex_preopt_libart_boot.mk
-my_2nd_arch_prefix :=
 endif
 endif
 
+# Copy the shared vdex files into the boot jar directory and create corresponding symlinks in the primary and secondary arch directories.
+$(LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_SHARED_FILES) : PRIMARY_ARCH_DIR := $(dir $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE))
+$(LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_SHARED_FILES) : SECOND_ARCH_DIR := $(dir $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE))
+$(LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_SHARED_FILES) : $(DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME)
+	@echo "Install: $@"
+	@mkdir -p $(dir $@)
+	@rm -f $@
+	$(hide) cp "$(dir $<)$(notdir $@)" "$@"
+	# Make the symlink for both archs. In the single-arch case the second symlink simply overwrites the first.
+	@mkdir -p $(PRIMARY_ARCH_DIR)
+	$(hide) ln -sf /$(DEXPREOPT_BOOT_JAR_DIR)/$(notdir $@) $(PRIMARY_ARCH_DIR)$(notdir $@)
+	@mkdir -p $(SECOND_ARCH_DIR)
+	$(hide) ln -sf /$(DEXPREOPT_BOOT_JAR_DIR)/$(notdir $@) $(SECOND_ARCH_DIR)$(notdir $@)
+
+my_2nd_arch_prefix :=
 
 ########################################################################
 # For a single jar or APK
diff --git a/core/dex_preopt_libart_boot.mk b/core/dex_preopt_libart_boot.mk
index 8b71198..8d0539a 100644
--- a/core/dex_preopt_libart_boot.mk
+++ b/core/dex_preopt_libart_boot.mk
@@ -30,6 +30,8 @@
 $(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE := $(PRODUCT_OUT)$($(my_2nd_arch_prefix)LIBART_BOOT_IMAGE_FILENAME)
 $(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_EXTRA_INSTALLED_FILES := $(addprefix $(dir $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE)),\
     $(LIBART_TARGET_BOOT_ART_EXTRA_FILES))
+$(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_FILES := $(addprefix $(dir $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE)),\
+    $(LIBART_TARGET_BOOT_ART_VDEX_FILES))
 
 # If we have a compiled-classes file, create a parameter.
 COMPILED_CLASSES_FLAGS :=
@@ -45,7 +47,7 @@
 
 # The rule to install boot.art
 # Depends on installed boot.oat, boot-*.art, boot-*.oat
-$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE) : $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) | $(ACP) $($(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_EXTRA_INSTALLED_FILES)
+$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE) : $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) | $(ACP) $($(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_EXTRA_INSTALLED_FILES) $($(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_VDEX_INSTALLED_SHARED_FILES)
 	@echo "Install: $@"
 	$(copy-file-to-target)
 
@@ -73,14 +75,16 @@
 
 $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_BOOT_IMAGE_FLAGS := $(my_boot_image_flags)
 $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_2ND_ARCH_VAR_PREFIX := $(my_2nd_arch_prefix)
+$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_IMAGE_LOCATION := $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_LOCATION)
 # Use dex2oat debug version for better error reporting
-$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) : $(LIBART_TARGET_BOOT_DEX_FILES) $(PRELOADED_CLASSES) $(COMPILED_CLASSES) $(DIRTY_IMAGE_OBJECTS) $(DEX2OAT_DEPENDENCY) $(my_out_boot_image_profile_location)
+$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) : $(LIBART_TARGET_BOOT_DEX_FILES) $(PRELOADED_CLASSES) $(COMPILED_CLASSES) $(DIRTY_IMAGE_OBJECTS) $(DEX2OAT_DEPENDENCY) $(PATCHOAT_DEPENDENCY) $(my_out_boot_image_profile_location)
 	@echo "target dex2oat: $@"
 	@mkdir -p $(dir $@)
 	@mkdir -p $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))
-	@rm -f $(dir $@)/*.art $(dir $@)/*.oat
+	@rm -f $(dir $@)/*.art $(dir $@)/*.oat $(dir $@)/*.art.rel
 	@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.art
 	@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.oat
+	@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.art.rel
 	$(hide) ANDROID_LOG_TAGS="*:e" $(DEX2OAT) --runtime-arg -Xms$(DEX2OAT_IMAGE_XMS) \
 		--runtime-arg -Xmx$(DEX2OAT_IMAGE_XMX) \
 		$(PRIVATE_BOOT_IMAGE_FLAGS) \
@@ -99,6 +103,11 @@
 		--multi-image --no-inline-from=core-oj.jar \
 		--abort-on-hard-verifier-error \
 		--abort-on-soft-verifier-error \
-		$(PRODUCT_DEX_PREOPT_BOOT_FLAGS) $(GLOBAL_DEXPREOPT_FLAGS) $(ART_BOOT_IMAGE_EXTRA_ARGS)
+		$(PRODUCT_DEX_PREOPT_BOOT_FLAGS) $(GLOBAL_DEXPREOPT_FLAGS) $(ART_BOOT_IMAGE_EXTRA_ARGS) && \
+	ANDROID_ROOT=$(PRODUCT_OUT)/system ANDROID_DATA=$(dir $@) $(PATCHOAT) \
+        --input-image-location=$(PRIVATE_IMAGE_LOCATION) \
+        --output-image-relocation-directory=$(dir $@) \
+        --instruction-set=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH) \
+        --base-offset-delta=0x10000000
 
 endif
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 93824c3..2b2800b 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -1,6 +1,20 @@
 # dexpreopt_odex_install.mk is used to define odex creation rules for JARs and APKs
 # This file depends on variables set in base_rules.mk
-# Output variables: LOCAL_DEX_PREOPT, built_odex, dexpreopt_boot_jar_module
+# Output variables: LOCAL_DEX_PREOPT, LOCAL_UNCOMPRESS_DEX, built_odex,
+#                   dexpreopt_boot_jar_module
+
+# We explicitly uncompress APKs of privileged apps, and of modules listed in
+# PRODUCT_LOADED_BY_PRIVILEGED_MODULES (i.e. libraries loaded by privileged apps)
+LOCAL_UNCOMPRESS_DEX := false
+ifneq (true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS))
+ifeq (true,$(LOCAL_PRIVILEGED_MODULE))
+  LOCAL_UNCOMPRESS_DEX := true
+else
+  ifneq (,$(filter $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES), $(LOCAL_MODULE)))
+    LOCAL_UNCOMPRESS_DEX := true
+  endif  # PRODUCT_LOADED_BY_PRIVILEGED_MODULES
+endif  # LOCAL_PRIVILEGED_MODULE
+endif  # DONT_UNCOMPRESS_PRIV_APPS_DEXS
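
A hedged sketch of the two ways a module ends up with LOCAL_UNCOMPRESS_DEX := true under the block above; module names are invented:

    # In a product makefile: a library that privileged apps load.
    PRODUCT_LOADED_BY_PRIVILEGED_MODULES += com.acme.runtime.helper
    # In a module's Android.mk: being privileged is enough on its own.
    LOCAL_PACKAGE_NAME := AcmePrivApp
    LOCAL_PRIVILEGED_MODULE := true
    # Setting DONT_UNCOMPRESS_PRIV_APPS_DEXS := true opts the whole build out of both paths.

Either way the dex stays uncompressed in the installed archive, which is also why the nostripping logic below then leaves classes*.dex in place.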
 
 # Setting LOCAL_DEX_PREOPT based on WITH_DEXPREOPT, LOCAL_DEX_PREOPT, etc
 LOCAL_DEX_PREOPT := $(strip $(LOCAL_DEX_PREOPT))
@@ -46,14 +60,27 @@
 endif
 endif
 
-# if installing into system, and odex are being installed into system_other, don't strip
-ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true)
 ifeq ($(LOCAL_DEX_PREOPT),true)
+
+# Don't strip dexes we explicitly uncompress (dexopt will not store the dex code).
+ifeq ($(LOCAL_UNCOMPRESS_DEX),true)
+LOCAL_DEX_PREOPT := nostripping
+endif  # LOCAL_UNCOMPRESS_DEX
+
+# system_other isn't present after an OTA, so don't strip
+# if the module is on system and its odex is on system_other.
+ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true)
 ifneq ($(call install-on-system-other, $(my_module_path)),)
 LOCAL_DEX_PREOPT := nostripping
-endif
-endif
-endif
+endif  # install-on-system-other
+endif  # BOARD_USES_SYSTEM_OTHER_ODEX
+
+# We also don't strip if all dexes are uncompressed (dexopt will not store the dex code),
+# but that requires inspecting the source file, which is too early at this point (we
+# don't yet know whether the source file will actually be used).
+# See dexpreopt-remove-classes.dex.
+
+endif  # LOCAL_DEX_PREOPT
 
 built_odex :=
 built_vdex :=
@@ -112,8 +139,10 @@
 		--dex-location=$(PRIVATE_DEX_LOCATION) \
 		--reference-profile-file=$@
 dex_preopt_profile_src_file:=
-# Remove compressed APK  extension.
+
+# Remove compressed APK extension.
 my_installed_profile := $(patsubst %.gz,%,$(LOCAL_INSTALLED_MODULE)).prof
+
 # my_installed_profile := $(LOCAL_INSTALLED_MODULE).prof
 $(eval $(call copy-one-file,$(my_built_profile),$(my_installed_profile)))
 build_installed_profile:=$(my_built_profile):$(my_installed_profile)
@@ -197,6 +226,12 @@
 my_system_server_compiler_filter := speed
 endif
 
+my_default_compiler_filter := $(PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER)
+ifeq (,$(my_default_compiler_filter))
+# If no default compiler filter is specified, default to 'quicken' to save on storage.
+my_default_compiler_filter := quicken
+endif
+
 ifeq (,$(filter --compiler-filter=%, $(LOCAL_DEX_PREOPT_FLAGS)))
   ifneq (,$(filter $(PRODUCT_SYSTEM_SERVER_JARS),$(LOCAL_MODULE)))
     # Jars of system server, use the product option if it is set, speed otherwise.
@@ -211,13 +246,39 @@
         # For non system server jars, use speed-profile when we have a profile.
         LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=speed-profile
       else
-        # If no compiler filter is specified, default to 'quicken' to save on storage.
-        LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=quicken
+        LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=$(my_default_compiler_filter)
       endif
     endif
   endif
 endif
 
+my_generate_dm := $(PRODUCT_DEX_PREOPT_GENERATE_DM_FILES)
+ifeq (,$(filter $(LOCAL_DEX_PREOPT_FLAGS),--compiler-filter=verify))
+# Generating DM files only makes sense for the verify compiler filter; skip it for APKs compiled with any other filter.
+my_generate_dm := false
+endif
+
+# No reason to use a dm file if the dex is already uncompressed.
+ifeq ($(LOCAL_UNCOMPRESS_DEX),true)
+my_generate_dm := false
+endif
+
+ifeq (true,$(my_generate_dm))
+LOCAL_DEX_PREOPT_FLAGS += --copy-dex-files=false
+LOCAL_DEX_PREOPT := nostripping
+my_built_dm := $(dir $(LOCAL_BUILT_MODULE))generated.dm
+my_installed_dm := $(patsubst %.apk,%,$(LOCAL_INSTALLED_MODULE)).dm
+my_copied_vdex := $(dir $(LOCAL_BUILT_MODULE))primary.vdex
+$(eval $(call copy-one-file,$(built_vdex),$(my_copied_vdex)))
+$(my_built_dm): PRIVATE_INPUT_VDEX := $(my_copied_vdex)
+$(my_built_dm): $(my_copied_vdex) $(ZIPTIME)
+	$(hide) mkdir -p $(dir $@)
+	$(hide) rm -f $@
+	$(hide) zip -qD -j -X -9 $@ $(PRIVATE_INPUT_VDEX)
+	$(ZIPTIME) $@
+$(eval $(call copy-one-file,$(my_built_dm),$(my_installed_dm)))
+endif
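
As a hedged sketch, a product that wants the .dm path above to fire would set both variables consulted in this file, since anything other than the verify filter resets my_generate_dm to false:

    # Product makefile fragment (placeholder product).
    PRODUCT_DEX_PREOPT_GENERATE_DM_FILES := true
    PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER := verify

For an eligible APK this installs a Foo.dm next to Foo.apk; the .dm is just a zip (built with zip -qD -j -X -9 above) containing the verified vdex.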
+
 # PRODUCT_SYSTEM_SERVER_DEBUG_INFO overrides WITH_DEXPREOPT_DEBUG_INFO.
 my_system_server_debug_info := $(PRODUCT_SYSTEM_SERVER_DEBUG_INFO)
 ifeq (,$(filter eng, $(TARGET_BUILD_VARIANT)))
@@ -233,19 +294,35 @@
   endif
 endif
 
+# Set the compiler reason to 'prebuilt' to identify the oat files produced
+# during the build, as opposed to compiled on the device.
+LOCAL_DEX_PREOPT_FLAGS += --compilation-reason=prebuilt
+
 $(built_odex): PRIVATE_DEX_PREOPT_FLAGS := $(LOCAL_DEX_PREOPT_FLAGS)
 $(built_vdex): $(built_odex)
 $(built_art): $(built_odex)
 endif
 
-# Add the installed_odex to the list of installed files for this module.
-ALL_MODULES.$(my_register_name).INSTALLED += $(installed_odex)
-ALL_MODULES.$(my_register_name).INSTALLED += $(installed_vdex)
-ALL_MODULES.$(my_register_name).INSTALLED += $(installed_art)
+ifneq (true,$(my_generate_dm))
+  # Add the installed_odex to the list of installed files for this module if we aren't generating a
+  # dm file.
+  ALL_MODULES.$(my_register_name).INSTALLED += $(installed_odex)
+  ALL_MODULES.$(my_register_name).INSTALLED += $(installed_vdex)
+  ALL_MODULES.$(my_register_name).INSTALLED += $(installed_art)
 
-ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_odex)
-ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_vdex)
-ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_art)
+  ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_odex)
+  ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_vdex)
+  ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_art)
+
+  # Make sure to install the .odex and .vdex when you run "make <module_name>"
+  $(my_all_targets): $(installed_odex) $(installed_vdex) $(installed_art)
+else
+  ALL_MODULES.$(my_register_name).INSTALLED += $(my_installed_dm)
+  ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(my_built_dm) $(my_installed_dm)
+
+  # Make sure to install the .dm when you run "make <module_name>"
+  $(my_all_targets): $(installed_dm)
+endif
 
 # Record dex-preopt config.
 DEXPREOPT.$(LOCAL_MODULE).DEX_PREOPT := $(LOCAL_DEX_PREOPT)
@@ -259,10 +336,6 @@
 DEXPREOPT.MODULES.$(LOCAL_MODULE_CLASS) := $(sort \
   $(DEXPREOPT.MODULES.$(LOCAL_MODULE_CLASS)) $(LOCAL_MODULE))
 
-
-# Make sure to install the .odex and .vdex when you run "make <module_name>"
-$(my_all_targets): $(installed_odex) $(installed_vdex) $(installed_art)
-
 endif # LOCAL_DEX_PREOPT
 
 # Profile doesn't depend on LOCAL_DEX_PREOPT.
diff --git a/core/dpi_specific_apk.mk b/core/dpi_specific_apk.mk
index e29cde7..f32daf5 100644
--- a/core/dpi_specific_apk.mk
+++ b/core/dpi_specific_apk.mk
@@ -18,7 +18,7 @@
 $(built_dpi_apk): PRIVATE_ASSET_DIR := $(LOCAL_ASSET_DIR)
 $(built_dpi_apk): PRIVATE_AAPT_INCLUDES := $(all_library_res_package_exports)
 $(built_dpi_apk): PRIVATE_RESOURCE_LIST := $(all_res_assets)
-ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
+ifneq (,$(filter-out current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
 $(built_dpi_apk): PRIVATE_DEFAULT_APP_TARGET_SDK := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
 else
 $(built_dpi_apk): PRIVATE_DEFAULT_APP_TARGET_SDK := $(DEFAULT_APP_TARGET_SDK)
diff --git a/core/droiddoc.mk b/core/droiddoc.mk
index 176a01d..8115481 100644
--- a/core/droiddoc.mk
+++ b/core/droiddoc.mk
@@ -71,22 +71,23 @@
   else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),test_current)
     LOCAL_JAVA_LIBRARIES := android_test_stubs_current $(LOCAL_JAVA_LIBRARIES)
     $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, android_test_stubs_current)
+  else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),core_current)
+    LOCAL_JAVA_LIBRARIES := core.current.stubs $(LOCAL_JAVA_LIBRARIES)
+    $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, core.current.stubs)
   else
-    ifneq (,$(call has-system-sdk-version,$(LOCAL_SDK_VERSION)))
-      ifeq (,$(TARGET_BUILD_APPS))
-        LOCAL_JAVA_LIBRARIES := system_sdk_v$(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION)) $(LOCAL_JAVA_LIBRARIES)
-        $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, system_sdk_v$(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION)))
-      else
-        LOCAL_JAVA_LIBRARIES := sdk_v$(LOCAL_SDK_VERSION) $(LOCAL_JAVA_LIBRARIES)
-        $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, sdk_v$(LOCAL_SDK_VERSION))
-      endif
-    else
-      LOCAL_JAVA_LIBRARIES := sdk_v$(LOCAL_SDK_VERSION) $(LOCAL_JAVA_LIBRARIES)
-      $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, sdk_v$(LOCAL_SDK_VERSION))
-    endif
+    # core_<ver> is a subset of <ver>. Instead of defining a prebuilt lib for core_<ver>,
+    # use the stub for <ver> when building for apps.
+    _version := $(patsubst core_%,%,$(LOCAL_SDK_VERSION))
+    LOCAL_JAVA_LIBRARIES := sdk_v$(_version) $(LOCAL_JAVA_LIBRARIES)
+    $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, sdk_v$(_version))
+    _version :=
   endif
 else
-  LOCAL_JAVA_LIBRARIES := core-oj core-libart ext framework $(LOCAL_JAVA_LIBRARIES)
+  ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+    LOCAL_JAVA_LIBRARIES := core-oj core-libart
+  else
+    LOCAL_JAVA_LIBRARIES := core-oj core-libart ext framework $(LOCAL_JAVA_LIBRARIES)
+  endif
   $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, core-oj):$(call java-lib-files, core-libart)
 endif  # LOCAL_SDK_VERSION
 LOCAL_JAVA_LIBRARIES := $(sort $(LOCAL_JAVA_LIBRARIES))
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 255c02b..6a7fbd1 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -179,6 +179,7 @@
 TARGET_COPY_OUT_ASAN := $(TARGET_COPY_OUT_DATA)/asan
 TARGET_COPY_OUT_OEM := oem
 TARGET_COPY_OUT_ODM := odm
+TARGET_COPY_OUT_PRODUCT := product
 TARGET_COPY_OUT_ROOT := root
 TARGET_COPY_OUT_RECOVERY := recovery
 
@@ -198,6 +199,17 @@
 TARGET_COPY_OUT_VENDOR := $(_vendor_path_placeholder)
 ###########################################
 
+###########################################
+# Define TARGET_COPY_OUT_PRODUCT to a placeholder, because at this point
+# we don't know whether the device wants to build a separate product.img
+# or just build product files into system.img.
+# A device can set up TARGET_COPY_OUT_PRODUCT to "product" in its
+# BoardConfig.mk.
+# We'll substitute with the real value after loading BoardConfig.mk.
+_product_path_placeholder := ||PRODUCT-PATH-PH||
+TARGET_COPY_OUT_PRODUCT := $(_product_path_placeholder)
+###########################################
+
 #################################################################
 # Set up minimal BOOTCLASSPATH list of jars to build/execute
 # java code with dalvikvm/art.
@@ -273,6 +285,29 @@
 else ifdef BOARD_USES_VENDORIMAGE
 $(error TARGET_COPY_OUT_VENDOR must be set to 'vendor' to use a vendor image)
 endif
+
+###########################################
+# Now we can substitute with the real value of TARGET_COPY_OUT_PRODUCT
+ifeq ($(TARGET_COPY_OUT_PRODUCT),$(_product_path_placeholder))
+TARGET_COPY_OUT_PRODUCT := system/product
+else ifeq ($(filter product system/product,$(TARGET_COPY_OUT_PRODUCT)),)
+$(error TARGET_COPY_OUT_PRODUCT must be either 'product' or 'system/product', seeing '$(TARGET_COPY_OUT_PRODUCT)'.)
+endif
+PRODUCT_COPY_FILES := $(subst $(_product_path_placeholder),$(TARGET_COPY_OUT_PRODUCT),$(PRODUCT_COPY_FILES))
+
+BOARD_USES_PRODUCTIMAGE :=
+ifdef BOARD_PREBUILT_PRODUCTIMAGE
+BOARD_USES_PRODUCTIMAGE := true
+endif
+ifdef BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE
+BOARD_USES_PRODUCTIMAGE := true
+endif
+ifeq ($(TARGET_COPY_OUT_PRODUCT),product)
+BOARD_USES_PRODUCTIMAGE := true
+else ifdef BOARD_USES_PRODUCTIMAGE
+$(error TARGET_COPY_OUT_PRODUCT must be set to 'product' to use a product image)
+endif
+
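
A hedged BoardConfig.mk sketch of the two supported configurations; the file-system type is a placeholder value:

    # Build a separate product.img:
    TARGET_COPY_OUT_PRODUCT := product
    BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE := ext4

Leaving TARGET_COPY_OUT_PRODUCT untouched instead resolves the placeholder to system/product, i.e. product files are packed into system.img and no product image is built.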
 ###########################################
 # Ensure that only TARGET_RECOVERY_UPDATER_LIBS *or* AB_OTA_UPDATER is set.
 TARGET_RECOVERY_UPDATER_LIBS ?=
@@ -304,6 +339,13 @@
   $(foreach v,$(PRODUCT_EXTRA_VNDK_VERSIONS),$(call check_vndk_version,$(v)))
 endif
 
+# Ensure that BOARD_SYSTEMSDK_VERSIONS are all within PLATFORM_SYSTEMSDK_VERSIONS
+_unsupported_systemsdk_versions := $(filter-out $(PLATFORM_SYSTEMSDK_VERSIONS),$(BOARD_SYSTEMSDK_VERSIONS))
+ifneq (,$(_unsupported_systemsdk_versions))
+  $(error System SDK versions '$(_unsupported_systemsdk_versions)' in BOARD_SYSTEMSDK_VERSIONS are not supported.\
+          Supported versions are $(PLATFORM_SYSTEMSDK_VERSIONS))
+endif
+
 # ---------------------------------------------------------------
 # Set up configuration for target machine.
 # The following must be set:
@@ -618,6 +660,39 @@
 endif
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_APPS := $(TARGET_OUT_ODM_APPS)
 
+TARGET_OUT_PRODUCT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_PRODUCT)
+ifneq ($(filter address,$(SANITIZE_TARGET)),)
+target_out_product_shared_libraries_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/system
+ifeq ($(SANITIZE_LITE),true)
+# When using SANITIZE_LITE, APKs must not be packaged with sanitized libraries, as they will not
+# work with unsanitized app_process. For simplicity, generate APKs into /data/asan/.
+target_out_product_app_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/product
+else
+target_out_product_app_base := $(TARGET_OUT_PRODUCT)
+endif
+else
+target_out_product_shared_libraries_base := $(TARGET_OUT)
+target_out_product_app_base := $(TARGET_OUT_PRODUCT)
+endif
+
+ifeq ($(TARGET_IS_64_BIT),true)
+TARGET_OUT_PRODUCT_SHARED_LIBRARIES := $(target_out_product_shared_libraries_base)/lib64
+else
+TARGET_OUT_PRODUCT_SHARED_LIBRARIES := $(target_out_product_shared_libraries_base)/lib
+endif
+TARGET_OUT_PRODUCT_JAVA_LIBRARIES:= $(TARGET_OUT_PRODUCT)/framework
+TARGET_OUT_PRODUCT_APPS := $(target_out_product_app_base)/app
+TARGET_OUT_PRODUCT_APPS_PRIVILEGED := $(target_out_product_app_base)/priv-app
+TARGET_OUT_PRODUCT_ETC := $(TARGET_OUT_PRODUCT)/etc
+
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_SHARED_LIBRARIES := $(target_out_product_shared_libraries_base)/lib/$(TARGET_2ND_ARCH)
+else
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_SHARED_LIBRARIES := $(target_out_product_shared_libraries_base)/lib
+endif
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_APPS := $(TARGET_OUT_PRODUCT_APPS)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_PRODUCT_APPS_PRIVILEGED := $(TARGET_OUT_PRODUCT_APPS_PRIVILEGED)
+
 TARGET_OUT_BREAKPAD := $(PRODUCT_OUT)/breakpad
 
 TARGET_OUT_UNSTRIPPED := $(PRODUCT_OUT)/symbols
@@ -657,13 +732,3 @@
 ifeq ($(CALLED_FROM_SETUP),true)
 PRINT_BUILD_CONFIG ?= true
 endif
-
-ifeq ($(USE_CLANG_PLATFORM_BUILD),)
-USE_CLANG_PLATFORM_BUILD := true
-endif
-
-ifneq ($(USE_CLANG_PLATFORM_BUILD),true)
-ifneq ($(USE_CLANG_PLATFORM_BUILD),false)
-$(error USE_CLANG_PLATFORM_BUILD must be true or false)
-endif
-endif
diff --git a/core/executable_internal.mk b/core/executable_internal.mk
index 0aec275..4a62fbf 100644
--- a/core/executable_internal.mk
+++ b/core/executable_internal.mk
@@ -12,7 +12,9 @@
 LOCAL_MODULE_SUFFIX := $(TARGET_EXECUTABLE_SUFFIX)
 endif
 
+ifdef target-executable-hook
 $(call target-executable-hook)
+endif
 
 skip_build_from_source :=
 ifdef LOCAL_PREBUILT_MODULE_FILE
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index 1ff9b91..20663d1 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -67,6 +67,8 @@
 
 include $(BUILD_SYSTEM)/java_common.mk
 
+include $(BUILD_SYSTEM)/sdk_check.mk
+
 $(cleantarget): PRIVATE_CLEAN_FILES += $(intermediates.COMMON)
 
 # List of dependencies for anything that needs all java sources in place
@@ -134,7 +136,7 @@
                               $(full_static_java_libs)  | $(MERGE_ZIPS)
 	$(if $(PRIVATE_JAR_MANIFEST), $(hide) sed -e "s/%BUILD_NUMBER%/$(BUILD_NUMBER_FROM_FILE)/" \
             $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf)
-	$(MERGE_ZIPS) -j $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
+	$(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
             $(if $(PRIVATE_DONT_DELETE_JAR_META_INF),,-stripDir META-INF -zipToNotStrip $<) \
             $@ $< $(call reverse-list,$(PRIVATE_STATIC_JAVA_LIBRARIES))
 
@@ -193,7 +195,7 @@
 
 endif # !LOCAL_IS_STATIC_JAVA_LIBRARY
 
-ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
+ifneq (,$(filter-out current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
   my_default_app_target_sdk := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
   my_sdk_version := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
 else
diff --git a/core/host_executable_internal.mk b/core/host_executable_internal.mk
index 19200fd..c4f9f66 100644
--- a/core/host_executable_internal.mk
+++ b/core/host_executable_internal.mk
@@ -12,7 +12,9 @@
 LOCAL_MODULE_SUFFIX := $($(my_prefix)EXECUTABLE_SUFFIX)
 endif
 
+ifdef host-executable-hook
 $(call host-executable-hook)
+endif
 
 skip_build_from_source :=
 ifdef LOCAL_PREBUILT_MODULE_FILE
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index f34f2f1..5176f37 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -58,11 +58,6 @@
 # Run build/make/tools/java-layers.py for more details.
 layers_file := $(addprefix $(LOCAL_PATH)/, $(LOCAL_JAVA_LAYERS_FILE))
 
-# If error prone is enabled then add LOCAL_ERROR_PRONE_FLAGS to LOCAL_JAVACFLAGS
-ifeq ($(RUN_ERROR_PRONE),true)
-LOCAL_JAVACFLAGS += $(LOCAL_ERROR_PRONE_FLAGS)
-endif
-
 # List of dependencies for anything that needs all java sources in place
 java_sources_deps := \
     $(java_sources) \
@@ -99,7 +94,7 @@
                               $(full_static_java_libs) | $(MERGE_ZIPS)
 	$(if $(PRIVATE_JAR_MANIFEST), $(hide) sed -e "s/%BUILD_NUMBER%/$(BUILD_NUMBER_FROM_FILE)/" \
             $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf)
-	$(MERGE_ZIPS) -j $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
+	$(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
             -stripDir META-INF -zipToNotStrip $< $@ $< $(call reverse-list,$(PRIVATE_STATIC_JAVA_LIBRARIES))
 
 # Run jarjar if necessary, otherwise just copy the file.
diff --git a/core/host_java_library_common.mk b/core/host_java_library_common.mk
index 8df4b37..51e2d94 100644
--- a/core/host_java_library_common.mk
+++ b/core/host_java_library_common.mk
@@ -48,3 +48,8 @@
 
 LOCAL_INTERMEDIATE_SOURCE_DIR := $(intermediates.COMMON)/src
 LOCAL_JAVA_LIBRARIES := $(sort $(LOCAL_JAVA_LIBRARIES))
+
+# If error prone is enabled then add LOCAL_ERROR_PRONE_FLAGS to LOCAL_JAVACFLAGS
+ifeq ($(RUN_ERROR_PRONE),true)
+LOCAL_JAVACFLAGS += $(LOCAL_ERROR_PRONE_FLAGS)
+endif
diff --git a/core/host_shared_library_internal.mk b/core/host_shared_library_internal.mk
index 5e199cc..0a3b317 100644
--- a/core/host_shared_library_internal.mk
+++ b/core/host_shared_library_internal.mk
@@ -20,7 +20,9 @@
 $(error $(LOCAL_PATH): Cannot set module stem for a library)
 endif
 
+ifdef host-shared-library-hook
 $(call host-shared-library-hook)
+endif
 
 skip_build_from_source :=
 ifdef LOCAL_PREBUILT_MODULE_FILE
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index 80e0c24..ab5fd2c 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -24,16 +24,11 @@
 ifdef my_embed_jni
 # App explicitly requires the prebuilt NDK STL shared libraries.
 # The NDK STL shared libraries should never go to the system image.
-ifneq ($(filter $(LOCAL_NDK_STL_VARIANT), stlport_shared c++_shared),)
+ifeq ($(LOCAL_NDK_STL_VARIANT),c++_shared)
 ifndef LOCAL_SDK_VERSION
 $(error LOCAL_SDK_VERSION must be defined with LOCAL_NDK_STL_VARIANT, \
     LOCAL_PACKAGE_NAME=$(LOCAL_PACKAGE_NAME))
 endif
-endif
-ifeq (stlport_shared,$(LOCAL_NDK_STL_VARIANT))
-my_jni_shared_libraries += \
-    $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources/cxx-stl/stlport/libs/$(TARGET_$(my_2nd_arch_prefix)CPU_ABI)/libstlport_shared.so
-else ifeq (c++_shared,$(LOCAL_NDK_STL_VARIANT))
 my_jni_shared_libraries += \
     $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources/cxx-stl/llvm-libc++/libs/$(TARGET_$(my_2nd_arch_prefix)CPU_ABI)/libc++_shared.so
 endif
diff --git a/core/java.mk b/core/java.mk
index ee071c9..442d6cb 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -73,6 +73,7 @@
 full_classes_proguard_jar := $(intermediates.COMMON)/classes-proguard.jar
 full_classes_combined_jar := $(intermediates.COMMON)/classes-combined.jar
 built_dex_intermediate := $(intermediates.COMMON)/dex/classes.dex
+built_dex_hiddenapi := $(intermediates.COMMON)/dex-hiddenapi/classes.dex
 full_classes_stubs_jar := $(intermediates.COMMON)/stubs.jar
 java_source_list_file := $(intermediates.COMMON)/java-source-list
 
@@ -124,7 +125,7 @@
 else
   ifneq (,$(LOCAL_SDK_VERSION))
     # Set target-api for LOCAL_SDK_VERSIONs other than current.
-    ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
+    ifneq (,$(filter-out current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
       renderscript_target_api := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
     endif
   endif  # LOCAL_SDK_VERSION is set
@@ -149,7 +150,7 @@
 renderscript_flags += $(LOCAL_RENDERSCRIPT_FLAGS)
 
 # prepend the RenderScript system include path
-ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_VERSION))),)
+ifneq ($(filter-out current system_current test_current core_current,$(LOCAL_SDK_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_VERSION))),)
 # if a numeric LOCAL_SDK_VERSION, or current LOCAL_SDK_VERSION with TARGET_BUILD_APPS
 LOCAL_RENDERSCRIPT_INCLUDES := \
     $(HISTORICAL_SDK_VERSIONS_ROOT)/renderscript/clang-include \
@@ -265,7 +266,7 @@
 
 aidl_preprocess_import :=
 ifdef LOCAL_SDK_VERSION
-ifneq ($(filter current system_current test_current, $(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS)),)
+ifneq ($(filter current system_current test_current core_current, $(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS)),)
   # LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS
   aidl_preprocess_import := $(TARGET_OUT_COMMON_INTERMEDIATES)/framework.aidl
 else
@@ -356,6 +357,8 @@
 
 include $(BUILD_SYSTEM)/java_common.mk
 
+include $(BUILD_SYSTEM)/sdk_check.mk
+
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HAS_RS_SOURCES := $(if $(renderscript_sources),true)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RS_SOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/renderscript
 
@@ -516,7 +519,7 @@
                               $(full_static_java_libs) | $(MERGE_ZIPS)
 	$(if $(PRIVATE_JAR_MANIFEST), $(hide) sed -e "s/%BUILD_NUMBER%/$(BUILD_NUMBER_FROM_FILE)/" \
             $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf)
-	$(MERGE_ZIPS) -j $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
+	$(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
             $(if $(PRIVATE_DONT_DELETE_JAR_META_INF),,-stripDir META-INF -zipToNotStrip $<) \
             $@ $< $(call reverse-list,$(PRIVATE_STATIC_JAVA_LIBRARIES))
 
@@ -609,7 +612,7 @@
 my_proguard_sdk_raise :=
 ifdef LOCAL_SDK_VERSION
 ifdef TARGET_BUILD_APPS
-ifeq (,$(filter current system_current test_current, $(LOCAL_SDK_VERSION)))
+ifeq (,$(filter current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
   my_proguard_sdk_raise := $(call java-lib-header-files, sdk_vcurrent)
 endif
 else
@@ -762,7 +765,14 @@
 endif
 endif
 
-$(built_dex): $(built_dex_intermediate)
+ifneq ($(filter $(LOCAL_MODULE),$(PRODUCT_BOOT_JARS)),) # is_boot_jar
+  $(eval $(call hiddenapi-copy-dex-files,$(built_dex_intermediate),$(built_dex_hiddenapi)))
+  built_dex_copy_from := $(built_dex_hiddenapi)
+else # !is_boot_jar
+  built_dex_copy_from := $(built_dex_intermediate)
+endif # is_boot_jar
+
+$(built_dex): $(built_dex_copy_from)
 	@echo Copying: $@
 	$(hide) mkdir -p $(dir $@)
 	$(hide) rm -f $(dir $@)/classes*.dex
@@ -798,7 +808,7 @@
 
 endif  # full_classes_jar is defined
 
-ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
+ifneq (,$(filter-out current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
   my_default_app_target_sdk := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
   my_sdk_version := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
 else
diff --git a/core/java_common.mk b/core/java_common.mk
index cfc9d7f..3a5c5c6 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -258,17 +258,15 @@
       full_java_bootclasspath_libs := $(call java-lib-header-files,android_system_stubs_current)
     else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),test_current)
       full_java_bootclasspath_libs := $(call java-lib-header-files,android_test_stubs_current)
+    else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),core_current)
+      full_java_bootclasspath_libs := $(call java-lib-header-files,core.current.stubs)
     else
-      ifneq (,$(call has-system-sdk-version,$(LOCAL_SDK_VERSION)))
-        ifeq (,$(TARGET_BUILD_APPS))
-          full_java_bootclasspath_libs := $(call java-lib-header-files,system_sdk_v$(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION)))
-        else
-          full_java_bootclasspath_libs := $(call java-lib-header-files,sdk_v$(LOCAL_SDK_VERSION))
-        endif
-      else
-        full_java_bootclasspath_libs := $(call java-lib-header-files,sdk_v$(LOCAL_SDK_VERSION))
-      endif
-    endif # current, system_current, system_${VER} or test_current
+      # core_<ver> is a subset of <ver>. Instead of defining a prebuilt lib for core_<ver>,
+      # use the stub for <ver> when building for apps.
+      _version := $(patsubst core_%,%,$(LOCAL_SDK_VERSION))
+      full_java_bootclasspath_libs := $(call java-lib-header-files,sdk_v$(_version))
+      _version :=
+    endif # current, system_current, system_${VER}, test_current or core_current
   endif # LOCAL_SDK_VERSION
 
   ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
@@ -358,7 +356,10 @@
 
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH := $(full_java_bootclasspath_libs)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_EMPTY_BOOTCLASSPATH := $(empty_bootclasspath)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SYSTEM_MODULES := $(my_system_modules_dir)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SYSTEM_MODULES := $(my_system_modules)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SYSTEM_MODULES_DIR := $(my_system_modules_dir)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SYSTEM_MODULES_LIBS := $(call java-lib-files,$(SOONG_SYSTEM_MODULES_LIBS_$(my_system_modules)))
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_PATCH_MODULE := $(LOCAL_PATCH_MODULE)
 
 ifndef LOCAL_IS_HOST_MODULE
 # This is set by packages that are linking to other packages that export
@@ -457,19 +458,23 @@
 ifeq ($(LOCAL_SDK_VERSION),system_current)
 my_link_type := java:system
 my_warn_types := java:platform
-my_allowed_types := java:sdk java:system
+my_allowed_types := java:sdk java:system java:core
 else ifneq (,$(call has-system-sdk-version,$(LOCAL_SDK_VERSION)))
 my_link_type := java:system
 my_warn_types := java:platform
-my_allowed_types := java:sdk java:system
+my_allowed_types := java:sdk java:system java:core
+else ifeq ($(LOCAL_SDK_VERSION),core_current)
+my_link_type := java:core
+my_warn_types :=
+my_allowed_types := java:core
 else ifneq ($(LOCAL_SDK_VERSION),)
 my_link_type := java:sdk
 my_warn_types := java:system java:platform
-my_allowed_types := java:sdk
+my_allowed_types := java:sdk java:core
 else
 my_link_type := java:platform
 my_warn_types :=
-my_allowed_types := java:sdk java:system java:platform
+my_allowed_types := java:sdk java:system java:platform java:core
 endif
 
 ifdef LOCAL_AAPT2_ONLY
diff --git a/core/java_library.mk b/core/java_library.mk
index 8cf0074..1b914f5 100644
--- a/core/java_library.mk
+++ b/core/java_library.mk
@@ -72,10 +72,10 @@
 	$(call add-dex-to-package-arg,$@.tmp)
 	$(hide) $(ZIPTIME) $@.tmp
 	$(call commit-change-for-toc,$@)
-ifneq (,$(filter $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES), $(LOCAL_MODULE)))
+ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
 	$(uncompress-dexs)
 	$(align-package)
-endif  # PRODUCT_LOADED_BY_PRIVILEGED_MODULES
+endif  # LOCAL_UNCOMPRESS_DEX
 
 .KATI_RESTAT: $(common_javalib.jar)
 
diff --git a/core/local_systemsdk.mk b/core/local_systemsdk.mk
new file mode 100644
index 0000000..6dab346
--- /dev/null
+++ b/core/local_systemsdk.mk
@@ -0,0 +1,56 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+ifdef BOARD_SYSTEMSDK_VERSIONS
+  # Apps and jars in the vendor or odm partition are forced to build against the System SDK.
+  _is_vendor_app :=
+  ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
+    # Note: no need to check LOCAL_MODULE_PATH* since LOCAL_[VENDOR|ODM|OEM]_MODULE is already
+    # set correctly before this is included.
+    _is_vendor_app := true
+  endif
+  ifneq (,$(filter JAVA_LIBRARIES APPS,$(LOCAL_MODULE_CLASS)))
+    ifndef LOCAL_SDK_VERSION
+      ifeq ($(_is_vendor_app),true)
+        LOCAL_SDK_VERSION := system_current
+      endif
+    endif
+  endif
+endif
+
+# Ensure that the selected System SDK version is one of the supported versions.
+# The range of supported versions becomes narrower when BOARD_SYSTEMSDK_VERSIONS
+# is set, since it must be a subset of PLATFORM_SYSTEMSDK_VERSIONS.
+ifneq (,$(call has-system-sdk-version,$(LOCAL_SDK_VERSION)))
+  ifneq ($(_is_vendor_app),true)
+    # Apps bundled in the system partition can use all System SDK versions provided by the platform.
+    _supported_systemsdk_versions := $(PLATFORM_SYSTEMSDK_VERSIONS)
+  else ifdef BOARD_SYSTEMSDK_VERSIONS
+    # When BOARD_SYSTEMSDK_VERSIONS is set, vendor apps are restricted to those versions,
+    # which must be a subset of PLATFORM_SYSTEMSDK_VERSIONS
+    _supported_systemsdk_versions := $(BOARD_SYSTEMSDK_VERSIONS)
+  else
+    # Otherwise, vendor APKs are treated the same as system apps
+    _supported_systemsdk_versions := $(PLATFORM_SYSTEMSDK_VERSIONS)
+  endif
+  _system_sdk_version := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
+  ifneq ($(_system_sdk_version),$(filter $(_system_sdk_version),$(_supported_systemsdk_versions)))
+    $(call pretty-error,Incompatible LOCAL_SDK_VERSION '$(LOCAL_SDK_VERSION)'. \
+           System SDK version '$(_system_sdk_version)' is not supported. Supported versions are: $(_supported_systemsdk_versions))
+  endif
+  _system_sdk_version :=
+  _supported_systemsdk_versions :=
+endif
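
Putting the pieces of this new file together, a hedged sketch of how the enforcement plays out; board and module values are invented:

    # BoardConfig.mk
    BOARD_SYSTEMSDK_VERSIONS := 28

    # Android.mk of a vendor app that sets no LOCAL_SDK_VERSION
    LOCAL_MODULE_CLASS := APPS
    LOCAL_VENDOR_MODULE := true
    # -> LOCAL_SDK_VERSION is forced to system_current by the block above, while
    #    an explicit LOCAL_SDK_VERSION := system_27 would hit the pretty-error,
    #    27 not being in BOARD_SYSTEMSDK_VERSIONS.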
diff --git a/core/local_vsdk.mk b/core/local_vsdk.mk
deleted file mode 100644
index f798d47..0000000
--- a/core/local_vsdk.mk
+++ /dev/null
@@ -1,19 +0,0 @@
-
-ifdef BOARD_VSDK_VERSION
-# Set LOCAL_SDK_VERSION to system_current, If LOCAL_SDK_VERSION is not defined and LOCAL_VENDOR_MODULE is true
-  _is_vendor_app :=
-  ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_OEM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
-    _is_vendor_app := true
-  else
-    ifneq (,$(filter $(TARGET_OUT_VENDOR)%,$(LOCAL_MODULE_PATH) $(LOCAL_MODULE_PATH_32) $(LOCAL_MODULE_PATH_64)))
-      _is_vendor_app := true
-    endif
-  endif
-  ifneq (,$(filter JAVA_LIBRARIES APPS,$(LOCAL_MODULE_CLASS)))
-    ifndef LOCAL_SDK_VERSION
-      ifeq ($(_is_vendor_app),true)
-        LOCAL_SDK_VERSION := system_current
-      endif
-    endif
-  endif
-endif
diff --git a/core/main.mk b/core/main.mk
index fe178da..ef55b4e 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -60,14 +60,24 @@
 # without changing the command line every time.  Avoids rebuilds
 # when using ninja.
 $(shell mkdir -p $(OUT_DIR) && \
-    echo -n $(BUILD_NUMBER) > $(OUT_DIR)/build_number.txt && \
-    echo -n $(BUILD_DATETIME) > $(OUT_DIR)/build_date.txt)
+    echo -n $(BUILD_NUMBER) > $(OUT_DIR)/build_number.txt)
+BUILD_NUMBER_FILE := $(OUT_DIR)/build_number.txt
+
 ifeq ($(HOST_OS),darwin)
 DATE_FROM_FILE := date -r $(BUILD_DATETIME_FROM_FILE)
 else
 DATE_FROM_FILE := date -d @$(BUILD_DATETIME_FROM_FILE)
 endif
 
+# Pick a reasonable string to use to identify files.
+ifeq ($(strip $(HAS_BUILD_NUMBER)),false)
+  # BUILD_NUMBER has a timestamp in it, which means that
+  # it will change every time.  Pick a stable value.
+  FILE_NAME_TAG := eng.$(USER)
+else
+  FILE_NAME_TAG := $(file <$(BUILD_NUMBER_FILE))
+endif
+
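
The $(file <$(BUILD_NUMBER_FILE)) form reads the persisted build number at parse time (GNU make's read-from-file syntax), while rules that need it at execution time keep going through the shell so the generated commands stay stable across builds. A small sketch; the target is a placeholder and the recipe line needs a leading tab:

    # Read the number when the rule runs instead of baking it into the command line.
    version_stamp.txt:
    	printf 'build %s\n' "$$(cat $(BUILD_NUMBER_FILE))" > $@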
 # Make an empty directory, which can be used to make empty jars
 EMPTY_DIRECTORY := $(OUT_DIR)/empty
 $(shell mkdir -p $(EMPTY_DIRECTORY) && rm -rf $(EMPTY_DIRECTORY)/*)
@@ -187,14 +197,16 @@
 
 #
 # -----------------------------------------------------------------
-# Enable dynamic linker developer warnings for userdebug, eng
-# and non-REL builds
+# Enable dynamic linker and hidden API developer warnings for
+# userdebug, eng and non-REL builds
 ifneq ($(TARGET_BUILD_VARIANT),user)
-  ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1
+  ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1 \
+                                 ro.art.hiddenapi.warning=1
 else
 # Enable it for user builds as long as they are not final.
 ifneq ($(PLATFORM_VERSION_CODENAME),REL)
-  ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1
+  ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1 \
+                                 ro.art.hiddenapi.warning=1
 endif
 endif
 
@@ -205,6 +217,14 @@
 	variables like PRODUCT_SEPOLICY_SPLIT should be used until that is \
 	possible.)
 
+# Set ro.actionable_compatible_property.enabled so the runtime knows whether the
+# whitelist of actionable compatible properties is enabled.
+ifeq ($(PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE),true)
+ADDITIONAL_DEFAULT_PROPERTIES += ro.actionable_compatible_property.enabled=false
+else
+ADDITIONAL_DEFAULT_PROPERTIES += ro.actionable_compatible_property.enabled=${PRODUCT_COMPATIBLE_PROPERTY}
+endif
+
 # -----------------------------------------------------------------
 ###
 ### In this section we set up the things that are different
@@ -600,6 +620,31 @@
 endef
 $(call add-all-host-to-target-required-modules-deps)
 
+# Sets up dependencies such that whenever a target module is installed,
+# any host modules listed in $(ALL_MODULES.$(m).HOST_REQUIRED) will also be installed
+define add-all-target-to-host-required-modules-deps
+$(foreach m,$(ALL_MODULES), \
+  $(eval req_mods := $(ALL_MODULES.$(m).HOST_REQUIRED))\
+  $(if $(req_mods), \
+    $(eval req_files := )\
+    $(foreach req_mod,$(req_mods), \
+      $(eval req_file := $(filter $(HOST_OUT)/%, $(call module-installed-files,$(req_mod)))) \
+      $(if $(strip $(req_file)),\
+        ,\
+        $(error $(m).LOCAL_HOST_REQUIRED_MODULES : illegal value $(req_mod) : not a host module. If you want to specify target modules to be required to be installed along with your target module, add those module names to LOCAL_REQUIRED_MODULES instead)\
+      )\
+      $(eval req_files := $(req_files)$(space)$(req_file))\
+    )\
+    $(eval req_files := $(strip $(req_files)))\
+    $(eval mod_files := $(filter $(TARGET_OUT_ROOT)/%, $(call module-installed-files,$(m))))\
+    $(eval mod_files := $(filter-out $(req_files),$(mod_files)))\
+    $(if $(mod_files),\
+      $(eval $(call add-required-deps, $(mod_files),$(req_files))) \
+    )\
+  )\
+)
+endef
+$(call add-all-target-to-host-required-modules-deps)
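
A hedged usage sketch of the LOCAL_HOST_REQUIRED_MODULES hook that this loop services; module names are invented:

    # In a device-side test module's Android.mk:
    LOCAL_MODULE := acme_device_test
    LOCAL_HOST_REQUIRED_MODULES := acme_host_helper
    # Installing acme_device_test now also installs acme_host_helper under
    # $(HOST_OUT); naming a non-host module here trips the error above.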
 
 t_m :=
 h_m :=
@@ -1033,6 +1078,9 @@
 .PHONY: vendorimage
 vendorimage: $(INSTALLED_VENDORIMAGE_TARGET)
 
+.PHONY: productimage
+productimage: $(INSTALLED_PRODUCTIMAGE_TARGET)
+
 .PHONY: systemotherimage
 systemotherimage: $(INSTALLED_SYSTEMOTHERIMAGE_TARGET)
 
@@ -1056,9 +1104,11 @@
 	$(INSTALLED_CACHEIMAGE_TARGET) \
 	$(INSTALLED_BPTIMAGE_TARGET) \
 	$(INSTALLED_VENDORIMAGE_TARGET) \
+	$(INSTALLED_PRODUCTIMAGE_TARGET) \
 	$(INSTALLED_SYSTEMOTHERIMAGE_TARGET) \
 	$(INSTALLED_FILES_FILE) \
 	$(INSTALLED_FILES_FILE_VENDOR) \
+	$(INSTALLED_FILES_FILE_PRODUCT) \
 	$(INSTALLED_FILES_FILE_SYSTEMOTHER) \
 	soong_docs
 
@@ -1124,6 +1174,7 @@
     $(COVERAGE_ZIP) \
     $(INSTALLED_FILES_FILE) \
     $(INSTALLED_FILES_FILE_VENDOR) \
+    $(INSTALLED_FILES_FILE_PRODUCT) \
     $(INSTALLED_FILES_FILE_SYSTEMOTHER) \
     $(INSTALLED_BUILD_PROP_TARGET) \
     $(BUILT_TARGET_FILES_PACKAGE) \
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index ca2dcee..2256f98 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -19,9 +19,7 @@
 	boottarball-nodeps \
 	brillo_tests \
 	btnod \
-	build-art% \
 	build_kernel-nodeps \
-	clean-oat% \
 	continuous_instrumentation_tests \
 	continuous_native_tests \
 	cts \
@@ -47,11 +45,9 @@
 	systemimage-nodeps \
 	systemtarball-nodeps \
 	target-files-package \
-	test-art% \
 	user \
 	userdataimage \
 	userdebug \
-	valgrind-test-art% \
 	vts \
 	win_sdk \
 	winsdk-tools
diff --git a/core/package_internal.mk b/core/package_internal.mk
index e153a8a..cd3a741 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -111,6 +111,8 @@
         enforce_rro_enabled :=
       else ifeq (true,$(LOCAL_ODM_MODULE))
         enforce_rro_enabled :=
+      else ifeq (true,$(LOCAL_PRODUCT_MODULE))
+        enforce_rro_enabled :=
       endif
     else ifeq ($(filter $(TARGET_OUT)/%,$(LOCAL_MODULE_PATH)),)
       enforce_rro_enabled :=
@@ -392,7 +394,7 @@
 else
 ifneq (,$(LOCAL_SDK_VERSION))
 # Set target-api for LOCAL_SDK_VERSIONs other than current.
-ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
+ifneq (,$(filter-out current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
 renderscript_target_api := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
 endif
 endif  # LOCAL_SDK_VERSION is set
@@ -462,15 +464,16 @@
 
 endif  # need_compile_res
 
-ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-# We need to explicitly clear this var so that we don't
-# inherit the value from whomever caused us to be built.
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_AAPT_INCLUDES :=
-else
+framework_res_package_export :=
+framework_res_package_export_deps :=
+
+ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
 # Most packages should link against the resources defined by framework-res.
 # Even if they don't have their own resources, they may use framework
 # resources.
-ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
+ifeq ($(LOCAL_SDK_RES_VERSION),core_current)
+# core_current doesn't contain any framework resources.
+else ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
 # for released sdk versions, the platform resources were built into android.jar.
 framework_res_package_export := \
     $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_RES_VERSION)/android.jar
@@ -484,6 +487,8 @@
 framework_res_package_export_deps := \
     $(dir $(framework_res_package_export))src/R.stamp
 endif # LOCAL_SDK_RES_VERSION
+endif # LOCAL_NO_STANDARD_LIBRARIES
+
 all_library_res_package_exports := \
     $(framework_res_package_export) \
     $(foreach lib,$(LOCAL_RES_LIBRARIES),\
@@ -500,7 +505,6 @@
 ifdef LOCAL_USE_AAPT2
 $(my_res_package) : $(all_library_res_package_export_deps)
 endif
-endif # LOCAL_NO_STANDARD_LIBRARIES
 
 ifneq ($(full_classes_jar),)
 $(LOCAL_BUILT_MODULE): PRIVATE_DEX_FILE := $(built_dex)
@@ -580,7 +584,7 @@
 else
 $(LOCAL_BUILT_MODULE): PRIVATE_RESOURCE_LIST := $(all_res_assets)
 $(LOCAL_BUILT_MODULE) : $(all_res_assets) $(full_android_manifest) $(AAPT) $(ZIPALIGN)
-endif
+endif  # LOCAL_USE_AAPT2
 ifdef LOCAL_COMPRESSED_MODULE
 $(LOCAL_BUILT_MODULE) : $(MINIGZIP)
 endif
@@ -605,24 +609,19 @@
 	$(call add-jar-resources-to-package,$@,$(PRIVATE_FULL_CLASSES_JAR),$(PRIVATE_RESOURCE_INTERMEDIATES_DIR))
 endif
 endif  # full_classes_jar
+ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
+	@# No need to align, sign-package below will do it.
+	$(uncompress-dexs)
+endif
 ifdef LOCAL_DEX_PREOPT
 ifneq ($(BUILD_PLATFORM_ZIP),)
 	@# Keep a copy of apk with classes.dex unstripped
 	$(hide) cp -f $@ $(dir $@)package.dex.apk
 endif  # BUILD_PLATFORM_ZIP
-ifneq (true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS))
-ifeq (true,$(LOCAL_PRIVILEGED_MODULE))
-	@# No need to align, sign-package below will do it.
-	$(uncompress-dexs)
-endif  # LOCAL_PRIVILEGED_MODULE
-endif  # DONT_UNCOMPRESS_PRIV_APPS_DEXS
 ifneq (nostripping,$(LOCAL_DEX_PREOPT))
 	$(call dexpreopt-remove-classes.dex,$@)
 endif
-endif
-ifneq (,$(filter $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES), $(LOCAL_MODULE)))
-	$(uncompress-dexs)
-endif  # PRODUCT_LOADED_BY_PRIVILEGED_MODULES
+endif  # LOCAL_DEX_PREOPT
 	$(sign-package)
 ifdef LOCAL_COMPRESSED_MODULE
 	$(compress-package)
@@ -646,6 +645,10 @@
 $(built_odex) : $(dir $(LOCAL_BUILT_MODULE))% : $(built_dex)
 	$(hide) mkdir -p $(dir $@) && rm -f $@
 	$(add-dex-to-package)
+ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
+	$(uncompress-dexs)
+	$(align-package)
+endif
 	$(hide) mv $@ $@.input
 	$(call dexpreopt-one-file,$@.input,$@)
 	$(hide) rm $@.input
diff --git a/core/pdk_config.mk b/core/pdk_config.mk
index c2f8b0a..f29c3dc 100644
--- a/core/pdk_config.mk
+++ b/core/pdk_config.mk
@@ -44,7 +44,7 @@
 	$(PDK_PLATFORM_JAVA_ZIP_JAVA_HOST_LIB_DIR)
 
 PDK_PLATFORM_JAVA_ZIP_CONTENTS += $(foreach lib_dir,$(PDK_PLATFORM_JAVA_ZIP_JAVA_LIB_DIR),\
-    $(lib_dir)/classes.jar $(lib_dir)/classes.jar.toc \
+    $(lib_dir)/classes.jar $(lib_dir)/classes-header.jar \
     $(lib_dir)/javalib.jar  $(lib_dir)/classes*.dex \
     $(lib_dir)/classes.dex.toc )
 
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index d934338..cb1d401 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -390,15 +390,9 @@
 $(built_module) : $(my_prebuilt_src_file) | $(ZIPALIGN) $(SIGNAPK_JAR)
 	$(transform-prebuilt-to-target)
 	$(uncompress-shared-libs)
-ifneq (true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS))
-ifeq (true,$(LOCAL_PRIVILEGED_MODULE))
+ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
 	$(uncompress-dexs)
-else
-  ifneq (,$(filter $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES), $(LOCAL_MODULE)))
-	  $(uncompress-dexs)
-  endif  # PRODUCT_LOADED_BY_PRIVILEGED_MODULES
-endif  # LOCAL_PRIVILEGED_MODULE
-endif  # DONT_UNCOMPRESS_PRIV_APPS_DEXS
+endif  # LOCAL_UNCOMPRESS_DEX
 ifdef LOCAL_DEX_PREOPT
 ifneq ($(BUILD_PLATFORM_ZIP),)
 	@# Keep a copy of apk with classes.dex unstripped
@@ -423,11 +417,19 @@
 endif  # ! LOCAL_REPLACE_PREBUILT_APK_INSTALLED
 
 ###############################
-## Rule to build the odex file
+## Rule to build the odex file.
+# If we don't strip the built module, use it as the dexpreopt input, since
+# dexpreopt can apply optimizations based on whether the built module
+# contains only uncompressed dex code.
 ifdef LOCAL_DEX_PREOPT
+ifeq (nostripping,$(LOCAL_DEX_PREOPT))
+$(built_odex) : $(built_module)
+	$(call dexpreopt-one-file,$<,$@)
+else
 $(built_odex) : $(my_prebuilt_src_file)
 	$(call dexpreopt-one-file,$<,$@)
 endif
+endif
 
 ###############################
 ## Install split apks.
@@ -544,6 +546,8 @@
 my_link_type := java:system
 else ifneq (,$(call has-system-sdk-version,$(LOCAL_SDK_VERSION)))
 my_link_type := java:system
+else ifeq ($(LOCAL_SDK_VERSION),core_current)
+my_link_type := java:core
 else ifneq ($(LOCAL_SDK_VERSION),)
 my_link_type := java:sdk
 else
diff --git a/core/product-graph.mk b/core/product-graph.mk
index 666a207..576d14d 100644
--- a/core/product-graph.mk
+++ b/core/product-graph.mk
@@ -104,6 +104,7 @@
 	$(hide) echo 'PRODUCT_PROPERTY_OVERRIDES=$$(PRODUCTS.$(strip $(1)).PRODUCT_PROPERTY_OVERRIDES)' >> $$@
 	$(hide) echo 'PRODUCT_DEFAULT_PROPERTY_OVERRIDES=$$(PRODUCTS.$(strip $(1)).PRODUCT_DEFAULT_PROPERTY_OVERRIDES)' >> $$@
 	$(hide) echo 'PRODUCT_SYSTEM_DEFAULT_PROPERTIES=$$(PRODUCTS.$(strip $(1)).PRODUCT_SYSTEM_DEFAULT_PROPERTIES)' >> $$@
+	$(hide) echo 'PRODUCT_PRODUCT_PROPERTIES=$$(PRODUCTS.$(strip $(1)).PRODUCT_PRODUCT_PROPERTIES)' >> $$@
 	$(hide) echo 'PRODUCT_CHARACTERISTICS=$$(PRODUCTS.$(strip $(1)).PRODUCT_CHARACTERISTICS)' >> $$@
 	$(hide) echo 'PRODUCT_COPY_FILES=$$(PRODUCTS.$(strip $(1)).PRODUCT_COPY_FILES)' >> $$@
 	$(hide) echo 'PRODUCT_OTA_PUBLIC_KEYS=$$(PRODUCTS.$(strip $(1)).PRODUCT_OTA_PUBLIC_KEYS)' >> $$@
diff --git a/core/product.mk b/core/product.mk
index f15f6b3..6cccebf 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -88,6 +88,7 @@
     PRODUCT_BRAND \
     PRODUCT_PROPERTY_OVERRIDES \
     PRODUCT_DEFAULT_PROPERTY_OVERRIDES \
+    PRODUCT_PRODUCT_PROPERTIES \
     PRODUCT_CHARACTERISTICS \
     PRODUCT_COPY_FILES \
     PRODUCT_OTA_PUBLIC_KEYS \
@@ -125,17 +126,21 @@
     PRODUCT_VERITY_SIGNING_KEY \
     PRODUCT_SYSTEM_VERITY_PARTITION \
     PRODUCT_VENDOR_VERITY_PARTITION \
+    PRODUCT_PRODUCT_VERITY_PARTITION \
     PRODUCT_SYSTEM_SERVER_DEBUG_INFO \
     PRODUCT_DEX_PREOPT_MODULE_CONFIGS \
+    PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER \
     PRODUCT_DEX_PREOPT_DEFAULT_FLAGS \
     PRODUCT_DEX_PREOPT_BOOT_FLAGS \
     PRODUCT_DEX_PREOPT_PROFILE_DIR \
     PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION \
+    PRODUCT_DEX_PREOPT_GENERATE_DM_FILES \
     PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE \
     PRODUCT_SYSTEM_SERVER_COMPILER_FILTER \
     PRODUCT_SANITIZER_MODULE_CONFIGS \
     PRODUCT_SYSTEM_BASE_FS_PATH \
     PRODUCT_VENDOR_BASE_FS_PATH \
+    PRODUCT_PRODUCT_BASE_FS_PATH \
     PRODUCT_SHIPPING_API_LEVEL \
     VENDOR_PRODUCT_RESTRICT_VENDOR_FILES \
     VENDOR_EXCEPTION_MODULES \
@@ -149,6 +154,8 @@
     PRODUCT_ADB_KEYS \
     PRODUCT_CFI_INCLUDE_PATHS \
     PRODUCT_CFI_EXCLUDE_PATHS \
+    PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE \
+    PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE \
 
 define dump-product
 $(info ==== $(1) ====)\
@@ -302,6 +309,8 @@
 	BOARD_FLASH_BLOCK_SIZE \
 	BOARD_VENDORIMAGE_PARTITION_SIZE \
 	BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE \
+	BOARD_PRODUCTIMAGE_PARTITION_SIZE \
+	BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE \
 	BOARD_INSTALLER_CMDLINE \
 
 
diff --git a/core/product_config.mk b/core/product_config.mk
index 5b0e257..2620adb 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -365,6 +365,13 @@
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_DEFAULT_PROPERTIES))
 .KATI_READONLY := PRODUCT_SYSTEM_DEFAULT_PROPERTIES
 
+# A list of property assignments, like "key = value", with zero or more
+# whitespace characters on either side of the '='.
+# Used for adding properties to the build.prop of the product partition.
+PRODUCT_PRODUCT_PROPERTIES := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PRODUCT_PROPERTIES))
+.KATI_READONLY := PRODUCT_PRODUCT_PROPERTIES
+
 # Should we use the default resources or add any product specific overlays
 PRODUCT_PACKAGE_OVERLAYS := \
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGE_OVERLAYS))
@@ -385,8 +392,12 @@
 PRODUCT_EXTRA_RECOVERY_KEYS := $(sort \
     $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_EXTRA_RECOVERY_KEYS))
 
+PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER))
 PRODUCT_DEX_PREOPT_DEFAULT_FLAGS := \
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_DEFAULT_FLAGS))
+PRODUCT_DEX_PREOPT_GENERATE_DM_FILES := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_GENERATE_DM_FILES))
 PRODUCT_DEX_PREOPT_BOOT_FLAGS := \
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_BOOT_FLAGS))
 PRODUCT_DEX_PREOPT_PROFILE_DIR := \
@@ -482,5 +493,13 @@
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_CFI_INCLUDE_PATHS))
 
 # which Soong namespaces to export to Make
-PRODUCT_SOONG_NAMESPACES :=
+PRODUCT_SOONG_NAMESPACES := \
     $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SOONG_NAMESPACES))
+
+# A flag to override PRODUCT_COMPATIBLE_PROPERTY
+PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE))
+
+# Whether the whitelist of actionable compatible properties should be disabled
+PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE := \
+    $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE))
diff --git a/core/sdk_check.mk b/core/sdk_check.mk
new file mode 100644
index 0000000..c5c2bc8
--- /dev/null
+++ b/core/sdk_check.mk
@@ -0,0 +1,14 @@
+
+# Enforces that LOCAL_SDK_VERSION and LOCAL_PRIVATE_PLATFORM_APIS are set
+# correctly.
+# Should be included by Java targets that allow specifying LOCAL_SDK_VERSION.
+
+ifeq ($(LOCAL_SDK_VERSION)$(LOCAL_PRIVATE_PLATFORM_APIS),)
+ifneq ($(JAVA_SDK_ENFORCEMENT_WARNING),)
+$(warning Java modules must specify LOCAL_SDK_VERSION or LOCAL_PRIVATE_PLATFORM_APIS, but $(LOCAL_MODULE) specifies neither.)
+endif
+else ifneq ($(LOCAL_SDK_VERSION),)
+ifneq ($(LOCAL_PRIVATE_PLATFORM_APIS),)
+$(error $(LOCAL_MODULE) specifies both LOCAL_SDK_VERSION ($(LOCAL_SDK_VERSION)) and LOCAL_PRIVATE_PLATFORM_APIS ($(LOCAL_PRIVATE_PLATFORM_APIS)), but should specify only one.)
+endif
+endif
diff --git a/core/shared_library_internal.mk b/core/shared_library_internal.mk
index 687536b..ab887e0 100644
--- a/core/shared_library_internal.mk
+++ b/core/shared_library_internal.mk
@@ -20,7 +20,9 @@
 $(error $(LOCAL_PATH): Cannot set module stem for a library)
 endif
 
+ifdef target-shared-library-hook
 $(call target-shared-library-hook)
+endif
 
 skip_build_from_source :=
 ifdef LOCAL_PREBUILT_MODULE_FILE
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index 65aabff..c553c4c 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -78,15 +78,15 @@
 ifeq ($(LOCAL_SDK_VERSION),system_current)
 my_link_type := java:system
 my_warn_types := java:platform
-my_allowed_types := java:sdk java:system
+my_allowed_types := java:sdk java:system java:core
 else ifneq ($(LOCAL_SDK_VERSION),)
 my_link_type := java:sdk
 my_warn_types := java:system java:platform
-my_allowed_types := java:sdk
+my_allowed_types := java:sdk java:core
 else
 my_link_type := java:platform
 my_warn_types :=
-my_allowed_types := java:sdk java:system java:platform
+my_allowed_types := java:sdk java:system java:platform java:core
 endif
 
 my_link_deps :=
diff --git a/core/soong_config.mk b/core/soong_config.mk
index bbad4c8..8b2dfd1 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -42,6 +42,9 @@
 
 $(call add_json_str,  Make_suffix, -$(TARGET_PRODUCT))
 
+$(call add_json_str,  BuildId,                           $(BUILD_ID))
+$(call add_json_str,  BuildNumberFromFile,               $$$(BUILD_NUMBER_FROM_FILE))
+
 $(call add_json_val,  Platform_sdk_version,              $(PLATFORM_SDK_VERSION))
 $(call add_json_csv,  Platform_version_active_codenames, $(PLATFORM_VERSION_ALL_CODENAMES))
 $(call add_json_csv,  Platform_version_future_codenames, $(PLATFORM_VERSION_FUTURE_CODENAMES))
@@ -110,10 +113,11 @@
 $(call add_json_bool, Device_uses_hwc2,                  $(filter true,$(TARGET_USES_HWC2)))
 $(call add_json_list, DeviceKernelHeaders,               $(TARGET_PROJECT_SYSTEM_INCLUDES))
 $(call add_json_bool, DevicePrefer32BitExecutables,      $(filter true,$(TARGET_PREFER_32_BIT_EXECUTABLES)))
-$(call add_json_val,  DeviceUsesClang,                   $(if $(USE_CLANG_PLATFORM_BUILD),$(USE_CLANG_PLATFORM_BUILD),false))
 $(call add_json_str,  DeviceVndkVersion,                 $(BOARD_VNDK_VERSION))
 $(call add_json_str,  Platform_vndk_version,             $(PLATFORM_VNDK_VERSION))
 $(call add_json_list, ExtraVndkVersions,                 $(PRODUCT_EXTRA_VNDK_VERSIONS))
+$(call add_json_list, DeviceSystemSdkVersions,           $(BOARD_SYSTEMSDK_VERSIONS))
+$(call add_json_list, Platform_systemsdk_versions,       $(PLATFORM_SYSTEMSDK_VERSIONS))
 $(call add_json_bool, Malloc_not_svelte,                 $(call invert_bool,$(filter true,$(MALLOC_SVELTE))))
 $(call add_json_str,  Override_rs_driver,                $(OVERRIDE_RS_DRIVER))
 
@@ -123,7 +127,7 @@
 $(call add_json_bool, Uml,                               $(filter true,$(TARGET_USER_MODE_LINUX)))
 $(call add_json_str,  VendorPath,                        $(TARGET_COPY_OUT_VENDOR))
 $(call add_json_str,  OdmPath,                           $(TARGET_COPY_OUT_ODM))
-$(call add_json_str,  OemPath,                           $(TARGET_COPY_OUT_OEM))
+$(call add_json_str,  ProductPath,                       $(TARGET_COPY_OUT_PRODUCT))
 $(call add_json_bool, MinimizeJavaDebugInfo,             $(filter true,$(PRODUCT_MINIMIZE_JAVA_DEBUG_INFO)))
 
 $(call add_json_bool, UseGoma,                           $(filter-out false,$(USE_GOMA)))
@@ -132,6 +136,8 @@
 
 $(call add_json_list, NamespacesToExport,                $(PRODUCT_SOONG_NAMESPACES))
 
+$(call add_json_list, PgoAdditionalProfileDirs,          $(PGO_ADDITIONAL_PROFILE_DIRS))
+
 _contents := $(subst $(comma)$(newline)__SV_END,$(newline)}$(newline),$(_contents)__SV_END)
 
 $(file >$(SOONG_VARIABLES).tmp,$(_contents))
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index 6cf9422..f10da32 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -23,6 +23,15 @@
 $(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(full_classes_jar)))
 $(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(full_classes_pre_proguard_jar)))
 
+ifdef LOCAL_DROIDDOC_STUBS_JAR
+$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_STUBS_JAR),$(OUT_DOCS)/$(LOCAL_MODULE)-stubs.jar))
+ALL_DOCS += $(OUT_DOCS)/$(LOCAL_MODULE)-stubs.jar
+endif
+
+ifdef LOCAL_DROIDDOC_DOC_ZIP
+$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_DOC_ZIP),$(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip))
+endif
+
 ifdef LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
   $(eval $(call copy-one-file,$(LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR),\
     $(intermediates.COMMON)/jacoco-report-classes.jar))
@@ -40,7 +49,11 @@
 
 ifdef LOCAL_SOONG_DEX_JAR
   ifndef LOCAL_IS_HOST_MODULE
-    $(eval $(call copy-one-file,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
+    ifneq ($(filter $(LOCAL_MODULE),$(PRODUCT_BOOT_JARS)),)  # is_boot_jar
+      $(eval $(call hiddenapi-copy-soong-jar,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
+    else # !is_boot_jar
+      $(eval $(call copy-one-file,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
+    endif # is_boot_jar
     $(eval $(call add-dependency,$(common_javalib.jar),$(full_classes_jar) $(full_classes_header_jar)))
 
     dex_preopt_profile_src_file := $(common_javalib.jar)
@@ -86,19 +99,23 @@
 ifeq ($(LOCAL_SDK_VERSION),system_current)
 my_link_type := java:system
 my_warn_types := java:platform
-my_allowed_types := java:sdk java:system
+my_allowed_types := java:sdk java:system java:core
 else ifneq (,$(call has-system-sdk-version,$(LOCAL_SDK_VERSION)))
 my_link_type := java:system
 my_warn_types := java:platform
-my_allowed_types := java:sdk java:system
+my_allowed_types := java:sdk java:system java:core
+else ifeq ($(LOCAL_SDK_VERSION),core_current)
+my_link_type := java:core
+my_warn_types :=
+my_allowed_types := java:core
 else ifneq ($(LOCAL_SDK_VERSION),)
 my_link_type := java:sdk
 my_warn_types := java:system java:platform
-my_allowed_types := java:sdk
+my_allowed_types := java:sdk java:core
 else
 my_link_type := java:platform
 my_warn_types :=
-my_allowed_types := java:sdk java:system java:platform
+my_allowed_types := java:sdk java:system java:platform java:core
 endif
 
 my_link_deps :=
diff --git a/core/static_java_library.mk b/core/static_java_library.mk
index 8348349..1dc0e71 100644
--- a/core/static_java_library.mk
+++ b/core/static_java_library.mk
@@ -88,7 +88,7 @@
 
 LOCAL_INTERMEDIATE_TARGETS += $(my_res_package)
 endif  # LOCAL_USE_AAPT2
-endif  # LOCAL_RESOURCE_DIR
+endif  # need_compile_res
 
 all_res_assets := $(all_resources)
 
@@ -119,10 +119,17 @@
 endif
 
 ifdef LOCAL_USE_AAPT2
-$(intermediates.COMMON)/export_proguard_flags: $(addprefix $(LOCAL_PATH)/,$(LOCAL_EXPORT_PROGUARD_FLAG_FILES))
+import_proguard_flag_files := $(strip $(foreach l,$(LOCAL_STATIC_ANDROID_LIBRARIES),\
+    $(call intermediates-dir-for,JAVA_LIBRARIES,$(l),,COMMON)/export_proguard_flags))
+$(intermediates.COMMON)/export_proguard_flags: $(import_proguard_flag_files) $(addprefix $(LOCAL_PATH)/,$(LOCAL_EXPORT_PROGUARD_FLAG_FILES))
 	@echo "Export proguard flags: $@"
 	rm -f $@
-	cat $+ >$@
+	touch $@
+	for f in $+; do \
+		echo -e "\n# including $$f" >>$@; \
+		cat $$f >>$@; \
+	done
+import_proguard_flag_files :=
 endif
 
 # add --non-constant-id to prevent inlining constants.
@@ -156,7 +163,7 @@
 else
 ifneq (,$(LOCAL_SDK_VERSION))
 # Set target-api for LOCAL_SDK_VERSIONs other than current.
-ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
+ifneq (,$(filter-out current system_current test_current core_current, $(LOCAL_SDK_VERSION)))
 renderscript_target_api := $(call get-numeric-sdk-version,$(LOCAL_SDK_VERSION))
 endif
 endif  # LOCAL_SDK_VERSION is set
diff --git a/core/target_test_internal.mk b/core/target_test_internal.mk
index d84d3d9..b5c3a7c 100644
--- a/core/target_test_internal.mk
+++ b/core/target_test_internal.mk
@@ -8,13 +8,9 @@
   ifndef LOCAL_SDK_VERSION
     LOCAL_STATIC_LIBRARIES += libgtest_main libgtest
   else
-    ifneq (,$(filter c++_%,$(LOCAL_NDK_STL_VARIANT)))
-        my_ndk_gtest_suffix := _c++
-    else ifneq ($(filter stlport_,$(LOCAL_NDK_STL_VARIANT)),)
-        my_ndk_gtest_suffix := _stlport
-    else # system STL, use stlport
-        my_ndk_gtest_suffix := _stlport
-    endif
+    # TODO(danalbert): Remove the suffix from the module since we only need the
+    # one variant now.
+    my_ndk_gtest_suffix := _c++
     LOCAL_STATIC_LIBRARIES += \
         libgtest_main_ndk$(my_ndk_gtest_suffix) \
         libgtest_ndk$(my_ndk_gtest_suffix)
diff --git a/core/tasks/tools/build_custom_image.mk b/core/tasks/tools/build_custom_image.mk
index 4d05237..a1151e9 100644
--- a/core/tasks/tools/build_custom_image.mk
+++ b/core/tasks/tools/build_custom_image.mk
@@ -151,7 +151,7 @@
 	    cat $(PRIVATE_DICT_FILE) >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
 	# Generate the image.
 	$(if $(filter oem,$(PRIVATE_MOUNT_POINT)), \
-	  $(hide) echo "oem.buildnumber=$(BUILD_NUMBER)" >> $(PRIVATE_STAGING_DIR)/oem.prop)
+	  $(hide) echo "oem.buildnumber=$(BUILD_NUMBER_FROM_FILE)" >> $(PRIVATE_STAGING_DIR)/oem.prop)
 	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
 	  build/make/tools/releasetools/build_image.py \
 	  $(PRIVATE_STAGING_DIR) $(PRIVATE_INTERMEDIATES)/image_info.txt $@ $(TARGET_OUT)
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index f6bc76b..e83d6fa 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -24,9 +24,9 @@
 #     DEFAULT_APP_TARGET_SDK
 #     BUILD_ID
 #     BUILD_NUMBER
-#     BUILD_DATETIME
 #     PLATFORM_SECURITY_PATCH
 #     PLATFORM_VNDK_VERSION
+#     PLATFORM_SYSTEMSDK_VERSIONS
 #
 
 # Look for an optional file containing overrides of the defaults,
@@ -202,6 +202,32 @@
   endif
 endif
 
+ifndef PLATFORM_SYSTEMSDK_MIN_VERSION
+  # This is the oldest version of system SDK that the platform supports. Unlike
+  # the public SDK, where the platform essentially supports all previous SDK
+  # versions, the platform supports only a small number of recent system SDK
+  # versions, as old system APIs are gradually deprecated and then removed.
+  # However, in P we currently support only the single latest version, since
+  # there are no old system SDK versions yet. Therefore, this is empty for now.
+  # It should later (post P) be set to a number, like 28.
+  PLATFORM_SYSTEMSDK_MIN_VERSION :=
+endif
+
+# This is the list of system SDK versions that the current platform supports.
+PLATFORM_SYSTEMSDK_VERSIONS :=
+ifneq (,$(PLATFORM_SYSTEMSDK_MIN_VERSION))
+  $(if $(call math_is_number,$(PLATFORM_SYSTEMSDK_MIN_VERSION)),,\
+    $(error PLATFORM_SYSTEMSDK_MIN_VERSION must be a number, but was $(PLATFORM_SYSTEMSDK_MIN_VERSION)))
+  PLATFORM_SYSTEMSDK_VERSIONS := $(call int_range_list,$(PLATFORM_SYSTEMSDK_MIN_VERSION),$(PLATFORM_SDK_VERSION))
+endif
+# Platform always supports the current version
+ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+  PLATFORM_SYSTEMSDK_VERSIONS += $(PLATFORM_SDK_VERSION)
+else
+  PLATFORM_SYSTEMSDK_VERSIONS += $(PLATFORM_VERSION_CODENAME)
+endif
+PLATFORM_SYSTEMSDK_VERSIONS := $(strip $(sort $(PLATFORM_SYSTEMSDK_VERSIONS)))
+
 ifndef PLATFORM_SECURITY_PATCH
     #  Used to indicate the security patch that has been applied to the device.
     #  It must signify that the build includes all security patches issued up through the designated Android Public Security Bulletin.
@@ -240,6 +266,12 @@
 DATE := date -d @$(BUILD_DATETIME)
 endif
 
+# Everything should be using BUILD_DATETIME_FROM_FILE instead.
+# BUILD_DATETIME and DATE can be removed once BUILD_NUMBER moves
+# to soong_ui.
+BUILD_DATETIME :=
+
+HAS_BUILD_NUMBER := true
 ifndef BUILD_NUMBER
   # BUILD_NUMBER should be set to the source control value that
   # represents the current state of the source code.  E.g., a
@@ -251,4 +283,5 @@
   # from this date/time" value.  Make it start with a non-digit so that
   # anyone trying to parse it as an integer will probably get "0".
   BUILD_NUMBER := eng.$(shell echo $${USER:0:6}).$(shell $(DATE) +%Y%m%d.%H%M%S)
+  HAS_BUILD_NUMBER := false
 endif
diff --git a/envsetup.sh b/envsetup.sh
index 394df65..cf61950 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -8,7 +8,7 @@
              Selects <product_name> as the product to build, and <build_variant> as the variant to
              build, and stores those selections in the environment to be read by subsequent
              invocations of 'm' etc.
-- tapas:     tapas [<App1> <App2> ...] [arm|x86|mips|armv5|arm64|x86_64|mips64] [eng|userdebug|user]
+- tapas:     tapas [<App1> <App2> ...] [arm|x86|mips|arm64|x86_64|mips64] [eng|userdebug|user]
 - croot:     Changes directory to the top of the tree.
 - m:         Makes from the top of the tree.
 - mm:        Builds all of the modules in the current directory, but not their dependencies.
@@ -51,7 +51,7 @@
     cached_vars=`cat $T/build/envsetup.sh | tr '()' '  ' | awk '{for(i=1;i<=NF;i++) if($i~/get_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`
     cached_abs_vars=`cat $T/build/envsetup.sh | tr '()' '  ' | awk '{for(i=1;i<=NF;i++) if($i~/get_abs_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`
     # Call the build system to dump the "<val>=<value>" pairs as a shell script.
-    build_dicts_script=`\cd $T; build/soong/soong_ui.bash --dumpvars-mode \
+    build_dicts_script=`\builtin cd $T; build/soong/soong_ui.bash --dumpvars-mode \
                         --vars="$cached_vars" \
                         --abs-vars="$cached_abs_vars" \
                         --var-prefix=var_cache_ \
@@ -661,10 +661,10 @@
 function tapas()
 {
     local showHelp="$(echo $* | xargs -n 1 echo | \grep -E '^(help)$' | xargs)"
-    local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|mips|armv5|arm64|x86_64|mips64)$' | xargs)"
+    local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|mips|arm64|x86_64|mips64)$' | xargs)"
     local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
     local density="$(echo $* | xargs -n 1 echo | \grep -E '^(ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
-    local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|armv5|arm64|x86_64|mips64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
+    local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|arm64|x86_64|mips64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
 
     if [ "$showHelp" != "" ]; then
       $(gettop)/build/make/tapasHelp.sh
@@ -688,7 +688,6 @@
     case $arch in
       x86)    product=aosp_x86;;
       mips)   product=aosp_mips;;
-      armv5)  product=generic_armv5;;
       arm64)  product=aosp_arm64;;
       x86_64) product=aosp_x86_64;;
       mips64)  product=aosp_mips64;;
@@ -742,33 +741,11 @@
     fi
 }
 
-# Return driver for "make", if any (eg. static analyzer)
-function getdriver()
-{
-    local T="$1"
-    test "$WITH_STATIC_ANALYZER" = "0" && unset WITH_STATIC_ANALYZER
-    if [ -n "$WITH_STATIC_ANALYZER" ]; then
-        # Use scan-build to collect all static analyzer reports into directory
-        # /tmp/scan-build-yyyy-mm-dd-hhmmss-*
-        # The clang compiler passed by --use-analyzer here is not important.
-        # build/make/core/binary.mk will set CLANG_CXX and CLANG before calling
-        # c++-analyzer and ccc-analyzer.
-        local CLANG_VERSION=$(get_build_var LLVM_PREBUILTS_VERSION)
-        local BUILD_OS=$(get_build_var BUILD_OS)
-        local CLANG_DIR="$T/prebuilts/clang/host/${BUILD_OS}-x86/${CLANG_VERSION}"
-        echo "\
-${CLANG_DIR}/tools/scan-build/bin/scan-build \
---use-analyzer ${CLANG_DIR}/bin/clang \
---status-bugs"
-    fi
-}
-
 function m()
 {
     local T=$(gettop)
-    local DRV=$(getdriver $T)
     if [ "$T" ]; then
-        _wrap_build $DRV $T/build/soong/soong_ui.bash --make-mode $@
+        _wrap_build $T/build/soong/soong_ui.bash --make-mode $@
     else
         echo "Couldn't locate the top of the tree.  Try setting TOP."
         return 1
@@ -795,11 +772,10 @@
 function mm()
 {
     local T=$(gettop)
-    local DRV=$(getdriver $T)
     # If we're sitting in the root of the build tree, just do a
     # normal build.
     if [ -f build/soong/soong_ui.bash ]; then
-        _wrap_build $DRV $T/build/soong/soong_ui.bash --make-mode $@
+        _wrap_build $T/build/soong/soong_ui.bash --make-mode $@
     else
         # Find the closest Android.mk file.
         local M=$(findmakefile)
@@ -834,7 +810,7 @@
             if [ "1" = "${WITH_TIDY_ONLY}" -o "true" = "${WITH_TIDY_ONLY}" ]; then
               MODULES=tidy_only
             fi
-            ONE_SHOT_MAKEFILE=$M _wrap_build $DRV $T/build/soong/soong_ui.bash --make-mode $MODULES $ARGS
+            ONE_SHOT_MAKEFILE=$M _wrap_build $T/build/soong/soong_ui.bash --make-mode $MODULES $ARGS
         fi
     fi
 }
@@ -842,7 +818,6 @@
 function mmm()
 {
     local T=$(gettop)
-    local DRV=$(getdriver $T)
     if [ "$T" ]; then
         local MAKEFILE=
         local MODULES=
@@ -902,7 +877,7 @@
         fi
         # Convert "/" to "-".
         MODULES_IN_PATHS=${MODULES_IN_PATHS//\//-}
-        ONE_SHOT_MAKEFILE="$MAKEFILE" _wrap_build $DRV $T/build/soong/soong_ui.bash --make-mode $DASH_ARGS $MODULES $MODULES_IN_PATHS $ARGS
+        ONE_SHOT_MAKEFILE="$MAKEFILE" _wrap_build $T/build/soong/soong_ui.bash --make-mode $DASH_ARGS $MODULES $MODULES_IN_PATHS $ARGS
     else
         echo "Couldn't locate the top of the tree.  Try setting TOP."
         return 1
@@ -912,9 +887,8 @@
 function mma()
 {
   local T=$(gettop)
-  local DRV=$(getdriver $T)
   if [ -f build/soong/soong_ui.bash ]; then
-    _wrap_build $DRV $T/build/soong/soong_ui.bash --make-mode $@
+    _wrap_build $T/build/soong/soong_ui.bash --make-mode $@
   else
     if [ ! "$T" ]; then
       echo "Couldn't locate the top of the tree.  Try setting TOP."
@@ -926,14 +900,13 @@
     local MODULES_IN_PATHS=MODULES-IN-$(dirname ${M})
     # Convert "/" to "-".
     MODULES_IN_PATHS=${MODULES_IN_PATHS//\//-}
-    _wrap_build $DRV $T/build/soong/soong_ui.bash --make-mode $@ $MODULES_IN_PATHS
+    _wrap_build $T/build/soong/soong_ui.bash --make-mode $@ $MODULES_IN_PATHS
   fi
 }
 
 function mmma()
 {
   local T=$(gettop)
-  local DRV=$(getdriver $T)
   if [ "$T" ]; then
     local DASH_ARGS=$(echo "$@" | awk -v RS=" " -v ORS=" " '/^-.*$/')
     local DIRS=$(echo "$@" | awk -v RS=" " -v ORS=" " '/^[^-].*$/')
@@ -964,7 +937,7 @@
     done
     # Convert "/" to "-".
     MODULES_IN_PATHS=${MODULES_IN_PATHS//\//-}
-    _wrap_build $DRV $T/build/soong/soong_ui.bash --make-mode $DASH_ARGS $ARGS $MODULES_IN_PATHS
+    _wrap_build $T/build/soong/soong_ui.bash --make-mode $DASH_ARGS $ARGS $MODULES_IN_PATHS
   else
     echo "Couldn't locate the top of the tree.  Try setting TOP."
     return 1
diff --git a/help.sh b/help.sh
index 3f39c77..c143542 100755
--- a/help.sh
+++ b/help.sh
@@ -38,6 +38,8 @@
                             Stands for "System, NO Dependencies"
     vnod                    Quickly rebuild the vendor image from built packages
                             Stands for "Vendor, NO Dependencies"
+    pnod                    Quickly rebuild the product image from built packages
+                            Stands for "Product, NO Dependencies"
 
 
 So, for example, you could run:
diff --git a/tapasHelp.sh b/tapasHelp.sh
index 058ac1d..38b3e34 100755
--- a/tapasHelp.sh
+++ b/tapasHelp.sh
@@ -6,7 +6,7 @@
 cd ../..
 TOP="${PWD}"
 
-message='usage: tapas [<App1> <App2> ...] [arm|x86|mips|armv5|arm64|x86_64|mips64] [eng|userdebug|user]
+message='usage: tapas [<App1> <App2> ...] [arm|x86|mips|arm64|x86_64|mips64] [eng|userdebug|user]
 
 tapas selects individual apps to be built by the Android build system. Unlike
 "lunch", "tapas" does not request the building of images for a device.
diff --git a/target/board/Android.mk b/target/board/Android.mk
index f4d6b93..7fe45eb 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -37,7 +37,7 @@
 LOCAL_MODULE        := device_manifest.xml
 LOCAL_MODULE_STEM   := manifest.xml
 LOCAL_MODULE_CLASS  := ETC
-LOCAL_MODULE_PATH   := $(TARGET_OUT_VENDOR)
+LOCAL_MODULE_PATH   := $(TARGET_OUT_VENDOR)/etc/vintf
 
 GEN := $(local-generated-sources-dir)/manifest.xml
 $(GEN): PRIVATE_DEVICE_MANIFEST_FILE := $(DEVICE_MANIFEST_FILE)
@@ -52,107 +52,3 @@
 include $(BUILD_PREBUILT)
 BUILT_VENDOR_MANIFEST := $(LOCAL_BUILT_MODULE)
 endif
-
-# Device Compatibility Matrix
-ifdef DEVICE_MATRIX_FILE
-include $(CLEAR_VARS)
-LOCAL_MODULE        := device_compatibility_matrix.xml
-LOCAL_MODULE_STEM   := compatibility_matrix.xml
-LOCAL_MODULE_CLASS  := ETC
-LOCAL_MODULE_PATH   := $(TARGET_OUT_VENDOR)
-
-GEN := $(local-generated-sources-dir)/compatibility_matrix.xml
-$(GEN): $(DEVICE_MATRIX_FILE) $(HOST_OUT_EXECUTABLES)/assemble_vintf
-	# TODO(b/37342627): put BOARD_VNDK_VERSION & BOARD_VNDK_LIBRARIES into device matrix.
-	$(HOST_OUT_EXECUTABLES)/assemble_vintf -i $< -o $@
-
-LOCAL_PREBUILT_MODULE_FILE := $(GEN)
-include $(BUILD_PREBUILT)
-BUILT_VENDOR_MATRIX := $(LOCAL_BUILT_MODULE)
-endif
-
-# Framework Manifest
-include $(CLEAR_VARS)
-LOCAL_MODULE        := framework_manifest.xml
-LOCAL_MODULE_STEM   := manifest.xml
-LOCAL_MODULE_CLASS  := ETC
-LOCAL_MODULE_PATH   := $(TARGET_OUT)
-
-GEN := $(local-generated-sources-dir)/manifest.xml
-
-$(GEN): PRIVATE_FLAGS :=
-
-ifeq ($(PRODUCT_ENFORCE_VINTF_MANIFEST),true)
-ifdef BUILT_VENDOR_MATRIX
-$(GEN): $(BUILT_VENDOR_MATRIX)
-$(GEN): PRIVATE_FLAGS += -c "$(BUILT_VENDOR_MATRIX)"
-endif
-endif
-
-$(GEN): PRIVATE_FRAMEWORK_MANIFEST_INPUT_FILES := $(FRAMEWORK_MANIFEST_INPUT_FILES)
-$(GEN): $(FRAMEWORK_MANIFEST_INPUT_FILES) $(HOST_OUT_EXECUTABLES)/assemble_vintf
-	BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) $(HOST_OUT_EXECUTABLES)/assemble_vintf \
-		-i $(call normalize-path-list,$(PRIVATE_FRAMEWORK_MANIFEST_INPUT_FILES)) \
-		-o $@ $(PRIVATE_FLAGS)
-
-LOCAL_PREBUILT_MODULE_FILE := $(GEN)
-include $(BUILD_PREBUILT)
-BUILT_SYSTEM_MANIFEST := $(LOCAL_BUILT_MODULE)
-
-# Framework Compatibility Matrix
-include $(CLEAR_VARS)
-LOCAL_MODULE        := framework_compatibility_matrix.xml
-LOCAL_MODULE_STEM   := compatibility_matrix.xml
-LOCAL_MODULE_CLASS  := ETC
-LOCAL_MODULE_PATH   := $(TARGET_OUT)
-
-GEN := $(local-generated-sources-dir)/compatibility_matrix.xml
-
-$(GEN): PRIVATE_FLAGS :=
-
-ifdef BUILT_VENDOR_MANIFEST
-$(GEN): $(BUILT_VENDOR_MANIFEST)
-$(GEN): PRIVATE_FLAGS += -c "$(BUILT_VENDOR_MANIFEST)"
-endif
-
-ifeq (true,$(BOARD_AVB_ENABLE))
-$(GEN): $(AVBTOOL)
-# INTERNAL_AVB_SYSTEM_SIGNING_ARGS consists of BOARD_AVB_SYSTEM_KEY_PATH and
-# BOARD_AVB_SYSTEM_ALGORITHM. We should add the dependency of key path, which
-# is a file, here.
-$(GEN): $(BOARD_AVB_SYSTEM_KEY_PATH)
-# Use deferred assignment (=) instead of immediate assignment (:=).
-# Otherwise, cannot get INTERNAL_AVB_SYSTEM_SIGNING_ARGS.
-FRAMEWORK_VBMETA_VERSION = $$("$(AVBTOOL)" add_hashtree_footer \
-                              --print_required_libavb_version \
-                              $(INTERNAL_AVB_SYSTEM_SIGNING_ARGS) \
-                              $(BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS))
-else
-FRAMEWORK_VBMETA_VERSION := 0.0
-endif
-
-# All kernel versions that the system image works with.
-KERNEL_VERSIONS := 3.18 4.4 4.9
-KERNEL_CONFIG_DATA := kernel/configs
-
-$(GEN): $(foreach version,$(KERNEL_VERSIONS),\
-	$(wildcard $(KERNEL_CONFIG_DATA)/android-$(version)/android-base*.cfg))
-$(GEN): PRIVATE_FLAGS += $(foreach version,$(KERNEL_VERSIONS),\
-	--kernel=$(version):$(call normalize-path-list,\
-		$(wildcard $(KERNEL_CONFIG_DATA)/android-$(version)/android-base*.cfg)))
-
-KERNEL_VERSIONS :=
-KERNEL_CONFIG_DATA :=
-
-$(GEN): $(FRAMEWORK_COMPATIBILITY_MATRIX_FILES) $(HOST_OUT_EXECUTABLES)/assemble_vintf
-	# TODO(b/37405869) (b/37715375) inject avb versions as well for devices that have avb enabled.
-	POLICYVERS=$(POLICYVERS) \
-		BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) \
-		FRAMEWORK_VBMETA_VERSION=$(FRAMEWORK_VBMETA_VERSION) \
-		PRODUCT_ENFORCE_VINTF_MANIFEST=$(PRODUCT_ENFORCE_VINTF_MANIFEST) \
-		$(HOST_OUT_EXECUTABLES)/assemble_vintf \
-		-i $(call normalize-path-list,$(FRAMEWORK_COMPATIBILITY_MATRIX_FILES)) \
-		-o $@ $(PRIVATE_FLAGS)
-LOCAL_PREBUILT_MODULE_FILE := $(GEN)
-include $(BUILD_PREBUILT)
-BUILT_SYSTEM_COMPATIBILITY_MATRIX := $(LOCAL_BUILT_MODULE)
diff --git a/target/board/generic/sepolicy/bootanim.te b/target/board/generic/sepolicy/bootanim.te
index b23e1ca..e4f7c73 100644
--- a/target/board/generic/sepolicy/bootanim.te
+++ b/target/board/generic/sepolicy/bootanim.te
@@ -3,7 +3,6 @@
 #TODO: This can safely be ignored until b/62954877 is fixed
 dontaudit bootanim system_data_file:dir read;
 
-allow bootanim vendor_file:file { execute getattr open read };
 allow bootanim graphics_device:chr_file { read ioctl open };
 
 set_prop(bootanim, qemu_prop)
diff --git a/target/board/generic/sepolicy/genfs_contexts b/target/board/generic/sepolicy/genfs_contexts
index bdcead1..91cedf1 100644
--- a/target/board/generic/sepolicy/genfs_contexts
+++ b/target/board/generic/sepolicy/genfs_contexts
@@ -2,3 +2,16 @@
 # /sys/bus/platform/devices/ANDR0001:00/properties/android/ which is a symlink to
 # /sys/devices/platform/ANDR0001:00/properties/android/
 genfscon sysfs /devices/platform/ANDR0001:00/properties/android u:object_r:sysfs_dt_firmware_android:s0
+
+# We expect /sys/class/power_supply/* and everything it links to to be labeled
+# as sysfs_batteryinfo.
+genfscon sysfs /devices/platform/GFSH0001:00/power_supply u:object_r:sysfs_batteryinfo:s0
+
+# /sys/class/rtc
+genfscon sysfs /devices/pnp0/00:00/rtc u:object_r:sysfs_rtc:s0
+genfscon sysfs /devices/platform/GFSH0007:00/rtc u:object_r:sysfs_rtc:s0
+
+# /sys/class/net
+genfscon sysfs /devices/pci0000:00/0000:00:08.0/virtio5/net u:object_r:sysfs_net:s0
+genfscon sysfs /devices/virtual/mac80211_hwsim/hwsim0/net u:object_r:sysfs_net:s0
+genfscon sysfs /devices/virtual/mac80211_hwsim/hwsim1/net u:object_r:sysfs_net:s0
diff --git a/target/board/generic/sepolicy/hal_fingerprint_default.te b/target/board/generic/sepolicy/hal_fingerprint_default.te
new file mode 100644
index 0000000..e5b06f1
--- /dev/null
+++ b/target/board/generic/sepolicy/hal_fingerprint_default.te
@@ -0,0 +1,5 @@
+# TODO(b/36644492): Remove data_between_core_and_vendor_violators once
+# hal_fingerprint no longer directly accesses fingerprintd_data_file.
+typeattribute hal_fingerprint_default data_between_core_and_vendor_violators;
+allow hal_fingerprint_default fingerprintd_data_file:file create_file_perms;
+allow hal_fingerprint_default fingerprintd_data_file:dir rw_dir_perms;
diff --git a/target/board/generic/sepolicy/healthd.te b/target/board/generic/sepolicy/healthd.te
new file mode 100644
index 0000000..ced6704
--- /dev/null
+++ b/target/board/generic/sepolicy/healthd.te
@@ -0,0 +1,2 @@
+# Allow healthd to read the /sys/class/power_supply directory
+allow healthd sysfs:dir r_dir_perms;
diff --git a/target/board/generic/sepolicy/property.te b/target/board/generic/sepolicy/property.te
index a486702..56e02ef 100644
--- a/target/board/generic/sepolicy/property.te
+++ b/target/board/generic/sepolicy/property.te
@@ -1,4 +1,3 @@
 type qemu_prop, property_type;
 type qemu_cmdline, property_type;
 type radio_noril_prop, property_type;
-type opengles_prop, property_type;
diff --git a/target/board/generic/sepolicy/property_contexts b/target/board/generic/sepolicy/property_contexts
index c66a85f..3a61b6b 100644
--- a/target/board/generic/sepolicy/property_contexts
+++ b/target/board/generic/sepolicy/property_contexts
@@ -3,4 +3,3 @@
 ro.emu.                 u:object_r:qemu_prop:s0
 ro.emulator.            u:object_r:qemu_prop:s0
 ro.radio.noril          u:object_r:radio_noril_prop:s0
-ro.opengles.            u:object_r:opengles_prop:s0
diff --git a/target/board/generic/sepolicy/system_server.te b/target/board/generic/sepolicy/system_server.te
index 9063095..dd70b12 100644
--- a/target/board/generic/sepolicy/system_server.te
+++ b/target/board/generic/sepolicy/system_server.te
@@ -1,2 +1 @@
-get_prop(system_server, opengles_prop)
 get_prop(system_server, radio_noril_prop)
diff --git a/target/board/generic_arm64_a/BoardConfig.mk b/target/board/generic_arm64_a/BoardConfig.mk
index 8f4043f..34a8ac0 100644
--- a/target/board/generic_arm64_a/BoardConfig.mk
+++ b/target/board/generic_arm64_a/BoardConfig.mk
@@ -23,7 +23,7 @@
 TARGET_CPU_VARIANT := generic
 
 TARGET_2ND_ARCH := arm
-TARGET_2ND_ARCH_VARIANT := armv7-a-neon
+TARGET_2ND_ARCH_VARIANT := armv8-a
 TARGET_2ND_CPU_ABI := armeabi-v7a
 TARGET_2ND_CPU_ABI2 := armeabi
-TARGET_2ND_CPU_VARIANT := cortex-a15
+TARGET_2ND_CPU_VARIANT := generic
diff --git a/target/board/generic_arm64_ab/BoardConfig.mk b/target/board/generic_arm64_ab/BoardConfig.mk
index e0d7372..00afee6 100644
--- a/target/board/generic_arm64_ab/BoardConfig.mk
+++ b/target/board/generic_arm64_ab/BoardConfig.mk
@@ -23,10 +23,10 @@
 TARGET_CPU_VARIANT := generic
 
 TARGET_2ND_ARCH := arm
-TARGET_2ND_ARCH_VARIANT := armv7-a-neon
+TARGET_2ND_ARCH_VARIANT := armv8-a
 TARGET_2ND_CPU_ABI := armeabi-v7a
 TARGET_2ND_CPU_ABI2 := armeabi
-TARGET_2ND_CPU_VARIANT := cortex-a15
+TARGET_2ND_CPU_VARIANT := generic
 
 # Enable A/B update
 TARGET_NO_RECOVERY := true
diff --git a/target/board/generic_arm_ab/BoardConfig.mk b/target/board/generic_arm_ab/BoardConfig.mk
index 011bcdf..b21e907 100644
--- a/target/board/generic_arm_ab/BoardConfig.mk
+++ b/target/board/generic_arm_ab/BoardConfig.mk
@@ -28,3 +28,11 @@
 # Enable A/B update
 TARGET_NO_RECOVERY := true
 BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
+
+# TODO(jiyong) These might be SoC specific.
+BOARD_ROOT_EXTRA_FOLDERS += firmware firmware/radio persist
+BOARD_ROOT_EXTRA_SYMLINKS := /vendor/lib/dsp:/dsp
+
+# TODO(b/36764215): remove this setting when the generic system image
+# no longer has QCOM-specific directories under /.
+BOARD_SEPOLICY_DIRS += build/target/board/generic_arm64_ab/sepolicy
diff --git a/target/board/generic_armv5/AndroidBoard.mk b/target/board/generic_armv5/AndroidBoard.mk
deleted file mode 100644
index 7daff27..0000000
--- a/target/board/generic_armv5/AndroidBoard.mk
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
--include build/target/board/generic/AndroidBoard.mk
diff --git a/target/board/generic_armv5/BoardConfig.mk b/target/board/generic_armv5/BoardConfig.mk
deleted file mode 100644
index 016937a..0000000
--- a/target/board/generic_armv5/BoardConfig.mk
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include build/target/board/generic/BoardConfig.mk
-
-TARGET_ARCH_VARIANT := armv5te
-TARGET_CPU_ABI := armeabi
-TARGET_CPU_ABI2 :=
-
-WITH_DEXPREOPT := false
diff --git a/target/board/generic_armv5/README.txt b/target/board/generic_armv5/README.txt
deleted file mode 100644
index 25d590a..0000000
--- a/target/board/generic_armv5/README.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-The "generic_armv5" product defines a non-hardware-specific target
-without a kernel or bootloader.
-
-It is not a product "base class"; no other products inherit
-from it or use it in any way.
diff --git a/target/board/generic_armv5/device.mk b/target/board/generic_armv5/device.mk
deleted file mode 100644
index 7c4aaf2..0000000
--- a/target/board/generic_armv5/device.mk
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include build/target/board/generic/device.mk
diff --git a/target/board/generic_armv5/system.prop b/target/board/generic_armv5/system.prop
deleted file mode 100644
index 137a0f9..0000000
--- a/target/board/generic_armv5/system.prop
+++ /dev/null
@@ -1,6 +0,0 @@
-#
-# system.prop for generic sdk
-#
-
-rild.libpath=/system/lib/libreference-ril.so
-rild.libargs=-d /dev/ttyS0
diff --git a/target/board/generic_x86/BoardConfig.mk b/target/board/generic_x86/BoardConfig.mk
index f8fb88f..a73a31b 100644
--- a/target/board/generic_x86/BoardConfig.mk
+++ b/target/board/generic_x86/BoardConfig.mk
@@ -11,6 +11,10 @@
 TARGET_ARCH_VARIANT := x86
 TARGET_PRELINK_MODULE := false
 
+# The emulator now uses a 64-bit kernel to run the 32-bit x86 image
+#
+TARGET_USES_64_BIT_BINDER := true
+
 # The IA emulator (qemu) uses the Goldfish devices
 HAVE_HTC_AUDIO_DRIVER := true
 BOARD_USES_GENERIC_AUDIO := true
diff --git a/target/board/treble_common.mk b/target/board/treble_common.mk
index b4777b6..a8c9bc5 100644
--- a/target/board/treble_common.mk
+++ b/target/board/treble_common.mk
@@ -36,18 +36,6 @@
 # Generic AOSP image always requires separate vendor.img
 TARGET_COPY_OUT_VENDOR := vendor
 
-# Enable dex pre-opt to speed up initial boot
-ifeq ($(HOST_OS),linux)
-  ifeq ($(WITH_DEXPREOPT),)
-    WITH_DEXPREOPT := true
-    WITH_DEXPREOPT_PIC := true
-    ifneq ($(TARGET_BUILD_VARIANT),user)
-      # Retain classes.dex in APK's for non-user builds
-      DEX_PREOPT_DEFAULT := nostripping
-    endif
-  endif
-endif
-
 # Generic AOSP image does NOT support HWC1
 TARGET_USES_HWC2 := true
 # Set emulator framebuffer display device buffer count to 3
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index 9e2adee..85330b3 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -36,7 +36,6 @@
 PRODUCT_MAKEFILES := \
     $(LOCAL_DIR)/aosp_arm.mk \
     $(LOCAL_DIR)/full.mk \
-    $(LOCAL_DIR)/generic_armv5.mk \
     $(LOCAL_DIR)/aosp_x86.mk \
     $(LOCAL_DIR)/full_x86.mk \
     $(LOCAL_DIR)/aosp_mips.mk \
diff --git a/target/product/aosp_x86.mk b/target/product/aosp_x86.mk
index 03203ce..811c330 100644
--- a/target/product/aosp_x86.mk
+++ b/target/product/aosp_x86.mk
@@ -24,7 +24,7 @@
 PRODUCT_COPY_FILES += \
     development/sys-img/advancedFeatures.ini:advancedFeatures.ini \
     device/generic/goldfish/data/etc/encryptionkey.img:encryptionkey.img \
-    prebuilts/qemu-kernel/x86/3.18/kernel-qemu2:kernel-ranchu
+    prebuilts/qemu-kernel/x86_64/3.18/kernel-qemu2:kernel-ranchu-64
 
 include $(SRC_TARGET_DIR)/product/full_x86.mk
 
diff --git a/target/product/base.mk b/target/product/base.mk
index c3eb3b2..14ff1c2 100644
--- a/target/product/base.mk
+++ b/target/product/base.mk
@@ -31,6 +31,7 @@
     bit \
     blkid \
     bmgr \
+    bpfloader \
     bugreport \
     bugreportz \
     cameraserver \
@@ -114,6 +115,7 @@
     mtpd \
     ndc \
     netd \
+    perfetto \
     ping \
     ping6 \
     platform.xml \
@@ -132,6 +134,8 @@
     svc \
     tc \
     telecom \
+    traced \
+    traced_probes \
     vdc \
     vold \
     wm
diff --git a/target/product/core.mk b/target/product/core.mk
index cab8d97..bbc2b75 100644
--- a/target/product/core.mk
+++ b/target/product/core.mk
@@ -56,6 +56,7 @@
     StorageManager \
     Telecom \
     TeleService \
+    Traceur \
     VpnDialogs \
     vr \
     MmsService
diff --git a/target/product/core_64_bit_only.mk b/target/product/core_64_bit_only.mk
new file mode 100644
index 0000000..72d30f5
--- /dev/null
+++ b/target/product/core_64_bit_only.mk
@@ -0,0 +1,30 @@
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Inherit from this product for devices that support only 64-bit apps using:
+# $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+# The inheritance for this must come before the inheritance chain that leads
+# to core_minimal.mk.
+
+# Copy the 64-bit zygote startup script
+PRODUCT_COPY_FILES += system/core/rootdir/init.zygote64.rc:root/init.zygote64.rc
+
+# Set the zygote property to select the 64-bit script.
+# This line must be parsed before the one in core_minimal.mk
+PRODUCT_DEFAULT_PROPERTY_OVERRIDES += ro.zygote=zygote64
+
+TARGET_SUPPORTS_32_BIT_APPS := false
+TARGET_SUPPORTS_64_BIT_APPS := true
diff --git a/target/product/core_minimal.mk b/target/product/core_minimal.mk
index b252349..16599cb 100644
--- a/target/product/core_minimal.mk
+++ b/target/product/core_minimal.mk
@@ -85,6 +85,7 @@
     telephony-common \
     uiautomator \
     uncrypt \
+    vndk_snapshot_package \
     voip-common \
     webview \
     webview_zygote \
diff --git a/target/product/embedded.mk b/target/product/embedded.mk
index 20f0ebf..3f1d6df 100644
--- a/target/product/embedded.mk
+++ b/target/product/embedded.mk
@@ -20,11 +20,13 @@
 PRODUCT_PACKAGES += \
     adb \
     adbd \
+    usbd \
     android.hardware.configstore@1.0-service \
     android.hidl.allocator@1.0-service \
     android.hidl.memory@1.0-impl \
     android.hidl.memory@1.0-impl.vendor \
     atrace \
+    blank_screen \
     bootanimation \
     bootstat \
     charger \
@@ -49,6 +51,7 @@
     libbinder \
     libc \
     libc_malloc_debug \
+    libc_malloc_hooks \
     libcutils \
     libdl \
     libgui \
diff --git a/target/product/full_base_telephony.mk b/target/product/full_base_telephony.mk
index 375c679..af4097d 100644
--- a/target/product/full_base_telephony.mk
+++ b/target/product/full_base_telephony.mk
@@ -24,7 +24,7 @@
     ro.com.android.dataroaming=true
 
 PRODUCT_COPY_FILES := \
-    device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
+    device/sample/etc/apns-full-conf.xml:system/etc/apns-conf.xml \
     frameworks/native/data/etc/handheld_core_hardware.xml:$(TARGET_COPY_OUT_VENDOR)/etc/permissions/handheld_core_hardware.xml
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base.mk)
diff --git a/target/product/generic_armv5.mk b/target/product/generic_armv5.mk
deleted file mode 100644
index daa321a..0000000
--- a/target/product/generic_armv5.mk
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is a generic product that isn't specialized for a specific device.
-# It includes the base Android platform.
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/generic.mk)
-
-# Overrides
-PRODUCT_BRAND := generic_armv5
-PRODUCT_DEVICE := generic_armv5
-PRODUCT_NAME := generic_armv5
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index 6e7038e..f9030cf 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -56,6 +56,9 @@
 PRODUCT_PACKAGES += \
     cacerts \
 
+PRODUCT_PACKAGES += \
+    hiddenapi-package-whitelist.xml \
+
 PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
     dalvik.vm.image-dex2oat-Xms=64m \
     dalvik.vm.image-dex2oat-Xmx=64m \
diff --git a/target/product/sdk_phone_x86.mk b/target/product/sdk_phone_x86.mk
index 1e82773..b9820d3 100644
--- a/target/product/sdk_phone_x86.mk
+++ b/target/product/sdk_phone_x86.mk
@@ -24,7 +24,7 @@
 PRODUCT_COPY_FILES += \
     development/sys-img/advancedFeatures.ini:advancedFeatures.ini \
     device/generic/goldfish/data/etc/encryptionkey.img:encryptionkey.img \
-    prebuilts/qemu-kernel/x86/3.18/kernel-qemu2:kernel-ranchu
+    prebuilts/qemu-kernel/x86_64/3.18/kernel-qemu2:kernel-ranchu-64
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
 
diff --git a/target/product/treble_common.mk b/target/product/treble_common.mk
index 5880bf8..e9a97cd 100644
--- a/target/product/treble_common.mk
+++ b/target/product/treble_common.mk
@@ -70,12 +70,15 @@
 PRODUCT_COPY_FILES += \
     device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml
 
-#GSI support for the devices that disable VNDK enforcing
+# Support for devices that do not enforce VNDK
 PRODUCT_COPY_FILES += \
-    system/core/rootdir/etc/ld.config.txt:system/etc/ld.config.noenforce.txt \
     build/make/target/product/vndk/init.gsi.rc:system/etc/init/init.gsi.rc \
     build/make/target/product/vndk/init.noenforce.rc:system/etc/init/gsi/init.noenforce.rc
 
-#Set current VNDK version for GSI
+# Namespace configuration file for non-enforcing VNDK
+PRODUCT_PACKAGES += \
+    ld.config.noenforce.txt
+
+# Set current VNDK version for GSI
 PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
     ro.gsi.vndk.version=$(PLATFORM_VNDK_VERSION)
diff --git a/target/product/vndk/Android.mk b/target/product/vndk/Android.mk
index a134d02..93aaf37 100644
--- a/target/product/vndk/Android.mk
+++ b/target/product/vndk/Android.mk
@@ -77,26 +77,19 @@
 	@chmod a+x $@
 
 include $(CLEAR_VARS)
-LOCAL_MODULE := vndk_current
+LOCAL_MODULE := vndk_package
 LOCAL_REQUIRED_MODULES := \
     $(addsuffix .vendor,$(VNDK_CORE_LIBRARIES)) \
     $(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES)) \
     $(LLNDK_LIBRARIES) \
     llndk.libraries.txt \
     vndksp.libraries.txt
-
 include $(BUILD_PHONY_PACKAGE)
 
 include $(CLEAR_VARS)
-LOCAL_MODULE := vndk_package
-ifeq (current,$(BOARD_VNDK_VERSION))
+LOCAL_MODULE := vndk_snapshot_package
 LOCAL_REQUIRED_MODULES := \
-    vndk_current
-else
-LOCAL_REQUIRED_MODULES := \
-    vndk_v$(BOARD_VNDK_VERSION)_$(TARGET_ARCH)
-endif
-LOCAL_REQUIRED_MODULES += \
     $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),vndk_v$(vndk_ver)_$(TARGET_ARCH))
 include $(BUILD_PHONY_PACKAGE)
+
 endif # BOARD_VNDK_VERSION is set
diff --git a/tools/adbs b/tools/adbs
deleted file mode 100755
index 8d73630..0000000
--- a/tools/adbs
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import os.path
-import re
-import string
-import sys
-
-sys.path.insert(0, os.path.dirname(__file__) + "/../../../development/scripts")
-import stack_core
-import symbol
-
-if __name__ == '__main__':
-  # pass the options to adb
-  adb_cmd  = "adb " + ' '.join(sys.argv[1:])
-
-  # create tracer for line parsing
-  tracer = stack_core.TraceConverter()
-
-  # invoke the adb command and filter its output
-  stream = os.popen(adb_cmd)
-  while (True):
-    line = stream.readline()
-    if (line == ''):
-      break
-    if(tracer.ProcessLine(line) == False):
-      print(line.strip())
-      sys.stdout.flush()
-
-  # adb itself aborts
-  stream.close()
diff --git a/tools/auto_gen_test_config.py b/tools/auto_gen_test_config.py
index da4443c..c7c5bdc 100755
--- a/tools/auto_gen_test_config.py
+++ b/tools/auto_gen_test_config.py
@@ -70,8 +70,9 @@
     label = module
   runner = instrumentation.attributes[ATTRIBUTE_RUNNER].value
   package = manifest.attributes[ATTRIBUTE_PACKAGE].value
-  test_type = ('AndroidJUnitTest' if runner.endswith('.AndroidJUnitRunner')
-               else 'InstrumentationTest')
+  test_type = ('InstrumentationTest'
+               if runner.endswith('.InstrumentationTestRunner')
+               else 'AndroidJUnitTest')
 
   with open(instrumentation_test_config_template) as template:
     config = template.read()
diff --git a/tools/auto_gen_test_config_test.py b/tools/auto_gen_test_config_test.py
index e70eff8..e68c27f 100644
--- a/tools/auto_gen_test_config_test.py
+++ b/tools/auto_gen_test_config_test.py
@@ -155,7 +155,7 @@
       self.assertEqual(config_file.read(), EXPECTED_JUNIT_TEST_CONFIG)
 
   def testCreateInstrumentationTestConfig(self):
-    """Test creating test config for AndroidJUnitTest.
+    """Test creating test config for InstrumentationTest.
     """
     with open(self.manifest_file, 'w') as f:
       f.write(MANIFEST_INSTRUMENTATION_TEST)
diff --git a/tools/docker/.gitignore b/tools/docker/.gitignore
new file mode 100644
index 0000000..df0b367
--- /dev/null
+++ b/tools/docker/.gitignore
@@ -0,0 +1 @@
+gitconfig
diff --git a/tools/docker/Dockerfile b/tools/docker/Dockerfile
new file mode 100644
index 0000000..ec65aaf
--- /dev/null
+++ b/tools/docker/Dockerfile
@@ -0,0 +1,25 @@
+FROM ubuntu:14.04
+ARG userid
+ARG groupid
+ARG username
+
+RUN apt-get update && apt-get install -y git-core gnupg flex bison gperf build-essential zip curl zlib1g-dev gcc-multilib g++-multilib libc6-dev-i386 lib32ncurses5-dev x11proto-core-dev libx11-dev lib32z-dev ccache libgl1-mesa-dev libxml2-utils xsltproc unzip python openjdk-7-jdk
+
+RUN curl -o jdk8.tgz https://android.googlesource.com/platform/prebuilts/jdk/jdk8/+archive/master.tar.gz \
+ && tar -zxf jdk8.tgz linux-x86 \
+ && mv linux-x86 /usr/lib/jvm/java-8-openjdk-amd64 \
+ && rm -rf jdk8.tgz
+
+RUN curl -o /usr/local/bin/repo https://storage.googleapis.com/git-repo-downloads/repo \
+ && echo "e147f0392686c40cfd7d5e6f332c6ee74c4eab4d24e2694b3b0a0c037bf51dc5  /usr/local/bin/repo" | sha256sum --strict -c - \
+ && chmod a+x /usr/local/bin/repo
+
+RUN groupadd -g $groupid $username \
+ && useradd -m -u $userid -g $groupid $username \
+ && echo $username >/root/username \
+ && echo "export USER="$username >>/home/$username/.gitconfig
+COPY gitconfig /home/$username/.gitconfig
+RUN chown $userid:$groupid /home/$username/.gitconfig
+ENV HOME=/home/$username
+
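+# ARG values are not available at container run time, so the username is read
+# back from /root/username; chroot --userspec (with "/" as the root, i.e. no
+# real chroot change) simply starts the interactive shell as that user.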
+ENTRYPOINT chroot --userspec=$(cat /root/username):$(cat /root/username) / /bin/bash -i
diff --git a/tools/docker/README.md b/tools/docker/README.md
new file mode 100644
index 0000000..304fd18
--- /dev/null
+++ b/tools/docker/README.md
@@ -0,0 +1,18 @@
+The Dockerfile in this directory sets up an Ubuntu Trusty image ready to build
+a variety of Android branches (>= Lollipop). It's particularly useful to build
+older branches that required 14.04 if you've upgraded to something newer.
+
+First, build the image:
+```
+# Copy your host gitconfig, or create a stripped down version
+$ cp ~/.gitconfig gitconfig
+$ docker build --build-arg userid=$(id -u) --build-arg groupid=$(id -g) --build-arg username=$(id -un) -t android-build-trusty .
+```
+
+Then you can start up new instances with:
+```
+$ docker run -it --rm -v $ANDROID_BUILD_TOP:/src android-build-trusty
+> cd /src; source build/envsetup.sh
+> lunch aosp_arm-eng
+> m -j50
+```
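+
+Because $ANDROID_BUILD_TOP is bind-mounted into the container, the build
+output under out/ is written straight into your host checkout.
+
+If you use ccache (it is installed in the image), you can also mount your
+cache directory and point the build at it, for example (paths are
+illustrative):
+```
+$ docker run -it --rm -v $ANDROID_BUILD_TOP:/src -v $HOME/.ccache:/ccache \
+    -e USE_CCACHE=1 -e CCACHE_DIR=/ccache android-build-trusty
+```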
diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk
index 3773d38..1247896 100644
--- a/tools/fs_config/Android.mk
+++ b/tools/fs_config/Android.mk
@@ -261,6 +261,7 @@
 
 LOCAL_MODULE := passwd
 LOCAL_MODULE_CLASS := ETC
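+# Install the generated passwd file to the vendor partition rather than /system.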
+LOCAL_VENDOR_MODULE := true
 
 include $(BUILD_SYSTEM)/base_rules.mk
 
@@ -279,6 +280,7 @@
 
 LOCAL_MODULE := group
 LOCAL_MODULE_CLASS := ETC
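+# Install the generated group file to the vendor partition rather than /system.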
+LOCAL_VENDOR_MODULE := true
 
 include $(BUILD_SYSTEM)/base_rules.mk
 
diff --git a/tools/fs_config/fs_config_generator.py b/tools/fs_config/fs_config_generator.py
index c8d1dd3..d51d075 100755
--- a/tools/fs_config/fs_config_generator.py
+++ b/tools/fs_config/fs_config_generator.py
@@ -146,18 +146,27 @@
             found (str): The file found in, not required to be specified.
 
         Raises:
+            ValueError: if the friendly name is longer than 31 characters as
+                that is bionic's internal buffer size for the name.
             ValueError: if value is not a valid string number as processed by
                 int(x, 0)
         """
         self.identifier = identifier
         self.value = value
         self.found = found
-        self.normalized_value = str(int(value, 0))
+        try:
+            self.normalized_value = str(int(value, 0))
+        except ValueError:
+            raise ValueError(
+                'Invalid "value", not aid number, got: "%s"' % value)
 
         # Where we calculate the friendly name
         friendly = identifier[len(AID.PREFIX):].lower()
         self.friendly = AID._fixup_friendly(friendly)
 
+        if len(self.friendly) > 31:
+            raise ValueError(
+                'AID names must be under 32 characters, got: "%s"' %
+                self.friendly)
+
     def __eq__(self, other):
 
         return self.identifier == other.identifier \
@@ -639,10 +648,8 @@
 
         try:
             aid = AID(section_name, value, file_name)
-        except ValueError:
-            sys.exit(
-                error_message('Invalid "value", not aid number, got: \"%s\"' %
-                              value))
+        except ValueError as exception:
+            sys.exit(error_message(exception))
 
         # Values must be within OEM range
         if not Utils.in_any_range(int(aid.value, 0), self._oem_ranges):
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 9601d88..bbb5abd 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -46,13 +46,11 @@
 from __future__ import print_function
 
 import datetime
-import hashlib
 import os
 import shlex
 import shutil
 import subprocess
 import sys
-import tempfile
 import uuid
 import zipfile
 
@@ -75,6 +73,10 @@
 OPTIONS.is_signing = False
 
 
+# Partitions that should have their care_map added to META/care_map.txt.
+PARTITIONS_WITH_CARE_MAP = ('system', 'vendor', 'product')
+
+
 class OutputFile(object):
   def __init__(self, output_zip, input_dir, prefix, name):
     self._output_zip = output_zip
@@ -94,13 +96,19 @@
 
 
 def GetCareMap(which, imgname):
-  """Generate care_map of system (or vendor) partition"""
+  """Returns the care_map string for the given partition.
 
-  assert which in ("system", "vendor")
+  Args:
+    which: The partition name, must be listed in PARTITIONS_WITH_CARE_MAP.
+    imgname: The filename of the image.
+
+  Returns:
+    (which, care_map_ranges): care_map_ranges is the raw string of the care_map
+    RangeSet.
+  """
+  assert which in PARTITIONS_WITH_CARE_MAP
 
   simg = sparse_img.SparseImage(imgname)
-  care_map_list = [which]
-
   care_map_ranges = simg.care_map
   key = which + "_adjusted_partition_size"
   adjusted_blocks = OPTIONS.info_dict.get(key)
@@ -109,8 +117,7 @@
     care_map_ranges = care_map_ranges.intersect(rangelib.RangeSet(
         "0-%d" % (adjusted_blocks,)))
 
-  care_map_list.append(care_map_ranges.to_string_raw())
-  return care_map_list
+  return [which, care_map_ranges.to_string_raw()]
 
 
 def AddSystem(output_zip, prefix="IMAGES/", recovery_img=None, boot_img=None):
@@ -173,6 +180,20 @@
   return img.name
 
 
+def AddProduct(output_zip, prefix="IMAGES/"):
+  """Turn the contents of PRODUCT into a product image and store it in output_zip."""
+
+  img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "product.img")
+  if os.path.exists(img.input_name):
+    print("product.img already exists in %s, no need to rebuild..." % (prefix,))
+    return img.input_name
+
+  block_list = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "product.map")
+  CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "product", img,
+              block_list=block_list)
+  return img.name
+
+
 def AddDtbo(output_zip, prefix="IMAGES/"):
   """Adds the DTBO image.
 
@@ -462,6 +483,126 @@
   img.Write()
 
 
+def AddRadioImagesForAbOta(output_zip, ab_partitions):
+  """Adds the radio images needed for A/B OTA to the output file.
+
+  It parses the list of A/B partitions, looks for the missing ones from RADIO/
+  or VENDOR_IMAGES/ dirs, and copies them to IMAGES/ of the output file (or
+  dir).
+
+  It also ensures that, on returning from the function, all the listed A/B
+  partitions have their images available under IMAGES/.
+
+  Args:
+    output_zip: The output zip file (needs to be already open), or None to
+        write images to OPTIONS.input_tmp/.
+    ab_partitions: The list of A/B partitions.
+
+  Raises:
+    AssertionError: If it can't find an image.
+  """
+  for partition in ab_partitions:
+    img_name = partition.strip() + ".img"
+    prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
+    if os.path.exists(prebuilt_path):
+      print("%s already exists, no need to overwrite..." % (img_name,))
+      continue
+
+    img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
+    if os.path.exists(img_radio_path):
+      if output_zip:
+        common.ZipWrite(output_zip, img_radio_path, "IMAGES/" + img_name)
+      else:
+        shutil.copy(img_radio_path, prebuilt_path)
+      continue
+
+    # Walk through VENDOR_IMAGES/ since files could be under subdirs.
+    img_vendor_dir = os.path.join(OPTIONS.input_tmp, "VENDOR_IMAGES")
+    for root, _, files in os.walk(img_vendor_dir):
+      if img_name in files:
+        if output_zip:
+          common.ZipWrite(output_zip, os.path.join(root, img_name),
+                          "IMAGES/" + img_name)
+        else:
+          shutil.copy(os.path.join(root, img_name), prebuilt_path)
+        break
+
+    # Assert that the image is present under IMAGES/ now.
+    if output_zip:
+      # Zip spec says: All slashes MUST be forward slashes.
+      img_path = 'IMAGES/' + img_name
+      assert img_path in output_zip.namelist(), "cannot find " + img_name
+    else:
+      img_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
+      assert os.path.exists(img_path), "cannot find " + img_name
+
+
+def AddCareMapTxtForAbOta(output_zip, ab_partitions, image_paths):
+  """Generates and adds care_map.txt for system and vendor partitions.
+
+  Args:
+    output_zip: The output zip file (needs to be already open), or None to
+        write images to OPTIONS.input_tmp/.
+    ab_partitions: The list of A/B partitions.
+    image_paths: A map from the partition name to the image path.
+  """
+  care_map_list = []
+  for partition in ab_partitions:
+    partition = partition.strip()
+    if partition not in PARTITIONS_WITH_CARE_MAP:
+      continue
+
+    verity_block_device = "{}_verity_block_device".format(partition)
+    avb_hashtree_enable = "avb_{}_hashtree_enable".format(partition)
+    if (verity_block_device in OPTIONS.info_dict or
+        OPTIONS.info_dict.get(avb_hashtree_enable) == "true"):
+      image_path = image_paths[partition]
+      assert os.path.exists(image_path)
+      care_map_list += GetCareMap(partition, image_path)
+
+  if care_map_list:
+    care_map_path = "META/care_map.txt"
+    if output_zip and care_map_path not in output_zip.namelist():
+      common.ZipWriteStr(output_zip, care_map_path, '\n'.join(care_map_list))
+    else:
+      with open(os.path.join(OPTIONS.input_tmp, care_map_path), 'w') as fp:
+        fp.write('\n'.join(care_map_list))
+      if output_zip:
+        OPTIONS.replace_updated_files_list.append(care_map_path)
+
+
+def AddPackRadioImages(output_zip, images):
+  """Copies images listed in META/pack_radioimages.txt from RADIO/ to IMAGES/.
+
+  Args:
+    output_zip: The output zip file (needs to be already open), or None to
+        write images to OPTIONS.input_tmp/.
+    images: A list of image names.
+
+  Raises:
+    AssertionError: If a listed image can't be found.
+  """
+  for image in images:
+    img_name = image.strip()
+    _, ext = os.path.splitext(img_name)
+    if not ext:
+      img_name += ".img"
+
+    prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
+    if os.path.exists(prebuilt_path):
+      print("%s already exists, no need to overwrite..." % (img_name,))
+      continue
+
+    img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
+    assert os.path.exists(img_radio_path), \
+        "Failed to find %s at %s" % (img_name, img_radio_path)
+
+    if output_zip:
+      common.ZipWrite(output_zip, img_radio_path, "IMAGES/" + img_name)
+    else:
+      shutil.copy(img_radio_path, prebuilt_path)
+
+
 def ReplaceUpdatedFiles(zip_filename, files_list):
   """Updates all the ZIP entries listed in files_list.
 
@@ -502,13 +643,16 @@
       print("target_files appears to already contain images.")
       sys.exit(1)
 
-  # vendor.img is unlike system.img or system_other.img. Because it could be
-  # built from source, or dropped into target_files.zip as a prebuilt blob. We
-  # consider either of them as vendor.img being available, which could be used
-  # when generating vbmeta.img for AVB.
+  # {vendor,product}.img is unlike system.img or system_other.img, because it
+  # could be built from source, or dropped into target_files.zip as a prebuilt
+  # blob. We consider either case as {vendor,product}.img being available,
+  # which could be used when generating vbmeta.img for AVB.
   has_vendor = (os.path.isdir(os.path.join(OPTIONS.input_tmp, "VENDOR")) or
                 os.path.exists(os.path.join(OPTIONS.input_tmp, "IMAGES",
                                             "vendor.img")))
+  has_product = (os.path.isdir(os.path.join(OPTIONS.input_tmp, "PRODUCT")) or
+                 os.path.exists(os.path.join(OPTIONS.input_tmp, "IMAGES",
+                                             "product.img")))
   has_system_other = os.path.isdir(os.path.join(OPTIONS.input_tmp,
                                                 "SYSTEM_OTHER"))
 
@@ -532,17 +676,6 @@
 
   has_recovery = (OPTIONS.info_dict.get("no_recovery") != "true")
 
-  if OPTIONS.info_dict.get("avb_enable") == "true":
-    fp = None
-    if "build.prop" in OPTIONS.info_dict:
-      build_prop = OPTIONS.info_dict["build.prop"]
-      if "ro.build.fingerprint" in build_prop:
-        fp = build_prop["ro.build.fingerprint"]
-      elif "ro.build.thumbprint" in build_prop:
-        fp = build_prop["ro.build.thumbprint"]
-    if fp:
-      OPTIONS.info_dict["avb_salt"] = hashlib.sha256(fp).hexdigest()
-
   # A map between partition names and their paths, which could be used when
   # generating AVB vbmeta image.
   partitions = dict()
@@ -589,12 +722,16 @@
           recovery_two_step_image.AddToZip(output_zip)
 
   banner("system")
-  partitions['system'] = system_img_path = AddSystem(
+  partitions['system'] = AddSystem(
       output_zip, recovery_img=recovery_image, boot_img=boot_image)
 
   if has_vendor:
     banner("vendor")
-    partitions['vendor'] = vendor_img_path = AddVendor(output_zip)
+    partitions['vendor'] = AddVendor(output_zip)
+
+  if has_product:
+    banner("product")
+    partitions['product'] = AddProduct(output_zip)
 
   if has_system_other:
     banner("system_other")
@@ -618,95 +755,28 @@
     banner("vbmeta")
     AddVBMeta(output_zip, partitions)
 
-  # For devices using A/B update, copy over images from RADIO/ and/or
-  # VENDOR_IMAGES/ to IMAGES/ and make sure we have all the needed
-  # images ready under IMAGES/. All images should have '.img' as extension.
   banner("radio")
-  ab_partitions = os.path.join(OPTIONS.input_tmp, "META", "ab_partitions.txt")
-  if os.path.exists(ab_partitions):
-    with open(ab_partitions, 'r') as f:
-      lines = f.readlines()
-    # For devices using A/B update, generate care_map for system and vendor
-    # partitions (if present), then write this file to target_files package.
-    care_map_list = []
-    for line in lines:
-      if line.strip() == "system" and (
-          "system_verity_block_device" in OPTIONS.info_dict or
-          OPTIONS.info_dict.get("avb_system_hashtree_enable") == "true"):
-        assert os.path.exists(system_img_path)
-        care_map_list += GetCareMap("system", system_img_path)
-      if line.strip() == "vendor" and (
-          "vendor_verity_block_device" in OPTIONS.info_dict or
-          OPTIONS.info_dict.get("avb_vendor_hashtree_enable") == "true"):
-        assert os.path.exists(vendor_img_path)
-        care_map_list += GetCareMap("vendor", vendor_img_path)
+  ab_partitions_txt = os.path.join(OPTIONS.input_tmp, "META",
+                                   "ab_partitions.txt")
+  if os.path.exists(ab_partitions_txt):
+    with open(ab_partitions_txt, 'r') as f:
+      ab_partitions = f.readlines()
 
-      img_name = line.strip() + ".img"
-      prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
-      if os.path.exists(prebuilt_path):
-        print("%s already exists, no need to overwrite..." % (img_name,))
-        continue
+    # For devices using A/B update, copy over images from RADIO/ and/or
+    # VENDOR_IMAGES/ to IMAGES/ and make sure we have all the needed
+    # images ready under IMAGES/. All images should have '.img' as extension.
+    AddRadioImagesForAbOta(output_zip, ab_partitions)
 
-      img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
-      if os.path.exists(img_radio_path):
-        if output_zip:
-          common.ZipWrite(output_zip, img_radio_path,
-                          os.path.join("IMAGES", img_name))
-        else:
-          shutil.copy(img_radio_path, prebuilt_path)
-      else:
-        img_vendor_dir = os.path.join(OPTIONS.input_tmp, "VENDOR_IMAGES")
-        for root, _, files in os.walk(img_vendor_dir):
-          if img_name in files:
-            if output_zip:
-              common.ZipWrite(output_zip, os.path.join(root, img_name),
-                              os.path.join("IMAGES", img_name))
-            else:
-              shutil.copy(os.path.join(root, img_name), prebuilt_path)
-            break
-
-      if output_zip:
-        # Zip spec says: All slashes MUST be forward slashes.
-        img_path = 'IMAGES/' + img_name
-        assert img_path in output_zip.namelist(), "cannot find " + img_name
-      else:
-        img_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
-        assert os.path.exists(img_path), "cannot find " + img_name
-
-    if care_map_list:
-      care_map_path = "META/care_map.txt"
-      if output_zip and care_map_path not in output_zip.namelist():
-        common.ZipWriteStr(output_zip, care_map_path, '\n'.join(care_map_list))
-      else:
-        with open(os.path.join(OPTIONS.input_tmp, care_map_path), 'w') as fp:
-          fp.write('\n'.join(care_map_list))
-        if output_zip:
-          OPTIONS.replace_updated_files_list.append(care_map_path)
+    # Generate care_map.txt for the partitions that need one (if present), then
+    # write it to the target_files package.
+    AddCareMapTxtForAbOta(output_zip, ab_partitions, partitions)
 
   # Radio images that need to be packed into IMAGES/, and product-img.zip.
-  pack_radioimages = os.path.join(
+  pack_radioimages_txt = os.path.join(
       OPTIONS.input_tmp, "META", "pack_radioimages.txt")
-  if os.path.exists(pack_radioimages):
-    with open(pack_radioimages, 'r') as f:
-      lines = f.readlines()
-    for line in lines:
-      img_name = line.strip()
-      _, ext = os.path.splitext(img_name)
-      if not ext:
-        img_name += ".img"
-      prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", img_name)
-      if os.path.exists(prebuilt_path):
-        print("%s already exists, no need to overwrite..." % (img_name,))
-        continue
-
-      img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
-      assert os.path.exists(img_radio_path), \
-          "Failed to find %s at %s" % (img_name, img_radio_path)
-      if output_zip:
-        common.ZipWrite(output_zip, img_radio_path,
-                        os.path.join("IMAGES", img_name))
-      else:
-        shutil.copy(img_radio_path, prebuilt_path)
+  if os.path.exists(pack_radioimages_txt):
+    with open(pack_radioimages_txt, 'r') as f:
+      AddPackRadioImages(output_zip, f.readlines())
 
   if output_zip:
     common.ZipClose(output_zip)
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index 69750b2..24c5b2d 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -15,7 +15,6 @@
 from __future__ import print_function
 
 import array
-import common
 import copy
 import functools
 import heapq
@@ -27,9 +26,10 @@
 import subprocess
 import sys
 import threading
-
 from collections import deque, OrderedDict
 from hashlib import sha1
+
+import common
 from rangelib import RangeSet
 
 
@@ -191,8 +191,6 @@
     self.tgt_sha1 = tgt_sha1
     self.src_sha1 = src_sha1
     self.style = style
-    self.intact = (getattr(tgt_ranges, "monotonic", False) and
-                   getattr(src_ranges, "monotonic", False))
 
     # We use OrderedDict rather than dict so that the output is repeatable;
     # otherwise it would depend on the hash values of the Transfer objects.
@@ -258,6 +256,72 @@
     return self.score <= other.score
 
 
+class ImgdiffStats(object):
+  """A class that collects imgdiff stats.
+
+  It keeps track of the files to which imgdiff will be applied while
+  generating a BlockImageDiff, and also logs the ones that cannot use imgdiff,
+  with the specific reasons. The stats are only meaningful when imgdiff is not
+  disabled by the caller of BlockImageDiff. In addition, only files with
+  supported types (BlockImageDiff.FileTypeSupportedByImgdiff()) are allowed to
+  be logged.
+  """
+
+  USED_IMGDIFF = "APK files diff'd with imgdiff"
+  USED_IMGDIFF_LARGE_APK = "Large APK files split and diff'd with imgdiff"
+
+  # Reasons for not applying imgdiff on APKs.
+  SKIPPED_TRIMMED = "Not used imgdiff due to trimmed RangeSet"
+  SKIPPED_NONMONOTONIC = "Not used imgdiff due to having non-monotonic ranges"
+  SKIPPED_SHARED_BLOCKS = "Not used imgdiff due to using shared blocks"
+  SKIPPED_INCOMPLETE = "Not used imgdiff due to incomplete RangeSet"
+
+  # The list of valid reasons, which will also be the dumped order in a report.
+  REASONS = (
+      USED_IMGDIFF,
+      USED_IMGDIFF_LARGE_APK,
+      SKIPPED_TRIMMED,
+      SKIPPED_NONMONOTONIC,
+      SKIPPED_SHARED_BLOCKS,
+      SKIPPED_INCOMPLETE,
+  )
+
+  def __init__(self):
+    self.stats = {}
+
+  def Log(self, filename, reason):
+    """Logs why imgdiff can or cannot be applied to the given filename.
+
+    Args:
+      filename: The filename string.
+      reason: One of the reason constants listed in REASONS.
+
+    Raises:
+      AssertionError: On unsupported filetypes or invalid reason.
+    """
+    assert BlockImageDiff.FileTypeSupportedByImgdiff(filename)
+    assert reason in self.REASONS
+
+    if reason not in self.stats:
+      self.stats[reason] = set()
+    self.stats[reason].add(filename)
+
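+  # The report has one section per reason, with a header line such as
+  # " APK files diff'd with imgdiff (count: N) " followed by the file names.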
+  def Report(self):
+    """Prints a report of the collected imgdiff stats."""
+
+    def print_header(header, separator):
+      print(header)
+      print(separator * len(header) + '\n')
+
+    print_header('  Imgdiff Stats Report  ', '=')
+    for key in self.REASONS:
+      if key not in self.stats:
+        continue
+      values = self.stats[key]
+      section_header = ' {} (count: {}) '.format(key, len(values))
+      print_header(section_header, '-')
+      print(''.join(['  {}\n'.format(name) for name in values]))
+
+
 # BlockImageDiff works on two image objects.  An image object is
 # anything that provides the following attributes:
 #
@@ -313,6 +377,7 @@
     self.touched_src_ranges = RangeSet()
     self.touched_src_sha1 = None
     self.disable_imgdiff = disable_imgdiff
+    self.imgdiff_stats = ImgdiffStats() if not disable_imgdiff else None
 
     assert version in (3, 4)
 
@@ -334,6 +399,65 @@
   def max_stashed_size(self):
     return self._max_stashed_size
 
+  @staticmethod
+  def FileTypeSupportedByImgdiff(filename):
+    """Returns whether the file type is supported by imgdiff."""
+    return filename.lower().endswith(('.apk', '.jar', '.zip'))
+
+  def CanUseImgdiff(self, name, tgt_ranges, src_ranges, large_apk=False):
+    """Checks whether we can apply imgdiff for the given RangeSets.
+
+    For files in ZIP format (e.g., APKs, JARs, etc.) we would like to use
+    'imgdiff -z' if possible, because it usually produces significantly
+    smaller patches than bsdiff.
+
+    This is permissible if all of the following conditions hold.
+      - The imgdiff hasn't been disabled by the caller (e.g. squashfs);
+      - The file type is supported by imgdiff;
+      - The source and target blocks are monotonic (i.e. the data is stored with
+        blocks in increasing order);
+      - Neither file uses shared blocks;
+      - Both files have complete lists of blocks;
+      - We haven't removed any blocks from the source set.
+
+    If all these conditions are satisfied, concatenating all the blocks in the
+    RangeSet in order will produce a valid ZIP file (plus possibly extra zeros
+    in the last block). imgdiff is fine with extra zeros at the end of the file.
+
+    Args:
+      name: The filename to be diff'd.
+      tgt_ranges: The target RangeSet.
+      src_ranges: The source RangeSet.
+      large_apk: Whether this is to split a large APK.
+
+    Returns:
+      A boolean result.
+    """
+    if self.disable_imgdiff or not self.FileTypeSupportedByImgdiff(name):
+      return False
+
+    if not tgt_ranges.monotonic or not src_ranges.monotonic:
+      self.imgdiff_stats.Log(name, ImgdiffStats.SKIPPED_NONMONOTONIC)
+      return False
+
+    if (tgt_ranges.extra.get('uses_shared_blocks') or
+        src_ranges.extra.get('uses_shared_blocks')):
+      self.imgdiff_stats.Log(name, ImgdiffStats.SKIPPED_SHARED_BLOCKS)
+      return False
+
+    if tgt_ranges.extra.get('incomplete') or src_ranges.extra.get('incomplete'):
+      self.imgdiff_stats.Log(name, ImgdiffStats.SKIPPED_INCOMPLETE)
+      return False
+
+    if tgt_ranges.extra.get('trimmed') or src_ranges.extra.get('trimmed'):
+      self.imgdiff_stats.Log(name, ImgdiffStats.SKIPPED_TRIMMED)
+      return False
+
+    reason = (ImgdiffStats.USED_IMGDIFF_LARGE_APK if large_apk
+              else ImgdiffStats.USED_IMGDIFF)
+    self.imgdiff_stats.Log(name, reason)
+    return True
+
   def Compute(self, prefix):
     # When looking for a source file to use as the diff input for a
     # target file, we try:
@@ -362,10 +486,15 @@
 
     # Double-check our work.
     self.AssertSequenceGood()
+    self.AssertSha1Good()
 
     self.ComputePatches(prefix)
     self.WriteTransfers(prefix)
 
+    # Report the imgdiff stats.
+    if common.OPTIONS.verbose and not self.disable_imgdiff:
+      self.imgdiff_stats.Report()
+
   def WriteTransfers(self, prefix):
     def WriteSplitTransfers(out, style, target_blocks):
       """Limit the size of operand in command 'new' and 'zero' to 1024 blocks.
@@ -418,7 +547,7 @@
       #   <# blocks> - <stash refs...>
 
       size = xf.src_ranges.size()
-      src_str = [str(size)]
+      src_str_buffer = [str(size)]
 
       unstashed_src_ranges = xf.src_ranges
       mapped_stashes = []
@@ -428,7 +557,7 @@
         sr = xf.src_ranges.map_within(sr)
         mapped_stashes.append(sr)
         assert sh in stashes
-        src_str.append("%s:%s" % (sh, sr.to_string_raw()))
+        src_str_buffer.append("%s:%s" % (sh, sr.to_string_raw()))
         stashes[sh] -= 1
         if stashes[sh] == 0:
           free_string.append("free %s\n" % (sh,))
@@ -436,17 +565,17 @@
           stashes.pop(sh)
 
       if unstashed_src_ranges:
-        src_str.insert(1, unstashed_src_ranges.to_string_raw())
+        src_str_buffer.insert(1, unstashed_src_ranges.to_string_raw())
         if xf.use_stash:
           mapped_unstashed = xf.src_ranges.map_within(unstashed_src_ranges)
-          src_str.insert(2, mapped_unstashed.to_string_raw())
+          src_str_buffer.insert(2, mapped_unstashed.to_string_raw())
           mapped_stashes.append(mapped_unstashed)
           self.AssertPartition(RangeSet(data=(0, size)), mapped_stashes)
       else:
-        src_str.insert(1, "-")
+        src_str_buffer.insert(1, "-")
         self.AssertPartition(RangeSet(data=(0, size)), mapped_stashes)
 
-      src_str = " ".join(src_str)
+      src_str = " ".join(src_str_buffer)
 
       # version 3+:
       #   zero <rangeset>
@@ -567,11 +696,11 @@
       max_allowed = OPTIONS.cache_size * OPTIONS.stash_threshold
       print("max stashed blocks: %d  (%d bytes), "
             "limit: %d bytes (%.2f%%)\n" % (
-            max_stashed_blocks, self._max_stashed_size, max_allowed,
-            self._max_stashed_size * 100.0 / max_allowed))
+                max_stashed_blocks, self._max_stashed_size, max_allowed,
+                self._max_stashed_size * 100.0 / max_allowed))
     else:
       print("max stashed blocks: %d  (%d bytes), limit: <unknown>\n" % (
-            max_stashed_blocks, self._max_stashed_size))
+          max_stashed_blocks, self._max_stashed_size))
 
   def ReviseStashSize(self):
     print("Revising stash size...")
@@ -711,28 +840,13 @@
               # transfer is intact.
               assert not self.disable_imgdiff
               imgdiff = True
-              if not xf.intact:
+              if (xf.src_ranges.extra.get('trimmed') or
+                  xf.tgt_ranges.extra.get('trimmed')):
                 imgdiff = False
                 xf.patch = None
             else:
-              # For files in zip format (eg, APKs, JARs, etc.) we would
-              # like to use imgdiff -z if possible (because it usually
-              # produces significantly smaller patches than bsdiff).
-              # This is permissible if:
-              #
-              #  - imgdiff is not disabled, and
-              #  - the source and target files are monotonic (ie, the
-              #    data is stored with blocks in increasing order), and
-              #  - we haven't removed any blocks from the source set.
-              #
-              # If these conditions are satisfied then appending all the
-              # blocks in the set together in order will produce a valid
-              # zip file (plus possibly extra zeros in the last block),
-              # which is what imgdiff needs to operate.  (imgdiff is
-              # fine with extra zeros at the end of the file.)
-              imgdiff = (not self.disable_imgdiff and xf.intact and
-                         xf.tgt_name.split(".")[-1].lower()
-                         in ("apk", "jar", "zip"))
+              imgdiff = self.CanUseImgdiff(
+                  xf.tgt_name, xf.tgt_ranges, xf.src_ranges)
             xf.style = "imgdiff" if imgdiff else "bsdiff"
             diff_queue.append((index, imgdiff, patch_num))
             patch_num += 1
@@ -749,10 +863,6 @@
       diff_total = len(diff_queue)
       patches = [None] * diff_total
       error_messages = []
-      warning_messages = []
-      if sys.stdout.isatty():
-        global diff_done
-        diff_done = 0
 
       # Using multiprocessing doesn't give additional benefits, due to the
       # pattern of the code. The diffing work is done by subprocess.call, which
@@ -768,22 +878,27 @@
             if not diff_queue:
               return
             xf_index, imgdiff, patch_index = diff_queue.pop()
+            xf = self.transfers[xf_index]
 
-          xf = self.transfers[xf_index]
+            if sys.stdout.isatty():
+              diff_left = len(diff_queue)
+              progress = (diff_total - diff_left) * 100 / diff_total
+              # '\033[K' is to clear to EOL.
+              print(' [%3d%%] %s\033[K' % (progress, xf.tgt_name), end='\r')
+              sys.stdout.flush()
+
           patch = xf.patch
           if not patch:
             src_ranges = xf.src_ranges
             tgt_ranges = xf.tgt_ranges
 
-            # Needs lock since WriteRangeDataToFd() is stateful (calling seek).
-            with lock:
-              src_file = common.MakeTempFile(prefix="src-")
-              with open(src_file, "wb") as fd:
-                self.src.WriteRangeDataToFd(src_ranges, fd)
+            src_file = common.MakeTempFile(prefix="src-")
+            with open(src_file, "wb") as fd:
+              self.src.WriteRangeDataToFd(src_ranges, fd)
 
-              tgt_file = common.MakeTempFile(prefix="tgt-")
-              with open(tgt_file, "wb") as fd:
-                self.tgt.WriteRangeDataToFd(tgt_ranges, fd)
+            tgt_file = common.MakeTempFile(prefix="tgt-")
+            with open(tgt_file, "wb") as fd:
+              self.tgt.WriteRangeDataToFd(tgt_ranges, fd)
 
             message = []
             try:
@@ -791,40 +906,16 @@
             except ValueError as e:
               message.append(
                   "Failed to generate %s for %s: tgt=%s, src=%s:\n%s" % (
-                  "imgdiff" if imgdiff else "bsdiff",
-                  xf.tgt_name if xf.tgt_name == xf.src_name else
+                      "imgdiff" if imgdiff else "bsdiff",
+                      xf.tgt_name if xf.tgt_name == xf.src_name else
                       xf.tgt_name + " (from " + xf.src_name + ")",
-                  xf.tgt_ranges, xf.src_ranges, e.message))
-              # TODO(b/68016761): Better handle the holes in mke2fs created
-              # images.
-              if imgdiff:
-                try:
-                  patch = compute_patch(src_file, tgt_file, imgdiff=False)
-                  message.append(
-                      "Fell back and generated with bsdiff instead for %s" % (
-                      xf.tgt_name,))
-                  xf.style = "bsdiff"
-                  with lock:
-                    warning_messages.extend(message)
-                  del message[:]
-                except ValueError as e:
-                  message.append(
-                      "Also failed to generate with bsdiff for %s:\n%s" % (
-                      xf.tgt_name, e.message))
-
+                      xf.tgt_ranges, xf.src_ranges, e.message))
             if message:
               with lock:
                 error_messages.extend(message)
 
           with lock:
             patches[patch_index] = (xf_index, patch)
-            if sys.stdout.isatty():
-              global diff_done
-              diff_done += 1
-              progress = diff_done * 100 / diff_total
-              # '\033[K' is to clear to EOL.
-              print(' [%d%%] %s\033[K' % (progress, xf.tgt_name), end='\r')
-              sys.stdout.flush()
 
       threads = [threading.Thread(target=diff_worker)
                  for _ in range(self.threads)]
@@ -836,11 +927,6 @@
       if sys.stdout.isatty():
         print('\n')
 
-      if warning_messages:
-        print('WARNING:')
-        print('\n'.join(warning_messages))
-        print('\n\n\n')
-
       if error_messages:
         print('ERROR:')
         print('\n'.join(error_messages))
@@ -861,11 +947,26 @@
         if common.OPTIONS.verbose:
           tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize
           print("%10d %10d (%6.2f%%) %7s %s %s %s" % (
-                xf.patch_len, tgt_size, xf.patch_len * 100.0 / tgt_size,
-                xf.style,
-                xf.tgt_name if xf.tgt_name == xf.src_name else (
-                    xf.tgt_name + " (from " + xf.src_name + ")"),
-                xf.tgt_ranges, xf.src_ranges))
+              xf.patch_len, tgt_size, xf.patch_len * 100.0 / tgt_size,
+              xf.style,
+              xf.tgt_name if xf.tgt_name == xf.src_name else (
+                  xf.tgt_name + " (from " + xf.src_name + ")"),
+              xf.tgt_ranges, xf.src_ranges))
+
+  def AssertSha1Good(self):
+    """Check the SHA-1 of the src & tgt blocks in the transfer list.
+
+    Double check the SHA-1 value to avoid the issue in b/71908713, where
+    SparseImage.RangeSha1() messed up the hash calculation in a multi-threaded
+    environment. That specific problem has been fixed by protecting the
+    underlying generator function 'SparseImage._GetRangeData()' with a lock.
+    """
+    for xf in self.transfers:
+      tgt_sha1 = self.tgt.RangeSha1(xf.tgt_ranges)
+      assert xf.tgt_sha1 == tgt_sha1
+      if xf.style == "diff":
+        src_sha1 = self.src.RangeSha1(xf.src_ranges)
+        assert xf.src_sha1 == src_sha1
 
   def AssertSequenceGood(self):
     # Simulate the sequences of transfers we will output, and check that:
@@ -964,7 +1065,7 @@
           out_of_order += 1
           assert xf.src_ranges.overlaps(u.tgt_ranges)
           xf.src_ranges = xf.src_ranges.subtract(u.tgt_ranges)
-          xf.intact = False
+          xf.src_ranges.extra['trimmed'] = True
 
       if xf.style == "diff" and not xf.src_ranges:
         # nothing left to diff from; treat as new data
@@ -1083,7 +1184,8 @@
       while sinks:
         new_sinks = OrderedDict()
         for u in sinks:
-          if u not in G: continue
+          if u not in G:
+            continue
           s2.appendleft(u)
           del G[u]
           for iu in u.incoming:
@@ -1096,7 +1198,8 @@
       while sources:
         new_sources = OrderedDict()
         for u in sources:
-          if u not in G: continue
+          if u not in G:
+            continue
           s1.append(u)
           del G[u]
           for iu in u.outgoing:
@@ -1105,7 +1208,8 @@
               new_sources[iu] = None
         sources = new_sources
 
-      if not G: break
+      if not G:
+        break
 
       # Find the "best" vertex to put next.  "Best" is the one that
       # maximizes the net difference in source blocks saved we get by
@@ -1162,14 +1266,16 @@
       intersections = OrderedDict()
       for s, e in a.tgt_ranges:
         for i in range(s, e):
-          if i >= len(source_ranges): break
+          if i >= len(source_ranges):
+            break
           # Add all the Transfers in source_ranges[i] to the (ordered) set.
           if source_ranges[i] is not None:
             for j in source_ranges[i]:
               intersections[j] = None
 
       for b in intersections:
-        if a is b: continue
+        if a is b:
+          continue
 
         # If the blocks written by A are read by B, then B needs to go before A.
         i = a.tgt_ranges.intersect(b.src_ranges)
@@ -1248,11 +1354,12 @@
                  style, by_id)
         return
 
-      if tgt_name.split(".")[-1].lower() in ("apk", "jar", "zip"):
-        split_enable = (not self.disable_imgdiff and src_ranges.monotonic and
-                        tgt_ranges.monotonic)
-        if split_enable and (self.tgt.RangeSha1(tgt_ranges) !=
-                             self.src.RangeSha1(src_ranges)):
+      # Split large APKs with imgdiff, if possible. We're intentionally checking
+      # file types one more time (CanUseImgdiff() checks that as well), before
+      # calling the costly RangeSha1()s.
+      if (self.FileTypeSupportedByImgdiff(tgt_name) and
+          self.tgt.RangeSha1(tgt_ranges) != self.src.RangeSha1(src_ranges)):
+        if self.CanUseImgdiff(tgt_name, tgt_ranges, src_ranges, True):
           large_apks.append((tgt_name, src_name, tgt_ranges, src_ranges))
           return
 
@@ -1305,8 +1412,9 @@
 
         if tgt_changed < tgt_size * crop_threshold:
           assert tgt_changed + tgt_skipped.size() == tgt_size
-          print('%10d %10d (%6.2f%%) %s' % (tgt_skipped.size(), tgt_size,
-                tgt_skipped.size() * 100.0 / tgt_size, tgt_name))
+          print('%10d %10d (%6.2f%%) %s' % (
+              tgt_skipped.size(), tgt_size,
+              tgt_skipped.size() * 100.0 / tgt_size, tgt_name))
           AddSplitTransfers(
               "%s-skipped" % (tgt_name,),
               "%s-skipped" % (src_name,),
@@ -1385,8 +1493,8 @@
       assert patch_start == patch_size
       return split_info_list
 
-    def AddSplitTransferForLargeApks():
-      """Create split transfers for large apk files.
+    def SplitLargeApks():
+      """Split the large apks files.
 
       Example: Chrome.apk will be split into
         src-0: Chrome.apk-0, tgt-0: Chrome.apk-0
@@ -1399,11 +1507,6 @@
       be valid because the block ranges of src-X & tgt-X will always stay the
       same afterwards; but there's a chance we don't use the patch if we
       convert the "diff" command into "new" or "move" later.
-
-      The split will be attempted by calling imgdiff, which expects the input
-      files to be valid zip archives. If imgdiff fails for some reason (i.e.
-      holes in the APK file), we will fall back to split the failed APKs into
-      fixed size chunks.
       """
 
       while True:
@@ -1414,11 +1517,10 @@
 
         src_file = common.MakeTempFile(prefix="src-")
         tgt_file = common.MakeTempFile(prefix="tgt-")
-        with transfer_lock:
-          with open(src_file, "wb") as src_fd:
-            self.src.WriteRangeDataToFd(src_ranges, src_fd)
-          with open(tgt_file, "wb") as tgt_fd:
-            self.tgt.WriteRangeDataToFd(tgt_ranges, tgt_fd)
+        with open(src_file, "wb") as src_fd:
+          self.src.WriteRangeDataToFd(src_ranges, src_fd)
+        with open(tgt_file, "wb") as tgt_fd:
+          self.tgt.WriteRangeDataToFd(tgt_ranges, tgt_fd)
 
         patch_file = common.MakeTempFile(prefix="patch-")
         patch_info_file = common.MakeTempFile(prefix="split_info-")
@@ -1426,16 +1528,11 @@
                "--block-limit={}".format(max_blocks_per_transfer),
                "--split-info=" + patch_info_file,
                src_file, tgt_file, patch_file]
-        p = common.Run(cmd, stdout=subprocess.PIPE)
-        p.communicate()
-        if p.returncode != 0:
-          print("Failed to create patch between {} and {},"
-                " falling back to bsdiff".format(src_name, tgt_name))
-          with transfer_lock:
-            AddSplitTransfersWithFixedSizeChunks(tgt_name, src_name,
-                                                 tgt_ranges, src_ranges,
-                                                 "diff", self.transfers)
-          continue
+        p = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+        imgdiff_output, _ = p.communicate()
+        assert p.returncode == 0, \
+            "Failed to create imgdiff patch between {} and {}:\n{}".format(
+                src_name, tgt_name, imgdiff_output)
 
         with open(patch_info_file) as patch_info:
           lines = patch_info.readlines()
@@ -1445,29 +1542,29 @@
                                                     tgt_ranges, src_ranges,
                                                     lines)
         for index, (patch_start, patch_length, split_tgt_ranges,
-            split_src_ranges) in enumerate(split_info_list):
+                    split_src_ranges) in enumerate(split_info_list):
           with open(patch_file) as f:
             f.seek(patch_start)
             patch_content = f.read(patch_length)
 
           split_src_name = "{}-{}".format(src_name, index)
           split_tgt_name = "{}-{}".format(tgt_name, index)
-          transfer_split = Transfer(split_tgt_name, split_src_name,
-                                    split_tgt_ranges, split_src_ranges,
-                                    self.tgt.RangeSha1(split_tgt_ranges),
-                                    self.src.RangeSha1(split_src_ranges),
-                                    "diff", self.transfers)
-          transfer_split.patch = patch_content
+          split_large_apks.append((split_tgt_name,
+                                   split_src_name,
+                                   split_tgt_ranges,
+                                   split_src_ranges,
+                                   patch_content))
 
     print("Finding transfers...")
 
     large_apks = []
+    split_large_apks = []
     cache_size = common.OPTIONS.cache_size
     split_threshold = 0.125
     max_blocks_per_transfer = int(cache_size * split_threshold /
                                   self.tgt.blocksize)
     empty = RangeSet()
-    for tgt_fn, tgt_ranges in self.tgt.file_map.items():
+    for tgt_fn, tgt_ranges in sorted(self.tgt.file_map.items()):
       if tgt_fn == "__ZERO":
         # the special "__ZERO" domain is all the blocks not contained
         # in any file and that are filled with zeros.  We have a
@@ -1511,13 +1608,23 @@
       AddTransfer(tgt_fn, None, tgt_ranges, empty, "new", self.transfers)
 
     transfer_lock = threading.Lock()
-    threads = [threading.Thread(target=AddSplitTransferForLargeApks)
+    threads = [threading.Thread(target=SplitLargeApks)
                for _ in range(self.threads)]
     for th in threads:
       th.start()
     while threads:
       threads.pop().join()
 
+    # Sort the split transfers for large APKs to generate a deterministic
+    # package.
+    split_large_apks.sort()
+    for (tgt_name, src_name, tgt_ranges, src_ranges,
+         patch) in split_large_apks:
+      transfer_split = Transfer(tgt_name, src_name, tgt_ranges, src_ranges,
+                                self.tgt.RangeSha1(tgt_ranges),
+                                self.src.RangeSha1(src_ranges),
+                                "diff", self.transfers)
+      transfer_split.patch = patch
+
   def AbbreviateSourceNames(self):
     for k in self.src.file_map.keys():
       b = os.path.basename(k)
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index ed60188..123ec7c 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -325,8 +325,10 @@
     else:
       return True, unsparse_image_path
   inflate_command = ["simg2img", sparse_image_path, unsparse_image_path]
-  (_, exit_code) = RunCommand(inflate_command)
+  (inflate_output, exit_code) = RunCommand(inflate_command)
   if exit_code != 0:
+    print("Error: '%s' failed with exit code %d:\n%s" % (
+        inflate_command, exit_code, inflate_output))
     os.remove(unsparse_image_path)
     return False, None
   return True, unsparse_image_path
@@ -553,6 +555,8 @@
         build_command.extend(["-U", prop_dict["uuid"]])
       if "hash_seed" in prop_dict:
         build_command.extend(["-S", prop_dict["hash_seed"]])
+    if "ext4_share_dup_blocks" in prop_dict:
+      build_command.append("-c")
     if "selinux_fc" in prop_dict:
       build_command.append(prop_dict["selinux_fc"])
   elif fs_type.startswith("squash"):
@@ -569,12 +573,12 @@
       build_command.extend(["-c", prop_dict["selinux_fc"]])
     if "block_list" in prop_dict:
       build_command.extend(["-B", prop_dict["block_list"]])
+    if "squashfs_block_size" in prop_dict:
+      build_command.extend(["-b", prop_dict["squashfs_block_size"]])
     if "squashfs_compressor" in prop_dict:
       build_command.extend(["-z", prop_dict["squashfs_compressor"]])
     if "squashfs_compressor_opt" in prop_dict:
       build_command.extend(["-zo", prop_dict["squashfs_compressor_opt"]])
-    if "squashfs_block_size" in prop_dict:
-      build_command.extend(["-b", prop_dict["squashfs_block_size"]])
     if prop_dict.get("squashfs_disable_4k_align") == "true":
       build_command.extend(["-a"])
   elif fs_type.startswith("f2fs"):
@@ -607,7 +611,8 @@
 
   (mkfs_output, exit_code) = RunCommand(build_command)
   if exit_code != 0:
-    print("Error: '%s' failed with exit code %d" % (build_command, exit_code))
+    print("Error: '%s' failed with exit code %d:\n%s" % (
+        build_command, exit_code, mkfs_output))
     return False
 
   # Check if there's enough headroom space available for ext4 image.
@@ -654,13 +659,13 @@
 
     # Run e2fsck on the inflated image file
     e2fsck_command = ["e2fsck", "-f", "-n", unsparse_image]
-    (_, exit_code) = RunCommand(e2fsck_command)
+    (e2fsck_output, exit_code) = RunCommand(e2fsck_command)
 
     os.remove(unsparse_image)
 
     if exit_code != 0:
-      print("Error: '%s' failed with exit code %d" % (e2fsck_command,
-                                                      exit_code))
+      print("Error: '%s' failed with exit code %d:\n%s" % (
+          e2fsck_command, exit_code, e2fsck_output))
       return False
 
   return True
@@ -720,6 +725,7 @@
     copy_prop("system_root_image", "system_root_image")
     copy_prop("ramdisk_dir", "ramdisk_dir")
     copy_prop("ramdisk_fs_config", "ramdisk_fs_config")
+    copy_prop("ext4_share_dup_blocks", "ext4_share_dup_blocks")
     copy_prop("system_squashfs_compressor", "squashfs_compressor")
     copy_prop("system_squashfs_compressor_opt", "squashfs_compressor_opt")
     copy_prop("system_squashfs_block_size", "squashfs_block_size")
@@ -765,12 +771,29 @@
     copy_prop("vendor_size", "partition_size")
     copy_prop("vendor_journal_size", "journal_size")
     copy_prop("vendor_verity_block_device", "verity_block_device")
+    copy_prop("ext4_share_dup_blocks", "ext4_share_dup_blocks")
     copy_prop("vendor_squashfs_compressor", "squashfs_compressor")
     copy_prop("vendor_squashfs_compressor_opt", "squashfs_compressor_opt")
     copy_prop("vendor_squashfs_block_size", "squashfs_block_size")
     copy_prop("vendor_squashfs_disable_4k_align", "squashfs_disable_4k_align")
     copy_prop("vendor_base_fs_file", "base_fs_file")
     copy_prop("vendor_extfs_inode_count", "extfs_inode_count")
+  elif mount_point == "product":
+    copy_prop("avb_product_hashtree_enable", "avb_hashtree_enable")
+    copy_prop("avb_product_add_hashtree_footer_args",
+              "avb_add_hashtree_footer_args")
+    copy_prop("avb_product_key_path", "avb_key_path")
+    copy_prop("avb_product_algorithm", "avb_algorithm")
+    copy_prop("product_fs_type", "fs_type")
+    copy_prop("product_size", "partition_size")
+    copy_prop("product_journal_size", "journal_size")
+    copy_prop("product_verity_block_device", "verity_block_device")
+    copy_prop("product_squashfs_compressor", "squashfs_compressor")
+    copy_prop("product_squashfs_compressor_opt", "squashfs_compressor_opt")
+    copy_prop("product_squashfs_block_size", "squashfs_block_size")
+    copy_prop("product_squashfs_disable_4k_align", "squashfs_disable_4k_align")
+    copy_prop("product_base_fs_file", "base_fs_file")
+    copy_prop("product_extfs_inode_count", "extfs_inode_count")
   elif mount_point == "oem":
     copy_prop("fs_type", "fs_type")
     copy_prop("oem_size", "partition_size")
@@ -824,6 +847,8 @@
       mount_point = "vendor"
     elif image_filename == "oem.img":
       mount_point = "oem"
+    elif image_filename == "product.img":
+      mount_point = "product"
     else:
       print("error: unknown image file name ", image_filename, file=sys.stderr)
       sys.exit(1)
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index b5e9d8b..3cac90a 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -154,22 +154,18 @@
   print('Verifying A/B OTA payload signatures...')
 
   # Dump pubkey from the certificate.
-  pubkey = common.MakeTempFile(prefix="key-", suffix=".key")
-  cmd = ['openssl', 'x509', '-pubkey', '-noout', '-in', cert, '-out', pubkey]
-  proc = common.Run(cmd, stdout=subprocess.PIPE)
-  stdoutdata, _ = proc.communicate()
-  assert proc.returncode == 0, \
-      'Failed to dump public key from certificate: %s\n%s' % (cert, stdoutdata)
+  pubkey = common.MakeTempFile(prefix="key-", suffix=".pem")
+  with open(pubkey, 'wb') as pubkey_fp:
+    pubkey_fp.write(common.ExtractPublicKey(cert))
 
-  package_dir = tempfile.mkdtemp(prefix='package-')
-  common.OPTIONS.tempfiles.append(package_dir)
+  package_dir = common.MakeTempDir(prefix='package-')
 
   # Signature verification with delta_generator.
   payload_file = package_zip.extract('payload.bin', package_dir)
   cmd = ['delta_generator',
          '--in_file=' + payload_file,
          '--public_key=' + pubkey]
-  proc = common.Run(cmd, stdout=subprocess.PIPE)
+  proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
   stdoutdata, _ = proc.communicate()
   assert proc.returncode == 0, \
       'Failed to verify payload with delta_generator: %s\n%s' % (package,
diff --git a/tools/releasetools/check_target_files_signatures.py b/tools/releasetools/check_target_files_signatures.py
index c4877e0..db63fd3 100755
--- a/tools/releasetools/check_target_files_signatures.py
+++ b/tools/releasetools/check_target_files_signatures.py
@@ -53,11 +53,13 @@
 
 import common
 
-# Work around a bug in python's zipfile module that prevents opening
-# of zipfiles if any entry has an extra field of between 1 and 3 bytes
-# (which is common with zipaligned APKs).  This overrides the
-# ZipInfo._decodeExtra() method (which contains the bug) with an empty
-# version (since we don't need to decode the extra field anyway).
+# Work around a bug in Python's zipfile module that prevents opening of zipfiles
+# if any entry has an extra field of between 1 and 3 bytes (which is common with
+# zipaligned APKs). This overrides the ZipInfo._decodeExtra() method (which
+# contains the bug) with an empty version (since we don't need to decode the
+# extra field anyway).
+# Issue #14315: https://bugs.python.org/issue14315, fixed in Python 2.7.8 and
+# Python 3.5.0 alpha 1.
 class MyZipInfo(zipfile.ZipInfo):
   def _decodeExtra(self):
     pass
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index ebebd63..c4fb29b 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -25,17 +25,17 @@
 import re
 import shlex
 import shutil
+import string
 import subprocess
 import sys
 import tempfile
 import threading
 import time
 import zipfile
+from hashlib import sha1, sha256
 
 import blockimgdiff
-
-from hashlib import sha1 as sha1
-
+import sparse_img
 
 class Options(object):
   def __init__(self):
@@ -78,7 +78,7 @@
 
 
 # The partitions allowed to be signed by AVB (Android verified boot 2.0).
-AVB_PARTITIONS = ('boot', 'recovery', 'system', 'vendor', 'dtbo')
+AVB_PARTITIONS = ('boot', 'recovery', 'system', 'vendor', 'product', 'dtbo')
 
 
 class ErrorCode(object):
@@ -126,6 +126,11 @@
   return subprocess.Popen(args, **kwargs)
 
 
+def RoundUpTo4K(value):
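+  # Round value up to the next 4096-byte boundary, e.g. RoundUpTo4K(1) == 4096
+  # and RoundUpTo4K(4097) == 8192.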
+  rounded_up = value + 4095
+  return rounded_up - (rounded_up % 4096)
+
+
 def CloseInheritedPipes():
   """ Gmake in MAC OS has file descriptor (PIPE) leak. We close those fds
   before doing other work."""
@@ -216,21 +221,6 @@
             vendor_base_fs_file,))
         del d["vendor_base_fs_file"]
 
-  try:
-    data = read_helper("META/imagesizes.txt")
-    for line in data.split("\n"):
-      if not line:
-        continue
-      name, value = line.split(" ", 1)
-      if not value:
-        continue
-      if name == "blocksize":
-        d[name] = value
-      else:
-        d[name + "_size"] = value
-  except KeyError:
-    pass
-
   def makeint(key):
     if key in d:
       d[key] = int(d[key], 0)
@@ -259,6 +249,20 @@
 
   d["build.prop"] = LoadBuildProp(read_helper, 'SYSTEM/build.prop')
   d["vendor.build.prop"] = LoadBuildProp(read_helper, 'VENDOR/build.prop')
+
+  # Set up the salt (based on fingerprint or thumbprint) that will be used when
+  # adding the AVB footer.
+  if d.get("avb_enable") == "true":
+    fp = None
+    if "build.prop" in d:
+      build_prop = d["build.prop"]
+      if "ro.build.fingerprint" in build_prop:
+        fp = build_prop["ro.build.fingerprint"]
+      elif "ro.build.thumbprint" in build_prop:
+        fp = build_prop["ro.build.thumbprint"]
+    if fp:
+      d["avb_salt"] = sha256(fp).hexdigest()
+
   return d
 
 
@@ -606,6 +610,65 @@
   return tmp, zipfile.ZipFile(filename, "r")
 
 
+def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks):
+  """Returns a SparseImage object suitable for passing to BlockImageDiff.
+
+  This function loads the specified sparse image from the given path, and
+  performs additional processing for OTA purposes. For example, it always adds
+  block 0 to the clobbered blocks list. It also detects files that cannot be
+  reconstructed from the block list, for which imgdiff should be avoided.
+
+  Args:
+    which: The partition name, which must be "system" or "vendor".
+    tmpdir: The directory that contains the prebuilt image and block map file.
+    input_zip: The target-files ZIP archive.
+    allow_shared_blocks: Whether having shared blocks is allowed.
+
+  Returns:
+    A SparseImage object, with file_map info loaded.
+  """
+  assert which in ("system", "vendor")
+
+  path = os.path.join(tmpdir, "IMAGES", which + ".img")
+  mappath = os.path.join(tmpdir, "IMAGES", which + ".map")
+
+  # The image and map files must have been created prior to calling
+  # ota_from_target_files.py (since LMP).
+  assert os.path.exists(path) and os.path.exists(mappath)
+
+  # In ext4 filesystems, block 0 might be changed even when the filesystem is
+  # mounted R/O. Add it to clobbered_blocks so that it will be written to the
+  # target unconditionally. Note that clobbered blocks are still part of the
+  # care_map. (Bug: 20939131)
+  clobbered_blocks = "0"
+
+  image = sparse_img.SparseImage(path, mappath, clobbered_blocks,
+                                 allow_shared_blocks=allow_shared_blocks)
+
+  # block.map may contain fewer blocks, because mke2fs may skip allocating
+  # blocks if they contain all zeros. We can't reconstruct such a file from its
+  # block list. Tag such entries accordingly. (Bug: 65213616)
+  for entry in image.file_map:
+    # "/system/framework/am.jar" => "SYSTEM/framework/am.jar".
+    arcname = string.replace(entry, which, which.upper(), 1)[1:]
+    # Skip artificial names, such as "__ZERO", "__NONZERO-1".
+    if arcname not in input_zip.namelist():
+      continue
+
+    info = input_zip.getinfo(arcname)
+    ranges = image.file_map[entry]
+
+    # If a RangeSet has been tagged as using shared blocks while loading the
+    # image, its block list is already incomplete for that reason. Don't give
+    # it the 'incomplete' tag, to avoid messing up the imgdiff stats.
+    if ranges.extra.get('uses_shared_blocks'):
+      continue
+
+    if RoundUpTo4K(info.file_size) > ranges.size() * 4096:
+      ranges.extra['incomplete'] = True
+
+  return image
+
+
 def GetKeyPasswords(keylist):
   """Given a list of keys, prompt the user to enter passwords for
   those which require them.  Return a {key: password} dict.  password
@@ -1385,7 +1448,7 @@
           p.kill()
           th.join()
 
-      if err or p.returncode != 0:
+      if p.returncode != 0:
         print("WARNING: failure running %s:\n%s\n" % (
             diff_program, "".join(err)))
         self.patch = None
@@ -1722,19 +1785,50 @@
 
 
 def ParseCertificate(data):
-  """Parse a PEM-format certificate."""
-  cert = []
+  """Parses and converts a PEM-encoded certificate into DER-encoded.
+
+  This gives the same result as `openssl x509 -in <filename> -outform DER`.
+
+  Returns:
+    The certificate as a DER-encoded string.
+  """
+  cert_buffer = []
   save = False
   for line in data.split("\n"):
     if "--END CERTIFICATE--" in line:
       break
     if save:
-      cert.append(line)
+      cert_buffer.append(line)
     if "--BEGIN CERTIFICATE--" in line:
       save = True
-  cert = "".join(cert).decode('base64')
+  cert = "".join(cert_buffer).decode('base64')
   return cert
 
+
+def ExtractPublicKey(cert):
+  """Extracts the public key (PEM-encoded) from the given certificate file.
+
+  Args:
+    cert: The certificate filename.
+
+  Returns:
+    The public key string.
+
+  Raises:
+    AssertionError: On non-zero return from 'openssl'.
+  """
+  # The behavior of '-out' differs between openssl 1.1 and openssl 1.0. While
+  # openssl 1.1 writes the key into the file named by '-out', openssl 1.0
+  # (both 1.0.1 and 1.0.2) doesn't. So we collect the output from stdout
+  # instead.
+  cmd = ['openssl', 'x509', '-pubkey', '-noout', '-in', cert]
+  proc = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+  pubkey, stderrdata = proc.communicate()
+  assert proc.returncode == 0, \
+      'Failed to dump public key from certificate: %s\n%s' % (cert, stderrdata)
+  return pubkey
+
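A short sketch of how the two certificate helpers above differ, assuming a hypothetical PEM certificate file 'testkey.x509.pem':

    # ParseCertificate() takes the PEM text and returns the DER-encoded bytes,
    # while ExtractPublicKey() takes a filename and returns a PEM public key.
    with open('testkey.x509.pem') as f:
      der_cert = ParseCertificate(f.read())
    pem_pubkey = ExtractPublicKey('testkey.x509.pem')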
+
 def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
                       info_dict=None):
   """Generate a binary patch that creates the recovery image starting
diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py
index 0c44faf..7a81928 100644
--- a/tools/releasetools/edify_generator.py
+++ b/tools/releasetools/edify_generator.py
@@ -77,14 +77,14 @@
     with temporary=True) to this one."""
     self.script.extend(other.script)
 
-  def AssertOemProperty(self, name, values):
+  def AssertOemProperty(self, name, values, oem_no_mount):
     """Assert that a property on the OEM partition matches allowed values."""
     if not name:
       raise ValueError("must specify an OEM property")
     if not values:
       raise ValueError("must specify the OEM value")
-    get_prop_command = None
-    if common.OPTIONS.oem_no_mount:
+
+    if oem_no_mount:
       get_prop_command = 'getprop("%s")' % name
     else:
       get_prop_command = 'file_getprop("/oem/oem.prop", "%s")' % name
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index d5ac922..6e3ef0a 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -49,8 +49,10 @@
 
   -o  (--oem_settings)  <main_file[,additional_files...]>
       Comma-separated list of files used to specify the expected OEM-specific
-      properties on the OEM partition of the intended device.
-      Multiple expected values can be used by providing multiple files.
+      properties on the OEM partition of the intended device. Multiple expected
+      values can be used by providing multiple files. Only the first dict will
+      be used to compute the fingerprint, while the rest will be used to assert
+      OEM-specific properties.
 
   --oem_no_mount
       For devices with OEM-specific properties but without an OEM partition,
@@ -90,6 +92,24 @@
       first, so that any changes made to the system partition are done
       using the new recovery (new kernel, etc.).
 
+  --include_secondary
+      Additionally include the payload for secondary slot images (default:
+      False). Only meaningful when generating A/B OTAs.
+
+      By default, an A/B OTA package doesn't contain the images for the
+      secondary slot (e.g. system_other.img). Specifying this flag allows
+      generating a separate payload that will install secondary slot images.
+
+      Such a package needs to be applied in a two-stage manner, with a reboot
+      in-between. During the first stage, the updater applies the primary
+      payload only. Upon finishing, it reboots the device into the newly updated
+      slot. It then continues to install the secondary payload to the inactive
+      slot, but without switching the active slot at the end (needs the matching
+      support in update_engine, i.e. SWITCH_SLOT_ON_REBOOT flag).
+
+      Due to the special install procedure, the secondary payload will always
+      be generated as a full payload.
+
   --block
       Generate a block-based OTA for non-A/B device. We have deprecated the
       support for file-based OTA since O. Block-based OTA will be used by
@@ -124,26 +144,33 @@
 
   --payload_signer_args <args>
       Specify the arguments needed for payload signer.
+
+  --skip_postinstall
+      Skip the postinstall hooks when generating an A/B OTA package (default:
+      False). Note that this discards ALL the hooks, including non-optional
+      ones. Should only be used if the caller knows it's safe to do so (e.g.
+      all the postinstall work is to dexopt apps and a data wipe will happen
+      immediately after). Only meaningful when generating A/B OTAs.
 """
 
 from __future__ import print_function
 
-import sys
-
-if sys.hexversion < 0x02070000:
-  print("Python 2.7 or newer is required.", file=sys.stderr)
-  sys.exit(1)
-
 import multiprocessing
 import os.path
-import subprocess
 import shlex
+import shutil
+import subprocess
+import sys
 import tempfile
 import zipfile
 
 import common
 import edify_generator
-import sparse_img
+
+if sys.hexversion < 0x02070000:
+  print("Python 2.7 or newer is required.", file=sys.stderr)
+  sys.exit(1)
+
 
 OPTIONS = common.OPTIONS
 OPTIONS.package_key = None
@@ -158,12 +185,12 @@
 if OPTIONS.worker_threads == 0:
   OPTIONS.worker_threads = 1
 OPTIONS.two_step = False
+OPTIONS.include_secondary = False
 OPTIONS.no_signing = False
 OPTIONS.block_based = True
 OPTIONS.updater_binary = None
 OPTIONS.oem_source = None
 OPTIONS.oem_no_mount = False
-OPTIONS.fallback_to_full = True
 OPTIONS.full_radio = False
 OPTIONS.full_bootloader = False
 # Stash size cannot exceed cache_size * threshold.
@@ -174,11 +201,329 @@
 OPTIONS.payload_signer_args = []
 OPTIONS.extracted_input = None
 OPTIONS.key_passwords = []
+OPTIONS.skip_postinstall = False
+
 
 METADATA_NAME = 'META-INF/com/android/metadata'
+POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
 UNZIP_PATTERN = ['IMAGES/*', 'META/*']
 
 
+class BuildInfo(object):
+  """A class that holds the information for a given build.
+
+  This class wraps up the property querying for a given source or target build.
+  It abstracts away the logic of handling OEM-specific properties, and caches
+  the commonly used properties such as fingerprint.
+
+  There are two types of info dicts: a) the build-time info dict, which is
+  generated at build time (i.e. included in a target_files zip); b) the OEM
+  info dict, which is specified at package generation time (via the command
+  line argument '--oem_settings'). If a build doesn't use OEM-specific
+  properties (i.e. "oem_fingerprint_properties" is absent from the build-time
+  info dict), all queries are answered from the build-time info dict alone.
+  Otherwise, some properties are calculated from both info dicts.
+
+  Users can query properties as if using a dict() (e.g. info['fstab']), or
+  query build properties via GetBuildProp() or GetVendorBuildProp().
+
+  Attributes:
+    info_dict: The build-time info dict.
+    is_ab: Whether it's a build that uses A/B OTA.
+    oem_dicts: A list of OEM dicts.
+    oem_props: A list of OEM properties that should be read from OEM dicts; None
+        if the build doesn't use any OEM-specific property.
+    fingerprint: The fingerprint of the build, which would be calculated based
+        on OEM properties if applicable.
+    device: The device name, which could come from OEM dicts if applicable.
+  """
+
+  def __init__(self, info_dict, oem_dicts):
+    """Initializes a BuildInfo instance with the given dicts.
+
+    Args:
+      info_dict: The build-time info dict.
+      oem_dicts: A list of OEM dicts (parsed from --oem_settings). Note that it
+          always uses the first dict to calculate the fingerprint or the device
+          name. The rest are used for asserting OEM properties only (e.g. one
+          package can be installed on any of these devices).
+    """
+    self.info_dict = info_dict
+    self.oem_dicts = oem_dicts
+
+    self._is_ab = info_dict.get("ab_update") == "true"
+    self._oem_props = info_dict.get("oem_fingerprint_properties")
+
+    if self._oem_props:
+      assert oem_dicts, "OEM source required for this build"
+
+    # These two should be computed only after setting self._oem_props.
+    self._device = self.GetOemProperty("ro.product.device")
+    self._fingerprint = self.CalculateFingerprint()
+
+  @property
+  def is_ab(self):
+    return self._is_ab
+
+  @property
+  def device(self):
+    return self._device
+
+  @property
+  def fingerprint(self):
+    return self._fingerprint
+
+  @property
+  def oem_props(self):
+    return self._oem_props
+
+  def __getitem__(self, key):
+    return self.info_dict[key]
+
+  def get(self, key, default=None):
+    return self.info_dict.get(key, default)
+
+  def GetBuildProp(self, prop):
+    """Returns the requested build property."""
+    try:
+      return self.info_dict.get("build.prop", {})[prop]
+    except KeyError:
+      raise common.ExternalError("couldn't find %s in build.prop" % (prop,))
+
+  def GetVendorBuildProp(self, prop):
+    """Returns the requested vendor build property."""
+    try:
+      return self.info_dict.get("vendor.build.prop", {})[prop]
+    except KeyError:
+      raise common.ExternalError(
+          "couldn't find %s in vendor.build.prop" % (prop,))
+
+  def GetOemProperty(self, key):
+    if self.oem_props is not None and key in self.oem_props:
+      return self.oem_dicts[0][key]
+    return self.GetBuildProp(key)
+
+  def CalculateFingerprint(self):
+    if self.oem_props is None:
+      return self.GetBuildProp("ro.build.fingerprint")
+    return "%s/%s/%s:%s" % (
+        self.GetOemProperty("ro.product.brand"),
+        self.GetOemProperty("ro.product.name"),
+        self.GetOemProperty("ro.product.device"),
+        self.GetBuildProp("ro.build.thumbprint"))
+
+  def WriteMountOemScript(self, script):
+    assert self.oem_props is not None
+    recovery_mount_options = self.info_dict.get("recovery_mount_options")
+    script.Mount("/oem", recovery_mount_options)
+
+  def WriteDeviceAssertions(self, script, oem_no_mount):
+    # Read the property directly if not using OEM properties.
+    if not self.oem_props:
+      script.AssertDevice(self.device)
+      return
+
+    # Otherwise assert OEM properties.
+    if not self.oem_dicts:
+      raise common.ExternalError(
+          "No OEM file provided to answer expected assertions")
+
+    for prop in self.oem_props.split():
+      values = []
+      for oem_dict in self.oem_dicts:
+        if prop in oem_dict:
+          values.append(oem_dict[prop])
+      if not values:
+        raise common.ExternalError(
+            "The OEM file is missing the property %s" % (prop,))
+      script.AssertOemProperty(prop, values, oem_no_mount)
+
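A brief usage sketch for BuildInfo, assuming the OPTIONS dicts that the script loads at startup; this mirrors how the Write*OTAPackage() functions below construct and query it:

    target_info = BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
    print(target_info.fingerprint)                         # cached property
    print(target_info["recovery_api_version"])             # dict-style access
    print(target_info.GetBuildProp("ro.build.date.utc"))   # build.prop lookup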
+
+class PayloadSigner(object):
+  """A class that wraps the payload signing work.
+
+  When generating a Payload, hashes of the payload and metadata files will be
+  signed with the device key, either by calling an external payload signer or
+  by calling openssl with the package key. This class provides a unified
+  interface, so that callers can just call PayloadSigner.Sign().
+
+  If an external payload signer has been specified (OPTIONS.payload_signer), it
+  calls the signer with the provided args (OPTIONS.payload_signer_args). Note
+  that the signing key should be provided as part of the payload_signer_args.
+  Otherwise without an external signer, it uses the package key
+  (OPTIONS.package_key) and calls openssl for the signing work.
+  """
+
+  def __init__(self):
+    if OPTIONS.payload_signer is None:
+      # Prepare the payload signing key.
+      private_key = OPTIONS.package_key + OPTIONS.private_key_suffix
+      pw = OPTIONS.key_passwords[OPTIONS.package_key]
+
+      cmd = ["openssl", "pkcs8", "-in", private_key, "-inform", "DER"]
+      cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
+      signing_key = common.MakeTempFile(prefix="key-", suffix=".key")
+      cmd.extend(["-out", signing_key])
+
+      get_signing_key = common.Run(cmd, verbose=False, stdout=subprocess.PIPE,
+                                   stderr=subprocess.STDOUT)
+      stdoutdata, _ = get_signing_key.communicate()
+      assert get_signing_key.returncode == 0, \
+          "Failed to get signing key: {}".format(stdoutdata)
+
+      self.signer = "openssl"
+      self.signer_args = ["pkeyutl", "-sign", "-inkey", signing_key,
+                          "-pkeyopt", "digest:sha256"]
+    else:
+      self.signer = OPTIONS.payload_signer
+      self.signer_args = OPTIONS.payload_signer_args
+
+  def Sign(self, in_file):
+    """Signs the given input file. Returns the output filename."""
+    out_file = common.MakeTempFile(prefix="signed-", suffix=".bin")
+    cmd = [self.signer] + self.signer_args + ['-in', in_file, '-out', out_file]
+    signing = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    stdoutdata, _ = signing.communicate()
+    assert signing.returncode == 0, \
+        "Failed to sign the input file: {}".format(stdoutdata)
+    return out_file
+
+
+class Payload(object):
+  """Manages the creation and the signing of an A/B OTA Payload."""
+
+  PAYLOAD_BIN = 'payload.bin'
+  PAYLOAD_PROPERTIES_TXT = 'payload_properties.txt'
+  SECONDARY_PAYLOAD_BIN = 'secondary/payload.bin'
+  SECONDARY_PAYLOAD_PROPERTIES_TXT = 'secondary/payload_properties.txt'
+
+  def __init__(self, secondary=False):
+    """Initializes a Payload instance.
+
+    Args:
+      secondary: Whether it's generating a secondary payload (default: False).
+    """
+    # The place where the output from the subprocess should go.
+    self._log_file = sys.stdout if OPTIONS.verbose else subprocess.PIPE
+    self.payload_file = None
+    self.payload_properties = None
+    self.secondary = secondary
+
+  def Generate(self, target_file, source_file=None, additional_args=None):
+    """Generates a payload from the given target-files zip(s).
+
+    Args:
+      target_file: The filename of the target build target-files zip.
+      source_file: The filename of the source build target-files zip; or None if
+          generating a full OTA.
+      additional_args: A list of additional args that should be passed to
+          brillo_update_payload script; or None.
+    """
+    if additional_args is None:
+      additional_args = []
+
+    payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
+    cmd = ["brillo_update_payload", "generate",
+           "--payload", payload_file,
+           "--target_image", target_file]
+    if source_file is not None:
+      cmd.extend(["--source_image", source_file])
+    cmd.extend(additional_args)
+    p = common.Run(cmd, stdout=self._log_file, stderr=subprocess.STDOUT)
+    stdoutdata, _ = p.communicate()
+    assert p.returncode == 0, \
+        "brillo_update_payload generate failed: {}".format(stdoutdata)
+
+    self.payload_file = payload_file
+    self.payload_properties = None
+
+  def Sign(self, payload_signer):
+    """Generates and signs the hashes of the payload and metadata.
+
+    Args:
+      payload_signer: A PayloadSigner() instance that does the signing work.
+
+    Raises:
+      AssertionError: On any failure when calling brillo_update_payload script.
+    """
+    assert isinstance(payload_signer, PayloadSigner)
+
+    # 1. Generate hashes of the payload and metadata files.
+    payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+    metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+    cmd = ["brillo_update_payload", "hash",
+           "--unsigned_payload", self.payload_file,
+           "--signature_size", "256",
+           "--metadata_hash_file", metadata_sig_file,
+           "--payload_hash_file", payload_sig_file]
+    p1 = common.Run(cmd, stdout=self._log_file, stderr=subprocess.STDOUT)
+    p1.communicate()
+    assert p1.returncode == 0, "brillo_update_payload hash failed"
+
+    # 2. Sign the hashes.
+    signed_payload_sig_file = payload_signer.Sign(payload_sig_file)
+    signed_metadata_sig_file = payload_signer.Sign(metadata_sig_file)
+
+    # 3. Insert the signatures back into the payload file.
+    signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
+                                              suffix=".bin")
+    cmd = ["brillo_update_payload", "sign",
+           "--unsigned_payload", self.payload_file,
+           "--payload", signed_payload_file,
+           "--signature_size", "256",
+           "--metadata_signature_file", signed_metadata_sig_file,
+           "--payload_signature_file", signed_payload_sig_file]
+    p1 = common.Run(cmd, stdout=self._log_file, stderr=subprocess.STDOUT)
+    p1.communicate()
+    assert p1.returncode == 0, "brillo_update_payload sign failed"
+
+    # 4. Dump the signed payload properties.
+    properties_file = common.MakeTempFile(prefix="payload-properties-",
+                                          suffix=".txt")
+    cmd = ["brillo_update_payload", "properties",
+           "--payload", signed_payload_file,
+           "--properties_file", properties_file]
+    p1 = common.Run(cmd, stdout=self._log_file, stderr=subprocess.STDOUT)
+    p1.communicate()
+    assert p1.returncode == 0, "brillo_update_payload properties failed"
+
+    if self.secondary:
+      with open(properties_file, "a") as f:
+        f.write("SWITCH_SLOT_ON_REBOOT=0\n")
+
+    if OPTIONS.wipe_user_data:
+      with open(properties_file, "a") as f:
+        f.write("POWERWASH=1\n")
+
+    self.payload_file = signed_payload_file
+    self.payload_properties = properties_file
+
+  def WriteToZip(self, output_zip):
+    """Writes the payload to the given zip.
+
+    Args:
+      output_zip: The output ZipFile instance.
+    """
+    assert self.payload_file is not None
+    assert self.payload_properties is not None
+
+    if self.secondary:
+      payload_arcname = Payload.SECONDARY_PAYLOAD_BIN
+      payload_properties_arcname = Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT
+    else:
+      payload_arcname = Payload.PAYLOAD_BIN
+      payload_properties_arcname = Payload.PAYLOAD_PROPERTIES_TXT
+
+    # Add the signed payload file and properties into the zip. In order to
+    # support streaming, we pack them as ZIP_STORED. So these entries can be
+    # read directly with the offset and length pairs.
+    common.ZipWrite(output_zip, self.payload_file, arcname=payload_arcname,
+                    compress_type=zipfile.ZIP_STORED)
+    common.ZipWrite(output_zip, self.payload_properties,
+                    arcname=payload_properties_arcname,
+                    compress_type=zipfile.ZIP_STORED)
+
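The generate/sign/write flow for PayloadSigner and Payload, sketched here as it is used later in WriteABOTAPackageWithBrilloScript(); target_file, source_file and output_zip are assumed from that context:

    payload = Payload()
    payload.Generate(target_file, source_file)  # source_file=None for full OTA
    payload.Sign(PayloadSigner())
    payload.WriteToZip(output_zip)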
+
 def SignOutput(temp_zip_name, output_zip_name):
   pw = OPTIONS.key_passwords[OPTIONS.package_key]
 
@@ -186,37 +531,15 @@
                   whole_file=True)
 
 
-def AppendAssertions(script, info_dict, oem_dicts=None):
-  oem_props = info_dict.get("oem_fingerprint_properties")
-  if not oem_props:
-    device = GetBuildProp("ro.product.device", info_dict)
-    script.AssertDevice(device)
-  else:
-    if not oem_dicts:
-      raise common.ExternalError(
-          "No OEM file provided to answer expected assertions")
-    for prop in oem_props.split():
-      values = []
-      for oem_dict in oem_dicts:
-        if oem_dict.get(prop):
-          values.append(oem_dict[prop])
-      if not values:
-        raise common.ExternalError(
-            "The OEM file is missing the property %s" % prop)
-      script.AssertOemProperty(prop, values)
-
-
-def _LoadOemDicts(script, recovery_mount_options=None):
+def _LoadOemDicts(oem_source):
   """Returns the list of loaded OEM properties dict."""
-  oem_dicts = None
-  if OPTIONS.oem_source is None:
-    raise common.ExternalError("OEM source required for this build")
-  if not OPTIONS.oem_no_mount and script:
-    script.Mount("/oem", recovery_mount_options)
+  if not oem_source:
+    return None
+
   oem_dicts = []
-  for oem_file in OPTIONS.oem_source:
-    oem_dicts.append(common.LoadDictionaryFromLines(
-        open(oem_file).readlines()))
+  for oem_file in oem_source:
+    with open(oem_file) as fp:
+      oem_dicts.append(common.LoadDictionaryFromLines(fp.readlines()))
   return oem_dicts
 
 
@@ -267,55 +590,34 @@
     return False
 
 
-def HasTrebleEnabled(target_files_zip, info_dict):
+def HasTrebleEnabled(target_files_zip, target_info):
   return (HasVendorPartition(target_files_zip) and
-          GetBuildProp("ro.treble.enabled", info_dict) == "true")
+          target_info.GetBuildProp("ro.treble.enabled") == "true")
 
 
-def GetOemProperty(name, oem_props, oem_dict, info_dict):
-  if oem_props is not None and name in oem_props:
-    return oem_dict[name]
-  return GetBuildProp(name, info_dict)
+def WriteFingerprintAssertion(script, target_info, source_info):
+  source_oem_props = source_info.oem_props
+  target_oem_props = target_info.oem_props
+
+  if source_oem_props is None and target_oem_props is None:
+    script.AssertSomeFingerprint(
+        source_info.fingerprint, target_info.fingerprint)
+  elif source_oem_props is not None and target_oem_props is not None:
+    script.AssertSomeThumbprint(
+        target_info.GetBuildProp("ro.build.thumbprint"),
+        source_info.GetBuildProp("ro.build.thumbprint"))
+  elif source_oem_props is None and target_oem_props is not None:
+    script.AssertFingerprintOrThumbprint(
+        source_info.fingerprint,
+        target_info.GetBuildProp("ro.build.thumbprint"))
+  else:
+    script.AssertFingerprintOrThumbprint(
+        target_info.fingerprint,
+        source_info.GetBuildProp("ro.build.thumbprint"))
 
 
-def CalculateFingerprint(oem_props, oem_dict, info_dict):
-  if oem_props is None:
-    return GetBuildProp("ro.build.fingerprint", info_dict)
-  return "%s/%s/%s:%s" % (
-      GetOemProperty("ro.product.brand", oem_props, oem_dict, info_dict),
-      GetOemProperty("ro.product.name", oem_props, oem_dict, info_dict),
-      GetOemProperty("ro.product.device", oem_props, oem_dict, info_dict),
-      GetBuildProp("ro.build.thumbprint", info_dict))
-
-
-def GetImage(which, tmpdir):
-  """Returns an image object suitable for passing to BlockImageDiff.
-
-  'which' partition must be "system" or "vendor". A prebuilt image and file
-  map must already exist in tmpdir.
-  """
-
-  assert which in ("system", "vendor")
-
-  path = os.path.join(tmpdir, "IMAGES", which + ".img")
-  mappath = os.path.join(tmpdir, "IMAGES", which + ".map")
-
-  # The image and map files must have been created prior to calling
-  # ota_from_target_files.py (since LMP).
-  assert os.path.exists(path) and os.path.exists(mappath)
-
-  # Bug: http://b/20939131
-  # In ext4 filesystems, block 0 might be changed even being mounted
-  # R/O. We add it to clobbered_blocks so that it will be written to the
-  # target unconditionally. Note that they are still part of care_map.
-  clobbered_blocks = "0"
-
-  return sparse_img.SparseImage(path, mappath, clobbered_blocks)
-
-
-def AddCompatibilityArchiveIfTrebleEnabled(target_zip, output_zip,
-                                           target_info_dict,
-                                           source_info_dict=None):
+def AddCompatibilityArchiveIfTrebleEnabled(target_zip, output_zip, target_info,
+                                           source_info=None):
   """Adds compatibility info into the output zip if it's Treble-enabled target.
 
   Metadata used for on-device compatibility verification is retrieved from
@@ -328,9 +630,9 @@
   Args:
     target_zip: Zip file containing the source files to be included for OTA.
     output_zip: Zip file that will be sent for OTA.
-    target_info_dict: The dict that holds the target build info.
-    source_info_dict: The dict that holds the source build info, if generating
-        an incremental OTA; None otherwise.
+    target_info: The BuildInfo instance that holds the target build info.
+    source_info: The BuildInfo instance that holds the source build info, if
+        generating an incremental OTA; None otherwise.
   """
 
   def AddCompatibilityArchive(system_updated, vendor_updated):
@@ -353,8 +655,8 @@
 
     # Create new archive.
     compatibility_archive = tempfile.NamedTemporaryFile()
-    compatibility_archive_zip = zipfile.ZipFile(compatibility_archive, "w",
-        compression=zipfile.ZIP_DEFLATED)
+    compatibility_archive_zip = zipfile.ZipFile(
+        compatibility_archive, "w", compression=zipfile.ZIP_DEFLATED)
 
     # Add metadata.
     for file_name in compatibility_files:
@@ -375,59 +677,50 @@
 
   # Will only proceed if the target has enabled the Treble support (as well as
   # having a /vendor partition).
-  if not HasTrebleEnabled(target_zip, target_info_dict):
+  if not HasTrebleEnabled(target_zip, target_info):
     return
 
   # We don't support OEM thumbprint in Treble world (which calculates
   # fingerprints in a different way as shown in CalculateFingerprint()).
-  assert not target_info_dict.get("oem_fingerprint_properties")
+  assert not target_info.oem_props
 
   # Full OTA carries the info for system/vendor both.
-  if source_info_dict is None:
+  if source_info is None:
     AddCompatibilityArchive(True, True)
     return
 
-  assert not source_info_dict.get("oem_fingerprint_properties")
+  assert not source_info.oem_props
 
-  source_fp = GetBuildProp("ro.build.fingerprint", source_info_dict)
-  target_fp = GetBuildProp("ro.build.fingerprint", target_info_dict)
+  source_fp = source_info.fingerprint
+  target_fp = target_info.fingerprint
   system_updated = source_fp != target_fp
 
-  source_fp_vendor = GetVendorBuildProp("ro.vendor.build.fingerprint",
-                                        source_info_dict)
-  target_fp_vendor = GetVendorBuildProp("ro.vendor.build.fingerprint",
-                                        target_info_dict)
+  source_fp_vendor = source_info.GetVendorBuildProp(
+      "ro.vendor.build.fingerprint")
+  target_fp_vendor = target_info.GetVendorBuildProp(
+      "ro.vendor.build.fingerprint")
   vendor_updated = source_fp_vendor != target_fp_vendor
 
   AddCompatibilityArchive(system_updated, vendor_updated)
 
 
 def WriteFullOTAPackage(input_zip, output_zip):
-  # TODO: how to determine this?  We don't know what version it will
-  # be installed on top of. For now, we expect the API just won't
-  # change very often. Similarly for fstab, it might have changed
-  # in the target build.
-  script = edify_generator.EdifyGenerator(3, OPTIONS.info_dict)
+  target_info = BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
 
-  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
-  oem_dicts = None
-  if oem_props:
-    recovery_mount_options = OPTIONS.info_dict.get("recovery_mount_options")
-    oem_dicts = _LoadOemDicts(script, recovery_mount_options)
+  # We don't know what version it will be installed on top of. We expect the API
+  # just won't change very often. Similarly for fstab, it might have changed in
+  # the target build.
+  target_api_version = target_info["recovery_api_version"]
+  script = edify_generator.EdifyGenerator(target_api_version, target_info)
 
-  target_fp = CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
-                                   OPTIONS.info_dict)
-  metadata = {
-      "post-build": target_fp,
-      "pre-device": GetOemProperty("ro.product.device", oem_props,
-                                   oem_dicts and oem_dicts[0],
-                                   OPTIONS.info_dict),
-      "post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict),
-  }
+  if target_info.oem_props and not OPTIONS.oem_no_mount:
+    target_info.WriteMountOemScript(script)
+
+  metadata = GetPackageMetadata(target_info)
 
   device_specific = common.DeviceSpecificParams(
       input_zip=input_zip,
-      input_version=OPTIONS.info_dict["recovery_api_version"],
+      input_version=target_api_version,
       output_zip=output_zip,
       script=script,
       input_tmp=OPTIONS.input_tmp,
@@ -436,13 +729,12 @@
 
   assert HasRecoveryPatch(input_zip)
 
-  metadata["ota-type"] = "BLOCK"
-
-  ts = GetBuildProp("ro.build.date.utc", OPTIONS.info_dict)
-  ts_text = GetBuildProp("ro.build.date", OPTIONS.info_dict)
+  # Assertions (e.g. downgrade check, device properties check).
+  ts = target_info.GetBuildProp("ro.build.date.utc")
+  ts_text = target_info.GetBuildProp("ro.build.date")
   script.AssertOlderBuild(ts, ts_text)
 
-  AppendAssertions(script, OPTIONS.info_dict, oem_dicts)
+  target_info.WriteDeviceAssertions(script, OPTIONS.oem_no_mount)
   device_specific.FullOTA_Assertions()
 
   # Two-step package strategy (in chronological order, which is *not*
@@ -468,9 +760,9 @@
   recovery_img = common.GetBootableImage("recovery.img", "recovery.img",
                                          OPTIONS.input_tmp, "RECOVERY")
   if OPTIONS.two_step:
-    if not OPTIONS.info_dict.get("multistage_support", None):
+    if not target_info.get("multistage_support"):
       assert False, "two-step packages not supported by this build"
-    fs = OPTIONS.info_dict["fstab"]["/misc"]
+    fs = target_info["fstab"]["/misc"]
     assert fs.fs_type.upper() == "EMMC", \
         "two-step packages only supported on devices with EMMC /misc partitions"
     bcb_dev = {"bcb_dev": fs.device}
@@ -492,7 +784,7 @@
     script.Comment("Stage 3/3")
 
   # Dump fingerprints
-  script.Print("Target: %s" % target_fp)
+  script.Print("Target: {}".format(target_info.fingerprint))
 
   device_specific.FullOTA_InstallBegin()
 
@@ -505,11 +797,15 @@
 
   script.ShowProgress(system_progress, 0)
 
+  # See the notes in WriteBlockIncrementalOTAPackage().
+  allow_shared_blocks = target_info.get('ext4_share_dup_blocks') == "true"
+
   # Full OTA is done as an "incremental" against an empty source image. This
   # has the effect of writing new data from the package to the entire
   # partition, but lets us reuse the updater code that writes incrementals to
   # do it.
-  system_tgt = GetImage("system", OPTIONS.input_tmp)
+  system_tgt = common.GetSparseImage("system", OPTIONS.input_tmp, input_zip,
+                                     allow_shared_blocks)
   system_tgt.ResetFileMap()
   system_diff = common.BlockDifference("system", system_tgt, src=None)
   system_diff.WriteScript(script, output_zip)
@@ -520,15 +816,15 @@
   if HasVendorPartition(input_zip):
     script.ShowProgress(0.1, 0)
 
-    vendor_tgt = GetImage("vendor", OPTIONS.input_tmp)
+    vendor_tgt = common.GetSparseImage("vendor", OPTIONS.input_tmp, input_zip,
+                                       allow_shared_blocks)
     vendor_tgt.ResetFileMap()
     vendor_diff = common.BlockDifference("vendor", vendor_tgt)
     vendor_diff.WriteScript(script, output_zip)
 
-  AddCompatibilityArchiveIfTrebleEnabled(input_zip, output_zip,
-                                         OPTIONS.info_dict)
+  AddCompatibilityArchiveIfTrebleEnabled(input_zip, output_zip, target_info)
 
-  common.CheckSize(boot_img.data, "boot.img", OPTIONS.info_dict)
+  common.CheckSize(boot_img.data, "boot.img", target_info)
   common.ZipWriteStr(output_zip, "boot.img", boot_img.data)
 
   script.ShowProgress(0.05, 5)
@@ -575,29 +871,12 @@
                      compress_type=zipfile.ZIP_STORED)
 
 
-def GetBuildProp(prop, info_dict):
-  """Returns the inquired build property from a given info_dict."""
-  try:
-    return info_dict.get("build.prop", {})[prop]
-  except KeyError:
-    raise common.ExternalError("couldn't find %s in build.prop" % (prop,))
-
-
-def GetVendorBuildProp(prop, info_dict):
-  """Returns the inquired vendor build property from a given info_dict."""
-  try:
-    return info_dict.get("vendor.build.prop", {})[prop]
-  except KeyError:
-    raise common.ExternalError(
-        "couldn't find %s in vendor.build.prop" % (prop,))
-
-
-def HandleDowngradeMetadata(metadata):
+def HandleDowngradeMetadata(metadata, target_info, source_info):
   # Only incremental OTAs are allowed to reach here.
   assert OPTIONS.incremental_source is not None
 
-  post_timestamp = GetBuildProp("ro.build.date.utc", OPTIONS.target_info_dict)
-  pre_timestamp = GetBuildProp("ro.build.date.utc", OPTIONS.source_info_dict)
+  post_timestamp = target_info.GetBuildProp("ro.build.date.utc")
+  pre_timestamp = source_info.GetBuildProp("ro.build.date.utc")
   is_downgrade = long(post_timestamp) < long(pre_timestamp)
 
   if OPTIONS.downgrade:
@@ -607,95 +886,138 @@
     metadata["ota-downgrade"] = "yes"
   elif OPTIONS.timestamp:
     if not is_downgrade:
-      raise RuntimeError("--timestamp specified but no timestamp hack needed: "
-                         "pre: %s, post: %s" % (pre_timestamp, post_timestamp))
+      raise RuntimeError("--override_timestamp specified but no timestamp hack "
+                         "needed: pre: %s, post: %s" % (pre_timestamp,
+                                                        post_timestamp))
     metadata["post-timestamp"] = str(long(pre_timestamp) + 1)
   else:
     if is_downgrade:
       raise RuntimeError("Downgrade detected based on timestamp check: "
-                         "pre: %s, post: %s. Need to specify --timestamp OR "
-                         "--downgrade to allow building the incremental." % (
-                             pre_timestamp, post_timestamp))
+                         "pre: %s, post: %s. Need to specify "
+                         "--override_timestamp OR --downgrade to allow "
+                         "building the incremental." % (pre_timestamp,
+                                                        post_timestamp))
     metadata["post-timestamp"] = post_timestamp
 
 
-def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip):
-  source_version = OPTIONS.source_info_dict["recovery_api_version"]
-  target_version = OPTIONS.target_info_dict["recovery_api_version"]
+def GetPackageMetadata(target_info, source_info=None):
+  """Generates and returns the metadata dict.
 
-  if source_version == 0:
-    print("WARNING: generating edify script for a source that "
-          "can't install it.")
-  script = edify_generator.EdifyGenerator(
-      source_version, OPTIONS.target_info_dict,
-      fstab=OPTIONS.source_info_dict["fstab"])
+  It generates a dict() that contains the info to be written into an OTA
+  package (META-INF/com/android/metadata). It also handles the detection of
+  downgrade / timestamp override / data wipe based on the global options.
 
-  source_oem_props = OPTIONS.source_info_dict.get("oem_fingerprint_properties")
-  target_oem_props = OPTIONS.target_info_dict.get("oem_fingerprint_properties")
-  oem_dicts = None
-  if source_oem_props or target_oem_props:
-    recovery_mount_options = OPTIONS.source_info_dict.get(
-        "recovery_mount_options")
-    oem_dicts = _LoadOemDicts(script, recovery_mount_options)
+  Args:
+    target_info: The BuildInfo instance that holds the target build info.
+    source_info: The BuildInfo instance that holds the source build info, or
+        None if generating a full OTA.
+
+  Returns:
+    A dict to be written into the package metadata entry.
+  """
+  assert isinstance(target_info, BuildInfo)
+  assert source_info is None or isinstance(source_info, BuildInfo)
 
   metadata = {
-      "pre-device": GetOemProperty("ro.product.device", source_oem_props,
-                                   oem_dicts and oem_dicts[0],
-                                   OPTIONS.source_info_dict),
-      "ota-type": "BLOCK",
+      'post-build' : target_info.fingerprint,
+      'post-build-incremental' : target_info.GetBuildProp(
+          'ro.build.version.incremental'),
+      'post-sdk-level' : target_info.GetBuildProp(
+          'ro.build.version.sdk'),
+      'post-security-patch-level' : target_info.GetBuildProp(
+          'ro.build.version.security_patch'),
   }
 
-  HandleDowngradeMetadata(metadata)
+  if target_info.is_ab:
+    metadata['ota-type'] = 'AB'
+    metadata['ota-required-cache'] = '0'
+  else:
+    metadata['ota-type'] = 'BLOCK'
+
+  if OPTIONS.wipe_user_data:
+    metadata['ota-wipe'] = 'yes'
+
+  is_incremental = source_info is not None
+  if is_incremental:
+    metadata['pre-build'] = source_info.fingerprint
+    metadata['pre-build-incremental'] = source_info.GetBuildProp(
+        'ro.build.version.incremental')
+    metadata['pre-device'] = source_info.device
+  else:
+    metadata['pre-device'] = target_info.device
+
+  # Detect downgrades, or fill in the post-timestamp.
+  if is_incremental:
+    HandleDowngradeMetadata(metadata, target_info, source_info)
+  else:
+    metadata['post-timestamp'] = target_info.GetBuildProp('ro.build.date.utc')
+
+  return metadata
+
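For a full A/B OTA the returned dict would look roughly like the following; the property values here are illustrative only:

    # {'post-build': 'brand/product/device:8.1.0/OPM1.171019.011/123:user/release-keys',
    #  'post-build-incremental': '123',
    #  'post-sdk-level': '27',
    #  'post-security-patch-level': '2018-01-05',
    #  'ota-type': 'AB',
    #  'ota-required-cache': '0',
    #  'pre-device': 'device',
    #  'post-timestamp': '1515000000'}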
+
+def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip):
+  target_info = BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
+  source_info = BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
+
+  target_api_version = target_info["recovery_api_version"]
+  source_api_version = source_info["recovery_api_version"]
+  if source_api_version == 0:
+    print("WARNING: generating edify script for a source that "
+          "can't install it.")
+
+  script = edify_generator.EdifyGenerator(
+      source_api_version, target_info, fstab=source_info["fstab"])
+
+  if target_info.oem_props or source_info.oem_props:
+    if not OPTIONS.oem_no_mount:
+      source_info.WriteMountOemScript(script)
+
+  metadata = GetPackageMetadata(target_info, source_info)
 
   device_specific = common.DeviceSpecificParams(
       source_zip=source_zip,
-      source_version=source_version,
+      source_version=source_api_version,
       target_zip=target_zip,
-      target_version=target_version,
+      target_version=target_api_version,
       output_zip=output_zip,
       script=script,
       metadata=metadata,
-      info_dict=OPTIONS.source_info_dict)
-
-  source_fp = CalculateFingerprint(source_oem_props, oem_dicts and oem_dicts[0],
-                                   OPTIONS.source_info_dict)
-  target_fp = CalculateFingerprint(target_oem_props, oem_dicts and oem_dicts[0],
-                                   OPTIONS.target_info_dict)
-  metadata["pre-build"] = source_fp
-  metadata["post-build"] = target_fp
-  metadata["pre-build-incremental"] = GetBuildProp(
-      "ro.build.version.incremental", OPTIONS.source_info_dict)
-  metadata["post-build-incremental"] = GetBuildProp(
-      "ro.build.version.incremental", OPTIONS.target_info_dict)
+      info_dict=source_info)
 
   source_boot = common.GetBootableImage(
-      "/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT",
-      OPTIONS.source_info_dict)
+      "/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT", source_info)
   target_boot = common.GetBootableImage(
-      "/tmp/boot.img", "boot.img", OPTIONS.target_tmp, "BOOT")
+      "/tmp/boot.img", "boot.img", OPTIONS.target_tmp, "BOOT", target_info)
   updating_boot = (not OPTIONS.two_step and
                    (source_boot.data != target_boot.data))
 
   target_recovery = common.GetBootableImage(
       "/tmp/recovery.img", "recovery.img", OPTIONS.target_tmp, "RECOVERY")
 
-  system_src = GetImage("system", OPTIONS.source_tmp)
-  system_tgt = GetImage("system", OPTIONS.target_tmp)
+  # When the target uses 'BOARD_EXT4_SHARE_DUP_BLOCKS := true', images may
+  # contain shared blocks (i.e. some blocks show up in multiple files' block
+  # lists). We can only allocate such shared blocks to the first "owner", and
+  # disable imgdiff for all later occurrences.
+  allow_shared_blocks = (source_info.get('ext4_share_dup_blocks') == "true" or
+                         target_info.get('ext4_share_dup_blocks') == "true")
+  system_src = common.GetSparseImage("system", OPTIONS.source_tmp, source_zip,
+                                     allow_shared_blocks)
+  system_tgt = common.GetSparseImage("system", OPTIONS.target_tmp, target_zip,
+                                     allow_shared_blocks)
 
   blockimgdiff_version = max(
-      int(i) for i in
-      OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
+      int(i) for i in target_info.get("blockimgdiff_versions", "1").split(","))
   assert blockimgdiff_version >= 3
 
   # Check the first block of the source system partition for remount R/W only
   # if the filesystem is ext4.
-  system_src_partition = OPTIONS.source_info_dict["fstab"]["/system"]
+  system_src_partition = source_info["fstab"]["/system"]
   check_first_block = system_src_partition.fs_type == "ext4"
   # Disable using imgdiff for squashfs. 'imgdiff -z' expects input files to be
   # in zip formats. However with squashfs, a) all files are compressed in LZ4;
   # b) the blocks listed in block map may not contain all the bytes for a given
   # file (because they're rounded to be 4K-aligned).
-  system_tgt_partition = OPTIONS.target_info_dict["fstab"]["/system"]
+  system_tgt_partition = target_info["fstab"]["/system"]
   disable_imgdiff = (system_src_partition.fs_type == "squashfs" or
                      system_tgt_partition.fs_type == "squashfs")
   system_diff = common.BlockDifference("system", system_tgt, system_src,
@@ -706,12 +1028,14 @@
   if HasVendorPartition(target_zip):
     if not HasVendorPartition(source_zip):
       raise RuntimeError("can't generate incremental that adds /vendor")
-    vendor_src = GetImage("vendor", OPTIONS.source_tmp)
-    vendor_tgt = GetImage("vendor", OPTIONS.target_tmp)
+    vendor_src = common.GetSparseImage("vendor", OPTIONS.source_tmp, source_zip,
+                                       allow_shared_blocks)
+    vendor_tgt = common.GetSparseImage("vendor", OPTIONS.target_tmp, target_zip,
+                                       allow_shared_blocks)
 
     # Check first block of vendor partition for remount R/W only if
     # disk type is ext4
-    vendor_partition = OPTIONS.source_info_dict["fstab"]["/vendor"]
+    vendor_partition = source_info["fstab"]["/vendor"]
     check_first_block = vendor_partition.fs_type == "ext4"
     disable_imgdiff = vendor_partition.fs_type == "squashfs"
     vendor_diff = common.BlockDifference("vendor", vendor_tgt, vendor_src,
@@ -722,10 +1046,10 @@
     vendor_diff = None
 
   AddCompatibilityArchiveIfTrebleEnabled(
-      target_zip, output_zip, OPTIONS.target_info_dict,
-      OPTIONS.source_info_dict)
+      target_zip, output_zip, target_info, source_info)
 
-  AppendAssertions(script, OPTIONS.target_info_dict, oem_dicts)
+  # Assertions (e.g. device properties check).
+  target_info.WriteDeviceAssertions(script, OPTIONS.oem_no_mount)
   device_specific.IncrementalOTA_Assertions()
 
   # Two-step incremental package strategy (in chronological order,
@@ -751,12 +1075,12 @@
   #    (allow recovery to mark itself finished and reboot)
 
   if OPTIONS.two_step:
-    if not OPTIONS.source_info_dict.get("multistage_support", None):
+    if not source_info.get("multistage_support"):
       assert False, "two-step packages not supported by this build"
-    fs = OPTIONS.source_info_dict["fstab"]["/misc"]
+    fs = source_info["fstab"]["/misc"]
     assert fs.fs_type.upper() == "EMMC", \
         "two-step packages only supported on devices with EMMC /misc partitions"
-    bcb_dev = {"bcb_dev": fs.device}
+    bcb_dev = {"bcb_dev" : fs.device}
     common.ZipWriteStr(output_zip, "recovery.img", target_recovery.data)
     script.AppendExtra("""
 if get_stage("%(bcb_dev)s") == "2/3" then
@@ -776,27 +1100,14 @@
     script.Comment("Stage 1/3")
 
   # Dump fingerprints
-  script.Print("Source: %s" % (source_fp,))
-  script.Print("Target: %s" % (target_fp,))
+  script.Print("Source: {}".format(source_info.fingerprint))
+  script.Print("Target: {}".format(target_info.fingerprint))
 
   script.Print("Verifying current system...")
 
   device_specific.IncrementalOTA_VerifyBegin()
 
-  if source_oem_props is None and target_oem_props is None:
-    script.AssertSomeFingerprint(source_fp, target_fp)
-  elif source_oem_props is not None and target_oem_props is not None:
-    script.AssertSomeThumbprint(
-        GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict),
-        GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
-  elif source_oem_props is None and target_oem_props is not None:
-    script.AssertFingerprintOrThumbprint(
-        source_fp,
-        GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict))
-  else:
-    script.AssertFingerprintOrThumbprint(
-        target_fp,
-        GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
+  WriteFingerprintAssertion(script, target_info, source_info)
 
   # Check the required cache size (i.e. stashed blocks).
   size = []
@@ -806,8 +1117,7 @@
     size.append(vendor_diff.required_cache)
 
   if updating_boot:
-    boot_type, boot_device = common.GetTypeAndDevice(
-        "/boot", OPTIONS.source_info_dict)
+    boot_type, boot_device = common.GetTypeAndDevice("/boot", source_info)
     d = common.Difference(target_boot, source_boot)
     _, _, d = d.ComputePatch()
     if d is None:
@@ -897,7 +1207,6 @@
   if OPTIONS.wipe_user_data:
     script.Print("Erasing user data...")
     script.FormatPartition("/data")
-    metadata["ota-wipe"] = "yes"
 
   if OPTIONS.two_step:
     script.AppendExtra("""
@@ -917,6 +1226,77 @@
   WriteMetadata(metadata, output_zip)
 
 
+def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
+  """Returns a target-files.zip file for generating secondary payload.
+
+  Although the original target-files.zip already contains secondary slot
+  images (i.e. IMAGES/system_other.img), we need to rename the files to ones
+  without the _other suffix. Note that we cannot instead modify the names in
+  META/ab_partitions.txt, because there are no matching partitions on device.
+
+  For the partitions that don't have secondary images, the ones for the primary
+  slot will be used. This ensures that we always have valid boot, vbmeta, and
+  bootloader images in the inactive slot.
+
+  Args:
+    input_file: The input target-files.zip file.
+    skip_postinstall: Whether to skip copying the postinstall config file.
+
+  Returns:
+    The filename of the target-files.zip for generating secondary payload.
+  """
+  target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
+  target_zip = zipfile.ZipFile(target_file, 'w', allowZip64=True)
+
+  input_tmp, input_zip = common.UnzipTemp(input_file, UNZIP_PATTERN)
+  for info in input_zip.infolist():
+    unzipped_file = os.path.join(input_tmp, *info.filename.split('/'))
+    if info.filename == 'IMAGES/system_other.img':
+      common.ZipWrite(target_zip, unzipped_file, arcname='IMAGES/system.img')
+
+    # Primary images and friends need to be skipped explicitly.
+    elif info.filename in ('IMAGES/system.img',
+                           'IMAGES/system.map'):
+      pass
+
+    # Skip copying the postinstall config if requested.
+    elif skip_postinstall and info.filename == POSTINSTALL_CONFIG:
+      pass
+
+    elif info.filename.startswith(('META/', 'IMAGES/')):
+      common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
+
+  common.ZipClose(input_zip)
+  common.ZipClose(target_zip)
+
+  return target_file
+
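A sketch of how this zip feeds the secondary payload, mirroring the --include_secondary handling added to WriteABOTAPackageWithBrilloScript() below; payload_signer and output_zip are assumed from that context:

    secondary_target_file = GetTargetFilesZipForSecondaryImages(
        target_file, OPTIONS.skip_postinstall)
    secondary_payload = Payload(secondary=True)
    secondary_payload.Generate(secondary_target_file)
    secondary_payload.Sign(payload_signer)
    secondary_payload.WriteToZip(output_zip)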
+
+def GetTargetFilesZipWithoutPostinstallConfig(input_file):
+  """Returns a target-files.zip that doesn't contain postinstall_config.txt.
+
+  This allows the brillo_update_payload script to skip writing all postinstall
+  hooks in the generated payload. The input target-files.zip file will be
+  duplicated, with 'META/postinstall_config.txt' skipped. If input_file doesn't
+  contain the postinstall_config.txt entry, the input file will be returned.
+
+  Args:
+    input_file: The input target-files.zip filename.
+
+  Returns:
+    The filename of target-files.zip that doesn't contain postinstall config.
+  """
+  # We should only make a copy if postinstall_config entry exists.
+  with zipfile.ZipFile(input_file, 'r') as input_zip:
+    if POSTINSTALL_CONFIG not in input_zip.namelist():
+      return input_file
+
+  target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
+  shutil.copyfile(input_file, target_file)
+  common.ZipDelete(target_file, POSTINSTALL_CONFIG)
+  return target_file
+
+
 def WriteABOTAPackageWithBrilloScript(target_file, output_file,
                                       source_file=None):
   """Generate an Android OTA package that has A/B update payload."""
@@ -971,172 +1351,69 @@
       value += ' ' * (expected_length - len(value))
     return value
 
-  # The place where the output from the subprocess should go.
-  log_file = sys.stdout if OPTIONS.verbose else subprocess.PIPE
-
-  # A/B updater expects a signing key in RSA format. Gets the key ready for
-  # later use in step 3, unless a payload_signer has been specified.
-  if OPTIONS.payload_signer is None:
-    cmd = ["openssl", "pkcs8",
-           "-in", OPTIONS.package_key + OPTIONS.private_key_suffix,
-           "-inform", "DER"]
-    pw = OPTIONS.key_passwords[OPTIONS.package_key]
-    cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
-    rsa_key = common.MakeTempFile(prefix="key-", suffix=".key")
-    cmd.extend(["-out", rsa_key])
-    p1 = common.Run(cmd, verbose=False, stdout=log_file, stderr=subprocess.STDOUT)
-    p1.communicate()
-    assert p1.returncode == 0, "openssl pkcs8 failed"
-
   # Stage the output zip package for package signing.
   temp_zip_file = tempfile.NamedTemporaryFile()
   output_zip = zipfile.ZipFile(temp_zip_file, "w",
                                compression=zipfile.ZIP_DEFLATED)
 
+  if source_file is not None:
+    target_info = BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
+    source_info = BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
+  else:
+    target_info = BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
+    source_info = None
+
   # Metadata to comply with Android OTA package format.
-  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties", None)
-  oem_dicts = None
-  if oem_props:
-    oem_dicts = _LoadOemDicts(None)
+  metadata = GetPackageMetadata(target_info, source_info)
 
-  metadata = {
-      "post-build": CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
-                                         OPTIONS.info_dict),
-      "post-build-incremental" : GetBuildProp("ro.build.version.incremental",
-                                              OPTIONS.info_dict),
-      "pre-device": GetOemProperty("ro.product.device", oem_props,
-                                   oem_dicts and oem_dicts[0],
-                                   OPTIONS.info_dict),
-      "ota-required-cache": "0",
-      "ota-type": "AB",
-  }
+  if OPTIONS.skip_postinstall:
+    target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
 
-  if source_file is not None:
-    metadata["pre-build"] = CalculateFingerprint(oem_props,
-                                                 oem_dicts and oem_dicts[0],
-                                                 OPTIONS.source_info_dict)
-    metadata["pre-build-incremental"] = GetBuildProp(
-        "ro.build.version.incremental", OPTIONS.source_info_dict)
+  # Generate payload.
+  payload = Payload()
+  payload.Generate(target_file, source_file)
 
-    HandleDowngradeMetadata(metadata)
-  else:
-    metadata["post-timestamp"] = GetBuildProp(
-        "ro.build.date.utc", OPTIONS.info_dict)
+  # Sign the payload.
+  payload_signer = PayloadSigner()
+  payload.Sign(payload_signer)
 
-  # 1. Generate payload.
-  payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
-  cmd = ["brillo_update_payload", "generate",
-         "--payload", payload_file,
-         "--target_image", target_file]
-  if source_file is not None:
-    cmd.extend(["--source_image", source_file])
-  p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
-  p1.communicate()
-  assert p1.returncode == 0, "brillo_update_payload generate failed"
+  # Write the payload into output zip.
+  payload.WriteToZip(output_zip)
 
-  # 2. Generate hashes of the payload and metadata files.
-  payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
-  metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
-  cmd = ["brillo_update_payload", "hash",
-         "--unsigned_payload", payload_file,
-         "--signature_size", "256",
-         "--metadata_hash_file", metadata_sig_file,
-         "--payload_hash_file", payload_sig_file]
-  p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
-  p1.communicate()
-  assert p1.returncode == 0, "brillo_update_payload hash failed"
-
-  # 3. Sign the hashes and insert them back into the payload file.
-  signed_payload_sig_file = common.MakeTempFile(prefix="signed-sig-",
-                                                suffix=".bin")
-  signed_metadata_sig_file = common.MakeTempFile(prefix="signed-sig-",
-                                                 suffix=".bin")
-  # 3a. Sign the payload hash.
-  if OPTIONS.payload_signer is not None:
-    cmd = [OPTIONS.payload_signer]
-    cmd.extend(OPTIONS.payload_signer_args)
-  else:
-    cmd = ["openssl", "pkeyutl", "-sign",
-           "-inkey", rsa_key,
-           "-pkeyopt", "digest:sha256"]
-  cmd.extend(["-in", payload_sig_file,
-              "-out", signed_payload_sig_file])
-  p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
-  p1.communicate()
-  assert p1.returncode == 0, "openssl sign payload failed"
-
-  # 3b. Sign the metadata hash.
-  if OPTIONS.payload_signer is not None:
-    cmd = [OPTIONS.payload_signer]
-    cmd.extend(OPTIONS.payload_signer_args)
-  else:
-    cmd = ["openssl", "pkeyutl", "-sign",
-           "-inkey", rsa_key,
-           "-pkeyopt", "digest:sha256"]
-  cmd.extend(["-in", metadata_sig_file,
-              "-out", signed_metadata_sig_file])
-  p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
-  p1.communicate()
-  assert p1.returncode == 0, "openssl sign metadata failed"
-
-  # 3c. Insert the signatures back into the payload file.
-  signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
-                                            suffix=".bin")
-  cmd = ["brillo_update_payload", "sign",
-         "--unsigned_payload", payload_file,
-         "--payload", signed_payload_file,
-         "--signature_size", "256",
-         "--metadata_signature_file", signed_metadata_sig_file,
-         "--payload_signature_file", signed_payload_sig_file]
-  p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
-  p1.communicate()
-  assert p1.returncode == 0, "brillo_update_payload sign failed"
-
-  # 4. Dump the signed payload properties.
-  properties_file = common.MakeTempFile(prefix="payload-properties-",
-                                        suffix=".txt")
-  cmd = ["brillo_update_payload", "properties",
-         "--payload", signed_payload_file,
-         "--properties_file", properties_file]
-  p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
-  p1.communicate()
-  assert p1.returncode == 0, "brillo_update_payload properties failed"
-
-  if OPTIONS.wipe_user_data:
-    with open(properties_file, "a") as f:
-      f.write("POWERWASH=1\n")
-    metadata["ota-wipe"] = "yes"
-
-  # Add the signed payload file and properties into the zip. In order to
-  # support streaming, we pack payload.bin, payload_properties.txt and
-  # care_map.txt as ZIP_STORED. So these entries can be read directly with
-  # the offset and length pairs.
-  common.ZipWrite(output_zip, signed_payload_file, arcname="payload.bin",
-                  compress_type=zipfile.ZIP_STORED)
-  common.ZipWrite(output_zip, properties_file,
-                  arcname="payload_properties.txt",
-                  compress_type=zipfile.ZIP_STORED)
+  # Generate and include the secondary payload that installs secondary images
+  # (e.g. system_other.img).
+  if OPTIONS.include_secondary:
+    # We always include a full payload for the secondary slot, even when
+    # building an incremental OTA. See the comments for "--include_secondary".
+    secondary_target_file = GetTargetFilesZipForSecondaryImages(
+        target_file, OPTIONS.skip_postinstall)
+    secondary_payload = Payload(secondary=True)
+    secondary_payload.Generate(secondary_target_file)
+    secondary_payload.Sign(payload_signer)
+    secondary_payload.WriteToZip(output_zip)
 
   # If dm-verity is supported for the device, copy contents of care_map
   # into A/B OTA package.
   target_zip = zipfile.ZipFile(target_file, "r")
-  if (OPTIONS.info_dict.get("verity") == "true" or
-      OPTIONS.info_dict.get("avb_enable") == "true"):
+  if (target_info.get("verity") == "true" or
+      target_info.get("avb_enable") == "true"):
     care_map_path = "META/care_map.txt"
     namelist = target_zip.namelist()
     if care_map_path in namelist:
       care_map_data = target_zip.read(care_map_path)
+      # In order to support streaming, care_map.txt needs to be packed as
+      # ZIP_STORED.
       common.ZipWriteStr(output_zip, "care_map.txt", care_map_data,
-          compress_type=zipfile.ZIP_STORED)
+                         compress_type=zipfile.ZIP_STORED)
     else:
       print("Warning: cannot find care map file in target_file package")
 
-  # OPTIONS.source_info_dict must be None for incrementals.
+  # source_info must be None for full OTAs.
   if source_file is None:
-    assert OPTIONS.source_info_dict is None
+    assert source_info is None
 
   AddCompatibilityArchiveIfTrebleEnabled(
-      target_zip, output_zip, OPTIONS.info_dict, OPTIONS.source_info_dict)
+      target_zip, output_zip, target_info, source_info)
 
   common.ZipClose(target_zip)
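Editor's note on the streaming rationale mentioned above for care_map.txt (and
previously spelled out for payload.bin and payload_properties.txt): a
ZIP_STORED entry's bytes sit verbatim in the archive, so a client can fetch it
with one ranged read once it knows the data offset and length. A minimal sketch
of that lookup, using only the standard zipfile/struct modules (illustration,
not part of releasetools):

    import struct
    import zipfile

    def stored_entry_offset_and_length(zip_path, name):
      """Returns (data_offset, length) for a ZIP_STORED entry."""
      with zipfile.ZipFile(zip_path) as zf:
        info = zf.getinfo(name)
      assert info.compress_type == zipfile.ZIP_STORED
      with open(zip_path, 'rb') as f:
        f.seek(info.header_offset)
        header = f.read(30)  # fixed-size part of the local file header
        name_len, extra_len = struct.unpack('<HH', header[26:30])
      # File data starts right after the header, file name and extra field.
      return info.header_offset + 30 + name_len + extra_len, info.file_size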
 
@@ -1213,6 +1490,8 @@
                          "integers are allowed." % (a, o))
     elif o in ("-2", "--two_step"):
       OPTIONS.two_step = True
+    elif o == "--include_secondary":
+      OPTIONS.include_secondary = True
     elif o == "--no_signing":
       OPTIONS.no_signing = True
     elif o == "--verify":
@@ -1221,8 +1500,6 @@
       OPTIONS.block_based = True
     elif o in ("-b", "--binary"):
       OPTIONS.updater_binary = a
-    elif o in ("--no_fallback_to_full",):
-      OPTIONS.fallback_to_full = False
     elif o == "--stash_threshold":
       try:
         OPTIONS.stash_threshold = float(a)
@@ -1237,6 +1514,8 @@
       OPTIONS.payload_signer_args = shlex.split(a)
     elif o == "--extracted_input_target_files":
       OPTIONS.extracted_input = a
+    elif o == "--skip_postinstall":
+      OPTIONS.skip_postinstall = True
     else:
       return False
     return True
@@ -1254,18 +1533,19 @@
                                  "extra_script=",
                                  "worker_threads=",
                                  "two_step",
+                                 "include_secondary",
                                  "no_signing",
                                  "block",
                                  "binary=",
                                  "oem_settings=",
                                  "oem_no_mount",
                                  "verify",
-                                 "no_fallback_to_full",
                                  "stash_threshold=",
                                  "log_diff=",
                                  "payload_signer=",
                                  "payload_signer_args=",
                                  "extracted_input_target_files=",
+                                 "skip_postinstall",
                              ], extra_option_handler=option_handler)
 
   if len(args) != 2:
@@ -1286,14 +1566,35 @@
   assert not (OPTIONS.downgrade and OPTIONS.timestamp), \
       "Cannot have --downgrade AND --override_timestamp both"
 
-  # Load the dict file from the zip directly to have a peek at the OTA type.
-  # For packages using A/B update, unzipping is not needed.
+  # Load the build info dicts from the zip directly or the extracted input
+  # directory. We don't need to unzip the entire target-files zips, because the
+  # extracted files won't be needed for A/B OTAs (brillo_update_payload does
+  # its own unzipping). When loading the info dicts, we don't need to provide
+  # the second parameter to common.LoadInfoDict(). Specifying the second
+  # parameter would replace some properties, such as 'selinux_fc' and
+  # 'ramdisk_dir', with their actual paths, which won't be used during OTA
+  # generation.
   if OPTIONS.extracted_input is not None:
-    OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input, OPTIONS.extracted_input)
+    OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input)
   else:
-    input_zip = zipfile.ZipFile(args[0], "r")
-    OPTIONS.info_dict = common.LoadInfoDict(input_zip)
-    common.ZipClose(input_zip)
+    with zipfile.ZipFile(args[0], 'r') as input_zip:
+      OPTIONS.info_dict = common.LoadInfoDict(input_zip)
+
+  if OPTIONS.verbose:
+    print("--- target info ---")
+    common.DumpInfoDict(OPTIONS.info_dict)
+
+  # Load the source build dict if applicable.
+  if OPTIONS.incremental_source is not None:
+    OPTIONS.target_info_dict = OPTIONS.info_dict
+    with zipfile.ZipFile(OPTIONS.incremental_source, 'r') as source_zip:
+      OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
+
+    if OPTIONS.verbose:
+      print("--- source info ---")
+      common.DumpInfoDict(OPTIONS.source_info_dict)
+
+  # Load OEM dicts if provided.
+  OPTIONS.oem_dicts = _LoadOemDicts(OPTIONS.oem_source)
 
   ab_update = OPTIONS.info_dict.get("ab_update") == "true"
 
@@ -1309,20 +1610,6 @@
     OPTIONS.key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
 
   if ab_update:
-    if OPTIONS.incremental_source is not None:
-      OPTIONS.target_info_dict = OPTIONS.info_dict
-      source_zip = zipfile.ZipFile(OPTIONS.incremental_source, "r")
-      OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
-      common.ZipClose(source_zip)
-
-    if OPTIONS.verbose:
-      print("--- target info ---")
-      common.DumpInfoDict(OPTIONS.info_dict)
-
-      if OPTIONS.incremental_source is not None:
-        print("--- source info ---")
-        common.DumpInfoDict(OPTIONS.source_info_dict)
-
     WriteABOTAPackageWithBrilloScript(
         target_file=args[0],
         output_file=args[1],
@@ -1331,48 +1618,45 @@
     print("done.")
     return
 
+  # Sanity check the loaded info dicts first.
+  if OPTIONS.info_dict.get("no_recovery") == "true":
+    raise common.ExternalError(
+        "--- target build has specified no recovery ---")
+
+  # Non-A/B OTAs rely on /cache partition to store temporary files.
+  cache_size = OPTIONS.info_dict.get("cache_size")
+  if cache_size is None:
+    print("--- can't determine the cache partition size ---")
+  OPTIONS.cache_size = cache_size
+
   if OPTIONS.extra_script is not None:
     OPTIONS.extra_script = open(OPTIONS.extra_script).read()
 
   if OPTIONS.extracted_input is not None:
     OPTIONS.input_tmp = OPTIONS.extracted_input
-    OPTIONS.target_tmp = OPTIONS.input_tmp
-    OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.input_tmp, OPTIONS.input_tmp)
     input_zip = zipfile.ZipFile(args[0], "r")
   else:
     print("unzipping target target-files...")
     OPTIONS.input_tmp, input_zip = common.UnzipTemp(
         args[0], UNZIP_PATTERN)
+  OPTIONS.target_tmp = OPTIONS.input_tmp
 
-    OPTIONS.target_tmp = OPTIONS.input_tmp
-    OPTIONS.info_dict = common.LoadInfoDict(input_zip, OPTIONS.target_tmp)
-
-  if OPTIONS.verbose:
-    print("--- target info ---")
-    common.DumpInfoDict(OPTIONS.info_dict)
-
-  # If the caller explicitly specified the device-specific extensions
-  # path via -s/--device_specific, use that.  Otherwise, use
-  # META/releasetools.py if it is present in the target target_files.
-  # Otherwise, take the path of the file from 'tool_extensions' in the
-  # info dict and look for that in the local filesystem, relative to
-  # the current directory.
-
+  # If the caller explicitly specified the device-specific extensions path via
+  # -s / --device_specific, use that. Otherwise, use META/releasetools.py if it
+  # is present in the target target_files. Otherwise, take the path of the file
+  # from 'tool_extensions' in the info dict and look for that in the local
+  # filesystem, relative to the current directory.
   if OPTIONS.device_specific is None:
     from_input = os.path.join(OPTIONS.input_tmp, "META", "releasetools.py")
     if os.path.exists(from_input):
       print("(using device-specific extensions from target_files)")
       OPTIONS.device_specific = from_input
     else:
-      OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions", None)
+      OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions")
 
   if OPTIONS.device_specific is not None:
     OPTIONS.device_specific = os.path.abspath(OPTIONS.device_specific)
 
-  if OPTIONS.info_dict.get("no_recovery") == "true":
-    raise common.ExternalError(
-        "--- target build has specified no recovery ---")
-
   # Set up the output zip. Create a temporary zip file if signing is needed.
   if OPTIONS.no_signing:
     if os.path.exists(args[1]):
@@ -1384,46 +1668,26 @@
     output_zip = zipfile.ZipFile(temp_zip_file, "w",
                                  compression=zipfile.ZIP_DEFLATED)
 
-  # Non A/B OTAs rely on /cache partition to store temporary files.
-  cache_size = OPTIONS.info_dict.get("cache_size", None)
-  if cache_size is None:
-    print("--- can't determine the cache partition size ---")
-  OPTIONS.cache_size = cache_size
-
   # Generate a full OTA.
   if OPTIONS.incremental_source is None:
     WriteFullOTAPackage(input_zip, output_zip)
 
-  # Generate an incremental OTA. It will fall back to generate a full OTA on
-  # failure unless no_fallback_to_full is specified.
+  # Generate an incremental OTA.
   else:
     print("unzipping source target-files...")
     OPTIONS.source_tmp, source_zip = common.UnzipTemp(
         OPTIONS.incremental_source,
         UNZIP_PATTERN)
-    OPTIONS.target_info_dict = OPTIONS.info_dict
-    OPTIONS.source_info_dict = common.LoadInfoDict(source_zip,
-                                                   OPTIONS.source_tmp)
-    if OPTIONS.verbose:
-      print("--- source info ---")
-      common.DumpInfoDict(OPTIONS.source_info_dict)
-    try:
-      WriteBlockIncrementalOTAPackage(input_zip, source_zip, output_zip)
-      if OPTIONS.log_diff:
-        out_file = open(OPTIONS.log_diff, 'w')
-        import target_files_diff
-        target_files_diff.recursiveDiff('',
-                                        OPTIONS.source_tmp,
-                                        OPTIONS.input_tmp,
-                                        out_file)
-        out_file.close()
-    except ValueError:
-      if not OPTIONS.fallback_to_full:
-        raise
-      print("--- failed to build incremental; falling back to full ---")
-      OPTIONS.incremental_source = None
-      WriteFullOTAPackage(input_zip, output_zip)
 
+    WriteBlockIncrementalOTAPackage(input_zip, source_zip, output_zip)
+
+    if OPTIONS.log_diff:
+      with open(OPTIONS.log_diff, 'w') as out_file:
+        import target_files_diff
+        target_files_diff.recursiveDiff(
+            '', OPTIONS.source_tmp, OPTIONS.input_tmp, out_file)
+
+  common.ZipClose(input_zip)
   common.ZipClose(output_zip)
 
   # Sign the generated zip package unless no_signing is specified.
diff --git a/tools/releasetools/rangelib.py b/tools/releasetools/rangelib.py
index 87380a5..36becf4 100644
--- a/tools/releasetools/rangelib.py
+++ b/tools/releasetools/rangelib.py
@@ -13,18 +13,26 @@
 # limitations under the License.
 
 from __future__ import print_function
+
 import heapq
 import itertools
 
+
 __all__ = ["RangeSet"]
 
+
 class RangeSet(object):
-  """A RangeSet represents a set of nonoverlapping ranges on the
-  integers (ie, a set of integers, but efficient when the set contains
-  lots of runs."""
+  """A RangeSet represents a set of non-overlapping ranges on integers.
+
+  Attributes:
+    monotonic: Whether the input has all its integers in increasing order.
+    extra: A dict that can be used by the caller, e.g. to store info that's
+        only meaningful to the caller.
+  """
 
   def __init__(self, data=None):
     self.monotonic = False
+    self._extra = {}
     if isinstance(data, str):
       self._parse_internal(data)
     elif data:
@@ -56,18 +64,24 @@
   def __repr__(self):
     return '<RangeSet("' + self.to_string() + '")>'
 
+  @property
+  def extra(self):
+    return self._extra
+
   @classmethod
   def parse(cls, text):
-    """Parse a text string consisting of a space-separated list of
-    blocks and ranges, eg "10-20 30 35-40".  Ranges are interpreted to
-    include both their ends (so the above example represents 18
-    individual blocks.  Returns a RangeSet object.
+    """Parses a text string into a RangeSet.
 
-    If the input has all its blocks in increasing order, then returned
-    RangeSet will have an extra attribute 'monotonic' that is set to
-    True.  For example the input "10-20 30" is monotonic, but the input
-    "15-20 30 10-14" is not, even though they represent the same set
-    of blocks (and the two RangeSets will compare equal with ==).
+    The input text string consists of a space-separated list of blocks and
+    ranges, e.g. "10-20 30 35-40". Ranges are interpreted to include both their
+    ends (so the above example represents 18 individual blocks). Returns a
+    RangeSet object.
+
+    If the input has all its blocks in increasing order, then the 'monotonic'
+    attribute of the returned RangeSet will be set to True. For example, the
+    input "10-20 30" is monotonic, but the input "15-20 30 10-14" is not, even
+    though they represent the same set of blocks (and the two RangeSets will
+    compare equal with ==).
     """
     return cls(text)
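A quick usage sketch of the semantics documented above (editor's illustration;
parse(), size(), monotonic, == and the extra dict are all exercised elsewhere
in this change):

    from rangelib import RangeSet

    # Both ends of each range are included: 11 + 1 + 6 = 18 blocks in total.
    blocks = RangeSet.parse("10-20 30 35-40")
    assert blocks.size() == 18

    # Input order only affects 'monotonic'; the sets still compare equal.
    a = RangeSet.parse("10-20 30")
    b = RangeSet.parse("15-20 30 10-14")
    assert a.monotonic and not b.monotonic
    assert a == b

    # 'extra' is a free-form dict for callers, e.g. the tagging in sparse_img.
    a.extra['uses_shared_blocks'] = True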
 
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 7a1126c..fa62c8f 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -104,6 +104,7 @@
 import sys
 import tempfile
 import zipfile
+from xml.etree import ElementTree
 
 import add_img_to_target_files
 import common
@@ -290,6 +291,8 @@
         new_data = RewriteProps(data)
       common.ZipWriteStr(output_tf_zip, out_info, new_data)
 
+    # Replace the certs in *mac_permissions.xml (there could be multiple, such
+    # as {system,vendor}/etc/selinux/{plat,nonplat}_mac_permissions.xml).
     elif info.filename.endswith("mac_permissions.xml"):
       print("Rewriting %s with new keys." % (info.filename,))
       new_data = ReplaceCerts(data)
@@ -361,31 +364,54 @@
 
 
 def ReplaceCerts(data):
-  """Given a string of data, replace all occurences of a set
-  of X509 certs with a newer set of X509 certs and return
-  the updated data string."""
-  for old, new in OPTIONS.key_map.iteritems():
-    try:
-      if OPTIONS.verbose:
-        print("    Replacing %s.x509.pem with %s.x509.pem" % (old, new))
-      f = open(old + ".x509.pem")
-      old_cert16 = base64.b16encode(common.ParseCertificate(f.read())).lower()
-      f.close()
-      f = open(new + ".x509.pem")
-      new_cert16 = base64.b16encode(common.ParseCertificate(f.read())).lower()
-      f.close()
-      # Only match entire certs.
-      pattern = "\\b" + old_cert16 + "\\b"
-      (data, num) = re.subn(pattern, new_cert16, data, flags=re.IGNORECASE)
-      if OPTIONS.verbose:
-        print("    Replaced %d occurence(s) of %s.x509.pem with "
-              "%s.x509.pem" % (num, old, new))
-    except IOError as e:
-      if e.errno == errno.ENOENT and not OPTIONS.verbose:
-        continue
+  """Replaces all the occurences of X.509 certs with the new ones.
 
-      print("    Error accessing %s. %s. Skip replacing %s.x509.pem with "
-            "%s.x509.pem." % (e.filename, e.strerror, old, new))
+  The mapping info is read from OPTIONS.key_map. Non-existent certificates will
+  be skipped. After the replacement, it additionally checks for duplicate
+  entries, which would otherwise fail the policy loading code in
+  frameworks/base/services/core/java/com/android/server/pm/SELinuxMMAC.java.
+
+  Args:
+    data: Input string that contains a set of X.509 certs.
+
+  Returns:
+    A string after the replacement.
+
+  Raises:
+    AssertionError: On finding duplicate entries.
+  """
+  for old, new in OPTIONS.key_map.iteritems():
+    if OPTIONS.verbose:
+      print("    Replacing %s.x509.pem with %s.x509.pem" % (old, new))
+
+    try:
+      with open(old + ".x509.pem") as old_fp:
+        old_cert16 = base64.b16encode(
+            common.ParseCertificate(old_fp.read())).lower()
+      with open(new + ".x509.pem") as new_fp:
+        new_cert16 = base64.b16encode(
+            common.ParseCertificate(new_fp.read())).lower()
+    except IOError as e:
+      if OPTIONS.verbose or e.errno != errno.ENOENT:
+        print("    Error accessing %s: %s.\nSkip replacing %s.x509.pem with "
+              "%s.x509.pem." % (e.filename, e.strerror, old, new))
+      continue
+
+    # Only match entire certs.
+    pattern = "\\b" + old_cert16 + "\\b"
+    (data, num) = re.subn(pattern, new_cert16, data, flags=re.IGNORECASE)
+
+    if OPTIONS.verbose:
+      print("    Replaced %d occurence(s) of %s.x509.pem with %s.x509.pem" % (
+          num, old, new))
+
+  # Verify that there are no duplicate entries after the replacement. Note that
+  # it's only checking entries with global seinfo at the moment (i.e. ignoring
+  # the ones with inner packages). (Bug: 69479366)
+  root = ElementTree.fromstring(data)
+  signatures = [signer.attrib['signature'] for signer in root.findall('signer')]
+  assert len(signatures) == len(set(signatures)), \
+      "Found duplicate entries after cert replacement: {}".format(data)
 
   return data
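Editor's toy illustration of the two steps above, on a made-up
mac_permissions.xml snippet (the short hex strings stand in for real
base16-encoded certs):

    import re
    from xml.etree import ElementTree

    old_cert16, new_cert16 = "abcd01", "ef2345"
    data = ('<policy>'
            '<signer signature="abcd01"><seinfo value="platform"/></signer>'
            '</policy>')

    # Word boundaries ensure only whole certs are rewritten, never substrings.
    data, num = re.subn(r"\b" + old_cert16 + r"\b", new_cert16, data,
                        flags=re.IGNORECASE)
    assert num == 1

    # Duplicate <signer> entries would break SELinuxMMAC policy loading, so
    # fail loudly here instead of producing a bad package.
    root = ElementTree.fromstring(data)
    signatures = [s.attrib['signature'] for s in root.findall('signer')]
    assert len(signatures) == len(set(signatures))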
 
@@ -538,10 +564,7 @@
             " as payload verification key.\n\n")
 
     print("Using %s for payload verification." % (mapped_keys[0],))
-    cmd = common.Run(
-        ["openssl", "x509", "-pubkey", "-noout", "-in", mapped_keys[0]],
-        stdout=subprocess.PIPE)
-    pubkey, _ = cmd.communicate()
+    pubkey = common.ExtractPublicKey(mapped_keys[0])
     common.ZipWriteStr(
         output_tf_zip,
         "SYSTEM/etc/update_engine/update-payload-key.pub.pem",
@@ -600,7 +623,7 @@
 
     # Extract keyid using openssl command.
     p = common.Run(["openssl", "x509", "-in", key_path, "-text"],
-                   stdout=subprocess.PIPE)
+                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     keyid, stderr = p.communicate()
     assert p.returncode == 0, "Failed to dump certificate: {}".format(stderr)
     keyid = re.search(
diff --git a/tools/releasetools/sparse_img.py b/tools/releasetools/sparse_img.py
index 7eb60d9..083da7a 100644
--- a/tools/releasetools/sparse_img.py
+++ b/tools/releasetools/sparse_img.py
@@ -15,6 +15,7 @@
 import bisect
 import os
 import struct
+import threading
 from hashlib import sha1
 
 import rangelib
@@ -32,7 +33,7 @@
   """
 
   def __init__(self, simg_fn, file_map_fn=None, clobbered_blocks=None,
-               mode="rb", build_map=True):
+               mode="rb", build_map=True, allow_shared_blocks=False):
     self.simg_f = f = open(simg_fn, mode)
 
     header_bin = f.read(28)
@@ -111,6 +112,8 @@
         raise ValueError("Unknown chunk type 0x%04X not supported" %
                          (chunk_type,))
 
+    self.generator_lock = threading.Lock()
+
     self.care_map = rangelib.RangeSet(care_data)
     self.offset_index = [i[0] for i in offset_map]
 
@@ -126,7 +129,8 @@
     self.extended = extended
 
     if file_map_fn:
-      self.LoadFileBlockMap(file_map_fn, self.clobbered_blocks)
+      self.LoadFileBlockMap(file_map_fn, self.clobbered_blocks,
+                            allow_shared_blocks)
     else:
       self.file_map = {"__DATA": self.care_map}
 
@@ -173,40 +177,47 @@
     particular is not necessarily equal to the number of ranges in
     'ranges'.
 
-    This generator is stateful -- it depends on the open file object
-    contained in this SparseImage, so you should not try to run two
+    Use a lock to protect the generator so that we will not run two
     instances of this generator on the same object simultaneously."""
 
     f = self.simg_f
-    for s, e in ranges:
-      to_read = e-s
-      idx = bisect.bisect_right(self.offset_index, s) - 1
-      chunk_start, chunk_len, filepos, fill_data = self.offset_map[idx]
-
-      # for the first chunk we may be starting partway through it.
-      remain = chunk_len - (s - chunk_start)
-      this_read = min(remain, to_read)
-      if filepos is not None:
-        p = filepos + ((s - chunk_start) * self.blocksize)
-        f.seek(p, os.SEEK_SET)
-        yield f.read(this_read * self.blocksize)
-      else:
-        yield fill_data * (this_read * (self.blocksize >> 2))
-      to_read -= this_read
-
-      while to_read > 0:
-        # continue with following chunks if this range spans multiple chunks.
-        idx += 1
+    with self.generator_lock:
+      for s, e in ranges:
+        to_read = e-s
+        idx = bisect.bisect_right(self.offset_index, s) - 1
         chunk_start, chunk_len, filepos, fill_data = self.offset_map[idx]
-        this_read = min(chunk_len, to_read)
+
+        # for the first chunk we may be starting partway through it.
+        remain = chunk_len - (s - chunk_start)
+        this_read = min(remain, to_read)
         if filepos is not None:
-          f.seek(filepos, os.SEEK_SET)
+          p = filepos + ((s - chunk_start) * self.blocksize)
+          f.seek(p, os.SEEK_SET)
           yield f.read(this_read * self.blocksize)
         else:
           yield fill_data * (this_read * (self.blocksize >> 2))
         to_read -= this_read
 
-  def LoadFileBlockMap(self, fn, clobbered_blocks):
+        while to_read > 0:
+          # continue with following chunks if this range spans multiple chunks.
+          idx += 1
+          chunk_start, chunk_len, filepos, fill_data = self.offset_map[idx]
+          this_read = min(chunk_len, to_read)
+          if filepos is not None:
+            f.seek(filepos, os.SEEK_SET)
+            yield f.read(this_read * self.blocksize)
+          else:
+            yield fill_data * (this_read * (self.blocksize >> 2))
+          to_read -= this_read
+
+  def LoadFileBlockMap(self, fn, clobbered_blocks, allow_shared_blocks):
+    """Loads the given block map file.
+
+    Args:
+      fn: The filename of the block map file.
+      clobbered_blocks: A RangeSet instance for the clobbered blocks.
+      allow_shared_blocks: Whether having shared blocks is allowed.
+    """
     remaining = self.care_map
     self.file_map = out = {}
 
@@ -214,6 +225,18 @@
       for line in f:
         fn, ranges = line.split(None, 1)
         ranges = rangelib.RangeSet.parse(ranges)
+
+        if allow_shared_blocks:
+          # Find the shared blocks that have been claimed by others.
+          shared_blocks = ranges.subtract(remaining)
+          if shared_blocks:
+            ranges = ranges.subtract(shared_blocks)
+            if not ranges:
+              continue
+
+            # Tag the entry so that we can skip applying imgdiff on this file.
+            ranges.extra['uses_shared_blocks'] = True
+
         out[fn] = ranges
         assert ranges.size() == ranges.intersect(remaining).size()
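A short sketch of the shared-block trimming above, using the numbers from the
GetSparseImage tests added in test_common.py below (editor's illustration; the
16-block care map is an assumption of the example):

    from rangelib import RangeSet

    care_map = RangeSet("0-15")
    remaining = care_map.subtract(RangeSet("1-5 9-10"))  # file1 already claimed
    ranges = RangeSet("10-12")                           # file2's claim
    shared_blocks = ranges.subtract(remaining)           # -> "10"
    ranges = ranges.subtract(shared_blocks)              # -> "11-12"
    ranges.extra['uses_shared_blocks'] = True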
 
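The generator change above keeps the shared open file object but serializes
concurrent runs with a lock. A minimal sketch of the pattern, built around a
hypothetical SharedFileReader (editor's illustration, not part of
sparse_img.py): the lock is acquired when the generator body first runs and is
held until the generator finishes, so two threads iterating generators from the
same object cannot interleave their seek()/read() calls.

    import threading

    class SharedFileReader(object):
      def __init__(self, fp, blocksize):
        self.fp = fp
        self.blocksize = blocksize
        self.lock = threading.Lock()

      def ReadBlocks(self, ranges):
        """Yields the data for each (start, end) block range, serialized."""
        with self.lock:
          for start, end in ranges:
            self.fp.seek(start * self.blocksize)
            yield self.fp.read((end - start) * self.blocksize)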
diff --git a/tools/releasetools/test_add_img_to_target_files.py b/tools/releasetools/test_add_img_to_target_files.py
new file mode 100644
index 0000000..9a0f78e
--- /dev/null
+++ b/tools/releasetools/test_add_img_to_target_files.py
@@ -0,0 +1,339 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import os.path
+import unittest
+import zipfile
+
+import common
+import test_utils
+from add_img_to_target_files import (
+    AddCareMapTxtForAbOta, AddPackRadioImages, AddRadioImagesForAbOta,
+    GetCareMap)
+from rangelib import RangeSet
+
+
+OPTIONS = common.OPTIONS
+
+
+class AddImagesToTargetFilesTest(unittest.TestCase):
+
+  def setUp(self):
+    OPTIONS.input_tmp = common.MakeTempDir()
+
+  def tearDown(self):
+    common.Cleanup()
+
+  @staticmethod
+  def _create_images(images, prefix):
+    """Creates images under OPTIONS.input_tmp/prefix."""
+    path = os.path.join(OPTIONS.input_tmp, prefix)
+    if not os.path.exists(path):
+      os.mkdir(path)
+
+    for image in images:
+      image_path = os.path.join(path, image + '.img')
+      with open(image_path, 'wb') as image_fp:
+        image_fp.write(image.encode())
+
+    images_path = os.path.join(OPTIONS.input_tmp, 'IMAGES')
+    if not os.path.exists(images_path):
+      os.mkdir(images_path)
+    return images, images_path
+
+  def test_AddRadioImagesForAbOta_imageExists(self):
+    """Tests the case with existing images under IMAGES/."""
+    images, images_path = self._create_images(['aboot', 'xbl'], 'IMAGES')
+    AddRadioImagesForAbOta(None, images)
+
+    for image in images:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, image + '.img')))
+
+  def test_AddRadioImagesForAbOta_copyFromRadio(self):
+    """Tests the case that copies images from RADIO/."""
+    images, images_path = self._create_images(['aboot', 'xbl'], 'RADIO')
+    AddRadioImagesForAbOta(None, images)
+
+    for image in images:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, image + '.img')))
+
+  def test_AddRadioImagesForAbOta_copyFromRadio_zipOutput(self):
+    images, _ = self._create_images(['aboot', 'xbl'], 'RADIO')
+
+    # Set up the output zip.
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      AddRadioImagesForAbOta(output_zip, images)
+
+    with zipfile.ZipFile(output_file, 'r') as verify_zip:
+      for image in images:
+        self.assertIn('IMAGES/' + image + '.img', verify_zip.namelist())
+
+  def test_AddRadioImagesForAbOta_copyFromVendorImages(self):
+    """Tests the case that copies images from VENDOR_IMAGES/."""
+    vendor_images_path = os.path.join(OPTIONS.input_tmp, 'VENDOR_IMAGES')
+    os.mkdir(vendor_images_path)
+
+    partitions = ['aboot', 'xbl']
+    for index, partition in enumerate(partitions):
+      subdir = os.path.join(vendor_images_path, 'subdir-{}'.format(index))
+      os.mkdir(subdir)
+
+      partition_image_path = os.path.join(subdir, partition + '.img')
+      with open(partition_image_path, 'wb') as partition_fp:
+        partition_fp.write(partition.encode())
+
+    # Set up the output dir.
+    images_path = os.path.join(OPTIONS.input_tmp, 'IMAGES')
+    os.mkdir(images_path)
+
+    AddRadioImagesForAbOta(None, partitions)
+
+    for partition in partitions:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, partition + '.img')))
+
+  def test_AddRadioImagesForAbOta_missingImages(self):
+    images, _ = self._create_images(['aboot', 'xbl'], 'RADIO')
+    self.assertRaises(AssertionError, AddRadioImagesForAbOta, None,
+                      images + ['baz'])
+
+  def test_AddRadioImagesForAbOta_missingImages_zipOutput(self):
+    images, _ = self._create_images(['aboot', 'xbl'], 'RADIO')
+
+    # Set up the output zip.
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      self.assertRaises(AssertionError, AddRadioImagesForAbOta, output_zip,
+                        images + ['baz'])
+
+  def test_AddPackRadioImages(self):
+    images, images_path = self._create_images(['foo', 'bar'], 'RADIO')
+    AddPackRadioImages(None, images)
+
+    for image in images:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, image + '.img')))
+
+  def test_AddPackRadioImages_with_suffix(self):
+    images, images_path = self._create_images(['foo', 'bar'], 'RADIO')
+    images_with_suffix = [image + '.img' for image in images]
+    AddPackRadioImages(None, images_with_suffix)
+
+    for image in images:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, image + '.img')))
+
+  def test_AddPackRadioImages_zipOutput(self):
+    images, _ = self._create_images(['foo', 'bar'], 'RADIO')
+
+    # Set up the output zip.
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      AddPackRadioImages(output_zip, images)
+
+    with zipfile.ZipFile(output_file, 'r') as verify_zip:
+      for image in images:
+        self.assertIn('IMAGES/' + image + '.img', verify_zip.namelist())
+
+  def test_AddPackRadioImages_imageExists(self):
+    images, images_path = self._create_images(['foo', 'bar'], 'RADIO')
+
+    # Additionally create images under IMAGES/ so that they will be skipped.
+    images, images_path = self._create_images(['foo', 'bar'], 'IMAGES')
+
+    AddPackRadioImages(None, images)
+
+    for image in images:
+      self.assertTrue(
+          os.path.exists(os.path.join(images_path, image + '.img')))
+
+  def test_AddPackRadioImages_missingImages(self):
+    images, _ = self._create_images(['foo', 'bar'], 'RADIO')
+    AddPackRadioImages(None, images)
+
+    self.assertRaises(AssertionError, AddPackRadioImages, None,
+                      images + ['baz'])
+
+  @staticmethod
+  def _test_AddCareMapTxtForAbOta():
+    """Helper function to set up the test for test_AddCareMapTxtForAbOta()."""
+    OPTIONS.info_dict = {
+        'system_verity_block_device' : '/dev/block/system',
+        'vendor_verity_block_device' : '/dev/block/vendor',
+    }
+
+    # Prepare the META/ folder.
+    meta_path = os.path.join(OPTIONS.input_tmp, 'META')
+    if not os.path.exists(meta_path):
+      os.mkdir(meta_path)
+
+    system_image = test_utils.construct_sparse_image([
+        (0xCAC1, 6),
+        (0xCAC3, 4),
+        (0xCAC1, 6)])
+    vendor_image = test_utils.construct_sparse_image([
+        (0xCAC2, 10)])
+
+    image_paths = {
+        'system' : system_image,
+        'vendor' : vendor_image,
+    }
+    return image_paths
+
+  def test_AddCareMapTxtForAbOta(self):
+    image_paths = self._test_AddCareMapTxtForAbOta()
+
+    AddCareMapTxtForAbOta(None, ['system', 'vendor'], image_paths)
+
+    care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
+    with open(care_map_file, 'r') as verify_fp:
+      care_map = verify_fp.read()
+
+    lines = care_map.split('\n')
+    self.assertEqual(4, len(lines))
+    self.assertEqual('system', lines[0])
+    self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
+    self.assertEqual('vendor', lines[2])
+    self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+
+  def test_AddCareMapTxtForAbOta_withNonCareMapPartitions(self):
+    """Partitions without care_map should be ignored."""
+    image_paths = self._test_AddCareMapTxtForAbOta()
+
+    AddCareMapTxtForAbOta(
+        None, ['boot', 'system', 'vendor', 'vbmeta'], image_paths)
+
+    care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
+    with open(care_map_file, 'r') as verify_fp:
+      care_map = verify_fp.read()
+
+    lines = care_map.split('\n')
+    self.assertEqual(4, len(lines))
+    self.assertEqual('system', lines[0])
+    self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
+    self.assertEqual('vendor', lines[2])
+    self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+
+  def test_AddCareMapTxtForAbOta_withAvb(self):
+    """Tests the case for device using AVB."""
+    image_paths = self._test_AddCareMapTxtForAbOta()
+    OPTIONS.info_dict = {
+        'avb_system_hashtree_enable' : 'true',
+        'avb_vendor_hashtree_enable' : 'true',
+    }
+
+    AddCareMapTxtForAbOta(None, ['system', 'vendor'], image_paths)
+
+    care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
+    with open(care_map_file, 'r') as verify_fp:
+      care_map = verify_fp.read()
+
+    lines = care_map.split('\n')
+    self.assertEqual(4, len(lines))
+    self.assertEqual('system', lines[0])
+    self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
+    self.assertEqual('vendor', lines[2])
+    self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+
+  def test_AddCareMapTxtForAbOta_verityNotEnabled(self):
+    """No care_map.txt should be generated if verity not enabled."""
+    image_paths = self._test_AddCareMapTxtForAbOta()
+    OPTIONS.info_dict = {}
+    AddCareMapTxtForAbOta(None, ['system', 'vendor'], image_paths)
+
+    care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
+    self.assertFalse(os.path.exists(care_map_file))
+
+  def test_AddCareMapTxtForAbOta_missingImageFile(self):
+    """Missing image file should be considered fatal."""
+    image_paths = self._test_AddCareMapTxtForAbOta()
+    image_paths['vendor'] = ''
+    self.assertRaises(AssertionError, AddCareMapTxtForAbOta, None,
+                      ['system', 'vendor'], image_paths)
+
+  def test_AddCareMapTxtForAbOta_zipOutput(self):
+    """Tests the case with ZIP output."""
+    image_paths = self._test_AddCareMapTxtForAbOta()
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      AddCareMapTxtForAbOta(output_zip, ['system', 'vendor'], image_paths)
+
+    with zipfile.ZipFile(output_file, 'r') as verify_zip:
+      care_map = verify_zip.read('META/care_map.txt').decode('ascii')
+
+    lines = care_map.split('\n')
+    self.assertEqual(4, len(lines))
+    self.assertEqual('system', lines[0])
+    self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
+    self.assertEqual('vendor', lines[2])
+    self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+
+  def test_AddCareMapTxtForAbOta_zipOutput_careMapEntryExists(self):
+    """Tests the case with ZIP output which already has care_map entry."""
+    image_paths = self._test_AddCareMapTxtForAbOta()
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      # Create an existing META/care_map.txt entry.
+      common.ZipWriteStr(output_zip, 'META/care_map.txt', 'dummy care_map.txt')
+
+      # Request to add META/care_map.txt again.
+      AddCareMapTxtForAbOta(output_zip, ['system', 'vendor'], image_paths)
+
+    # The one under OPTIONS.input_tmp must have been replaced.
+    care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
+    with open(care_map_file, 'r') as verify_fp:
+      care_map = verify_fp.read()
+
+    lines = care_map.split('\n')
+    self.assertEqual(4, len(lines))
+    self.assertEqual('system', lines[0])
+    self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
+    self.assertEqual('vendor', lines[2])
+    self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+
+    # The existing entry should be scheduled to be replaced.
+    self.assertIn('META/care_map.txt', OPTIONS.replace_updated_files_list)
+
+  def test_GetCareMap(self):
+    sparse_image = test_utils.construct_sparse_image([
+        (0xCAC1, 6),
+        (0xCAC3, 4),
+        (0xCAC1, 6)])
+    OPTIONS.info_dict = {
+        'system_adjusted_partition_size' : 12,
+    }
+    name, care_map = GetCareMap('system', sparse_image)
+    self.assertEqual('system', name)
+    self.assertEqual(RangeSet("0-5 10-12").to_string_raw(), care_map)
+
+  def test_GetCareMap_invalidPartition(self):
+    self.assertRaises(AssertionError, GetCareMap, 'oem', None)
+
+  def test_GetCareMap_invalidAdjustedPartitionSize(self):
+    sparse_image = test_utils.construct_sparse_image([
+        (0xCAC1, 6),
+        (0xCAC3, 4),
+        (0xCAC1, 6)])
+    OPTIONS.info_dict = {
+        'system_adjusted_partition_size' : -12,
+    }
+    self.assertRaises(AssertionError, GetCareMap, 'system', sparse_image)
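For reference, the (chunk_type, block_count) tuples fed to
test_utils.construct_sparse_image() throughout the tests above use the Android
sparse-image chunk types (editor's annotation; the tuple layout is inferred
from the expected care maps, e.g. [(0xCAC1, 6), (0xCAC3, 4), (0xCAC1, 6)]
yielding "0-5 10-15"):

    CHUNK_TYPE_RAW = 0xCAC1        # raw data blocks; part of the care map
    CHUNK_TYPE_FILL = 0xCAC2       # fill blocks; also part of the care map
    CHUNK_TYPE_DONT_CARE = 0xCAC3  # holes; excluded from the care map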
diff --git a/tools/releasetools/test_blockimgdiff.py b/tools/releasetools/test_blockimgdiff.py
index 7084e21..ceada18 100644
--- a/tools/releasetools/test_blockimgdiff.py
+++ b/tools/releasetools/test_blockimgdiff.py
@@ -19,7 +19,8 @@
 import unittest
 
 import common
-from blockimgdiff import BlockImageDiff, EmptyImage, HeapItem, Transfer
+from blockimgdiff import (BlockImageDiff, EmptyImage, HeapItem, ImgdiffStats,
+                          Transfer)
 from rangelib import RangeSet
 
 
@@ -172,3 +173,102 @@
     # Insufficient cache to stash 15 blocks (size * 0.8 < 15).
     common.OPTIONS.cache_size = 15 * 4096
     self.assertEqual(15, block_image_diff.ReviseStashSize())
+
+  def test_FileTypeSupportedByImgdiff(self):
+    self.assertTrue(
+        BlockImageDiff.FileTypeSupportedByImgdiff(
+            "/system/priv-app/Settings/Settings.apk"))
+    self.assertTrue(
+        BlockImageDiff.FileTypeSupportedByImgdiff(
+            "/system/framework/am.jar"))
+    self.assertTrue(
+        BlockImageDiff.FileTypeSupportedByImgdiff(
+            "/system/etc/security/otacerts.zip"))
+
+    self.assertFalse(
+        BlockImageDiff.FileTypeSupportedByImgdiff(
+            "/system/framework/arm/boot.oat"))
+    self.assertFalse(
+        BlockImageDiff.FileTypeSupportedByImgdiff(
+            "/system/priv-app/notanapk"))
+
+  def test_CanUseImgdiff(self):
+    block_image_diff = BlockImageDiff(EmptyImage(), EmptyImage())
+    self.assertTrue(
+        block_image_diff.CanUseImgdiff(
+            "/system/app/app1.apk", RangeSet("10-15"), RangeSet("0-5")))
+    self.assertTrue(
+        block_image_diff.CanUseImgdiff(
+            "/vendor/app/app2.apk", RangeSet("20 25"), RangeSet("30-31"), True))
+
+    self.assertDictEqual(
+        {
+            ImgdiffStats.USED_IMGDIFF : {"/system/app/app1.apk"},
+            ImgdiffStats.USED_IMGDIFF_LARGE_APK : {"/vendor/app/app2.apk"},
+        },
+        block_image_diff.imgdiff_stats.stats)
+
+
+  def test_CanUseImgdiff_ineligible(self):
+    # Disabled by caller.
+    block_image_diff = BlockImageDiff(EmptyImage(), EmptyImage(),
+                                      disable_imgdiff=True)
+    self.assertFalse(
+        block_image_diff.CanUseImgdiff(
+            "/system/app/app1.apk", RangeSet("10-15"), RangeSet("0-5")))
+
+    # Unsupported file type.
+    block_image_diff = BlockImageDiff(EmptyImage(), EmptyImage())
+    self.assertFalse(
+        block_image_diff.CanUseImgdiff(
+            "/system/bin/gzip", RangeSet("10-15"), RangeSet("0-5")))
+
+    # At least one of the ranges is in non-monotonic order.
+    self.assertFalse(
+        block_image_diff.CanUseImgdiff(
+            "/system/app/app2.apk", RangeSet("10-15"),
+            RangeSet("15-20 30 10-14")))
+
+    # At least one of the ranges has been modified.
+    src_ranges = RangeSet("0-5")
+    src_ranges.extra['trimmed'] = True
+    self.assertFalse(
+        block_image_diff.CanUseImgdiff(
+            "/vendor/app/app3.apk", RangeSet("10-15"), src_ranges))
+
+    # At least one of the ranges is incomplete.
+    src_ranges = RangeSet("0-5")
+    src_ranges.extra['incomplete'] = True
+    self.assertFalse(
+        block_image_diff.CanUseImgdiff(
+            "/vendor/app/app4.apk", RangeSet("10-15"), src_ranges))
+
+    # The stats are correctly logged.
+    self.assertDictEqual(
+        {
+            ImgdiffStats.SKIPPED_NONMONOTONIC : {'/system/app/app2.apk'},
+            ImgdiffStats.SKIPPED_TRIMMED : {'/vendor/app/app3.apk'},
+            ImgdiffStats.SKIPPED_INCOMPLETE: {'/vendor/app/app4.apk'},
+        },
+        block_image_diff.imgdiff_stats.stats)
+
+
+class ImgdiffStatsTest(unittest.TestCase):
+
+  def test_Log(self):
+    imgdiff_stats = ImgdiffStats()
+    imgdiff_stats.Log("/system/app/app2.apk", ImgdiffStats.USED_IMGDIFF)
+    self.assertDictEqual(
+        {
+            ImgdiffStats.USED_IMGDIFF: {'/system/app/app2.apk'},
+        },
+        imgdiff_stats.stats)
+
+  def test_Log_invalidInputs(self):
+    imgdiff_stats = ImgdiffStats()
+
+    self.assertRaises(AssertionError, imgdiff_stats.Log, "/system/bin/gzip",
+                      ImgdiffStats.USED_IMGDIFF)
+
+    self.assertRaises(AssertionError, imgdiff_stats.Log, "/system/app/app1.apk",
+                      "invalid reason")
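The expectations in test_FileTypeSupportedByImgdiff above boil down to a simple
extension check; a sketch of the rule as inferred from these tests (an
assumption, not the actual blockimgdiff implementation):

    def file_type_supported_by_imgdiff(filename):
      # imgdiff only handles zip-based archives.
      return filename.lower().endswith(('.apk', '.jar', '.zip'))

    file_type_supported_by_imgdiff('/system/framework/am.jar')        # True
    file_type_supported_by_imgdiff('/system/framework/arm/boot.oat')  # False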
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 8fb4600..c073eba 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -13,7 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+
 import os
+import subprocess
 import tempfile
 import time
 import unittest
@@ -21,7 +23,10 @@
 from hashlib import sha1
 
 import common
+import test_utils
 import validate_target_files
+from rangelib import RangeSet
+
 
 KiB = 1024
 MiB = 1024 * KiB
@@ -398,6 +403,9 @@
       'Compressed4.apk' : 'certs/compressed4',
   }
 
+  def setUp(self):
+    self.testdata_dir = test_utils.get_testdata_dir()
+
   def tearDown(self):
     common.Cleanup()
 
@@ -474,6 +482,169 @@
     with zipfile.ZipFile(target_files, 'r') as input_zip:
       self.assertRaises(ValueError, common.ReadApkCerts, input_zip)
 
+  def test_ExtractPublicKey(self):
+    cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+    pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
+    with open(pubkey, 'rb') as pubkey_fp:
+      self.assertEqual(pubkey_fp.read(), common.ExtractPublicKey(cert))
+
+  def test_ExtractPublicKey_invalidInput(self):
+    wrong_input = os.path.join(self.testdata_dir, 'testkey.pk8')
+    self.assertRaises(AssertionError, common.ExtractPublicKey, wrong_input)
+
+  def test_ParseCertificate(self):
+    cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+
+    cmd = ['openssl', 'x509', '-in', cert, '-outform', 'DER']
+    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    expected, _ = proc.communicate()
+    self.assertEqual(0, proc.returncode)
+
+    with open(cert) as cert_fp:
+      actual = common.ParseCertificate(cert_fp.read())
+    self.assertEqual(expected, actual)
+
+
+class CommonUtilsTest(unittest.TestCase):
+
+  def tearDown(self):
+    common.Cleanup()
+
+  def test_GetSparseImage_emptyBlockMapFile(self):
+    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+      target_files_zip.write(
+          test_utils.construct_sparse_image([
+              (0xCAC1, 6),
+              (0xCAC3, 3),
+              (0xCAC1, 4)]),
+          arcname='IMAGES/system.img')
+      target_files_zip.writestr('IMAGES/system.map', '')
+      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
+      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+    tempdir, input_zip = common.UnzipTemp(target_files)
+    sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
+    input_zip.close()
+
+    self.assertDictEqual(
+        {
+            '__COPY': RangeSet("0"),
+            '__NONZERO-0': RangeSet("1-5 9-12"),
+        },
+        sparse_image.file_map)
+
+  def test_GetSparseImage_invalidImageName(self):
+    self.assertRaises(
+        AssertionError, common.GetSparseImage, 'system2', None, None, False)
+    self.assertRaises(
+        AssertionError, common.GetSparseImage, 'unknown', None, None, False)
+
+  def test_GetSparseImage_missingBlockMapFile(self):
+    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+      target_files_zip.write(
+          test_utils.construct_sparse_image([
+              (0xCAC1, 6),
+              (0xCAC3, 3),
+              (0xCAC1, 4)]),
+          arcname='IMAGES/system.img')
+      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
+      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+    tempdir, input_zip = common.UnzipTemp(target_files)
+    self.assertRaises(
+        AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
+        False)
+    input_zip.close()
+
+  def test_GetSparseImage_sharedBlocks_notAllowed(self):
+    """Tests the case of having overlapping blocks but disallowed."""
+    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+      target_files_zip.write(
+          test_utils.construct_sparse_image([(0xCAC2, 16)]),
+          arcname='IMAGES/system.img')
+      # Block 10 is shared between two files.
+      target_files_zip.writestr(
+          'IMAGES/system.map',
+          '\n'.join([
+              '/system/file1 1-5 9-10',
+              '/system/file2 10-12']))
+      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
+      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+    tempdir, input_zip = common.UnzipTemp(target_files)
+    self.assertRaises(
+        AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
+        False)
+    input_zip.close()
+
+  def test_GetSparseImage_sharedBlocks_allowed(self):
+    """Tests the case for target using BOARD_EXT4_SHARE_DUP_BLOCKS := true."""
+    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+      # Construct an image with a care_map of "0-5 9-12".
+      target_files_zip.write(
+          test_utils.construct_sparse_image([(0xCAC2, 16)]),
+          arcname='IMAGES/system.img')
+      # Block 10 is shared between two files.
+      target_files_zip.writestr(
+          'IMAGES/system.map',
+          '\n'.join([
+              '/system/file1 1-5 9-10',
+              '/system/file2 10-12']))
+      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
+      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+    tempdir, input_zip = common.UnzipTemp(target_files)
+    sparse_image = common.GetSparseImage('system', tempdir, input_zip, True)
+    input_zip.close()
+
+    self.assertDictEqual(
+        {
+            '__COPY': RangeSet("0"),
+            '__NONZERO-0': RangeSet("6-8 13-15"),
+            '/system/file1': RangeSet("1-5 9-10"),
+            '/system/file2': RangeSet("11-12"),
+        },
+        sparse_image.file_map)
+
+    # '/system/file2' should be marked with 'uses_shared_blocks', but not with
+    # 'incomplete'.
+    self.assertTrue(
+        sparse_image.file_map['/system/file2'].extra['uses_shared_blocks'])
+    self.assertNotIn(
+        'incomplete', sparse_image.file_map['/system/file2'].extra)
+
+    # All other entries should look normal without any tags.
+    self.assertFalse(sparse_image.file_map['__COPY'].extra)
+    self.assertFalse(sparse_image.file_map['__NONZERO-0'].extra)
+    self.assertFalse(sparse_image.file_map['/system/file1'].extra)
+
+  def test_GetSparseImage_incompleteRanges(self):
+    """Tests the case of ext4 images with holes."""
+    target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+      target_files_zip.write(
+          test_utils.construct_sparse_image([(0xCAC2, 16)]),
+          arcname='IMAGES/system.img')
+      target_files_zip.writestr(
+          'IMAGES/system.map',
+          '\n'.join([
+              '/system/file1 1-5 9-10',
+              '/system/file2 11-12']))
+      target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
+      # '/system/file2' has fewer blocks listed (2) than it actually has (3).
+      target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+    tempdir, input_zip = common.UnzipTemp(target_files)
+    sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
+    input_zip.close()
+
+    self.assertFalse(sparse_image.file_map['/system/file1'].extra)
+    self.assertTrue(sparse_image.file_map['/system/file2'].extra['incomplete'])
+
 
 class InstallRecoveryScriptFormatTest(unittest.TestCase):
   """Checks the format of install-recovery.sh.
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
new file mode 100644
index 0000000..ee5bc53
--- /dev/null
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -0,0 +1,829 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import copy
+import os
+import os.path
+import unittest
+import zipfile
+
+import common
+import test_utils
+from ota_from_target_files import (
+    _LoadOemDicts, BuildInfo, GetPackageMetadata,
+    GetTargetFilesZipForSecondaryImages,
+    GetTargetFilesZipWithoutPostinstallConfig,
+    Payload, PayloadSigner, POSTINSTALL_CONFIG,
+    WriteFingerprintAssertion)
+
+
+def construct_target_files(secondary=False):
+  """Returns a target-files.zip file for generating OTA packages."""
+  target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+  with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+    # META/update_engine_config.txt
+    target_files_zip.writestr(
+        'META/update_engine_config.txt',
+        "PAYLOAD_MAJOR_VERSION=2\nPAYLOAD_MINOR_VERSION=4\n")
+
+    # META/postinstall_config.txt
+    target_files_zip.writestr(
+        POSTINSTALL_CONFIG,
+        '\n'.join([
+            "RUN_POSTINSTALL_system=true",
+            "POSTINSTALL_PATH_system=system/bin/otapreopt_script",
+            "FILESYSTEM_TYPE_system=ext4",
+            "POSTINSTALL_OPTIONAL_system=true",
+        ]))
+
+    # META/ab_partitions.txt
+    ab_partitions = ['boot', 'system', 'vendor']
+    target_files_zip.writestr(
+        'META/ab_partitions.txt',
+        '\n'.join(ab_partitions))
+
+    # Create dummy images for each of them.
+    for partition in ab_partitions:
+      target_files_zip.writestr('IMAGES/' + partition + '.img',
+                                os.urandom(len(partition)))
+
+    if secondary:
+      target_files_zip.writestr('IMAGES/system_other.img',
+                                os.urandom(len("system_other")))
+
+  return target_files
+
+
+class MockScriptWriter(object):
+  """A class that mocks edify_generator.EdifyGenerator.
+
+  It simply pushes the incoming arguments onto the script stack, so that tests
+  can assert the calls made to EdifyGenerator functions.
+  """
+
+  def __init__(self):
+    self.script = []
+
+  def Mount(self, *args):
+    self.script.append(('Mount',) + args)
+
+  def AssertDevice(self, *args):
+    self.script.append(('AssertDevice',) + args)
+
+  def AssertOemProperty(self, *args):
+    self.script.append(('AssertOemProperty',) + args)
+
+  def AssertFingerprintOrThumbprint(self, *args):
+    self.script.append(('AssertFingerprintOrThumbprint',) + args)
+
+  def AssertSomeFingerprint(self, *args):
+    self.script.append(('AssertSomeFingerprint',) + args)
+
+  def AssertSomeThumbprint(self, *args):
+    self.script.append(('AssertSomeThumbprint',) + args)
+
+
+class BuildInfoTest(unittest.TestCase):
+
+  TEST_INFO_DICT = {
+      'build.prop' : {
+          'ro.product.device' : 'product-device',
+          'ro.product.name' : 'product-name',
+          'ro.build.fingerprint' : 'build-fingerprint',
+          'ro.build.foo' : 'build-foo',
+      },
+      'vendor.build.prop' : {
+          'ro.vendor.build.fingerprint' : 'vendor-build-fingerprint',
+      },
+      'property1' : 'value1',
+      'property2' : 4096,
+  }
+
+  TEST_INFO_DICT_USES_OEM_PROPS = {
+      'build.prop' : {
+          'ro.product.name' : 'product-name',
+          'ro.build.thumbprint' : 'build-thumbprint',
+          'ro.build.bar' : 'build-bar',
+      },
+      'vendor.build.prop' : {
+          'ro.vendor.build.fingerprint' : 'vendor-build-fingerprint',
+      },
+      'property1' : 'value1',
+      'property2' : 4096,
+      'oem_fingerprint_properties' : 'ro.product.device ro.product.brand',
+  }
+
+  TEST_OEM_DICTS = [
+      {
+          'ro.product.brand' : 'brand1',
+          'ro.product.device' : 'device1',
+      },
+      {
+          'ro.product.brand' : 'brand2',
+          'ro.product.device' : 'device2',
+      },
+      {
+          'ro.product.brand' : 'brand3',
+          'ro.product.device' : 'device3',
+      },
+  ]
+
+  def test_init(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    self.assertEqual('product-device', target_info.device)
+    self.assertEqual('build-fingerprint', target_info.fingerprint)
+    self.assertFalse(target_info.is_ab)
+    self.assertIsNone(target_info.oem_props)
+
+  def test_init_with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    self.assertEqual('device1', target_info.device)
+    self.assertEqual('brand1/product-name/device1:build-thumbprint',
+                     target_info.fingerprint)
+
+    # Swap the order in oem_dicts, which would lead to different BuildInfo.
+    oem_dicts = copy.copy(self.TEST_OEM_DICTS)
+    oem_dicts[0], oem_dicts[2] = oem_dicts[2], oem_dicts[0]
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS, oem_dicts)
+    self.assertEqual('device3', target_info.device)
+    self.assertEqual('brand3/product-name/device3:build-thumbprint',
+                     target_info.fingerprint)
+
+    # Missing oem_dict should be rejected.
+    self.assertRaises(AssertionError, BuildInfo,
+                      self.TEST_INFO_DICT_USES_OEM_PROPS, None)
+
+  def test___getitem__(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    self.assertEqual('value1', target_info['property1'])
+    self.assertEqual(4096, target_info['property2'])
+    self.assertEqual('build-foo', target_info['build.prop']['ro.build.foo'])
+
+  def test___getitem__with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    self.assertEqual('value1', target_info['property1'])
+    self.assertEqual(4096, target_info['property2'])
+    self.assertRaises(KeyError,
+                      lambda: target_info['build.prop']['ro.build.foo'])
+
+  def test_get(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    self.assertEqual('value1', target_info.get('property1'))
+    self.assertEqual(4096, target_info.get('property2'))
+    self.assertEqual(4096, target_info.get('property2', 1024))
+    self.assertEqual(1024, target_info.get('property-nonexistent', 1024))
+    self.assertEqual('build-foo', target_info.get('build.prop')['ro.build.foo'])
+
+  def test_get_with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    self.assertEqual('value1', target_info.get('property1'))
+    self.assertEqual(4096, target_info.get('property2'))
+    self.assertEqual(4096, target_info.get('property2', 1024))
+    self.assertEqual(1024, target_info.get('property-nonexistent', 1024))
+    self.assertIsNone(target_info.get('build.prop').get('ro.build.foo'))
+    self.assertRaises(KeyError,
+                      lambda: target_info.get('build.prop')['ro.build.foo'])
+
+  def test_GetBuildProp(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    self.assertEqual('build-foo', target_info.GetBuildProp('ro.build.foo'))
+    self.assertRaises(common.ExternalError, target_info.GetBuildProp,
+                      'ro.build.nonexistent')
+
+  def test_GetBuildProp_with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    self.assertEqual('build-bar', target_info.GetBuildProp('ro.build.bar'))
+    self.assertRaises(common.ExternalError, target_info.GetBuildProp,
+                      'ro.build.nonexistent')
+
+  def test_GetVendorBuildProp(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    self.assertEqual('vendor-build-fingerprint',
+                     target_info.GetVendorBuildProp(
+                         'ro.vendor.build.fingerprint'))
+    self.assertRaises(common.ExternalError, target_info.GetVendorBuildProp,
+                      'ro.build.nonexistent')
+
+  def test_GetVendorBuildProp_with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    self.assertEqual('vendor-build-fingerprint',
+                     target_info.GetVendorBuildProp(
+                         'ro.vendor.build.fingerprint'))
+    self.assertRaises(common.ExternalError, target_info.GetVendorBuildProp,
+                      'ro.build.nonexistent')
+
+  def test_WriteMountOemScript(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    script_writer = MockScriptWriter()
+    target_info.WriteMountOemScript(script_writer)
+    self.assertEqual([('Mount', '/oem', None)], script_writer.script)
+
+  def test_WriteDeviceAssertions(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    script_writer = MockScriptWriter()
+    target_info.WriteDeviceAssertions(script_writer, False)
+    self.assertEqual([('AssertDevice', 'product-device')], script_writer.script)
+
+  def test_WriteDeviceAssertions_with_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    script_writer = MockScriptWriter()
+    target_info.WriteDeviceAssertions(script_writer, False)
+    self.assertEqual(
+        [
+            ('AssertOemProperty', 'ro.product.device',
+             ['device1', 'device2', 'device3'], False),
+            ('AssertOemProperty', 'ro.product.brand',
+             ['brand1', 'brand2', 'brand3'], False),
+        ],
+        script_writer.script)
+
+  def test_WriteFingerprintAssertion_without_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    source_info_dict = copy.deepcopy(self.TEST_INFO_DICT)
+    source_info_dict['build.prop']['ro.build.fingerprint'] = (
+        'source-build-fingerprint')
+    source_info = BuildInfo(source_info_dict, None)
+
+    script_writer = MockScriptWriter()
+    WriteFingerprintAssertion(script_writer, target_info, source_info)
+    self.assertEqual(
+        [('AssertSomeFingerprint', 'source-build-fingerprint',
+          'build-fingerprint')],
+        script_writer.script)
+
+  def test_WriteFingerprintAssertion_with_source_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT, None)
+    source_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+
+    script_writer = MockScriptWriter()
+    WriteFingerprintAssertion(script_writer, target_info, source_info)
+    self.assertEqual(
+        [('AssertFingerprintOrThumbprint', 'build-fingerprint',
+          'build-thumbprint')],
+        script_writer.script)
+
+  def test_WriteFingerprintAssertion_with_target_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    source_info = BuildInfo(self.TEST_INFO_DICT, None)
+
+    script_writer = MockScriptWriter()
+    WriteFingerprintAssertion(script_writer, target_info, source_info)
+    self.assertEqual(
+        [('AssertFingerprintOrThumbprint', 'build-fingerprint',
+          'build-thumbprint')],
+        script_writer.script)
+
+  def test_WriteFingerprintAssertion_with_both_oem_props(self):
+    target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                            self.TEST_OEM_DICTS)
+    source_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
+    source_info_dict['build.prop']['ro.build.thumbprint'] = (
+        'source-build-thumbprint')
+    source_info = BuildInfo(source_info_dict, self.TEST_OEM_DICTS)
+
+    script_writer = MockScriptWriter()
+    WriteFingerprintAssertion(script_writer, target_info, source_info)
+    self.assertEqual(
+        [('AssertSomeThumbprint', 'build-thumbprint',
+          'source-build-thumbprint')],
+        script_writer.script)
+
+
+class LoadOemDictsTest(unittest.TestCase):
+
+  def tearDown(self):
+    common.Cleanup()
+
+  def test_NoneDict(self):
+    self.assertIsNone(_LoadOemDicts(None))
+
+  def test_SingleDict(self):
+    dict_file = common.MakeTempFile()
+    with open(dict_file, 'w') as dict_fp:
+      dict_fp.write('abc=1\ndef=2\nxyz=foo\na.b.c=bar\n')
+
+    oem_dicts = _LoadOemDicts([dict_file])
+    self.assertEqual(1, len(oem_dicts))
+    self.assertEqual('foo', oem_dicts[0]['xyz'])
+    self.assertEqual('bar', oem_dicts[0]['a.b.c'])
+
+  def test_MultipleDicts(self):
+    oem_source = []
+    for i in range(3):
+      dict_file = common.MakeTempFile()
+      with open(dict_file, 'w') as dict_fp:
+        dict_fp.write(
+            'ro.build.index={}\ndef=2\nxyz=foo\na.b.c=bar\n'.format(i))
+      oem_source.append(dict_file)
+
+    oem_dicts = _LoadOemDicts(oem_source)
+    self.assertEqual(3, len(oem_dicts))
+    for i, oem_dict in enumerate(oem_dicts):
+      self.assertEqual('2', oem_dict['def'])
+      self.assertEqual('foo', oem_dict['xyz'])
+      self.assertEqual('bar', oem_dict['a.b.c'])
+      self.assertEqual('{}'.format(i), oem_dict['ro.build.index'])
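The OEM dictionary files written by these tests are plain key=value property files. As a minimal sketch of that format (my own illustration, not code from the change; the real loader lives in the releasetools common code), parsing amounts to splitting each non-empty, non-comment line on the first '=':

def parse_prop_lines(lines):
  # Hypothetical helper: builds a dict from 'key=value' lines, skipping
  # blank lines and '#' comments, matching the fixtures above.
  props = {}
  for line in lines:
    line = line.strip()
    if not line or line.startswith('#'):
      continue
    key, _, value = line.partition('=')
    props[key] = value
  return props

assert parse_prop_lines(['abc=1', 'xyz=foo', 'a.b.c=bar'])['a.b.c'] == 'bar'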
+
+
+class OtaFromTargetFilesTest(unittest.TestCase):
+
+  TEST_TARGET_INFO_DICT = {
+      'build.prop' : {
+          'ro.product.device' : 'product-device',
+          'ro.build.fingerprint' : 'build-fingerprint-target',
+          'ro.build.version.incremental' : 'build-version-incremental-target',
+          'ro.build.version.sdk' : '27',
+          'ro.build.version.security_patch' : '2017-12-01',
+          'ro.build.date.utc' : '1500000000',
+      },
+  }
+
+  TEST_SOURCE_INFO_DICT = {
+      'build.prop' : {
+          'ro.product.device' : 'product-device',
+          'ro.build.fingerprint' : 'build-fingerprint-source',
+          'ro.build.version.incremental' : 'build-version-incremental-source',
+          'ro.build.version.sdk' : '25',
+          'ro.build.version.security_patch' : '2016-12-01',
+          'ro.build.date.utc' : '1400000000',
+      },
+  }
+
+  def setUp(self):
+    # Reset the global options as in ota_from_target_files.py.
+    common.OPTIONS.incremental_source = None
+    common.OPTIONS.downgrade = False
+    common.OPTIONS.timestamp = False
+    common.OPTIONS.wipe_user_data = False
+
+  def test_GetPackageMetadata_abOta_full(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    target_info_dict['ab_update'] = 'true'
+    target_info = BuildInfo(target_info_dict, None)
+    metadata = GetPackageMetadata(target_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'AB',
+            'ota-required-cache' : '0',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-sdk-level' : '27',
+            'post-security-patch-level' : '2017-12-01',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_abOta_incremental(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    target_info_dict['ab_update'] = 'true'
+    target_info = BuildInfo(target_info_dict, None)
+    source_info = BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
+    common.OPTIONS.incremental_source = ''
+    metadata = GetPackageMetadata(target_info, source_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'AB',
+            'ota-required-cache' : '0',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-sdk-level' : '27',
+            'post-security-patch-level' : '2017-12-01',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+            'pre-build' : 'build-fingerprint-source',
+            'pre-build-incremental' : 'build-version-incremental-source',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_nonAbOta_full(self):
+    target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+    metadata = GetPackageMetadata(target_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'BLOCK',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-sdk-level' : '27',
+            'post-security-patch-level' : '2017-12-01',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_nonAbOta_incremental(self):
+    target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+    source_info = BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
+    common.OPTIONS.incremental_source = ''
+    metadata = GetPackageMetadata(target_info, source_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'BLOCK',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-sdk-level' : '27',
+            'post-security-patch-level' : '2017-12-01',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+            'pre-build' : 'build-fingerprint-source',
+            'pre-build-incremental' : 'build-version-incremental-source',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_wipe(self):
+    target_info = BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+    common.OPTIONS.wipe_user_data = True
+    metadata = GetPackageMetadata(target_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'BLOCK',
+            'ota-wipe' : 'yes',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-sdk-level' : '27',
+            'post-security-patch-level' : '2017-12-01',
+            'post-timestamp' : '1500000000',
+            'pre-device' : 'product-device',
+        },
+        metadata)
+
+  @staticmethod
+  def _test_GetPackageMetadata_swapBuildTimestamps(target_info, source_info):
+    (target_info['build.prop']['ro.build.date.utc'],
+     source_info['build.prop']['ro.build.date.utc']) = (
+         source_info['build.prop']['ro.build.date.utc'],
+         target_info['build.prop']['ro.build.date.utc'])
+
+  def test_GetPackageMetadata_unintentionalDowngradeDetected(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+    self._test_GetPackageMetadata_swapBuildTimestamps(
+        target_info_dict, source_info_dict)
+
+    target_info = BuildInfo(target_info_dict, None)
+    source_info = BuildInfo(source_info_dict, None)
+    common.OPTIONS.incremental_source = ''
+    self.assertRaises(RuntimeError, GetPackageMetadata, target_info,
+                      source_info)
+
+  def test_GetPackageMetadata_downgrade(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+    self._test_GetPackageMetadata_swapBuildTimestamps(
+        target_info_dict, source_info_dict)
+
+    target_info = BuildInfo(target_info_dict, None)
+    source_info = BuildInfo(source_info_dict, None)
+    common.OPTIONS.incremental_source = ''
+    common.OPTIONS.downgrade = True
+    common.OPTIONS.wipe_user_data = True
+    metadata = GetPackageMetadata(target_info, source_info)
+    self.assertDictEqual(
+        {
+            'ota-downgrade' : 'yes',
+            'ota-type' : 'BLOCK',
+            'ota-wipe' : 'yes',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-sdk-level' : '27',
+            'post-security-patch-level' : '2017-12-01',
+            'pre-device' : 'product-device',
+            'pre-build' : 'build-fingerprint-source',
+            'pre-build-incremental' : 'build-version-incremental-source',
+        },
+        metadata)
+
+  def test_GetPackageMetadata_overrideTimestamp(self):
+    target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    source_info_dict = copy.deepcopy(self.TEST_SOURCE_INFO_DICT)
+    self._test_GetPackageMetadata_swapBuildTimestamps(
+        target_info_dict, source_info_dict)
+
+    target_info = BuildInfo(target_info_dict, None)
+    source_info = BuildInfo(source_info_dict, None)
+    common.OPTIONS.incremental_source = ''
+    common.OPTIONS.timestamp = True
+    metadata = GetPackageMetadata(target_info, source_info)
+    self.assertDictEqual(
+        {
+            'ota-type' : 'BLOCK',
+            'post-build' : 'build-fingerprint-target',
+            'post-build-incremental' : 'build-version-incremental-target',
+            'post-sdk-level' : '27',
+            'post-security-patch-level' : '2017-12-01',
+            'post-timestamp' : '1500000001',
+            'pre-device' : 'product-device',
+            'pre-build' : 'build-fingerprint-source',
+            'pre-build-incremental' : 'build-version-incremental-source',
+        },
+        metadata)
+
+  def test_GetTargetFilesZipForSecondaryImages(self):
+    input_file = construct_target_files(secondary=True)
+    target_file = GetTargetFilesZipForSecondaryImages(input_file)
+
+    with zipfile.ZipFile(target_file) as verify_zip:
+      namelist = verify_zip.namelist()
+
+    self.assertIn('META/ab_partitions.txt', namelist)
+    self.assertIn('IMAGES/boot.img', namelist)
+    self.assertIn('IMAGES/system.img', namelist)
+    self.assertIn('IMAGES/vendor.img', namelist)
+    self.assertIn(POSTINSTALL_CONFIG, namelist)
+
+    self.assertNotIn('IMAGES/system_other.img', namelist)
+    self.assertNotIn('IMAGES/system.map', namelist)
+
+  def test_GetTargetFilesZipForSecondaryImages_skipPostinstall(self):
+    input_file = construct_target_files(secondary=True)
+    target_file = GetTargetFilesZipForSecondaryImages(
+        input_file, skip_postinstall=True)
+
+    with zipfile.ZipFile(target_file) as verify_zip:
+      namelist = verify_zip.namelist()
+
+    self.assertIn('META/ab_partitions.txt', namelist)
+    self.assertIn('IMAGES/boot.img', namelist)
+    self.assertIn('IMAGES/system.img', namelist)
+    self.assertIn('IMAGES/vendor.img', namelist)
+
+    self.assertNotIn('IMAGES/system_other.img', namelist)
+    self.assertNotIn('IMAGES/system.map', namelist)
+    self.assertNotIn(POSTINSTALL_CONFIG, namelist)
+
+  def test_GetTargetFilesZipWithoutPostinstallConfig(self):
+    input_file = construct_target_files()
+    target_file = GetTargetFilesZipWithoutPostinstallConfig(input_file)
+    with zipfile.ZipFile(target_file) as verify_zip:
+      self.assertNotIn(POSTINSTALL_CONFIG, verify_zip.namelist())
+
+  def test_GetTargetFilesZipWithoutPostinstallConfig_missingEntry(self):
+    input_file = construct_target_files()
+    common.ZipDelete(input_file, POSTINSTALL_CONFIG)
+    target_file = GetTargetFilesZipWithoutPostinstallConfig(input_file)
+    with zipfile.ZipFile(target_file) as verify_zip:
+      self.assertNotIn(POSTINSTALL_CONFIG, verify_zip.namelist())
+
+
+class PayloadSignerTest(unittest.TestCase):
+
+  SIGFILE = 'sigfile.bin'
+  SIGNED_SIGFILE = 'signed-sigfile.bin'
+
+  def setUp(self):
+    self.testdata_dir = test_utils.get_testdata_dir()
+    self.assertTrue(os.path.exists(self.testdata_dir))
+
+    common.OPTIONS.payload_signer = None
+    common.OPTIONS.payload_signer_args = []
+    common.OPTIONS.package_key = os.path.join(self.testdata_dir, 'testkey')
+    common.OPTIONS.key_passwords = {
+        common.OPTIONS.package_key : None,
+    }
+
+  def tearDown(self):
+    common.Cleanup()
+
+  def _assertFilesEqual(self, file1, file2):
+    with open(file1, 'rb') as fp1, open(file2, 'rb') as fp2:
+      self.assertEqual(fp1.read(), fp2.read())
+
+  def test_init(self):
+    payload_signer = PayloadSigner()
+    self.assertEqual('openssl', payload_signer.signer)
+
+  def test_init_withPassword(self):
+    common.OPTIONS.package_key = os.path.join(
+        self.testdata_dir, 'testkey_with_passwd')
+    common.OPTIONS.key_passwords = {
+        common.OPTIONS.package_key : 'foo',
+    }
+    payload_signer = PayloadSigner()
+    self.assertEqual('openssl', payload_signer.signer)
+
+  def test_init_withExternalSigner(self):
+    common.OPTIONS.payload_signer = 'abc'
+    common.OPTIONS.payload_signer_args = ['arg1', 'arg2']
+    payload_signer = PayloadSigner()
+    self.assertEqual('abc', payload_signer.signer)
+    self.assertEqual(['arg1', 'arg2'], payload_signer.signer_args)
+
+  def test_Sign(self):
+    payload_signer = PayloadSigner()
+    input_file = os.path.join(self.testdata_dir, self.SIGFILE)
+    signed_file = payload_signer.Sign(input_file)
+
+    verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
+    self._assertFilesEqual(verify_file, signed_file)
+
+  def test_Sign_withExternalSigner_openssl(self):
+    """Uses openssl as the external payload signer."""
+    common.OPTIONS.payload_signer = 'openssl'
+    common.OPTIONS.payload_signer_args = [
+        'pkeyutl', '-sign', '-keyform', 'DER', '-inkey',
+        os.path.join(self.testdata_dir, 'testkey.pk8'),
+        '-pkeyopt', 'digest:sha256']
+    payload_signer = PayloadSigner()
+    input_file = os.path.join(self.testdata_dir, self.SIGFILE)
+    signed_file = payload_signer.Sign(input_file)
+
+    verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
+    self._assertFilesEqual(verify_file, signed_file)
+
+  def test_Sign_withExternalSigner_script(self):
+    """Uses testdata/payload_signer.sh as the external payload signer."""
+    common.OPTIONS.payload_signer = os.path.join(
+        self.testdata_dir, 'payload_signer.sh')
+    common.OPTIONS.payload_signer_args = [
+        os.path.join(self.testdata_dir, 'testkey.pk8')]
+    payload_signer = PayloadSigner()
+    input_file = os.path.join(self.testdata_dir, self.SIGFILE)
+    signed_file = payload_signer.Sign(input_file)
+
+    verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
+    self._assertFilesEqual(verify_file, signed_file)
+
+
+class PayloadTest(unittest.TestCase):
+
+  def setUp(self):
+    self.testdata_dir = test_utils.get_testdata_dir()
+    self.assertTrue(os.path.exists(self.testdata_dir))
+
+    common.OPTIONS.wipe_user_data = False
+    common.OPTIONS.payload_signer = None
+    common.OPTIONS.payload_signer_args = None
+    common.OPTIONS.package_key = os.path.join(self.testdata_dir, 'testkey')
+    common.OPTIONS.key_passwords = {
+        common.OPTIONS.package_key : None,
+    }
+
+  def tearDown(self):
+    common.Cleanup()
+
+  @staticmethod
+  def _create_payload_full(secondary=False):
+    target_file = construct_target_files(secondary)
+    payload = Payload(secondary)
+    payload.Generate(target_file)
+    return payload
+
+  @staticmethod
+  def _create_payload_incremental():
+    target_file = construct_target_files()
+    source_file = construct_target_files()
+    payload = Payload()
+    payload.Generate(target_file, source_file)
+    return payload
+
+  def test_Generate_full(self):
+    payload = self._create_payload_full()
+    self.assertTrue(os.path.exists(payload.payload_file))
+
+  def test_Generate_incremental(self):
+    payload = self._create_payload_incremental()
+    self.assertTrue(os.path.exists(payload.payload_file))
+
+  def test_Generate_additionalArgs(self):
+    target_file = construct_target_files()
+    source_file = construct_target_files()
+    payload = Payload()
+    # This should work the same as calling payload.Generate(target_file,
+    # source_file).
+    payload.Generate(
+        target_file, additional_args=["--source_image", source_file])
+    self.assertTrue(os.path.exists(payload.payload_file))
+
+  def test_Generate_invalidInput(self):
+    target_file = construct_target_files()
+    common.ZipDelete(target_file, 'IMAGES/vendor.img')
+    payload = Payload()
+    self.assertRaises(AssertionError, payload.Generate, target_file)
+
+  def test_Sign_full(self):
+    payload = self._create_payload_full()
+    payload.Sign(PayloadSigner())
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      payload.WriteToZip(output_zip)
+
+    import check_ota_package_signature
+    check_ota_package_signature.VerifyAbOtaPayload(
+        os.path.join(self.testdata_dir, 'testkey.x509.pem'),
+        output_file)
+
+  def test_Sign_incremental(self):
+    payload = self._create_payload_incremental()
+    payload.Sign(PayloadSigner())
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      payload.WriteToZip(output_zip)
+
+    import check_ota_package_signature
+    check_ota_package_signature.VerifyAbOtaPayload(
+        os.path.join(self.testdata_dir, 'testkey.x509.pem'),
+        output_file)
+
+  def test_Sign_withDataWipe(self):
+    common.OPTIONS.wipe_user_data = True
+    payload = self._create_payload_full()
+    payload.Sign(PayloadSigner())
+
+    with open(payload.payload_properties) as properties_fp:
+      self.assertIn("POWERWASH=1", properties_fp.read())
+
+  def test_Sign_secondary(self):
+    payload = self._create_payload_full(secondary=True)
+    payload.Sign(PayloadSigner())
+
+    with open(payload.payload_properties) as properties_fp:
+      self.assertIn("SWITCH_SLOT_ON_REBOOT=0", properties_fp.read())
+
+  def test_Sign_badSigner(self):
+    """Tests that signing failure can be captured."""
+    payload = self._create_payload_full()
+    payload_signer = PayloadSigner()
+    payload_signer.signer_args.append('bad-option')
+    self.assertRaises(AssertionError, payload.Sign, payload_signer)
+
+  def test_WriteToZip(self):
+    payload = self._create_payload_full()
+    payload.Sign(PayloadSigner())
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      payload.WriteToZip(output_zip)
+
+    with zipfile.ZipFile(output_file) as verify_zip:
+      # First make sure we have the essential entries.
+      namelist = verify_zip.namelist()
+      self.assertIn(Payload.PAYLOAD_BIN, namelist)
+      self.assertIn(Payload.PAYLOAD_PROPERTIES_TXT, namelist)
+
+      # Then assert these entries are stored.
+      for entry_info in verify_zip.infolist():
+        if entry_info.filename not in (Payload.PAYLOAD_BIN,
+                                       Payload.PAYLOAD_PROPERTIES_TXT):
+          continue
+        self.assertEqual(zipfile.ZIP_STORED, entry_info.compress_type)
+
+  def test_WriteToZip_unsignedPayload(self):
+    """Unsigned payloads should not be allowed to be written to zip."""
+    payload = self._create_payload_full()
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      self.assertRaises(AssertionError, payload.WriteToZip, output_zip)
+
+    # Also test with incremental payload.
+    payload = self._create_payload_incremental()
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      self.assertRaises(AssertionError, payload.WriteToZip, output_zip)
+
+  def test_WriteToZip_secondary(self):
+    payload = self._create_payload_full(secondary=True)
+    payload.Sign(PayloadSigner())
+
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output_file, 'w') as output_zip:
+      payload.WriteToZip(output_zip)
+
+    with zipfile.ZipFile(output_file) as verify_zip:
+      # First make sure we have the essential entries.
+      namelist = verify_zip.namelist()
+      self.assertIn(Payload.SECONDARY_PAYLOAD_BIN, namelist)
+      self.assertIn(Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT, namelist)
+
+      # Then assert these entries are stored.
+      for entry_info in verify_zip.infolist():
+        if entry_info.filename not in (
+            Payload.SECONDARY_PAYLOAD_BIN,
+            Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT):
+          continue
+        self.assertEqual(zipfile.ZIP_STORED, entry_info.compress_type)
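For reference, the stored (uncompressed) entries that the WriteToZip tests assert on can be produced with the standard zipfile module by passing compress_type explicitly. A minimal sketch under that assumption; the literal entry names are taken to be 'payload.bin' and 'payload_properties.txt', matching the Payload constants the tests reference:

import zipfile

with zipfile.ZipFile('ota.zip', 'w') as output_zip:
  # The payload entries are stored rather than deflated so they can be
  # read directly at their offsets within the package.
  output_zip.writestr('payload.bin', b'payload-bytes',
                      compress_type=zipfile.ZIP_STORED)
  output_zip.writestr('payload_properties.txt', b'POWERWASH=1\n',
                      compress_type=zipfile.ZIP_STORED)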
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 726d6b9..26f9e10 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -16,18 +16,27 @@
 
 from __future__ import print_function
 
-import tempfile
+import base64
+import os.path
 import unittest
 import zipfile
 
 import common
-from sign_target_files_apks import EditTags, ReplaceVerityKeyId, RewriteProps
+import test_utils
+from sign_target_files_apks import (
+    EditTags, ReplaceCerts, ReplaceVerityKeyId, RewriteProps)
 
 
 class SignTargetFilesApksTest(unittest.TestCase):
 
+  MAC_PERMISSIONS_XML = """<?xml version="1.0" encoding="iso-8859-1"?>
+<policy>
+  <signer signature="{}"><seinfo value="platform"/></signer>
+  <signer signature="{}"><seinfo value="media"/></signer>
+</policy>"""
+
   def setUp(self):
-    self.tempdir = common.MakeTempDir()
+    self.testdata_dir = test_utils.get_testdata_dir()
 
   def tearDown(self):
     common.Cleanup()
@@ -88,94 +97,31 @@
         "androidboot.hardware=marlin user_debug=31 ehci-hcd.park=3 "
         "lpm_levels.sleep_disabled=1 cma=32M@0-0xffffffff loop.max_part=7 "
         "buildvariant=userdebug "
-        "veritykeyid=id:485900563d272c46ae118605a47419ac09ca8c11\n")
+        "veritykeyid=id:d24f2590e9abab5cff5f59da4c4f0366e3f43e94\n")
 
-    # From build/target/product/security/verity.x509.pem.
-    VERITY_CERTIFICATE1 = """-----BEGIN CERTIFICATE-----
-MIID/TCCAuWgAwIBAgIJAJcPmDkJqolJMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
-VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
-VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
-AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
-Fw0xNDExMDYxOTA3NDBaFw00MjAzMjQxOTA3NDBaMIGUMQswCQYDVQQGEwJVUzET
-MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
-A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
-ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAOjreE0vTVSRenuzO9vnaWfk0eQzYab0gqpi
-6xAzi6dmD+ugoEKJmbPiuE5Dwf21isZ9uhUUu0dQM46dK4ocKxMRrcnmGxydFn6o
-fs3ODJMXOkv2gKXL/FdbEPdDbxzdu8z3yk+W67udM/fW7WbaQ3DO0knu+izKak/3
-T41c5uoXmQ81UNtAzRGzGchNVXMmWuTGOkg6U+0I2Td7K8yvUMWhAWPPpKLtVH9r
-AL5TzjYNR92izdKcz3AjRsI3CTjtpiVABGeX0TcjRSuZB7K9EK56HV+OFNS6I1NP
-jdD7FIShyGlqqZdUOkAUZYanbpgeT5N7QL6uuqcGpoTOkalu6kkCAwEAAaNQME4w
-HQYDVR0OBBYEFH5DM/m7oArf4O3peeKO0ZIEkrQPMB8GA1UdIwQYMBaAFH5DM/m7
-oArf4O3peeKO0ZIEkrQPMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
-AHO3NSvDE5jFvMehGGtS8BnFYdFKRIglDMc4niWSzhzOVYRH4WajxdtBWc5fx0ix
-NF/+hVKVhP6AIOQa+++sk+HIi7RvioPPbhjcsVlZe7cUEGrLSSveGouQyc+j0+m6
-JF84kszIl5GGNMTnx0XRPO+g8t6h5LWfnVydgZfpGRRg+WHewk1U2HlvTjIceb0N
-dcoJ8WKJAFWdcuE7VIm4w+vF/DYX/A2Oyzr2+QRhmYSv1cusgAeC1tvH4ap+J1Lg
-UnOu5Kh/FqPLLSwNVQp4Bu7b9QFfqK8Moj84bj88NqRGZgDyqzuTrFxn6FW7dmyA
-yttuAJAEAymk1mipd9+zp38=
------END CERTIFICATE-----
-"""
-
-    # From build/target/product/security/testkey.x509.pem.
-    VERITY_CERTIFICATE2 = """-----BEGIN CERTIFICATE-----
-MIIEqDCCA5CgAwIBAgIJAJNurL4H8gHfMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
-VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4g
-VmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UE
-AxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
-Fw0wODAyMjkwMTMzNDZaFw0zNTA3MTcwMTMzNDZaMIGUMQswCQYDVQQGEwJVUzET
-MBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4G
-A1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9p
-ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASAwDQYJKoZI
-hvcNAQEBBQADggENADCCAQgCggEBANaTGQTexgskse3HYuDZ2CU+Ps1s6x3i/waM
-qOi8qM1r03hupwqnbOYOuw+ZNVn/2T53qUPn6D1LZLjk/qLT5lbx4meoG7+yMLV4
-wgRDvkxyGLhG9SEVhvA4oU6Jwr44f46+z4/Kw9oe4zDJ6pPQp8PcSvNQIg1QCAcy
-4ICXF+5qBTNZ5qaU7Cyz8oSgpGbIepTYOzEJOmc3Li9kEsBubULxWBjf/gOBzAzU
-RNps3cO4JFgZSAGzJWQTT7/emMkod0jb9WdqVA2BVMi7yge54kdVMxHEa5r3b97s
-zI5p58ii0I54JiCUP5lyfTwE/nKZHZnfm644oLIXf6MdW2r+6R8CAQOjgfwwgfkw
-HQYDVR0OBBYEFEhZAFY9JyxGrhGGBaR0GawJyowRMIHJBgNVHSMEgcEwgb6AFEhZ
-AFY9JyxGrhGGBaR0GawJyowRoYGapIGXMIGUMQswCQYDVQQGEwJVUzETMBEGA1UE
-CBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMH
-QW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAG
-CSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbYIJAJNurL4H8gHfMAwGA1Ud
-EwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAHqvlozrUMRBBVEY0NqrrwFbinZa
-J6cVosK0TyIUFf/azgMJWr+kLfcHCHJsIGnlw27drgQAvilFLAhLwn62oX6snb4Y
-LCBOsVMR9FXYJLZW2+TcIkCRLXWG/oiVHQGo/rWuWkJgU134NDEFJCJGjDbiLCpe
-+ZTWHdcwauTJ9pUbo8EvHRkU3cYfGmLaLfgn9gP+pWA7LFQNvXwBnDa6sppCccEX
-31I828XzgXpJ4O+mDL1/dBd+ek8ZPUP0IgdyZm5MTYPhvVqGCHzzTy3sIeJFymwr
-sBbmg2OAUNLEMO6nwmocSdN2ClirfxqCzJOLSDE4QyS9BAH6EhY6UFcOaE0=
------END CERTIFICATE-----
-"""
-
-    input_file = tempfile.NamedTemporaryFile(
-        delete=False, suffix='.zip', dir=self.tempdir)
-    with zipfile.ZipFile(input_file.name, 'w') as input_zip:
+    input_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(input_file, 'w') as input_zip:
       input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE1)
 
     # Test with the first certificate.
-    cert_file = tempfile.NamedTemporaryFile(
-        delete=False, suffix='.x509.pem', dir=self.tempdir)
-    cert_file.write(VERITY_CERTIFICATE1)
-    cert_file.close()
+    cert_file = os.path.join(self.testdata_dir, 'verity.x509.pem')
 
-    output_file = tempfile.NamedTemporaryFile(
-        delete=False, suffix='.zip', dir=self.tempdir)
-    with zipfile.ZipFile(input_file.name, 'r') as input_zip, \
-         zipfile.ZipFile(output_file.name, 'w') as output_zip:
-      ReplaceVerityKeyId(input_zip, output_zip, cert_file.name)
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(input_file, 'r') as input_zip, \
+         zipfile.ZipFile(output_file, 'w') as output_zip:
+      ReplaceVerityKeyId(input_zip, output_zip, cert_file)
 
-    with zipfile.ZipFile(output_file.name) as output_zip:
+    with zipfile.ZipFile(output_file) as output_zip:
       self.assertEqual(BOOT_CMDLINE1, output_zip.read('BOOT/cmdline'))
 
     # Test with the second certificate.
-    with open(cert_file.name, 'w') as cert_file_fp:
-      cert_file_fp.write(VERITY_CERTIFICATE2)
+    cert_file = os.path.join(self.testdata_dir, 'testkey.x509.pem')
 
-    with zipfile.ZipFile(input_file.name, 'r') as input_zip, \
-         zipfile.ZipFile(output_file.name, 'w') as output_zip:
-      ReplaceVerityKeyId(input_zip, output_zip, cert_file.name)
+    with zipfile.ZipFile(input_file, 'r') as input_zip, \
+         zipfile.ZipFile(output_file, 'w') as output_zip:
+      ReplaceVerityKeyId(input_zip, output_zip, cert_file)
 
-    with zipfile.ZipFile(output_file.name) as output_zip:
+    with zipfile.ZipFile(output_file) as output_zip:
       self.assertEqual(BOOT_CMDLINE2, output_zip.read('BOOT/cmdline'))
 
   def test_ReplaceVerityKeyId_no_veritykeyid(self):
@@ -184,16 +130,84 @@
         "lpm_levels.sleep_disabled=1 msm_poweroff.download_mode=0 "
         "loop.max_part=7\n")
 
-    input_file = tempfile.NamedTemporaryFile(
-        delete=False, suffix='.zip', dir=self.tempdir)
-    with zipfile.ZipFile(input_file.name, 'w') as input_zip:
+    input_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(input_file, 'w') as input_zip:
       input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE)
 
-    output_file = tempfile.NamedTemporaryFile(
-        delete=False, suffix='.zip', dir=self.tempdir)
-    with zipfile.ZipFile(input_file.name, 'r') as input_zip, \
-         zipfile.ZipFile(output_file.name, 'w') as output_zip:
+    output_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(input_file, 'r') as input_zip, \
+         zipfile.ZipFile(output_file, 'w') as output_zip:
       ReplaceVerityKeyId(input_zip, output_zip, None)
 
-    with zipfile.ZipFile(output_file.name) as output_zip:
+    with zipfile.ZipFile(output_file) as output_zip:
       self.assertEqual(BOOT_CMDLINE, output_zip.read('BOOT/cmdline'))
+
+  def test_ReplaceCerts(self):
+    cert1_path = os.path.join(self.testdata_dir, 'platform.x509.pem')
+    with open(cert1_path) as cert1_fp:
+      cert1 = cert1_fp.read()
+    cert2_path = os.path.join(self.testdata_dir, 'media.x509.pem')
+    with open(cert2_path) as cert2_fp:
+      cert2 = cert2_fp.read()
+    cert3_path = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+    with open(cert3_path) as cert3_fp:
+      cert3 = cert3_fp.read()
+
+    # Replace cert1 with cert3.
+    input_xml = self.MAC_PERMISSIONS_XML.format(
+        base64.b16encode(common.ParseCertificate(cert1)).lower(),
+        base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+    output_xml = self.MAC_PERMISSIONS_XML.format(
+        base64.b16encode(common.ParseCertificate(cert3)).lower(),
+        base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+    common.OPTIONS.key_map = {
+        cert1_path[:-9] : cert3_path[:-9],
+    }
+
+    self.assertEqual(output_xml, ReplaceCerts(input_xml))
+
+  def test_ReplaceCerts_duplicateEntries(self):
+    cert1_path = os.path.join(self.testdata_dir, 'platform.x509.pem')
+    with open(cert1_path) as cert1_fp:
+      cert1 = cert1_fp.read()
+    cert2_path = os.path.join(self.testdata_dir, 'media.x509.pem')
+    with open(cert2_path) as cert2_fp:
+      cert2 = cert2_fp.read()
+
+    # Replace cert1 with cert2, which leads to duplicate entries.
+    input_xml = self.MAC_PERMISSIONS_XML.format(
+        base64.b16encode(common.ParseCertificate(cert1)).lower(),
+        base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+    common.OPTIONS.key_map = {
+        cert1_path[:-9] : cert2_path[:-9],
+    }
+    self.assertRaises(AssertionError, ReplaceCerts, input_xml)
+
+  def test_ReplaceCerts_skipNonExistentCerts(self):
+    cert1_path = os.path.join(self.testdata_dir, 'platform.x509.pem')
+    with open(cert1_path) as cert1_fp:
+      cert1 = cert1_fp.read()
+    cert2_path = os.path.join(self.testdata_dir, 'media.x509.pem')
+    with open(cert2_path) as cert2_fp:
+      cert2 = cert2_fp.read()
+    cert3_path = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+    with open(cert3_path) as cert3_fp:
+      cert3 = cert3_fp.read()
+
+    input_xml = self.MAC_PERMISSIONS_XML.format(
+        base64.b16encode(common.ParseCertificate(cert1)).lower(),
+        base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+    output_xml = self.MAC_PERMISSIONS_XML.format(
+        base64.b16encode(common.ParseCertificate(cert3)).lower(),
+        base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+    common.OPTIONS.key_map = {
+        cert1_path[:-9] : cert3_path[:-9],
+        'non-existent' : cert3_path[:-9],
+        cert2_path[:-9] : 'non-existent',
+    }
+    self.assertEqual(output_xml, ReplaceCerts(input_xml))
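The [:-9] slicing in these key_map entries strips the 9-character '.x509.pem' suffix, so OPTIONS.key_map is keyed by the certificate path prefix rather than the full filename. A trivial illustration with a made-up path:

cert_path = 'testdata/platform.x509.pem'
key_name = cert_path[:-9]          # len('.x509.pem') == 9
assert key_name == 'testdata/platform'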
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
new file mode 100644
index 0000000..e64355b
--- /dev/null
+++ b/tools/releasetools/test_utils.py
@@ -0,0 +1,96 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""
+Utils for running unittests.
+"""
+
+import os
+import os.path
+import struct
+
+import common
+
+
+def get_testdata_dir():
+  """Returns the testdata dir, in relative to the script dir."""
+  # The script dir is the one we want, which could be different from pwd.
+  current_dir = os.path.dirname(os.path.realpath(__file__))
+  return os.path.join(current_dir, 'testdata')
+
+
+def construct_sparse_image(chunks):
+  """Returns a sparse image file constructed from the given chunks.
+
+  From system/core/libsparse/sparse_format.h.
+  typedef struct sparse_header {
+    __le32 magic;  // 0xed26ff3a
+    __le16 major_version;  // (0x1) - reject images with higher major versions
+    __le16 minor_version;  // (0x0) - allow images with higher minor versions
+    __le16 file_hdr_sz;  // 28 bytes for first revision of the file format
+    __le16 chunk_hdr_sz;  // 12 bytes for first revision of the file format
+    __le32 blk_sz;  // block size in bytes, must be a multiple of 4 (4096)
+    __le32 total_blks;  // total blocks in the non-sparse output image
+    __le32 total_chunks;  // total chunks in the sparse input image
+    __le32 image_checksum;  // CRC32 checksum of the original data, counting
+                            // "don't care" as 0. Standard 802.3 polynomial,
+                            // use a Public Domain table implementation
+  } sparse_header_t;
+
+  typedef struct chunk_header {
+    __le16 chunk_type;  // 0xCAC1 -> raw; 0xCAC2 -> fill;
+                        // 0xCAC3 -> don't care
+    __le16 reserved1;
+    __le32 chunk_sz;  // in blocks in output image
+    __le32 total_sz;  // in bytes of chunk input file including chunk header
+                      // and data
+  } chunk_header_t;
+
+  Args:
+    chunks: A list of chunks to be written. Each entry should be a tuple of
+        (chunk_type, block_number).
+
+  Returns:
+    Filename of the created sparse image.
+  """
+  SPARSE_HEADER_MAGIC = 0xED26FF3A
+  SPARSE_HEADER_FORMAT = "<I4H4I"
+  CHUNK_HEADER_FORMAT = "<2H2I"
+
+  sparse_image = common.MakeTempFile(prefix='sparse-', suffix='.img')
+  with open(sparse_image, 'wb') as fp:
+    fp.write(struct.pack(
+        SPARSE_HEADER_FORMAT, SPARSE_HEADER_MAGIC, 1, 0, 28, 12, 4096,
+        sum(chunk[1] for chunk in chunks),
+        len(chunks), 0))
+
+    for chunk in chunks:
+      data_size = 0
+      if chunk[0] == 0xCAC1:
+        data_size = 4096 * chunk[1]
+      elif chunk[0] == 0xCAC2:
+        data_size = 4
+      elif chunk[0] == 0xCAC3:
+        pass
+      else:
+        assert False, "Unsupported chunk type: {}".format(chunk[0])
+
+      fp.write(struct.pack(
+          CHUNK_HEADER_FORMAT, chunk[0], 0, chunk[1], data_size + 12))
+      if data_size != 0:
+        fp.write(os.urandom(data_size))
+
+  return sparse_image
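A minimal usage sketch for construct_sparse_image, assuming the module can be imported from the releasetools directory (so that common is importable too); it simply round-trips the header fields documented in the docstring above:

import struct
import test_utils

# One raw chunk of 2 blocks followed by a don't-care chunk of 4 blocks.
image = test_utils.construct_sparse_image([(0xCAC1, 2), (0xCAC3, 4)])
with open(image, 'rb') as fp:
  magic, _, _, _, _, blk_sz, total_blks, total_chunks, _ = struct.unpack(
      '<I4H4I', fp.read(28))
assert magic == 0xED26FF3A
assert (blk_sz, total_blks, total_chunks) == (4096, 6, 2)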
diff --git a/tools/releasetools/testdata/media.x509.pem b/tools/releasetools/testdata/media.x509.pem
new file mode 100644
index 0000000..98cd443
--- /dev/null
+++ b/tools/releasetools/testdata/media.x509.pem
@@ -0,0 +1,27 @@
+-----BEGIN CERTIFICATE-----
+MIIEqDCCA5CgAwIBAgIJAPK5jmEjVyxOMA0GCSqGSIb3DQEBBAUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4g
+VmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UE
+AxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0wODA0MTUyMzQwNTdaFw0zNTA5MDEyMzQwNTdaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4G
+A1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASAwDQYJKoZI
+hvcNAQEBBQADggENADCCAQgCggEBAK4lDFoW75f8KGmsZRsyF8w2ug6GlkFo1YoE
+n0DOhYZxI6P/tPbZScM88to6BcI+rKpX2AOImxdZvPWefG8hiQriUIW37VaqYmwJ
+ie+czTY2LKDo0blgP9TYModnkmzMCQxot3Wuf/MJNMw2nvKFWiZn3wxmf9DHz12O
+umVYBnNzA7tiRybquu37cvB+16dqs8uaOBxLfc2AmxQNiR8AITvkAfWNagamHq3D
+qcLxxlZyhbCa4JNCpm+kIer5Ot91c6AowzHXBgGrOvfMhAM+znx3KjpbhrDb6dd3
+w6SKqYAe3O4ngVifRNnkETl5YAV2qZQQuoEJElna2YxsaP94S48CAQOjgfwwgfkw
+HQYDVR0OBBYEFMopPKqLwO0+VC7vQgWiv/K1fk11MIHJBgNVHSMEgcEwgb6AFMop
+PKqLwO0+VC7vQgWiv/K1fk11oYGapIGXMIGUMQswCQYDVQQGEwJVUzETMBEGA1UE
+CBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMH
+QW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAG
+CSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbYIJAPK5jmEjVyxOMAwGA1Ud
+EwQFMAMBAf8wDQYJKoZIhvcNAQEEBQADggEBAITelRbV5KhyF6c9qEhwSPUzc6X3
+M/OQ1hvfPMnlJRYlv8qnwxWcriddFyqa4eh21UWBJ6xUL2gpDdUQwAKdj1Hg7hVr
+e3tazbOUJBuOx4t05cQsXK+uFWyvW9GZojonUk2gct6743hGSlM2MLDk0P+34I7L
+cB+ttjecdEZ/bgDG7YiFlTgHkgOHVgB4csjjAHr0I6V6LKs6KChptkxLe9X8GH0K
+fiQVll1ark4Hpt91G0p16Xk8kYphK4HNC2KK7gFo3ETkexDTWTJghJ1q321yfcJE
+RMIh0/nsw2jK0HmZ8rgQW8HyDTjUEGbMFBHCV6lupDSfV0ZWVQfk6AIKGoE=
+-----END CERTIFICATE-----
diff --git a/tools/releasetools/testdata/payload_signer.sh b/tools/releasetools/testdata/payload_signer.sh
new file mode 100755
index 0000000..a44ef34
--- /dev/null
+++ b/tools/releasetools/testdata/payload_signer.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+# The script will be called with 'payload_signer.sh <key> -in <input> -out <output>'.
+openssl pkeyutl -sign -keyform DER -inkey $1 -pkeyopt digest:sha256 -in $3 -out $5
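The calling convention in the comment is the only contract the script relies on. As a hedged sketch, an equivalent invocation from Python would look like the following; the key and file names are placeholders:

import subprocess

# Mirrors 'payload_signer.sh <key> -in <input> -out <output>' from the
# comment above.
subprocess.check_call([
    './payload_signer.sh', 'testkey.pk8',
    '-in', 'sigfile.bin', '-out', 'signed-sigfile.bin'])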
diff --git a/tools/releasetools/testdata/platform.x509.pem b/tools/releasetools/testdata/platform.x509.pem
new file mode 100644
index 0000000..087f02e
--- /dev/null
+++ b/tools/releasetools/testdata/platform.x509.pem
@@ -0,0 +1,27 @@
+-----BEGIN CERTIFICATE-----
+MIIEqDCCA5CgAwIBAgIJALOZgIbQVs/6MA0GCSqGSIb3DQEBBAUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4g
+VmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UE
+AxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0wODA0MTUyMjQwNTBaFw0zNTA5MDEyMjQwNTBaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4G
+A1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASAwDQYJKoZI
+hvcNAQEBBQADggENADCCAQgCggEBAJx4BZKsDV04HN6qZezIpgBuNkgMbXIHsSAR
+vlCGOqvitV0Amt9xRtbyICKAx81Ne9smJDuKgGwms0sTdSOkkmgiSQTcAUk+fArP
+GgXIdPabA3tgMJ2QdNJCgOFrrSqHNDYZUer3KkgtCbIEsYdeEqyYwap3PWgAuer9
+5W1Yvtjo2hb5o2AJnDeoNKbf7be2tEoEngeiafzPLFSW8s821k35CjuNjzSjuqtM
+9TNxqydxmzulh1StDFP8FOHbRdUeI0+76TybpO35zlQmE1DsU1YHv2mi/0qgfbX3
+6iANCabBtJ4hQC+J7RGQiTqrWpGA8VLoL4WkV1PPX8GQccXuyCcCAQOjgfwwgfkw
+HQYDVR0OBBYEFE/koLPdnLop9x1yh8Tnw48ghsKZMIHJBgNVHSMEgcEwgb6AFE/k
+oLPdnLop9x1yh8Tnw48ghsKZoYGapIGXMIGUMQswCQYDVQQGEwJVUzETMBEGA1UE
+CBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMH
+QW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAG
+CSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbYIJALOZgIbQVs/6MAwGA1Ud
+EwQFMAMBAf8wDQYJKoZIhvcNAQEEBQADggEBAFclUbjZOh9z3g9tRp+G2tZwFAAp
+PIigzXzXeLc9r8wZf6t25iEuVsHHYc/EL9cz3lLFCuCIFM78CjtaGkNGBU2Cnx2C
+tCsgSL+ItdFJKe+F9g7dEtctVWV+IuPoXQTIMdYT0Zk4u4mCJH+jISVroS0dao+S
+6h2xw3Mxe6DAN/DRr/ZFrvIkl5+6bnoUvAJccbmBOM7z3fwFlhfPJIRc97QNY4L3
+J17XOElatuWTG5QhdlxJG3L7aOCA29tYwgKdNHyLMozkPvaosVUz7fvpib1qSN1L
+IC7alMarjdW4OZID2q4u1EYjLk/pvZYTlMYwDlE448/Shebk5INTjLixs1c=
+-----END CERTIFICATE-----
diff --git a/tools/releasetools/testdata/sigfile.bin b/tools/releasetools/testdata/sigfile.bin
new file mode 100644
index 0000000..8682216
Binary files differ
diff --git a/tools/releasetools/testdata/signed-sigfile.bin b/tools/releasetools/testdata/signed-sigfile.bin
new file mode 100644
index 0000000..86d2f9e
Binary files differ
diff --git a/tools/releasetools/testdata/testkey.pk8 b/tools/releasetools/testdata/testkey.pk8
new file mode 100644
index 0000000..99be291
--- /dev/null
+++ b/tools/releasetools/testdata/testkey.pk8
Binary files differ
diff --git a/tools/releasetools/testdata/testkey.pubkey.pem b/tools/releasetools/testdata/testkey.pubkey.pem
new file mode 100644
index 0000000..418ae60
--- /dev/null
+++ b/tools/releasetools/testdata/testkey.pubkey.pem
@@ -0,0 +1,9 @@
+-----BEGIN PUBLIC KEY-----
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvjvyO2LwWgmQNyq7z+xK
+04eg0t3AL4y2NhpAAOzVnFyCArFcFjLTGQDDvkbZP6N12O6+dwJoPLntnm9A+VnP
+IFFRHg0HUWSbHM+Qk8Jgv2/2AVkAUj5J1r9t4X+2WI0eRzJP15Zjn68pQKGmcyci
+ry0gbvmYvXL2ZUmTm56DmEfCUCRIY2IGJ/CcMnFeItVU0LxKsV5Mlt5BO0Vv/CV4
+EaiOLwyCnoZuUhYto7dHlO/47v/H9zhkJC54OA1dkD38EPgO5GnfhGFSNXQRmJDT
+XrFgd6O+QO4yUNX8lYP10MzimUpItZa05t68NADqwYl3T7nWzvuC9r4IqZDyPf21
+TQIDAQAB
+-----END PUBLIC KEY-----
diff --git a/tools/releasetools/testdata/testkey.x509.pem b/tools/releasetools/testdata/testkey.x509.pem
new file mode 100644
index 0000000..65c8085
--- /dev/null
+++ b/tools/releasetools/testdata/testkey.x509.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIJAN/FvjYzGNOKMA0GCSqGSIb3DQEBCwUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
+VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
+AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0xODAxMTgwMDM0NTFaFw00NTA2MDUwMDM0NTFaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
+A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAL478jti8FoJkDcqu8/sStOHoNLdwC+MtjYa
+QADs1ZxcggKxXBYy0xkAw75G2T+jddjuvncCaDy57Z5vQPlZzyBRUR4NB1FkmxzP
+kJPCYL9v9gFZAFI+Sda/beF/tliNHkcyT9eWY5+vKUChpnMnIq8tIG75mL1y9mVJ
+k5ueg5hHwlAkSGNiBifwnDJxXiLVVNC8SrFeTJbeQTtFb/wleBGoji8Mgp6GblIW
+LaO3R5Tv+O7/x/c4ZCQueDgNXZA9/BD4DuRp34RhUjV0EZiQ016xYHejvkDuMlDV
+/JWD9dDM4plKSLWWtObevDQA6sGJd0+51s77gva+CKmQ8j39tU0CAwEAAaNTMFEw
+HQYDVR0OBBYEFNJPJZDpq6tc/19Z2kxPA2bj9D6UMB8GA1UdIwQYMBaAFNJPJZDp
+q6tc/19Z2kxPA2bj9D6UMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQAD
+ggEBABSUG9qrwV3WcClDJwqkNLN4yeVVYzkRMGA8/XqOiYrW4zh0mKDLfr6OeU1C
+AKwZBLhhql59Po25r4gcwPiTN2DkoCfb3T59XG8J54PAgTQjIAZ3J+mGZplnmuD3
+wj+UGUpPe0qTr33ZPoJfwxVo4RVnOt/UCsIGXch0HS/BIdpechqP0w4rOHUbq6EA
+8UEi5irKSDOU9b/5rD/tX2f4nGwJlKQEHWrsj9LLKlaL7fX36ghoSxN/pBJOhedg
+/VjT6xbaEwfyhC6Zj9av5Xl7UdpYt+rBMroAGenz0OSxKhIphdcx4ZMhvfkBoYG9
+Crupdqe+kUsfg2RlPb5grQ3klMo=
+-----END CERTIFICATE-----
diff --git a/tools/releasetools/testdata/testkey_with_passwd.pk8 b/tools/releasetools/testdata/testkey_with_passwd.pk8
new file mode 100644
index 0000000..3d567de
--- /dev/null
+++ b/tools/releasetools/testdata/testkey_with_passwd.pk8
Binary files differ
diff --git a/tools/releasetools/testdata/testkey_with_passwd.x509.pem b/tools/releasetools/testdata/testkey_with_passwd.x509.pem
new file mode 100644
index 0000000..449396e
--- /dev/null
+++ b/tools/releasetools/testdata/testkey_with_passwd.x509.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIJANefUd3Piu0yMA0GCSqGSIb3DQEBCwUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
+VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
+AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0xODAxMTgwMDI3NDRaFw00NTA2MDUwMDI3NDRaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
+A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBALBoA4c+qCQKapQAVGclbousC5J/L0TNZJEd
+KSW2nzXUHIwgTQ3r82227xkIvjnqXMCsc0q3/N2gGKR4sHqA30JO9Dyfgsx1ISaR
+GXe5cG048m5U5snplQgvPovtah9ZyvwNPzWPYC3uceJaDxKQKwVdsV+mOWM6WmpQ
+bdLO37jxfytyAbzaz3sG5HA3FSB8rX/xDM6If18NsxSHpcjaOjZXC4Fg6wlp0klY
+5/qhFEdmieu2zQVelXjoJfKSku8tPa7kZeDU/F3uLUq/U/xvFk7NVsRV+QvYOdQK
+1QECc/3yv1TKNAN3huWTgzCX6bMHmi09Npw3MQaGY0oS34cH9x0CAwEAAaNTMFEw
+HQYDVR0OBBYEFNsJZ0n9Opeea0rVAzL+1jwkDKzPMB8GA1UdIwQYMBaAFNsJZ0n9
+Opeea0rVAzL+1jwkDKzPMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQAD
+ggEBAJ/bzIzA+NrYwPEv56XKf6Vuj81+M1rTHAsH9PqbOvJT7iM7aU7wAl6vmXAo
+DQtvKoOBMdIXprapwe0quHCQm7PGxg+RRegr+dcTSVJFv1plnODOBOEAVlEfFwuW
+Cz0USF2jrNq+4ciH5zPL1a31ONb1rMkxJXQ/tAi0x8m6tZz+jsbE0wO6qB80UmkA
+4WY2Tu/gnAvFpD8plkiU0EKwedBHAcaFFZkQp23MKsVZ3UBqsqzzfXDYV1Oa6rIy
+XIZpI2Gx75pvAb57T2ap/yl0DBEAu7Nmpll0GCsgeJVdy7tS4LNj96Quya3CHWQw
+WNTVuan0KZqwDIm4Xn1oHUFQ9vc=
+-----END CERTIFICATE-----
diff --git a/tools/releasetools/testdata/verity.x509.pem b/tools/releasetools/testdata/verity.x509.pem
new file mode 100644
index 0000000..86399c3
--- /dev/null
+++ b/tools/releasetools/testdata/verity.x509.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIID/TCCAuWgAwIBAgIJAJcPmDkJqolJMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
+VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
+AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0xNDExMDYxOTA3NDBaFw00MjAzMjQxOTA3NDBaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
+A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAOjreE0vTVSRenuzO9vnaWfk0eQzYab0gqpi
+6xAzi6dmD+ugoEKJmbPiuE5Dwf21isZ9uhUUu0dQM46dK4ocKxMRrcnmGxydFn6o
+fs3ODJMXOkv2gKXL/FdbEPdDbxzdu8z3yk+W67udM/fW7WbaQ3DO0knu+izKak/3
+T41c5uoXmQ81UNtAzRGzGchNVXMmWuTGOkg6U+0I2Td7K8yvUMWhAWPPpKLtVH9r
+AL5TzjYNR92izdKcz3AjRsI3CTjtpiVABGeX0TcjRSuZB7K9EK56HV+OFNS6I1NP
+jdD7FIShyGlqqZdUOkAUZYanbpgeT5N7QL6uuqcGpoTOkalu6kkCAwEAAaNQME4w
+HQYDVR0OBBYEFH5DM/m7oArf4O3peeKO0ZIEkrQPMB8GA1UdIwQYMBaAFH5DM/m7
+oArf4O3peeKO0ZIEkrQPMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AHO3NSvDE5jFvMehGGtS8BnFYdFKRIglDMc4niWSzhzOVYRH4WajxdtBWc5fx0ix
+NF/+hVKVhP6AIOQa+++sk+HIi7RvioPPbhjcsVlZe7cUEGrLSSveGouQyc+j0+m6
+JF84kszIl5GGNMTnx0XRPO+g8t6h5LWfnVydgZfpGRRg+WHewk1U2HlvTjIceb0N
+dcoJ8WKJAFWdcuE7VIm4w+vF/DYX/A2Oyzr2+QRhmYSv1cusgAeC1tvH4ap+J1Lg
+UnOu5Kh/FqPLLSwNVQp4Bu7b9QFfqK8Moj84bj88NqRGZgDyqzuTrFxn6FW7dmyA
+yttuAJAEAymk1mipd9+zp38=
+-----END CERTIFICATE-----
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index 4b34820..1b3eb73 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -23,40 +23,23 @@
    same check also applies to the vendor image if present.
 """
 
-import common
 import logging
 import os.path
 import re
-import sparse_img
 import sys
 
-
-def _GetImage(which, tmpdir):
-  assert which in ('system', 'vendor')
-
-  path = os.path.join(tmpdir, 'IMAGES', which + '.img')
-  mappath = os.path.join(tmpdir, 'IMAGES', which + '.map')
-
-  # Map file must exist (allowed to be empty).
-  assert os.path.exists(path) and os.path.exists(mappath)
-
-  clobbered_blocks = '0'
-  return sparse_img.SparseImage(path, mappath, clobbered_blocks)
+import common
 
 
 def _ReadFile(file_name, unpacked_name, round_up=False):
   """Constructs and returns a File object. Rounds up its size if needed."""
 
-  def RoundUpTo4K(value):
-    rounded_up = value + 4095
-    return rounded_up - (rounded_up % 4096)
-
   assert os.path.exists(unpacked_name)
   with open(unpacked_name, 'r') as f:
     file_data = f.read()
   file_size = len(file_data)
   if round_up:
-    file_size_rounded_up = RoundUpTo4K(file_size)
+    file_size_rounded_up = common.RoundUpTo4K(file_size)
     file_data += '\0' * (file_size_rounded_up - file_size)
   return common.File(file_name, file_data)
 
@@ -64,13 +47,13 @@
 def ValidateFileAgainstSha1(input_tmp, file_name, file_path, expected_sha1):
   """Check if the file has the expected SHA-1."""
 
-  logging.info('Validating the SHA-1 of {}'.format(file_name))
+  logging.info('Validating the SHA-1 of %s', file_name)
   unpacked_name = os.path.join(input_tmp, file_path)
   assert os.path.exists(unpacked_name)
   actual_sha1 = _ReadFile(file_name, unpacked_name, False).sha1
   assert actual_sha1 == expected_sha1, \
       'SHA-1 mismatches for {}. actual {}, expected {}'.format(
-      file_name, actual_sha1, expected_sha1)
+          file_name, actual_sha1, expected_sha1)
 
 
 def ValidateFileConsistency(input_zip, input_tmp):
@@ -78,33 +61,28 @@
 
   def CheckAllFiles(which):
     logging.info('Checking %s image.', which)
-    image = _GetImage(which, input_tmp)
+    image = common.GetSparseImage(which, input_tmp, input_zip)
     prefix = '/' + which
     for entry in image.file_map:
+      # Skip entries like '__NONZERO-0'.
       if not entry.startswith(prefix):
         continue
 
       # Read the blocks where the file resides. Note that it will contain the
       # bytes past the file length, which is expected to be padded with '\0's.
       ranges = image.file_map[entry]
+
+      incomplete = ranges.extra.get('incomplete', False)
+      if incomplete:
+        logging.warning('Skipping %s that has incomplete block list', entry)
+        continue
+
       blocks_sha1 = image.RangeSha1(ranges)
 
       # The filename under unpacked directory, such as SYSTEM/bin/sh.
       unpacked_name = os.path.join(
           input_tmp, which.upper(), entry[(len(prefix) + 1):])
       unpacked_file = _ReadFile(entry, unpacked_name, True)
-      file_size = unpacked_file.size
-
-      # block.map may contain less blocks, because mke2fs may skip allocating
-      # blocks if they contain all zeros. We can't reconstruct such a file from
-      # its block list. (Bug: 65213616)
-      if file_size > ranges.size() * 4096:
-        logging.warning(
-            'Skipping %s that has less blocks: file size %d-byte,'
-            ' ranges %s (%d-byte)', entry, file_size, ranges,
-            ranges.size() * 4096)
-        continue
-
       file_sha1 = unpacked_file.sha1
       assert blocks_sha1 == file_sha1, \
           'file: %s, range: %s, blocks_sha1: %s, file_sha1: %s' % (
@@ -147,10 +125,10 @@
 
   script_path = 'SYSTEM/bin/install-recovery.sh'
   if not os.path.exists(os.path.join(input_tmp, script_path)):
-    logging.info('{} does not exist in input_tmp'.format(script_path))
+    logging.info('%s does not exist in input_tmp', script_path)
     return
 
-  logging.info('Checking {}'.format(script_path))
+  logging.info('Checking %s', script_path)
   with open(os.path.join(input_tmp, script_path), 'r') as script:
     lines = script.read().strip().split('\n')
   assert len(lines) >= 6
@@ -168,7 +146,7 @@
     expected_recovery_sha1 = applypatch_argv[3].strip()
     assert expected_recovery_check_sha1 == expected_recovery_sha1
     ValidateFileAgainstSha1(input_tmp, 'recovery.img',
-        'SYSTEM/etc/recovery.img', expected_recovery_sha1)
+                            'SYSTEM/etc/recovery.img', expected_recovery_sha1)
   else:
     # We're patching boot.img to get recovery.img where bonus_args is optional
     if applypatch_argv[1] == "-b":
@@ -182,16 +160,17 @@
     boot_info = applypatch_argv[boot_info_index].strip().split(':')
     assert len(boot_info) == 4
     ValidateFileAgainstSha1(input_tmp, file_name='boot.img',
-        file_path='IMAGES/boot.img', expected_sha1=boot_info[3])
+                            file_path='IMAGES/boot.img',
+                            expected_sha1=boot_info[3])
 
     recovery_sha1_index = boot_info_index + 2
     expected_recovery_sha1 = applypatch_argv[recovery_sha1_index]
     assert expected_recovery_check_sha1 == expected_recovery_sha1
     ValidateFileAgainstSha1(input_tmp, file_name='recovery.img',
-        file_path='IMAGES/recovery.img',
-        expected_sha1=expected_recovery_sha1)
+                            file_path='IMAGES/recovery.img',
+                            expected_sha1=expected_recovery_sha1)
 
-  logging.info('Done checking {}'.format(script_path))
+  logging.info('Done checking %s', script_path)
 
 
 def main(argv):
diff --git a/tools/warn.py b/tools/warn.py
index 62feac3..01398be 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -509,6 +509,26 @@
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: Fields that can be null should be annotated @Nullable',
+     'patterns': [r".*: warning: \[FieldMissingNullable\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Method parameters that aren\'t checked for null shouldn\'t be annotated @Nullable',
+     'patterns': [r".*: warning: \[ParameterNotNullable\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Methods that can return null should be annotated @Nullable',
+     'patterns': [r".*: warning: \[ReturnMissingNullable\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Use parameter comments to document ambiguous literals',
+     'patterns': [r".*: warning: \[BooleanParameter\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: Field name is CONSTANT CASE, but field is not static and final',
      'patterns': [r".*: warning: \[ConstantField\] .+"]},
     {'category': 'java',
@@ -519,11 +539,21 @@
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: Use Java\'s utility functional interfaces instead of Function\u003cA, B> for primitive types.',
+     'patterns': [r".*: warning: \[LambdaFunctionalInterface\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: Prefer \'L\' to \'l\' for the suffix to long literals',
      'patterns': [r".*: warning: \[LongLiteralLowerCaseSuffix\] .+"]},
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: A private method that does not reference the enclosing instance can be static',
+     'patterns': [r".*: warning: \[MethodCanBeStatic\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: C-style array declarations should not be used',
      'patterns': [r".*: warning: \[MixedArrayDimensions\] .+"]},
     {'category': 'java',
@@ -539,11 +569,21 @@
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: Avoid having multiple unary operators acting on the same variable in a method call',
+     'patterns': [r".*: warning: \[MultipleUnaryOperatorsInMethodCall\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: Package names should match the directory they are declared in',
      'patterns': [r".*: warning: \[PackageLocation\] .+"]},
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: Non-standard parameter comment; prefer `/*paramName=*/ arg`',
+     'patterns': [r".*: warning: \[ParameterComment\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: Utility classes (only static members) are not designed to be instantiated and should be made noninstantiable with a default constructor.',
      'patterns': [r".*: warning: \[PrivateConstructorForUtilityClass\] .+"]},
     {'category': 'java',
@@ -554,11 +594,31 @@
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: The default case of a switch should appear at the end of the last statement group',
+     'patterns': [r".*: warning: \[SwitchDefault\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: Unchecked exceptions do not need to be declared in the method signature.',
      'patterns': [r".*: warning: \[ThrowsUncheckedException\] .+"]},
     {'category': 'java',
      'severity': Severity.LOW,
      'description':
+         'Java: Type parameters must be a single letter with an optional numeric suffix, or an UpperCamelCase name followed by the letter \'T\'.',
+     'patterns': [r".*: warning: \[TypeParameterNaming\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Constructors and methods with the same name should appear sequentially with no other code in between',
+     'patterns': [r".*: warning: \[UngroupedOverloads\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Unnecessary call to NullPointerTester#setDefault',
+     'patterns': [r".*: warning: \[UnnecessarySetDefault\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
          'Java: Using static imports for types is unnecessary',
      'patterns': [r".*: warning: \[UnnecessaryStaticImport\] .+"]},
     {'category': 'java',
@@ -567,6 +627,61 @@
          'Java: Wildcard imports, static or otherwise, should not be used',
      'patterns': [r".*: warning: \[WildcardImport\] .+"]},
     {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: ',
+     'patterns': [r".*: warning: \[RemoveFieldPrefixes\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Prefer assertThrows to ExpectedException',
+     'patterns': [r".*: warning: \[ExpectedExceptionMigration\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Logger instances are not constants -- they are mutable and have side effects -- and should not be named using CONSTANT CASE',
+     'patterns': [r".*: warning: \[LoggerVariableCase\] .+"]},
+    {'category': 'java',
+     'severity': Severity.LOW,
+     'description':
+         'Java: Prefer assertThrows to @Test(expected=...)',
+     'patterns': [r".*: warning: \[TestExceptionMigration\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Public fields must be final.',
+     'patterns': [r".*: warning: \[NonFinalPublicFields\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Private fields that are only assigned in the initializer should be made final.',
+     'patterns': [r".*: warning: \[PrivateFieldsNotAssigned\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Lists returned by methods should be immutable.',
+     'patterns': [r".*: warning: \[ReturnedListNotImmutable\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Parameters to log methods should not be generated by a call to String.format() or MessageFormat.format().',
+     'patterns': [r".*: warning: \[SaferLoggerFormat\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Parameters to log methods should not be generated by a call to toString(); see b/22986665.',
+     'patterns': [r".*: warning: \[SaferLoggerToString\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: A call to Binder.clearCallingIdentity() should be followed by Binder.restoreCallingIdentity() in a finally block. Otherwise the wrong Binder identity may be used by subsequent code.',
+     'patterns': [r".*: warning: \[BinderIdentityRestoredDangerously\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Classes extending PreferenceActivity must implement isValidFragment such that it does not unconditionally return true to prevent vulnerability to fragment injection attacks.',
+     'patterns': [r".*: warning: \[FragmentInjection\] .+"]},
+    {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
          'Java: Subclasses of Fragment must be instantiable via Class#newInstance(): the class must be public, static and have a public nullary constructor',
@@ -579,6 +694,26 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: A wakelock acquired with a timeout may be released by the system before calling `release`, even after checking `isHeld()`. If so, it will throw a RuntimeException. Please wrap in a try/catch block.',
+     'patterns': [r".*: warning: \[WakelockReleasedDangerously\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Arguments are in the wrong order or could be commented for clarity.',
+     'patterns': [r".*: warning: \[ArgumentSelectionDefectChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Arguments are swapped in assertEquals-like call',
+     'patterns': [r".*: warning: \[AssertEqualsArgumentOrderChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: An equality test between objects with incompatible types always returns false',
+     'patterns': [r".*: warning: \[EqualsIncompatibleType\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: @AssistedInject and @Inject should not be used on different constructors in the same class.',
      'patterns': [r".*: warning: \[AssistedInjectAndInjectOnConstructors\] .+"]},
     {'category': 'java',
@@ -604,11 +739,21 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: The ordering of parameters in overloaded methods should be as consistent as possible (when viewed from left to right)',
+     'patterns': [r".*: warning: \[InconsistentOverloads\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Double-checked locking on non-volatile fields is unsafe',
      'patterns': [r".*: warning: \[DoubleCheckedLocking\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Annotations should always be immutable',
+     'patterns': [r".*: warning: \[ImmutableAnnotationChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Enums should always be immutable',
      'patterns': [r".*: warning: \[ImmutableEnumChecker\] .+"]},
     {'category': 'java',
@@ -629,13 +774,13 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: A different potential argument is more similar to the name of the parameter than the existing argument; this may be an error',
-     'patterns': [r".*: warning: \[ArgumentParameterMismatch\] .+"]},
+         'Java: Assertions may be disabled at runtime and do not guarantee that execution will halt here; consider throwing an exception instead',
+     'patterns': [r".*: warning: \[AssertFalse\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: Assertions may be disabled at runtime and do not guarantee that execution will halt here; consider throwing an exception instead',
-     'patterns': [r".*: warning: \[AssertFalse\] .+"]},
+         'Java: This assertion throws an AssertionError if it fails, which will be caught by an enclosing try block.',
+     'patterns': [r".*: warning: \[AssertionFailureIgnored\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
@@ -664,23 +809,63 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Duration can be expressed more clearly with different units',
+     'patterns': [r".*: warning: \[CanonicalDuration\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Logging or rethrowing exceptions should usually be preferred to catching and calling printStackTrace',
+     'patterns': [r".*: warning: \[CatchAndPrintStackTrace\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Ignoring exceptions and calling fail() is unnecessary, and makes test output less useful',
+     'patterns': [r".*: warning: \[CatchFail\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Inner class is non-static but does not reference enclosing class',
      'patterns': [r".*: warning: \[ClassCanBeStatic\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: Class.newInstance() bypasses exception checking; prefer getConstructor().newInstance()',
+         'Java: Class.newInstance() bypasses exception checking; prefer getDeclaredConstructor().newInstance()',
      'patterns': [r".*: warning: \[ClassNewInstance\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: Implicit use of the platform default charset, which can result in e.g. non-ASCII characters being silently replaced with \'?\' in many environments',
-     'patterns': [r".*: warning: \[DefaultCharset\] .+"]},
+         'Java: The type of the array parameter of Collection.toArray needs to be compatible with the array type',
+     'patterns': [r".*: warning: \[CollectionToArraySafeParameter\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: This code, which counts elements using a loop, can be replaced by a simpler library method',
-     'patterns': [r".*: warning: \[ElementsCountedInLoop\] .+"]},
+         'Java: Collector.of() should not use state',
+     'patterns': [r".*: warning: \[CollectorShouldNotUseState\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Class should not implement both `Comparable` and `Comparator`',
+     'patterns': [r".*: warning: \[ComparableAndComparator\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Constructors should not invoke overridable methods.',
+     'patterns': [r".*: warning: \[ConstructorInvokesOverridable\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Constructors should not pass the \'this\' reference out in method invocations, since the object may not be fully constructed.',
+     'patterns': [r".*: warning: \[ConstructorLeaksThis\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: DateFormat is not thread-safe, and should not be used as a constant field.',
+     'patterns': [r".*: warning: \[DateFormatConstant\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Implicit use of the platform default charset, which can result in differing behavior between JVM executions or incorrect behavior if the encoding of the data source doesn\'t match expectations.',
+     'patterns': [r".*: warning: \[DefaultCharset\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
@@ -694,8 +879,13 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: An equality test between objects with incompatible types always returns false',
-     'patterns': [r".*: warning: \[EqualsIncompatibleType\] .+"]},
+         'Java: Calls to ExpectedException#expect should always be followed by exactly one statement.',
+     'patterns': [r".*: warning: \[ExpectedExceptionChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Switch case may fall through',
+     'patterns': [r".*: warning: \[FallThrough\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
@@ -704,26 +894,61 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Use parentheses to make the precedence explicit',
+     'patterns': [r".*: warning: \[FloatCast\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Floating point literal loses precision',
+     'patterns': [r".*: warning: \[FloatingPointLiteralPrecision\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Overloads will be ambiguous when passing lambda arguments',
      'patterns': [r".*: warning: \[FunctionalInterfaceClash\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Return value of methods returning Future must be checked. Ignoring returned Futures suppresses exceptions thrown from the code that completes the Future.',
+     'patterns': [r".*: warning: \[FutureReturnValueIgnored\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Calling getClass() on an enum may return a subclass of the enum type',
      'patterns': [r".*: warning: \[GetClassOnEnum\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Hiding fields of superclasses may cause confusion and errors',
+     'patterns': [r".*: warning: \[HidingField\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: This annotation has incompatible modifiers as specified by its @IncompatibleModifiers annotation',
      'patterns': [r".*: warning: \[IncompatibleModifiers\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: This for loop increments the same variable in the header and in the body',
+     'patterns': [r".*: warning: \[IncrementInForLoopAndHeader\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Please also override int read(byte[], int, int), otherwise multi-byte reads from this input stream are likely to be slow.',
      'patterns': [r".*: warning: \[InputStreamSlowMultibyteRead\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Casting inside an if block should be plausibly consistent with the instanceof type',
+     'patterns': [r".*: warning: \[InstanceOfAndCastMatchWrongType\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Expression of type int may overflow before being assigned to a long',
+     'patterns': [r".*: warning: \[IntLongMath\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Class should not implement both `Iterable` and `Iterator`',
      'patterns': [r".*: warning: \[IterableAndIterator\] .+"]},
     {'category': 'java',
@@ -734,16 +959,41 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Some JUnit4 construct cannot be used in a JUnit3 context. Convert your class to JUnit4 style to use them.',
+     'patterns': [r".*: warning: \[JUnit4ClassUsedInJUnit3\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Test class inherits from JUnit 3\'s TestCase but has JUnit 4 @Test annotations.',
      'patterns': [r".*: warning: \[JUnitAmbiguousTestClass\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: The Google Java Style Guide requires switch statements to have an explicit default',
+         'Java: Never reuse class names from java.lang',
+     'patterns': [r".*: warning: \[JavaLangClash\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Suggests alternatives to obsolete JDK classes.',
+     'patterns': [r".*: warning: \[JdkObsolete\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Assignment where a boolean expression was expected; use == if this assignment wasn\'t expected or add parentheses for clarity.',
+     'patterns': [r".*: warning: \[LogicalAssignment\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Switches on enum types should either handle all values, or have a default case.',
      'patterns': [r".*: warning: \[MissingCasesInEnumSwitch\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: The Google Java Style Guide requires that each switch statement includes a default statement group, even if it contains no code. (This requirement is lifted for any switch statement that covers all values of an enum.)',
+     'patterns': [r".*: warning: \[MissingDefault\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Not calling fail() when expecting an exception masks bugs',
      'patterns': [r".*: warning: \[MissingFail\] .+"]},
     {'category': 'java',
@@ -754,11 +1004,36 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: Compound assignments to bytes, shorts, chars, and floats hide dangerous casts',
+         'Java: Modifying a collection while iterating over it in a loop may cause a ConcurrentModificationException to be thrown.',
+     'patterns': [r".*: warning: \[ModifyCollectionInEnhancedForLoop\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Multiple calls to either parallel or sequential are unnecessary and cause confusion.',
+     'patterns': [r".*: warning: \[MultipleParallelOrSequentialCalls\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Constant field declarations should use the immutable type (such as ImmutableList) instead of the general collection interface type (such as List)',
+     'patterns': [r".*: warning: \[MutableConstantField\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Method return type should use the immutable type (such as ImmutableList) instead of the general collection interface type (such as List)',
+     'patterns': [r".*: warning: \[MutableMethodReturnType\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Compound assignments may hide dangerous casts',
      'patterns': [r".*: warning: \[NarrowingCompoundAssignment\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Nested instanceOf conditions of disjoint types create blocks of code that never execute',
+     'patterns': [r".*: warning: \[NestedInstanceOfConditions\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: This update of a volatile variable is non-atomic',
      'patterns': [r".*: warning: \[NonAtomicVolatileUpdate\] .+"]},
     {'category': 'java',
@@ -794,6 +1069,31 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: One should not call optional.get() inside an if statement that checks !optional.isPresent',
+     'patterns': [r".*: warning: \[OptionalNotPresent\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: String literal contains format specifiers, but is not passed to a format method',
+     'patterns': [r".*: warning: \[OrphanedFormatString\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: To return a custom message with a Throwable class, one should override getMessage() instead of toString() for Throwable.',
+     'patterns': [r".*: warning: \[OverrideThrowableToString\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Varargs doesn\'t agree for overridden method',
+     'patterns': [r".*: warning: \[Overrides\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Detects `/* name= */`-style comments on actual parameters where the name doesn\'t match the formal parameter',
+     'patterns': [r".*: warning: \[ParameterName\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Preconditions only accepts the %s placeholder in error message strings',
      'patterns': [r".*: warning: \[PreconditionsInvalidPlaceholder\] .+"]},
     {'category': 'java',
@@ -809,6 +1109,16 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: BugChecker has incorrect ProvidesFix tag, please update',
+     'patterns': [r".*: warning: \[ProvidesFix\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: reachabilityFence should always be called inside a finally block',
+     'patterns': [r".*: warning: \[ReachabilityFenceUsage\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Thrown exception is a subtype of another',
      'patterns': [r".*: warning: \[RedundantThrows\] .+"]},
     {'category': 'java',
@@ -824,8 +1134,18 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: A static variable or method should not be accessed from an object instance',
-     'patterns': [r".*: warning: \[StaticAccessedFromInstance\] .+"]},
+         'Java: Prefer the short-circuiting boolean operators \u0026\u0026 and || to \u0026 and |.',
+     'patterns': [r".*: warning: \[ShortCircuitBoolean\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: A static variable or method should be qualified with a class name, not expression',
+     'patterns': [r".*: warning: \[StaticQualifiedUsingExpression\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Streams that encapsulate a closeable resource should be closed using try-with-resources',
+     'patterns': [r".*: warning: \[StreamResourceLeak\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
@@ -834,13 +1154,43 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: String.split should never take only a single argument; it has surprising behavior',
+     'patterns': [r".*: warning: \[StringSplit\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Prefer Splitter to String.split',
+     'patterns': [r".*: warning: \[StringSplitter\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Using @Test(expected=...) is discouraged, since the test will pass if *any* statement in the test method throws the expected exception',
+     'patterns': [r".*: warning: \[TestExceptionChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Thread.join needs to be surrounded by a loop until it succeeds, as in Uninterruptibles.joinUninterruptibly.',
+     'patterns': [r".*: warning: \[ThreadJoinLoop\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: ThreadLocals should be stored in static fields',
+     'patterns': [r".*: warning: \[ThreadLocalUsage\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Three-letter time zone identifiers are deprecated, may be ambiguous, and might not do what you intend; the full IANA time zone ID should be used instead.',
+     'patterns': [r".*: warning: \[ThreeLetterTimeZoneID\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Truth Library assert is called on a constant.',
      'patterns': [r".*: warning: \[TruthConstantAsserts\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
-         'Java: An object is tested for equality to itself using Truth Libraries.',
-     'patterns': [r".*: warning: \[TruthSelfEquals\] .+"]},
+         'Java: Type parameter declaration overrides another type parameter already declared',
+     'patterns': [r".*: warning: \[TypeParameterShadowing\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
@@ -849,11 +1199,31 @@
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Creation of a Set/HashSet/HashMap of java.net.URL. equals() and hashCode() of java.net.URL class make blocking internet connections.',
+     'patterns': [r".*: warning: \[URLEqualsHashCode\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Switch handles all enum values; an explicit default case is unnecessary and defeats error checking for non-exhaustive switches.',
+     'patterns': [r".*: warning: \[UnnecessaryDefaultInEnumSwitch\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Finalizer may run before native code finishes execution',
+     'patterns': [r".*: warning: \[UnsafeFinalization\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Unsynchronized method overrides a synchronized method.',
      'patterns': [r".*: warning: \[UnsynchronizedOverridesSynchronized\] .+"]},
     {'category': 'java',
      'severity': Severity.MEDIUM,
      'description':
+         'Java: Java assert is used in test. For testing purposes Assert.* matchers should be used.',
+     'patterns': [r".*: warning: \[UseCorrectAssertInTests\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
          'Java: Non-constant variable missing @Var annotation',
      'patterns': [r".*: warning: \[Var\] .+"]},
     {'category': 'java',
@@ -862,6 +1232,151 @@
          'Java: Because of spurious wakeups, Object.wait() and Condition.await() must always be called in a loop',
      'patterns': [r".*: warning: \[WaitNotInLoop\] .+"]},
     {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Pluggable Type checker internal error',
+     'patterns': [r".*: warning: \[PluggableTypeChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Invalid message format-style format specifier ({0}), expected printf-style (%s)',
+     'patterns': [r".*: warning: \[FloggerMessageFormat\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Logger level check is already implied in the log() call. An explicit at[Level]().isEnabled() check is redundant.',
+     'patterns': [r".*: warning: \[FloggerRedundantIsEnabled\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Calling withCause(Throwable) with an inline allocated Throwable is discouraged. Consider using withStackTrace(StackSize) instead, and specifying a reduced stack size (e.g. SMALL, MEDIUM or LARGE) instead of FULL, to improve performance.',
+     'patterns': [r".*: warning: \[FloggerWithCause\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Use withCause to associate Exceptions with log statements',
+     'patterns': [r".*: warning: \[FloggerWithoutCause\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: No bug exists to track an ignored test',
+     'patterns': [r".*: warning: \[IgnoredTestWithoutBug\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: @Ignore is preferred to @Suppress for JUnit4 tests. @Suppress may silently fail in JUnit4 (that is, tests may run anyway.)',
+     'patterns': [r".*: warning: \[JUnit4SuppressWithoutIgnore\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Medium and large test classes should document why they are medium or large',
+     'patterns': [r".*: warning: \[JUnit4TestAttributeMissing\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: java.net.IDN implements the older IDNA2003 standard. Prefer com.google.i18n.Idn, which implements the newer UTS #46 standard',
+     'patterns': [r".*: warning: \[JavaNetIdn\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Consider requiring strict parsing on JodaDurationFlag instances. Before adjusting existing flags, check the documentation and your existing configuration to avoid crashes!',
+     'patterns': [r".*: warning: \[JodaDurationFlagStrictParsing\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Logging an exception and throwing it (or a new exception) for the same exceptional situation is an anti-pattern.',
+     'patterns': [r".*: warning: \[LogAndThrow\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: FormattingLogger uses wrong or mismatched format string',
+     'patterns': [r".*: warning: \[MisusedFormattingLogger\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Flags should be final',
+     'patterns': [r".*: warning: \[NonFinalFlag\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Reading a flag from a static field or initializer block will cause it to always receive the default value and will cause an IllegalFlagStateException if the flag is ever set.',
+     'patterns': [r".*: warning: \[StaticFlagUsage\] .+"]},
+    {'category': 'java',
+     'severity': Severity.MEDIUM,
+     'description':
+         'Java: Apps must use BuildCompat.isAtLeastO to check whether they\'re running on Android O',
+     'patterns': [r".*: warning: \[UnsafeSdkVersionCheck\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Logging tag cannot be longer than 23 characters.',
+     'patterns': [r".*: warning: \[LogTagLength\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Relative class name passed to ComponentName constructor',
+     'patterns': [r".*: warning: \[RelativeComponentName\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Explicitly enumerate all cases in switch statements for certain enum types.',
+     'patterns': [r".*: warning: \[EnumerateAllCasesInEnumSwitch\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Do not call assumeTrue(tester.getExperimentValueFor(...)). Use @RequireEndToEndTestExperiment instead.',
+     'patterns': [r".*: warning: \[JUnitAssumeExperiment\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: The accessed field or method is not visible here. Note that the default production visibility for @VisibleForTesting is Visibility.PRIVATE.',
+     'patterns': [r".*: warning: \[VisibleForTestingChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Detects errors encountered building Error Prone plugins',
+     'patterns': [r".*: warning: \[ErrorPronePluginCorrectness\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Parcelable CREATOR fields should be Creator\u003cT>',
+     'patterns': [r".*: warning: \[ParcelableCreatorType\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Enforce reflected Parcelables are kept by Proguard',
+     'patterns': [r".*: warning: \[ReflectedParcelable\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Any class that extends IntentService should have @Nullable notation on method onHandleIntent(@Nullable Intent intent) and handle the case if intent is null.',
+     'patterns': [r".*: warning: \[OnHandleIntentNullableChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: In many cases, randomUUID is not necessary, and it slows the performance, which can be quite severe especially when this operation happens at start up time. Consider replacing it with cheaper alternatives, like object.hashCode() or IdGenerator.INSTANCE.getRandomId()',
+     'patterns': [r".*: warning: \[UUIDChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: DynamicActivity.findViewById(int) is slow and should not be used inside View.onDraw(Canvas)!',
+     'patterns': [r".*: warning: \[NoFindViewByIdInOnDrawChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Passing Throwable/Exception argument to the message format L.x(). Calling L.w(tag, message, ex) instead of L.w(tag, ex, message)',
+     'patterns': [r".*: warning: \[WrongThrowableArgumentInLogChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: New splicers are disallowed on paths that are being Libsearched',
+     'patterns': [r".*: warning: \[BlacklistedSplicerPathChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Object serialized in Bundle may have been flattened to base type.',
+     'patterns': [r".*: warning: \[BundleDeserializationCast\] .+"]},
+    {'category': 'java',
      'severity': Severity.HIGH,
      'description':
          'Java: Log tag too long, cannot exceed 23 characters.',
@@ -879,11 +1394,6 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Static and default methods in interfaces are not allowed in android builds.',
-     'patterns': [r".*: warning: \[StaticOrDefaultInterfaceMethod\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
          'Java: Incompatible type as argument to Object-accepting Java collections method',
      'patterns': [r".*: warning: \[CollectionIncompatibleType\] .+"]},
     {'category': 'java',
@@ -909,113 +1419,8 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: @AssistedInject and @Inject cannot be used on the same constructor.',
-     'patterns': [r".*: warning: \[AssistedInjectAndInjectOnSameConstructor\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: @AutoFactory and @Inject should not be used in the same type.',
-     'patterns': [r".*: warning: \[AutoFactoryAtInject\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Injected constructors cannot be optional nor have binding annotations',
-     'patterns': [r".*: warning: \[InjectedConstructorAnnotations\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: A scoping annotation\'s Target should include TYPE and METHOD.',
-     'patterns': [r".*: warning: \[InjectInvalidTargetingOnScopingAnnotation\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Abstract and default methods are not injectable with javax.inject.Inject',
-     'patterns': [r".*: warning: \[JavaxInjectOnAbstractMethod\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: @javax.inject.Inject cannot be put on a final field.',
-     'patterns': [r".*: warning: \[JavaxInjectOnFinalField\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: This class has more than one @Inject-annotated constructor. Please remove the @Inject annotation from all but one of them.',
-     'patterns': [r".*: warning: \[MoreThanOneInjectableConstructor\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Using more than one qualifier annotation on the same element is not allowed.',
-     'patterns': [r".*: warning: \[InjectMoreThanOneQualifier\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: A class can be annotated with at most one scope annotation.',
-     'patterns': [r".*: warning: \[InjectMoreThanOneScopeAnnotationOnClass\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Annotations cannot be both Scope annotations and Qualifier annotations: this causes confusion when trying to use them.',
-     'patterns': [r".*: warning: \[OverlappingQualifierAndScopeAnnotation\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Qualifier applied to a method that isn\'t a @Provides method. This method won\'t be used for dependency injection',
-     'patterns': [r".*: warning: \[QualifierOnMethodWithoutProvides\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Scope annotation on an interface or abstact class is not allowed',
-     'patterns': [r".*: warning: \[InjectScopeAnnotationOnInterfaceOrAbstractClass\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Scoping and qualifier annotations must have runtime retention.',
-     'patterns': [r".*: warning: \[InjectScopeOrQualifierAnnotationRetention\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: `@Multibinds` is the new way to declare multibindings.',
-     'patterns': [r".*: warning: \[MultibindsInsteadOfMultibindings\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Dagger @Provides methods may not return null unless annotated with @Nullable',
-     'patterns': [r".*: warning: \[DaggerProvidesNull\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Scope annotation on implementation class of AssistedInject factory is not allowed',
-     'patterns': [r".*: warning: \[GuiceAssistedInjectScoping\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: A constructor cannot have two @Assisted parameters of the same type unless they are disambiguated with named @Assisted annotations.',
-     'patterns': [r".*: warning: \[GuiceAssistedParameters\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Although Guice allows injecting final fields, doing so is disallowed because the injected value may not be visible to other threads.',
-     'patterns': [r".*: warning: \[GuiceInjectOnFinalField\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: This method is not annotated with @Inject, but it overrides a method that is  annotated with @javax.inject.Inject. The method will not be Injected.',
-     'patterns': [r".*: warning: \[OverridesJavaxInjectableMethod\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: @Provides methods need to be declared in a Module to have any effect.',
-     'patterns': [r".*: warning: \[ProvidesMethodOutsideOfModule\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
          'Java: Checks for unguarded accesses to fields and methods with @GuardedBy annotations',
-     'patterns': [r".*: warning: \[GuardedByChecker\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
-         'Java: Invalid @GuardedBy expression',
-     'patterns': [r".*: warning: \[GuardedByValidator\] .+"]},
+     'patterns': [r".*: warning: \[GuardedBy\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
@@ -1034,13 +1439,13 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: An argument is more similar to a different parameter; the arguments may have been swapped.',
-     'patterns': [r".*: warning: \[ArgumentParameterSwap\] .+"]},
+         'Java: Reference equality used to compare arrays',
+     'patterns': [r".*: warning: \[ArrayEquals\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Reference equality used to compare arrays',
-     'patterns': [r".*: warning: \[ArrayEquals\] .+"]},
+         'Java: Arrays.fill(Object[], Object) called with incompatible types.',
+     'patterns': [r".*: warning: \[ArrayFillIncompatibleType\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
@@ -1089,6 +1494,11 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java:  Implementing \'Comparable\u003cT>\' where T is not compatible with the implementing class.',
+     'patterns': [r".*: warning: \[ComparableType\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: This comparison method violates the contract',
      'patterns': [r".*: warning: \[ComparisonContractViolated\] .+"]},
     {'category': 'java',
@@ -1104,6 +1514,16 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Non-trivial compile time constant boolean expressions shouldn\'t be used.',
+     'patterns': [r".*: warning: \[ComplexBooleanConstant\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: A conditional expression with numeric operands of differing types will perform binary numeric promotion of the operands; when these operands are of reference types, the expression\'s result may not be of the expected type.',
+     'patterns': [r".*: warning: \[ConditionalExpressionNumericPromotion\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Compile-time constant expression overflows',
      'patterns': [r".*: warning: \[ConstantOverflow\] .+"]},
     {'category': 'java',
@@ -1114,11 +1534,21 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Thread created but not started',
+     'patterns': [r".*: warning: \[DeadThread\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Division by integer literal zero',
      'patterns': [r".*: warning: \[DivZero\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: This method should not be called.',
+     'patterns': [r".*: warning: \[DoNotCall\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Empty statement after if',
      'patterns': [r".*: warning: \[EmptyIf\] .+"]},
     {'category': 'java',
@@ -1129,7 +1559,12 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Method annotated @ForOverride must be protected or package-private and only invoked from declaring class',
+         'Java: == must be used in equals method to check equality to itself or an infinite loop will occur.',
+     'patterns': [r".*: warning: \[EqualsReference\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Method annotated @ForOverride must be protected or package-private and only invoked from declaring class, or from an override of the method',
      'patterns': [r".*: warning: \[ForOverride\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
@@ -1144,6 +1579,11 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: DoubleMath.fuzzyEquals should never be used in an Object.equals() method',
+     'patterns': [r".*: warning: \[FuzzyEqualsShouldNotBeUsedInEqualsMethod\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Calling getClass() on an annotation may return a proxy class',
      'patterns': [r".*: warning: \[GetClassOnAnnotation\] .+"]},
     {'category': 'java',
@@ -1154,17 +1594,12 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: An object is tested for equality to itself using Guava Libraries',
-     'patterns': [r".*: warning: \[GuavaSelfEquals\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
          'Java: contains() is a legacy method that is equivalent to containsValue()',
      'patterns': [r".*: warning: \[HashtableContains\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Writing "a && a", "a || a", "a & a", or "a | a" is equivalent to "a".',
+         'Java: A binary expression where both operands are the same is usually incorrect.',
      'patterns': [r".*: warning: \[IdentityBinaryExpression\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
@@ -1174,6 +1609,16 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: The first argument to indexOf is a Unicode code point, and the second is the index to start the search from',
+     'patterns': [r".*: warning: \[IndexOfChar\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Conditional expression in varargs call contains array and non-array arguments',
+     'patterns': [r".*: warning: \[InexactVarargsConditional\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: This method always recurses, and will cause a StackOverflowError',
      'patterns': [r".*: warning: \[InfiniteRecursion\] .+"]},
     {'category': 'java',
@@ -1189,36 +1634,71 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Invalid time zone identifier. TimeZone.getTimeZone(String) will silently return GMT instead of the time zone you intended.',
+     'patterns': [r".*: warning: \[InvalidTimeZoneID\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: The argument to Class#isInstance(Object) should not be a Class',
      'patterns': [r".*: warning: \[IsInstanceOfClass\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Path implements Iterable\u003cPath>; prefer Collection\u003cPath> for clarity',
+     'patterns': [r".*: warning: \[IterablePathParameter\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: jMock tests must have a @RunWith(JMock.class) annotation, or the Mockery field must have a @Rule JUnit annotation',
      'patterns': [r".*: warning: \[JMockTestWithoutRunWithOrRuleAnnotation\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Test method will not be run; please prefix name with "test"',
+         'Java: Test method will not be run; please correct method signature (Should be public, non-static, and method name should begin with "test").',
      'patterns': [r".*: warning: \[JUnit3TestNotRun\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: setUp() method will not be run; Please add a @Before annotation',
+         'Java: This method should be static',
+     'patterns': [r".*: warning: \[JUnit4ClassAnnotationNonStatic\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: setUp() method will not be run; please add JUnit\'s @Before annotation',
      'patterns': [r".*: warning: \[JUnit4SetUpNotRun\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: tearDown() method will not be run; Please add an @After annotation',
+         'Java: tearDown() method will not be run; please add JUnit\'s @After annotation',
      'patterns': [r".*: warning: \[JUnit4TearDownNotRun\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Test method will not be run; please add @Test annotation',
+         'Java: This looks like a test method but is not run; please add @Test or @Ignore, or, if this is a helper method, reduce its visibility.',
      'patterns': [r".*: warning: \[JUnit4TestNotRun\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: An object is tested for reference equality to itself using JUnit library.',
+     'patterns': [r".*: warning: \[JUnitAssertSameCheck\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: This pattern will silently corrupt certain byte sequences from the serialized protocol message. Use ByteString or byte[] directly',
+     'patterns': [r".*: warning: \[LiteByteStringUtf8\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Loop condition is never modified in loop body.',
+     'patterns': [r".*: warning: \[LoopConditionChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Overriding method is missing a call to overridden super method',
+     'patterns': [r".*: warning: \[MissingSuperCall\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Use of "YYYY" (week year) in a date pattern without "ww" (week in year). You probably meant to use "yyyy" (year) instead.',
      'patterns': [r".*: warning: \[MisusedWeekYear\] .+"]},
     {'category': 'java',
@@ -1239,6 +1719,16 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: The result of this method must be closed.',
+     'patterns': [r".*: warning: \[MustBeClosedChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: The first argument to nCopies is the number of copies, and the second is the item to copy',
+     'patterns': [r".*: warning: \[NCopiesOfChar\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: @NoAllocation was specified on this method, but something was found that would trigger an allocation',
      'patterns': [r".*: warning: \[NoAllocation\] .+"]},
     {'category': 'java',
@@ -1259,6 +1749,11 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: This conditional expression may evaluate to null, which will result in an NPE when the result is unboxed.',
+     'patterns': [r".*: warning: \[NullTernary\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Numeric comparison using reference equality instead of value equality',
      'patterns': [r".*: warning: \[NumericEquality\] .+"]},
     {'category': 'java',
@@ -1269,11 +1764,6 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Varargs doesn\'t agree for overridden method',
-     'patterns': [r".*: warning: \[Overrides\] .+"]},
-    {'category': 'java',
-     'severity': Severity.HIGH,
-     'description':
          'Java: Declaring types inside package-info.java files is very bad form',
      'patterns': [r".*: warning: \[PackageInfo\] .+"]},
     {'category': 'java',
@@ -1289,6 +1779,16 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Using ::equals as an incompatible Predicate; the predicate will always return false',
+     'patterns': [r".*: warning: \[PredicateIncompatibleType\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Access to a private protocol buffer field is forbidden. This protocol buffer carries a security contract, and can only be created using an approved library. Direct access to the fields is forbidden.',
+     'patterns': [r".*: warning: \[PrivateSecurityContractProtoAccess\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Protobuf fields cannot be null',
      'patterns': [r".*: warning: \[ProtoFieldNullComparison\] .+"]},
     {'category': 'java',
@@ -1299,6 +1799,16 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: To get the tag number of a protocol buffer enum, use getNumber() instead.',
+     'patterns': [r".*: warning: \[ProtocolBufferOrdinal\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Casting a random number in the range [0.0, 1.0) to an integer or long always results in 0.',
+     'patterns': [r".*: warning: \[RandomCast\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Use Random.nextInt(int).  Random.nextInt() % n can have negative results',
      'patterns': [r".*: warning: \[RandomModInteger\] .+"]},
     {'category': 'java',
@@ -1324,13 +1834,13 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: Variable compared to itself',
-     'patterns': [r".*: warning: \[SelfEquality\] .+"]},
+         'Java: Testing an object for equality with itself will always be true.',
+     'patterns': [r".*: warning: \[SelfEquals\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
-         'Java: An object is tested for equality to itself',
-     'patterns': [r".*: warning: \[SelfEquals\] .+"]},
+         'Java: This method must be called with an even number of arguments.',
+     'patterns': [r".*: warning: \[ShouldHaveEvenArgs\] .+"]},
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
@@ -1359,6 +1869,16 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: Throwing \'null\' always results in a NullPointerException being thrown.',
+     'patterns': [r".*: warning: \[ThrowNull\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: isEqualTo should not be used to test an object for equality with itself; the assertion will never fail.',
+     'patterns': [r".*: warning: \[TruthSelfEquals\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Catching Throwable/Error masks failures from fail() or assert*() in the try block',
      'patterns': [r".*: warning: \[TryFailThrowable\] .+"]},
     {'category': 'java',
@@ -1384,8 +1904,193 @@
     {'category': 'java',
      'severity': Severity.HIGH,
      'description':
+         'Java: `var` should not be used as a type name.',
+     'patterns': [r".*: warning: \[VarTypeName\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
          'Java: Method parameter has wrong package',
      'patterns': [r".*: warning: \[ParameterPackage\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Type declaration annotated with @ThreadSafe is not thread safe',
+     'patterns': [r".*: warning: \[ThreadSafe\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of class, field, or method that is not compatible with legacy Android devices',
+     'patterns': [r".*: warning: \[AndroidApiChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Invalid use of Flogger format string',
+     'patterns': [r".*: warning: \[AndroidFloggerFormatString\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use TunnelException.getCauseAs(Class) instead of casting the result of TunnelException.getCause().',
+     'patterns': [r".*: warning: \[DoNotCastTunnelExceptionCause\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Identifies undesirable mocks.',
+     'patterns': [r".*: warning: \[DoNotMock_ForJavaBuilder\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Duration Flag should NOT have units in the variable name or the @FlagSpec\'s name or altName field.',
+     'patterns': [r".*: warning: \[DurationFlagWithUnits\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Duration.get() only works with SECONDS or NANOS.',
+     'patterns': [r".*: warning: \[DurationGetTemporalUnit\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Invalid printf-style format string',
+     'patterns': [r".*: warning: \[FloggerFormatString\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Test class may not be run because it is missing a @RunWith annotation',
+     'patterns': [r".*: warning: \[JUnit4RunWithMissing\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of class, field, or method that is not compatible with JDK 7',
+     'patterns': [r".*: warning: \[Java7ApiChecker\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of java.time.Duration.withNanos(int) is not allowed.',
+     'patterns': [r".*: warning: \[JavaDurationWithNanos\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of java.time.Duration.withSeconds(long) is not allowed.',
+     'patterns': [r".*: warning: \[JavaDurationWithSeconds\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: java.time APIs that silently use the default system time-zone are not allowed.',
+     'patterns': [r".*: warning: \[JavaTimeDefaultTimeZone\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of new Duration(long) is not allowed. Please use Duration.millis(long) instead.',
+     'patterns': [r".*: warning: \[JodaDurationConstructor\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of duration.withMillis(long) is not allowed. Please use Duration.millis(long) instead.',
+     'patterns': [r".*: warning: \[JodaDurationWithMillis\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of instant.withMillis(long) is not allowed. Please use new Instant(long) instead.',
+     'patterns': [r".*: warning: \[JodaInstantWithMillis\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of JodaTime\'s type.plus(long) or type.minus(long) is not allowed (where \u003ctype> = {Duration,Instant,DateTime,DateMidnight}). Please use type.plus(Duration.millis(long)) or type.minus(Duration.millis(long)) instead.',
+     'patterns': [r".*: warning: \[JodaPlusMinusLong\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Changing JodaTime\'s current time is not allowed in non-testonly code.',
+     'patterns': [r".*: warning: \[JodaSetCurrentMillis\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of Joda-Time\'s DateTime.toDateTime(), Duration.toDuration(), Instant.toInstant(), Interval.toInterval(), and Period.toPeriod() are not allowed.',
+     'patterns': [r".*: warning: \[JodaToSelf\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Use of JodaTime\'s type.withDurationAdded(long, int) (where \u003ctype> = {Duration,Instant,DateTime}). Please use type.withDurationAdded(Duration.millis(long), int) instead.',
+     'patterns': [r".*: warning: \[JodaWithDurationAddedLong\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: LanguageCode comparison using reference equality instead of value equality',
+     'patterns': [r".*: warning: \[LanguageCodeEquality\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: The zero argument toString is not part of the Localizable interface and likely is just the java Object toString.  You probably want to call toString(Locale).',
+     'patterns': [r".*: warning: \[LocalizableWrongToString\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Period.get() only works with YEARS, MONTHS, or DAYS.',
+     'patterns': [r".*: warning: \[PeriodGetTemporalUnit\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Return value of methods returning Promise must be checked. Ignoring returned Promises suppresses exceptions thrown from the code that completes the Promises.',
+     'patterns': [r".*: warning: \[PromiseReturnValueIgnored\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: When returning a Promise, use thenChain() instead of then()',
+     'patterns': [r".*: warning: \[PromiseThenReturningPromise\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Streams.iterating() is unsafe for use except in the header of a for-each loop; please see its Javadoc for details.',
+     'patterns': [r".*: warning: \[StreamsIteratingNotInLoop\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: TemporalAccessor.get() only works for certain values of ChronoField.',
+     'patterns': [r".*: warning: \[TemporalAccessorGetChronoField\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Try-with-resources is not supported in this code, use try/finally instead',
+     'patterns': [r".*: warning: \[TryWithResources\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Adds checkOrThrow calls where needed',
+     'patterns': [r".*: warning: \[AddCheckOrThrow\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Equality on Nano protos (== or .equals) might not be the same in Lite',
+     'patterns': [r".*: warning: \[ForbidNanoEquality\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Submessages of a proto cannot be mutated',
+     'patterns': [r".*: warning: \[ForbidSubmessageMutation\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Repeated fields on proto messages cannot be directly referenced',
+     'patterns': [r".*: warning: \[NanoUnsafeRepeatedFieldUsage\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Requires that non-@enum int assignments to @enum ints is wrapped in a checkOrThrow',
+     'patterns': [r".*: warning: \[RequireCheckOrThrow\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Assignments into repeated field elements must be sequential',
+     'patterns': [r".*: warning: \[RequireSequentialRepeatedFields\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: Future.get in Google Now Producers code',
+     'patterns': [r".*: warning: \[FutureGetInNowProducers\] .+"]},
+    {'category': 'java',
+     'severity': Severity.HIGH,
+     'description':
+         'Java: @SimpleEnum applied to non-enum type',
+     'patterns': [r".*: warning: \[SimpleEnumUsage\] .+"]},
 
     # End warnings generated by Error Prone
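
Aside (not part of the patch above): the entries added in this hunk all follow the same shape — a 'category', a 'severity', a human-readable 'description', and one or more 'patterns' regexes that are matched against compiler output lines of the form "<file>: warning: [CheckName] <message>". The following minimal sketch shows how such a table can be used to classify a build-log line; the function name classify_line and the two sample entries are hypothetical and chosen only for illustration, not taken from warn.py itself.

    # Minimal sketch: classify a compiler output line against a
    # warn.py-style pattern table. Illustrative only; classify_line and
    # the sample entries below are assumptions, not the actual tool code.
    import re

    class Severity(object):
        HIGH = 'High'

    warn_patterns = [
        {'category': 'java', 'severity': Severity.HIGH,
         'description':
             'Java: Testing an object for equality with itself will always be true.',
         'patterns': [r".*: warning: \[SelfEquals\] .+"]},
        {'category': 'java', 'severity': Severity.HIGH,
         'description': 'Java: `var` should not be used as a type name.',
         'patterns': [r".*: warning: \[VarTypeName\] .+"]},
    ]

    def classify_line(line):
        """Return the description of the first matching entry, or None."""
        for entry in warn_patterns:
            for pattern in entry['patterns']:
                if re.match(pattern, line):
                    return entry['description']
        return None

    if __name__ == '__main__':
        sample = ('Foo.java:42: warning: [SelfEquals] '
                  'An object is tested for equality to itself')
        # Prints the SelfEquals description from the table above.
        print(classify_line(sample))

Under this reading, the hunk is purely additive data: new checker names (e.g. NullTernary, PredicateIncompatibleType, the Joda-Time and Flogger checks) become recognizable to the log classifier, and renamed or removed checkers (SelfEquality, Overrides) have their stale entries dropped or replaced.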