resolve merge conflicts of 79805876080859d9ca6b15fe2a811b54c18789f8 to qt-r1-dev-plus-aosp

Bug: None
Test: I solemnly swear I tested this conflict resolution.
Change-Id: I2acc079286697d8a303d15283fcde9af4c6cd69b
diff --git a/Changes.md b/Changes.md
index 1fadcef..be2a271 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,9 @@
 # Build System Changes for Android.mk Writers
 
+## Deprecation of `BUILD_*` module types
+
+See [build/make/Deprecation.md](Deprecation.md) for the current status.
+
 ## `PRODUCT_HOST_PACKAGES` split from `PRODUCT_PACKAGES` {#PRODUCT_HOST_PACKAGES}
 
 Previously, adding a module to `PRODUCT_PACKAGES` that supported both the host
diff --git a/CleanSpec.mk b/CleanSpec.mk
index ac7a843..fca07fd 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -632,6 +632,13 @@
 
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libjavacrypto.so)
 
+# Clean up old verity tools.
+$(call add-clean-step, rm -rf $(HOST_OUT_JAVA_LIBRARIES)/BootSignature.jar)
+$(call add-clean-step, rm -rf $(HOST_OUT_JAVA_LIBRARIES)/VeritySigner.jar)
+$(call add-clean-step, rm -rf $(HOST_OUT_EXECUTABLES)/build_verity_metadata.py)
+
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libc_malloc*)
+
 # Clean up old location of soft OMX plugins
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libstagefright_soft*)
 
diff --git a/Deprecation.md b/Deprecation.md
new file mode 100644
index 0000000..0d925cb
--- /dev/null
+++ b/Deprecation.md
@@ -0,0 +1,65 @@
+# Deprecation of Make
+
+We've made significant progress converting AOSP from Make to Soong (Android.mk
+to Android.bp), and we're ready to start turning off pieces of Make. If you
+have any problems converting, please contact us via:
+
+* The [android-building@googlegroups.com] group.
+* Our [public bug tracker](https://issuetracker.google.com/issues/new?component=381517).
+* Or privately through your existing contacts at Google.
+
+## Status
+
+[build/make/core/deprecation.mk] is the source of truth, but for easy browsing:
+
+| Module type                      | State     |
+| -------------------------------- | --------- |
+| `BUILD_AUX_EXECUTABLE`           | Warning   |
+| `BUILD_AUX_STATIC_LIBRARY`       | Warning   |
+| `BUILD_HOST_FUZZ_TEST`           | Warning   |
+| `BUILD_HOST_NATIVE_TEST`         | Warning   |
+| `BUILD_HOST_SHARED_TEST_LIBRARY` | Error     |
+| `BUILD_HOST_STATIC_TEST_LIBRARY` | Warning   |
+| `BUILD_HOST_TEST_CONFIG`         | Error     |
+| `BUILD_NATIVE_BENCHMARK`         | Warning   |
+| `BUILD_SHARED_TEST_LIBRARY`      | Error     |
+| `BUILD_STATIC_TEST_LIBRARY`      | Warning   |
+| `BUILD_TARGET_TEST_CONFIG`       | Error     |
+| `BUILD_*`                        | Available |
+
+## Module Type Deprecation Process
+
+We'll be turning off `BUILD_*` module types as all of the users are removed
+from AOSP (and Google's internal trees). The process will go something like
+this, using `BUILD_PACKAGE` as an example:
+
+* Prerequisite: all common users of `BUILD_PACKAGE` have been removed (some
+  device-specific ones may remain).
+* `BUILD_PACKAGE` will be moved from `AVAILABLE_BUILD_MODULE_TYPES` to
+  `DEFAULT_WARNING_BUILD_MODULE_TYPES` in [build/make/core/deprecation.mk]. This
+  will make referring to `BUILD_PACKAGE` a warning.
+* Any devices that still have warnings will have
+  `BUILD_BROKEN_USES_BUILD_PACKAGE := true` added to their `BoardConfig.mk`.
+* `BUILD_PACKAGE` will be switched from `DEFAULT_WARNING_BUILD_MODULE_TYPES` to
+  `DEFAULT_ERROR_BUILD_MODULE_TYPES`, which will turn referring to
+  `BUILD_PACKAGE` into an error unless the device has overridden it.
+* At some later point, after all devices in AOSP no longer set
+  `BUILD_BROKEN_USES_BUILD_PACKAGE`, `BUILD_PACKAGE` will be moved from
+  `DEFAULT_ERROR_BUILD_MODULE_TYPES` to `OBSOLETE_BUILD_MODULE_TYPES` and the
+  code will be removed. It will no longer be possible to use `BUILD_PACKAGE`.
+
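+For example, a device that still referenced `BUILD_PACKAGE` during the warning
+or error stages would carry a line like this in its `BoardConfig.mk` until the
+module type is obsoleted (an illustrative sketch; the variable name tracks the
+module type being deprecated):
+
+    BUILD_BROKEN_USES_BUILD_PACKAGE := true
+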
+In most cases, we expect a module type to stay in the default warning state for
+about two weeks before becoming an error by default. It will then spend some
+amount of time in the default error state before moving to obsolete -- we'll
+try to keep it available for a while, but other development may break the
+remaining users, and the fix may be to obsolete the module type. There is no
+expectation that the `BUILD_BROKEN_USES_BUILD_*` workarounds will keep working
+in a future release; they are short-term workarounds only.
+
+Just to be clear, the above process will happen on the AOSP master branch. So
+if you're following Android releases, none of the deprecation steps will be in
+Android Q, and the 2020 release will have jumped directly to the end for many
+module types.
+
+[android-building@googlegroups.com]: https://groups.google.com/forum/#!forum/android-building
+[build/make/core/deprecation.mk]: /core/deprecation.mk
diff --git a/core/Makefile b/core/Makefile
index 4c3d813..efa07db 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -91,9 +91,7 @@
 
 ifdef has_dup_copy_headers
   has_dup_copy_headers :=
-  ifneq ($(BUILD_BROKEN_DUP_COPY_HEADERS),true)
-    $(error duplicate header copies are no longer allowed. For more information about headers, see: https://android.googlesource.com/platform/build/soong/+/master/docs/best_practices.md#headers)
-  endif
+  $(error duplicate header copies are no longer allowed. For more information about headers, see: https://android.googlesource.com/platform/build/soong/+/master/docs/best_practices.md#headers)
 endif
 
 # -----------------------------------------------------------------
@@ -300,7 +298,7 @@
 # non-default dev keys (usually private keys from a vendor directory).
 # Both of these tags will be removed and replaced with "release-keys"
 # when the target-files is signed in a post-build step.
-ifeq ($(DEFAULT_SYSTEM_DEV_CERTIFICATE),build/target/product/security/testkey)
+ifeq ($(DEFAULT_SYSTEM_DEV_CERTIFICATE),build/make/target/product/security/testkey)
 BUILD_KEYS := test-keys
 else
 BUILD_KEYS := dev-keys
@@ -423,7 +421,6 @@
 	        BUILD_HOSTNAME="$(BUILD_HOSTNAME)" \
 	        BUILD_NUMBER="$(BUILD_NUMBER_FROM_FILE)" \
 	        BOARD_BUILD_SYSTEM_ROOT_IMAGE="$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)" \
-	        AB_OTA_UPDATER="$(AB_OTA_UPDATER)" \
 	        PLATFORM_VERSION="$(PLATFORM_VERSION)" \
 	        PLATFORM_SECURITY_PATCH="$(PLATFORM_SECURITY_PATCH)" \
 	        PLATFORM_BASE_OS="$(PLATFORM_BASE_OS)" \
@@ -462,10 +459,11 @@
 
 build_desc :=
 
-ifeq (,$(filter true, $(TARGET_NO_KERNEL) $(TARGET_NO_RECOVERY)))
-INSTALLED_RECOVERYIMAGE_TARGET := $(PRODUCT_OUT)/recovery.img
-else
 INSTALLED_RECOVERYIMAGE_TARGET :=
+ifdef BUILDING_RECOVERY_IMAGE
+ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+INSTALLED_RECOVERYIMAGE_TARGET := $(PRODUCT_OUT)/recovery.img
+endif
 endif
 
 $(INSTALLED_BUILD_PROP_TARGET): $(intermediate_system_build_prop) $(INSTALLED_RECOVERYIMAGE_TARGET)
@@ -507,6 +505,9 @@
 ifdef TARGET_SCREEN_DENSITY
 	$(hide) echo ro.sf.lcd_density="$(TARGET_SCREEN_DENSITY)">>$@
 endif
+ifeq ($(AB_OTA_UPDATER),true)
+	$(hide) echo ro.build.ab_update=true >> $@
+endif
 	$(hide) $(call generate-common-build-props,vendor,$@)
 	$(hide) echo "#" >> $@; \
 	        echo "# BOOTIMAGE_BUILD_PROPERTIES" >> $@; \
@@ -649,6 +650,8 @@
 
 # -----------------------------------------------------------------
 # package stats
+ifdef BUILDING_SYSTEM_IMAGE
+
 PACKAGE_STATS_FILE := $(PRODUCT_OUT)/package-stats.txt
 PACKAGES_TO_STAT := \
     $(sort $(filter $(TARGET_OUT)/% $(TARGET_OUT_DATA)/%, \
@@ -667,6 +670,8 @@
 .PHONY: package-stats
 package-stats: $(PACKAGE_STATS_FILE)
 
+endif # BUILDING_SYSTEM_IMAGE
+
 # -----------------------------------------------------------------
 # Cert-to-package mapping.  Used by the post-build signing tools.
 # Use a macro to add newline to each echo command
@@ -903,13 +908,14 @@
 
 endif # BUILDING_RAMDISK_IMAGE
 
-
-INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
-
-ifneq ($(strip $(TARGET_NO_KERNEL)),true)
-
 # -----------------------------------------------------------------
 # the boot image, which is a collection of other images.
+
+# This is defined here since we may be building recovery as boot
+# below and only want to define this once
+BUILT_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
+
+ifneq ($(strip $(TARGET_NO_KERNEL)),true)
 INTERNAL_BOOTIMAGE_ARGS := \
 	$(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET)) \
 	--kernel $(INSTALLED_KERNEL_TARGET)
@@ -948,8 +954,10 @@
     --os_version $(PLATFORM_VERSION) \
     --os_patch_level $(PLATFORM_SECURITY_PATCH)
 
-# We build recovery as boot image if BOARD_USES_RECOVERY_AS_BOOT is true.
-ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+# Define these only if we are building boot
+ifdef BUILDING_BOOT_IMAGE
+INSTALLED_BOOTIMAGE_TARGET := $(BUILT_BOOTIMAGE_TARGET)
+
 ifeq ($(TARGET_BOOTIMAGE_USE_EXT2),true)
 $(error TARGET_BOOTIMAGE_USE_EXT2 is not supported anymore)
 
@@ -1020,7 +1028,7 @@
 	$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
 
 endif # TARGET_BOOTIMAGE_USE_EXT2
-endif # BOARD_USES_RECOVERY_AS_BOOT
+endif # BUILDING_BOOT_IMAGE
 
 else # TARGET_NO_KERNEL == "true"
 ifdef BOARD_PREBUILT_BOOTIMAGE
@@ -1230,42 +1238,6 @@
 	$(hide) mkdir -p $(dir $@)
 	$(hide) $(ACP) $< $@
 
-# -----------------------------------------------------------------
-# Build a keystore with the authorized keys in it, used to verify the
-# authenticity of downloaded OTA packages.
-#
-# This rule adds to ALL_DEFAULT_INSTALLED_MODULES, so it needs to come
-# before the rules that use that variable to build the image.
-ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT_ETC)/security/otacerts.zip
-$(TARGET_OUT_ETC)/security/otacerts.zip: PRIVATE_CERT := $(DEFAULT_KEY_CERT_PAIR).x509.pem
-$(TARGET_OUT_ETC)/security/otacerts.zip: $(SOONG_ZIP)
-$(TARGET_OUT_ETC)/security/otacerts.zip: $(DEFAULT_KEY_CERT_PAIR).x509.pem
-	$(hide) rm -f $@
-	$(hide) mkdir -p $(dir $@)
-	$(hide) $(SOONG_ZIP) -o $@ -C $(dir $(PRIVATE_CERT)) -f $(PRIVATE_CERT)
-
-# Carry the public key for update_engine if it's a non-IoT target that
-# uses the AB updater. We use the same key as otacerts but in RSA public key
-# format.
-ifeq ($(AB_OTA_UPDATER),true)
-ifneq ($(PRODUCT_IOT),true)
-ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem
-$(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem: $(DEFAULT_KEY_CERT_PAIR).x509.pem
-	$(hide) rm -f $@
-	$(hide) mkdir -p $(dir $@)
-	$(hide) openssl x509 -pubkey -noout -in $< > $@
-
-ALL_DEFAULT_INSTALLED_MODULES += \
-    $(TARGET_RECOVERY_ROOT_OUT)/system/etc/update_engine/update-payload-key.pub.pem
-$(TARGET_RECOVERY_ROOT_OUT)/system/etc/update_engine/update-payload-key.pub.pem: \
-	    $(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem
-	$(hide) cp -f $< $@
-endif
-endif
-
-.PHONY: otacerts
-otacerts: $(TARGET_OUT_ETC)/security/otacerts.zip
-
 
 # #################################################################
 # Targets for user images
@@ -1532,7 +1504,7 @@
 # Recovery image
 
 # Recovery image exists if we are building recovery, or building recovery as boot.
-ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
+ifdef BUILDING_RECOVERY_IMAGE
 
 INTERNAL_RECOVERYIMAGE_FILES := $(filter $(TARGET_RECOVERY_OUT)/%, \
     $(ALL_DEFAULT_INSTALLED_MODULES))
@@ -1545,6 +1517,7 @@
 # build-recoveryimage-target, which would touch the files under TARGET_RECOVERY_OUT and race with
 # the call to FILELIST.
 ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+INSTALLED_BOOTIMAGE_TARGET := $(BUILT_BOOTIMAGE_TARGET)
 $(INSTALLED_FILES_FILE_RECOVERY): $(INSTALLED_BOOTIMAGE_TARGET)
 else
 $(INSTALLED_FILES_FILE_RECOVERY): $(INSTALLED_RECOVERYIMAGE_TARGET)
@@ -1830,22 +1803,6 @@
   INTERNAL_RECOVERYIMAGE_ARGS += --dtb $(INSTALLED_DTBIMAGE_TARGET)
 endif
 
-# Keys authorized to sign OTA packages this build will accept.  The
-# build always uses dev-keys for this; release packaging tools will
-# substitute other keys for this one.
-OTA_PUBLIC_KEYS := $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
-
-# Generate a file containing the keys that will be read by the
-# recovery binary.
-RECOVERY_INSTALL_OTA_KEYS := \
-	$(call intermediates-dir-for,PACKAGING,ota_keys)/otacerts.zip
-$(RECOVERY_INSTALL_OTA_KEYS): PRIVATE_OTA_PUBLIC_KEYS := $(OTA_PUBLIC_KEYS)
-$(RECOVERY_INSTALL_OTA_KEYS): extra_keys := $(patsubst %,%.x509.pem,$(PRODUCT_EXTRA_RECOVERY_KEYS))
-$(RECOVERY_INSTALL_OTA_KEYS): $(SOONG_ZIP) $(OTA_PUBLIC_KEYS) $(extra_keys)
-	$(hide) rm -f $@
-	$(hide) mkdir -p $(dir $@)
-	$(hide) $(SOONG_ZIP) -o $@ $(foreach key_file, $(PRIVATE_OTA_PUBLIC_KEYS) $(extra_keys), -C $(dir $(key_file)) -f $(key_file))
-
 RECOVERYIMAGE_ID_FILE := $(PRODUCT_OUT)/recovery.id
 
 # $(1): output file
@@ -1877,8 +1834,6 @@
     cp -f $(item) $(TARGET_RECOVERY_ROOT_OUT)/system/etc/recovery.fstab)
   $(if $(strip $(recovery_wipe)), \
     $(hide) cp -f $(recovery_wipe) $(TARGET_RECOVERY_ROOT_OUT)/system/etc/recovery.wipe)
-  $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/system/etc/security
-  $(hide) cp $(RECOVERY_INSTALL_OTA_KEYS) $(TARGET_RECOVERY_ROOT_OUT)/system/etc/security/otacerts.zip
   $(hide) ln -sf prop.default $(TARGET_RECOVERY_ROOT_OUT)/default.prop
   $(BOARD_RECOVERY_IMAGE_PREPARE)
   $(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_RECOVERY_ROOT_OUT) | $(MINIGZIP) > $(recovery_ramdisk)
@@ -1935,7 +1890,6 @@
 	    $(INSTALLED_RECOVERY_BUILD_PROP_TARGET) \
 	    $(recovery_resource_deps) \
 	    $(recovery_fstab) \
-	    $(RECOVERY_INSTALL_OTA_KEYS) \
 	    $(BOARD_RECOVERY_KERNEL_MODULES) \
 	    $(DEPMOD)
 	$(call pretty,"Target boot image from recovery: $@")
@@ -1966,7 +1920,6 @@
 	    $(INSTALLED_RECOVERY_BUILD_PROP_TARGET) \
 	    $(recovery_resource_deps) \
 	    $(recovery_fstab) \
-	    $(RECOVERY_INSTALL_OTA_KEYS) \
 	    $(BOARD_RECOVERY_KERNEL_MODULES) \
 	    $(DEPMOD)
 	$(call build-recoveryimage-target, $@)
@@ -1983,9 +1936,9 @@
 	@echo "make $@: ignoring dependencies"
 	$(call build-recoveryimage-target, $(INSTALLED_RECOVERYIMAGE_TARGET))
 
-else # INSTALLED_RECOVERYIMAGE_TARGET not defined
+else # BUILDING_RECOVERY_IMAGE
 RECOVERY_RESOURCE_ZIP :=
-endif
+endif # BUILDING_RECOVERY_IMAGE
 
 .PHONY: recoveryimage
 recoveryimage: $(INSTALLED_RECOVERYIMAGE_TARGET) $(RECOVERY_RESOURCE_ZIP)
@@ -2894,6 +2847,22 @@
 # on the device because it depends on everything in a given device
 # image which defines a vintf_fragment.
 
+ifdef BUILDING_SYSTEM_IMAGE
+
+ifndef BOARD_USES_PRODUCTIMAGE
+  # If no product image at all, check system manifest directly against device matrix.
+  check_framework_manifest := true
+else ifdef BUILDING_PRODUCT_IMAGE
+  # If device has a product image, only check if the product image is built.
+  check_framework_manifest := true
+endif
+
+# TODO (b/131425279): delete this line once build_mixed script can correctly merge system and
+# product manifests.
+check_framework_manifest := true
+
+ifeq ($(check_framework_manifest),true)
+
 BUILT_ASSEMBLED_FRAMEWORK_MANIFEST := $(PRODUCT_OUT)/verified_assembled_framework_manifest.xml
 $(BUILT_ASSEMBLED_FRAMEWORK_MANIFEST): $(HOST_OUT_EXECUTABLES)/assemble_vintf \
                                        $(BUILT_VENDOR_MATRIX) \
@@ -2914,6 +2883,11 @@
 
 droidcore: $(BUILT_ASSEMBLED_FRAMEWORK_MANIFEST)
 
+endif # check_framework_manifest
+check_framework_manifest :=
+
+endif # BUILDING_SYSTEM_IMAGE
+
 # -----------------------------------------------------------------
 # product_services partition image
 ifdef BUILDING_PRODUCT_SERVICES_IMAGE
@@ -3302,6 +3276,7 @@
       --output $@
 endef
 
+ifdef BUILDING_SYSTEM_IMAGE
 ifdef BOARD_AVB_VBMETA_SYSTEM
 INSTALLED_VBMETA_SYSTEMIMAGE_TARGET := $(PRODUCT_OUT)/vbmeta_system.img
 $(INSTALLED_VBMETA_SYSTEMIMAGE_TARGET): \
@@ -3310,6 +3285,7 @@
 	    $(BOARD_AVB_VBMETA_SYSTEM_KEY_PATH)
 	$(call build-chained-vbmeta-image,vbmeta_system)
 endif
+endif # BUILDING_SYSTEM_IMAGE
 
 ifdef BOARD_AVB_VBMETA_VENDOR
 INSTALLED_VBMETA_VENDORIMAGE_TARGET := $(PRODUCT_OUT)/vbmeta_vendor.img
@@ -3484,7 +3460,7 @@
   build_ota_package := false
   build_otatools_package := false
 else
-  # set build_ota_package, and allow opt-out below
+  # Set build_ota_package, and allow opt-out below.
   build_ota_package := true
   ifeq ($(TARGET_SKIP_OTA_PACKAGE),true)
     build_ota_package := false
@@ -3495,20 +3471,22 @@
   ifeq ($(TARGET_PRODUCT),sdk)
     build_ota_package := false
   endif
-  ifneq ($(filter generic%,$(TARGET_DEVICE)),)
-    build_ota_package := false
-  endif
-  ifeq ($(TARGET_NO_KERNEL),true)
-    build_ota_package := false
-  endif
-  ifeq ($(recovery_fstab),)
-    build_ota_package := false
-  endif
   ifeq ($(TARGET_BUILD_PDK),true)
     build_ota_package := false
   endif
+  ifneq ($(PRODUCT_BUILD_GENERIC_OTA_PACKAGE),true)
+    ifneq ($(filter generic%,$(TARGET_DEVICE)),)
+      build_ota_package := false
+    endif
+    ifeq ($(TARGET_NO_KERNEL),true)
+      build_ota_package := false
+    endif
+    ifeq ($(recovery_fstab),)
+      build_ota_package := false
+    endif
+  endif # PRODUCT_BUILD_GENERIC_OTA_PACKAGE
 
-  # set build_otatools_package, and allow opt-out below
+  # Set build_otatools_package, and allow opt-out below.
   build_otatools_package := true
   ifeq ($(TARGET_SKIP_OTATOOLS_PACKAGE),true)
     build_otatools_package := false
@@ -3516,134 +3494,112 @@
 endif
 
 ifeq ($(build_otatools_package),true)
-OTATOOLS :=  $(HOST_OUT_EXECUTABLES)/minigzip \
-  $(HOST_OUT_EXECUTABLES)/aapt \
-  $(HOST_OUT_EXECUTABLES)/checkvintf \
-  $(HOST_OUT_EXECUTABLES)/mkbootfs \
-  $(HOST_OUT_EXECUTABLES)/mkbootimg \
-  $(HOST_OUT_EXECUTABLES)/fs_config \
-  $(HOST_OUT_EXECUTABLES)/zipalign \
-  $(HOST_OUT_EXECUTABLES)/bsdiff \
-  $(HOST_OUT_EXECUTABLES)/imgdiff \
-  $(HOST_OUT_JAVA_LIBRARIES)/signapk.jar \
-  $(HOST_OUT_JAVA_LIBRARIES)/BootSignature.jar \
-  $(HOST_OUT_JAVA_LIBRARIES)/VeritySigner.jar \
-  $(HOST_OUT_EXECUTABLES)/mke2fs \
-  $(HOST_OUT_EXECUTABLES)/mkuserimg_mke2fs \
-  $(HOST_OUT_EXECUTABLES)/e2fsdroid \
-  $(HOST_OUT_EXECUTABLES)/tune2fs \
-  $(HOST_OUT_EXECUTABLES)/mksquashfsimage.sh \
-  $(HOST_OUT_EXECUTABLES)/mksquashfs \
-  $(HOST_OUT_EXECUTABLES)/mkf2fsuserimg.sh \
-  $(HOST_OUT_EXECUTABLES)/make_f2fs \
-  $(HOST_OUT_EXECUTABLES)/sload_f2fs \
-  $(HOST_OUT_EXECUTABLES)/simg2img \
-  $(HOST_OUT_EXECUTABLES)/e2fsck \
-  $(HOST_OUT_EXECUTABLES)/generate_verity_key \
-  $(HOST_OUT_EXECUTABLES)/verity_signer \
-  $(HOST_OUT_EXECUTABLES)/verity_verifier \
-  $(HOST_OUT_EXECUTABLES)/append2simg \
-  $(HOST_OUT_EXECUTABLES)/img2simg \
-  $(HOST_OUT_EXECUTABLES)/boot_signer \
-  $(HOST_OUT_EXECUTABLES)/fec \
-  $(HOST_OUT_EXECUTABLES)/brillo_update_payload \
-  $(HOST_OUT_EXECUTABLES)/lib/shflags/shflags \
-  $(HOST_OUT_EXECUTABLES)/delta_generator \
-  $(HOST_OUT_EXECUTABLES)/care_map_generator \
-  $(HOST_OUT_EXECUTABLES)/fc_sort \
-  $(HOST_OUT_EXECUTABLES)/sefcontext_compile \
-  $(LPMAKE) \
-  $(AVBTOOL) \
-  $(BLK_ALLOC_TO_BASE_FS) \
-  $(BROTLI) \
-  $(BUILD_VERITY_METADATA) \
-  $(BUILD_VERITY_TREE)
+
+INTERNAL_OTATOOLS_MODULES := \
+  aapt \
+  append2simg \
+  avbtool \
+  blk_alloc_to_base_fs \
+  boot_signer \
+  brillo_update_payload \
+  brotli \
+  bsdiff \
+  build_verity_metadata \
+  build_verity_tree \
+  care_map_generator \
+  checkvintf \
+  delta_generator \
+  e2fsck \
+  e2fsdroid \
+  fc_sort \
+  fec \
+  fs_config \
+  generate_verity_key \
+  img2simg \
+  imgdiff \
+  libconscrypt_openjdk_jni \
+  lpmake \
+  make_f2fs \
+  minigzip \
+  mkbootfs \
+  mkbootimg \
+  mke2fs \
+  mke2fs.conf \
+  mkf2fsuserimg.sh \
+  mksquashfs \
+  mksquashfsimage.sh \
+  mkuserimg_mke2fs \
+  sefcontext_compile \
+  shflags \
+  signapk \
+  simg2img \
+  sload_f2fs \
+  tune2fs \
+  verity_signer \
+  verity_verifier \
+  zipalign \
 
 ifeq (true,$(PRODUCT_SUPPORTS_VBOOT))
-OTATOOLS += \
-  $(FUTILITY) \
-  $(VBOOT_SIGNER)
+INTERNAL_OTATOOLS_MODULES += \
+  futility \
+  vboot_signer
 endif
 
-# Shared libraries.
-OTATOOLS += \
-  $(HOST_LIBRARY_PATH)/libc++$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/liblog$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libcutils$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libselinux$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libcrypto_utils$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libcrypto-host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libext2fs-host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libext2_blkid-host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libext2_com_err-host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libext2_e2p-host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libext2_misc$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libext2_profile-host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libext2_quota-host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libext2_uuid-host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libconscrypt_openjdk_jni$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libbrillo$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libbrillo-stream$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libchrome$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libcurl-host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libevent-host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libprotobuf-cpp-lite$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libssl-host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libz-host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libsparse-host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libbase$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libpcre2$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libbrotli$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/liblp$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libext4_utils$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libfec$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libsquashfs_utils$(HOST_SHLIB_SUFFIX)
-
+INTERNAL_OTATOOLS_FILES := \
+  $(filter $(HOST_OUT)/%,$(call module-installed-files,$(INTERNAL_OTATOOLS_MODULES)))
 
 .PHONY: otatools
-otatools: $(OTATOOLS)
+otatools: $(INTERNAL_OTATOOLS_FILES)
 
-BUILT_OTATOOLS_PACKAGE := $(PRODUCT_OUT)/otatools.zip
-$(BUILT_OTATOOLS_PACKAGE): zip_root := $(call intermediates-dir-for,PACKAGING,otatools)/otatools
+# For each module, recursively resolve its host shared library dependencies. Then we have a full
+# list of modules whose installed files need to be packed.
+INTERNAL_OTATOOLS_MODULES_WITH_DEPS := \
+  $(sort $(INTERNAL_OTATOOLS_MODULES) \
+      $(foreach m,$(INTERNAL_OTATOOLS_MODULES),$(call get-all-shared-libs-deps,$(m))))
 
-OTATOOLS_DEPS := \
-  system/extras/ext4_utils/mke2fs.conf \
-  $(sort $(shell find build/target/product/security -type f -name "*.x509.pem" -o -name "*.pk8" -o \
-      -name verity_key))
+INTERNAL_OTATOOLS_PACKAGE_FILES := \
+  $(filter $(HOST_OUT)/%,$(call module-installed-files,$(INTERNAL_OTATOOLS_MODULES_WITH_DEPS)))
+
+INTERNAL_OTATOOLS_PACKAGE_FILES += \
+  $(sort $(shell find build/make/target/product/security -type f -name "*.x509.pem" -o \
+      -name "*.pk8" -o -name verity_key))
 
 ifneq (,$(wildcard device))
-OTATOOLS_DEPS += \
+INTERNAL_OTATOOLS_PACKAGE_FILES += \
   $(sort $(shell find device $(wildcard vendor) -type f -name "*.pk8" -o -name "verifiedboot*" -o \
       -name "*.x509.pem" -o -name "oem*.prop"))
 endif
 ifneq (,$(wildcard external/avb))
-OTATOOLS_DEPS += \
+INTERNAL_OTATOOLS_PACKAGE_FILES += \
   $(sort $(shell find external/avb/test/data -type f -name "testkey_*.pem" -o \
       -name "atx_metadata.bin"))
 endif
 ifneq (,$(wildcard system/update_engine))
-OTATOOLS_DEPS += \
+INTERNAL_OTATOOLS_PACKAGE_FILES += \
   $(sort $(shell find system/update_engine/scripts -name "*.pyc" -prune -o -type f -print))
 endif
-
-OTATOOLS_RELEASETOOLS := \
-  $(sort $(shell find build/make/tools/releasetools -name "*.pyc" -prune -o -type f))
-
 ifeq (true,$(PRODUCT_SUPPORTS_VBOOT))
-OTATOOLS_DEPS += \
+INTERNAL_OTATOOLS_PACKAGE_FILES += \
   $(sort $(shell find external/vboot_reference/tests/devkeys -type f))
 endif
 
-$(BUILT_OTATOOLS_PACKAGE): $(OTATOOLS) $(OTATOOLS_DEPS) $(OTATOOLS_RELEASETOOLS) $(SOONG_ZIP)
+INTERNAL_OTATOOLS_RELEASETOOLS := \
+  $(sort $(shell find build/make/tools/releasetools -name "*.pyc" -prune -o \
+      \( -type f -o -type l \) -print))
+
+BUILT_OTATOOLS_PACKAGE := $(PRODUCT_OUT)/otatools.zip
+$(BUILT_OTATOOLS_PACKAGE): PRIVATE_ZIP_ROOT := $(call intermediates-dir-for,PACKAGING,otatools)/otatools
+$(BUILT_OTATOOLS_PACKAGE): PRIVATE_OTATOOLS_PACKAGE_FILES := $(INTERNAL_OTATOOLS_PACKAGE_FILES)
+$(BUILT_OTATOOLS_PACKAGE): PRIVATE_OTATOOLS_RELEASETOOLS := $(INTERNAL_OTATOOLS_RELEASETOOLS)
+$(BUILT_OTATOOLS_PACKAGE): $(INTERNAL_OTATOOLS_PACKAGE_FILES) $(INTERNAL_OTATOOLS_RELEASETOOLS)
+$(BUILT_OTATOOLS_PACKAGE): $(SOONG_ZIP)
 	@echo "Package OTA tools: $@"
-	$(hide) rm -rf $@ $(zip_root)
-	$(hide) mkdir -p $(dir $@) $(zip_root)/bin $(zip_root)/framework $(zip_root)/releasetools
-	$(call copy-files-with-structure,$(OTATOOLS),$(HOST_OUT)/,$(zip_root))
-	$(hide) cp $(SOONG_ZIP) $(zip_root)/bin/
-	$(hide) cp -r -d -p build/make/tools/releasetools/* $(zip_root)/releasetools
-	$(hide) rm -rf $@ $(zip_root)/releasetools/*.pyc
-	$(hide) $(SOONG_ZIP) -o $@ -C $(zip_root) -D $(zip_root) \
-	  -C . $(addprefix -f ,$(OTATOOLS_DEPS))
+	rm -rf $@ $(PRIVATE_ZIP_ROOT)
+	mkdir -p $(dir $@)
+	$(call copy-files-with-structure,$(PRIVATE_OTATOOLS_PACKAGE_FILES),$(HOST_OUT)/,$(PRIVATE_ZIP_ROOT))
+	$(call copy-files-with-structure,$(PRIVATE_OTATOOLS_RELEASETOOLS),build/make/tools/,$(PRIVATE_ZIP_ROOT))
+	cp $(SOONG_ZIP) $(PRIVATE_ZIP_ROOT)/bin/
+	$(SOONG_ZIP) -o $@ -C $(PRIVATE_ZIP_ROOT) -D $(PRIVATE_ZIP_ROOT)
 
 .PHONY: otatools-package
 otatools-package: $(BUILT_OTATOOLS_PACKAGE)
@@ -3760,6 +3716,13 @@
     echo "super_image_in_update_package=true" >> $(1))
 endef
 
+# By conditionally including the dependency of the target files package on the
+# full system image deps, we speed up builds that do not build the system
+# image.
+ifdef BUILDING_SYSTEM_IMAGE
+$(BUILT_TARGET_FILES_PACKAGE): $(FULL_SYSTEMIMAGE_DEPS)
+endif
+
 # Depending on the various images guarantees that the underlying
 # directories are up-to-date.
 $(BUILT_TARGET_FILES_PACKAGE): \
@@ -3767,7 +3730,6 @@
 	    $(INSTALLED_BOOTIMAGE_TARGET) \
 	    $(INSTALLED_RADIOIMAGE_TARGET) \
 	    $(INSTALLED_RECOVERYIMAGE_TARGET) \
-	    $(FULL_SYSTEMIMAGE_DEPS) \
 	    $(INSTALLED_USERDATAIMAGE_TARGET) \
 	    $(INSTALLED_CACHEIMAGE_TARGET) \
 	    $(INSTALLED_VENDORIMAGE_TARGET) \
@@ -3939,6 +3901,9 @@
 ifdef BOARD_BOOTIMAGE_PARTITION_SIZE
 	$(hide) echo "boot_size=$(BOARD_BOOTIMAGE_PARTITION_SIZE)" >> $(zip_root)/META/misc_info.txt
 endif
+ifeq ($(INSTALLED_BOOTIMAGE_TARGET),)
+	$(hide) echo "no_boot=true" >> $(zip_root)/META/misc_info.txt
+endif
 ifeq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
 	$(hide) echo "no_recovery=true" >> $(zip_root)/META/misc_info.txt
 endif
@@ -3966,6 +3931,9 @@
 	$(hide) echo 'mkbootimg_version_args=$(INTERNAL_MKBOOTIMG_VERSION_ARGS)' >> $(zip_root)/META/misc_info.txt
 	$(hide) echo "multistage_support=1" >> $(zip_root)/META/misc_info.txt
 	$(hide) echo "blockimgdiff_versions=3,4" >> $(zip_root)/META/misc_info.txt
+ifeq ($(PRODUCT_BUILD_GENERIC_OTA_PACKAGE),true)
+	$(hide) echo "build_generic_ota_package=true" >> $(zip_root)/META/misc_info.txt
+endif
 ifneq ($(OEM_THUMBPRINT_PROPERTIES),)
 	# OTA scripts are only interested in fingerprint related properties
 	$(hide) echo "oem_fingerprint_properties=$(OEM_THUMBPRINT_PROPERTIES)" >> $(zip_root)/META/misc_info.txt
@@ -4142,7 +4110,9 @@
 endif
 	@# Metadata for compatibility verification.
 	$(hide) cp $(BUILT_SYSTEM_MATRIX) $(zip_root)/META/system_matrix.xml
+ifdef BUILT_ASSEMBLED_FRAMEWORK_MANIFEST
 	$(hide) cp $(BUILT_ASSEMBLED_FRAMEWORK_MANIFEST) $(zip_root)/META/system_manifest.xml
+endif
 ifdef BUILT_ASSEMBLED_VENDOR_MANIFEST
 	$(hide) cp $(BUILT_ASSEMBLED_VENDOR_MANIFEST) $(zip_root)/META/vendor_manifest.xml
 endif
@@ -4310,7 +4280,7 @@
 	$(hide) rm -rf $@ $(PRIVATE_LIST_FILE)
 	$(hide) mkdir -p $(dir $@) $(TARGET_OUT_UNSTRIPPED) $(dir $(PRIVATE_LIST_FILE))
 	$(hide) find -L $(TARGET_OUT_UNSTRIPPED) -type f | sort >$(PRIVATE_LIST_FILE)
-	$(hide) $(SOONG_ZIP) -d -o $@ -C $(OUT_DIR)/.. -l $(PRIVATE_LIST_FILE)
+	$(hide) $(SOONG_ZIP) --ignore_missing_files -d -o $@ -C $(OUT_DIR)/.. -l $(PRIVATE_LIST_FILE)
 # -----------------------------------------------------------------
 # A zip of the coverage directory.
 #
diff --git a/core/android_manifest.mk b/core/android_manifest.mk
index 1f7acf1..06bea5e 100644
--- a/core/android_manifest.mk
+++ b/core/android_manifest.mk
@@ -23,12 +23,6 @@
     $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/manifest/AndroidManifest.xml)
 endif
 
-# With aapt2, we'll link in the built resource from the AAR.
-ifneq ($(LOCAL_USE_AAPT2),true)
-  LOCAL_RESOURCE_DIR += $(foreach lib, $(LOCAL_STATIC_JAVA_AAR_LIBRARIES),\
-    $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/aar/res)
-endif
-
 full_android_manifest := $(intermediates.COMMON)/manifest/AndroidManifest.xml
 
 ifneq (,$(strip $(my_full_libs_manifest_files)))
diff --git a/core/binary.mk b/core/binary.mk
index 43063a8..a420c02 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -1535,11 +1535,10 @@
 
 # Add dependency of clang-tidy and clang-tidy.sh
 ifneq ($(my_tidy_checks),)
-  my_clang_tidy_programs := $(PATH_TO_CLANG_TIDY) $(PATH_TO_CLANG_TIDY_SHELL)
-  $(cpp_objects): $(intermediates)/%.o: $(my_clang_tidy_programs)
-  $(c_objects): $(intermediates)/%.o: $(my_clang_tidy_programs)
-  $(gen_cpp_objects): $(intermediates)/%.o: $(my_clang_tidy_programs)
-  $(gen_c_objects): $(intermediates)/%.o: $(my_clang_tidy_programs)
+  $(cpp_objects): $(intermediates)/%.o: $(PATH_TO_CLANG_TIDY)
+  $(c_objects): $(intermediates)/%.o: $(PATH_TO_CLANG_TIDY)
+  $(gen_cpp_objects): $(intermediates)/%.o: $(PATH_TO_CLANG_TIDY)
+  $(gen_c_objects): $(intermediates)/%.o: $(PATH_TO_CLANG_TIDY)
 endif
 
 # Move -l* entries from ldflags to ldlibs, and everything else to ldflags
diff --git a/core/board_config.mk b/core/board_config.mk
index aa626e3..d83fa32 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -86,12 +86,16 @@
 
 _build_broken_var_list := \
   BUILD_BROKEN_ANDROIDMK_EXPORTS \
-  BUILD_BROKEN_DUP_COPY_HEADERS \
   BUILD_BROKEN_DUP_RULES \
-  BUILD_BROKEN_PHONY_TARGETS \
   BUILD_BROKEN_ENG_DEBUG_TAGS \
   BUILD_BROKEN_USES_NETWORK \
 
+_build_broken_var_list += \
+  $(foreach m,$(AVAILABLE_BUILD_MODULE_TYPES) \
+              $(DEFAULT_WARNING_BUILD_MODULE_TYPES) \
+              $(DEFAULT_ERROR_BUILD_MODULE_TYPES), \
+    BUILD_BROKEN_USES_$(m))
+
 _board_true_false_vars := $(_build_broken_var_list)
 _board_strip_readonly_list += $(_build_broken_var_list)
 
@@ -177,7 +181,7 @@
 # Sanity check to warn about likely cryptic errors later in the build.
 ifeq ($(TARGET_IS_64_BIT),true)
   ifeq (,$(filter true false,$(TARGET_SUPPORTS_64_BIT_APPS)))
-    $(warning Building a 32-bit-app-only product on a 64-bit device. \
+    $(error Building a 32-bit-app-only product on a 64-bit device. \
       If this is intentional, set TARGET_SUPPORTS_64_BIT_APPS := false)
   endif
 endif
@@ -286,8 +290,33 @@
 endif
 .KATI_READONLY := BUILDING_CACHE_IMAGE
 
-# TODO: Add BUILDING_BOOT_IMAGE / BUILDING_RECOVERY_IMAGE
-# This gets complicated with BOARD_USES_RECOVERY_AS_BOOT, so skipping for now.
+# Are we building a boot image
+BUILDING_BOOT_IMAGE :=
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+  BUILDING_BOOT_IMAGE :=
+else ifeq ($(PRODUCT_BUILD_BOOT_IMAGE),)
+  ifdef BOARD_BOOTIMAGE_PARTITION_SIZE
+    BUILDING_BOOT_IMAGE := true
+  endif
+else ifeq ($(PRODUCT_BUILD_BOOT_IMAGE),true)
+  BUILDING_BOOT_IMAGE := true
+endif
+.KATI_READONLY := BUILDING_BOOT_IMAGE
+
+# Are we building a recovery image
+BUILDING_RECOVERY_IMAGE :=
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+  BUILDING_RECOVERY_IMAGE := true
+else ifeq ($(PRODUCT_BUILD_RECOVERY_IMAGE),)
+  ifdef BOARD_RECOVERYIMAGE_PARTITION_SIZE
+    ifeq (,$(filter true, $(TARGET_NO_KERNEL) $(TARGET_NO_RECOVERY)))
+      BUILDING_RECOVERY_IMAGE := true
+    endif
+  endif
+else ifeq ($(PRODUCT_BUILD_RECOVERY_IMAGE),true)
+  BUILDING_RECOVERY_IMAGE := true
+endif
+.KATI_READONLY := BUILDING_RECOVERY_IMAGE
 
 # Are we building a ramdisk image
 BUILDING_RAMDISK_IMAGE := true
@@ -482,6 +511,13 @@
   endif
 endif
 
+# Sanity check for building generic OTA packages. Currently it only supports A/B OTAs.
+ifeq ($(PRODUCT_BUILD_GENERIC_OTA_PACKAGE),true)
+  ifneq ($(AB_OTA_UPDATER),true)
+    $(error PRODUCT_BUILD_GENERIC_OTA_PACKAGE with 'AB_OTA_UPDATER != true' is not supported)
+  endif
+endif
+
 # Check BOARD_VNDK_VERSION
 define check_vndk_version
   $(eval vndk_path := prebuilts/vndk/v$(1)) \
@@ -518,3 +554,16 @@
   $(error System SDK versions '$(_unsupported_systemsdk_versions)' in BOARD_SYSTEMSDK_VERSIONS are not supported.\
           Supported versions are $(PLATFORM_SYSTEMSDK_VERSIONS))
 endif
+
+###########################################
+# Handle BUILD_BROKEN_USES_BUILD_*
+
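+# For a module type in DEFAULT_WARNING_BUILD_MODULE_TYPES, referring to it is a
+# warning unless the board sets BUILD_BROKEN_USES_<type> := false, which turns
+# the reference into an error. For a module type in
+# DEFAULT_ERROR_BUILD_MODULE_TYPES, referring to it is an error unless the
+# board sets BUILD_BROKEN_USES_<type> := true to downgrade it to a warning.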
+$(foreach m,$(DEFAULT_WARNING_BUILD_MODULE_TYPES),\
+  $(if $(filter false,$(BUILD_BROKEN_USES_$(m))),\
+    $(KATI_obsolete_var $(m),Please convert to Soong),\
+    $(KATI_deprecated_var $(m),Please convert to Soong)))
+
+$(foreach m,$(DEFAULT_ERROR_BUILD_MODULE_TYPES),\
+  $(if $(filter true,$(BUILD_BROKEN_USES_$(m))),\
+    $(KATI_deprecated_var $(m),Please convert to Soong),\
+    $(KATI_obsolete_var $(m),Please convert to Soong)))
diff --git a/core/build-system.html b/core/build-system.html
index 3a11a47..cc242d9 100644
--- a/core/build-system.html
+++ b/core/build-system.html
@@ -516,8 +516,8 @@
 example the root filesystem instead of in /system, add these lines to your
 Android.mk:</p>
 <pre>
-LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT_SBIN)
-LOCAL_UNSTRIPPED_PATH := $(TARGET_ROOT_OUT_SBIN_UNSTRIPPED)
+LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT)
+LOCAL_UNSTRIPPED_PATH := $(TARGET_ROOT_OUT_UNSTRIPPED)
 </pre>
 <p>For executables and libraries, you need to specify a
 <code>LOCAL_UNSTRIPPED_PATH</code> location if you specified a
@@ -527,9 +527,6 @@
 <code>LOCAL_MODULE_RELATIVE_PATH</code>.</p>
 <p>Look in <code>core/envsetup.mk</code> for all of the variables defining
 places to build things.</p>
-<p>FYI: If you're installing an executable to /sbin, you probably also want to
-set <code>LOCAL_FORCE_STATIC_EXCUTABLE := true</code> in your Android.mk, which
-will force the linker to only accept static libraries.</p>
 
 
 <h3>Android.mk variables</h3>
@@ -685,8 +682,7 @@
 <h4>LOCAL_FORCE_STATIC_EXECUTABLE</h4>
 <p>If your executable should be linked statically, set 
 <code>LOCAL_FORCE_STATIC_EXECUTABLE:=true</code>.  There is a very short
-list of libraries that we have in static form (currently only libc).  This is
-really only used for executables in /sbin on the root filesystem.</p>
+list of libraries that we have in static form (currently only libc).</p>
 
 <h4>LOCAL_GENERATED_SOURCES</h4>
 <p>Files that you add to <code>LOCAL_GENERATED_SOURCES</code> will be
@@ -812,7 +808,7 @@
 <h4>LOCAL_STATIC_LIBRARIES</h4>
 <p>These are the static libraries that you want to include in your module.
 Mostly, we use shared libraries, but there are a couple of places, like
-executables in sbin and host executables where we use static libraries instead.
+host executables where we use static libraries instead.
 <p><code>LOCAL_STATIC_LIBRARIES := \<br/>
 	&nbsp;&nbsp;&nbsp;&nbsp;libutils \<br/>
 	&nbsp;&nbsp;&nbsp;&nbsp;libtinyxml
diff --git a/core/cc_prebuilt_internal.mk b/core/cc_prebuilt_internal.mk
new file mode 100644
index 0000000..b936bd7
--- /dev/null
+++ b/core/cc_prebuilt_internal.mk
@@ -0,0 +1,197 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+############################################################
+# Internal build rules for native prebuilt modules
+############################################################
+
+my_strip_module := $(firstword \
+  $(LOCAL_STRIP_MODULE_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) \
+  $(LOCAL_STRIP_MODULE))
+
+ifeq (SHARED_LIBRARIES,$(LOCAL_MODULE_CLASS))
+  ifeq ($(LOCAL_IS_HOST_MODULE)$(my_strip_module),)
+    # Strip but not try to add debuglink
+    my_strip_module := no_debuglink
+  endif
+endif
+
+ifneq ($(filter STATIC_LIBRARIES SHARED_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
+  prebuilt_module_is_a_library := true
+else
+  prebuilt_module_is_a_library :=
+endif
+
+# Don't install static libraries by default.
+ifndef LOCAL_UNINSTALLABLE_MODULE
+ifeq (STATIC_LIBRARIES,$(LOCAL_MODULE_CLASS))
+  LOCAL_UNINSTALLABLE_MODULE := true
+endif
+endif
+
+my_check_elf_file_shared_lib_files :=
+
+ifneq ($(filter true keep_symbols no_debuglink mini-debug-info,$(my_strip_module)),)
+  ifdef LOCAL_IS_HOST_MODULE
+    $(call pretty-error,Cannot strip/pack host module)
+  endif
+  ifeq ($(filter SHARED_LIBRARIES EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+    $(call pretty-error,Can strip/pack only shared libraries or executables)
+  endif
+  ifneq ($(LOCAL_PREBUILT_STRIP_COMMENTS),)
+    $(call pretty-error,Cannot strip/pack scripts)
+  endif
+  # Set the arch-specific variables to set up the strip rules
+  LOCAL_STRIP_MODULE_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH) := $(my_strip_module)
+  include $(BUILD_SYSTEM)/dynamic_binary.mk
+  built_module := $(linked_module)
+
+  ifneq ($(LOCAL_SDK_VERSION),)
+    # binary.mk filters out NDK_MIGRATED_LIBS from my_shared_libs, thus those NDK libs are not added
+    # to DEPENDENCIES_ON_SHARED_LIBRARIES. Assign $(my_ndk_shared_libraries_fullpath) to
+    # my_check_elf_file_shared_lib_files so that check_elf_file.py can see those NDK stub libs.
+    my_check_elf_file_shared_lib_files := $(my_ndk_shared_libraries_fullpath)
+  endif
+else  # my_strip_module not true
+  include $(BUILD_SYSTEM)/base_rules.mk
+  built_module := $(LOCAL_BUILT_MODULE)
+
+ifdef prebuilt_module_is_a_library
+export_includes := $(intermediates)/export_includes
+export_cflags := $(foreach d,$(LOCAL_EXPORT_C_INCLUDE_DIRS),-I $(d))
+$(export_includes): PRIVATE_EXPORT_CFLAGS := $(export_cflags)
+$(export_includes): $(LOCAL_EXPORT_C_INCLUDE_DEPS)
+	@echo Export includes file: $< -- $@
+	$(hide) mkdir -p $(dir $@) && rm -f $@
+ifdef export_cflags
+	$(hide) echo "$(PRIVATE_EXPORT_CFLAGS)" >$@
+else
+	$(hide) touch $@
+endif
+export_cflags :=
+
+include $(BUILD_SYSTEM)/allowed_ndk_types.mk
+
+ifdef LOCAL_SDK_VERSION
+my_link_type := native:ndk:$(my_ndk_stl_family):$(my_ndk_stl_link_type)
+else ifdef LOCAL_USE_VNDK
+    _name := $(patsubst %.vendor,%,$(LOCAL_MODULE))
+    ifneq ($(filter $(_name),$(VNDK_CORE_LIBRARIES) $(VNDK_SAMEPROCESS_LIBRARIES) $(LLNDK_LIBRARIES)),)
+        ifeq ($(filter $(_name),$(VNDK_PRIVATE_LIBRARIES)),)
+            my_link_type := native:vndk
+        else
+            my_link_type := native:vndk_private
+        endif
+    else
+        my_link_type := native:vendor
+    endif
+else ifneq ($(filter $(TARGET_RECOVERY_OUT)/%,$(LOCAL_MODULE_PATH)),)
+my_link_type := native:recovery
+else
+my_link_type := native:platform
+endif
+
+# TODO: check dependencies of prebuilt files
+my_link_deps :=
+
+my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+my_common :=
+include $(BUILD_SYSTEM)/link_type.mk
+endif  # prebuilt_module_is_a_library
+
+# The real dependency will be added after all Android.mks are loaded and the install paths
+# of the shared libraries are determined.
+ifdef LOCAL_INSTALLED_MODULE
+ifdef LOCAL_IS_HOST_MODULE
+    ifeq ($(LOCAL_SYSTEM_SHARED_LIBRARIES),none)
+        my_system_shared_libraries :=
+    else
+        my_system_shared_libraries := $(LOCAL_SYSTEM_SHARED_LIBRARIES)
+    endif
+else
+    ifeq ($(LOCAL_SYSTEM_SHARED_LIBRARIES),none)
+        my_system_shared_libraries := libc libm libdl
+    else
+        my_system_shared_libraries := $(LOCAL_SYSTEM_SHARED_LIBRARIES)
+        my_system_shared_libraries := $(patsubst libc,libc libdl,$(my_system_shared_libraries))
+    endif
+endif
+
+my_shared_libraries := \
+    $(filter-out $(my_system_shared_libraries),$(LOCAL_SHARED_LIBRARIES)) \
+    $(my_system_shared_libraries)
+
+ifdef my_shared_libraries
+# Extra shared libraries introduced by LOCAL_CXX_STL.
+include $(BUILD_SYSTEM)/cxx_stl_setup.mk
+ifdef LOCAL_USE_VNDK
+  my_shared_libraries := $(foreach l,$(my_shared_libraries),\
+    $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
+endif
+$(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)DEPENDENCIES_ON_SHARED_LIBRARIES += \
+  $(my_register_name):$(LOCAL_INSTALLED_MODULE):$(subst $(space),$(comma),$(my_shared_libraries))
+endif
+endif  # my_shared_libraries
+
+# We need to enclose the above export_includes and my_built_shared_libraries in
+# "my_strip_module not true" because otherwise the rules are defined in dynamic_binary.mk.
+endif  # my_strip_module not true
+
+
+# Check prebuilt ELF binaries.
+include $(BUILD_SYSTEM)/check_elf_file.mk
+
+ifeq ($(NATIVE_COVERAGE),true)
+ifneq (,$(strip $(LOCAL_PREBUILT_COVERAGE_ARCHIVE)))
+  $(eval $(call copy-one-file,$(LOCAL_PREBUILT_COVERAGE_ARCHIVE),$(intermediates)/$(LOCAL_MODULE).gcnodir))
+  ifneq ($(LOCAL_UNINSTALLABLE_MODULE),true)
+    ifdef LOCAL_IS_HOST_MODULE
+      my_coverage_path := $($(my_prefix)OUT_COVERAGE)/$(patsubst $($(my_prefix)OUT)/%,%,$(my_module_path))
+    else
+      my_coverage_path := $(TARGET_OUT_COVERAGE)/$(patsubst $(PRODUCT_OUT)/%,%,$(my_module_path))
+    endif
+    my_coverage_path := $(my_coverage_path)/$(patsubst %.so,%,$(my_installed_module_stem)).gcnodir
+    $(eval $(call copy-one-file,$(LOCAL_PREBUILT_COVERAGE_ARCHIVE),$(my_coverage_path)))
+    $(LOCAL_BUILT_MODULE): $(my_coverage_path)
+  endif
+else
+# Coverage information is needed when static lib is a dependency of another
+# coverage-enabled module.
+ifeq (STATIC_LIBRARIES, $(LOCAL_MODULE_CLASS))
+GCNO_ARCHIVE := $(LOCAL_MODULE).gcnodir
+$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_OBJECTS :=
+$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_WHOLE_STATIC_LIBRARIES :=
+$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_PREFIX := $(my_prefix)
+$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_2ND_ARCH_VAR_PREFIX := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+$(intermediates)/$(GCNO_ARCHIVE) :
+	$(transform-o-to-static-lib)
+endif
+endif
+endif
+
+ifneq ($(filter init%rc,$(notdir $(LOCAL_INSTALLED_MODULE)))$(filter %/etc/init,$(dir $(LOCAL_INSTALLED_MODULE))),)
+  $(eval $(call copy-init-script-file-checked,$(my_prebuilt_src_file),$(built_module)))
+else ifneq ($(LOCAL_PREBUILT_STRIP_COMMENTS),)
+$(built_module) : $(my_prebuilt_src_file)
+	$(transform-prebuilt-to-target-strip-comments)
+else
+$(built_module) : $(my_prebuilt_src_file)
+	$(transform-prebuilt-to-target)
+endif
+ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+	$(hide) chmod +x $@
+endif
+
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index d67c9f8..fafdce6 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -291,7 +291,7 @@
 LOCAL_UNCOMPRESS_DEX:=
 LOCAL_UNINSTALLABLE_MODULE:=
 LOCAL_UNSTRIPPED_PATH:=
-LOCAL_USE_AAPT2:=$(USE_AAPT2)
+LOCAL_USE_AAPT2:=
 LOCAL_USE_CLANG_LLD:=
 LOCAL_USE_VNDK:=
 LOCAL_USES_LIBRARIES:=
diff --git a/core/config.mk b/core/config.mk
index 42e57a9..bf9d5c4 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -93,6 +93,10 @@
 $(KATI_obsolete_var DIST_DIR dist_goal,Use dist-for-goals instead. See $(CHANGES_URL)#dist)
 $(KATI_obsolete_var TARGET_ANDROID_FILESYSTEM_CONFIG_H,Use TARGET_FS_CONFIG_GEN instead)
 $(KATI_deprecated_var USER,Use BUILD_USERNAME instead. See $(CHANGES_URL)#USER)
+$(KATI_obsolete_var TARGET_ROOT_OUT_SBIN,/sbin has been removed, use /system/bin instead)
+$(KATI_obsolete_var TARGET_ROOT_OUT_SBIN_UNSTRIPPED,/sbin has been removed, use /system/bin instead)
+$(KATI_obsolete_var BUILD_BROKEN_PHONY_TARGETS)
+$(KATI_obsolete_var BUILD_BROKEN_DUP_COPY_HEADERS)
 
 # This is marked as obsolete in envsetup.mk after reading the BoardConfig.mk
 $(KATI_deprecate_export It is a global setting. See $(CHANGES_URL)#export_keyword)
@@ -105,7 +109,7 @@
 
 UNAME := $(shell uname -sm)
 
-SRC_TARGET_DIR := $(TOPDIR)build/target
+SRC_TARGET_DIR := $(TOPDIR)build/make/target
 
 # Some specific paths to tools
 SRC_DROIDDOC_DIR := $(TOPDIR)build/make/tools/droiddoc
@@ -133,45 +137,48 @@
 # Build system internal files
 # ###############################################################
 
-BUILD_COMBOS:= $(BUILD_SYSTEM)/combo
+BUILD_COMBOS :=$= $(BUILD_SYSTEM)/combo
 
-CLEAR_VARS:= $(BUILD_SYSTEM)/clear_vars.mk
-BUILD_HOST_STATIC_LIBRARY:= $(BUILD_SYSTEM)/host_static_library.mk
-BUILD_HOST_SHARED_LIBRARY:= $(BUILD_SYSTEM)/host_shared_library.mk
-BUILD_STATIC_LIBRARY:= $(BUILD_SYSTEM)/static_library.mk
-BUILD_HEADER_LIBRARY:= $(BUILD_SYSTEM)/header_library.mk
-BUILD_AUX_STATIC_LIBRARY:= $(BUILD_SYSTEM)/aux_static_library.mk
-BUILD_AUX_EXECUTABLE:= $(BUILD_SYSTEM)/aux_executable.mk
-BUILD_SHARED_LIBRARY:= $(BUILD_SYSTEM)/shared_library.mk
-BUILD_EXECUTABLE:= $(BUILD_SYSTEM)/executable.mk
-BUILD_HOST_EXECUTABLE:= $(BUILD_SYSTEM)/host_executable.mk
-BUILD_PACKAGE:= $(BUILD_SYSTEM)/package.mk
-BUILD_PHONY_PACKAGE:= $(BUILD_SYSTEM)/phony_package.mk
-BUILD_RRO_PACKAGE:= $(BUILD_SYSTEM)/build_rro_package.mk
-BUILD_HOST_PREBUILT:= $(BUILD_SYSTEM)/host_prebuilt.mk
-BUILD_PREBUILT:= $(BUILD_SYSTEM)/prebuilt.mk
-BUILD_MULTI_PREBUILT:= $(BUILD_SYSTEM)/multi_prebuilt.mk
-BUILD_JAVA_LIBRARY:= $(BUILD_SYSTEM)/java_library.mk
-BUILD_STATIC_JAVA_LIBRARY:= $(BUILD_SYSTEM)/static_java_library.mk
-BUILD_HOST_JAVA_LIBRARY:= $(BUILD_SYSTEM)/host_java_library.mk
-BUILD_COPY_HEADERS := $(BUILD_SYSTEM)/copy_headers.mk
-BUILD_NATIVE_TEST := $(BUILD_SYSTEM)/native_test.mk
-BUILD_NATIVE_BENCHMARK := $(BUILD_SYSTEM)/native_benchmark.mk
-BUILD_HOST_NATIVE_TEST := $(BUILD_SYSTEM)/host_native_test.mk
-BUILD_FUZZ_TEST := $(BUILD_SYSTEM)/fuzz_test.mk
-BUILD_HOST_FUZZ_TEST := $(BUILD_SYSTEM)/host_fuzz_test.mk
+CLEAR_VARS :=$= $(BUILD_SYSTEM)/clear_vars.mk
 
-BUILD_SHARED_TEST_LIBRARY := $(BUILD_SYSTEM)/shared_test_lib.mk
-BUILD_HOST_SHARED_TEST_LIBRARY := $(BUILD_SYSTEM)/host_shared_test_lib.mk
-BUILD_STATIC_TEST_LIBRARY := $(BUILD_SYSTEM)/static_test_lib.mk
-BUILD_HOST_STATIC_TEST_LIBRARY := $(BUILD_SYSTEM)/host_static_test_lib.mk
+BUILD_HOST_STATIC_LIBRARY :=$= $(BUILD_SYSTEM)/host_static_library.mk
+BUILD_HOST_SHARED_LIBRARY :=$= $(BUILD_SYSTEM)/host_shared_library.mk
+BUILD_STATIC_LIBRARY :=$= $(BUILD_SYSTEM)/static_library.mk
+BUILD_HEADER_LIBRARY :=$= $(BUILD_SYSTEM)/header_library.mk
+BUILD_AUX_STATIC_LIBRARY :=$= $(BUILD_SYSTEM)/aux_static_library.mk
+BUILD_AUX_EXECUTABLE :=$= $(BUILD_SYSTEM)/aux_executable.mk
+BUILD_SHARED_LIBRARY :=$= $(BUILD_SYSTEM)/shared_library.mk
+BUILD_EXECUTABLE :=$= $(BUILD_SYSTEM)/executable.mk
+BUILD_HOST_EXECUTABLE :=$= $(BUILD_SYSTEM)/host_executable.mk
+BUILD_PACKAGE :=$= $(BUILD_SYSTEM)/package.mk
+BUILD_PHONY_PACKAGE :=$= $(BUILD_SYSTEM)/phony_package.mk
+BUILD_RRO_PACKAGE :=$= $(BUILD_SYSTEM)/build_rro_package.mk
+BUILD_HOST_PREBUILT :=$= $(BUILD_SYSTEM)/host_prebuilt.mk
+BUILD_PREBUILT :=$= $(BUILD_SYSTEM)/prebuilt.mk
+BUILD_MULTI_PREBUILT :=$= $(BUILD_SYSTEM)/multi_prebuilt.mk
+BUILD_JAVA_LIBRARY :=$= $(BUILD_SYSTEM)/java_library.mk
+BUILD_STATIC_JAVA_LIBRARY :=$= $(BUILD_SYSTEM)/static_java_library.mk
+BUILD_HOST_JAVA_LIBRARY :=$= $(BUILD_SYSTEM)/host_java_library.mk
+BUILD_COPY_HEADERS :=$= $(BUILD_SYSTEM)/copy_headers.mk
+BUILD_NATIVE_TEST :=$= $(BUILD_SYSTEM)/native_test.mk
+BUILD_NATIVE_BENCHMARK :=$= $(BUILD_SYSTEM)/native_benchmark.mk
+BUILD_HOST_NATIVE_TEST :=$= $(BUILD_SYSTEM)/host_native_test.mk
+BUILD_FUZZ_TEST :=$= $(BUILD_SYSTEM)/fuzz_test.mk
+BUILD_HOST_FUZZ_TEST :=$= $(BUILD_SYSTEM)/host_fuzz_test.mk
 
-BUILD_NOTICE_FILE := $(BUILD_SYSTEM)/notice_files.mk
-BUILD_HOST_DALVIK_JAVA_LIBRARY := $(BUILD_SYSTEM)/host_dalvik_java_library.mk
-BUILD_HOST_DALVIK_STATIC_JAVA_LIBRARY := $(BUILD_SYSTEM)/host_dalvik_static_java_library.mk
+BUILD_SHARED_TEST_LIBRARY :=$= $(BUILD_SYSTEM)/shared_test_lib.mk
+BUILD_HOST_SHARED_TEST_LIBRARY :=$= $(BUILD_SYSTEM)/host_shared_test_lib.mk
+BUILD_STATIC_TEST_LIBRARY :=$= $(BUILD_SYSTEM)/static_test_lib.mk
+BUILD_HOST_STATIC_TEST_LIBRARY :=$= $(BUILD_SYSTEM)/host_static_test_lib.mk
 
-BUILD_HOST_TEST_CONFIG := $(BUILD_SYSTEM)/host_test_config.mk
-BUILD_TARGET_TEST_CONFIG := $(BUILD_SYSTEM)/target_test_config.mk
+BUILD_NOTICE_FILE :=$= $(BUILD_SYSTEM)/notice_files.mk
+BUILD_HOST_DALVIK_JAVA_LIBRARY :=$= $(BUILD_SYSTEM)/host_dalvik_java_library.mk
+BUILD_HOST_DALVIK_STATIC_JAVA_LIBRARY :=$= $(BUILD_SYSTEM)/host_dalvik_static_java_library.mk
+
+BUILD_HOST_TEST_CONFIG :=$= $(BUILD_SYSTEM)/host_test_config.mk
+BUILD_TARGET_TEST_CONFIG :=$= $(BUILD_SYSTEM)/target_test_config.mk
+
+include $(BUILD_SYSTEM)/deprecation.mk
 
 # ###############################################################
 # Parse out any modifier targets.
@@ -597,15 +604,15 @@
 BUILD_IMAGE_SRCS := $(wildcard build/make/tools/releasetools/*.py)
 APPEND2SIMG := $(HOST_OUT_EXECUTABLES)/append2simg
 VERITY_SIGNER := $(HOST_OUT_EXECUTABLES)/verity_signer
-BUILD_VERITY_METADATA := $(HOST_OUT_EXECUTABLES)/build_verity_metadata.py
+BUILD_VERITY_METADATA := $(HOST_OUT_EXECUTABLES)/build_verity_metadata
 BUILD_VERITY_TREE := $(HOST_OUT_EXECUTABLES)/build_verity_tree
 BOOT_SIGNER := $(HOST_OUT_EXECUTABLES)/boot_signer
 FUTILITY := $(HOST_OUT_EXECUTABLES)/futility-host
-VBOOT_SIGNER := prebuilts/misc/scripts/vboot_signer/vboot_signer.sh
+VBOOT_SIGNER := $(HOST_OUT_EXECUTABLES)/vboot_signer
 FEC := $(HOST_OUT_EXECUTABLES)/fec
 BRILLO_UPDATE_PAYLOAD := $(HOST_OUT_EXECUTABLES)/brillo_update_payload
 
-DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump2$(BUILD_EXECUTABLE_SUFFIX)
+DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump$(BUILD_EXECUTABLE_SUFFIX)
 PROFMAN := $(HOST_OUT_EXECUTABLES)/profman
 
 FINDBUGS_DIR := external/owasp/sanitizer/tools/findbugs/bin
@@ -617,11 +624,9 @@
 
 USE_OPENJDK9 := true
 
-ifeq ($(EXPERIMENTAL_USE_OPENJDK9),)
+ifeq ($(EXPERIMENTAL_JAVA_LANGUAGE_LEVEL_9),)
 TARGET_OPENJDK9 :=
-else ifeq ($(EXPERIMENTAL_USE_OPENJDK9),1.8)
-TARGET_OPENJDK9 :=
-else ifeq ($(EXPERIMENTAL_USE_OPENJDK9),true)
+else ifeq ($(EXPERIMENTAL_JAVA_LANGUAGE_LEVEL_9),true)
 TARGET_OPENJDK9 := true
 endif
 
@@ -762,7 +767,7 @@
 ifdef PRODUCT_DEFAULT_DEV_CERTIFICATE
   DEFAULT_SYSTEM_DEV_CERTIFICATE := $(PRODUCT_DEFAULT_DEV_CERTIFICATE)
 else
-  DEFAULT_SYSTEM_DEV_CERTIFICATE := build/target/product/security/testkey
+  DEFAULT_SYSTEM_DEV_CERTIFICATE := build/make/target/product/security/testkey
 endif
 .KATI_READONLY := DEFAULT_SYSTEM_DEV_CERTIFICATE
 
@@ -808,15 +813,6 @@
     PLATFORM_SEPOLICY_VERSION \
     TOT_SEPOLICY_VERSION \
 
-ifeq ($(PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS),)
-  ifdef PRODUCT_SHIPPING_API_LEVEL
-    ifeq (true,$(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),29))
-      PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS := true
-    endif
-  endif
-endif
-.KATI_READONLY := PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS
-
 ifeq ($(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS),true)
   ifneq ($(PRODUCT_USE_DYNAMIC_PARTITIONS),true)
     $(error PRODUCT_USE_DYNAMIC_PARTITIONS must be true when PRODUCT_RETROFIT_DYNAMIC_PARTITIONS \
diff --git a/core/definitions.mk b/core/definitions.mk
index 59aeb16..1157160 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -1137,10 +1137,9 @@
 	$(PRIVATE_CPPFLAGS_NO_OVERRIDE)
 endef
 
-# PATH_TO_CLANG_TIDY_SHELL is defined in build/soong
+# PATH_TO_CLANG_TIDY is defined in build/soong
 define call-clang-tidy
-CLANG_TIDY=$(PATH_TO_CLANG_TIDY) \
-  $(PATH_TO_CLANG_TIDY_SHELL) \
+$(PATH_TO_CLANG_TIDY) \
   $(PRIVATE_TIDY_FLAGS) \
   -checks=$(PRIVATE_TIDY_CHECKS)
 endef
@@ -1847,42 +1846,6 @@
 # b/37750224
 AAPT_ASAN_OPTIONS := ASAN_OPTIONS=detect_leaks=0
 
-# TODO: Right now we generate the asset resources twice, first as part
-# of generating the Java classes, then at the end when packaging the final
-# assets.  This should be changed to do one of two things: (1) Don't generate
-# any resource files the first time, only create classes during that stage;
-# or (2) Don't use the -c flag with the second stage, instead taking the
-# resource files from the first stage as additional input.  My original intent
-# was to use approach (2), but this requires a little more work in the tool.
-# Maybe we should just use approach (1).
-
-# This rule creates the R.java and Manifest.java files, both of which
-# are PRODUCT-neutral.  Don't pass PRIVATE_PRODUCT_AAPT_CONFIG to this invocation.
-define create-resource-java-files
-@mkdir -p $(dir $(PRIVATE_RESOURCE_PUBLICS_OUTPUT))
-rm -rf $(PRIVATE_JAVA_GEN_DIR)
-mkdir -p $(PRIVATE_JAVA_GEN_DIR)
-$(hide) $(AAPT_ASAN_OPTIONS) $(AAPT) package $(PRIVATE_AAPT_FLAGS) -m \
-    $(eval # PRIVATE_PRODUCT_AAPT_CONFIG is intentionally missing-- see comment.) \
-    $(addprefix -J , $(PRIVATE_JAVA_GEN_DIR)) \
-    $(addprefix -M , $(PRIVATE_ANDROID_MANIFEST)) \
-    $(addprefix -P , $(PRIVATE_RESOURCE_PUBLICS_OUTPUT)) \
-    $(addprefix -S , $(PRIVATE_RESOURCE_DIR)) \
-    $(addprefix -A , $(PRIVATE_ASSET_DIR)) \
-    $(addprefix -I , $(PRIVATE_AAPT_INCLUDES)) \
-    $(addprefix -G , $(PRIVATE_PROGUARD_OPTIONS_FILE)) \
-    $(addprefix --min-sdk-version , $(PRIVATE_DEFAULT_APP_TARGET_SDK)) \
-    $(addprefix --target-sdk-version , $(PRIVATE_DEFAULT_APP_TARGET_SDK)) \
-    $(if $(filter --version-code,$(PRIVATE_AAPT_FLAGS)),,--version-code $(PLATFORM_SDK_VERSION)) \
-    $(if $(filter --version-name,$(PRIVATE_AAPT_FLAGS)),,--version-name $(APPS_DEFAULT_VERSION_NAME)) \
-    $(addprefix --rename-manifest-package , $(PRIVATE_MANIFEST_PACKAGE_NAME)) \
-    $(addprefix --rename-instrumentation-target-package , $(PRIVATE_MANIFEST_INSTRUMENTATION_FOR)) \
-    --skip-symbols-without-default-localization
-$(SOONG_ZIP) -o $(PRIVATE_SRCJAR) -C $(PRIVATE_JAVA_GEN_DIR) -D $(PRIVATE_JAVA_GEN_DIR)
-# So that we re-run aapt when the list of input files change
-$(hide) echo $(PRIVATE_RESOURCE_LIST) >/dev/null
-endef
-
 # Search for generated R.java/Manifest.java in $1, copy the found R.java as $2.
 # Also copy them to a central 'R' directory to make it easier to add the files to an IDE.
 define find-generated-R.java
@@ -2034,26 +1997,6 @@
         @$(call emit-line,$(wordlist 13001,13500,$(1)),$(2))
         @$(if $(wordlist 13501,13502,$(1)),$(error Too many words ($(words $(1)))))
 endef
-
-# For a list of jar files, unzip them to a specified directory,
-# but make sure that no META-INF files come along for the ride,
-# unless PRIVATE_DONT_DELETE_JAR_META_INF is set.
-#
-# $(1): files to unzip
-# $(2): destination directory
-define unzip-jar-files
-  $(hide) for f in $(1); \
-  do \
-    if [ ! -f $$f ]; then \
-      echo Missing file $$f; \
-      exit 1; \
-    fi; \
-    unzip -qo $$f -d $(2); \
-    rm -f $(2)/module-info.class; \
-  done
-  $(if $(PRIVATE_DONT_DELETE_JAR_META_INF),,$(hide) rm -rf $(2)/META-INF)
-endef
-
 # Return jar arguments to compress files in a given directory
 # $(1): directory
 #
@@ -2151,9 +2094,9 @@
     $(addprefix --bootclasspath ,$(strip $(PRIVATE_BOOTCLASSPATH))) \
     $(addprefix --classpath ,$(strip $(PRIVATE_ALL_JAVA_HEADER_LIBRARIES))) \
     || ( rm -rf $(dir $@)/classes-turbine ; exit 41 ) && \
-    $(MERGE_ZIPS) -j --ignore-duplicates -stripDir META-INF $@.tmp $@.premerged $(call reverse-list,$(PRIVATE_STATIC_JAVA_HEADER_LIBRARIES)) ; \
+    $(MERGE_ZIPS) -j --ignore-duplicates -stripDir META-INF $@.tmp $@.premerged $(PRIVATE_STATIC_JAVA_HEADER_LIBRARIES) ; \
 else \
-    $(MERGE_ZIPS) -j --ignore-duplicates -stripDir META-INF $@.tmp $(call reverse-list,$(PRIVATE_STATIC_JAVA_HEADER_LIBRARIES)) ; \
+    $(MERGE_ZIPS) -j --ignore-duplicates -stripDir META-INF $@.tmp $(PRIVATE_STATIC_JAVA_HEADER_LIBRARIES) ; \
 fi
 $(hide) $(ZIPTIME) $@.tmp
 $(hide) $(call commit-change-for-toc,$@)
@@ -2247,37 +2190,6 @@
 $(hide) rm -f $(dir $@)d8_input.jar
 endef
 
-#TODO: we kinda want to build different asset packages for
-#      different configurations, then combine them later (or something).
-#      Per-locale, etc.
-#      A list of dynamic and static parameters;  build layers for
-#      dynamic params that lay over the static ones.
-#TODO: update the manifest to point to the package file
-#Note that the version numbers are given to aapt as simple default
-#values; applications can override these by explicitly stating
-#them in their manifest.
-# $(1) the package file
-define create-assets-package
-$(hide) $(AAPT_ASAN_OPTIONS) $(AAPT) package $(PRIVATE_AAPT_FLAGS) \
-    $(addprefix -c , $(PRIVATE_PRODUCT_AAPT_CONFIG)) \
-    $(addprefix --preferred-density , $(PRIVATE_PRODUCT_AAPT_PREF_CONFIG)) \
-    $(addprefix -M , $(PRIVATE_ANDROID_MANIFEST)) \
-    $(addprefix -S , $(PRIVATE_RESOURCE_DIR)) \
-    $(addprefix -A , $(PRIVATE_ASSET_DIR)) \
-    $(addprefix -I , $(PRIVATE_AAPT_INCLUDES)) \
-    $(addprefix --min-sdk-version , $(PRIVATE_DEFAULT_APP_TARGET_SDK)) \
-    $(addprefix --target-sdk-version , $(PRIVATE_DEFAULT_APP_TARGET_SDK)) \
-    $(if $(filter --product,$(PRIVATE_AAPT_FLAGS)),,$(addprefix --product , $(PRIVATE_TARGET_AAPT_CHARACTERISTICS))) \
-    $(if $(filter --version-code,$(PRIVATE_AAPT_FLAGS)),,--version-code $(PLATFORM_SDK_VERSION)) \
-    $(if $(filter --version-name,$(PRIVATE_AAPT_FLAGS)),,--version-name $(APPS_DEFAULT_VERSION_NAME)) \
-    $(addprefix --rename-manifest-package , $(PRIVATE_MANIFEST_PACKAGE_NAME)) \
-    $(addprefix --rename-instrumentation-target-package , $(PRIVATE_MANIFEST_INSTRUMENTATION_FOR)) \
-    --skip-symbols-without-default-localization \
-    -F $(1)
-# So that we re-run aapt when the list of input files change
-$(hide) echo $(PRIVATE_RESOURCE_LIST) >/dev/null
-endef
-
 # We need the extra blank line, so that the command will be on a separate line.
 # $(1): the package
 # $(2): the ABI name
@@ -2382,16 +2294,14 @@
 ifeq ($(HOST_OS),linux)
 # Runs appcompat and store logs in $(PRODUCT_OUT)/appcompat
 define extract-package
-$(if $(filter aapt2, $(1)), \
-  $(AAPT2) dump resources $@ | awk -F ' |=' '/^Package/{print $$3}' >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log &&, \
-  $(AAPT) dump badging $@ | awk -F \' '/^package/{print $$2}' >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log &&)
+$(AAPT2) dump resources $@ | awk -F ' |=' '/^Package/{print $$3}' >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log &&
 endef
 define appcompat-header
 $(hide) \
   mkdir -p $(PRODUCT_OUT)/appcompat && \
   rm -f $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
   echo -n "Package name: " >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
-  $(call extract-package, $(1)) \
+  $(extract-package) \
   echo "Module name in Android tree: $(PRIVATE_MODULE)" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
   echo "Local path in Android tree: $(PRIVATE_PATH)" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
   echo "Install path on $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT): $(PRIVATE_INSTALLED_MODULE)" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
diff --git a/core/deprecation.mk b/core/deprecation.mk
new file mode 100644
index 0000000..11fe290
--- /dev/null
+++ b/core/deprecation.mk
@@ -0,0 +1,55 @@
+# These module types can still be used without warnings or errors.
+AVAILABLE_BUILD_MODULE_TYPES :=$= \
+  BUILD_COPY_HEADERS \
+  BUILD_EXECUTABLE \
+  BUILD_FUZZ_TEST \
+  BUILD_HEADER_LIBRARY \
+  BUILD_HOST_DALVIK_JAVA_LIBRARY \
+  BUILD_HOST_DALVIK_STATIC_JAVA_LIBRARY \
+  BUILD_HOST_EXECUTABLE \
+  BUILD_HOST_JAVA_LIBRARY \
+  BUILD_HOST_PREBUILT \
+  BUILD_HOST_SHARED_LIBRARY \
+  BUILD_HOST_STATIC_LIBRARY \
+  BUILD_JAVA_LIBRARY \
+  BUILD_MULTI_PREBUILT \
+  BUILD_NATIVE_TEST \
+  BUILD_NOTICE_FILE \
+  BUILD_PACKAGE \
+  BUILD_PHONY_PACKAGE \
+  BUILD_PREBUILT \
+  BUILD_RRO_PACKAGE \
+  BUILD_SHARED_LIBRARY \
+  BUILD_STATIC_JAVA_LIBRARY \
+  BUILD_STATIC_LIBRARY \
+
+# These are BUILD_* variables that will throw a warning when used. This is
+# generally a temporary state until all the devices are marked with the
+# relevant BUILD_BROKEN_USES_BUILD_* variables; these then move to
+# DEFAULT_ERROR_BUILD_MODULE_TYPES.
+DEFAULT_WARNING_BUILD_MODULE_TYPES :=$= \
+  BUILD_AUX_EXECUTABLE \
+  BUILD_AUX_STATIC_LIBRARY \
+  BUILD_HOST_FUZZ_TEST \
+  BUILD_HOST_NATIVE_TEST \
+  BUILD_HOST_STATIC_TEST_LIBRARY \
+  BUILD_NATIVE_BENCHMARK \
+  BUILD_STATIC_TEST_LIBRARY \
+
+# These are BUILD_* variables that are errors to reference, but you can set
+# BUILD_BROKEN_USES_BUILD_* in your BoardConfig.mk in order to turn them back
+# to warnings.
+DEFAULT_ERROR_BUILD_MODULE_TYPES :=$= \
+  BUILD_HOST_TEST_CONFIG \
+  BUILD_TARGET_TEST_CONFIG \
+
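+# For example (hypothetical BoardConfig.mk snippet): a device that still
+# references BUILD_HOST_TEST_CONFIG can turn the error back into a warning
+# while it finishes converting by setting
+#   BUILD_BROKEN_USES_BUILD_HOST_TEST_CONFIG := true
+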
+# These are BUILD_* variables that are always errors to reference.
+# Setting the BUILD_BROKEN_USES_BUILD_* variables is also an error.
+OBSOLETE_BUILD_MODULE_TYPES :=$= \
+  BUILD_HOST_SHARED_TEST_LIBRARY \
+  BUILD_SHARED_TEST_LIBRARY \
+
+$(foreach m,$(OBSOLETE_BUILD_MODULE_TYPES),\
+  $(KATI_obsolete_var $(m),Please convert to Soong) \
+  $(KATI_obsolete_var BUILD_BROKEN_USES_$(m),Please convert to Soong))
+
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 180edaf..32690fe 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -26,7 +26,7 @@
 # We can do this only if preopt is enabled and if the product uses libart config (which sets the
 # default properties for preopting).
 ifeq ($(WITH_DEXPREOPT), true)
-ifeq ($(PRODUCT_USES_ART), true)
+ifeq ($(PRODUCT_USES_DEFAULT_ART_CONFIG), true)
 
 boot_zip := $(PRODUCT_OUT)/boot.zip
 bootclasspath_jars := $(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)
@@ -45,5 +45,5 @@
 
 $(call dist-for-goals, droidcore, $(boot_zip))
 
-endif  #PRODUCT_USES_ART
+endif  #PRODUCT_USES_DEFAULT_ART_CONFIG
 endif  #WITH_DEXPREOPT
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index b5834b0..c44e4bd 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -1,4 +1,4 @@
-DEX_PREOPT_CONFIG := $(PRODUCT_OUT)/dexpreopt.config
+DEX_PREOPT_CONFIG := $(SOONG_OUT_DIR)/dexpreopt.config
 
 # The default value for LOCAL_DEX_PREOPT
 DEX_PREOPT_DEFAULT ?= true
@@ -169,12 +169,6 @@
     fi)
 endif
 
-# Dummy rule to create dexpreopt.config, it will already have been created
-# by the $(file) call above, but a rule needs to exist to keep the dangling
-# rule check happy.
-$(DEX_PREOPT_CONFIG):
-	@#empty
-
 DEXPREOPT_GEN_DEPS := \
   $(SOONG_HOST_OUT_EXECUTABLES)/profman \
   $(DEX2OAT) \
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 85ddbfa..266ebd2 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -242,12 +242,12 @@
 
   .KATI_RESTAT: $(my_dexpreopt_script) $(my_strip_script)
   $(my_dexpreopt_script): PRIVATE_MODULE := $(LOCAL_MODULE)
-  $(my_dexpreopt_script): PRIVATE_GLOBAL_CONFIG := $(PRODUCT_OUT)/dexpreopt.config
+  $(my_dexpreopt_script): PRIVATE_GLOBAL_CONFIG := $(DEX_PREOPT_CONFIG_FOR_MAKE)
   $(my_dexpreopt_script): PRIVATE_MODULE_CONFIG := $(my_dexpreopt_config)
   $(my_dexpreopt_script): PRIVATE_STRIP_SCRIPT := $(my_strip_script)
   $(my_dexpreopt_script): .KATI_IMPLICIT_OUTPUTS := $(my_strip_script)
   $(my_dexpreopt_script): $(DEXPREOPT_GEN)
-  $(my_dexpreopt_script): $(my_dexpreopt_config) $(PRODUCT_OUT)/dexpreopt.config
+  $(my_dexpreopt_script): $(my_dexpreopt_config) $(DEX_PREOPT_CONFIG_FOR_MAKE)
 	@echo "$(PRIVATE_MODULE) dexpreopt gen"
 	$(DEXPREOPT_GEN) -global $(PRIVATE_GLOBAL_CONFIG) -module $(PRIVATE_MODULE_CONFIG) \
 	-dexpreopt_script $@ -strip_script $(PRIVATE_STRIP_SCRIPT) \
diff --git a/core/dpi_specific_apk.mk b/core/dpi_specific_apk.mk
deleted file mode 100644
index ad073c7..0000000
--- a/core/dpi_specific_apk.mk
+++ /dev/null
@@ -1,77 +0,0 @@
-# Set up rules to build dpi-specific apk, with whatever else from the base apk.
-# Input variable: my_dpi, and all other variables set up in package_internal.mk.
-#
-
-dpi_apk_name := $(LOCAL_MODULE)_$(my_dpi)
-dpi_intermediate := $(call intermediates-dir-for,APPS,$(dpi_apk_name))
-built_dpi_apk := $(dpi_intermediate)/package.apk
-additional_certificates := $(foreach c,$(LOCAL_ADDITIONAL_CERTIFICATES), $(c).x509.pem $(c).pk8)
-
-# Set up all the target-specific variables.
-$(built_dpi_apk): PRIVATE_MODULE := $(dpi_apk_name)
-$(built_dpi_apk): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS) --pseudo-localize $($(LOCAL_PACKAGE_NAME)_aapt_flags_$(my_dpi))
-# Clear PRIVATE_PRODUCT_AAPT_CONFIG to include everything by default.
-$(built_dpi_apk): PRIVATE_PRODUCT_AAPT_CONFIG :=
-$(built_dpi_apk): PRIVATE_PRODUCT_AAPT_PREF_CONFIG := $(my_dpi)
-$(built_dpi_apk): PRIVATE_ANDROID_MANIFEST := $(full_android_manifest)
-$(built_dpi_apk): PRIVATE_RESOURCE_DIR := $(LOCAL_RESOURCE_DIR)
-$(built_dpi_apk): PRIVATE_ASSET_DIR := $(LOCAL_ASSET_DIR)
-$(built_dpi_apk): PRIVATE_AAPT_INCLUDES := $(all_library_res_package_exports)
-$(built_dpi_apk): PRIVATE_RESOURCE_LIST := $(all_res_assets)
-$(built_dpi_apk): PRIVATE_DEFAULT_APP_TARGET_SDK := $(call module-target-sdk-version)
-$(built_dpi_apk): PRIVATE_MANIFEST_PACKAGE_NAME := $(LOCAL_MANIFEST_PACKAGE_NAME)
-$(built_dpi_apk): PRIVATE_MANIFEST_INSTRUMENTATION_FOR := $(LOCAL_INSTRUMENTATION_FOR)
-$(built_dpi_apk): PRIVATE_JNI_SHARED_LIBRARIES := $(jni_shared_libraries_with_abis)
-$(built_dpi_apk): PRIVATE_JNI_SHARED_LIBRARIES_ABI := $(jni_shared_libraries_abis)
-$(built_dpi_apk): PRIVATE_PRIVATE_KEY := $(private_key)
-$(built_dpi_apk): PRIVATE_CERTIFICATE := $(certificate)
-$(built_dpi_apk): $(additional_certificates)
-$(built_dpi_apk): PRIVATE_ADDITIONAL_CERTIFICATES := $(additional_certificates)
-
-$(built_dpi_apk): PRIVATE_SOURCE_ARCHIVE :=
-ifneq ($(full_classes_jar),)
-$(built_dpi_apk): PRIVATE_DEX_FILE := $(built_dex)
-# Use the jarjar processed arhive as the initial package file.
-$(built_dpi_apk): PRIVATE_SOURCE_ARCHIVE := $(full_classes_pre_proguard_jar)
-$(built_dpi_apk): $(built_dex)
-else
-$(built_dpi_apk): PRIVATE_DEX_FILE :=
-endif # full_classes_jar
-
-# Set up dependenncies and the build recipe.
-$(built_dpi_apk) : $(R_file_stamp)
-$(built_dpi_apk) : $(all_library_res_package_export_deps)
-$(built_dpi_apk) : $(private_key) $(certificate) $(SIGNAPK_JAR)
-$(built_dpi_apk) : $(AAPT)
-$(built_dpi_apk) : $(MERGE_ZIPS) $(SOONG_ZIP) $(ZIP2ZIP)
-$(built_dpi_apk) : $(all_res_assets) $(jni_shared_libraries) $(full_android_manifest)
-	@echo "target Package: $(PRIVATE_MODULE) ($@)"
-	rm -rf $@.parts
-	mkdir -p $@.parts
-	$(call create-assets-package,$@.parts/apk.zip)
-ifneq ($(jni_shared_libraries),)
-	$(call create-jni-shared-libs-package,$@.parts/jni.zip)
-endif
-ifeq ($(full_classes_jar),)
-# We don't build jar, need to add the Java resources here.
-	$(if $(PRIVATE_EXTRA_JAR_ARGS),$(call create-java-resources-jar,$@.parts/res.zip))
-else
-	$(call create-dex-jar,$@.parts/dex.zip,$(PRIVATE_DEX_FILE))
-	$(call extract-resources-jar,$@.parts/res.zip,$(PRIVATE_SOURCE_ARCHIVE))
-endif
-	$(MERGE_ZIPS) $@ $@.parts/*.zip
-	rm -rf $@.parts
-	$(sign-package)
-
-# Set up global variables to register this apk to the higher-level dependency graph.
-ALL_MODULES += $(dpi_apk_name)
-ALL_MODULES.$(dpi_apk_name).CLASS := APPS
-ALL_MODULES.$(dpi_apk_name).BUILT := $(built_dpi_apk)
-ALL_MODULES.$(dpi_apk_name).TARGET_BUILT := $(built_dpi_apk)
-PACKAGES := $(PACKAGES) $(dpi_apk_name)
-PACKAGES.$(dpi_apk_name).PRIVATE_KEY := $(private_key)
-PACKAGES.$(dpi_apk_name).CERTIFICATE := $(certificate)
-
-# Phony targets used by "apps_only".
-.PHONY: $(dpi_apk_name)
-$(dpi_apk_name) : $(built_dpi_apk)
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 5131598..46edc0e 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -826,7 +826,6 @@
 TARGET_OUT_SHARED_LIBRARIES_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/system/lib
 TARGET_OUT_VENDOR_SHARED_LIBRARIES_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/$(TARGET_COPY_OUT_VENDOR)/lib
 TARGET_ROOT_OUT_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)
-TARGET_ROOT_OUT_SBIN_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/sbin
 TARGET_ROOT_OUT_BIN_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/bin
 TARGET_OUT_COVERAGE := $(PRODUCT_OUT)/coverage
 .KATI_READONLY := \
@@ -835,7 +834,6 @@
   TARGET_OUT_SHARED_LIBRARIES_UNSTRIPPED \
   TARGET_OUT_VENDOR_SHARED_LIBRARIES_UNSTRIPPED \
   TARGET_ROOT_OUT_UNSTRIPPED \
-  TARGET_ROOT_OUT_SBIN_UNSTRIPPED \
   TARGET_ROOT_OUT_BIN_UNSTRIPPED \
   TARGET_OUT_COVERAGE
 
@@ -845,13 +843,11 @@
 
 TARGET_ROOT_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ROOT)
 TARGET_ROOT_OUT_BIN := $(TARGET_ROOT_OUT)/bin
-TARGET_ROOT_OUT_SBIN := $(TARGET_ROOT_OUT)/sbin
 TARGET_ROOT_OUT_ETC := $(TARGET_ROOT_OUT)/etc
 TARGET_ROOT_OUT_USR := $(TARGET_ROOT_OUT)/usr
 .KATI_READONLY := \
   TARGET_ROOT_OUT \
   TARGET_ROOT_OUT_BIN \
-  TARGET_ROOT_OUT_SBIN \
   TARGET_ROOT_OUT_ETC \
   TARGET_ROOT_OUT_USR
 
diff --git a/core/force_aapt2.mk b/core/force_aapt2.mk
index db2e60f..25b45e4 100644
--- a/core/force_aapt2.mk
+++ b/core/force_aapt2.mk
@@ -14,50 +14,40 @@
 # limitations under the License.
 #
 
-# Including this makefile will force AAPT2 on if FORCE_AAPT2==true,
+# Including this makefile will force AAPT2 on,
 # rewriting some properties to convert standard AAPT usage to AAPT2.
 
-ifneq ($(FORCE_AAPT2),false)
-  ifeq ($(LOCAL_USE_AAPT2),)
-    # Force AAPT2 on
-    LOCAL_USE_AAPT2 := true
-    # Filter out support library resources
-    LOCAL_RESOURCE_DIR := $(filter-out \
-      prebuilts/sdk/current/% \
-      frameworks/support/%,\
-        $(LOCAL_RESOURCE_DIR))
-    # Filter out unnecessary aapt flags
-    ifneq (,$(filter --extra-packages,$(LOCAL_AAPT_FLAGS)))
-      LOCAL_AAPT_FLAGS := $(subst --extra-packages=,--extra-packages$(space), \
-        $(filter-out \
-          --extra-packages=android.support.% \
-          --extra-packages=androidx.%, \
-            $(subst --extra-packages$(space),--extra-packages=,$(LOCAL_AAPT_FLAGS))))
-        ifeq (,$(filter --extra-packages,$(LOCAL_AAPT_FLAGS)))
-          LOCAL_AAPT_FLAGS := $(filter-out --auto-add-overlay,$(LOCAL_AAPT_FLAGS))
-        endif
-    endif
-
-    # AAPT2 is pickier about missing resources.  Support library may have references to resources
-    # added in current, so always treat LOCAL_SDK_VERSION as LOCAL_SDK_RES_VERSION := current.
-    ifdef LOCAL_SDK_VERSION
-      LOCAL_SDK_RES_VERSION := current
-    endif
-
-    ifeq (,$(strip $(LOCAL_MANIFEST_FILE)$(LOCAL_FULL_MANIFEST_FILE)))
-      ifeq (,$(wildcard $(LOCAL_PATH)/AndroidManifest.xml))
-        # work around missing manifests by creating a default one
-        LOCAL_FULL_MANIFEST_FILE := $(call local-intermediates-dir,COMMON)/DefaultManifest.xml
-        $(call create-default-manifest-file,$(LOCAL_FULL_MANIFEST_FILE),$(call module-min-sdk-version))
-      endif
-    endif
-  endif
+ifeq ($(LOCAL_USE_AAPT2),false)
+  $(call pretty-error, LOCAL_USE_AAPT2 := false is no longer supported)
 endif
 
-ifneq ($(LOCAL_USE_AAPT2),true)
-  ifneq ($(LOCAL_USE_AAPT2),false)
-    ifneq ($(LOCAL_USE_AAPT2),)
-      $(call pretty-error,Invalid value for LOCAL_USE_AAPT2: "$(LOCAL_USE_AAPT2)")
+# Filter out support library resources
+LOCAL_RESOURCE_DIR := $(filter-out \
+  prebuilts/sdk/current/% \
+  frameworks/support/%,\
+    $(LOCAL_RESOURCE_DIR))
+# Filter out unnecessary aapt flags
+ifneq (,$(filter --extra-packages,$(LOCAL_AAPT_FLAGS)))
+  LOCAL_AAPT_FLAGS := $(subst --extra-packages=,--extra-packages$(space), \
+    $(filter-out \
+      --extra-packages=android.support.% \
+      --extra-packages=androidx.%, \
+        $(subst --extra-packages$(space),--extra-packages=,$(LOCAL_AAPT_FLAGS))))
+    ifeq (,$(filter --extra-packages,$(LOCAL_AAPT_FLAGS)))
+      LOCAL_AAPT_FLAGS := $(filter-out --auto-add-overlay,$(LOCAL_AAPT_FLAGS))
     endif
+endif
+
+# AAPT2 is pickier about missing resources.  Support library may have references to resources
+# added in current, so always treat LOCAL_SDK_VERSION := <number> as LOCAL_SDK_RES_VERSION := current.
+ifneq (,$(filter-out current system_current test_current core_current,$(LOCAL_SDK_VERSION)))
+  LOCAL_SDK_RES_VERSION := current
+endif
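+# For example (hypothetical values): LOCAL_SDK_VERSION := 28 gets its resources
+# compiled with LOCAL_SDK_RES_VERSION := current here, while
+# LOCAL_SDK_VERSION := system_current is left untouched.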
+
+ifeq (,$(strip $(LOCAL_MANIFEST_FILE)$(LOCAL_FULL_MANIFEST_FILE)))
+  ifeq (,$(wildcard $(LOCAL_PATH)/AndroidManifest.xml))
+    # work around missing manifests by creating a default one
+    LOCAL_FULL_MANIFEST_FILE := $(call local-intermediates-dir,COMMON)/DefaultManifest.xml
+    $(call create-default-manifest-file,$(LOCAL_FULL_MANIFEST_FILE),$(call module-min-sdk-version))
   endif
 endif
diff --git a/core/generate_enforce_rro.mk b/core/generate_enforce_rro.mk
index f7877f2..6a23aeb 100644
--- a/core/generate_enforce_rro.mk
+++ b/core/generate_enforce_rro.mk
@@ -34,7 +34,6 @@
 endif
 
 LOCAL_FULL_MANIFEST_FILE := $(rro_android_manifest_file)
-LOCAL_CERTIFICATE := platform
 
 LOCAL_AAPT_FLAGS += --auto-add-overlay
 LOCAL_RESOURCE_DIR := $(enforce_rro_source_overlays)
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index 2a251e8..423575c 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -104,7 +104,6 @@
 ifneq ($(TURBINE_ENABLED),false)
 
 $(full_classes_turbine_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
-$(full_classes_turbine_jar): PRIVATE_DONT_DELETE_JAR_META_INF := $(LOCAL_DONT_DELETE_JAR_META_INF)
 $(full_classes_turbine_jar): PRIVATE_SRCJARS := $(LOCAL_SRCJARS)
 $(full_classes_turbine_jar): \
     $(java_source_list_file) \
@@ -142,7 +141,7 @@
             $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf)
 	$(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
             $(if $(PRIVATE_DONT_DELETE_JAR_META_INF),,-stripDir META-INF -zipToNotStrip $<) \
-            $@ $< $(call reverse-list,$(PRIVATE_STATIC_JAVA_LIBRARIES))
+            $@ $< $(PRIVATE_STATIC_JAVA_LIBRARIES)
 
 # Run jarjar if necessary, otherwise just copy the file.
 ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index c8d2ee7..1225fa9 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -95,13 +95,15 @@
 javac-check-$(LOCAL_MODULE) : $(full_classes_compiled_jar)
 .PHONY: javac-check-$(LOCAL_MODULE)
 
+$(full_classes_combined_jar): PRIVATE_DONT_DELETE_JAR_META_INF := $(LOCAL_DONT_DELETE_JAR_META_INF)
 $(full_classes_combined_jar): $(full_classes_compiled_jar) \
                               $(jar_manifest_file) \
                               $(full_static_java_libs) | $(MERGE_ZIPS)
 	$(if $(PRIVATE_JAR_MANIFEST), $(hide) sed -e "s/%BUILD_NUMBER%/$(BUILD_NUMBER_FROM_FILE)/" \
             $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf)
 	$(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
-            -stripDir META-INF -zipToNotStrip $< $@ $< $(call reverse-list,$(PRIVATE_STATIC_JAVA_LIBRARIES))
+            $(if $(PRIVATE_DONT_DELETE_JAR_META_INF),,-stripDir META-INF -zipToNotStrip $<) \
+            $@ $< $(PRIVATE_STATIC_JAVA_LIBRARIES)
 
 # Run jarjar if necessary, otherwise just copy the file.
 ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
diff --git a/core/host_static_test_lib.mk b/core/host_static_test_lib.mk
index a24cd62..a9e39b1 100644
--- a/core/host_static_test_lib.mk
+++ b/core/host_static_test_lib.mk
@@ -6,4 +6,4 @@
 
 include $(BUILD_SYSTEM)/host_test_internal.mk
 
-include $(BUILD_HOST_STATIC_LIBRARY)
+include $(BUILD_SYSTEM)/host_static_library.mk
diff --git a/core/install_jni_libs.mk b/core/install_jni_libs.mk
index 01f7f10..515d34f 100644
--- a/core/install_jni_libs.mk
+++ b/core/install_jni_libs.mk
@@ -13,10 +13,10 @@
 
 my_embed_jni :=
 ifneq ($(TARGET_BUILD_APPS),)
-my_embed_jni := true
+  my_embed_jni := true
 endif
 ifneq ($(filter tests samples, $(LOCAL_MODULE_TAGS)),)
-my_embed_jni := true
+  my_embed_jni := true
 endif
 
 # If the APK is not installed in one of the following partitions, force its libraries
@@ -29,13 +29,13 @@
     $(TARGET_OUT_PRODUCT_SERVICES)/% \
 
 ifeq ($(filter $(supported_partition_patterns),$(my_module_path)),)
-    my_embed_jni := true
+  my_embed_jni := true
 endif
 
 # If we're installing this APP as a compressed module, we include all JNI libraries
 # in the compressed artifact, rather than as separate files on the partition in question.
 ifdef LOCAL_COMPRESSED_MODULE
-my_embed_jni := true
+  my_embed_jni := true
 endif
 
 jni_shared_libraries :=
@@ -50,56 +50,56 @@
 my_add_jni :=
 # The module is built for TARGET_ARCH
 ifeq ($(my_2nd_arch_prefix),$(LOCAL_2ND_ARCH_VAR_PREFIX))
-my_add_jni := true
+  my_add_jni := true
 endif
 # Or it explicitly requires both
 ifeq ($(my_module_multilib),both)
-my_add_jni := true
+  my_add_jni := true
 endif
 ifeq ($(my_add_jni),true)
-my_prebuilt_jni_libs := $(LOCAL_PREBUILT_JNI_LIBS_$(TARGET_ARCH))
-ifndef my_prebuilt_jni_libs
-my_prebuilt_jni_libs := $(LOCAL_PREBUILT_JNI_LIBS)
-endif
-include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
-jni_shared_libraries += $(my_jni_shared_libraries)
-jni_shared_libraries_abis += $(my_jni_shared_libraries_abi)
-jni_shared_libraries_with_abis += $(addprefix $(my_jni_shared_libraries_abi):,\
-    $(my_jni_shared_libraries))
-embedded_prebuilt_jni_libs += $(my_embedded_prebuilt_jni_libs)
+  my_prebuilt_jni_libs := $(LOCAL_PREBUILT_JNI_LIBS_$(TARGET_ARCH))
+  ifndef my_prebuilt_jni_libs
+    my_prebuilt_jni_libs := $(LOCAL_PREBUILT_JNI_LIBS)
+  endif
+  include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
+  jni_shared_libraries += $(my_jni_shared_libraries)
+  jni_shared_libraries_abis += $(my_jni_shared_libraries_abi)
+  jni_shared_libraries_with_abis += $(addprefix $(my_jni_shared_libraries_abi):,\
+      $(my_jni_shared_libraries))
+  embedded_prebuilt_jni_libs += $(my_embedded_prebuilt_jni_libs)
 
-# Include RS dynamically-generated libraries as well
-# TODO: Add multilib support once RS supports generating multilib libraries.
-jni_shared_libraries += $(rs_compatibility_jni_libs)
-jni_shared_libraries_with_abis += $(addprefix $(my_jni_shared_libraries_abi):,\
-    $(rs_compatibility_jni_libs))
+  # Include RS dynamically-generated libraries as well
+  # TODO: Add multilib support once RS supports generating multilib libraries.
+  jni_shared_libraries += $(rs_compatibility_jni_libs)
+  jni_shared_libraries_with_abis += $(addprefix $(my_jni_shared_libraries_abi):,\
+      $(rs_compatibility_jni_libs))
 endif  # my_add_jni
 
 #######################################
 # For TARGET_2ND_ARCH
 ifdef TARGET_2ND_ARCH
-my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
-my_add_jni :=
-# The module is built for TARGET_2ND_ARCH
-ifeq ($(my_2nd_arch_prefix),$(LOCAL_2ND_ARCH_VAR_PREFIX))
-my_add_jni := true
-endif
-# Or it explicitly requires both
-ifeq ($(my_module_multilib),both)
-my_add_jni := true
-endif
-ifeq ($(my_add_jni),true)
-my_prebuilt_jni_libs := $(LOCAL_PREBUILT_JNI_LIBS_$(TARGET_2ND_ARCH))
-ifndef my_prebuilt_jni_libs
-my_prebuilt_jni_libs := $(LOCAL_PREBUILT_JNI_LIBS)
-endif
-include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
-jni_shared_libraries += $(my_jni_shared_libraries)
-jni_shared_libraries_abis += $(my_jni_shared_libraries_abi)
-jni_shared_libraries_with_abis += $(addprefix $(my_jni_shared_libraries_abi):,\
-    $(my_jni_shared_libraries))
-embedded_prebuilt_jni_libs += $(my_embedded_prebuilt_jni_libs)
-endif  # my_add_jni
+  my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
+  my_add_jni :=
+  # The module is built for TARGET_2ND_ARCH
+  ifeq ($(my_2nd_arch_prefix),$(LOCAL_2ND_ARCH_VAR_PREFIX))
+    my_add_jni := true
+  endif
+  # Or it explicitly requires both
+  ifeq ($(my_module_multilib),both)
+    my_add_jni := true
+  endif
+  ifeq ($(my_add_jni),true)
+    my_prebuilt_jni_libs := $(LOCAL_PREBUILT_JNI_LIBS_$(TARGET_2ND_ARCH))
+    ifndef my_prebuilt_jni_libs
+      my_prebuilt_jni_libs := $(LOCAL_PREBUILT_JNI_LIBS)
+    endif
+    include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
+    jni_shared_libraries += $(my_jni_shared_libraries)
+    jni_shared_libraries_abis += $(my_jni_shared_libraries_abi)
+    jni_shared_libraries_with_abis += $(addprefix $(my_jni_shared_libraries_abi):,\
+        $(my_jni_shared_libraries))
+    embedded_prebuilt_jni_libs += $(my_embedded_prebuilt_jni_libs)
+  endif  # my_add_jni
 endif  # TARGET_2ND_ARCH
 
 jni_shared_libraries := $(strip $(jni_shared_libraries))
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index a79a49a..eac0414 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -12,117 +12,117 @@
 #   my_embedded_prebuilt_jni_libs, prebuilt jni libs embedded in prebuilt apk.
 #
 
-my_jni_shared_libraries := \
+my_jni_shared_libraries := $(strip \
     $(foreach lib,$(LOCAL_JNI_SHARED_LIBRARIES), \
-      $(call intermediates-dir-for,SHARED_LIBRARIES,$(lib),,,$(my_2nd_arch_prefix))/$(lib).so)
+      $(call intermediates-dir-for,SHARED_LIBRARIES,$(lib),,,$(my_2nd_arch_prefix))/$(lib).so))
 
 # App-specific lib path.
 my_app_lib_path := $(dir $(LOCAL_INSTALLED_MODULE))lib/$(TARGET_$(my_2nd_arch_prefix)ARCH)
 my_embedded_prebuilt_jni_libs :=
 
 ifdef my_embed_jni
-# App explicitly requires the prebuilt NDK stl shared libraies.
-# The NDK stl shared libraries should never go to the system image.
-ifeq ($(LOCAL_NDK_STL_VARIANT),c++_shared)
-ifndef LOCAL_SDK_VERSION
-$(error LOCAL_SDK_VERSION must be defined with LOCAL_NDK_STL_VARIANT, \
-    LOCAL_PACKAGE_NAME=$(LOCAL_PACKAGE_NAME))
-endif
-my_jni_shared_libraries += \
-    $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources/cxx-stl/llvm-libc++/libs/$(TARGET_$(my_2nd_arch_prefix)CPU_ABI)/libc++_shared.so
-endif
+  # App explicitly requires the prebuilt NDK stl shared libraries.
+  # The NDK stl shared libraries should never go to the system image.
+  ifeq ($(LOCAL_NDK_STL_VARIANT),c++_shared)
+    ifndef LOCAL_SDK_VERSION
+      $(error LOCAL_SDK_VERSION must be defined with LOCAL_NDK_STL_VARIANT, \
+          LOCAL_PACKAGE_NAME=$(LOCAL_PACKAGE_NAME))
+    endif
+    my_jni_shared_libraries += \
+        $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources/cxx-stl/llvm-libc++/libs/$(TARGET_$(my_2nd_arch_prefix)CPU_ABI)/libc++_shared.so
+  endif
 
-# Set the abi directory used by the local JNI shared libraries.
-# (Doesn't change how the local shared libraries are compiled, just
-# sets where they are stored in the apk.)
-ifeq ($(LOCAL_JNI_SHARED_LIBRARIES_ABI),)
+  # Set the abi directory used by the local JNI shared libraries.
+  # (Doesn't change how the local shared libraries are compiled, just
+  # sets where they are stored in the apk.)
+  ifeq ($(LOCAL_JNI_SHARED_LIBRARIES_ABI),)
     my_jni_shared_libraries_abi := $(TARGET_$(my_2nd_arch_prefix)CPU_ABI)
-else
+  else
     my_jni_shared_libraries_abi := $(LOCAL_JNI_SHARED_LIBRARIES_ABI)
-endif
+  endif
 
-else  # not my_embed_jni
+else ifneq ($(my_jni_shared_libraries),) # not my_embed_jni
 
-my_jni_shared_libraries := $(strip $(my_jni_shared_libraries))
-ifneq ($(my_jni_shared_libraries),)
-# The jni libaries will be installed to the system.img.
-my_jni_filenames := $(notdir $(my_jni_shared_libraries))
-# Make sure the JNI libraries get installed
-my_shared_library_path := $(call get_non_asan_path,\
-  $($(my_2nd_arch_prefix)TARGET_OUT$(partition_tag)_SHARED_LIBRARIES))
-# Do not use order-only dependency, because we want to rebuild the image if an jni is updated.
-my_installed_library := $(addprefix $(my_shared_library_path)/, $(my_jni_filenames))
-$(LOCAL_INSTALLED_MODULE) : $(my_installed_library)
-ALL_MODULES.$(LOCAL_MODULE).INSTALLED += $(my_installed_library)
+  # The jni libraries will be installed to the system.img.
+  my_jni_filenames := $(notdir $(my_jni_shared_libraries))
+  # Make sure the JNI libraries get installed
+  my_shared_library_path := $(call get_non_asan_path,\
+      $($(my_2nd_arch_prefix)TARGET_OUT$(partition_tag)_SHARED_LIBRARIES))
+  my_installed_library := $(addprefix $(my_shared_library_path)/, $(my_jni_filenames))
+  # Do not use order-only dependency, because we want to rebuild the image if a jni lib is updated.
+  $(LOCAL_INSTALLED_MODULE) : $(my_installed_library)
 
-# Create symlink in the app specific lib path
-# Skip creating this symlink when running the second part of a target sanitization build.
-ifeq ($(filter address,$(SANITIZE_TARGET)),)
-ifdef LOCAL_POST_INSTALL_CMD
-# Add a shell command separator
-LOCAL_POST_INSTALL_CMD += ;
-endif
+  ALL_MODULES.$(LOCAL_MODULE).INSTALLED += $(my_installed_library)
 
-my_symlink_target_dir := $(patsubst $(PRODUCT_OUT)%,%,\
-    $(my_shared_library_path))
-LOCAL_POST_INSTALL_CMD += \
-  mkdir -p $(my_app_lib_path) \
-  $(foreach lib, $(my_jni_filenames), ;ln -sf $(my_symlink_target_dir)/$(lib) $(my_app_lib_path)/$(lib))
-$(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
-else
-ifdef LOCAL_POST_INSTALL_CMD
-$(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
-endif
-endif
+  # Create symlink in the app specific lib path
+  # Skip creating this symlink when running the second part of a target sanitization build.
+  ifeq ($(filter address,$(SANITIZE_TARGET)),)
+    ifdef LOCAL_POST_INSTALL_CMD
+      # Add a shell command separator
+      LOCAL_POST_INSTALL_CMD += ;
+    endif
 
-# Clear jni_shared_libraries to not embed it into the apk.
-my_jni_shared_libraries :=
-endif  # $(my_jni_shared_libraries) not empty
+    my_symlink_target_dir := $(patsubst $(PRODUCT_OUT)%,%,\
+        $(my_shared_library_path))
+    LOCAL_POST_INSTALL_CMD += \
+        mkdir -p $(my_app_lib_path) \
+        $(foreach lib, $(my_jni_filenames), ;ln -sf $(my_symlink_target_dir)/$(lib) $(my_app_lib_path)/$(lib))
+    $(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
+  else
+    ifdef LOCAL_POST_INSTALL_CMD
+      $(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
+    endif
+  endif
+
+  # Clear jni_shared_libraries to not embed it into the apk.
+  my_jni_shared_libraries :=
 endif  # my_embed_jni
 
 ifdef my_prebuilt_jni_libs
-# Files like @lib/<abi>/libfoo.so (path inside the apk) are JNI libs embedded prebuilt apk;
-# Files like path/to/libfoo.so (path relative to LOCAL_PATH) are prebuilts in the source tree.
-my_embedded_prebuilt_jni_libs := $(patsubst @%,%, \
-    $(filter @%, $(my_prebuilt_jni_libs)))
+  # Files like @lib/<abi>/libfoo.so (path inside the apk) are JNI libs embedded in the prebuilt apk;
+  # Files like path/to/libfoo.so (path relative to LOCAL_PATH) are prebuilts in the source tree.
+  my_embedded_prebuilt_jni_libs := $(patsubst @%,%, \
+      $(filter @%, $(my_prebuilt_jni_libs)))
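+  # For example (hypothetical module):
+  #   LOCAL_PREBUILT_JNI_LIBS := @lib/armeabi-v7a/libfoo.so jni/libbar.so
+  # libfoo.so is taken as already embedded in the prebuilt apk, while
+  # $(LOCAL_PATH)/jni/libbar.so is treated as a prebuilt from the source tree.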
 
-# prebuilt JNI exsiting as separate source files.
-my_prebuilt_jni_libs := $(addprefix $(LOCAL_PATH)/, \
-    $(filter-out @%, $(my_prebuilt_jni_libs)))
-ifdef my_prebuilt_jni_libs
-ifdef my_embed_jni
-# Embed my_prebuilt_jni_libs to the apk
-my_jni_shared_libraries += $(my_prebuilt_jni_libs)
-else # not my_embed_jni
-# Install my_prebuilt_jni_libs as separate files.
-$(foreach lib, $(my_prebuilt_jni_libs), \
-    $(eval $(call copy-one-file, $(lib), $(my_app_lib_path)/$(notdir $(lib)))))
+  # prebuilt JNI libs existing as separate source files.
+  my_prebuilt_jni_libs := $(addprefix $(LOCAL_PATH)/, \
+      $(filter-out @%, $(my_prebuilt_jni_libs)))
+  ifdef my_prebuilt_jni_libs
+    ifdef my_embed_jni
+      # Embed my_prebuilt_jni_libs into the apk
+      my_jni_shared_libraries += $(my_prebuilt_jni_libs)
+    else # not my_embed_jni
+      # Install my_prebuilt_jni_libs as separate files.
+      $(foreach lib, $(my_prebuilt_jni_libs), \
+          $(eval $(call copy-one-file, $(lib), $(my_app_lib_path)/$(notdir $(lib)))))
 
-my_installed_library := $(addprefix $(my_app_lib_path)/, $(notdir $(my_prebuilt_jni_libs)))
-$(LOCAL_INSTALLED_MODULE) : $(my_installed_library)
-ALL_MODULES.$(LOCAL_MODULE).INSTALLED += $(my_installed_library)
-endif  # my_embed_jni
-endif  # inner my_prebuilt_jni_libs
+      my_installed_library := $(addprefix $(my_app_lib_path)/, $(notdir $(my_prebuilt_jni_libs)))
+      $(LOCAL_INSTALLED_MODULE) : $(my_installed_library)
+
+      ALL_MODULES.$(LOCAL_MODULE).INSTALLED += $(my_installed_library)
+    endif  # my_embed_jni
+  endif  # inner my_prebuilt_jni_libs
 endif  # outer my_prebuilt_jni_libs
 
 # Verify that all included libraries are built against the NDK
 include $(BUILD_SYSTEM)/allowed_ndk_types.mk
+
 ifneq ($(strip $(LOCAL_JNI_SHARED_LIBRARIES)),)
-ifneq ($(LOCAL_SDK_VERSION),)
-my_link_type := app:sdk
-my_warn_types := native:platform $(my_warn_ndk_types)
-my_allowed_types := $(my_allowed_ndk_types)
+  ifneq ($(LOCAL_SDK_VERSION),)
+    my_link_type := app:sdk
+    my_warn_types := native:platform $(my_warn_ndk_types)
+    my_allowed_types := $(my_allowed_ndk_types)
     ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
-        my_allowed_types += native:vendor native:vndk native:platform_vndk
+      my_allowed_types += native:vendor native:vndk native:platform_vndk
     endif
-else
-my_link_type := app:platform
-my_warn_types := $(my_warn_ndk_types)
-my_allowed_types := $(my_allowed_ndk_types) native:platform native:vendor native:vndk native:vndk_private native:platform_vndk
-endif
+  else
+    my_link_type := app:platform
+    my_warn_types := $(my_warn_ndk_types)
+    my_allowed_types := $(my_allowed_ndk_types) native:platform native:vendor native:vndk native:vndk_private native:platform_vndk
+  endif
 
-my_link_deps := $(addprefix SHARED_LIBRARIES:,$(LOCAL_JNI_SHARED_LIBRARIES))
+  my_link_deps := $(addprefix SHARED_LIBRARIES:,$(LOCAL_JNI_SHARED_LIBRARIES))
 
-my_common :=
-include $(BUILD_SYSTEM)/link_type.mk
+  my_common :=
+  include $(BUILD_SYSTEM)/link_type.mk
 endif
diff --git a/core/java.mk b/core/java.mk
index 1533963..b951f14 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -244,7 +244,6 @@
 
 $(full_classes_turbine_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
 $(full_classes_turbine_jar): PRIVATE_SRCJARS := $(LOCAL_SRCJARS)
-$(full_classes_turbine_jar): PRIVATE_DONT_DELETE_JAR_META_INF := $(LOCAL_DONT_DELETE_JAR_META_INF)
 $(full_classes_turbine_jar): \
     $(java_source_list_file) \
     $(java_sources_deps) \
@@ -278,7 +277,6 @@
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES := $(LOCAL_JAR_EXCLUDE_FILES)
 $(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES := $(LOCAL_JAR_PACKAGES)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES := $(LOCAL_JAR_EXCLUDE_PACKAGES)
-$(full_classes_compiled_jar): PRIVATE_DONT_DELETE_JAR_META_INF := $(LOCAL_DONT_DELETE_JAR_META_INF)
 $(full_classes_compiled_jar): PRIVATE_JAVA_SOURCE_LIST := $(java_source_list_file)
 $(full_classes_compiled_jar): PRIVATE_ALL_JAVA_HEADER_LIBRARIES := $(full_java_header_libs)
 $(full_classes_compiled_jar): PRIVATE_SRCJARS := $(LOCAL_SRCJARS)
@@ -311,7 +309,7 @@
             $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf)
 	$(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
             $(if $(PRIVATE_DONT_DELETE_JAR_META_INF),,-stripDir META-INF -zipToNotStrip $<) \
-            $@ $< $(call reverse-list,$(PRIVATE_STATIC_JAVA_LIBRARIES))
+            $@ $< $(PRIVATE_STATIC_JAVA_LIBRARIES)
 
 ifdef LOCAL_JAR_PROCESSOR
 # LOCAL_JAR_PROCESSOR_ARGS must be evaluated here to set up the rule-local
@@ -422,10 +420,8 @@
 common_proguard_flags += -dontshrink # don't shrink tests by default
 endif # test package
 ifneq ($(LOCAL_PROGUARD_ENABLED),custom)
-  ifeq ($(LOCAL_USE_AAPT2),true)
-    common_proguard_flag_files += $(foreach l,$(LOCAL_STATIC_ANDROID_LIBRARIES),\
-        $(call intermediates-dir-for,JAVA_LIBRARIES,$(l),,COMMON)/export_proguard_flags)
-  endif
+  common_proguard_flag_files += $(foreach l,$(LOCAL_STATIC_ANDROID_LIBRARIES),\
+      $(call intermediates-dir-for,JAVA_LIBRARIES,$(l),,COMMON)/export_proguard_flags)
 endif
 ifneq ($(common_proguard_flag_files),)
 common_proguard_flags += $(addprefix -include , $(common_proguard_flag_files))
diff --git a/core/java_common.mk b/core/java_common.mk
index f5da120..db5b6c3 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -534,13 +534,6 @@
 my_allowed_types := java:sdk java:system java:platform java:core
 endif
 
-ifdef LOCAL_AAPT2_ONLY
-my_link_type += aapt2_only
-endif
-ifeq ($(LOCAL_USE_AAPT2),true)
-my_allowed_types += aapt2_only
-endif
-
 my_link_deps := $(addprefix JAVA_LIBRARIES:,$(LOCAL_STATIC_JAVA_LIBRARIES) $(LOCAL_JAVA_LIBRARIES))
 my_link_deps += $(addprefix APPS:,$(apk_libraries))
 
diff --git a/core/java_prebuilt_internal.mk b/core/java_prebuilt_internal.mk
new file mode 100644
index 0000000..31aae83
--- /dev/null
+++ b/core/java_prebuilt_internal.mk
@@ -0,0 +1,242 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+############################################################
+# Internal build rules for JAVA_LIBRARIES prebuilt modules
+############################################################
+
+ifneq (JAVA_LIBRARIES,$(LOCAL_MODULE_CLASS))
+$(call pretty-error,java_prebuilt_internal.mk is for JAVA_LIBRARIES modules only)
+endif
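+
+# A minimal, hypothetical Android.mk fragment for a prebuilt java library that
+# would be handled by these rules:
+#
+#   include $(CLEAR_VARS)
+#   LOCAL_MODULE := my-prebuilt-lib
+#   LOCAL_MODULE_CLASS := JAVA_LIBRARIES
+#   LOCAL_SRC_FILES := libs/my-prebuilt-lib.jar
+#   include $(BUILD_PREBUILT)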
+
+include $(BUILD_SYSTEM)/base_rules.mk
+built_module := $(LOCAL_BUILT_MODULE)
+
+ifeq (,$(LOCAL_IS_HOST_MODULE)$(filter true,$(LOCAL_UNINSTALLABLE_MODULE)))
+  prebuilt_module_is_dex_javalib := true
+else
+  prebuilt_module_is_dex_javalib :=
+endif
+
+ifeq ($(prebuilt_module_is_dex_javalib),true)
+my_dex_jar := $(my_prebuilt_src_file)
+# This is a target shared library, i.e. a jar with classes.dex.
+
+ifneq ($(filter $(LOCAL_MODULE),$(PRODUCT_BOOT_JARS)),)
+  $(call pretty-error,Modules in PRODUCT_BOOT_JARS must be defined in Android.bp files)
+endif
+
+#######################################
+# defines built_odex along with rule to install odex
+include $(BUILD_SYSTEM)/dex_preopt_odex_install.mk
+#######################################
+ifdef LOCAL_DEX_PREOPT
+
+$(built_module): PRIVATE_STRIP_SCRIPT := $(intermediates)/strip.sh
+$(built_module): $(intermediates)/strip.sh
+$(built_module): | $(DEXPREOPT_STRIP_DEPS)
+$(built_module): .KATI_DEPFILE := $(built_module).d
+$(built_module): $(my_prebuilt_src_file)
+	$(PRIVATE_STRIP_SCRIPT) $< $@
+
+else # ! LOCAL_DEX_PREOPT
+$(built_module) : $(my_prebuilt_src_file)
+	$(call copy-file-to-target)
+endif # LOCAL_DEX_PREOPT
+
+else  # ! prebuilt_module_is_dex_javalib
+$(built_module) : $(my_prebuilt_src_file)
+	$(transform-prebuilt-to-target)
+endif # ! prebuilt_module_is_dex_javalib
+
+my_src_jar := $(my_prebuilt_src_file)
+
+ifdef LOCAL_IS_HOST_MODULE
+# for host java libraries, deps should be in the common dir, so we make a copy in
+# the common dir.
+common_classes_jar := $(intermediates.COMMON)/classes.jar
+common_header_jar := $(intermediates.COMMON)/classes-header.jar
+
+$(common_classes_jar): PRIVATE_MODULE := $(LOCAL_MODULE)
+$(common_classes_jar): PRIVATE_PREFIX := $(my_prefix)
+
+$(common_classes_jar) : $(my_src_jar)
+	$(transform-prebuilt-to-target)
+
+ifneq ($(TURBINE_ENABLED),false)
+$(common_header_jar) : $(my_src_jar)
+	$(transform-prebuilt-to-target)
+endif
+
+else # !LOCAL_IS_HOST_MODULE
+# for target java libraries, the LOCAL_BUILT_MODULE is in a product-specific dir,
+# while the deps should be in the common dir, so we make a copy in the common dir.
+common_classes_jar := $(intermediates.COMMON)/classes.jar
+common_header_jar := $(intermediates.COMMON)/classes-header.jar
+common_classes_pre_proguard_jar := $(intermediates.COMMON)/classes-pre-proguard.jar
+common_javalib_jar := $(intermediates.COMMON)/javalib.jar
+
+$(common_classes_jar) $(common_classes_pre_proguard_jar) $(common_javalib_jar): PRIVATE_MODULE := $(LOCAL_MODULE)
+$(common_classes_jar) $(common_classes_pre_proguard_jar) $(common_javalib_jar): PRIVATE_PREFIX := $(my_prefix)
+
+ifeq ($(LOCAL_SDK_VERSION),system_current)
+my_link_type := java:system
+else ifneq (,$(call has-system-sdk-version,$(LOCAL_SDK_VERSION)))
+my_link_type := java:system
+else ifeq ($(LOCAL_SDK_VERSION),core_current)
+my_link_type := java:core
+else ifneq ($(LOCAL_SDK_VERSION),)
+my_link_type := java:sdk
+else
+my_link_type := java:platform
+endif
+
+# TODO: check dependencies of prebuilt files
+my_link_deps :=
+
+my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+my_common := COMMON
+include $(BUILD_SYSTEM)/link_type.mk
+
+ifeq ($(prebuilt_module_is_dex_javalib),true)
+# For prebuilt shared Java library we don't have classes.jar.
+$(common_javalib_jar) : $(my_src_jar)
+	$(transform-prebuilt-to-target)
+
+else  # ! prebuilt_module_is_dex_javalib
+
+my_src_aar := $(filter %.aar, $(my_prebuilt_src_file))
+ifneq ($(my_src_aar),)
+# This is an .aar file, an archive of classes.jar and Android resources.
+
+# run Jetifier if needed
+LOCAL_JETIFIER_INPUT_FILE := $(my_src_aar)
+include $(BUILD_SYSTEM)/jetifier.mk
+my_src_aar := $(LOCAL_JETIFIER_OUTPUT_FILE)
+
+my_src_jar := $(intermediates.COMMON)/aar/classes.jar
+my_src_proguard_options := $(intermediates.COMMON)/aar/proguard.txt
+my_src_android_manifest := $(intermediates.COMMON)/aar/AndroidManifest.xml
+
+$(my_src_jar) : .KATI_IMPLICIT_OUTPUTS := $(my_src_proguard_options)
+$(my_src_jar) : .KATI_IMPLICIT_OUTPUTS += $(my_src_android_manifest)
+$(my_src_jar) : $(my_src_aar)
+	$(hide) rm -rf $(dir $@) && mkdir -p $(dir $@) $(dir $@)/res
+	$(hide) unzip -qo -d $(dir $@) $<
+	# Make sure the extracted classes.jar has a new timestamp.
+	$(hide) touch $@
+	# Make sure the proguard and AndroidManifest.xml files exist
+	# and have a new timestamp.
+	$(hide) touch $(dir $@)/proguard.txt
+	$(hide) touch $(dir $@)/AndroidManifest.xml
+
+my_prebuilt_android_manifest := $(intermediates.COMMON)/manifest/AndroidManifest.xml
+$(eval $(call copy-one-file,$(my_src_android_manifest),$(my_prebuilt_android_manifest)))
+$(call add-dependency,$(LOCAL_BUILT_MODULE),$(my_prebuilt_android_manifest))
+
+else
+
+# run Jetifier if needed
+LOCAL_JETIFIER_INPUT_FILE := $(my_src_jar)
+include $(BUILD_SYSTEM)/jetifier.mk
+my_src_jar := $(LOCAL_JETIFIER_OUTPUT_FILE)
+
+endif
+
+$(common_classes_jar) : $(my_src_jar)
+	$(transform-prebuilt-to-target)
+
+ifneq ($(TURBINE_ENABLED),false)
+$(common_header_jar) : $(my_src_jar)
+	$(transform-prebuilt-to-target)
+endif
+
+$(common_classes_pre_proguard_jar) : $(my_src_jar)
+	$(transform-prebuilt-to-target)
+
+$(common_javalib_jar) : $(common_classes_jar)
+	$(transform-prebuilt-to-target)
+
+include $(BUILD_SYSTEM)/force_aapt2.mk
+
+ifneq ($(my_src_aar),)
+
+$(intermediates.COMMON)/export_proguard_flags : $(my_src_proguard_options)
+	$(transform-prebuilt-to-target)
+
+LOCAL_SDK_RES_VERSION:=$(strip $(LOCAL_SDK_RES_VERSION))
+ifeq ($(LOCAL_SDK_RES_VERSION),)
+  LOCAL_SDK_RES_VERSION:=$(LOCAL_SDK_VERSION)
+endif
+
+framework_res_package_export :=
+# Please refer to package.mk
+ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
+framework_res_package_export := \
+    $(call resolve-prebuilt-sdk-jar-path,$(LOCAL_SDK_RES_VERSION))
+else
+framework_res_package_export := \
+    $(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
+endif
+endif
+
+my_res_package := $(intermediates.COMMON)/package-res.apk
+
+# We need only a few PRIVATE variables and aapt2.mk input variables. Reset the unnecessary ones.
+$(my_res_package): PRIVATE_AAPT2_CFLAGS :=
+$(my_res_package): PRIVATE_AAPT_FLAGS := --static-lib --no-static-lib-packages --auto-add-overlay
+$(my_res_package): PRIVATE_ANDROID_MANIFEST := $(my_src_android_manifest)
+$(my_res_package): PRIVATE_AAPT_INCLUDES := $(framework_res_package_export)
+$(my_res_package): PRIVATE_SOURCE_INTERMEDIATES_DIR :=
+$(my_res_package): PRIVATE_PROGUARD_OPTIONS_FILE :=
+$(my_res_package): PRIVATE_DEFAULT_APP_TARGET_SDK :=
+$(my_res_package): PRIVATE_PRODUCT_AAPT_CONFIG :=
+$(my_res_package): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
+$(my_res_package): PRIVATE_TARGET_AAPT_CHARACTERISTICS :=
+$(my_res_package) : $(framework_res_package_export)
+$(my_res_package) : $(my_src_android_manifest)
+
+full_android_manifest :=
+my_res_resources :=
+my_overlay_resources :=
+my_compiled_res_base_dir := $(intermediates.COMMON)/flat-res
+R_file_stamp :=
+proguard_options_file :=
+my_generated_res_dirs := $(intermediates.COMMON)/aar/res
+my_generated_res_dirs_deps := $(my_src_jar)
+include $(BUILD_SYSTEM)/aapt2.mk
+
+# Make sure my_res_package is created when you run mm/mmm.
+$(built_module) : $(my_res_package)
+endif  # $(my_src_aar)
+
+# make sure the classes.jar and javalib.jar are built before $(LOCAL_BUILT_MODULE)
+$(built_module) : $(common_javalib_jar)
+
+my_exported_sdk_libs_file := $(intermediates.COMMON)/exported-sdk-libs
+$(my_exported_sdk_libs_file): PRIVATE_EXPORTED_SDK_LIBS := $(LOCAL_EXPORT_SDK_LIBRARIES)
+$(my_exported_sdk_libs_file):
+	@echo "Export SDK libs $@"
+	$(hide) mkdir -p $(dir $@) && rm -f $@
+	$(if $(PRIVATE_EXPORTED_SDK_LIBS),\
+		$(hide) echo $(PRIVATE_EXPORTED_SDK_LIBS) | tr ' ' '\n' > $@,\
+		$(hide) touch $@)
+
+endif # ! prebuilt_module_is_dex_javalib
+endif # LOCAL_IS_HOST_MODULE is not set
+
diff --git a/core/main.mk b/core/main.mk
index df5c13c..656a6ec 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -1214,6 +1214,8 @@
   libartd.so \
   libartpalette.so \
   libc.so \
+  libc_malloc_debug.so \
+  libc_malloc_hooks.so \
   libdexfile.so \
   libdexfile_external.so \
   libdexfiled.so \
diff --git a/core/node_fns.mk b/core/node_fns.mk
index ccfcc25..b81d60c 100644
--- a/core/node_fns.mk
+++ b/core/node_fns.mk
@@ -142,7 +142,8 @@
 #
 # $(1): context prefix
 # $(2): name of this node
-# $(3): list of variable names
+# $(3): list of node variable names
+# $(4): list of single value variable names (subset of $(3))
 #
 define _expand-inherited-values
   $(foreach v,$(3), \
@@ -154,15 +155,21 @@
             $(patsubst $(INHERIT_TAG)%,%, \
                 $(filter $(INHERIT_TAG)%, $($(_eiv_tv)) \
      )))) \
+    $(eval ### "Whether this variable should only take a single value") \
+    $(eval _eiv_sv := $(filter $(v),$(4))) \
     $(foreach i,$(_eiv_i), \
       $(eval ### "Make sure that this inherit appears only once") \
       $(eval $(_eiv_tv) := \
           $(call uniq-word,$($(_eiv_tv)),$(INHERIT_TAG)$(i))) \
+      $(eval ### "The expanded value, empty if we want a single value and have one") \
+      $(eval _eiv_ev := \
+        $(if $(and $(_eiv_sv),$(filter-out $(INHERIT_TAG)%,$($(_eiv_tv)))),,\
+          $($(1).$(i).$(v)) \
+        ) \
+      ) \
       $(eval ### "Expand the inherit tag") \
       $(eval $(_eiv_tv) := \
-          $(strip \
-              $(patsubst $(INHERIT_TAG)$(i),$($(1).$(i).$(v)), \
-                  $($(_eiv_tv))))) \
+          $(strip $(patsubst $(INHERIT_TAG)$(i),$(_eiv_ev),$($(_eiv_tv))))) \
       $(eval ### "Clear the child so DAGs don't create duplicate entries" ) \
       $(eval $(1).$(i).$(v) :=) \
       $(eval ### "If we just inherited ourselves, it's a cycle.") \
@@ -180,6 +187,7 @@
 # $(1): context prefix
 # $(2): makefile representing this node
 # $(3): list of node variable names
+# $(4): list of single value variable names (subset of $(3))
 #
 # _include_stack contains the list of included files, with the most recent files first.
 define _import-node
@@ -198,7 +206,7 @@
       $(call get-inherited-nodes,$(1).$(2),$(3)))
   $(call _import-nodes-inner,$(1),$($(1).$(2).inherited),$(3))
 
-  $(call _expand-inherited-values,$(1),$(2),$(3))
+  $(call _expand-inherited-values,$(1),$(2),$(3),$(4))
 
   $(eval $(1).$(2).inherited :=)
   $(eval _include_stack := $(wordlist 2,9999,$$(_include_stack)))
@@ -215,6 +223,7 @@
 # $(1): context prefix
 # $(2): list of makefiles representing nodes to import
 # $(3): list of node variable names
+# $(4): list of single value variable names (subset of $(3))
 #
 #TODO: Make the "does not exist" message more helpful;
 #      should print out the name of the file trying to include it.
@@ -225,7 +234,7 @@
         $(eval ### "skipping already-imported $(_in)") \
        , \
         $(eval $(1).$(_in).seen := true) \
-        $(call _import-node,$(1),$(strip $(_in)),$(3)) \
+        $(call _import-node,$(1),$(strip $(_in)),$(3),$(4)) \
        ) \
      , \
       $(error $(1): "$(_in)" does not exist) \
@@ -237,6 +246,8 @@
 # $(1): output list variable name, like "PRODUCTS" or "DEVICES"
 # $(2): list of makefiles representing nodes to import
 # $(3): list of node variable names
+# $(4): list with subset of variable names that take only a single value, instead
+#       of the default list semantics
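+#       For example (hypothetical caller and variable names):
+#         $(call import-nodes,PRODUCTS,$(product_mk_files),$(_product_var_list),$(_product_single_value_vars))
+#       A variable listed in $(4) ends up with a single value (the importing
+#       node's own value, or the first inherited one) rather than the usual
+#       list concatenation.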
 #
 define import-nodes
 $(if \
@@ -245,7 +256,7 @@
     $(if $(_include_stack),$(eval $(error ASSERTION FAILED: _include_stack \
                 should be empty here: $(_include_stack))),) \
     $(eval _include_stack := ) \
-    $(call _import-nodes-inner,$(_node_import_context),$(_in),$(3)) \
+    $(call _import-nodes-inner,$(_node_import_context),$(_in),$(3),$(4)) \
     $(call move-var-list,$(_node_import_context).$(_in),$(1).$(_in),$(3)) \
     $(eval _node_import_context :=) \
     $(eval $(1) := $($(1)) $(_in)) \
diff --git a/core/package_internal.mk b/core/package_internal.mk
index e27f6ce..493a0f1 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -171,12 +171,7 @@
 need_compile_asset := true
 endif
 
-ifdef LOCAL_AAPT2_ONLY
-LOCAL_USE_AAPT2 := true
-endif
-
 my_res_package :=
-ifeq ($(LOCAL_USE_AAPT2),true)
 # In aapt2 the last takes precedence.
 my_resource_dirs := $(call reverse-list,$(LOCAL_RESOURCE_DIR))
 my_res_dir :=
@@ -210,30 +205,12 @@
 my_res_package := $(intermediates)/package-res.apk
 LOCAL_INTERMEDIATE_TARGETS += $(my_res_package)
 
-ifeq ($(LOCAL_USE_AAPT2),true)
-  my_bundle_module := $(intermediates)/base.zip
-  LOCAL_INTERMEDIATE_TARGETS += $(my_bundle_module)
-endif
+my_bundle_module := $(intermediates)/base.zip
+LOCAL_INTERMEDIATE_TARGETS += $(my_bundle_module)
 
 # Always run aapt2, because we need to at least compile the AndroidManifest.xml.
 need_compile_res := true
 
-else  # LOCAL_USE_AAPT2
-all_resources := $(strip \
-    $(foreach dir, $(LOCAL_RESOURCE_DIR), \
-      $(addprefix $(dir)/, \
-        $(patsubst res/%,%, \
-          $(call find-subdir-assets,$(dir)) \
-         ) \
-       ) \
-     ))
-
-ifdef LOCAL_PACKAGE_SPLITS
-LOCAL_AAPT_FLAGS += $(addprefix --split ,$(LOCAL_PACKAGE_SPLITS))
-endif
-
-endif  # LOCAL_USE_AAPT2
-
 ifneq ($(all_resources),)
   need_compile_res := true
 endif
@@ -369,71 +346,24 @@
 $(R_file_stamp) $(my_res_package): PRIVATE_MANIFEST_INSTRUMENTATION_FOR := $(LOCAL_MANIFEST_INSTRUMENTATION_FOR)
 
 ###############################
-## AAPT/AAPT2
+## AAPT2
 
-ifeq ($(LOCAL_USE_AAPT2),true)
-  my_compiled_res_base_dir := $(intermediates.COMMON)/flat-res
-  ifneq (,$(filter-out current,$(renderscript_target_api)))
-    ifneq ($(call math_gt_or_eq,$(renderscript_target_api),21),true)
-      my_generated_res_zips := $(rs_generated_res_zip)
-    endif  # renderscript_target_api < 21
-  endif  # renderscript_target_api is set
-  my_asset_dirs := $(LOCAL_ASSET_DIR)
-  my_full_asset_paths := $(all_assets)
+my_compiled_res_base_dir := $(intermediates.COMMON)/flat-res
+ifneq (,$(filter-out current,$(renderscript_target_api)))
+  ifneq ($(call math_gt_or_eq,$(renderscript_target_api),21),true)
+    my_generated_res_zips := $(rs_generated_res_zip)
+  endif  # renderscript_target_api < 21
+endif  # renderscript_target_api is set
+my_asset_dirs := $(LOCAL_ASSET_DIR)
+my_full_asset_paths := $(all_assets)
 
-  # Add AAPT2 link specific flags.
-  $(my_res_package): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS)
-  ifndef LOCAL_AAPT_NAMESPACES
-    $(my_res_package): PRIVATE_AAPT_FLAGS += --no-static-lib-packages
-  endif
+# Add AAPT2 link specific flags.
+$(my_res_package): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS)
+ifndef LOCAL_AAPT_NAMESPACES
+  $(my_res_package): PRIVATE_AAPT_FLAGS += --no-static-lib-packages
+endif
 
-  include $(BUILD_SYSTEM)/aapt2.mk
-else  # LOCAL_USE_AAPT2
-
-  my_srcjar := $(intermediates.COMMON)/aapt.srcjar
-  LOCAL_SRCJARS += $(my_srcjar)
-  $(R_file_stamp): PRIVATE_SRCJAR := $(my_srcjar)
-  $(R_file_stamp): PRIVATE_JAVA_GEN_DIR := $(intermediates.COMMON)/aapt
-  $(R_file_stamp): .KATI_IMPLICIT_OUTPUTS := $(my_srcjar)
-  # Since we don't know where the real R.java file is going to end up,
-  # we need to use another file to stand in its place.  We'll just
-  # copy the generated file to src/R.stamp, which means it will
-  # have the same contents and timestamp as the actual file.
-  #
-  # At the same time, this will copy the R.java file to a central
-  # 'R' directory to make it easier to add the files to an IDE.
-  #
-
-  $(R_file_stamp): PRIVATE_RESOURCE_PUBLICS_OUTPUT := \
-			$(intermediates.COMMON)/public_resources.xml
-  $(R_file_stamp): PRIVATE_PROGUARD_OPTIONS_FILE := $(proguard_options_file)
-  $(R_file_stamp): PRIVATE_RESOURCE_LIST := $(all_res_assets)
-  $(R_file_stamp): $(all_res_assets) $(full_android_manifest) $(rs_generated_res_zip) $(AAPT) $(SOONG_ZIP) | $(ACP)
-	@echo "target R.java/Manifest.java: $(PRIVATE_MODULE) ($@)"
-	@rm -rf $@ && mkdir -p $(dir $@)
-	$(create-resource-java-files)
-	$(call find-generated-R.java,$(PRIVATE_JAVA_GEN_DIR),$@)
-
-  $(proguard_options_file): $(R_file_stamp)
-
-  ifdef LOCAL_EXPORT_PACKAGE_RESOURCES
-    # Put this module's resources into a PRODUCT-agnositc package that
-    # other packages can use to build their own PRODUCT-agnostic R.java (etc.)
-    # files.
-    resource_export_package := $(intermediates.COMMON)/package-export.apk
-    $(R_file_stamp): $(resource_export_package)
-
-    # create-assets-package looks at PRODUCT_AAPT_CONFIG, but this target
-    # can't know anything about PRODUCT.  Clear it out just for this target.
-    $(resource_export_package): PRIVATE_PRODUCT_AAPT_CONFIG :=
-    $(resource_export_package): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
-    $(resource_export_package): PRIVATE_RESOURCE_LIST := $(all_res_assets)
-    $(resource_export_package): $(all_res_assets) $(full_android_manifest) $(rs_generated_res_zip) $(AAPT)
-	@echo "target Export Resources: $(PRIVATE_MODULE) ($@)"
-	$(call create-assets-package,$@)
-  endif
-
-endif  # LOCAL_USE_AAPT2
+include $(BUILD_SYSTEM)/aapt2.mk
 
 endif  # need_compile_res
 
@@ -496,9 +426,7 @@
 $(LOCAL_INTERMEDIATE_TARGETS): \
     PRIVATE_AAPT_INCLUDES := $(all_library_res_package_exports)
 
-ifeq ($(LOCAL_USE_AAPT2),true)
 $(my_res_package) : $(all_library_res_package_export_deps)
-endif
 
 # These four are set above for $(R_stamp_file) and $(my_res_package), but
 # $(LOCAL_BUILT_MODULE) is not set before java.mk, so they have to be set again
@@ -595,13 +523,8 @@
 $(LOCAL_BUILT_MODULE): PRIVATE_RESOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/resources
 $(LOCAL_BUILT_MODULE) : $(jni_shared_libraries)
 $(LOCAL_BUILT_MODULE) : $(JAR_ARGS) $(SOONG_ZIP) $(MERGE_ZIPS) $(ZIP2ZIP)
-ifeq ($(LOCAL_USE_AAPT2),true)
 $(LOCAL_BUILT_MODULE): PRIVATE_RES_PACKAGE := $(my_res_package)
 $(LOCAL_BUILT_MODULE) : $(my_res_package) $(AAPT2) | $(ACP)
-else
-$(LOCAL_BUILT_MODULE): PRIVATE_RESOURCE_LIST := $(all_res_assets)
-$(LOCAL_BUILT_MODULE) : $(all_res_assets) $(full_android_manifest) $(AAPT) $(ZIPALIGN)
-endif  # LOCAL_USE_AAPT2
 ifdef LOCAL_COMPRESSED_MODULE
 $(LOCAL_BUILT_MODULE) : $(MINIGZIP)
 endif
@@ -622,11 +545,7 @@
 	@echo "target Package: $(PRIVATE_MODULE) ($@)"
 	rm -rf $@.parts
 	mkdir -p $@.parts
-ifeq ($(LOCAL_USE_AAPT2),true)
 	cp -f $(PRIVATE_RES_PACKAGE) $@.parts/apk.zip
-else  # ! LOCAL_USE_AAPT2
-	$(call create-assets-package,$@.parts/apk.zip)
-endif  # LOCAL_USE_AAPT2
 ifneq ($(jni_shared_libraries),)
 	$(call create-jni-shared-libs-package,$@.parts/jni.zip,$(PRIVATE_USE_EMBEDDED_NATIVE_LIBS))
 endif
@@ -645,11 +564,7 @@
 endif
 # Run appcompat before stripping the classes.dex file.
 ifeq ($(module_run_appcompat),true)
-ifeq ($(LOCAL_USE_AAPT2),true)
-	$(call appcompat-header, aapt2)
-else
 	$(appcompat-header)
-endif
 	$(run-appcompat)
 endif  # module_run_appcompat
 ifdef LOCAL_DEX_PREOPT
@@ -665,63 +580,57 @@
 	$(compress-package)
 endif  # LOCAL_COMPRESSED_MODULE
 
-ifeq ($(LOCAL_USE_AAPT2),true)
-  my_package_res_pb := $(intermediates)/package-res.pb.apk
-  $(my_package_res_pb): $(my_res_package) $(AAPT2)
+my_package_res_pb := $(intermediates)/package-res.pb.apk
+$(my_package_res_pb): $(my_res_package) $(AAPT2)
 	$(AAPT2) convert --output-format proto $< -o $@
 
-  $(my_bundle_module): $(my_package_res_pb)
-  $(my_bundle_module): PRIVATE_RES_PACKAGE := $(my_package_res_pb)
+$(my_bundle_module): $(my_package_res_pb)
+$(my_bundle_module): PRIVATE_RES_PACKAGE := $(my_package_res_pb)
 
-  $(my_bundle_module): $(jni_shared_libraries)
-  $(my_bundle_module): PRIVATE_JNI_SHARED_LIBRARIES := $(jni_shared_libraries_with_abis)
-  $(my_bundle_module): PRIVATE_JNI_SHARED_LIBRARIES_ABI := $(jni_shared_libraries_abis)
+$(my_bundle_module): $(jni_shared_libraries)
+$(my_bundle_module): PRIVATE_JNI_SHARED_LIBRARIES := $(jni_shared_libraries_with_abis)
+$(my_bundle_module): PRIVATE_JNI_SHARED_LIBRARIES_ABI := $(jni_shared_libraries_abis)
 
-  ifneq ($(full_classes_jar),)
-    $(my_bundle_module): PRIVATE_DEX_FILE := $(built_dex)
-    # Use the jarjar processed archive as the initial package file.
-    $(my_bundle_module): PRIVATE_SOURCE_ARCHIVE := $(full_classes_pre_proguard_jar)
-    $(my_bundle_module): $(built_dex)
-  else
-    $(my_bundle_module): PRIVATE_DEX_FILE :=
-    $(my_bundle_module): PRIVATE_SOURCE_ARCHIVE :=
-  endif # full_classes_jar
+ifneq ($(full_classes_jar),)
+  $(my_bundle_module): PRIVATE_DEX_FILE := $(built_dex)
+  # Use the jarjar processed archive as the initial package file.
+  $(my_bundle_module): PRIVATE_SOURCE_ARCHIVE := $(full_classes_pre_proguard_jar)
+  $(my_bundle_module): $(built_dex)
+else
+  $(my_bundle_module): PRIVATE_DEX_FILE :=
+  $(my_bundle_module): PRIVATE_SOURCE_ARCHIVE :=
+endif # full_classes_jar
 
-  $(my_bundle_module): $(MERGE_ZIPS) $(SOONG_ZIP) $(ZIP2ZIP)
+$(my_bundle_module): $(MERGE_ZIPS) $(SOONG_ZIP) $(ZIP2ZIP)
 	@echo "target Bundle: $(PRIVATE_MODULE) ($@)"
 	rm -rf $@.parts
 	mkdir -p $@.parts
 	$(ZIP2ZIP) -i $(PRIVATE_RES_PACKAGE) -o $@.parts/apk.zip AndroidManifest.xml:manifest/AndroidManifest.xml resources.pb "res/**/*" "assets/**/*"
-        ifneq ($(jni_shared_libraries),)
+      ifneq ($(jni_shared_libraries),)
 	  $(call create-jni-shared-libs-package,$@.parts/jni.zip)
-        endif
-        ifeq ($(full_classes_jar),)
-        # We don't build jar, need to add the Java resources here.
+      endif
+      ifeq ($(full_classes_jar),)
+      # We don't build a jar, so we need to add the Java resources here.
 	  $(if $(PRIVATE_EXTRA_JAR_ARGS),\
 	    $(call create-java-resources-jar,$@.parts/res.zip) && \
 	    $(ZIP2ZIP) -i $@.parts/res.zip -o $@.parts/res.zip.tmp "**/*:root/" && \
 	    mv -f $@.parts/res.zip.tmp $@.parts/res.zip)
-        else  # full_classes_jar
+      else  # full_classes_jar
 	  $(call create-dex-jar,$@.parts/dex.zip,$(PRIVATE_DEX_FILE))
 	  $(ZIP2ZIP) -i $@.parts/dex.zip -o $@.parts/dex.zip.tmp "classes*.dex:dex/"
 	  mv -f $@.parts/dex.zip.tmp $@.parts/dex.zip
 	  $(call extract-resources-jar,$@.parts/res.zip,$(PRIVATE_SOURCE_ARCHIVE))
 	  $(ZIP2ZIP) -i $@.parts/res.zip -o $@.parts/res.zip.tmp "**/*:root/"
 	  mv -f $@.parts/res.zip.tmp $@.parts/res.zip
-        endif  # full_classes_jar
+      endif  # full_classes_jar
 	$(MERGE_ZIPS) $@ $@.parts/*.zip
 	rm -rf $@.parts
-  ALL_MODULES.$(LOCAL_MODULE).BUNDLE := $(my_bundle_module)
-endif
+ALL_MODULES.$(LOCAL_MODULE).BUNDLE := $(my_bundle_module)
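For orientation, the recipe above assembles the bundle from a few intermediate zips before merging; the resulting layout, read directly off the zip2zip mappings in the rule, is roughly:

    #   manifest/AndroidManifest.xml, resources.pb, res/**, assets/**   <- proto res package
    #   dex/classes*.dex                                                <- $(PRIVATE_DEX_FILE), when a classes jar exists
    #   root/**                                                         <- Java resources
    #   plus the jni.zip contents when the module has JNI shared libraries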
 
-###############################
-## Build dpi-specific apks, if it's apps_only build.
 ifdef TARGET_BUILD_APPS
-ifdef LOCAL_DPI_VARIANTS
-$(foreach d, $(LOCAL_DPI_VARIANTS), \
-  $(eval my_dpi := $(d)) \
-  $(eval include $(BUILD_SYSTEM)/dpi_specific_apk.mk))
-endif
+  ifdef LOCAL_DPI_VARIANTS
+    $(call pretty-error,Building DPI-specific APKs is no longer supported)
+  endif
 endif
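For reference, a hypothetical Android.mk fragment that would now trip this check in an apps_only (TARGET_BUILD_APPS) build; the module name and densities are illustrative:

    LOCAL_PACKAGE_NAME := ExampleApp
    LOCAL_DPI_VARIANTS := xxhdpi xhdpi mdpi
    # => "Building DPI-specific APKs is no longer supported"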
 
 ###############################
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index e505945..f5b92fe 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -32,421 +32,27 @@
   my_prebuilt_src_file := $(LOCAL_PATH)/$(LOCAL_SRC_FILES)
   LOCAL_SRC_FILES :=
 else ifdef LOCAL_REPLACE_PREBUILT_APK_INSTALLED
-  # This is handled specially below
+  # This is handled specially in app_prebuilt_internal.mk
 else
   $(call pretty-error,No source files specified)
 endif
 
 LOCAL_CHECKED_MODULE := $(my_prebuilt_src_file)
 
-ifeq (APPS,$(LOCAL_MODULE_CLASS))
-include $(BUILD_SYSTEM)/app_prebuilt_internal.mk
-else
-#
-# Non-APPS prebuilt modules handling almost to the end of the file
-#
-
-my_strip_module := $(firstword \
-  $(LOCAL_STRIP_MODULE_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) \
-  $(LOCAL_STRIP_MODULE))
-
-ifeq (SHARED_LIBRARIES,$(LOCAL_MODULE_CLASS))
-  ifeq ($(LOCAL_IS_HOST_MODULE)$(my_strip_module),)
-    # Strip but not try to add debuglink
-    my_strip_module := no_debuglink
-  endif
-endif
-
-ifneq ($(filter STATIC_LIBRARIES SHARED_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
-  prebuilt_module_is_a_library := true
-else
-  prebuilt_module_is_a_library :=
-endif
-
-# Don't install static libraries by default.
-ifndef LOCAL_UNINSTALLABLE_MODULE
-ifeq (STATIC_LIBRARIES,$(LOCAL_MODULE_CLASS))
-  LOCAL_UNINSTALLABLE_MODULE := true
-endif
-endif
-
-ifeq (JAVA_LIBRARIES,$(LOCAL_IS_HOST_MODULE)$(LOCAL_MODULE_CLASS)$(filter true,$(LOCAL_UNINSTALLABLE_MODULE)))
-  prebuilt_module_is_dex_javalib := true
-else
-  prebuilt_module_is_dex_javalib :=
-endif
-
+ifneq (APPS,$(LOCAL_MODULE_CLASS))
 ifdef LOCAL_COMPRESSED_MODULE
 $(error $(LOCAL_MODULE) : LOCAL_COMPRESSED_MODULE can only be defined for module class APPS)
 endif  # LOCAL_COMPRESSED_MODULE
+endif  # APPS
 
-my_check_elf_file_shared_lib_files :=
-
-ifneq ($(filter true keep_symbols no_debuglink mini-debug-info,$(my_strip_module)),)
-  ifdef LOCAL_IS_HOST_MODULE
-    $(call pretty-error,Cannot strip/pack host module)
-  endif
-  ifeq ($(filter SHARED_LIBRARIES EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
-    $(call pretty-error,Can strip/pack only shared libraries or executables)
-  endif
-  ifneq ($(LOCAL_PREBUILT_STRIP_COMMENTS),)
-    $(call pretty-error,Cannot strip/pack scripts)
-  endif
-  # Set the arch-specific variables to set up the strip rules
-  LOCAL_STRIP_MODULE_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH) := $(my_strip_module)
-  include $(BUILD_SYSTEM)/dynamic_binary.mk
-  built_module := $(linked_module)
-
-  ifneq ($(LOCAL_SDK_VERSION),)
-    # binary.mk filters out NDK_MIGRATED_LIBS from my_shared_libs, thus those NDK libs are not added
-    # to DEPENDENCIES_ON_SHARED_LIBRARIES. Assign $(my_ndk_shared_libraries_fullpath) to
-    # my_check_elf_file_shared_lib_files so that check_elf_file.py can see those NDK stub libs.
-    my_check_elf_file_shared_lib_files := $(my_ndk_shared_libraries_fullpath)
-  endif
-else  # my_strip_module not true
-  include $(BUILD_SYSTEM)/base_rules.mk
-  built_module := $(LOCAL_BUILT_MODULE)
-
-ifdef prebuilt_module_is_a_library
-export_includes := $(intermediates)/export_includes
-export_cflags := $(foreach d,$(LOCAL_EXPORT_C_INCLUDE_DIRS),-I $(d))
-$(export_includes): PRIVATE_EXPORT_CFLAGS := $(export_cflags)
-$(export_includes): $(LOCAL_EXPORT_C_INCLUDE_DEPS)
-	@echo Export includes file: $< -- $@
-	$(hide) mkdir -p $(dir $@) && rm -f $@
-ifdef export_cflags
-	$(hide) echo "$(PRIVATE_EXPORT_CFLAGS)" >$@
+ifeq (APPS,$(LOCAL_MODULE_CLASS))
+  include $(BUILD_SYSTEM)/app_prebuilt_internal.mk
+else ifeq (JAVA_LIBRARIES,$(LOCAL_MODULE_CLASS))
+  include $(BUILD_SYSTEM)/java_prebuilt_internal.mk
 else
-	$(hide) touch $@
+  # TODO(jungjw): Check LOCAL_MODULE_CLASS value and generate an error for unexpected ones.
+  include $(BUILD_SYSTEM)/cc_prebuilt_internal.mk
 endif
-export_cflags :=
-
-include $(BUILD_SYSTEM)/allowed_ndk_types.mk
-
-ifdef LOCAL_SDK_VERSION
-my_link_type := native:ndk:$(my_ndk_stl_family):$(my_ndk_stl_link_type)
-else ifdef LOCAL_USE_VNDK
-    _name := $(patsubst %.vendor,%,$(LOCAL_MODULE))
-    ifneq ($(filter $(_name),$(VNDK_CORE_LIBRARIES) $(VNDK_SAMEPROCESS_LIBRARIES) $(LLNDK_LIBRARIES)),)
-        ifeq ($(filter $(_name),$(VNDK_PRIVATE_LIBRARIES)),)
-            my_link_type := native:vndk
-        else
-            my_link_type := native:vndk_private
-        endif
-    else
-        my_link_type := native:vendor
-    endif
-else ifneq ($(filter $(TARGET_RECOVERY_OUT)/%,$(LOCAL_MODULE_PATH)),)
-my_link_type := native:recovery
-else
-my_link_type := native:platform
-endif
-
-# TODO: check dependencies of prebuilt files
-my_link_deps :=
-
-my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
-my_common :=
-include $(BUILD_SYSTEM)/link_type.mk
-endif  # prebuilt_module_is_a_library
-
-# The real dependency will be added after all Android.mks are loaded and the install paths
-# of the shared libraries are determined.
-ifdef LOCAL_INSTALLED_MODULE
-ifdef LOCAL_IS_HOST_MODULE
-    ifeq ($(LOCAL_SYSTEM_SHARED_LIBRARIES),none)
-        my_system_shared_libraries :=
-    else
-        my_system_shared_libraries := $(LOCAL_SYSTEM_SHARED_LIBRARIES)
-    endif
-else
-    ifeq ($(LOCAL_SYSTEM_SHARED_LIBRARIES),none)
-        my_system_shared_libraries := libc libm libdl
-    else
-        my_system_shared_libraries := $(LOCAL_SYSTEM_SHARED_LIBRARIES)
-        my_system_shared_libraries := $(patsubst libc,libc libdl,$(my_system_shared_libraries))
-    endif
-endif
-
-my_shared_libraries := \
-    $(filter-out $(my_system_shared_libraries),$(LOCAL_SHARED_LIBRARIES)) \
-    $(my_system_shared_libraries)
-
-ifdef my_shared_libraries
-# Extra shared libraries introduced by LOCAL_CXX_STL.
-include $(BUILD_SYSTEM)/cxx_stl_setup.mk
-ifdef LOCAL_USE_VNDK
-  my_shared_libraries := $(foreach l,$(my_shared_libraries),\
-    $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
-endif
-$(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)DEPENDENCIES_ON_SHARED_LIBRARIES += \
-  $(my_register_name):$(LOCAL_INSTALLED_MODULE):$(subst $(space),$(comma),$(my_shared_libraries))
-endif
-endif  # my_shared_libraries
-
-# We need to enclose the above export_includes and my_built_shared_libraries in
-# "my_strip_module not true" because otherwise the rules are defined in dynamic_binary.mk.
-endif  # my_strip_module not true
-
-# Check prebuilt ELF binaries.
-include $(BUILD_SYSTEM)/check_elf_file.mk
-
-ifeq ($(NATIVE_COVERAGE),true)
-ifneq (,$(strip $(LOCAL_PREBUILT_COVERAGE_ARCHIVE)))
-  $(eval $(call copy-one-file,$(LOCAL_PREBUILT_COVERAGE_ARCHIVE),$(intermediates)/$(LOCAL_MODULE).gcnodir))
-  ifneq ($(LOCAL_UNINSTALLABLE_MODULE),true)
-    ifdef LOCAL_IS_HOST_MODULE
-      my_coverage_path := $($(my_prefix)OUT_COVERAGE)/$(patsubst $($(my_prefix)OUT)/%,%,$(my_module_path))
-    else
-      my_coverage_path := $(TARGET_OUT_COVERAGE)/$(patsubst $(PRODUCT_OUT)/%,%,$(my_module_path))
-    endif
-    my_coverage_path := $(my_coverage_path)/$(patsubst %.so,%,$(my_installed_module_stem)).gcnodir
-    $(eval $(call copy-one-file,$(LOCAL_PREBUILT_COVERAGE_ARCHIVE),$(my_coverage_path)))
-    $(LOCAL_BUILT_MODULE): $(my_coverage_path)
-  endif
-else
-# Coverage information is needed when static lib is a dependency of another
-# coverage-enabled module.
-ifeq (STATIC_LIBRARIES, $(LOCAL_MODULE_CLASS))
-GCNO_ARCHIVE := $(LOCAL_MODULE).gcnodir
-$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_OBJECTS :=
-$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_WHOLE_STATIC_LIBRARIES :=
-$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_PREFIX := $(my_prefix)
-$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_2ND_ARCH_VAR_PREFIX := $(LOCAL_2ND_ARCH_VAR_PREFIX)
-$(intermediates)/$(GCNO_ARCHIVE) :
-	$(transform-o-to-static-lib)
-endif
-endif
-endif
-
-ifeq ($(prebuilt_module_is_dex_javalib),true)
-my_dex_jar := $(my_prebuilt_src_file)
-# This is a target shared library, i.e. a jar with classes.dex.
-
-ifneq ($(filter $(LOCAL_MODULE),$(PRODUCT_BOOT_JARS)),)
-  $(call pretty-error,Modules in PRODUCT_BOOT_JARS must be defined in Android.bp files)
-endif
-
-#######################################
-# defines built_odex along with rule to install odex
-include $(BUILD_SYSTEM)/dex_preopt_odex_install.mk
-#######################################
-ifdef LOCAL_DEX_PREOPT
-
-$(built_module): PRIVATE_STRIP_SCRIPT := $(intermediates)/strip.sh
-$(built_module): $(intermediates)/strip.sh
-$(built_module): | $(DEXPREOPT_STRIP_DEPS)
-$(built_module): .KATI_DEPFILE := $(built_module).d
-$(built_module): $(my_prebuilt_src_file)
-	$(PRIVATE_STRIP_SCRIPT) $< $@
-
-else # ! LOCAL_DEX_PREOPT
-$(built_module) : $(my_prebuilt_src_file)
-	$(call copy-file-to-target)
-endif # LOCAL_DEX_PREOPT
-
-else  # ! prebuilt_module_is_dex_javalib
-ifneq ($(filter init%rc,$(notdir $(LOCAL_INSTALLED_MODULE)))$(filter %/etc/init,$(dir $(LOCAL_INSTALLED_MODULE))),)
-  $(eval $(call copy-init-script-file-checked,$(my_prebuilt_src_file),$(built_module)))
-else ifneq ($(LOCAL_PREBUILT_STRIP_COMMENTS),)
-$(built_module) : $(my_prebuilt_src_file)
-	$(transform-prebuilt-to-target-strip-comments)
-else
-$(built_module) : $(my_prebuilt_src_file)
-	$(transform-prebuilt-to-target)
-endif
-ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
-	$(hide) chmod +x $@
-endif
-endif # ! prebuilt_module_is_dex_javalib
-
-ifeq ($(LOCAL_MODULE_CLASS),JAVA_LIBRARIES)
-my_src_jar := $(my_prebuilt_src_file)
-
-ifdef LOCAL_IS_HOST_MODULE
-# for host java libraries deps should be in the common dir, so we make a copy in
-# the common dir.
-common_classes_jar := $(intermediates.COMMON)/classes.jar
-common_header_jar := $(intermediates.COMMON)/classes-header.jar
-
-$(common_classes_jar): PRIVATE_MODULE := $(LOCAL_MODULE)
-$(common_classes_jar): PRIVATE_PREFIX := $(my_prefix)
-
-$(common_classes_jar) : $(my_src_jar)
-	$(transform-prebuilt-to-target)
-
-ifneq ($(TURBINE_ENABLED),false)
-$(common_header_jar) : $(my_src_jar)
-	$(transform-prebuilt-to-target)
-endif
-
-else # !LOCAL_IS_HOST_MODULE
-# for target java libraries, the LOCAL_BUILT_MODULE is in a product-specific dir,
-# while the deps should be in the common dir, so we make a copy in the common dir.
-common_classes_jar := $(intermediates.COMMON)/classes.jar
-common_header_jar := $(intermediates.COMMON)/classes-header.jar
-common_classes_pre_proguard_jar := $(intermediates.COMMON)/classes-pre-proguard.jar
-common_javalib_jar := $(intermediates.COMMON)/javalib.jar
-
-$(common_classes_jar) $(common_classes_pre_proguard_jar) $(common_javalib_jar): PRIVATE_MODULE := $(LOCAL_MODULE)
-$(common_classes_jar) $(common_classes_pre_proguard_jar) $(common_javalib_jar): PRIVATE_PREFIX := $(my_prefix)
-
-ifeq ($(LOCAL_SDK_VERSION),system_current)
-my_link_type := java:system
-else ifneq (,$(call has-system-sdk-version,$(LOCAL_SDK_VERSION)))
-my_link_type := java:system
-else ifeq ($(LOCAL_SDK_VERSION),core_current)
-my_link_type := java:core
-else ifneq ($(LOCAL_SDK_VERSION),)
-my_link_type := java:sdk
-else
-my_link_type := java:platform
-endif
-
-# TODO: check dependencies of prebuilt files
-my_link_deps :=
-
-my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
-my_common := COMMON
-include $(BUILD_SYSTEM)/link_type.mk
-
-ifeq ($(prebuilt_module_is_dex_javalib),true)
-# For prebuilt shared Java library we don't have classes.jar.
-$(common_javalib_jar) : $(my_src_jar)
-	$(transform-prebuilt-to-target)
-
-else  # ! prebuilt_module_is_dex_javalib
-
-my_src_aar := $(filter %.aar, $(my_prebuilt_src_file))
-ifneq ($(my_src_aar),)
-# This is .aar file, archive of classes.jar and Android resources.
-
-# run Jetifier if needed
-LOCAL_JETIFIER_INPUT_FILE := $(my_src_aar)
-include $(BUILD_SYSTEM)/jetifier.mk
-my_src_aar := $(LOCAL_JETIFIER_OUTPUT_FILE)
-
-my_src_jar := $(intermediates.COMMON)/aar/classes.jar
-my_src_proguard_options := $(intermediates.COMMON)/aar/proguard.txt
-my_src_android_manifest := $(intermediates.COMMON)/aar/AndroidManifest.xml
-
-$(my_src_jar) : .KATI_IMPLICIT_OUTPUTS := $(my_src_proguard_options)
-$(my_src_jar) : .KATI_IMPLICIT_OUTPUTS += $(my_src_android_manifest)
-$(my_src_jar) : $(my_src_aar)
-	$(hide) rm -rf $(dir $@) && mkdir -p $(dir $@) $(dir $@)/res
-	$(hide) unzip -qo -d $(dir $@) $<
-	# Make sure the extracted classes.jar has a new timestamp.
-	$(hide) touch $@
-	# Make sure the proguard and AndroidManifest.xml files exist
-	# and have a new timestamp.
-	$(hide) touch $(dir $@)/proguard.txt
-	$(hide) touch $(dir $@)/AndroidManifest.xml
-
-my_prebuilt_android_manifest := $(intermediates.COMMON)/manifest/AndroidManifest.xml
-$(eval $(call copy-one-file,$(my_src_android_manifest),$(my_prebuilt_android_manifest)))
-$(call add-dependency,$(LOCAL_BUILT_MODULE),$(my_prebuilt_android_manifest))
-
-else
-
-# run Jetifier if needed
-LOCAL_JETIFIER_INPUT_FILE := $(my_src_jar)
-include $(BUILD_SYSTEM)/jetifier.mk
-my_src_jar := $(LOCAL_JETIFIER_OUTPUT_FILE)
-
-endif
-
-$(common_classes_jar) : $(my_src_jar)
-	$(transform-prebuilt-to-target)
-
-ifneq ($(TURBINE_ENABLED),false)
-$(common_header_jar) : $(my_src_jar)
-	$(transform-prebuilt-to-target)
-endif
-
-$(common_classes_pre_proguard_jar) : $(my_src_jar)
-	$(transform-prebuilt-to-target)
-
-$(common_javalib_jar) : $(common_classes_jar)
-	$(transform-prebuilt-to-target)
-
-include $(BUILD_SYSTEM)/force_aapt2.mk
-
-ifdef LOCAL_AAPT2_ONLY
-LOCAL_USE_AAPT2 := true
-endif
-
-ifeq ($(LOCAL_USE_AAPT2),true)
-ifneq ($(my_src_aar),)
-
-$(intermediates.COMMON)/export_proguard_flags : $(my_src_proguard_options)
-	$(transform-prebuilt-to-target)
-
-LOCAL_SDK_RES_VERSION:=$(strip $(LOCAL_SDK_RES_VERSION))
-ifeq ($(LOCAL_SDK_RES_VERSION),)
-  LOCAL_SDK_RES_VERSION:=$(LOCAL_SDK_VERSION)
-endif
-
-framework_res_package_export :=
-# Please refer to package.mk
-ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
-framework_res_package_export := \
-    $(call resolve-prebuilt-sdk-jar-path,$(LOCAL_SDK_RES_VERSION))
-else
-framework_res_package_export := \
-    $(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
-endif
-endif
-
-my_res_package := $(intermediates.COMMON)/package-res.apk
-
-# We needed only very few PRIVATE variables and aapt2.mk input variables. Reset the unnecessary ones.
-$(my_res_package): PRIVATE_AAPT2_CFLAGS :=
-$(my_res_package): PRIVATE_AAPT_FLAGS := --static-lib --no-static-lib-packages --auto-add-overlay
-$(my_res_package): PRIVATE_ANDROID_MANIFEST := $(my_src_android_manifest)
-$(my_res_package): PRIVATE_AAPT_INCLUDES := $(framework_res_package_export)
-$(my_res_package): PRIVATE_SOURCE_INTERMEDIATES_DIR :=
-$(my_res_package): PRIVATE_PROGUARD_OPTIONS_FILE :=
-$(my_res_package): PRIVATE_DEFAULT_APP_TARGET_SDK :=
-$(my_res_package): PRIVATE_DEFAULT_APP_TARGET_SDK :=
-$(my_res_package): PRIVATE_PRODUCT_AAPT_CONFIG :=
-$(my_res_package): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
-$(my_res_package): PRIVATE_TARGET_AAPT_CHARACTERISTICS :=
-$(my_res_package) : $(framework_res_package_export)
-$(my_res_package) : $(my_src_android_manifest)
-
-full_android_manifest :=
-my_res_resources :=
-my_overlay_resources :=
-my_compiled_res_base_dir := $(intermediates.COMMON)/flat-res
-R_file_stamp :=
-proguard_options_file :=
-my_generated_res_dirs := $(intermediates.COMMON)/aar/res
-my_generated_res_dirs_deps := $(my_src_jar)
-include $(BUILD_SYSTEM)/aapt2.mk
-
-# Make sure my_res_package is created when you run mm/mmm.
-$(built_module) : $(my_res_package)
-endif  # $(my_src_aar)
-endif  # LOCAL_USE_AAPT2
-# make sure the classes.jar and javalib.jar are built before $(LOCAL_BUILT_MODULE)
-$(built_module) : $(common_javalib_jar)
-
-my_exported_sdk_libs_file := $(intermediates.COMMON)/exported-sdk-libs
-$(my_exported_sdk_libs_file): PRIVATE_EXPORTED_SDK_LIBS := $(LOCAL_EXPORT_SDK_LIBRARIES)
-$(my_exported_sdk_libs_file):
-	@echo "Export SDK libs $@"
-	$(hide) mkdir -p $(dir $@) && rm -f $@
-	$(if $(PRIVATE_EXPORTED_SDK_LIBS),\
-		$(hide) echo $(PRIVATE_EXPORTED_SDK_LIBS) | tr ' ' '\n' > $@,\
-		$(hide) touch $@)
-
-endif # ! prebuilt_module_is_dex_javalib
-endif # LOCAL_IS_HOST_MODULE is not set
-
-endif # JAVA_LIBRARIES
-
-endif # APPS
 
 $(built_module) : $(LOCAL_ADDITIONAL_DEPENDENCIES)
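The prebuilt_internal.mk change above turns the file into a thin dispatcher keyed on LOCAL_MODULE_CLASS. Illustrative Android.mk fragments follow (not complete modules; a real prebuilt also sets LOCAL_MODULE and ends with include $(BUILD_PREBUILT)):

    LOCAL_MODULE_CLASS := APPS              # -> app_prebuilt_internal.mk
    LOCAL_SRC_FILES := ExampleApp.apk

    LOCAL_MODULE_CLASS := JAVA_LIBRARIES    # -> java_prebuilt_internal.mk
    LOCAL_SRC_FILES := example-lib.jar

    LOCAL_MODULE_CLASS := SHARED_LIBRARIES  # -> cc_prebuilt_internal.mk (the fallback branch)
    LOCAL_SRC_FILES := libexample.so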
 
diff --git a/core/product.mk b/core/product.mk
index 4b5c805..838673c 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -106,30 +106,37 @@
 $(call get-product-makefiles,$(_find-android-products-files))
 endef
 
-_product_var_list :=
-_product_var_list += PRODUCT_NAME
-_product_var_list += PRODUCT_MODEL
+# Variables that are meant to hold only a single value.
+# - The value set in the current makefile takes precedence over inherited values
+# - If multiple inherited makefiles set the var, the first-inherited value wins
+_product_single_value_vars :=
+
+# Variables that are lists of values.
+_product_list_vars :=
+
+_product_single_value_vars += PRODUCT_NAME
+_product_single_value_vars += PRODUCT_MODEL
 
 # The resource configuration options to use for this product.
-_product_var_list += PRODUCT_LOCALES
-_product_var_list += PRODUCT_AAPT_CONFIG
-_product_var_list += PRODUCT_AAPT_PREF_CONFIG
-_product_var_list += PRODUCT_AAPT_PREBUILT_DPI
-_product_var_list += PRODUCT_HOST_PACKAGES
-_product_var_list += PRODUCT_PACKAGES
-_product_var_list += PRODUCT_PACKAGES_DEBUG
-_product_var_list += PRODUCT_PACKAGES_DEBUG_ASAN
-_product_var_list += PRODUCT_PACKAGES_ENG
-_product_var_list += PRODUCT_PACKAGES_TESTS
+_product_list_vars += PRODUCT_LOCALES
+_product_list_vars += PRODUCT_AAPT_CONFIG
+_product_list_vars += PRODUCT_AAPT_PREF_CONFIG
+_product_list_vars += PRODUCT_AAPT_PREBUILT_DPI
+_product_list_vars += PRODUCT_HOST_PACKAGES
+_product_list_vars += PRODUCT_PACKAGES
+_product_list_vars += PRODUCT_PACKAGES_DEBUG
+_product_list_vars += PRODUCT_PACKAGES_DEBUG_ASAN
+_product_list_vars += PRODUCT_PACKAGES_ENG
+_product_list_vars += PRODUCT_PACKAGES_TESTS
 
 # The device that this product maps to.
-_product_var_list += PRODUCT_DEVICE
-_product_var_list += PRODUCT_MANUFACTURER
-_product_var_list += PRODUCT_BRAND
+_product_single_value_vars += PRODUCT_DEVICE
+_product_single_value_vars += PRODUCT_MANUFACTURER
+_product_single_value_vars += PRODUCT_BRAND
 
 # These PRODUCT_SYSTEM_* flags, if defined, are used in place of the
 # corresponding PRODUCT_* flags for the sysprops on /system.
-_product_var_list += \
+_product_single_value_vars += \
     PRODUCT_SYSTEM_NAME \
     PRODUCT_SYSTEM_MODEL \
     PRODUCT_SYSTEM_DEVICE \
@@ -138,24 +145,26 @@
 
 # A list of property assignments, like "key = value", with zero or more
 # whitespace characters on either side of the '='.
-_product_var_list += PRODUCT_PROPERTY_OVERRIDES
+_product_list_vars += PRODUCT_PROPERTY_OVERRIDES
 
 # A list of property assignments, like "key = value", with zero or more
 # whitespace characters on either side of the '='.
 # used for adding properties to default.prop
-_product_var_list += PRODUCT_DEFAULT_PROPERTY_OVERRIDES
+_product_list_vars += PRODUCT_DEFAULT_PROPERTY_OVERRIDES
 
 # A list of property assignments, like "key = value", with zero or more
 # whitespace characters on either side of the '='.
 # used for adding properties to build.prop of product partition
-_product_var_list += PRODUCT_PRODUCT_PROPERTIES
+_product_list_vars += PRODUCT_PRODUCT_PROPERTIES
 
 # A list of property assignments, like "key = value", with zero or more
 # whitespace characters on either side of the '='.
 # used for adding properties to build.prop of product partition
-_product_var_list += PRODUCT_PRODUCT_SERVICES_PROPERTIES
-_product_var_list += PRODUCT_ODM_PROPERTIES
-_product_var_list += PRODUCT_CHARACTERISTICS
+_product_list_vars += PRODUCT_PRODUCT_SERVICES_PROPERTIES
+_product_list_vars += PRODUCT_ODM_PROPERTIES
+
+# The characteristics of the product, which among other things are passed to aapt
+_product_single_value_vars += PRODUCT_CHARACTERISTICS
 
 # A list of words like <source path>:<destination path>[:<owner>].
 # The file at the source path should be copied to the destination path
@@ -163,157 +172,163 @@
 # $(PRODUCT_OUT), so it should look like, e.g., "system/etc/file.xml".
 # The rules for these copy steps are defined in build/make/core/Makefile.
 # The optional :<owner> is used to indicate the owner of a vendor file.
-_product_var_list += PRODUCT_COPY_FILES
+_product_list_vars += PRODUCT_COPY_FILES
 
 # The OTA key(s) specified by the product config, if any.  The names
 # of these keys are stored in the target-files zip so that post-build
 # signing tools can substitute them for the test key embedded by
 # default.
-_product_var_list += PRODUCT_OTA_PUBLIC_KEYS
-_product_var_list += PRODUCT_EXTRA_RECOVERY_KEYS
+_product_list_vars += PRODUCT_OTA_PUBLIC_KEYS
+_product_list_vars += PRODUCT_EXTRA_RECOVERY_KEYS
 
 # Should we use the default resources or add any product specific overlays
-_product_var_list += PRODUCT_PACKAGE_OVERLAYS
-_product_var_list += DEVICE_PACKAGE_OVERLAYS
+_product_list_vars += PRODUCT_PACKAGE_OVERLAYS
+_product_list_vars += DEVICE_PACKAGE_OVERLAYS
 
 # Resource overlay list which must be excluded from enforcing RRO.
-_product_var_list += PRODUCT_ENFORCE_RRO_EXCLUDED_OVERLAYS
+_product_list_vars += PRODUCT_ENFORCE_RRO_EXCLUDED_OVERLAYS
 
 # Package list to apply enforcing RRO.
-_product_var_list += PRODUCT_ENFORCE_RRO_TARGETS
+_product_list_vars += PRODUCT_ENFORCE_RRO_TARGETS
 
-_product_var_list += PRODUCT_SDK_ATREE_FILES
-_product_var_list += PRODUCT_SDK_ADDON_NAME
-_product_var_list += PRODUCT_SDK_ADDON_COPY_FILES
-_product_var_list += PRODUCT_SDK_ADDON_COPY_MODULES
-_product_var_list += PRODUCT_SDK_ADDON_DOC_MODULES
-_product_var_list += PRODUCT_SDK_ADDON_SYS_IMG_SOURCE_PROP
+_product_list_vars += PRODUCT_SDK_ATREE_FILES
+_product_list_vars += PRODUCT_SDK_ADDON_NAME
+_product_list_vars += PRODUCT_SDK_ADDON_COPY_FILES
+_product_list_vars += PRODUCT_SDK_ADDON_COPY_MODULES
+_product_list_vars += PRODUCT_SDK_ADDON_DOC_MODULES
+_product_list_vars += PRODUCT_SDK_ADDON_SYS_IMG_SOURCE_PROP
 
 # which Soong namespaces to export to Make
-_product_var_list += PRODUCT_SOONG_NAMESPACES
+_product_list_vars += PRODUCT_SOONG_NAMESPACES
 
-_product_var_list += PRODUCT_DEFAULT_WIFI_CHANNELS
-_product_var_list += PRODUCT_DEFAULT_DEV_CERTIFICATE
-_product_var_list += PRODUCT_RESTRICT_VENDOR_FILES
+_product_list_vars += PRODUCT_DEFAULT_WIFI_CHANNELS
+_product_list_vars += PRODUCT_DEFAULT_DEV_CERTIFICATE
+_product_list_vars += PRODUCT_RESTRICT_VENDOR_FILES
 
 # The list of product-specific kernel header dirs
-_product_var_list += PRODUCT_VENDOR_KERNEL_HEADERS
+_product_list_vars += PRODUCT_VENDOR_KERNEL_HEADERS
 
 # A list of module names of BOOTCLASSPATH (jar files)
-_product_var_list += PRODUCT_BOOT_JARS
-_product_var_list += PRODUCT_SUPPORTS_BOOT_SIGNER
-_product_var_list += PRODUCT_SUPPORTS_VBOOT
-_product_var_list += PRODUCT_SUPPORTS_VERITY
-_product_var_list += PRODUCT_SUPPORTS_VERITY_FEC
-_product_var_list += PRODUCT_OEM_PROPERTIES
+_product_list_vars += PRODUCT_BOOT_JARS
+_product_list_vars += PRODUCT_SUPPORTS_BOOT_SIGNER
+_product_list_vars += PRODUCT_SUPPORTS_VBOOT
+_product_list_vars += PRODUCT_SUPPORTS_VERITY
+_product_list_vars += PRODUCT_SUPPORTS_VERITY_FEC
+_product_list_vars += PRODUCT_OEM_PROPERTIES
 
 # A list of property assignments, like "key = value", with zero or more
 # whitespace characters on either side of the '='.
 # used for adding properties to default.prop of system partition
-_product_var_list += PRODUCT_SYSTEM_DEFAULT_PROPERTIES
+_product_list_vars += PRODUCT_SYSTEM_DEFAULT_PROPERTIES
 
-_product_var_list += PRODUCT_SYSTEM_PROPERTY_BLACKLIST
-_product_var_list += PRODUCT_VENDOR_PROPERTY_BLACKLIST
-_product_var_list += PRODUCT_SYSTEM_SERVER_APPS
-_product_var_list += PRODUCT_SYSTEM_SERVER_JARS
+_product_list_vars += PRODUCT_SYSTEM_PROPERTY_BLACKLIST
+_product_list_vars += PRODUCT_VENDOR_PROPERTY_BLACKLIST
+_product_list_vars += PRODUCT_SYSTEM_SERVER_APPS
+_product_list_vars += PRODUCT_SYSTEM_SERVER_JARS
 
 # All of the apps that we force preopt, this overrides WITH_DEXPREOPT.
-_product_var_list += PRODUCT_ALWAYS_PREOPT_EXTRACTED_APK
-_product_var_list += PRODUCT_DEXPREOPT_SPEED_APPS
-_product_var_list += PRODUCT_LOADED_BY_PRIVILEGED_MODULES
-_product_var_list += PRODUCT_VBOOT_SIGNING_KEY
-_product_var_list += PRODUCT_VBOOT_SIGNING_SUBKEY
-_product_var_list += PRODUCT_VERITY_SIGNING_KEY
-_product_var_list += PRODUCT_SYSTEM_VERITY_PARTITION
-_product_var_list += PRODUCT_VENDOR_VERITY_PARTITION
-_product_var_list += PRODUCT_PRODUCT_VERITY_PARTITION
-_product_var_list += PRODUCT_PRODUCT_SERVICES_VERITY_PARTITION
-_product_var_list += PRODUCT_ODM_VERITY_PARTITION
-_product_var_list += PRODUCT_SYSTEM_SERVER_DEBUG_INFO
-_product_var_list += PRODUCT_OTHER_JAVA_DEBUG_INFO
+_product_list_vars += PRODUCT_ALWAYS_PREOPT_EXTRACTED_APK
+_product_list_vars += PRODUCT_DEXPREOPT_SPEED_APPS
+_product_list_vars += PRODUCT_LOADED_BY_PRIVILEGED_MODULES
+_product_single_value_vars += PRODUCT_VBOOT_SIGNING_KEY
+_product_single_value_vars += PRODUCT_VBOOT_SIGNING_SUBKEY
+_product_single_value_vars += PRODUCT_VERITY_SIGNING_KEY
+_product_single_value_vars += PRODUCT_SYSTEM_VERITY_PARTITION
+_product_single_value_vars += PRODUCT_VENDOR_VERITY_PARTITION
+_product_single_value_vars += PRODUCT_PRODUCT_VERITY_PARTITION
+_product_single_value_vars += PRODUCT_PRODUCT_SERVICES_VERITY_PARTITION
+_product_single_value_vars += PRODUCT_ODM_VERITY_PARTITION
+_product_single_value_vars += PRODUCT_SYSTEM_SERVER_DEBUG_INFO
+_product_single_value_vars += PRODUCT_OTHER_JAVA_DEBUG_INFO
 
 # Per-module dex-preopt configs.
-_product_var_list += PRODUCT_DEX_PREOPT_MODULE_CONFIGS
-_product_var_list += PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER
-_product_var_list += PRODUCT_DEX_PREOPT_DEFAULT_FLAGS
-_product_var_list += PRODUCT_DEX_PREOPT_BOOT_FLAGS
-_product_var_list += PRODUCT_DEX_PREOPT_PROFILE_DIR
-_product_var_list += PRODUCT_DEX_PREOPT_GENERATE_DM_FILES
-_product_var_list += PRODUCT_DEX_PREOPT_NEVER_ALLOW_STRIPPING
-_product_var_list += PRODUCT_DEX_PREOPT_RESOLVE_STARTUP_STRINGS
+_product_list_vars += PRODUCT_DEX_PREOPT_MODULE_CONFIGS
+_product_list_vars += PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER
+_product_list_vars += PRODUCT_DEX_PREOPT_DEFAULT_FLAGS
+_product_list_vars += PRODUCT_DEX_PREOPT_BOOT_FLAGS
+_product_list_vars += PRODUCT_DEX_PREOPT_PROFILE_DIR
+_product_list_vars += PRODUCT_DEX_PREOPT_GENERATE_DM_FILES
+_product_list_vars += PRODUCT_DEX_PREOPT_NEVER_ALLOW_STRIPPING
+_product_list_vars += PRODUCT_DEX_PREOPT_RESOLVE_STARTUP_STRINGS
 
 # Boot image options.
-_product_var_list += \
+_product_single_value_vars += \
     PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE \
     PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION \
-    PRODUCT_USES_ART \
+    PRODUCT_USES_DEFAULT_ART_CONFIG \
 
-_product_var_list += PRODUCT_SYSTEM_SERVER_COMPILER_FILTER
+_product_list_vars += PRODUCT_SYSTEM_SERVER_COMPILER_FILTER
 # Per-module sanitizer configs
-_product_var_list += PRODUCT_SANITIZER_MODULE_CONFIGS
-_product_var_list += PRODUCT_SYSTEM_BASE_FS_PATH
-_product_var_list += PRODUCT_VENDOR_BASE_FS_PATH
-_product_var_list += PRODUCT_PRODUCT_BASE_FS_PATH
-_product_var_list += PRODUCT_PRODUCT_SERVICES_BASE_FS_PATH
-_product_var_list += PRODUCT_ODM_BASE_FS_PATH
-_product_var_list += PRODUCT_SHIPPING_API_LEVEL
-_product_var_list += VENDOR_PRODUCT_RESTRICT_VENDOR_FILES
-_product_var_list += VENDOR_EXCEPTION_MODULES
-_product_var_list += VENDOR_EXCEPTION_PATHS
+_product_list_vars += PRODUCT_SANITIZER_MODULE_CONFIGS
+_product_single_value_vars += PRODUCT_SYSTEM_BASE_FS_PATH
+_product_single_value_vars += PRODUCT_VENDOR_BASE_FS_PATH
+_product_single_value_vars += PRODUCT_PRODUCT_BASE_FS_PATH
+_product_single_value_vars += PRODUCT_PRODUCT_SERVICES_BASE_FS_PATH
+_product_single_value_vars += PRODUCT_ODM_BASE_FS_PATH
 
+# The first API level this product shipped with
+_product_single_value_vars += PRODUCT_SHIPPING_API_LEVEL
+
+_product_list_vars += VENDOR_PRODUCT_RESTRICT_VENDOR_FILES
+_product_list_vars += VENDOR_EXCEPTION_MODULES
+_product_list_vars += VENDOR_EXCEPTION_PATHS
 # Whether the product wants to ship libartd. For rules and meaning, see art/Android.mk.
-_product_var_list += PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD
+_product_single_value_vars += PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD
 
 # Make this art variable visible to soong_config.mk.
-_product_var_list += PRODUCT_ART_USE_READ_BARRIER
+_product_single_value_vars += PRODUCT_ART_USE_READ_BARRIER
 
 # Whether the product is an Android Things variant.
-_product_var_list += PRODUCT_IOT
+_product_single_value_vars += PRODUCT_IOT
 
 # Add reserved headroom to a system image.
-_product_var_list += PRODUCT_SYSTEM_HEADROOM
+_product_single_value_vars += PRODUCT_SYSTEM_HEADROOM
 
 # Whether to save disk space by minimizing java debug info
-_product_var_list += PRODUCT_MINIMIZE_JAVA_DEBUG_INFO
+_product_single_value_vars += PRODUCT_MINIMIZE_JAVA_DEBUG_INFO
 
 # Whether any paths are excluded from sanitization when SANITIZE_TARGET=integer_overflow
-_product_var_list += PRODUCT_INTEGER_OVERFLOW_EXCLUDE_PATHS
+_product_list_vars += PRODUCT_INTEGER_OVERFLOW_EXCLUDE_PATHS
 
-_product_var_list += PRODUCT_ADB_KEYS
+_product_single_value_vars += PRODUCT_ADB_KEYS
 
 # Whether any paths should have CFI enabled for components
-_product_var_list += PRODUCT_CFI_INCLUDE_PATHS
+_product_list_vars += PRODUCT_CFI_INCLUDE_PATHS
 
 # Whether any paths are excluded from sanitization when SANITIZE_TARGET=cfi
-_product_var_list += PRODUCT_CFI_EXCLUDE_PATHS
+_product_list_vars += PRODUCT_CFI_EXCLUDE_PATHS
 
 # Whether the Scudo hardened allocator is disabled platform-wide
-_product_var_list += PRODUCT_DISABLE_SCUDO
+_product_single_value_vars += PRODUCT_DISABLE_SCUDO
 
 # A flag to override PRODUCT_COMPATIBLE_PROPERTY
-_product_var_list += PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE
+_product_single_value_vars += PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE
+
+# List of extra VNDK versions to be included
+_product_list_vars += PRODUCT_EXTRA_VNDK_VERSIONS
 
 # Whether the whitelist of actionable compatible properties should be disabled or not
-_product_var_list += PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE
-_product_var_list += PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS
-_product_var_list += PRODUCT_ENFORCE_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT
-_product_var_list += PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_WHITELIST
-_product_var_list += PRODUCT_ARTIFACT_PATH_REQUIREMENT_HINT
-_product_var_list += PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST
+_product_single_value_vars += PRODUCT_ACTIONABLE_COMPATIBLE_PROPERTY_DISABLE
+
+_product_single_value_vars += PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS
+_product_single_value_vars += PRODUCT_ENFORCE_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT
+_product_list_vars += PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_WHITELIST
+_product_list_vars += PRODUCT_ARTIFACT_PATH_REQUIREMENT_HINT
+_product_list_vars += PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST
 
 # List of modules that should be forcefully unmarked from being LOCAL_PRODUCT_MODULE, and hence
 # installed on /system directory by default.
-_product_var_list += PRODUCT_FORCE_PRODUCT_MODULES_TO_SYSTEM_PARTITION
+_product_list_vars += PRODUCT_FORCE_PRODUCT_MODULES_TO_SYSTEM_PARTITION
 
 # When this is true, dynamic partitions are retrofitted on a device that has
 # already been launched without dynamic partitions. Otherwise, the device
 # is launched with dynamic partitions.
 # This flag implies PRODUCT_USE_DYNAMIC_PARTITIONS.
-_product_var_list += PRODUCT_RETROFIT_DYNAMIC_PARTITIONS
+_product_single_value_vars += PRODUCT_RETROFIT_DYNAMIC_PARTITIONS
 
 # Other dynamic partition feature flags. PRODUCT_USE_DYNAMIC_PARTITION_SIZE and
 # PRODUCT_BUILD_SUPER_PARTITION default to the value of PRODUCT_USE_DYNAMIC_PARTITIONS.
-_product_var_list += \
+_product_single_value_vars += \
     PRODUCT_USE_DYNAMIC_PARTITIONS \
     PRODUCT_USE_DYNAMIC_PARTITION_SIZE \
     PRODUCT_BUILD_SUPER_PARTITION \
@@ -322,28 +337,40 @@
 # during OTA). Otherwise, kernel configuration requirements are enforced in VTS.
 # Devices that check the running kernel (instead of the kernel in the OTA package) should not
 # set this variable to prevent OTA failures.
-_product_var_list += PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS
+_product_list_vars += PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS
+
+# If set to true, this product builds a generic OTA package, which installs generic system images
+# onto matching devices. The product may only build a subset of system images (e.g. only
+# system.img), so devices need to install the package in a system-only OTA manner.
+_product_single_value_vars += PRODUCT_BUILD_GENERIC_OTA_PACKAGE
 
 # Whether any paths are excluded from being set XOM when ENABLE_XOM=true
-_product_var_list += PRODUCT_XOM_EXCLUDE_PATHS
-_product_var_list += PRODUCT_MANIFEST_PACKAGE_NAME_OVERRIDES
-_product_var_list += PRODUCT_PACKAGE_NAME_OVERRIDES
-_product_var_list += PRODUCT_CERTIFICATE_OVERRIDES
-_product_var_list += PRODUCT_BUILD_SYSTEM_IMAGE
-_product_var_list += PRODUCT_BUILD_SYSTEM_OTHER_IMAGE
-_product_var_list += PRODUCT_BUILD_VENDOR_IMAGE
-_product_var_list += PRODUCT_BUILD_PRODUCT_IMAGE
-_product_var_list += PRODUCT_BUILD_PRODUCT_SERVICES_IMAGE
-_product_var_list += PRODUCT_BUILD_ODM_IMAGE
-_product_var_list += PRODUCT_BUILD_CACHE_IMAGE
-_product_var_list += PRODUCT_BUILD_RAMDISK_IMAGE
-_product_var_list += PRODUCT_BUILD_USERDATA_IMAGE
-_product_var_list += PRODUCT_UPDATABLE_BOOT_MODULES
-_product_var_list += PRODUCT_UPDATABLE_BOOT_LOCATIONS
+_product_list_vars += PRODUCT_XOM_EXCLUDE_PATHS
+_product_list_vars += PRODUCT_MANIFEST_PACKAGE_NAME_OVERRIDES
+_product_list_vars += PRODUCT_PACKAGE_NAME_OVERRIDES
+_product_list_vars += PRODUCT_CERTIFICATE_OVERRIDES
+
+# Controls for whether different partitions are built for the current product.
+_product_single_value_vars += PRODUCT_BUILD_SYSTEM_IMAGE
+_product_single_value_vars += PRODUCT_BUILD_SYSTEM_OTHER_IMAGE
+_product_single_value_vars += PRODUCT_BUILD_VENDOR_IMAGE
+_product_single_value_vars += PRODUCT_BUILD_PRODUCT_IMAGE
+_product_single_value_vars += PRODUCT_BUILD_PRODUCT_SERVICES_IMAGE
+_product_single_value_vars += PRODUCT_BUILD_ODM_IMAGE
+_product_single_value_vars += PRODUCT_BUILD_CACHE_IMAGE
+_product_single_value_vars += PRODUCT_BUILD_RAMDISK_IMAGE
+_product_single_value_vars += PRODUCT_BUILD_USERDATA_IMAGE
+_product_single_value_vars += PRODUCT_BUILD_RECOVERY_IMAGE
+_product_single_value_vars += PRODUCT_BUILD_BOOT_IMAGE
+
+_product_list_vars += PRODUCT_UPDATABLE_BOOT_MODULES
+_product_list_vars += PRODUCT_UPDATABLE_BOOT_LOCATIONS
 
 # Whether the product would like to check prebuilt ELF files.
-_product_var_list += PRODUCT_CHECK_ELF_FILES
-.KATI_READONLY := _product_var_list
+_product_single_value_vars += PRODUCT_CHECK_ELF_FILES
+
+.KATI_READONLY := _product_single_value_vars _product_list_vars
+_product_var_list :=$= $(_product_single_value_vars) $(_product_list_vars)
 
 define dump-product
 $(warning ==== $(1) ====)\
@@ -417,7 +444,7 @@
 #
 #TODO: check to make sure that products have all the necessary vars defined
 define import-products
-$(call import-nodes,PRODUCTS,$(1),$(_product_var_list))
+$(call import-nodes,PRODUCTS,$(1),$(_product_var_list),$(_product_single_value_vars))
 endef
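A sketch of what the single-value/list split means in practice for a hypothetical product makefile (names made up): list variables accumulate across inheritance, while single-value variables resolve to the current makefile's setting, or the first inherited one.

    # device/example/aosp_example.mk (hypothetical)
    $(call inherit-product, device/example/common.mk)
    PRODUCT_NAME := aosp_example       # single-value: overrides whatever common.mk set
    PRODUCT_PACKAGES += ExampleApp     # list: appended to the packages common.mk added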
 
 
diff --git a/core/product_config.mk b/core/product_config.mk
index a088f06..5ba69a6 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -376,6 +376,14 @@
   PRODUCT_BUILD_SUPER_PARTITION := $(PRODUCT_USE_DYNAMIC_PARTITIONS)
 endif
 
+ifeq ($(PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS),)
+  ifdef PRODUCT_SHIPPING_API_LEVEL
+    ifeq (true,$(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),29))
+      PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS := true
+    endif
+  endif
+endif
+
 define product-overrides-config
 $$(foreach rule,$$(PRODUCT_$(1)_OVERRIDES),\
     $$(if $$(filter 2,$$(words $$(subst :,$$(space),$$(rule)))),,\
@@ -390,7 +398,6 @@
 
 # Macro to use below. $(1) is the name of the partition
 define product-build-image-config
-PRODUCT_BUILD_$(1)_IMAGE := $$(firstword $$(PRODUCT_BUILD_$(1)_IMAGE))
 ifneq ($$(filter-out true false,$$(PRODUCT_BUILD_$(1)_IMAGE)),)
     $$(error Invalid PRODUCT_BUILD_$(1)_IMAGE: $$(PRODUCT_BUILD_$(1)_IMAGE) -- true false and empty are supported)
 endif
@@ -406,7 +413,9 @@
     ODM \
     CACHE \
     RAMDISK \
-    USERDATA, \
+    USERDATA \
+    BOOT \
+    RECOVERY, \
   $(eval $(call product-build-image-config,$(image))))
 
 product-build-image-config :=
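Two practical effects of the product_config.mk hunks above, sketched with hypothetical device config lines:

    # VINTF kernel requirements are now enforced by default for newer devices:
    PRODUCT_SHIPPING_API_LEVEL := 29
    #   => PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS defaults to true unless set explicitly

    # BOOT and RECOVERY join the per-partition build switches (true/false/empty only):
    PRODUCT_BUILD_BOOT_IMAGE := true
    PRODUCT_BUILD_RECOVERY_IMAGE := true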
diff --git a/core/proguard_basic_keeps.flags b/core/proguard_basic_keeps.flags
index 3c25e89..a0f577d 100644
--- a/core/proguard_basic_keeps.flags
+++ b/core/proguard_basic_keeps.flags
@@ -1,6 +1,3 @@
-# To prevent name conflict in incremental obfuscation.
--useuniqueclassmembernames
-
 # Some classes in the libraries extend package private classes to share common functionality
 # that isn't explicitly part of the API
 -dontskipnonpubliclibraryclasses -dontskipnonpubliclibraryclassmembers
@@ -66,5 +63,12 @@
 # See bug/20658265.
 # -dontwarn android.support.**
 
+# From https://github.com/google/guava/wiki/UsingProGuardWithGuava
+# Striped64, LittleEndianByteArray, UnsignedBytes, AbstractFuture
+-dontwarn sun.misc.Unsafe
+# Futures.getChecked (which often won't work with Proguard anyway) uses this. It
+# has a fallback, but again, don't use Futures.getChecked on Android regardless.
+-dontwarn java.lang.ClassValue
+
 # Less spammy.
 -dontnote
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index 8d92b20..d873cc4 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -60,7 +60,7 @@
   $(LOCAL_BUILT_MODULE): $(LOCAL_PREBUILT_MODULE_FILE)
 	@echo "Copy: $@"
 	$(copy-file-to-target)
-	$(call appcompat-header, aapt2)
+	$(appcompat-header)
 	$(run-appcompat)
 else
   $(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(LOCAL_BUILT_MODULE)))
@@ -129,7 +129,15 @@
 my_2nd_arch_prefix :=
 
 PACKAGES := $(PACKAGES) $(LOCAL_MODULE)
-ifdef LOCAL_CERTIFICATE
+ifeq ($(LOCAL_CERTIFICATE),PRESIGNED)
+  # The magic string "PRESIGNED" means this package is already checked
+  # signed with its release key.
+  #
+  # By setting .CERTIFICATE but not .PRIVATE_KEY, this package will be
+  # mentioned in apkcerts.txt (with certificate set to "PRESIGNED")
+  # but the dexpreopt process will not try to re-sign the app.
+  PACKAGES.$(LOCAL_MODULE).CERTIFICATE := PRESIGNED
+else ifneq ($(LOCAL_CERTIFICATE),)
   PACKAGES.$(LOCAL_MODULE).CERTIFICATE := $(LOCAL_CERTIFICATE)
   PACKAGES.$(LOCAL_MODULE).PRIVATE_KEY := $(patsubst %.x509.pem,%.pk8,$(LOCAL_CERTIFICATE))
 endif
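As a usage sketch (module name hypothetical): a prebuilt app that reaches this makefile with the PRESIGNED certificate keeps its existing signature; apkcerts.txt lists it with certificate "PRESIGNED" and no private key, and dexpreopt does not re-sign it.

    LOCAL_MODULE := ExamplePrebuiltApp
    LOCAL_CERTIFICATE := PRESIGNED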
diff --git a/core/soong_cc_prebuilt.mk b/core/soong_cc_prebuilt.mk
index 679d5b8..7a8f46e 100644
--- a/core/soong_cc_prebuilt.mk
+++ b/core/soong_cc_prebuilt.mk
@@ -96,9 +96,11 @@
 endif
 
 # Check prebuilt ELF binaries.
-ifneq ($(LOCAL_CHECK_ELF_FILES),)
-my_prebuilt_src_file := $(LOCAL_PREBUILT_MODULE_FILE)
-include $(BUILD_SYSTEM)/check_elf_file.mk
+ifdef LOCAL_INSTALLED_MODULE
+  ifneq ($(LOCAL_CHECK_ELF_FILES),)
+    my_prebuilt_src_file := $(LOCAL_PREBUILT_MODULE_FILE)
+    include $(BUILD_SYSTEM)/check_elf_file.mk
+  endif
 endif
 
 # The real dependency will be added after all Android.mks are loaded and the install paths
@@ -221,7 +223,9 @@
 #
 # Filter out some NDK libraries that are not being exported.
 my_static_libraries := \
-    $(filter-out ndk_libc++_static ndk_libc++abi ndk_libandroid_support ndk_libunwind, \
+    $(filter-out ndk_libc++_static ndk_libc++abi ndk_libandroid_support ndk_libunwind \
+      ndk_libc++_static.native_bridge ndk_libc++abi.native_bridge \
+      ndk_libandroid_support.native_bridge ndk_libunwind.native_bridge, \
       $(LOCAL_STATIC_LIBRARIES))
 installed_static_library_notice_file_targets := \
     $(foreach lib,$(my_static_libraries) $(LOCAL_WHOLE_STATIC_LIBRARIES), \
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 46e026a..48db6b1 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -58,6 +58,16 @@
 $(call add_json_str,  DeviceSecondaryCpuVariant,         $(TARGET_2ND_CPU_VARIANT))
 $(call add_json_list, DeviceSecondaryAbi,                $(TARGET_2ND_CPU_ABI) $(TARGET_2ND_CPU_ABI2))
 
+$(call add_json_str,  NativeBridgeArch,                  $(TARGET_NATIVE_BRIDGE_ARCH))
+$(call add_json_str,  NativeBridgeArchVariant,           $(TARGET_NATIVE_BRIDGE_ARCH_VARIANT))
+$(call add_json_str,  NativeBridgeCpuVariant,            $(TARGET_NATIVE_BRIDGE_CPU_VARIANT))
+$(call add_json_list, NativeBridgeAbi,                   $(TARGET_NATIVE_BRIDGE_ABI))
+
+$(call add_json_str,  NativeBridgeSecondaryArch,         $(TARGET_NATIVE_BRIDGE_2ND_ARCH))
+$(call add_json_str,  NativeBridgeSecondaryArchVariant,  $(TARGET_NATIVE_BRIDGE_2ND_ARCH_VARIANT))
+$(call add_json_str,  NativeBridgeSecondaryCpuVariant,   $(TARGET_NATIVE_BRIDGE_2ND_CPU_VARIANT))
+$(call add_json_list, NativeBridgeSecondaryAbi,          $(TARGET_NATIVE_BRIDGE_2ND_ABI))
+
 $(call add_json_str,  HostArch,                          $(HOST_ARCH))
 $(call add_json_str,  HostSecondaryArch,                 $(HOST_2ND_ARCH))
 $(call add_json_bool, HostStaticBinaries,                $(BUILD_HOST_static))
@@ -149,6 +159,7 @@
 $(call add_json_list, BoardOdmSepolicyDirs,              $(BOARD_ODM_SEPOLICY_DIRS))
 $(call add_json_list, BoardPlatPublicSepolicyDirs,       $(BOARD_PLAT_PUBLIC_SEPOLICY_DIR))
 $(call add_json_list, BoardPlatPrivateSepolicyDirs,      $(BOARD_PLAT_PRIVATE_SEPOLICY_DIR))
+$(call add_json_list, BoardSepolicyM4Defs,               $(BOARD_SEPOLICY_M4DEFS))
 
 $(call add_json_bool, FlattenApex,                       $(filter true,$(TARGET_FLATTEN_APEX)))
 
@@ -165,6 +176,10 @@
 $(call add_json_list, ProductHiddenAPIStubsSystem,       $(PRODUCT_HIDDENAPI_STUBS_SYSTEM))
 $(call add_json_list, ProductHiddenAPIStubsTest,         $(PRODUCT_HIDDENAPI_STUBS_TEST))
 
+$(call add_json_list, ProductPublicSepolicyDirs,         $(PRODUCT_PUBLIC_SEPOLICY_DIRS))
+$(call add_json_list, ProductPrivateSepolicyDirs,        $(PRODUCT_PRIVATE_SEPOLICY_DIRS))
+$(call add_json_bool, ProductCompatibleProperty,         $(PRODUCT_COMPATIBLE_PROPERTY))
+
 $(call add_json_list, TargetFSConfigGen,                 $(TARGET_FS_CONFIG_GEN))
 
 $(call add_json_map, VendorVars)
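The new NativeBridge entries are filled from board variables of the same names; a hypothetical BoardConfig fragment that would populate them (values illustrative):

    TARGET_NATIVE_BRIDGE_ARCH := arm
    TARGET_NATIVE_BRIDGE_ARCH_VARIANT := armv7-a-neon
    TARGET_NATIVE_BRIDGE_CPU_VARIANT := generic
    TARGET_NATIVE_BRIDGE_ABI := armeabi-v7a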
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index 9692a99..6a76fc4 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -51,6 +51,13 @@
     $(intermediates.COMMON)/jacoco-report-classes.jar)
 endif
 
+ifdef LOCAL_SOONG_PROGUARD_DICT
+  $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
+    $(intermediates.COMMON)/proguard_dictionary))
+  $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+    $(intermediates.COMMON)/proguard_dictionary)
+endif
+
 ifdef LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
   my_res_package := $(intermediates.COMMON)/package-res.apk
 
diff --git a/core/static_java_library.mk b/core/static_java_library.mk
index ee759b9..cb3281a 100644
--- a/core/static_java_library.mk
+++ b/core/static_java_library.mk
@@ -33,10 +33,6 @@
 
 include $(BUILD_SYSTEM)/force_aapt2.mk
 
-ifdef LOCAL_AAPT2_ONLY
-LOCAL_USE_AAPT2 := true
-endif
-
 # Hack to build static Java library with Android resource
 # See bug 5714516
 all_resources :=
@@ -46,11 +42,9 @@
 need_compile_res := true
 LOCAL_RESOURCE_DIR := $(foreach d,$(LOCAL_RESOURCE_DIR),$(call clean-path,$(d)))
 endif
-ifeq ($(LOCAL_USE_AAPT2),true)
 ifneq ($(strip $(LOCAL_STATIC_ANDROID_LIBRARIES) $(LOCAL_STATIC_JAVA_AAR_LIBRARIES)),)
 need_compile_res := true
 endif
-endif
 
 ifeq ($(need_compile_res),true)
 all_resources := $(strip \
@@ -84,22 +78,20 @@
 R_file_stamp := $(intermediates.COMMON)/src/R.stamp
 LOCAL_INTERMEDIATE_TARGETS += $(R_file_stamp)
 
-ifeq ($(LOCAL_USE_AAPT2),true)
-  ifneq ($(strip $(LOCAL_STATIC_ANDROID_LIBRARIES) $(LOCAL_STATIC_JAVA_AAR_LIBRARIES)),)
-    # If we are using static android libraries, every source file becomes an overlay.
-    # This is to emulate old AAPT behavior which simulated library support.
-    my_res_resources :=
-    my_overlay_resources := $(all_resources)
-  else
-    # Otherwise, for a library we treat all the resource equal with no overlay.
-    my_res_resources := $(all_resources)
-    my_overlay_resources :=
-  endif
-  # For libraries put everything in the COMMON intermediate directory.
-  my_res_package := $(intermediates.COMMON)/package-res.apk
+ifneq ($(strip $(LOCAL_STATIC_ANDROID_LIBRARIES) $(LOCAL_STATIC_JAVA_AAR_LIBRARIES)),)
+  # If we are using static android libraries, every source file becomes an overlay.
+  # This is to emulate old AAPT behavior which simulated library support.
+  my_res_resources :=
+  my_overlay_resources := $(all_resources)
+else
+  # Otherwise, for a library we treat all the resources equally, with no overlay.
+  my_res_resources := $(all_resources)
+  my_overlay_resources :=
+endif
+# For libraries put everything in the COMMON intermediate directory.
+my_res_package := $(intermediates.COMMON)/package-res.apk
 
-  LOCAL_INTERMEDIATE_TARGETS += $(my_res_package)
-endif  # LOCAL_USE_AAPT2
+LOCAL_INTERMEDIATE_TARGETS += $(my_res_package)
 
 endif  # need_compile_res
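To make the res-vs-overlay distinction above concrete, a hypothetical static library that pulls in static Android libraries, so all of its own resources are compiled as overlays:

    include $(CLEAR_VARS)
    LOCAL_MODULE := example-static-lib                      # illustrative
    LOCAL_STATIC_ANDROID_LIBRARIES := androidx.appcompat_appcompat
    LOCAL_RESOURCE_DIR := $(LOCAL_PATH)/res
    include $(BUILD_STATIC_JAVA_LIBRARY)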
 
@@ -127,7 +119,6 @@
 endif
 endif
 
-ifeq ($(LOCAL_USE_AAPT2),true)
 import_proguard_flag_files := $(strip $(foreach l,$(LOCAL_STATIC_ANDROID_LIBRARIES) $(LOCAL_STATIC_JAVA_AAR_LIBRARIES),\
     $(call intermediates-dir-for,JAVA_LIBRARIES,$(l),,COMMON)/export_proguard_flags))
 $(intermediates.COMMON)/export_proguard_flags: $(import_proguard_flag_files) $(addprefix $(LOCAL_PATH)/,$(LOCAL_EXPORT_PROGUARD_FLAG_FILES))
@@ -139,7 +130,6 @@
 		cat $$f >>$@; \
 	done
 import_proguard_flag_files :=
-endif
 
 include $(BUILD_SYSTEM)/aapt_flags.mk
 
@@ -150,7 +140,6 @@
 
 # add --non-constant-id to prevent inlining constants.
 # AAR needs text symbol file R.txt.
-ifeq ($(LOCAL_USE_AAPT2),true)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS) --static-lib --output-text-symbols $(intermediates.COMMON)/R.txt
 ifndef LOCAL_AAPT_NAMESPACES
   $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_AAPT_FLAGS += --no-static-lib-packages
@@ -158,15 +147,6 @@
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_PRODUCT_AAPT_CONFIG :=
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_AAPT_CHARACTERISTICS :=
-else
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS) --non-constant-id --output-text-symbols $(intermediates.COMMON)
-
-my_srcjar := $(intermediates.COMMON)/aapt.srcjar
-LOCAL_SRCJARS += $(my_srcjar)
-$(R_file_stamp): PRIVATE_SRCJAR := $(my_srcjar)
-$(R_file_stamp): PRIVATE_JAVA_GEN_DIR := $(intermediates.COMMON)/aapt
-$(R_file_stamp): .KATI_IMPLICIT_OUTPUTS := $(my_srcjar)
-endif
 
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ANDROID_MANIFEST := $(full_android_manifest)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RESOURCE_PUBLICS_OUTPUT := $(intermediates.COMMON)/public_resources.xml
@@ -178,26 +158,16 @@
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MANIFEST_PACKAGE_NAME :=
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MANIFEST_INSTRUMENTATION_FOR :=
 
-ifeq ($(LOCAL_USE_AAPT2),true)
-  # One more level with name res so we can zip up the flat resources that can be linked by apps.
-  my_compiled_res_base_dir := $(intermediates.COMMON)/flat-res/res
-  ifneq (,$(filter-out current,$(renderscript_target_api)))
-    ifneq ($(call math_gt_or_eq,$(renderscript_target_api),21),true)
-      my_generated_res_zips := $(rs_generated_res_zip)
-    endif  # renderscript_target_api < 21
-  endif  # renderscript_target_api is set
-  include $(BUILD_SYSTEM)/aapt2.mk
-  $(my_res_package) : $(framework_res_package_export)
-  $(my_res_package): .KATI_IMPLICIT_OUTPUTS += $(intermediates.COMMON)/R.txt
-else
-  $(R_file_stamp): .KATI_IMPLICIT_OUTPUTS += $(intermediates.COMMON)/R.txt
-  $(R_file_stamp): PRIVATE_RESOURCE_LIST := $(all_resources)
-  $(R_file_stamp) : $(all_resources) $(full_android_manifest) $(AAPT) $(SOONG_ZIP) \
-    $(framework_res_package_export) $(rs_generated_res_zip)
-	@echo "target R.java/Manifest.java: $(PRIVATE_MODULE) ($@)"
-	$(create-resource-java-files)
-	$(hide) find $(PRIVATE_JAVA_GEN_DIR) -name R.java | xargs cat > $@
-endif  # LOCAL_USE_AAPT2
+# One more directory level named res, so we can zip up the flat resources that apps can link against.
+my_compiled_res_base_dir := $(intermediates.COMMON)/flat-res/res
+ifneq (,$(filter-out current,$(renderscript_target_api)))
+  ifneq ($(call math_gt_or_eq,$(renderscript_target_api),21),true)
+    my_generated_res_zips := $(rs_generated_res_zip)
+  endif  # renderscript_target_api < 21
+endif  # renderscript_target_api is set
+include $(BUILD_SYSTEM)/aapt2.mk
+$(my_res_package) : $(framework_res_package_export)
+$(my_res_package): .KATI_IMPLICIT_OUTPUTS += $(intermediates.COMMON)/R.txt
 
 endif # need_compile_res
 
diff --git a/core/tasks/check_boot_jars/package_whitelist.txt b/core/tasks/check_boot_jars/package_whitelist.txt
index 38f2be5..8d9878f 100644
--- a/core/tasks/check_boot_jars/package_whitelist.txt
+++ b/core/tasks/check_boot_jars/package_whitelist.txt
@@ -46,6 +46,8 @@
 java\.util\.spi
 java\.util\.stream
 java\.util\.zip
+# TODO: Remove javax.annotation.processing if possible, see http://b/132338110:
+javax\.annotation\.processing
 javax\.crypto
 javax\.crypto\.interfaces
 javax\.crypto\.spec
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 6738a77..0c76a28 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -39,9 +39,9 @@
   include $(INTERNAL_BUILD_ID_MAKEFILE)
 endif
 
-DEFAULT_PLATFORM_VERSION := QP1A
-MIN_PLATFORM_VERSION := QP1A
-MAX_PLATFORM_VERSION := QP1A
+DEFAULT_PLATFORM_VERSION := RP1A
+MIN_PLATFORM_VERSION := RP1A
+MAX_PLATFORM_VERSION := RP1A
 
 ALLOWED_VERSIONS := $(call allowed-platform-versions,\
   $(MIN_PLATFORM_VERSION),\
@@ -90,10 +90,12 @@
 # unreleased API level targetable by this branch, not just those that are valid
 # lunch targets for this branch.
 PLATFORM_VERSION.QP1A := 10
+PLATFORM_VERSION.RP1A := R
 
 # These are the current development codenames, if the build is not a final
 # release build.  If this is a final release build, it is simply "REL".
-PLATFORM_VERSION_CODENAME.QP1A := REL
+PLATFORM_VERSION_CODENAME.QP1A := Q
+PLATFORM_VERSION_CODENAME.RP1A := R
 
 ifndef PLATFORM_VERSION
   PLATFORM_VERSION := $(PLATFORM_VERSION.$(TARGET_PLATFORM_VERSION))
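With the hunk above, a build that targets the new version resolves roughly like this (illustrative; assumes PLATFORM_VERSION is not overridden):

    TARGET_PLATFORM_VERSION := RP1A
    # PLATFORM_VERSION          -> $(PLATFORM_VERSION.RP1A)          = R
    # PLATFORM_VERSION_CODENAME -> $(PLATFORM_VERSION_CODENAME.RP1A) = R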
diff --git a/envsetup.sh b/envsetup.sh
index 9e381a2..5292d38 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -22,7 +22,7 @@
 - jgrep:      Greps on all local Java files.
 - resgrep:    Greps on all local res/*.xml files.
 - mangrep:    Greps on all local AndroidManifest.xml files.
-- mgrep:      Greps on all local Makefiles files.
+- mgrep:      Greps on all local Makefiles and *.bp files.
 - sepgrep:    Greps on all local sepolicy files.
 - sgrep:      Greps on all local source files.
 - godir:      Go to the directory containing a file.
diff --git a/target/board/BoardConfigMainlineCommon.mk b/target/board/BoardConfigMainlineCommon.mk
index 6c56671..be7c804 100644
--- a/target/board/BoardConfigMainlineCommon.mk
+++ b/target/board/BoardConfigMainlineCommon.mk
@@ -36,9 +36,6 @@
 
 BOARD_CHARGER_ENABLE_SUSPEND := true
 
-# Enable A/B update
-AB_OTA_UPDATER := true
-
 # Enable system property split for Treble
 BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED := true
 
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index f07adb7..ecc547f 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -67,7 +67,7 @@
 
 # TODO(b/36764215): remove this setting when the generic system image
 # no longer has QCOM-specific directories under /.
-BOARD_SEPOLICY_DIRS += build/target/board/generic_arm64_ab/sepolicy
+BOARD_SEPOLICY_DIRS += build/make/target/board/generic_arm64_ab/sepolicy
 
 # Wifi.
 BOARD_WLAN_DEVICE           := emulator
diff --git a/target/board/generic_arm64_ab/BoardConfig.mk b/target/board/generic_arm64_ab/BoardConfig.mk
index 6e54d81..28140ce 100644
--- a/target/board/generic_arm64_ab/BoardConfig.mk
+++ b/target/board/generic_arm64_ab/BoardConfig.mk
@@ -36,4 +36,4 @@
 
 # TODO(b/36764215): remove this setting when the generic system image
 # no longer has QCOM-specific directories under /.
-BOARD_SEPOLICY_DIRS += build/target/board/generic_arm64_ab/sepolicy
+BOARD_SEPOLICY_DIRS += build/make/target/board/generic_arm64_ab/sepolicy
diff --git a/target/board/generic_arm_ab/BoardConfig.mk b/target/board/generic_arm_ab/BoardConfig.mk
index 9100094..bcb4cc5 100644
--- a/target/board/generic_arm_ab/BoardConfig.mk
+++ b/target/board/generic_arm_ab/BoardConfig.mk
@@ -33,4 +33,4 @@
 
 # TODO(b/36764215): remove this setting when the generic system image
 # no longer has QCOM-specific directories under /.
-BOARD_SEPOLICY_DIRS += build/target/board/generic_arm64_ab/sepolicy
+BOARD_SEPOLICY_DIRS += build/make/target/board/generic_arm64_ab/sepolicy
diff --git a/target/board/gsi_arm64/BoardConfig.mk b/target/board/gsi_arm64/BoardConfig.mk
index 90ddd0d..571d623 100644
--- a/target/board/gsi_arm64/BoardConfig.mk
+++ b/target/board/gsi_arm64/BoardConfig.mk
@@ -34,4 +34,4 @@
 
 # TODO(b/36764215): remove this setting when the generic system image
 # no longer has QCOM-specific directories under /.
-BOARD_SEPOLICY_DIRS += build/target/board/generic_arm64_ab/sepolicy
+BOARD_SEPOLICY_DIRS += build/make/target/board/generic_arm64_ab/sepolicy
diff --git a/target/board/mainline_arm64/BoardConfig.mk b/target/board/mainline_arm64/BoardConfig.mk
index 8bb6212..70505f4 100644
--- a/target/board/mainline_arm64/BoardConfig.mk
+++ b/target/board/mainline_arm64/BoardConfig.mk
@@ -28,6 +28,8 @@
 
 TARGET_NO_KERNEL := true
 
+# Build generic A/B format system-only OTA.
+AB_OTA_UPDATER := true
 AB_OTA_PARTITIONS := system
 
 BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 822d2ea..205f842 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -125,8 +125,6 @@
     libcamera2ndk \
     libcamera_client \
     libcameraservice \
-    libc_malloc_debug \
-    libc_malloc_hooks \
     libcutils \
     libdl.bootstrap \
     libdrmframework \
@@ -213,6 +211,7 @@
     netd \
     NetworkStack \
     org.apache.http.legacy \
+    otacerts \
     PackageInstaller \
     perfetto \
     PermissionController \
@@ -336,7 +335,8 @@
 
 # Add the compatibility library that is needed when android.test.base
 # is removed from the bootclasspath.
-ifeq ($(REMOVE_ATB_FROM_BCP),true)
+# Default to excluding android.test.base from the bootclasspath.
+ifneq ($(REMOVE_ATB_FROM_BCP),false)
 PRODUCT_PACKAGES += framework-atb-backward-compatibility
 PRODUCT_BOOT_JARS += framework-atb-backward-compatibility
 else
@@ -362,6 +362,7 @@
     showmap \
     sqlite3 \
     ss \
+    start_with_lockagent \
     strace \
     su \
     sanitizer-status \
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index 584327c..c27407a 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -23,6 +23,7 @@
     init_second_stage.recovery \
     ld.config.recovery.txt \
     linker.recovery \
+    otacerts.recovery \
     recovery \
     shell_and_utilities_recovery \
     watchdogd.recovery \
diff --git a/target/product/go_defaults.mk b/target/product/go_defaults.mk
index 7bb6d91..b717486 100644
--- a/target/product/go_defaults.mk
+++ b/target/product/go_defaults.mk
@@ -15,7 +15,7 @@
 #
 
 # Inherit common Android Go defaults.
-$(call inherit-product, build/target/product/go_defaults_common.mk)
+$(call inherit-product, build/make/target/product/go_defaults_common.mk)
 
 # Add the system properties.
 TARGET_SYSTEM_PROP += \
diff --git a/target/product/go_defaults_512.mk b/target/product/go_defaults_512.mk
index 5542818..70d067e 100644
--- a/target/product/go_defaults_512.mk
+++ b/target/product/go_defaults_512.mk
@@ -15,7 +15,7 @@
 #
 
 # Inherit common Android Go defaults.
-$(call inherit-product, build/target/product/go_defaults_common.mk)
+$(call inherit-product, build/make/target/product/go_defaults_common.mk)
 
 # Add the system properties.
 TARGET_SYSTEM_PROP += \
diff --git a/target/product/mainline_system.mk b/target/product/mainline_system.mk
index 54a9625..0949616 100644
--- a/target/product/mainline_system.mk
+++ b/target/product/mainline_system.mk
@@ -19,7 +19,7 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/languages_default.mk)
 # Enable updating of APEXes
-$(call inherit-product, $(SRC_TARGET_DIR)/product/updatable_apex.mk)
+#$(call inherit-product, $(SRC_TARGET_DIR)/product/updatable_apex.mk)
 # Add adb keys to debuggable AOSP builds (if they exist)
 $(call inherit-product-if-exists, vendor/google/security/adb/vendor_key.mk)
 
diff --git a/target/product/profile_boot_common.mk b/target/product/profile_boot_common.mk
index 4147dfa..fc19954 100644
--- a/target/product/profile_boot_common.mk
+++ b/target/product/profile_boot_common.mk
@@ -19,7 +19,7 @@
 # remove classes that are no longer in use.
 # Ideally we would just generate an empty boot.art but we don't have the build
 # support to separate the image from the compile code.
-PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION := build/target/product/empty-profile
+PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION := build/make/target/product/empty-profile
 PRODUCT_DEX_PREOPT_BOOT_FLAGS := --count-hotness-in-compiled-code
 DEX_PREOPT_DEFAULT := nostripping
 
@@ -28,7 +28,7 @@
 
 # Use an empty preloaded-classes list.
 PRODUCT_COPY_FILES += \
-    build/target/product/empty-preloaded-classes:system/etc/preloaded-classes
+    build/make/target/product/empty-preloaded-classes:system/etc/preloaded-classes
 
 # Boot image property overrides.
 PRODUCT_PROPERTY_OVERRIDES += \
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index 38a0968..5db32f2 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -83,6 +83,10 @@
 PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
     dalvik.vm.dex2oat-resolve-startup-strings=true
 
+# Specify default block size of 512K to enable parallel image decompression.
+PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
+    dalvik.vm.dex2oat-max-image-block-size=524288
+
 # Enable minidebuginfo generation unless overridden.
 PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
     dalvik.vm.minidebuginfo=true \
@@ -92,4 +96,4 @@
 PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
     ro.iorapd.enable=false
 
-PRODUCT_USES_ART := true
+PRODUCT_USES_DEFAULT_ART_CONFIG := true
diff --git a/target/product/security/Android.bp b/target/product/security/Android.bp
new file mode 100644
index 0000000..080706b
--- /dev/null
+++ b/target/product/security/Android.bp
@@ -0,0 +1,5 @@
+// AOSP test certificate
+android_app_certificate {
+    name: "aosp-testkey",
+    certificate: "testkey",
+}
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
index 4142ea9..a0b2d6d 100644
--- a/target/product/security/Android.mk
+++ b/target/product/security/Android.mk
@@ -23,3 +23,67 @@
     include $(BUILD_PREBUILT)
   endif
 endif
+
+
+#######################################
+# otacerts: A keystore with the authorized keys in it, which is used to verify the authenticity of
+# downloaded OTA packages.
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := otacerts
+LOCAL_MODULE_CLASS := ETC
+LOCAL_MODULE_STEM := otacerts.zip
+LOCAL_MODULE_PATH := $(TARGET_OUT_ETC)/security
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): PRIVATE_CERT := $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
+$(LOCAL_BUILT_MODULE): $(SOONG_ZIP) $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
+	$(SOONG_ZIP) -o $@ -j -f $(PRIVATE_CERT)
+
+
+#######################################
+# otacerts for recovery image.
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := otacerts.recovery
+LOCAL_MODULE_CLASS := ETC
+LOCAL_MODULE_STEM := otacerts.zip
+LOCAL_MODULE_PATH := $(TARGET_RECOVERY_ROOT_OUT)/system/etc/security
+include $(BUILD_SYSTEM)/base_rules.mk
+
+extra_recovery_keys := $(patsubst %,%.x509.pem,$(PRODUCT_EXTRA_RECOVERY_KEYS))
+
+$(LOCAL_BUILT_MODULE): PRIVATE_CERT := $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
+$(LOCAL_BUILT_MODULE): PRIVATE_EXTRA_RECOVERY_KEYS := $(extra_recovery_keys)
+$(LOCAL_BUILT_MODULE): \
+	    $(SOONG_ZIP) \
+	    $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem \
+	    $(extra_recovery_keys)
+	$(SOONG_ZIP) -o $@ -j \
+	    $(foreach key_file, $(PRIVATE_CERT) $(PRIVATE_EXTRA_RECOVERY_KEYS), -f $(key_file))
+
+
+#######################################
+# update_engine_payload_key, used by update_engine. We use the same key as otacerts but in RSA
+# public key format.
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := update_engine_payload_key
+LOCAL_MODULE_CLASS := ETC
+LOCAL_MODULE_STEM := update-payload-key.pub.pem
+LOCAL_MODULE_PATH := $(TARGET_OUT_ETC)/update_engine
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
+	openssl x509 -pubkey -noout -in $< > $@
+
+
+#######################################
+# update_engine_payload_key for recovery image, used by update_engine.
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := update_engine_payload_key.recovery
+LOCAL_MODULE_CLASS := ETC
+LOCAL_MODULE_STEM := update-payload-key.pub.pem
+LOCAL_MODULE_PATH := $(TARGET_RECOVERY_ROOT_OUT)/system/etc/update_engine
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
+	openssl x509 -pubkey -noout -in $< > $@
diff --git a/target/product/security/README b/target/product/security/README
index 6a6e62d..6e75e4d 100644
--- a/target/product/security/README
+++ b/target/product/security/README
@@ -31,7 +31,7 @@
 dumpkey.jar is a Java tool that takes an x.509 certificate in PEM format as
 input and prints a C structure to standard output:
 
-    $ java -jar out/host/linux-x86/framework/dumpkey.jar build/target/product/security/testkey.x509.pem
+    $ java -jar out/host/linux-x86/framework/dumpkey.jar build/make/target/product/security/testkey.x509.pem
     {64,0xc926ad21,{1795090719,2141396315,950055447,2581568430,4268923165,1920809988,546586521,3498997798,1776797858,3740060814,1805317999,1429410244,129622599,1422441418,1783893377,1222374759,2563319927,323993566,28517732,609753416,1826472888,215237850,4261642700,4049082591,3228462402,774857746,154822455,2497198897,2758199418,3019015328,2794777644,87251430,2534927978,120774784,571297800,3695899472,2479925187,3811625450,3401832990,2394869647,3267246207,950095497,555058928,414729973,1136544882,3044590084,465547824,4058146728,2731796054,1689838846,3890756939,1048029507,895090649,247140249,178744550,3547885223,3165179243,109881576,3944604415,1044303212,3772373029,2985150306,3737520932,3599964420},{3437017481,3784475129,2800224972,3086222688,251333580,2131931323,512774938,325948880,2657486437,2102694287,3820568226,792812816,1026422502,2053275343,2800889200,3113586810,165549746,4273519969,4065247892,1902789247,772932719,3941848426,3652744109,216871947,3164400649,1942378755,3996765851,1055777370,964047799,629391717,2232744317,3910558992,191868569,2758883837,3682816752,2997714732,2702529250,3570700455,3776873832,3924067546,3555689545,2758825434,1323144535,61311905,1997411085,376844204,213777604,4077323584,9135381,1625809335,2804742137,2952293945,1117190829,4237312782,1825108855,3013147971,1111251351,2568837572,1684324211,2520978805,367251975,810756730,2353784344,1175080310}}
 
 This is called by build/make/core/Makefile to incorporate the OTA signing keys
diff --git a/target/product/verity.mk b/target/product/verity.mk
index d954159..5f09283 100644
--- a/target/product/verity.mk
+++ b/target/product/verity.mk
@@ -23,7 +23,7 @@
 # The dev key is used to sign boot and recovery images, and the verity
 # metadata table. Actual product deliverables will be re-signed by hand.
 # We expect this file to exist with the suffixes ".x509.pem" and ".pk8".
-PRODUCT_VERITY_SIGNING_KEY := build/target/product/security/verity
+PRODUCT_VERITY_SIGNING_KEY := build/make/target/product/security/verity
 
 PRODUCT_PACKAGES += \
         verity_key
diff --git a/tools/buildinfo.sh b/tools/buildinfo.sh
index 24ac663..09d8f70 100755
--- a/tools/buildinfo.sh
+++ b/tools/buildinfo.sh
@@ -25,9 +25,6 @@
 if [ -n "$BOARD_BUILD_SYSTEM_ROOT_IMAGE" ] ; then
   echo "ro.build.system_root_image=$BOARD_BUILD_SYSTEM_ROOT_IMAGE"
 fi
-if [ -n "$AB_OTA_UPDATER" ] ; then
-  echo "ro.build.ab_update=$AB_OTA_UPDATER"
-fi
 
 # These values are deprecated, use "ro.product.cpu.abilist"
 # instead (see below).
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
new file mode 100644
index 0000000..b5ae009
--- /dev/null
+++ b/tools/releasetools/Android.bp
@@ -0,0 +1,72 @@
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+python_defaults {
+    name: "releasetools_test_defaults",
+    version: {
+        py2: {
+            enabled: true,
+            embedded_launcher: false,
+        },
+        py3: {
+            enabled: false,
+        },
+    },
+}
+
+python_library_host {
+    name: "releasetools_lib",
+    defaults: ["releasetools_test_defaults"],
+    srcs: [
+        "add_img_to_target_files.py",
+        "apex_utils.py",
+        "blockimgdiff.py",
+        "build_image.py",
+        "build_super_image.py",
+        "check_ota_package_signature.py",
+        "check_target_files_signatures.py",
+        "common.py",
+        "edify_generator.py",
+        "img_from_target_files.py",
+        "make_recovery_patch.py",
+        "merge_target_files.py",
+        "ota_from_target_files.py",
+        "ota_package_parser.py",
+        "rangelib.py",
+        "sign_target_files_apks.py",
+        "sparse_img.py",
+        "target_files_diff.py",
+        "validate_target_files.py",
+        "verity_utils.py",
+    ],
+}
+
+python_test_host {
+    name: "releasetools_test",
+    defaults: ["releasetools_test_defaults"],
+    main: "test_utils.py",
+    srcs: [
+        "test_*.py",
+    ],
+    libs: [
+        "releasetools_lib",
+    ],
+    data: [
+        "testdata/*",
+    ],
+    required: [
+        "otatools",
+    ],
+    test_suites: ["general-tests"],
+}
diff --git a/tools/releasetools/TEST_MAPPING b/tools/releasetools/TEST_MAPPING
new file mode 100644
index 0000000..77cef07
--- /dev/null
+++ b/tools/releasetools/TEST_MAPPING
@@ -0,0 +1,8 @@
+{
+  "presubmit": [
+    {
+      "name": "releasetools_test",
+      "host": true
+    }
+  ]
+}
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 10aecf9..4156c8b 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -113,22 +113,33 @@
 
   Returns:
     (which, care_map_ranges): care_map_ranges is the raw string of the care_map
-    RangeSet.
+    RangeSet; or None.
   """
   assert which in common.PARTITIONS_WITH_CARE_MAP
 
-  simg = sparse_img.SparseImage(imgname)
-  care_map_ranges = simg.care_map
-  size_key = which + "_image_size"
-  image_size = OPTIONS.info_dict.get(size_key)
-  if image_size:
-    # excludes the verity metadata blocks of the given image. When AVB is enabled,
-    # this size is the max image size returned by the AVB tool
-    image_blocks = int(image_size) / 4096 - 1
-    assert image_blocks > 0, "blocks for {} must be positive".format(which)
-    care_map_ranges = care_map_ranges.intersect(
+  # which + "_image_size" gives the size of the region that the actual
+  # filesystem image occupies, which is all that needs to be verified. The
+  # additional blocks in the image file contain verity metadata; reading them
+  # would trigger invalid reads.
+  image_size = OPTIONS.info_dict.get(which + "_image_size")
+  if not image_size:
+    return None
+
+  image_blocks = int(image_size) / 4096 - 1
+  assert image_blocks > 0, "blocks for {} must be positive".format(which)
+
+  # For sparse images, we will only check the blocks that are listed in the care
+  # map, i.e. the ones with meaningful data.
+  if "extfs_sparse_flag" in OPTIONS.info_dict:
+    simg = sparse_img.SparseImage(imgname)
+    care_map_ranges = simg.care_map.intersect(
         rangelib.RangeSet("0-{}".format(image_blocks)))
 
+  # Otherwise for non-sparse images, we read all the blocks in the filesystem
+  # image.
+  else:
+    care_map_ranges = rangelib.RangeSet("0-{}".format(image_blocks))
+
   return [which, care_map_ranges.to_string_raw()]
 
 
@@ -581,7 +592,11 @@
         OPTIONS.info_dict.get(avb_hashtree_enable) == "true"):
       image_path = image_paths[partition]
       assert os.path.exists(image_path)
-      care_map_list += GetCareMap(partition, image_path)
+
+      care_map = GetCareMap(partition, image_path)
+      if not care_map:
+        continue
+      care_map_list += care_map
 
       # adds fingerprint field to the care_map
       build_props = OPTIONS.info_dict.get(partition + ".build.prop", {})
@@ -715,6 +730,7 @@
   OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.input_tmp, repacking=True)
 
   has_recovery = OPTIONS.info_dict.get("no_recovery") != "true"
+  has_boot = OPTIONS.info_dict.get("no_boot") != "true"
 
   # {vendor,odm,product,product_services}.img are unlike system.img or
   # system_other.img. Because it could be built from source, or dropped into
@@ -762,17 +778,19 @@
   def banner(s):
     logger.info("\n\n++++ " + s + " ++++\n\n")
 
-  banner("boot")
-  # common.GetBootableImage() returns the image directly if present.
-  boot_image = common.GetBootableImage(
-      "IMAGES/boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
-  # boot.img may be unavailable in some targets (e.g. aosp_arm64).
-  if boot_image:
-    partitions['boot'] = os.path.join(OPTIONS.input_tmp, "IMAGES", "boot.img")
-    if not os.path.exists(partitions['boot']):
-      boot_image.WriteToDir(OPTIONS.input_tmp)
-      if output_zip:
-        boot_image.AddToZip(output_zip)
+  boot_image = None
+  if has_boot:
+    banner("boot")
+    # common.GetBootableImage() returns the image directly if present.
+    boot_image = common.GetBootableImage(
+        "IMAGES/boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
+    # boot.img may be unavailable in some targets (e.g. aosp_arm64).
+    if boot_image:
+      partitions['boot'] = os.path.join(OPTIONS.input_tmp, "IMAGES", "boot.img")
+      if not os.path.exists(partitions['boot']):
+        boot_image.WriteToDir(OPTIONS.input_tmp)
+        if output_zip:
+          boot_image.AddToZip(output_zip)
 
   recovery_image = None
   if has_recovery:
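
(Editor's sketch, not part of this change.) The non-sparse branch of the GetCareMap() change above can be exercised on its own. A minimal sketch, assuming the rangelib module from this directory and a made-up image size; only the arithmetic mirrors the code above:

    # Derive the verified range for a non-sparse image. Blocks 0 through
    # image_size/4096 - 1 cover exactly the filesystem image; anything beyond
    # that point is verity metadata and is excluded.
    import rangelib

    image_size = 104857600                 # e.g. system_image_size from misc_info.txt (made up)
    image_blocks = image_size // 4096 - 1  # integer division; the tool itself runs under Python 2
    care_map_ranges = rangelib.RangeSet("0-{}".format(image_blocks))
    print(care_map_ranges.to_string_raw())  # e.g. "2,0,25600"
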
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index d14c94f..fb4ca76 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -19,11 +19,14 @@
 import re
 import shlex
 import sys
+import zipfile
 
 import common
 
 logger = logging.getLogger(__name__)
 
+OPTIONS = common.OPTIONS
+
 
 class ApexInfoError(Exception):
   """An Exception raised during Apex Information command."""
@@ -145,3 +148,72 @@
           'Failed to find {} prop in {}'.format(key, payload_path))
 
   return payload_info
+
+
+def SignApex(apex_data, payload_key, container_key, container_pw,
+             codename_to_api_level_map, signing_args=None):
+  """Signs the current APEX with the given payload/container keys.
+
+  Args:
+    apex_data: Raw APEX data.
+    payload_key: The path to payload signing key (w/ extension).
+    container_key: The path to container signing key (w/o extension).
+    container_pw: The matching password of the container_key, or None.
+    codename_to_api_level_map: A dict that maps from codename to API level.
+    signing_args: Additional args to be passed to the payload signer.
+
+  Returns:
+    The path to the signed APEX file.
+  """
+  apex_file = common.MakeTempFile(prefix='apex-', suffix='.apex')
+  with open(apex_file, 'wb') as apex_fp:
+    apex_fp.write(apex_data)
+
+  APEX_PAYLOAD_IMAGE = 'apex_payload.img'
+  APEX_PUBKEY = 'apex_pubkey'
+
+  # 1a. Extract and sign the APEX_PAYLOAD_IMAGE entry with the given
+  # payload_key.
+  payload_dir = common.MakeTempDir(prefix='apex-payload-')
+  with zipfile.ZipFile(apex_file) as apex_fd:
+    payload_file = apex_fd.extract(APEX_PAYLOAD_IMAGE, payload_dir)
+
+  payload_info = ParseApexPayloadInfo(payload_file)
+  SignApexPayload(
+      payload_file,
+      payload_key,
+      payload_info['apex.key'],
+      payload_info['Algorithm'],
+      payload_info['Salt'],
+      signing_args)
+
+  # 1b. Update the embedded payload public key.
+  payload_public_key = common.ExtractAvbPublicKey(payload_key)
+
+  common.ZipDelete(apex_file, APEX_PAYLOAD_IMAGE)
+  common.ZipDelete(apex_file, APEX_PUBKEY)
+  apex_zip = zipfile.ZipFile(apex_file, 'a')
+  common.ZipWrite(apex_zip, payload_file, arcname=APEX_PAYLOAD_IMAGE)
+  common.ZipWrite(apex_zip, payload_public_key, arcname=APEX_PUBKEY)
+  common.ZipClose(apex_zip)
+
+  # 2. Align the files at page boundary (same as in apexer).
+  aligned_apex = common.MakeTempFile(prefix='apex-container-', suffix='.apex')
+  common.RunAndCheckOutput(['zipalign', '-f', '4096', apex_file, aligned_apex])
+
+  # 3. Sign the APEX container with container_key.
+  signed_apex = common.MakeTempFile(prefix='apex-container-', suffix='.apex')
+
+  # Specify the 4K alignment when calling SignApk.
+  extra_signapk_args = OPTIONS.extra_signapk_args[:]
+  extra_signapk_args.extend(['-a', '4096'])
+
+  common.SignFile(
+      aligned_apex,
+      signed_apex,
+      container_key,
+      container_pw,
+      codename_to_api_level_map=codename_to_api_level_map,
+      extra_signapk_args=extra_signapk_args)
+
+  return signed_apex
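
(Editor's sketch, not part of this change.) A hypothetical caller of the new helper might look like the snippet below. Only the call shape comes from SignApex() as defined above; the APEX name, key paths, and codename map are placeholders:

    # Signing an APEX with apex_utils.SignApex(). All paths and the
    # codename-to-API-level map below are hypothetical.
    import apex_utils

    with open('com.android.example.apex', 'rb') as f:  # hypothetical input APEX
        apex_data = f.read()

    signed_apex = apex_utils.SignApex(
        apex_data,
        payload_key='testdata/testkey_RSA4096.key',  # hypothetical AVB payload key
        container_key='testdata/testkey',            # w/o the .x509.pem/.pk8 extension
        container_pw=None,
        codename_to_api_level_map={'Q': 29},
        signing_args=None)
    print('Signed APEX written to %s' % signed_apex)
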
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 3e2a113..e642297 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -93,9 +93,11 @@
 # Values for "certificate" in apkcerts that mean special things.
 SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL")
 
-# The partitions allowed to be signed by AVB (Android verified boot 2.0).
-AVB_PARTITIONS = ('boot', 'recovery', 'system', 'vendor', 'product',
-                  'product_services', 'dtbo', 'odm')
+# The partitions allowed to be signed by AVB (Android Verified Boot 2.0). Note
+# that system_other is not in the list because we don't want to include its
+# descriptor into vbmeta.img.
+AVB_PARTITIONS = ('boot', 'dtbo', 'odm', 'product', 'product_services',
+                  'recovery', 'system', 'vendor')
 
 # Partitions that should have their care_map added to META/care_map.pb
 PARTITIONS_WITH_CARE_MAP = ('system', 'vendor', 'product', 'product_services',
@@ -330,10 +332,8 @@
     raise ValueError("Failed to find 'fstab_version'")
 
   if repacking:
-    # We carry a copy of file_contexts.bin under META/. If not available, search
-    # BOOT/RAMDISK/. Note that sometimes we may need a different file to build
-    # images than the one running on device, in that case, we must have the one
-    # for image generation copied to META/.
+    # "selinux_fc" should point to the file_contexts file (file_contexts.bin)
+    # under META/.
     fc_basename = os.path.basename(d.get("selinux_fc", "file_contexts"))
     fc_config = os.path.join(input_file, "META", fc_basename)
     assert os.path.exists(fc_config)
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index e01b5e8..8fb9871 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -15,11 +15,19 @@
 # limitations under the License.
 
 """
-Given a target-files zipfile, produces an image zipfile suitable for
-use with 'fastboot update'.
+Given target-files, produces an image zipfile suitable for use
+with 'fastboot update'.
 
 Usage:  img_from_target_files [flags] input_target_files output_image_zip
 
+input_target_files: one of the following:
+  - directory containing extracted target files. It will load info from
+    OTA/android-info.txt, META/misc_info.txt and build the image zipfile using
+    images from IMAGES/.
+  - target files package. Same as above, but extracts the archive before
+    building the image zipfile.
+
+Flags:
   -z  (--bootable_zip)
       Include only the bootable images (eg 'boot' and 'recovery') in
       the output.
@@ -166,10 +174,19 @@
 
   common.InitLogging()
 
-  # We need files under IMAGES/, OTA/, META/ for img_from_target_files.py.
-  # However, common.LoadInfoDict() may read additional files under BOOT/,
-  # RECOVERY/ and ROOT/. So unzip everything from the target_files.zip.
-  OPTIONS.input_tmp = common.UnzipTemp(args[0])
+  target_files = args[0]
+  if os.path.isdir(target_files):
+    logger.info("Building image zip from extracted target files.")
+    OPTIONS.input_tmp = target_files
+  elif zipfile.is_zipfile(target_files):
+    logger.info("Building image zip from target files zip.")
+    # We need files under IMAGES/, OTA/, META/ for img_from_target_files.py.
+    # However, common.LoadInfoDict() may read additional files under BOOT/,
+    # RECOVERY/ and ROOT/. So unzip everything from the target_files.zip.
+    OPTIONS.input_tmp = common.UnzipTemp(target_files)
+  else:
+    raise ValueError("%s is not a valid path." % target_files)
+
   LoadOptions(OPTIONS.input_tmp)
   output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED,
                                allowZip64=not OPTIONS.sparse_userimages)
@@ -181,7 +198,6 @@
   finally:
     logger.info("cleaning up...")
     common.ZipClose(output_zip)
-    shutil.rmtree(OPTIONS.input_tmp)
 
   logger.info("done.")
 
@@ -193,3 +209,5 @@
   except common.ExternalError as e:
     logger.exception("\n   ERROR:\n")
     sys.exit(1)
+  finally:
+    common.Cleanup()
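
(Editor's sketch, not part of this change.) With this change img_from_target_files accepts either a target_files zip or an already-extracted directory, which is how merge_target_files.py later in this change invokes it. A minimal sketch with made-up paths:

    # Calling the tool programmatically, mirroring the
    # img_from_target_files.main() invocation used by merge_target_files.py.
    import img_from_target_files

    img_from_target_files.main([
        '/tmp/extracted_target_files',  # hypothetical extracted target files directory
        '/tmp/aosp_arm64-img.zip',      # hypothetical output image zip
    ])
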
diff --git a/tools/releasetools/merge_target_files.py b/tools/releasetools/merge_target_files.py
index 3b72551..f03cc1e 100755
--- a/tools/releasetools/merge_target_files.py
+++ b/tools/releasetools/merge_target_files.py
@@ -13,11 +13,11 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations under
 # the License.
+"""This script merges two partial target files packages.
 
-"""
-This script merges two partial target files packages (one of which contains
-system files, and the other contains non-system files) together, producing a
-complete target files package that can be used to generate an OTA package.
+One package contains system files, and the other contains non-system files.
+It produces a complete target files package that can be used to generate an
+OTA package.
 
 Usage: merge_target_files.py [args]
 
@@ -42,7 +42,29 @@
       contents of default_other_item_list if provided.
 
   --output-target-files output-target-files-package
-      The output merged target files package. Also a zip archive.
+      If provided, the output merged target files package. Also a zip archive.
+
+  --output-dir output-directory
+      If provided, the destination directory for saving merged files. Requires
+      the --output-item-list flag.
+      Can be provided alongside --output-target-files, or by itself.
+
+  --output-item-list output-item-list-file.
+      The optional path to a newline-separated config file that specifies the
+      file patterns to copy into the --output-dir. Required if providing
+      the --output-dir flag.
+
+  --output-ota output-ota-package
+      The output OTA package. This is a zip archive. Use of this flag may
+      require passing the --path common flag; see common.py.
+
+  --output-img output-img-package
+      The output img package, suitable for use with 'fastboot update'. Use of
+      this flag may require passing the --path common flag; see common.py.
+
+  --output-super-empty output-super-empty-image
+      If provided, creates a super_empty.img file from the merged target
+      files package and saves it at this path.
 
   --rebuild_recovery
       Rebuild the recovery patch used by non-A/B devices and write it to the
@@ -57,11 +79,16 @@
 import fnmatch
 import logging
 import os
+import shutil
+import subprocess
 import sys
 import zipfile
 
-import common
 import add_img_to_target_files
+import build_super_image
+import common
+import img_from_target_files
+import ota_from_target_files
 
 logger = logging.getLogger(__name__)
 OPTIONS = common.OPTIONS
@@ -72,6 +99,11 @@
 OPTIONS.other_target_files = None
 OPTIONS.other_item_list = None
 OPTIONS.output_target_files = None
+OPTIONS.output_dir = None
+OPTIONS.output_item_list = None
+OPTIONS.output_ota = None
+OPTIONS.output_img = None
+OPTIONS.output_super_empty = None
 OPTIONS.rebuild_recovery = False
 OPTIONS.keep_tmp = False
 
@@ -101,8 +133,8 @@
     'META/*',
 ]
 
-# default_system_misc_info_keys is a list of keys to obtain from the system instance of
-# META/misc_info.txt. The remaining keys from the other instance.
+# default_system_misc_info_keys is a list of keys to obtain from the system
+# instance of META/misc_info.txt. The remaining keys from the other instance.
 
 default_system_misc_info_keys = [
     'avb_system_hashtree_enable',
@@ -128,6 +160,7 @@
 
 default_other_item_list = [
     'META/boot_filesystem_config.txt',
+    'META/file_contexts.bin',
     'META/otakeys.txt',
     'META/releasetools.py',
     'META/vendor_filesystem_config.txt',
@@ -160,10 +193,8 @@
 
   Args:
     target_files: The target files zip archive from which to extract items.
-
     target_files_temp_dir: The temporary directory where the extracted items
-    will land.
-
+      will land.
     extract_item_list: A list of items to extract.
   """
 
@@ -173,9 +204,7 @@
   # zip file. Otherwise, the extraction step will fail.
 
   with zipfile.ZipFile(
-      target_files,
-      'r',
-      allowZip64=True) as target_files_zipfile:
+      target_files, 'r', allowZip64=True) as target_files_zipfile:
     target_files_namelist = target_files_zipfile.namelist()
 
   filtered_extract_item_list = []
@@ -189,10 +218,32 @@
   # Extract from target_files into target_files_temp_dir the
   # filtered_extract_item_list.
 
-  common.UnzipToDir(
-      target_files,
-      target_files_temp_dir,
-      filtered_extract_item_list)
+  common.UnzipToDir(target_files, target_files_temp_dir,
+                    filtered_extract_item_list)
+
+
+def copy_items(from_dir, to_dir, patterns):
+  """Similar to extract_items() except uses an input dir instead of zip."""
+  file_paths = []
+  for dirpath, _, filenames in os.walk(from_dir):
+    file_paths.extend(
+        os.path.relpath(path=os.path.join(dirpath, filename), start=from_dir)
+        for filename in filenames)
+
+  filtered_file_paths = set()
+  for pattern in patterns:
+    filtered_file_paths.update(fnmatch.filter(file_paths, pattern))
+
+  for file_path in filtered_file_paths:
+    original_file_path = os.path.join(from_dir, file_path)
+    copied_file_path = os.path.join(to_dir, file_path)
+    copied_file_dir = os.path.dirname(copied_file_path)
+    if not os.path.exists(copied_file_dir):
+      os.makedirs(copied_file_dir)
+    if os.path.islink(original_file_path):
+      os.symlink(os.readlink(original_file_path), copied_file_path)
+    else:
+      shutil.copyfile(original_file_path, copied_file_path)
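# Editor's sketch, not part of this change: copy_items() filters with
# fnmatch-style patterns such as the entries in the default item lists above.
# The paths below are made up.
#
#   copy_items('/tmp/merged_target_files', '/tmp/output_dir',
#              patterns=['META/*', 'IMAGES/*.img'])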
 
 
 def read_config_list(config_file_path):
@@ -202,26 +253,25 @@
 
   Args:
     config_file_path: The path to the config file to open and read.
+
+  Returns:
+    The list of strings in the config file.
   """
   with open(config_file_path) as config_file:
     return config_file.read().splitlines()
 
 
-def validate_config_lists(
-    system_item_list,
-    system_misc_info_keys,
-    other_item_list):
+def validate_config_lists(system_item_list, system_misc_info_keys,
+                          other_item_list):
   """Performs validations on the merge config lists.
 
   Args:
-    system_item_list: The list of items to extract from the partial
-    system target files package as is.
-
-    system_misc_info_keys: A list of keys to obtain from the system instance
-    of META/misc_info.txt. The remaining keys from the other instance.
-
-    other_item_list: The list of items to extract from the partial
-    other target files package as is.
+    system_item_list: The list of items to extract from the partial system
+      target files package as is.
+    system_misc_info_keys: A list of keys to obtain from the system instance of
+      META/misc_info.txt. The remaining keys from the other instance.
+    other_item_list: The list of items to extract from the partial other target
+      files package as is.
 
   Returns:
     False if a validation fails, otherwise true.
@@ -236,7 +286,7 @@
   # by the default config lists.
   difference = default_combined_item_set.difference(combined_item_set)
   if difference:
-    logger.error('Missing merge config items: %s' % list(difference))
+    logger.error('Missing merge config items: %s', list(difference))
     logger.error('Please ensure missing items are in either the '
                  'system-item-list or other-item-list files provided to '
                  'this script.')
@@ -251,11 +301,10 @@
   return True
 
 
-def process_ab_partitions_txt(
-    system_target_files_temp_dir,
-    other_target_files_temp_dir,
-    output_target_files_temp_dir):
-  """Perform special processing for META/ab_partitions.txt
+def process_ab_partitions_txt(system_target_files_temp_dir,
+                              other_target_files_temp_dir,
+                              output_target_files_temp_dir):
+  """Perform special processing for META/ab_partitions.txt.
 
   This function merges the contents of the META/ab_partitions.txt files from
   the system directory and the other directory, placing the merged result in
@@ -266,22 +315,20 @@
   names.
 
   Args:
-    system_target_files_temp_dir: The name of a directory containing the
-    special items extracted from the system target files package.
-
-    other_target_files_temp_dir: The name of a directory containing the
-    special items extracted from the other target files package.
-
-    output_target_files_temp_dir: The name of a directory that will be used
-    to create the output target files package after all the special cases
-    are processed.
+    system_target_files_temp_dir: The name of a directory containing the special
+      items extracted from the system target files package.
+    other_target_files_temp_dir: The name of a directory containing the special
+      items extracted from the other target files package.
+    output_target_files_temp_dir: The name of a directory that will be used to
+      create the output target files package after all the special cases are
+      processed.
   """
 
-  system_ab_partitions_txt = os.path.join(
-      system_target_files_temp_dir, 'META', 'ab_partitions.txt')
+  system_ab_partitions_txt = os.path.join(system_target_files_temp_dir, 'META',
+                                          'ab_partitions.txt')
 
-  other_ab_partitions_txt = os.path.join(
-      other_target_files_temp_dir, 'META', 'ab_partitions.txt')
+  other_ab_partitions_txt = os.path.join(other_target_files_temp_dir, 'META',
+                                         'ab_partitions.txt')
 
   with open(system_ab_partitions_txt) as f:
     system_ab_partitions = f.read().splitlines()
@@ -291,8 +338,8 @@
 
   output_ab_partitions = set(system_ab_partitions + other_ab_partitions)
 
-  output_ab_partitions_txt = os.path.join(
-      output_target_files_temp_dir, 'META', 'ab_partitions.txt')
+  output_ab_partitions_txt = os.path.join(output_target_files_temp_dir, 'META',
+                                          'ab_partitions.txt')
 
   with open(output_ab_partitions_txt, 'w') as output:
     for partition in sorted(output_ab_partitions):
@@ -300,42 +347,92 @@
 
 
 def append_recovery_to_filesystem_config(output_target_files_temp_dir):
-  """Perform special processing for META/filesystem_config.txt
+  """Perform special processing for META/filesystem_config.txt.
 
   This function appends recovery information to META/filesystem_config.txt
   so that recovery patch regeneration will succeed.
 
   Args:
-    output_target_files_temp_dir: The name of a directory that will be used
-    to create the output target files package after all the special cases
-    are processed. We find filesystem_config.txt here.
+    output_target_files_temp_dir: The name of a directory that will be used to
+      create the output target files package after all the special cases are
+      processed. We find filesystem_config.txt here.
   """
 
-  filesystem_config_txt = os.path.join(
-      output_target_files_temp_dir,
-      'META',
-      'filesystem_config.txt')
+  filesystem_config_txt = os.path.join(output_target_files_temp_dir, 'META',
+                                       'filesystem_config.txt')
 
   with open(filesystem_config_txt, 'a') as f:
     # TODO(bpeckham) this data is hard coded. It should be generated
     # programmatically.
-    f.write(
-        'system/bin/install-recovery.sh 0 0 750 '
-        'selabel=u:object_r:install_recovery_exec:s0 capabilities=0x0\n')
-    f.write(
-        'system/recovery-from-boot.p 0 0 644 '
-        'selabel=u:object_r:system_file:s0 capabilities=0x0\n')
-    f.write(
-        'system/etc/recovery.img 0 0 440 '
-        'selabel=u:object_r:install_recovery_exec:s0 capabilities=0x0\n')
+    f.write('system/bin/install-recovery.sh 0 0 750 '
+            'selabel=u:object_r:install_recovery_exec:s0 capabilities=0x0\n')
+    f.write('system/recovery-from-boot.p 0 0 644 '
+            'selabel=u:object_r:system_file:s0 capabilities=0x0\n')
+    f.write('system/etc/recovery.img 0 0 440 '
+            'selabel=u:object_r:install_recovery_exec:s0 capabilities=0x0\n')
 
 
-def process_misc_info_txt(
-    system_target_files_temp_dir,
-    other_target_files_temp_dir,
-    output_target_files_temp_dir,
-    system_misc_info_keys):
-  """Perform special processing for META/misc_info.txt
+def merge_dynamic_partition_info_dicts(system_dict,
+                                       other_dict,
+                                       include_dynamic_partition_list=True,
+                                       size_prefix='',
+                                       size_suffix='',
+                                       list_prefix='',
+                                       list_suffix=''):
+  """Merges dynamic partition info variables.
+
+  Args:
+    system_dict: The dictionary of dynamic partition info variables from the
+      partial system target files.
+    other_dict: The dictionary of dynamic partition info variables from the
+      partial other target files.
+    include_dynamic_partition_list: If true, merges the dynamic_partition_list
+      variable. Not all use cases need this variable merged.
+    size_prefix: The prefix in partition group size variables that precedes the
+      name of the partition group. For example, partition group 'group_a' with
+      corresponding size variable 'super_group_a_group_size' would have the
+      size_prefix 'super_'.
+    size_suffix: Similar to size_prefix but for the variable's suffix. For
+      example, 'super_group_a_group_size' would have size_suffix '_group_size'.
+    list_prefix: Similar to size_prefix but for the partition group's
+      partition_list variable.
+    list_suffix: Similar to size_suffix but for the partition group's
+      partition_list variable.
+
+  Returns:
+    The merged dynamic partition info dictionary.
+  """
+  merged_dict = {}
+  # Partition groups and group sizes are defined by the other (non-system)
+  # dict because these values may vary for each board that uses a shared system
+  # image.
+  merged_dict['super_partition_groups'] = other_dict['super_partition_groups']
+  if include_dynamic_partition_list:
+    system_dynamic_partition_list = system_dict.get('dynamic_partition_list',
+                                                    '')
+    other_dynamic_partition_list = other_dict.get('dynamic_partition_list', '')
+    merged_dict['dynamic_partition_list'] = (
+        '%s %s' %
+        (system_dynamic_partition_list, other_dynamic_partition_list)).strip()
+  for partition_group in merged_dict['super_partition_groups'].split(' '):
+    # Set the partition group's size using the value from the other dict.
+    key = '%s%s%s' % (size_prefix, partition_group, size_suffix)
+    if key not in other_dict:
+      raise ValueError('Other dict does not contain required key %s.' % key)
+    merged_dict[key] = other_dict[key]
+
+    # Set the partition group's partition list using a concatenation of the
+    # system and other partition lists.
+    key = '%s%s%s' % (list_prefix, partition_group, list_suffix)
+    merged_dict[key] = (
+        '%s %s' % (system_dict.get(key, ''), other_dict.get(key, ''))).strip()
+  return merged_dict
+
+
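# Editor's sketch, not part of this change: a worked example of
# merge_dynamic_partition_info_dicts() using the misc_info.txt naming scheme
# that process_misc_info_txt() below passes in. All values are made up.
#
#   example_system_dict = {
#       'dynamic_partition_list': 'system product_services',
#       'super_group_a_partition_list': 'system product_services',
#   }
#   example_other_dict = {
#       'super_partition_groups': 'group_a',
#       'dynamic_partition_list': 'vendor odm',
#       'super_group_a_group_size': '4294967296',
#       'super_group_a_partition_list': 'vendor odm',
#   }
#   example_merged = merge_dynamic_partition_info_dicts(
#       example_system_dict, example_other_dict,
#       size_prefix='super_', size_suffix='_group_size',
#       list_prefix='super_', list_suffix='_partition_list')
#
# example_merged then contains 'super_partition_groups': 'group_a', the group
# size taken from the other dict ('super_group_a_group_size': '4294967296'),
# and both partition lists concatenated:
# 'dynamic_partition_list' and 'super_group_a_partition_list' both become
# 'system product_services vendor odm'.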
+def process_misc_info_txt(system_target_files_temp_dir,
+                          other_target_files_temp_dir,
+                          output_target_files_temp_dir, system_misc_info_keys):
+  """Perform special processing for META/misc_info.txt.
 
   This function merges the contents of the META/misc_info.txt files from the
   system directory and the other directory, placing the merged result in the
@@ -344,18 +441,15 @@
   content.
 
   Args:
-    system_target_files_temp_dir: The name of a directory containing the
-    special items extracted from the system target files package.
-
-    other_target_files_temp_dir: The name of a directory containing the
-    special items extracted from the other target files package.
-
-    output_target_files_temp_dir: The name of a directory that will be used
-    to create the output target files package after all the special cases
-    are processed.
-
-    system_misc_info_keys: A list of keys to obtain from the system instance
-    of META/misc_info.txt. The remaining keys from the other instance.
+    system_target_files_temp_dir: The name of a directory containing the special
+      items extracted from the system target files package.
+    other_target_files_temp_dir: The name of a directory containing the special
+      items extracted from the other target files package.
+    output_target_files_temp_dir: The name of a directory that will be used to
+      create the output target files package after all the special cases are
+      processed.
+    system_misc_info_keys: A list of keys to obtain from the system instance of
+      META/misc_info.txt. The remaining keys from the other instance.
   """
 
   def read_helper(d):
@@ -380,141 +474,97 @@
   # Merge misc info keys used for Dynamic Partitions.
   if (merged_info_dict.get('use_dynamic_partitions') == 'true') and (
       system_info_dict.get('use_dynamic_partitions') == 'true'):
-    merged_info_dict['dynamic_partition_list'] = '%s %s' % (
-        system_info_dict.get('dynamic_partition_list', ''),
-        merged_info_dict.get('dynamic_partition_list', ''))
-    # Partition groups and group sizes are defined by the other (non-system)
-    # misc info file because these values may vary for each board that uses
-    # a shared system image.
-    for partition_group in merged_info_dict['super_partition_groups'].split(' '):
-      if ('super_%s_group_size' % partition_group) not in merged_info_dict:
-        raise common.ExternalError(
-            'Other META/misc_info.txt does not contain required key '
-            'super_%s_group_size.' % partition_group)
-      key = 'super_%s_partition_list' % partition_group
-      merged_info_dict[key] = '%s %s' % (
-        system_info_dict.get(key, ''),
-        merged_info_dict.get(key, ''))
+    merged_dynamic_partitions_dict = merge_dynamic_partition_info_dicts(
+        system_dict=system_info_dict,
+        other_dict=merged_info_dict,
+        size_prefix='super_',
+        size_suffix='_group_size',
+        list_prefix='super_',
+        list_suffix='_partition_list')
+    merged_info_dict.update(merged_dynamic_partitions_dict)
 
-  output_misc_info_txt = os.path.join(
-      output_target_files_temp_dir,
-      'META', 'misc_info.txt')
-
-  sorted_keys = sorted(merged_info_dict.keys())
-
+  output_misc_info_txt = os.path.join(output_target_files_temp_dir, 'META',
+                                      'misc_info.txt')
   with open(output_misc_info_txt, 'w') as output:
+    sorted_keys = sorted(merged_info_dict.keys())
     for key in sorted_keys:
       output.write('{}={}\n'.format(key, merged_info_dict[key]))
 
 
-def process_file_contexts_bin(temp_dir, output_target_files_temp_dir):
-  """Perform special processing for META/file_contexts.bin.
+def process_dynamic_partitions_info_txt(system_target_files_dir,
+                                        other_target_files_dir,
+                                        output_target_files_dir):
+  """Perform special processing for META/dynamic_partitions_info.txt.
 
-  This function combines plat_file_contexts and vendor_file_contexts, which are
-  expected to already be extracted in temp_dir, to produce a merged
-  file_contexts.bin that will land in temp_dir at META/file_contexts.bin.
+  This function merges the contents of the META/dynamic_partitions_info.txt
+  files from the system directory and the other directory, placing the merged
+  result in the output directory.
+
+  This function does nothing if META/dynamic_partitions_info.txt from the other
+  directory does not exist.
 
   Args:
-    temp_dir: The name of a scratch directory that this function can use for
-    intermediate files generated during processing.
-
-    output_target_files_temp_dir: The name of the working directory that must
-    already contain plat_file_contexts and vendor_file_contexts (in the
-    appropriate sub directories), and to which META/file_contexts.bin will be
-    written.
+    system_target_files_dir: The name of a directory containing the special
+      items extracted from the system target files package.
+    other_target_files_dir: The name of a directory containing the special items
+      extracted from the other target files package.
+    output_target_files_dir: The name of a directory that will be used to create
+      the output target files package after all the special cases are processed.
   """
 
-  # To create a merged file_contexts.bin file, we use the system and vendor
-  # file contexts files as input, the m4 tool to combine them, the sorting tool
-  # to sort, and finally the sefcontext_compile tool to generate the final
-  # output. We currently omit a checkfc step since the files had been checked
-  # as part of the build.
+  if not os.path.exists(
+      os.path.join(other_target_files_dir, 'META',
+                   'dynamic_partitions_info.txt')):
+    return
 
-  # The m4 step concatenates the two input files contexts files. Since m4
-  # writes to stdout, we receive that into an array of bytes, and then write it
-  # to a file.
+  def read_helper(d):
+    dynamic_partitions_info_txt = os.path.join(d, 'META',
+                                               'dynamic_partitions_info.txt')
+    with open(dynamic_partitions_info_txt) as f:
+      return list(f.read().splitlines())
 
-  # Collect the file contexts that we're going to combine from SYSTEM, VENDOR,
-  # PRODUCT, and ODM. We require SYSTEM and VENDOR, but others are optional.
+  system_dynamic_partitions_dict = common.LoadDictionaryFromLines(
+      read_helper(system_target_files_dir))
+  other_dynamic_partitions_dict = common.LoadDictionaryFromLines(
+      read_helper(other_target_files_dir))
 
-  file_contexts_list = []
+  merged_dynamic_partitions_dict = merge_dynamic_partition_info_dicts(
+      system_dict=system_dynamic_partitions_dict,
+      other_dict=other_dynamic_partitions_dict,
+      # META/dynamic_partitions_info.txt does not use dynamic_partition_list.
+      include_dynamic_partition_list=False,
+      size_suffix='_size',
+      list_suffix='_partition_list')
 
-  for partition in ['SYSTEM', 'VENDOR', 'PRODUCT', 'ODM']:
-    prefix = 'plat' if partition == 'SYSTEM' else partition.lower()
-
-    file_contexts = os.path.join(
-        output_target_files_temp_dir,
-        partition, 'etc', 'selinux', prefix + '_file_contexts')
-
-    mandatory = partition in ['SYSTEM', 'VENDOR']
-
-    if mandatory or os.path.isfile(file_contexts):
-      file_contexts_list.append(file_contexts)
-    else:
-      logger.warning('file not found: %s', file_contexts)
-
-  command = ['m4', '--fatal-warnings', '-s'] + file_contexts_list
-
-  merged_content = common.RunAndCheckOutput(command, verbose=False)
-
-  merged_file_contexts_txt = os.path.join(temp_dir, 'merged_file_contexts.txt')
-
-  with open(merged_file_contexts_txt, 'wb') as f:
-    f.write(merged_content)
-
-  # The sort step sorts the concatenated file.
-
-  sorted_file_contexts_txt = os.path.join(temp_dir, 'sorted_file_contexts.txt')
-  command = ['fc_sort', merged_file_contexts_txt, sorted_file_contexts_txt]
-  common.RunAndWait(command, verbose=True)
-
-  # Finally, the compile step creates the final META/file_contexts.bin.
-
-  file_contexts_bin = os.path.join(
-      output_target_files_temp_dir,
-      'META', 'file_contexts.bin')
-
-  command = [
-      'sefcontext_compile',
-      '-o', file_contexts_bin,
-      sorted_file_contexts_txt,
-  ]
-
-  common.RunAndWait(command, verbose=True)
+  output_dynamic_partitions_info_txt = os.path.join(
+      output_target_files_dir, 'META', 'dynamic_partitions_info.txt')
+  with open(output_dynamic_partitions_info_txt, 'w') as output:
+    sorted_keys = sorted(merged_dynamic_partitions_dict.keys())
+    for key in sorted_keys:
+      output.write('{}={}\n'.format(key, merged_dynamic_partitions_dict[key]))
 
 
-def process_special_cases(
-    temp_dir,
-    system_target_files_temp_dir,
-    other_target_files_temp_dir,
-    output_target_files_temp_dir,
-    system_misc_info_keys,
-    rebuild_recovery
-):
+def process_special_cases(system_target_files_temp_dir,
+                          other_target_files_temp_dir,
+                          output_target_files_temp_dir, system_misc_info_keys,
+                          rebuild_recovery):
   """Perform special-case processing for certain target files items.
 
   Certain files in the output target files package require special-case
   processing. This function performs all that special-case processing.
 
   Args:
-    temp_dir: The name of a scratch directory that this function can use for
-    intermediate files generated during processing.
-
-    system_target_files_temp_dir: The name of a directory containing the
-    special items extracted from the system target files package.
-
-    other_target_files_temp_dir: The name of a directory containing the
-    special items extracted from the other target files package.
-
-    output_target_files_temp_dir: The name of a directory that will be used
-    to create the output target files package after all the special cases
-    are processed.
-
-    system_misc_info_keys: A list of keys to obtain from the system instance
-    of META/misc_info.txt. The remaining keys from the other instance.
-
+    system_target_files_temp_dir: The name of a directory containing the special
+      items extracted from the system target files package.
+    other_target_files_temp_dir: The name of a directory containing the special
+      items extracted from the other target files package.
+    output_target_files_temp_dir: The name of a directory that will be used to
+      create the output target files package after all the special cases are
+      processed.
+    system_misc_info_keys: A list of keys to obtain from the system instance of
+      META/misc_info.txt. The remaining keys from the other instance.
     rebuild_recovery: If true, rebuild the recovery patch used by non-A/B
-    devices and write it to the system image.
+      devices and write it to the system image.
   """
 
   if 'ab_update' in system_misc_info_keys:
@@ -533,20 +583,17 @@
       output_target_files_temp_dir=output_target_files_temp_dir,
       system_misc_info_keys=system_misc_info_keys)
 
-  process_file_contexts_bin(
-      temp_dir=temp_dir,
-      output_target_files_temp_dir=output_target_files_temp_dir)
+  process_dynamic_partitions_info_txt(
+      system_target_files_dir=system_target_files_temp_dir,
+      other_target_files_dir=other_target_files_temp_dir,
+      output_target_files_dir=output_target_files_temp_dir)
 
 
-def merge_target_files(
-    temp_dir,
-    system_target_files,
-    system_item_list,
-    system_misc_info_keys,
-    other_target_files,
-    other_item_list,
-    output_target_files,
-    rebuild_recovery):
+def merge_target_files(temp_dir, system_target_files, system_item_list,
+                       system_misc_info_keys, other_target_files,
+                       other_item_list, output_target_files, output_dir,
+                       output_item_list, output_ota, output_img,
+                       output_super_empty, rebuild_recovery):
   """Merge two target files packages together.
 
   This function takes system and other target files packages as input, performs
@@ -555,40 +602,36 @@
 
   Args:
     temp_dir: The name of a directory we use when we extract items from the
-    input target files packages, and also a scratch directory that we use for
-    temporary files.
-
+      input target files packages, and also a scratch directory that we use for
+      temporary files.
     system_target_files: The name of the zip archive containing the system
-    partial target files package.
-
+      partial target files package.
     system_item_list: The list of items to extract from the partial system
-    target files package as is, meaning these items will land in the output
-    target files package exactly as they appear in the input partial system
-    target files package.
-
+      target files package as is, meaning these items will land in the output
+      target files package exactly as they appear in the input partial system
+      target files package.
     system_misc_info_keys: The list of keys to obtain from the system instance
-    of META/misc_info.txt. The remaining keys from the other instance.
-
-    other_target_files: The name of the zip archive containing the other
-    partial target files package.
-
-    other_item_list: The list of items to extract from the partial other
-    target files package as is, meaning these items will land in the output
-    target files package exactly as they appear in the input partial other
-    target files package.
-
-    output_target_files: The name of the output zip archive target files
-    package created by merging system and other.
-
+      of META/misc_info.txt. The remaining keys from the other instance.
+    other_target_files: The name of the zip archive containing the other partial
+      target files package.
+    other_item_list: The list of items to extract from the partial other target
+      files package as is, meaning these items will land in the output target
+      files package exactly as they appear in the input partial other target
+      files package.
+    output_target_files: The name of the output zip archive target files package
+      created by merging system and other.
+    output_dir: The destination directory for saving merged files.
+    output_item_list: The list of items to copy into the output_dir.
+    output_ota: The name of the output zip archive ota package.
+    output_img: The name of the output zip archive img package.
+    output_super_empty: If provided, creates a super_empty.img file from the
+      merged target files package and saves it at this path.
     rebuild_recovery: If true, rebuild the recovery patch used by non-A/B
-    devices and write it to the system image.
+      devices and write it to the system image.
   """
 
-  logger.info(
-      'starting: merge system %s and other %s into output %s',
-      system_target_files,
-      other_target_files,
-      output_target_files)
+  logger.info('starting: merge system %s and other %s into output %s',
+              system_target_files, other_target_files, output_target_files)
 
   # Create directory names that we'll use when we extract files from system,
   # and other, and for zipping the final output.
@@ -639,7 +682,6 @@
   # files package are in place.
 
   process_special_cases(
-      temp_dir=temp_dir,
       system_target_files_temp_dir=system_target_files_temp_dir,
       other_target_files_temp_dir=other_target_files_temp_dir,
       output_target_files_temp_dir=output_target_files_temp_dir,
@@ -655,30 +697,75 @@
 
   add_img_to_target_files.main(add_img_args)
 
-  # Finally, create the output target files zip archive.
+  # Create super_empty.img using the merged misc_info.txt.
+
+  misc_info_txt = os.path.join(output_target_files_temp_dir, 'META',
+                               'misc_info.txt')
+
+  def read_helper():
+    with open(misc_info_txt) as f:
+      return list(f.read().splitlines())
+
+  use_dynamic_partitions = common.LoadDictionaryFromLines(
+      read_helper()).get('use_dynamic_partitions')
+
+  if use_dynamic_partitions != 'true' and output_super_empty:
+    raise ValueError(
+        'Building super_empty.img requires use_dynamic_partitions=true.')
+  elif use_dynamic_partitions == 'true':
+    super_empty_img = os.path.join(output_target_files_temp_dir, 'IMAGES',
+                                   'super_empty.img')
+    build_super_image_args = [
+        misc_info_txt,
+        super_empty_img,
+    ]
+    build_super_image.main(build_super_image_args)
+
+    # Copy super_empty.img to the user-provided output_super_empty location.
+    if output_super_empty:
+      shutil.copyfile(super_empty_img, output_super_empty)
+
+  # Create the IMG package from the merged target files (before zipping, in
+  # order to avoid an unnecessary unzip and copy).
+
+  if output_img:
+    img_from_target_files_args = [
+        output_target_files_temp_dir,
+        output_img,
+    ]
+    img_from_target_files.main(img_from_target_files_args)
+
+  # Finally, create the output target files zip archive and/or copy the
+  # output items to the output target files directory.
+
+  if output_dir:
+    copy_items(output_target_files_temp_dir, output_dir, output_item_list)
+
+  if not output_target_files:
+    return
 
   output_zip = os.path.abspath(output_target_files)
   output_target_files_list = os.path.join(temp_dir, 'output.list')
-  output_target_files_meta_dir = os.path.join(
-      output_target_files_temp_dir, 'META')
+  output_target_files_meta_dir = os.path.join(output_target_files_temp_dir,
+                                              'META')
 
-  command = [
+  find_command = [
       'find',
       output_target_files_meta_dir,
   ]
-  # TODO(bpeckham): sort this to be more like build.
-  meta_content = common.RunAndCheckOutput(command, verbose=False)
-  command = [
-      'find',
-      output_target_files_temp_dir,
-      '-path',
-      output_target_files_meta_dir,
-      '-prune',
-      '-o',
-      '-print'
+  find_process = common.Run(find_command, stdout=subprocess.PIPE, verbose=False)
+  meta_content = common.RunAndCheckOutput(['sort'],
+                                          stdin=find_process.stdout,
+                                          verbose=False)
+
+  find_command = [
+      'find', output_target_files_temp_dir, '-path',
+      output_target_files_meta_dir, '-prune', '-o', '-print'
   ]
-  # TODO(bpeckham): sort this to be more like build.
-  other_content = common.RunAndCheckOutput(command, verbose=False)
+  find_process = common.Run(find_command, stdout=subprocess.PIPE, verbose=False)
+  other_content = common.RunAndCheckOutput(['sort'],
+                                           stdin=find_process.stdout,
+                                           verbose=False)
 
   with open(output_target_files_list, 'wb') as f:
     f.write(meta_content)
@@ -687,13 +774,25 @@
   command = [
       'soong_zip',
       '-d',
-      '-o', output_zip,
-      '-C', output_target_files_temp_dir,
-      '-l', output_target_files_list,
+      '-o',
+      output_zip,
+      '-C',
+      output_target_files_temp_dir,
+      '-l',
+      output_target_files_list,
   ]
   logger.info('creating %s', output_target_files)
   common.RunAndWait(command, verbose=True)
 
+  # Create the OTA package from the merged target files package.
+
+  if output_ota:
+    ota_from_target_files_args = [
+        output_zip,
+        output_ota,
+    ]
+    ota_from_target_files.main(ota_from_target_files_args)
+
 
 def call_func_with_temp_dir(func, keep_tmp):
   """Manage the creation and cleanup of the temporary directory.
@@ -702,9 +801,8 @@
   directory. It also cleans up the temporary directory.
 
   Args:
-    func: The function to call. Should accept one parameter, the path to
-    the temporary directory.
-
+    func: The function to call. Should accept one parameter, the path to the
+      temporary directory.
     keep_tmp: Keep the temporary directory after processing is complete.
   """
 
@@ -747,6 +845,16 @@
       OPTIONS.other_item_list = a
     elif o == '--output-target-files':
       OPTIONS.output_target_files = a
+    elif o == '--output-dir':
+      OPTIONS.output_dir = a
+    elif o == '--output-item-list':
+      OPTIONS.output_item_list = a
+    elif o == '--output-ota':
+      OPTIONS.output_ota = a
+    elif o == '--output-img':
+      OPTIONS.output_img = a
+    elif o == '--output-super-empty':
+      OPTIONS.output_super_empty = a
     elif o == '--rebuild_recovery':
       OPTIONS.rebuild_recovery = True
     elif o == '--keep-tmp':
@@ -756,7 +864,8 @@
     return True
 
   args = common.ParseOptions(
-      sys.argv[1:], __doc__,
+      sys.argv[1:],
+      __doc__,
       extra_long_opts=[
           'system-target-files=',
           'system-item-list=',
@@ -764,15 +873,20 @@
           'other-target-files=',
           'other-item-list=',
           'output-target-files=',
+          'output-dir=',
+          'output-item-list=',
+          'output-ota=',
+          'output-img=',
+          'output-super-empty=',
           'rebuild_recovery',
           'keep-tmp',
       ],
       extra_option_handler=option_handler)
 
-  if (len(args) != 0 or
-      OPTIONS.system_target_files is None or
+  if (args or OPTIONS.system_target_files is None or
       OPTIONS.other_target_files is None or
-      OPTIONS.output_target_files is None):
+      (OPTIONS.output_target_files is None and OPTIONS.output_dir is None) or
+      (OPTIONS.output_dir is not None and OPTIONS.output_item_list is None)):
     common.Usage(__doc__)
     sys.exit(1)
 
@@ -791,6 +905,11 @@
   else:
     other_item_list = default_other_item_list
 
+  if OPTIONS.output_item_list:
+    output_item_list = read_config_list(OPTIONS.output_item_list)
+  else:
+    output_item_list = None
+
   if not validate_config_lists(
       system_item_list=system_item_list,
       system_misc_info_keys=system_misc_info_keys,
@@ -806,8 +925,12 @@
           other_target_files=OPTIONS.other_target_files,
           other_item_list=other_item_list,
           output_target_files=OPTIONS.output_target_files,
-          rebuild_recovery=OPTIONS.rebuild_recovery),
-      OPTIONS.keep_tmp)
+          output_dir=OPTIONS.output_dir,
+          output_item_list=output_item_list,
+          output_ota=OPTIONS.output_ota,
+          output_img=OPTIONS.output_img,
+          output_super_empty=OPTIONS.output_super_empty,
+          rebuild_recovery=OPTIONS.rebuild_recovery), OPTIONS.keep_tmp)
 
 
 if __name__ == '__main__':
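
For illustration, a minimal sketch (hypothetical file names; only the flags
themselves come from this change) of how the new merge_target_files.py output
options can be combined in one run. Note that --output-super-empty requires
use_dynamic_partitions=true in the merged misc_info.txt, and --output-dir must
be paired with --output-item-list:

    # Hypothetical invocation; assumes merge_target_files.py is on PATH and
    # that both partial target files packages were built for merging.
    import subprocess

    subprocess.check_call([
        'merge_target_files.py',
        '--system-target-files', 'system-target_files.zip',
        '--other-target-files', 'other-target_files.zip',
        '--output-target-files', 'merged-target_files.zip',
        '--output-ota', 'merged-ota.zip',
        '--output-img', 'merged-img.zip',
        '--output-super-empty', 'super_empty.img',
    ])
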
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 8b55f03..f686ca0 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -38,7 +38,7 @@
   -k  (--package_key) <key>
       Key to use to sign the package (default is the value of
       default_system_dev_certificate from the input target-files's
-      META/misc_info.txt, or "build/target/product/security/testkey" if that
+      META/misc_info.txt, or "build/make/target/product/security/testkey" if that
       value is not specified).
 
       For incremental OTAs, the default value is based on the source
@@ -2266,7 +2266,7 @@
     if OPTIONS.package_key is None:
       OPTIONS.package_key = OPTIONS.info_dict.get(
           "default_system_dev_certificate",
-          "build/target/product/security/testkey")
+          "build/make/target/product/security/testkey")
     # Get signing keys
     OPTIONS.key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
 
diff --git a/tools/releasetools/sign_apex.py b/tools/releasetools/sign_apex.py
new file mode 100755
index 0000000..1778615
--- /dev/null
+++ b/tools/releasetools/sign_apex.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Signs a standalone APEX file.
+
+Usage:  sign_apex [flags] input_apex_file output_apex_file
+
+  --container_key <key>
+      Mandatory flag that specifies the container signing key.
+
+  --payload_key <key>
+      Mandatory flag that specifies the payload signing key.
+
+  --payload_extra_args <args>
+      Optional flag with extra args to pass to the payload signer (e.g.
+      --payload_extra_args="--signing_helper_with_files /path/to/helper").
+"""
+
+import logging
+import shutil
+import sys
+
+import apex_utils
+import common
+
+logger = logging.getLogger(__name__)
+
+
+def main(argv):
+
+  options = {}
+
+  def option_handler(o, a):
+    if o == '--container_key':
+      # Strip the suffix if any, as common.SignFile expects no suffix.
+      DEFAULT_CONTAINER_KEY_SUFFIX = '.x509.pem'
+      if a.endswith(DEFAULT_CONTAINER_KEY_SUFFIX):
+        a = a[:-len(DEFAULT_CONTAINER_KEY_SUFFIX)]
+      options['container_key'] = a
+    elif o == '--payload_key':
+      options['payload_key'] = a
+    elif o == '--payload_extra_args':
+      options['payload_extra_args'] = a
+    else:
+      return False
+    return True
+
+  args = common.ParseOptions(
+      argv, __doc__,
+      extra_opts='',
+      extra_long_opts=[
+          'container_key=',
+          'payload_extra_args=',
+          'payload_key=',
+      ],
+      extra_option_handler=option_handler)
+
+  if (len(args) != 2 or 'container_key' not in options or
+      'payload_key' not in options):
+    common.Usage(__doc__)
+    sys.exit(1)
+
+  common.InitLogging()
+
+  input_zip = args[0]
+  output_zip = args[1]
+  with open(input_zip, 'rb') as input_fp:
+    apex_data = input_fp.read()
+
+  signed_apex = apex_utils.SignApex(
+      apex_data,
+      payload_key=options['payload_key'],
+      container_key=options['container_key'],
+      container_pw=None,
+      codename_to_api_level_map=None,
+      signing_args=options.get('payload_extra_args'))
+
+  shutil.copyfile(signed_apex, output_zip)
+  logger.info("done.")
+
+
+if __name__ == '__main__':
+  try:
+    main(sys.argv[1:])
+  except common.ExternalError:
+    logger.exception("\n   ERROR:\n")
+    sys.exit(1)
+  finally:
+    common.Cleanup()
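
A minimal sketch of exercising the new sign_apex.py programmatically; the key
and APEX paths below are hypothetical, while the flags and the main() entry
point are the ones defined above:

    # Hypothetical example; the container key suffix is stripped by the script
    # before it is handed to common.SignFile.
    import sign_apex

    sign_apex.main([
        '--container_key',
        'build/make/target/product/security/testkey.x509.pem',
        '--payload_key', 'testdata/testkey.key',
        'com.android.example.apex',
        'com.android.example.signed.apex',
    ])
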
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index b4d0136..7de0978 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -56,7 +56,7 @@
 
       where $devkey is the directory part of the value of
       default_system_dev_certificate from the input target-files's
-      META/misc_info.txt.  (Defaulting to "build/target/product/security"
+      META/misc_info.txt.  (Defaulting to "build/make/target/product/security"
       if the value is not present in misc_info.
 
       -d and -k options are added to the set of mappings in the order
@@ -179,6 +179,9 @@
   Returns:
     A dict that contains the updated APEX key mapping, which should be used for
     the current signing.
+
+  Raises:
+    AssertionError: On invalid container / payload key overrides.
   """
   # Apply all the --extra_apex_payload_key options to override the payload
   # signing keys in the given keys_info.
@@ -200,6 +203,24 @@
       key = 'PRESIGNED'
     keys_info[apex] = (keys_info[apex][0], key_map.get(key, key))
 
+  # A PRESIGNED container entails a PRESIGNED payload. Apply this to all the
+  # APEX key pairs. However, a PRESIGNED container with a non-PRESIGNED
+  # payload (overridden via the command line) indicates a config error and is
+  # not allowed.
+  for apex, (payload_key, container_key) in keys_info.items():
+    if container_key != 'PRESIGNED':
+      continue
+    if apex in OPTIONS.extra_apex_payload_keys:
+      payload_override = OPTIONS.extra_apex_payload_keys[apex]
+      assert payload_override == '', \
+          ("Invalid APEX key overrides: {} has PRESIGNED container but "
+           "non-PRESIGNED payload key {}").format(apex, payload_override)
+    if payload_key != 'PRESIGNED':
+      print(
+          "Setting {} payload as PRESIGNED due to PRESIGNED container".format(
+              apex))
+    keys_info[apex] = ('PRESIGNED', 'PRESIGNED')
+
   return keys_info
 
 
@@ -292,7 +313,9 @@
        "not sign this apk).".format("\n  ".join(unknown_files)))
 
   # For all the APEXes, double check that we won't have an APEX that has only
-  # one of the payload / container keys set.
+  # one of the payload / container keys set. Note that a non-PRESIGNED
+  # container with a PRESIGNED payload could be allowed, but is currently
+  # unsupported; it would require changing the SignApex implementation.
   if not apex_keys:
     return
 
@@ -380,77 +403,6 @@
   return data
 
 
-def SignApex(apex_data, payload_key, container_key, container_pw,
-             codename_to_api_level_map, signing_args=None):
-  """Signs the current APEX with the given payload/container keys.
-
-  Args:
-    apex_data: Raw APEX data.
-    payload_key: The path to payload signing key (w/ extension).
-    container_key: The path to container signing key (w/o extension).
-    container_pw: The matching password of the container_key, or None.
-    codename_to_api_level_map: A dict that maps from codename to API level.
-    signing_args: Additional args to be passed to the payload signer.
-
-  Returns:
-    The path to the signed APEX file.
-  """
-  apex_file = common.MakeTempFile(prefix='apex-', suffix='.apex')
-  with open(apex_file, 'wb') as apex_fp:
-    apex_fp.write(apex_data)
-
-  APEX_PAYLOAD_IMAGE = 'apex_payload.img'
-  APEX_PUBKEY = 'apex_pubkey'
-
-  # 1a. Extract and sign the APEX_PAYLOAD_IMAGE entry with the given
-  # payload_key.
-  payload_dir = common.MakeTempDir(prefix='apex-payload-')
-  with zipfile.ZipFile(apex_file) as apex_fd:
-    payload_file = apex_fd.extract(APEX_PAYLOAD_IMAGE, payload_dir)
-
-  payload_info = apex_utils.ParseApexPayloadInfo(payload_file)
-  apex_utils.SignApexPayload(
-      payload_file,
-      payload_key,
-      payload_info['apex.key'],
-      payload_info['Algorithm'],
-      payload_info['Salt'],
-      signing_args)
-
-  # 1b. Update the embedded payload public key.
-  payload_public_key = common.ExtractAvbPublicKey(payload_key)
-
-  common.ZipDelete(apex_file, APEX_PAYLOAD_IMAGE)
-  common.ZipDelete(apex_file, APEX_PUBKEY)
-  apex_zip = zipfile.ZipFile(apex_file, 'a')
-  common.ZipWrite(apex_zip, payload_file, arcname=APEX_PAYLOAD_IMAGE)
-  common.ZipWrite(apex_zip, payload_public_key, arcname=APEX_PUBKEY)
-  common.ZipClose(apex_zip)
-
-  # 2. Align the files at page boundary (same as in apexer).
-  aligned_apex = common.MakeTempFile(
-      prefix='apex-container-', suffix='.apex')
-  common.RunAndCheckOutput(
-      ['zipalign', '-f', '4096', apex_file, aligned_apex])
-
-  # 3. Sign the APEX container with container_key.
-  signed_apex = common.MakeTempFile(prefix='apex-container-', suffix='.apex')
-
-  # Specify the 4K alignment when calling SignApk.
-  extra_signapk_args = OPTIONS.extra_signapk_args[:]
-  extra_signapk_args.extend(['-a', '4096'])
-
-  common.SignFile(
-      aligned_apex,
-      signed_apex,
-      container_key,
-      container_pw,
-      codename_to_api_level_map=codename_to_api_level_map,
-      extra_signapk_args=extra_signapk_args)
-
-  return signed_apex
-
-
 def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
                        apk_keys, apex_keys, key_passwords,
                        platform_api_level, codename_to_api_level_map,
@@ -515,7 +467,7 @@
         print("           : %-*s payload   (%s)" % (
             maxsize, name, payload_key))
 
-        signed_apex = SignApex(
+        signed_apex = apex_utils.SignApex(
             data,
             payload_key,
             container_key,
@@ -819,7 +771,7 @@
     print("for OTA package verification")
   else:
     devkey = misc_info.get("default_system_dev_certificate",
-                           "build/target/product/security/testkey")
+                           "build/make/target/product/security/testkey")
     mapped_devkey = OPTIONS.key_map.get(devkey, devkey)
     if mapped_devkey != devkey:
       misc_info["default_system_dev_certificate"] = mapped_devkey
@@ -978,7 +930,7 @@
   for s, d in key_mapping_options:
     if s is None:   # -d option
       devkey = misc_info.get("default_system_dev_certificate",
-                             "build/target/product/security/testkey")
+                             "build/make/target/product/security/testkey")
       devkeydir = os.path.dirname(devkey)
 
       OPTIONS.key_map.update({
diff --git a/tools/releasetools/test_add_img_to_target_files.py b/tools/releasetools/test_add_img_to_target_files.py
index 482f86c..08e0190 100644
--- a/tools/releasetools/test_add_img_to_target_files.py
+++ b/tools/releasetools/test_add_img_to_target_files.py
@@ -34,18 +34,6 @@
   def setUp(self):
     OPTIONS.input_tmp = common.MakeTempDir()
 
-  def _verifyCareMap(self, expected, file_name):
-    """Parses the care_map.pb; and checks the content in plain text."""
-    text_file = common.MakeTempFile(prefix="caremap-", suffix=".txt")
-
-    # Calls an external binary to convert the proto message.
-    cmd = ["care_map_generator", "--parse_proto", file_name, text_file]
-    common.RunAndCheckOutput(cmd)
-
-    with open(text_file, 'r') as verify_fp:
-      plain_text = verify_fp.read()
-    self.assertEqual('\n'.join(expected), plain_text)
-
   @staticmethod
   def _create_images(images, prefix):
     """Creates images under OPTIONS.input_tmp/prefix."""
@@ -135,6 +123,9 @@
   def _test_AddCareMapForAbOta():
     """Helper function to set up the test for test_AddCareMapForAbOta()."""
     OPTIONS.info_dict = {
+        'extfs_sparse_flag' : '-s',
+        'system_image_size' : 65536,
+        'vendor_image_size' : 40960,
         'system_verity_block_device': '/dev/block/system',
         'vendor_verity_block_device': '/dev/block/vendor',
         'system.build.prop': {
@@ -143,7 +134,7 @@
         },
         'vendor.build.prop': {
             'ro.vendor.build.fingerprint': 'google/sailfish/678:user/dev-keys',
-        }
+        },
     }
 
     # Prepare the META/ folder.
@@ -154,9 +145,9 @@
     system_image = test_utils.construct_sparse_image([
         (0xCAC1, 6),
         (0xCAC3, 4),
-        (0xCAC1, 6)])
+        (0xCAC1, 8)])
     vendor_image = test_utils.construct_sparse_image([
-        (0xCAC2, 10)])
+        (0xCAC2, 12)])
 
     image_paths = {
         'system' : system_image,
@@ -164,6 +155,19 @@
     }
     return image_paths
 
+  def _verifyCareMap(self, expected, file_name):
+    """Parses the care_map.pb; and checks the content in plain text."""
+    text_file = common.MakeTempFile(prefix="caremap-", suffix=".txt")
+
+    # Calls an external binary to convert the proto message.
+    cmd = ["care_map_generator", "--parse_proto", file_name, text_file]
+    common.RunAndCheckOutput(cmd)
+
+    with open(text_file) as verify_fp:
+      plain_text = verify_fp.read()
+    self.assertEqual('\n'.join(expected), plain_text)
+
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_AddCareMapForAbOta(self):
     image_paths = self._test_AddCareMapForAbOta()
 
@@ -179,6 +183,7 @@
 
     self._verifyCareMap(expected, care_map_file)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_AddCareMapForAbOta_withNonCareMapPartitions(self):
     """Partitions without care_map should be ignored."""
     image_paths = self._test_AddCareMapForAbOta()
@@ -196,10 +201,14 @@
 
     self._verifyCareMap(expected, care_map_file)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_AddCareMapForAbOta_withAvb(self):
     """Tests the case for device using AVB."""
     image_paths = self._test_AddCareMapForAbOta()
     OPTIONS.info_dict = {
+        'extfs_sparse_flag' : '-s',
+        'system_image_size' : 65536,
+        'vendor_image_size' : 40960,
         'avb_system_hashtree_enable' : 'true',
         'avb_vendor_hashtree_enable' : 'true',
         'system.build.prop': {
@@ -223,10 +232,14 @@
 
     self._verifyCareMap(expected, care_map_file)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_AddCareMapForAbOta_noFingerprint(self):
     """Tests the case for partitions without fingerprint."""
     image_paths = self._test_AddCareMapForAbOta()
     OPTIONS.info_dict = {
+        'extfs_sparse_flag' : '-s',
+        'system_image_size' : 65536,
+        'vendor_image_size' : 40960,
         'system_verity_block_device': '/dev/block/system',
         'vendor_verity_block_device': '/dev/block/vendor',
     }
@@ -240,10 +253,14 @@
 
     self._verifyCareMap(expected, care_map_file)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_AddCareMapForAbOta_withThumbprint(self):
     """Tests the case for partitions with thumbprint."""
     image_paths = self._test_AddCareMapForAbOta()
     OPTIONS.info_dict = {
+        'extfs_sparse_flag' : '-s',
+        'system_image_size' : 65536,
+        'vendor_image_size' : 40960,
         'system_verity_block_device': '/dev/block/system',
         'vendor_verity_block_device': '/dev/block/vendor',
         'system.build.prop': {
@@ -251,7 +268,7 @@
         },
         'vendor.build.prop' : {
             'ro.vendor.build.thumbprint': 'google/sailfish/456:user/dev-keys',
-        }
+        },
     }
 
     AddCareMapForAbOta(None, ['system', 'vendor'], image_paths)
@@ -266,6 +283,35 @@
 
     self._verifyCareMap(expected, care_map_file)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_AddCareMapForAbOta_skipPartition(self):
+    image_paths = self._test_AddCareMapForAbOta()
+
+    # Remove vendor_image_size to invalidate the care_map for vendor.img.
+    del OPTIONS.info_dict['vendor_image_size']
+
+    AddCareMapForAbOta(None, ['system', 'vendor'], image_paths)
+
+    care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.pb')
+    expected = ['system', RangeSet("0-5 10-15").to_string_raw(),
+                "ro.system.build.fingerprint",
+                "google/sailfish/12345:user/dev-keys"]
+
+    self._verifyCareMap(expected, care_map_file)
+
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_AddCareMapForAbOta_skipAllPartitions(self):
+    image_paths = self._test_AddCareMapForAbOta()
+
+    # Remove the image_size properties for all the partitions.
+    del OPTIONS.info_dict['system_image_size']
+    del OPTIONS.info_dict['vendor_image_size']
+
+    AddCareMapForAbOta(None, ['system', 'vendor'], image_paths)
+
+    self.assertFalse(
+        os.path.exists(os.path.join(OPTIONS.input_tmp, 'META', 'care_map.pb')))
+
   def test_AddCareMapForAbOta_verityNotEnabled(self):
     """No care_map.pb should be generated if verity not enabled."""
     image_paths = self._test_AddCareMapForAbOta()
@@ -282,6 +328,7 @@
     self.assertRaises(AssertionError, AddCareMapForAbOta, None,
                       ['system', 'vendor'], image_paths)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_AddCareMapForAbOta_zipOutput(self):
     """Tests the case with ZIP output."""
     image_paths = self._test_AddCareMapForAbOta()
@@ -304,6 +351,7 @@
                 "google/sailfish/678:user/dev-keys"]
     self._verifyCareMap(expected, os.path.join(temp_dir, care_map_name))
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_AddCareMapForAbOta_zipOutput_careMapEntryExists(self):
     """Tests the case with ZIP output which already has care_map entry."""
     image_paths = self._test_AddCareMapForAbOta()
@@ -338,6 +386,7 @@
     self.assertEqual(
         ['--include_descriptors_from_image', '/path/to/system.img'], cmd)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_AppendVBMetaArgsForPartition_vendorAsChainedPartition(self):
     testdata_dir = test_utils.get_testdata_dir()
     pubkey = os.path.join(testdata_dir, 'testkey.pubkey.pem')
@@ -362,6 +411,7 @@
         (0xCAC3, 4),
         (0xCAC1, 6)])
     OPTIONS.info_dict = {
+        'extfs_sparse_flag' : '-s',
         'system_image_size' : 53248,
     }
     name, care_map = GetCareMap('system', sparse_image)
@@ -377,6 +427,17 @@
         (0xCAC3, 4),
         (0xCAC1, 6)])
     OPTIONS.info_dict = {
+        'extfs_sparse_flag' : '-s',
         'system_image_size' : -45056,
     }
     self.assertRaises(AssertionError, GetCareMap, 'system', sparse_image)
+
+  def test_GetCareMap_nonSparseImage(self):
+    OPTIONS.info_dict = {
+        'system_image_size' : 53248,
+    }
+    # 'foo' is the image filename, which GetCareMap() is not expected to use
+    # for a non-sparse image.
+    name, care_map = GetCareMap('system', 'foo')
+    self.assertEqual('system', name)
+    self.assertEqual(RangeSet("0-12").to_string_raw(), care_map)
diff --git a/tools/releasetools/test_apex_utils.py b/tools/releasetools/test_apex_utils.py
index 2f8ee49..c7d5807 100644
--- a/tools/releasetools/test_apex_utils.py
+++ b/tools/releasetools/test_apex_utils.py
@@ -39,6 +39,7 @@
       payload_fp.write(os.urandom(8192))
     return payload_file
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_ParseApexPayloadInfo(self):
     payload_file = self._GetTestPayload()
     apex_utils.SignApexPayload(
@@ -48,16 +49,20 @@
     self.assertEqual(self.SALT, payload_info['Salt'])
     self.assertEqual('testkey', payload_info['apex.key'])
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_SignApexPayload(self):
     payload_file = self._GetTestPayload()
     apex_utils.SignApexPayload(
         payload_file, self.payload_key, 'testkey', 'SHA256_RSA2048', self.SALT)
     apex_utils.VerifyApexPayload(payload_file, self.payload_key)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_SignApexPayload_withSignerHelper(self):
     payload_file = self._GetTestPayload()
+    signing_helper = os.path.join(self.testdata_dir, 'signing_helper.sh')
+    os.chmod(signing_helper, 0o700)
     payload_signer_args = '--signing_helper_with_files {}'.format(
-        os.path.join(self.testdata_dir, 'signing_helper.sh'))
+        signing_helper)
     apex_utils.SignApexPayload(
         payload_file,
         self.payload_key,
@@ -65,6 +70,7 @@
         payload_signer_args)
     apex_utils.VerifyApexPayload(payload_file, self.payload_key)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_SignApexPayload_invalidKey(self):
     self.assertRaises(
         apex_utils.ApexSigningError,
@@ -75,6 +81,7 @@
         'SHA256_RSA2048',
         self.SALT)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_VerifyApexPayload_wrongKey(self):
     payload_file = self._GetTestPayload()
     apex_utils.SignApexPayload(
diff --git a/tools/releasetools/test_build_image.py b/tools/releasetools/test_build_image.py
index 1cebd0c..b24805f 100644
--- a/tools/releasetools/test_build_image.py
+++ b/tools/releasetools/test_build_image.py
@@ -18,12 +18,13 @@
 import os.path
 
 import common
+import test_utils
 from build_image import (
-    BuildImageError, CheckHeadroom, GetFilesystemCharacteristics, SetUpInDirAndFsConfig)
-from test_utils import ReleaseToolsTestCase
+    BuildImageError, CheckHeadroom, GetFilesystemCharacteristics,
+    SetUpInDirAndFsConfig)
 
 
-class BuildImageTest(ReleaseToolsTestCase):
+class BuildImageTest(test_utils.ReleaseToolsTestCase):
 
   # Available: 1000 blocks.
   EXT4FS_OUTPUT = (
@@ -48,6 +49,7 @@
     self.assertRaises(
         BuildImageError, CheckHeadroom, self.EXT4FS_OUTPUT, prop_dict)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_CheckHeadroom_WrongFsType(self):
     prop_dict = {
         'fs_type' : 'f2fs',
@@ -72,6 +74,7 @@
     self.assertRaises(
         AssertionError, CheckHeadroom, self.EXT4FS_OUTPUT, prop_dict)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_CheckHeadroom_WithMke2fsOutput(self):
     """Tests the result parsing from actual call to mke2fs."""
     input_dir = common.MakeTempDir()
@@ -177,13 +180,14 @@
     self.assertIn('fs-config-root\n', fs_config_data)
     self.assertEqual('/', prop_dict['mount_point'])
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetFilesystemCharacteristics(self):
     input_dir = common.MakeTempDir()
     output_image = common.MakeTempFile(suffix='.img')
     command = ['mkuserimg_mke2fs', input_dir, output_image, 'ext4',
                '/system', '409600', '-j', '0']
     proc = common.Run(command)
-    ext4fs_output, _ = proc.communicate()
+    proc.communicate()
     self.assertEqual(0, proc.returncode)
 
     output_file = common.MakeTempFile(suffix='.img')
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index dce27fe..9b76734 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -315,6 +315,7 @@
     finally:
       os.remove(zip_file_name)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_ZipDelete(self):
     zip_file = tempfile.NamedTemporaryFile(delete=False, suffix='.zip')
     output_zip = zipfile.ZipFile(zip_file.name, 'w',
@@ -376,6 +377,7 @@
     common.ZipClose(output_zip)
     return zip_file
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_UnzipTemp(self):
     zip_file = self._test_UnzipTemp_createZipFile()
     unzipped_dir = common.UnzipTemp(zip_file)
@@ -385,6 +387,7 @@
     self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
     self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_UnzipTemp_withPatterns(self):
     zip_file = self._test_UnzipTemp_createZipFile()
 
@@ -425,6 +428,7 @@
     self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4')))
     self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5')))
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_UnzipTemp_withPartiallyMatchingPatterns(self):
     zip_file = self._test_UnzipTemp_createZipFile()
     unzipped_dir = common.UnzipTemp(zip_file, ['Test*', 'Nonexistent*'])
@@ -451,14 +455,14 @@
       'name="RecoveryLocalizer.apk" certificate="certs/devkey.x509.pem"'
       ' private_key="certs/devkey.pk8"\n'
       'name="Settings.apk"'
-      ' certificate="build/target/product/security/platform.x509.pem"'
-      ' private_key="build/target/product/security/platform.pk8"\n'
+      ' certificate="build/make/target/product/security/platform.x509.pem"'
+      ' private_key="build/make/target/product/security/platform.pk8"\n'
       'name="TV.apk" certificate="PRESIGNED" private_key=""\n'
   )
 
   APKCERTS_CERTMAP1 = {
       'RecoveryLocalizer.apk' : 'certs/devkey',
-      'Settings.apk' : 'build/target/product/security/platform',
+      'Settings.apk' : 'build/make/target/product/security/platform',
       'TV.apk' : 'PRESIGNED',
   }
 
@@ -575,6 +579,7 @@
     wrong_input = os.path.join(self.testdata_dir, 'testkey.pk8')
     self.assertRaises(AssertionError, common.ExtractPublicKey, wrong_input)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_ExtractAvbPublicKey(self):
     privkey = os.path.join(self.testdata_dir, 'testkey.key')
     pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
@@ -594,18 +599,22 @@
       actual = common.ParseCertificate(cert_fp.read())
     self.assertEqual(expected, actual)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetMinSdkVersion(self):
     test_app = os.path.join(self.testdata_dir, 'TestApp.apk')
     self.assertEqual('24', common.GetMinSdkVersion(test_app))
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetMinSdkVersion_invalidInput(self):
     self.assertRaises(
         common.ExternalError, common.GetMinSdkVersion, 'does-not-exist.apk')
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetMinSdkVersionInt(self):
     test_app = os.path.join(self.testdata_dir, 'TestApp.apk')
     self.assertEqual(24, common.GetMinSdkVersionInt(test_app, {}))
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetMinSdkVersionInt_invalidInput(self):
     self.assertRaises(
         common.ExternalError, common.GetMinSdkVersionInt, 'does-not-exist.apk',
@@ -617,6 +626,7 @@
   def setUp(self):
     self.testdata_dir = test_utils.get_testdata_dir()
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetSparseImage_emptyBlockMapFile(self):
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
     with zipfile.ZipFile(target_files, 'w') as target_files_zip:
@@ -649,6 +659,7 @@
         AssertionError, common.GetSparseImage, 'unknown', self.testdata_dir,
         None, False)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetSparseImage_missingBlockMapFile(self):
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
     with zipfile.ZipFile(target_files, 'w') as target_files_zip:
@@ -667,6 +678,7 @@
           AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
           False)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetSparseImage_sharedBlocks_notAllowed(self):
     """Tests the case of having overlapping blocks but disallowed."""
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
@@ -689,6 +701,7 @@
           AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
           False)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetSparseImage_sharedBlocks_allowed(self):
     """Tests the case for target using BOARD_EXT4_SHARE_DUP_BLOCKS := true."""
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
@@ -731,6 +744,7 @@
     self.assertFalse(sparse_image.file_map['__NONZERO-0'].extra)
     self.assertFalse(sparse_image.file_map['/system/file1'].extra)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetSparseImage_incompleteRanges(self):
     """Tests the case of ext4 images with holes."""
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
@@ -754,6 +768,7 @@
     self.assertFalse(sparse_image.file_map['/system/file1'].extra)
     self.assertTrue(sparse_image.file_map['/system/file2'].extra['incomplete'])
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetSparseImage_systemRootImage_filenameWithExtraLeadingSlash(self):
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
     with zipfile.ZipFile(target_files, 'w') as target_files_zip:
@@ -781,6 +796,7 @@
     self.assertTrue(
         sparse_image.file_map['/system/app/file3'].extra['incomplete'])
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetSparseImage_systemRootImage_nonSystemFiles(self):
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
     with zipfile.ZipFile(target_files, 'w') as target_files_zip:
@@ -803,6 +819,7 @@
     self.assertFalse(sparse_image.file_map['//system/file1'].extra)
     self.assertTrue(sparse_image.file_map['//init.rc'].extra['incomplete'])
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetSparseImage_fileNotFound(self):
     target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
     with zipfile.ZipFile(target_files, 'w') as target_files_zip:
@@ -822,6 +839,7 @@
           AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
           False)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetAvbChainedPartitionArg(self):
     pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
     info_dict = {
@@ -835,6 +853,7 @@
     self.assertEqual('2', args[1])
     self.assertTrue(os.path.exists(args[2]))
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetAvbChainedPartitionArg_withPrivateKey(self):
     key = os.path.join(self.testdata_dir, 'testkey.key')
     info_dict = {
@@ -848,6 +867,7 @@
     self.assertEqual('2', args[1])
     self.assertTrue(os.path.exists(args[2]))
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetAvbChainedPartitionArg_withSpecifiedKey(self):
     info_dict = {
         'avb_avbtool': 'avbtool',
@@ -862,6 +882,7 @@
     self.assertEqual('2', args[1])
     self.assertTrue(os.path.exists(args[2]))
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetAvbChainedPartitionArg_invalidKey(self):
     pubkey = os.path.join(self.testdata_dir, 'testkey_with_passwd.x509.pem')
     info_dict = {
@@ -922,6 +943,7 @@
       self.assertIn('/', loaded_dict['fstab'])
       self.assertIn('/system', loaded_dict['fstab'])
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_LoadInfoDict_dirInput(self):
     target_files = self._test_LoadInfoDict_createTargetFiles(
         self.INFO_DICT_DEFAULT,
@@ -933,6 +955,7 @@
     self.assertIn('/', loaded_dict['fstab'])
     self.assertIn('/system', loaded_dict['fstab'])
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_LoadInfoDict_dirInput_legacyRecoveryFstabPath(self):
     target_files = self._test_LoadInfoDict_createTargetFiles(
         self.INFO_DICT_DEFAULT,
@@ -990,6 +1013,7 @@
       self.assertEqual(2, loaded_dict['fstab_version'])
       self.assertIsNone(loaded_dict['fstab'])
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_LoadInfoDict_missingMetaMiscInfoTxt(self):
     target_files = self._test_LoadInfoDict_createTargetFiles(
         self.INFO_DICT_DEFAULT,
@@ -998,6 +1022,7 @@
     with zipfile.ZipFile(target_files, 'r') as target_files_zip:
       self.assertRaises(ValueError, common.LoadInfoDict, target_files_zip)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_LoadInfoDict_repacking(self):
     target_files = self._test_LoadInfoDict_createTargetFiles(
         self.INFO_DICT_DEFAULT,
@@ -1066,6 +1091,7 @@
     validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
                                                         self._info)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_recovery_from_boot(self):
     recovery_image = common.File("recovery.img", self.recovery_data)
     self._out_tmp_sink("recovery.img", recovery_image.data, "IMAGES")
diff --git a/tools/releasetools/test_merge_target_files.py b/tools/releasetools/test_merge_target_files.py
index bb9ce8e..3f15d8f 100644
--- a/tools/releasetools/test_merge_target_files.py
+++ b/tools/releasetools/test_merge_target_files.py
@@ -18,9 +18,11 @@
 
 import common
 import test_utils
-from merge_target_files import (
-    read_config_list, validate_config_lists, default_system_item_list,
-    default_other_item_list, default_system_misc_info_keys)
+from merge_target_files import (read_config_list, validate_config_lists,
+                                default_system_item_list,
+                                default_other_item_list,
+                                default_system_misc_info_keys, copy_items,
+                                merge_dynamic_partition_info_dicts)
 
 
 class MergeTargetFilesTest(test_utils.ReleaseToolsTestCase):
@@ -28,6 +30,58 @@
   def setUp(self):
     self.testdata_dir = test_utils.get_testdata_dir()
 
+  def test_copy_items_CopiesItemsMatchingPatterns(self):
+
+    def createEmptyFile(path):
+      if not os.path.exists(os.path.dirname(path)):
+        os.makedirs(os.path.dirname(path))
+      open(path, 'a').close()
+      return path
+
+    def createSymLink(source, dest):
+      os.symlink(source, dest)
+      return dest
+
+    def getRelPaths(start, filepaths):
+      return set(
+          os.path.relpath(path=filepath, start=start) for filepath in filepaths)
+
+    input_dir = common.MakeTempDir()
+    output_dir = common.MakeTempDir()
+    expected_copied_items = []
+    actual_copied_items = []
+    patterns = ['*.cpp', 'subdir/*.txt']
+
+    # Create various files that we expect to get copied because they
+    # match one of the patterns.
+    expected_copied_items.extend([
+        createEmptyFile(os.path.join(input_dir, 'a.cpp')),
+        createEmptyFile(os.path.join(input_dir, 'b.cpp')),
+        createEmptyFile(os.path.join(input_dir, 'subdir', 'c.txt')),
+        createEmptyFile(os.path.join(input_dir, 'subdir', 'd.txt')),
+        createEmptyFile(
+            os.path.join(input_dir, 'subdir', 'subsubdir', 'e.txt')),
+        createSymLink('a.cpp', os.path.join(input_dir, 'a_link.cpp')),
+    ])
+    # Create some more files that we expect to not get copied.
+    createEmptyFile(os.path.join(input_dir, 'a.h'))
+    createEmptyFile(os.path.join(input_dir, 'b.h'))
+    createEmptyFile(os.path.join(input_dir, 'subdir', 'subsubdir', 'f.gif'))
+    createSymLink('a.h', os.path.join(input_dir, 'a_link.h'))
+
+    # Copy items.
+    copy_items(input_dir, output_dir, patterns)
+
+    # Assert the actual copied items match the ones we expected.
+    for dirpath, _, filenames in os.walk(output_dir):
+      actual_copied_items.extend(
+          os.path.join(dirpath, filename) for filename in filenames)
+    self.assertEqual(
+        getRelPaths(output_dir, actual_copied_items),
+        getRelPaths(input_dir, expected_copied_items))
+    self.assertEqual(
+        os.readlink(os.path.join(output_dir, 'a_link.cpp')), 'a.cpp')
+
   def test_read_config_list(self):
     system_item_list_file = os.path.join(self.testdata_dir,
                                          'merge_config_system_item_list')
@@ -75,3 +129,34 @@
       self.assertFalse(
           validate_config_lists(default_system_item_list, system_misc_info_keys,
                                 default_other_item_list))
+
+  def test_merge_dynamic_partition_info_dicts_ReturnsMergedDict(self):
+    system_dict = {
+        'super_partition_groups': 'group_a',
+        'dynamic_partition_list': 'system',
+        'super_group_a_list': 'system',
+    }
+    other_dict = {
+        'super_partition_groups': 'group_a group_b',
+        'dynamic_partition_list': 'vendor product',
+        'super_group_a_list': 'vendor',
+        'super_group_a_size': '1000',
+        'super_group_b_list': 'product',
+        'super_group_b_size': '2000',
+    }
+    merged_dict = merge_dynamic_partition_info_dicts(
+        system_dict=system_dict,
+        other_dict=other_dict,
+        size_prefix='super_',
+        size_suffix='_size',
+        list_prefix='super_',
+        list_suffix='_list')
+    expected_merged_dict = {
+        'super_partition_groups': 'group_a group_b',
+        'dynamic_partition_list': 'system vendor product',
+        'super_group_a_list': 'system vendor',
+        'super_group_a_size': '1000',
+        'super_group_b_list': 'product',
+        'super_group_b_size': '2000',
+    }
+    self.assertEqual(merged_dict, expected_merged_dict)
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 466fde1..ee831e3 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -425,7 +425,6 @@
     }
 
     common.OPTIONS.search_path = test_utils.get_search_path()
-    self.assertIsNotNone(common.OPTIONS.search_path)
 
   def test_GetPackageMetadata_abOta_full(self):
     target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
@@ -582,6 +581,7 @@
         },
         metadata)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetTargetFilesZipForSecondaryImages(self):
     input_file = construct_target_files(secondary=True)
     target_file = GetTargetFilesZipForSecondaryImages(input_file)
@@ -600,6 +600,7 @@
     self.assertNotIn('IMAGES/system_other.img', namelist)
     self.assertNotIn('IMAGES/system.map', namelist)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetTargetFilesZipForSecondaryImages_skipPostinstall(self):
     input_file = construct_target_files(secondary=True)
     target_file = GetTargetFilesZipForSecondaryImages(
@@ -619,6 +620,7 @@
     self.assertNotIn('IMAGES/system.map', namelist)
     self.assertNotIn(POSTINSTALL_CONFIG, namelist)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetTargetFilesZipForSecondaryImages_withoutRadioImages(self):
     input_file = construct_target_files(secondary=True)
     common.ZipDelete(input_file, 'RADIO/bootloader.img')
@@ -639,12 +641,14 @@
     self.assertNotIn('RADIO/bootloader.img', namelist)
     self.assertNotIn('RADIO/modem.img', namelist)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetTargetFilesZipWithoutPostinstallConfig(self):
     input_file = construct_target_files()
     target_file = GetTargetFilesZipWithoutPostinstallConfig(input_file)
     with zipfile.ZipFile(target_file) as verify_zip:
       self.assertNotIn(POSTINSTALL_CONFIG, verify_zip.namelist())
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetTargetFilesZipWithoutPostinstallConfig_missingEntry(self):
     input_file = construct_target_files()
     common.ZipDelete(input_file, POSTINSTALL_CONFIG)
@@ -675,20 +679,25 @@
     FinalizeMetadata(metadata, zip_file, output_file, needed_property_files)
     self.assertIn('ota-test-property-files', metadata)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_FinalizeMetadata(self):
     self._test_FinalizeMetadata()
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_FinalizeMetadata_withNoSigning(self):
     common.OPTIONS.no_signing = True
     self._test_FinalizeMetadata()
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_FinalizeMetadata_largeEntry(self):
     self._test_FinalizeMetadata(large_entry=True)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_FinalizeMetadata_largeEntry_withNoSigning(self):
     common.OPTIONS.no_signing = True
     self._test_FinalizeMetadata(large_entry=True)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_FinalizeMetadata_insufficientSpace(self):
     entries = [
         'required-entry1',
@@ -766,6 +775,7 @@
           expected = entry.replace('.', '-').upper().encode()
         self.assertEqual(expected, input_fp.read(size))
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_Compute(self):
     entries = (
         'required-entry1',
@@ -805,6 +815,7 @@
     with zipfile.ZipFile(zip_file, 'r') as zip_fp:
       self.assertRaises(KeyError, property_files.Compute, zip_fp)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_Finalize(self):
     entries = [
         'required-entry1',
@@ -825,6 +836,7 @@
     entries[2] = 'metadata'
     self._verify_entries(zip_file, tokens, entries)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_Finalize_assertReservedLength(self):
     entries = (
         'required-entry1',
@@ -998,6 +1010,7 @@
         ),
         property_files.optional)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetPayloadMetadataOffsetAndSize(self):
     target_file = construct_target_files()
     payload = Payload()
@@ -1071,6 +1084,7 @@
 
     return zip_file
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_Compute(self):
     zip_file = self.construct_zip_package_withValidPayload()
     property_files = AbOtaPropertyFiles()
@@ -1084,6 +1098,7 @@
     self._verify_entries(
         zip_file, tokens, ('care_map.txt', 'compatibility.zip'))
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_Finalize(self):
     zip_file = self.construct_zip_package_withValidPayload(with_metadata=True)
     property_files = AbOtaPropertyFiles()
@@ -1099,6 +1114,7 @@
     self._verify_entries(
         zip_file, tokens, ('care_map.txt', 'compatibility.zip'))
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_Verify(self):
     zip_file = self.construct_zip_package_withValidPayload(with_metadata=True)
     property_files = AbOtaPropertyFiles()
@@ -1204,6 +1220,7 @@
 
   def test_GetKeySizeInBytes_512Bytes(self):
     signing_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
+    # pylint: disable=protected-access
     key_size = PayloadSigner._GetKeySizeInBytes(signing_key)
     self.assertEqual(512, key_size)
 
@@ -1233,6 +1250,7 @@
     """Uses testdata/payload_signer.sh as the external payload signer."""
     common.OPTIONS.payload_signer = os.path.join(
         self.testdata_dir, 'payload_signer.sh')
+    os.chmod(common.OPTIONS.payload_signer, 0o700)
     common.OPTIONS.payload_signer_args = [
         os.path.join(self.testdata_dir, 'testkey.pk8')]
     payload_signer = PayloadSigner()
@@ -1272,14 +1290,17 @@
     payload.Generate(target_file, source_file)
     return payload
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_Generate_full(self):
     payload = self._create_payload_full()
     self.assertTrue(os.path.exists(payload.payload_file))
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_Generate_incremental(self):
     payload = self._create_payload_incremental()
     self.assertTrue(os.path.exists(payload.payload_file))
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_Generate_additionalArgs(self):
     target_file = construct_target_files()
     source_file = construct_target_files()
@@ -1290,12 +1311,14 @@
         target_file, additional_args=["--source_image", source_file])
     self.assertTrue(os.path.exists(payload.payload_file))
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_Generate_invalidInput(self):
     target_file = construct_target_files()
     common.ZipDelete(target_file, 'IMAGES/vendor.img')
     payload = Payload()
     self.assertRaises(common.ExternalError, payload.Generate, target_file)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_Sign_full(self):
     payload = self._create_payload_full()
     payload.Sign(PayloadSigner())
@@ -1309,6 +1332,7 @@
         os.path.join(self.testdata_dir, 'testkey.x509.pem'),
         output_file)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_Sign_incremental(self):
     payload = self._create_payload_incremental()
     payload.Sign(PayloadSigner())
@@ -1322,6 +1346,7 @@
         os.path.join(self.testdata_dir, 'testkey.x509.pem'),
         output_file)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_Sign_withDataWipe(self):
     common.OPTIONS.wipe_user_data = True
     payload = self._create_payload_full()
@@ -1330,6 +1355,7 @@
     with open(payload.payload_properties) as properties_fp:
       self.assertIn("POWERWASH=1", properties_fp.read())
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_Sign_secondary(self):
     payload = self._create_payload_full(secondary=True)
     payload.Sign(PayloadSigner())
@@ -1337,6 +1363,7 @@
     with open(payload.payload_properties) as properties_fp:
       self.assertIn("SWITCH_SLOT_ON_REBOOT=0", properties_fp.read())
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_Sign_badSigner(self):
     """Tests that signing failure can be captured."""
     payload = self._create_payload_full()
@@ -1344,6 +1371,7 @@
     payload_signer.signer_args.append('bad-option')
     self.assertRaises(common.ExternalError, payload.Sign, payload_signer)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_WriteToZip(self):
     payload = self._create_payload_full()
     payload.Sign(PayloadSigner())
@@ -1365,6 +1393,7 @@
           continue
         self.assertEqual(zipfile.ZIP_STORED, entry_info.compress_type)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_WriteToZip_unsignedPayload(self):
     """Unsigned payloads should not be allowed to be written to zip."""
     payload = self._create_payload_full()
@@ -1380,6 +1409,7 @@
     with zipfile.ZipFile(output_file, 'w') as output_zip:
       self.assertRaises(AssertionError, payload.WriteToZip, output_zip)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_WriteToZip_secondary(self):
     payload = self._create_payload_full(secondary=True)
     payload.Sign(PayloadSigner())
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 710fde5..43f3eba 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -34,8 +34,8 @@
 </policy>"""
 
   # pylint: disable=line-too-long
-  APEX_KEYS_TXT = """name="apex.apexd_test.apex" public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package.avbpubkey" private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem" container_certificate="build/target/product/security/testkey.x509.pem" container_private_key="build/target/product/security/testkey.pk8"
-name="apex.apexd_test_different_app.apex" public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" container_certificate="build/target/product/security/testkey.x509.pem" container_private_key="build/target/product/security/testkey.pk8"
+  APEX_KEYS_TXT = """name="apex.apexd_test.apex" public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package.avbpubkey" private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem" container_certificate="build/make/target/product/security/testkey.x509.pem" container_private_key="build/make/target/product/security/testkey.pk8"
+name="apex.apexd_test_different_app.apex" public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" container_certificate="build/make/target/product/security/testkey.x509.pem" container_private_key="build/make/target/product/security/testkey.pk8"
 """
 
   def setUp(self):
@@ -419,10 +419,10 @@
     self.assertEqual({
         'apex.apexd_test.apex': (
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
-            'build/target/product/security/testkey'),
+            'build/make/target/product/security/testkey'),
         'apex.apexd_test_different_app.apex': (
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
-            'build/target/product/security/testkey'),
+            'build/make/target/product/security/testkey'),
         }, keys_info)
 
   def test_ReadApexKeysInfo_mismatchingContainerKeys(self):
@@ -431,8 +431,8 @@
         'name="apex.apexd_test_different_app2.apex" '
         'public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" '
         'private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" '
-        'container_certificate="build/target/product/security/testkey.x509.pem" '
-        'container_private_key="build/target/product/security/testkey2.pk8"')
+        'container_certificate="build/make/target/product/security/testkey.x509.pem" '
+        'container_private_key="build/make/target/product/security/testkey2.pk8"')
     target_files = common.MakeTempFile(suffix='.zip')
     with zipfile.ZipFile(target_files, 'w') as target_files_zip:
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
@@ -445,8 +445,8 @@
     apex_keys = self.APEX_KEYS_TXT + (
         'name="apex.apexd_test_different_app2.apex" '
         'public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" '
-        'container_certificate="build/target/product/security/testkey.x509.pem" '
-        'container_private_key="build/target/product/security/testkey.pk8"')
+        'container_certificate="build/make/target/product/security/testkey.x509.pem" '
+        'container_private_key="build/make/target/product/security/testkey.pk8"')
     target_files = common.MakeTempFile(suffix='.zip')
     with zipfile.ZipFile(target_files, 'w') as target_files_zip:
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
@@ -457,10 +457,10 @@
     self.assertEqual({
         'apex.apexd_test.apex': (
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
-            'build/target/product/security/testkey'),
+            'build/make/target/product/security/testkey'),
         'apex.apexd_test_different_app.apex': (
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
-            'build/target/product/security/testkey'),
+            'build/make/target/product/security/testkey'),
         }, keys_info)
 
   def test_ReadApexKeysInfo_missingPayloadPublicKey(self):
@@ -468,8 +468,8 @@
     apex_keys = self.APEX_KEYS_TXT + (
         'name="apex.apexd_test_different_app2.apex" '
         'private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" '
-        'container_certificate="build/target/product/security/testkey.x509.pem" '
-        'container_private_key="build/target/product/security/testkey.pk8"')
+        'container_certificate="build/make/target/product/security/testkey.x509.pem" '
+        'container_private_key="build/make/target/product/security/testkey.pk8"')
     target_files = common.MakeTempFile(suffix='.zip')
     with zipfile.ZipFile(target_files, 'w') as target_files_zip:
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
@@ -480,10 +480,33 @@
     self.assertEqual({
         'apex.apexd_test.apex': (
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
-            'build/target/product/security/testkey'),
+            'build/make/target/product/security/testkey'),
         'apex.apexd_test_different_app.apex': (
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
-            'build/target/product/security/testkey'),
+            'build/make/target/product/security/testkey'),
+        }, keys_info)
+
+  def test_ReadApexKeysInfo_presignedKeys(self):
+    apex_keys = self.APEX_KEYS_TXT + (
+        'name="apex.apexd_test_different_app2.apex" '
+        'private_key="PRESIGNED" '
+        'public_key="PRESIGNED" '
+        'container_certificate="PRESIGNED" '
+        'container_private_key="PRESIGNED"')
+    target_files = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+      target_files_zip.writestr('META/apexkeys.txt', apex_keys)
+
+    with zipfile.ZipFile(target_files) as target_files_zip:
+      keys_info = ReadApexKeysInfo(target_files_zip)
+
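+    # The PRESIGNED entry is expected to be skipped, leaving only the two
+    # default test keys in the parsed result below.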
+    self.assertEqual({
+        'apex.apexd_test.apex': (
+            'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
+            'build/make/target/product/security/testkey'),
+        'apex.apexd_test_different_app.apex': (
+            'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
+            'build/make/target/product/security/testkey'),
         }, keys_info)
 
   def test_ReadApexKeysInfo_presignedKeys(self):
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
old mode 100644
new mode 100755
index edb3d41..1e919f7
--- a/tools/releasetools/test_utils.py
+++ b/tools/releasetools/test_utils.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python
 #
 # Copyright (C) 2018 The Android Open Source Project
 #
@@ -30,6 +31,18 @@
 # Some test runner doesn't like outputs from stderr.
 logging.basicConfig(stream=sys.stdout)
 
+# Use ANDROID_BUILD_TOP as an indicator of whether the needed tools (e.g.
+# avbtool, mke2fs) are available while running the tests. If the variable is
+# unset or empty, we can't run the tests that require external tools.
+EXTERNAL_TOOLS_UNAVAILABLE = not os.environ.get("ANDROID_BUILD_TOP")
+
+
+def SkipIfExternalToolsUnavailable():
+  """Decorator function that allows skipping tests per tools availability."""
+  if EXTERNAL_TOOLS_UNAVAILABLE:
+    return unittest.skip('External tools unavailable')
+  return lambda func: func
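+
+# Illustrative usage (see test_validate_target_files.py in this change):
+# decorate any test that needs avbtool or other host tools, e.g.
+#
+#   @SkipIfExternalToolsUnavailable()
+#   def test_something_requiring_avbtool(self):  # hypothetical test name
+#     ...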
+
 
 def get_testdata_dir():
   """Returns the testdata dir, in relative to the script dir."""
@@ -40,6 +53,19 @@
 
 def get_search_path():
   """Returns the search path that has 'framework/signapk.jar' under."""
+
+  def signapk_exists(path):
+    signapk_path = os.path.realpath(
+        os.path.join(path, 'framework', 'signapk.jar'))
+    return os.path.exists(signapk_path)
+
+  # Try with ANDROID_BUILD_TOP first.
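+  # (i.e. $ANDROID_BUILD_TOP/out/host/linux-x86, where framework/signapk.jar
+  # is expected to be.)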
+  full_path = os.path.realpath(os.path.join(
+      os.environ.get('ANDROID_BUILD_TOP', ''), 'out', 'host', 'linux-x86'))
+  if signapk_exists(full_path):
+    return full_path
+
+  # Otherwise, fall back to the relative paths below.
   current_dir = os.path.dirname(os.path.realpath(__file__))
   for path in (
       # In relative to 'build/make/tools/releasetools' in the Android source.
@@ -47,9 +73,7 @@
       # Or running the script unpacked from otatools.zip.
       ['..']):
     full_path = os.path.realpath(os.path.join(current_dir, *path))
-    signapk_path = os.path.realpath(
-        os.path.join(full_path, 'framework', 'signapk.jar'))
-    if os.path.exists(signapk_path):
+    if signapk_exists(full_path):
       return full_path
   return None
 
@@ -123,3 +147,10 @@
 
   def tearDown(self):
     common.Cleanup()
+
+
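+# Allows running the tests in this file directly, e.g. "python test_utils.py".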
+if __name__ == '__main__':
+  testsuite = unittest.TestLoader().discover(
+      os.path.dirname(os.path.realpath(__file__)))
+  # atest needs a verbosity level of >= 2 to correctly parse the result.
+  unittest.TextTestRunner(verbosity=2).run(testsuite)
diff --git a/tools/releasetools/test_validate_target_files.py b/tools/releasetools/test_validate_target_files.py
index 5f619ec..70e3b49 100644
--- a/tools/releasetools/test_validate_target_files.py
+++ b/tools/releasetools/test_validate_target_files.py
@@ -55,6 +55,7 @@
         0, proc.returncode,
         "Failed to sign boot image with boot_signer: {}".format(stdoutdata))
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_ValidateVerifiedBootImages_bootImage(self):
     input_tmp = common.MakeTempDir()
     os.mkdir(os.path.join(input_tmp, 'IMAGES'))
@@ -69,6 +70,7 @@
     }
     ValidateVerifiedBootImages(input_tmp, info_dict, options)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_ValidateVerifiedBootImages_bootImage_wrongKey(self):
     input_tmp = common.MakeTempDir()
     os.mkdir(os.path.join(input_tmp, 'IMAGES'))
@@ -85,6 +87,7 @@
         AssertionError, ValidateVerifiedBootImages, input_tmp, info_dict,
         options)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_ValidateVerifiedBootImages_bootImage_corrupted(self):
     input_tmp = common.MakeTempDir()
     os.mkdir(os.path.join(input_tmp, 'IMAGES'))
@@ -139,6 +142,7 @@
     # Append the verity metadata.
     verity_image_builder.Build(output_file)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_ValidateVerifiedBootImages_systemImage(self):
     input_tmp = common.MakeTempDir()
     os.mkdir(os.path.join(input_tmp, 'IMAGES'))
@@ -162,6 +166,7 @@
     }
     ValidateVerifiedBootImages(input_tmp, info_dict, options)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_ValidateFileConsistency_incompleteRange(self):
     input_tmp = common.MakeTempDir()
     os.mkdir(os.path.join(input_tmp, 'IMAGES'))
diff --git a/tools/releasetools/test_verity_utils.py b/tools/releasetools/test_verity_utils.py
index e0607c8..1cc539f 100644
--- a/tools/releasetools/test_verity_utils.py
+++ b/tools/releasetools/test_verity_utils.py
@@ -24,7 +24,8 @@
 import common
 import sparse_img
 from rangelib import RangeSet
-from test_utils import get_testdata_dir, ReleaseToolsTestCase
+from test_utils import (
+    get_testdata_dir, ReleaseToolsTestCase, SkipIfExternalToolsUnavailable)
 from verity_utils import (
     CreateHashtreeInfoGenerator, CreateVerityImageBuilder, HashtreeInfo,
     VerifiedBootVersion1HashtreeInfoGenerator)
@@ -89,6 +90,7 @@
 
     return output_file
 
+  @SkipIfExternalToolsUnavailable()
   def test_CreateHashtreeInfoGenerator(self):
     image_file = sparse_img.SparseImage(self._generate_image())
 
@@ -99,6 +101,7 @@
     self.assertEqual(self.partition_size, generator.partition_size)
     self.assertTrue(generator.fec_supported)
 
+  @SkipIfExternalToolsUnavailable()
   def test_DecomposeSparseImage(self):
     image_file = sparse_img.SparseImage(self._generate_image())
 
@@ -109,6 +112,7 @@
     self.assertEqual(12288, generator.hashtree_size)
     self.assertEqual(32768, generator.metadata_size)
 
+  @SkipIfExternalToolsUnavailable()
   def test_ParseHashtreeMetadata(self):
     image_file = sparse_img.SparseImage(self._generate_image())
     generator = VerifiedBootVersion1HashtreeInfoGenerator(
@@ -123,6 +127,7 @@
     self.assertEqual(self.fixed_salt, generator.hashtree_info.salt)
     self.assertEqual(self.expected_root_hash, generator.hashtree_info.root_hash)
 
+  @SkipIfExternalToolsUnavailable()
   def test_ValidateHashtree_smoke(self):
     generator = VerifiedBootVersion1HashtreeInfoGenerator(
         self.partition_size, 4096, True)
@@ -138,6 +143,7 @@
 
     self.assertTrue(generator.ValidateHashtree())
 
+  @SkipIfExternalToolsUnavailable()
   def test_ValidateHashtree_failure(self):
     generator = VerifiedBootVersion1HashtreeInfoGenerator(
         self.partition_size, 4096, True)
@@ -153,6 +159,7 @@
 
     self.assertFalse(generator.ValidateHashtree())
 
+  @SkipIfExternalToolsUnavailable()
   def test_Generate(self):
     image_file = sparse_img.SparseImage(self._generate_image())
     generator = CreateHashtreeInfoGenerator('system', 4096, self.prop_dict)
@@ -193,6 +200,7 @@
     del prop_dict['verity_block_device']
     self.assertIsNone(CreateVerityImageBuilder(prop_dict))
 
+  @SkipIfExternalToolsUnavailable()
   def test_CalculateMaxImageSize(self):
     verity_image_builder = CreateVerityImageBuilder(self.DEFAULT_PROP_DICT)
     size = verity_image_builder.CalculateMaxImageSize()
@@ -221,11 +229,13 @@
     cmd = ['verity_verifier', image, '-mincrypt', verify_key]
     common.RunAndCheckOutput(cmd)
 
+  @SkipIfExternalToolsUnavailable()
   def test_Build(self):
     self._BuildAndVerify(
         self.DEFAULT_PROP_DICT,
         os.path.join(get_testdata_dir(), 'testkey_mincrypt'))
 
+  @SkipIfExternalToolsUnavailable()
   def test_Build_SanityCheck(self):
     # A sanity check for the test itself: the image shouldn't be verifiable
     # with wrong key.
@@ -235,6 +245,7 @@
         self.DEFAULT_PROP_DICT,
         os.path.join(get_testdata_dir(), 'verity_mincrypt'))
 
+  @SkipIfExternalToolsUnavailable()
   def test_Build_FecDisabled(self):
     prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
     del prop_dict['verity_fec']
@@ -242,6 +253,7 @@
         prop_dict,
         os.path.join(get_testdata_dir(), 'testkey_mincrypt'))
 
+  @SkipIfExternalToolsUnavailable()
   def test_Build_SquashFs(self):
     verity_image_builder = CreateVerityImageBuilder(self.DEFAULT_PROP_DICT)
     verity_image_builder.CalculateMaxImageSize()
@@ -282,6 +294,7 @@
     verity_image_builder = CreateVerityImageBuilder(prop_dict)
     self.assertIsNone(verity_image_builder)
 
+  @SkipIfExternalToolsUnavailable()
   def test_Build(self):
     prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
     verity_image_builder = CreateVerityImageBuilder(prop_dict)
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index 3a58755..3063800 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -52,7 +52,7 @@
 
 
 def GetVerityMetadataSize(image_size):
-  cmd = ["build_verity_metadata.py", "size", str(image_size)]
+  cmd = ["build_verity_metadata", "size", str(image_size)]
   output = common.RunAndCheckOutput(cmd, verbose=False)
   return int(output)
 
@@ -97,7 +97,7 @@
 def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
                         block_device, signer_path, key, signer_args,
                         verity_disable):
-  cmd = ["build_verity_metadata.py", "build", str(image_size),
+  cmd = ["build_verity_metadata", "build", str(image_size),
          verity_metadata_path, root_hash, salt, block_device, signer_path, key]
   if signer_args:
     cmd.append("--signer_args=\"%s\"" % (' '.join(signer_args),))