Merge "Dexpreopt: prepare to merge class loader context from all deps."
diff --git a/core/Makefile b/core/Makefile
index 72929bf..a6f6dd2 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -694,8 +694,8 @@
 	@rm -f $@
 	echo "# Modules using -Wno-error" >> $@
 	for m in $(sort $(SOONG_MODULES_USING_WNO_ERROR) $(MODULES_USING_WNO_ERROR)); do echo $$m >> $@; done
-	echo "# Modules added default -Wall" >> $@
-	for m in $(sort $(SOONG_MODULES_ADDED_WALL) $(MODULES_ADDED_WALL)); do echo $$m >> $@; done
+	echo "# Modules that allow warnings" >> $@
+	for m in $(sort $(SOONG_MODULES_WARNINGS_ALLOWED) $(MODULES_WARNINGS_ALLOWED)); do echo $$m >> $@; done
 
 $(call declare-0p-target,$(WALL_WERROR))
 
@@ -842,10 +842,6 @@
 $(call declare-0p-target,$(INSTALLED_FILES_FILE_ROOT))
 $(call declare-0p-target,$(INSTALLED_FILES_JSON_ROOT))
 
-ifeq ($(HOST_OS),linux)
-$(call dist-for-goals, sdk sdk_addon, $(INSTALLED_FILES_FILE_ROOT))
-endif
-
 #------------------------------------------------------------------
 # dtb
 ifdef BOARD_INCLUDE_DTB_IN_BOOTIMG
@@ -877,9 +873,6 @@
 $(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_RAMDISK)))
 $(eval $(call declare-0p-target,$(INSTALLED_FILES_JSON_RAMDISK)))
 
-ifeq ($(HOST_OS),linux)
-$(call dist-for-goals, sdk sdk_addon, $(INSTALLED_FILES_FILE_RAMDISK))
-endif
 BUILT_RAMDISK_TARGET := $(PRODUCT_OUT)/ramdisk.img
 
 ifeq ($(BOARD_RAMDISK_USE_LZ4),true)
@@ -1831,6 +1824,7 @@
 define add-common-ro-flags-to-image-props
 $(eval _var := $(call to-upper,$(1)))
 $(if $(BOARD_$(_var)IMAGE_EROFS_COMPRESSOR),$(hide) echo "$(1)_erofs_compressor=$(BOARD_$(_var)IMAGE_EROFS_COMPRESSOR)" >> $(2))
+$(if $(BOARD_$(_var)IMAGE_EROFS_COMPRESS_HINTS),$(hide) echo "$(1)_erofs_compress_hints=$(BOARD_$(_var)IMAGE_EROFS_COMPRESS_HINTS)" >> $(2))
 $(if $(BOARD_$(_var)IMAGE_EROFS_PCLUSTER_SIZE),$(hide) echo "$(1)_erofs_pcluster_size=$(BOARD_$(_var)IMAGE_EROFS_PCLUSTER_SIZE)" >> $(2))
 $(if $(BOARD_$(_var)IMAGE_EXTFS_INODE_COUNT),$(hide) echo "$(1)_extfs_inode_count=$(BOARD_$(_var)IMAGE_EXTFS_INODE_COUNT)" >> $(2))
 $(if $(BOARD_$(_var)IMAGE_EXTFS_RSV_PCT),$(hide) echo "$(1)_extfs_rsv_pct=$(BOARD_$(_var)IMAGE_EXTFS_RSV_PCT)" >> $(2))
@@ -1916,6 +1910,7 @@
 $(if $(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG),$(hide) echo "squashfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG)" >> $(1))
 $(if $(INTERNAL_USERIMAGES_SPARSE_F2FS_FLAG),$(hide) echo "f2fs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_F2FS_FLAG)" >> $(1))
 $(if $(BOARD_EROFS_COMPRESSOR),$(hide) echo "erofs_default_compressor=$(BOARD_EROFS_COMPRESSOR)" >> $(1))
+$(if $(BOARD_EROFS_COMPRESS_HINTS),$(hide) echo "erofs_default_compress_hints=$(BOARD_EROFS_COMPRESS_HINTS)" >> $(1))
 $(if $(BOARD_EROFS_PCLUSTER_SIZE),$(hide) echo "erofs_pcluster_size=$(BOARD_EROFS_PCLUSTER_SIZE)" >> $(1))
 $(if $(BOARD_EROFS_SHARE_DUP_BLOCKS),$(hide) echo "erofs_share_dup_blocks=$(BOARD_EROFS_SHARE_DUP_BLOCKS)" >> $(1))
 $(if $(BOARD_EROFS_USE_LEGACY_COMPRESSION),$(hide) echo "erofs_use_legacy_compression=$(BOARD_EROFS_USE_LEGACY_COMPRESSION)" >> $(1))
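The two hunks above thread the new `BOARD_*_EROFS_COMPRESS_HINTS` variables through to the generated image property files. As a rough sketch (board values are hypothetical), the macros emit one `key=value` line per variable that is set:

```python
# Sketch of the prop lines the hunks above produce; board values are made up.
board = {
    "BOARD_EROFS_COMPRESS_HINTS": "device/acme/rodan/compress_hints.txt",
    "BOARD_SYSTEMIMAGE_EROFS_COMPRESS_HINTS": "device/acme/rodan/system_hints.txt",
}

props = []
if board.get("BOARD_EROFS_COMPRESS_HINTS"):
    props.append("erofs_default_compress_hints=" + board["BOARD_EROFS_COMPRESS_HINTS"])
# add-common-ro-flags-to-image-props with $(1)=system upper-cases the partition
# name when looking up the BOARD_SYSTEMIMAGE_* variable:
if board.get("BOARD_SYSTEMIMAGE_EROFS_COMPRESS_HINTS"):
    props.append("system_erofs_compress_hints=" + board["BOARD_SYSTEMIMAGE_EROFS_COMPRESS_HINTS"])

print("\n".join(props))
# erofs_default_compress_hints=device/acme/rodan/compress_hints.txt
# system_erofs_compress_hints=device/acme/rodan/system_hints.txt
```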
@@ -3108,10 +3103,6 @@
 .PHONY: installed-file-list
 installed-file-list: $(INSTALLED_FILES_FILE)
 
-ifeq ($(HOST_OS),linux)
-$(call dist-for-goals, sdk sdk_addon, $(INSTALLED_FILES_FILE))
-endif
-
 systemimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,systemimage)
 BUILT_SYSTEMIMAGE := $(systemimage_intermediates)/system.img
@@ -3515,7 +3506,7 @@
 $(eval $(call copy-one-file,$(BOARD_PREBUILT_VENDORIMAGE),$(INSTALLED_VENDORIMAGE_TARGET)))
 $(if $(strip $(ALL_TARGETS.$(INSTALLED_VENDORIMAGE_TARGET).META_LIC)),,\
     $(if $(strip $(ALL_TARGETS.$(BOARD_PREBUILT_VENDORIMAGE).META_LIC)),\
-        $(eval ALL_TARGETS.$(INSTALLED_VENDORIMAGE_TARGET).META_LIC:=$(ALL_TARGETS.$(BOARD_PREBUILT_VENDORIMAGE).META_LIC)),\
+        $(call declare-copy-target-license-metadata,$(INSTALLED_VENDORIMAGE_TARGET),$(BOARD_PREBUILT_VENDORIMAGE)),\
         $(call declare-license-metadata,$(INSTALLED_VENDORIMAGE_TARGET),legacy_proprietary,proprietary,,"Vendor Image",vendor)))
 endif
 
@@ -5374,7 +5365,7 @@
 tool_extension := $(wildcard $(tool_extensions)/releasetools.py)
 $(BUILT_TARGET_FILES_PACKAGE): PRIVATE_TOOL_EXTENSION := $(tool_extension)
 
-updaer_dep :=
+updater_dep :=
 ifeq ($(AB_OTA_UPDATER),true)
 updater_dep += system/update_engine/update_engine.conf
 $(call declare-1p-target,system/update_engine/update_engine.conf,system/update_engine)
@@ -5974,6 +5965,8 @@
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(INSTALLED_PVMFWIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
 	$(hide) cp $(INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET) $(zip_root)/PREBUILT_IMAGES/
+	$(hide) mkdir -p $(zip_root)/PVMFW
+	$(hide) cp $(PREBUILT_PVMFWIMAGE_TARGET) $(zip_root)/PVMFW/
 endif
 ifdef BOARD_PREBUILT_BOOTLOADER
 	$(hide) mkdir -p $(zip_root)/IMAGES
@@ -6798,8 +6791,6 @@
 # if we don't have a real list, then use "everything"
 ifeq ($(strip $(ATREE_FILES)),)
 ATREE_FILES := \
-	$(ALL_DEFAULT_INSTALLED_MODULES) \
-	$(INSTALLED_RAMDISK_TARGET) \
 	$(ALL_DOCS) \
 	$(ALL_SDK_FILES)
 endif
@@ -6828,18 +6819,7 @@
 deps := \
 	$(OUT_DOCS)/offline-sdk-timestamp \
 	$(SDK_METADATA_FILES) \
-	$(SYMBOLS_ZIP) \
-	$(COVERAGE_ZIP) \
-	$(APPCOMPAT_ZIP) \
-	$(INSTALLED_SYSTEMIMAGE_TARGET) \
-	$(INSTALLED_QEMU_SYSTEMIMAGE) \
-	$(INSTALLED_QEMU_RAMDISKIMAGE) \
-	$(INSTALLED_QEMU_VENDORIMAGE) \
-	$(QEMU_VERIFIED_BOOT_PARAMS) \
-	$(INSTALLED_USERDATAIMAGE_TARGET) \
-	$(INSTALLED_RAMDISK_TARGET) \
-	$(INSTALLED_SDK_BUILD_PROP_TARGET) \
-	$(INSTALLED_BUILD_PROP_TARGET) \
+	$(INSTALLED_SDK_BUILD_PROP_TARGET) \
 	$(ATREE_FILES) \
 	$(sdk_atree_files) \
 	$(HOST_OUT_EXECUTABLES)/atree \
diff --git a/core/OWNERS b/core/OWNERS
index dae34ff..980186c 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -1,4 +1,4 @@
-per-file dex_preopt*.* = ngeoffray@google.com,skvadrik@google.com
+per-file *dex_preopt*.* = ngeoffray@google.com,skvadrik@google.com
 per-file verify_uses_libraries.sh = ngeoffray@google.com,skvadrik@google.com
 
 # For version updates
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 7ea9b52..355a22e 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -20,7 +20,11 @@
 # Users can define base-rules-hook in their buildspec.mk to perform
 # arbitrary operations as each module is included.
 ifdef base-rules-hook
-$(if $(base-rules-hook),)
+  ifndef _has_warned_about_base_rules_hook
+    $(warning base-rules-hook is deprecated, please remove usages of it and/or convert to Soong.)
+    _has_warned_about_base_rules_hook := true
+  endif
+  $(if $(base-rules-hook),)
 endif
 
 ###########################################################
diff --git a/core/binary.mk b/core/binary.mk
index 665270e..3f32fa9 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -1506,7 +1506,7 @@
         ifeq (,$(strip $(call find_warning_allowed_projects,$(LOCAL_PATH))))
           my_cflags := -Wall -Werror $(my_cflags)
         else
-          $(eval MODULES_ADDED_WALL := $(MODULES_ADDED_WALL) $(LOCAL_MODULE_MAKEFILE):$(LOCAL_MODULE))
+          $(eval MODULES_WARNINGS_ALLOWED := $(MODULES_WARNINGS_ALLOWED) $(LOCAL_MODULE_MAKEFILE):$(LOCAL_MODULE))
           my_cflags := -Wall $(my_cflags)
         endif
       endif
diff --git a/core/board_config.mk b/core/board_config.mk
index dc50a68..8074225 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -234,10 +234,7 @@
   .KATI_READONLY := TARGET_DEVICE_DIR
 endif
 
-# TODO(colefaust) change this if to RBC_PRODUCT_CONFIG when
-# the board configuration is known to work on everything
-# the product config works on.
-ifndef RBC_BOARD_CONFIG
+ifndef RBC_PRODUCT_CONFIG
 include $(board_config_mk)
 else
   $(shell mkdir -p $(OUT_DIR)/rbc)
diff --git a/core/config.mk b/core/config.mk
index e9dedfd..247103d 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -226,8 +226,6 @@
 BUILD_FUZZ_TEST :=$= $(BUILD_SYSTEM)/fuzz_test.mk
 
 BUILD_NOTICE_FILE :=$= $(BUILD_SYSTEM)/notice_files.mk
-BUILD_HOST_DALVIK_JAVA_LIBRARY :=$= $(BUILD_SYSTEM)/host_dalvik_java_library.mk
-BUILD_HOST_DALVIK_STATIC_JAVA_LIBRARY :=$= $(BUILD_SYSTEM)/host_dalvik_static_java_library.mk
 
 include $(BUILD_SYSTEM)/deprecation.mk
 
@@ -861,6 +859,7 @@
     30.0 \
     31.0 \
     32.0 \
+    33.0 \
 
 .KATI_READONLY := \
     PLATFORM_SEPOLICY_COMPAT_VERSIONS \
diff --git a/core/definitions.mk b/core/definitions.mk
index a3f12e7..c5423e7 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -41,6 +41,9 @@
 ALL_NON_MODULES:=
 NON_MODULES_WITHOUT_LICENSE_METADATA:=
 
+# List of copied targets that need license metadata copied.
+ALL_COPIED_TARGETS:=
+
 # Full paths to targets that should be added to the "make droid"
 # set of installed targets.
 ALL_DEFAULT_INSTALLED_MODULES:=
@@ -583,6 +586,19 @@
 endef
 
 ###########################################################
+## Record a target $(1) copied from other target(s) $(2) that will need
+## license metadata.
+###########################################################
+define declare-copy-target-license-metadata
+$(strip $(if $(filter $(OUT_DIR)%,$(2)),$(eval _dir:=$(call license-metadata-dir))\
+  $(eval _tgt:=$(strip $(1)))\
+  $(eval _meta := $(call append-path,$(_dir),$(patsubst $(OUT_DIR)%,out%,$(_tgt).meta_lic)))\
+  $(eval ALL_COPIED_TARGETS.$(_tgt).SOURCES := $(ALL_COPIED_TARGETS.$(_tgt).SOURCES) $(filter $(OUT_DIR)%,$(2)))\
+  $(eval ALL_COPIED_TARGETS += $(_tgt)),\
+  $(eval ALL_TARGETS.$(1).META_LIC:=$(module_license_metadata))))
+endef
+
+###########################################################
 ## License metadata build rule for my_register_name $(1)
 ###########################################################
 define license-metadata-rule
@@ -661,13 +677,6 @@
 $(strip $(eval _notices := $(sort $(ALL_NON_MODULES.$(_tgt).NOTICES))))
 $(strip $(eval _path := $(sort $(ALL_NON_MODULES.$(_tgt).PATH))))
 $(strip $(eval _install_map := $(ALL_NON_MODULES.$(_tgt).ROOT_MAPPINGS)))
-$(strip $(eval \
-  $$(foreach d,$(strip $(ALL_NON_MODULES.$(_tgt).DEPENDENCIES)), \
-    $$(if $$(strip $$(ALL_TARGETS.$$(d).META_LIC)), \
-      , \
-      $$(eval NON_MODULES_WITHOUT_LICENSE_METADATA += $$(d))) \
-  )) \
-)
 
 $(_meta): PRIVATE_KINDS := $(sort $(ALL_NON_MODULES.$(_tgt).LICENSE_KINDS))
 $(_meta): PRIVATE_CONDITIONS := $(sort $(ALL_NON_MODULES.$(_tgt).LICENSE_CONDITIONS))
@@ -705,6 +714,60 @@
 endef
 
 ###########################################################
+## Record missing dependencies for non-module target $(1)
+###########################################################
+define record-missing-non-module-dependencies
+$(strip $(eval _tgt := $(strip $(1))))
+$(strip $(foreach d,$(strip $(ALL_NON_MODULES.$(_tgt).DEPENDENCIES)), \
+  $(if $(strip $(ALL_TARGETS.$(d).META_LIC)), \
+    , \
+    $(eval NON_MODULES_WITHOUT_LICENSE_METADATA += $(d))) \
+))
+endef
+
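The dependency scan factored out of `non-module-license-metadata-rule` above is simple bookkeeping; a minimal Python rendering (names hypothetical):

```python
# meta_lic maps a target path to its META_LIC value ("" if unset).
def record_missing_deps(target, deps_of, meta_lic, missing):
    for dep in deps_of.get(target, []):
        if not meta_lic.get(dep, "").strip():
            missing.append(dep)  # NON_MODULES_WITHOUT_LICENSE_METADATA += dep
```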
+###########################################################
+## License metadata build rule for copied target $(1)
+###########################################################
+define copied-target-license-metadata-rule
+$(if $(strip $(ALL_TARGETS.$(1).META_LIC)),,$(call _copied-target-license-metadata-rule,$(1)))
+endef
+
+define _copied-target-license-metadata-rule
+$(strip $(eval _dir := $(call license-metadata-dir)))
+$(strip $(eval _meta := $(call append-path,$(_dir),$(patsubst $(OUT_DIR)%,out%,$(1).meta_lic))))
+$(strip $(eval ALL_TARGETS.$(1).META_LIC:=$(_meta)))
+$(strip $(eval _dep:=))
+$(strip $(foreach s,$(ALL_COPIED_TARGETS.$(1).SOURCES),\
+  $(eval _dmeta:=$(ALL_TARGETS.$(s).META_LIC))\
+  $(if $(filter 0p,$(_dmeta)),\
+    $(if $(filter-out 0p,$(_dep)),,$(eval ALL_TARGETS.$(1).META_LIC:=0p)),\
+    $(if $(_dep),\
+      $(if $(filter-out $(_dep),$(_dmeta)),$(error cannot copy target from multiple modules: $(1) from $(_dep) and $(_dmeta))),\
+      $(eval _dep:=$(_dmeta))))))
+$(strip $(if $(strip $(_dep)),,$(error cannot copy target from unknown module: $(1) from $(ALL_COPIED_TARGETS.$(1).SOURCES))))
+
+ifneq (0p,$(ALL_TARGETS.$(1).META_LIC))
+$(_meta): PRIVATE_DEST_TARGET := $(1)
+$(_meta): PRIVATE_SOURCE_TARGETS := $(ALL_COPIED_TARGETS.$(1).SOURCES)
+$(_meta): PRIVATE_SOURCE_METADATA := $(_dep)
+$(_meta): PRIVATE_ARGUMENT_FILE := $(call intermediates-dir-for,PACKAGING,copynotice)/$(_meta)/arguments
+$(_meta) : $(_dep)
+	rm -f $$@
+	mkdir -p $$(dir $$@)
+	mkdir -p $$(dir $$(PRIVATE_ARGUMENT_FILE))
+	$$(call dump-words-to-file,\
+	    $$(addprefix -i ,$$(PRIVATE_DEST_TARGET))\
+	    $$(addprefix -s ,$$(PRIVATE_SOURCE_TARGETS))\
+	    $$(addprefix -d ,$$(PRIVATE_SOURCE_METADATA)),\
+	    $$(PRIVATE_ARGUMENT_FILE))
+	OUT_DIR=$(OUT_DIR) $(COPY_LICENSE_METADATA) \
+	  @$$(PRIVATE_ARGUMENT_FILE) \
+	  -o $$@
+
+endif
+endef
+
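Taken together, `declare-copy-target-license-metadata` records which `$(OUT_DIR)` sources a copied target came from, and `_copied-target-license-metadata-rule` later resolves those sources to a single module whose metadata the copy inherits. A simplified Python sketch of that resolution (it glosses over the exact ordering of the 0p case in the make code):

```python
def resolve_copied_target(target, source_metas):
    """source_metas: META_LIC values of the recorded sources ("0p" or a path)."""
    dep = None
    for dmeta in source_metas:
        if dmeta == "0p":
            continue  # 0p sources impose no license obligations
        if dep is not None and dmeta != dep:
            raise SystemExit("cannot copy target from multiple modules: %s from %s and %s"
                             % (target, dep, dmeta))
        dep = dmeta
    if dep is None:
        raise SystemExit("cannot copy target from unknown module: %s" % target)
    return dep  # a rule then copies license metadata from this single source
```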
+###########################################################
 ## Declare the license metadata for non-module target $(1).
 ##
 ## $(2) -- license kinds e.g. SPDX-license-identifier-Apache-2.0
@@ -919,6 +982,8 @@
   ) \
   $(foreach t,$(sort $(ALL_NON_MODULES)),$(eval $(call non-module-license-metadata-rule,$(t)))) \
   $(foreach m,$(sort $(ALL_MODULES)),$(eval $(call license-metadata-rule,$(m)))) \
+  $(foreach t,$(sort $(ALL_COPIED_TARGETS)),$(eval $(call copied-target-license-metadata-rule,$(t)))) \
+  $(foreach t,$(sort $(ALL_NON_MODULES)),$(call record-missing-non-module-dependencies,$(t))) \
   $(eval $(call report-missing-licenses-rule)) \
   $(eval $(call report-all-notice-library-names-rule)) \
   $(eval $(call build-all-license-metadata-rule)))
@@ -2609,7 +2674,7 @@
 @mkdir -p $(dir $@)tmp
 $(hide) rm -f $(dir $@)classes*.dex $(dir $@)d8_input.jar
 $(hide) $(ZIP2ZIP) -j -i $< -o $(dir $@)d8_input.jar "**/*.class"
-$(hide) $(D8_WRAPPER) $(DX_COMMAND) $(D8_FLAGS) \
+$(hide) $(D8_WRAPPER) $(D8_COMMAND) \
     --output $(dir $@)tmp \
     $(addprefix --lib ,$(PRIVATE_D8_LIBS)) \
     --min-api $(PRIVATE_MIN_SDK_VERSION) \
@@ -3213,7 +3278,7 @@
 define transform-jar-to-dex-r8
 @echo R8: $@
 $(hide) rm -f $(PRIVATE_PROGUARD_DICTIONARY)
-$(hide) $(R8_WRAPPER) $(R8_COMPAT_PROGUARD) $(R8_FLAGS) \
+$(hide) $(R8_WRAPPER) $(R8_COMMAND) \
     -injars '$<' \
     --min-api $(PRIVATE_MIN_SDK_VERSION) \
     --no-data-resources \
@@ -3360,8 +3425,6 @@
   STATIC_TEST_LIBRARY \
   HOST_STATIC_TEST_LIBRARY \
   NOTICE_FILE \
-  HOST_DALVIK_JAVA_LIBRARY \
-  HOST_DALVIK_STATIC_JAVA_LIBRARY \
   base_rules \
   HEADER_LIBRARY \
   HOST_TEST_CONFIG \
@@ -3404,11 +3467,11 @@
 define create-suite-dependencies
 $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
   $(eval $(if $(strip $(module_license_metadata)),\
-    $$(foreach f,$$(my_compat_dist_$(suite)),$$(eval ALL_TARGETS.$$(call word-colon,2,$$(f)).META_LIC := $(module_license_metadata))),\
+    $$(foreach f,$$(my_compat_dist_$(suite)),$$(call declare-copy-target-license-metadata,$$(call word-colon,2,$$(f)),$$(call word-colon,1,$$(f)))),\
     $$(eval my_test_data += $$(foreach f,$$(my_compat_dist_$(suite)), $$(call word-colon,2,$$(f)))) \
   )) \
   $(eval $(if $(strip $(module_license_metadata)),\
-    $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(eval ALL_TARGETS.$$(call word-colon,2,$$(f)).META_LIC := $(module_license_metadata))),\
+    $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call declare-copy-target-license-metadata,$$(call word-colon,2,$$(f)),$$(call word-colon,1,$$(f)))),\
     $$(eval my_test_config += $$(foreach f,$$(my_compat_dist_config_$(suite)), $$(call word-colon,2,$$(f)))) \
   )) \
   $(if $(filter $(suite),$(ALL_COMPATIBILITY_SUITES)),,\
diff --git a/core/deprecation.mk b/core/deprecation.mk
index 2b7a869..ed4215e 100644
--- a/core/deprecation.mk
+++ b/core/deprecation.mk
@@ -3,8 +3,6 @@
   BUILD_EXECUTABLE \
   BUILD_FUZZ_TEST \
   BUILD_HEADER_LIBRARY \
-  BUILD_HOST_DALVIK_JAVA_LIBRARY \
-  BUILD_HOST_DALVIK_STATIC_JAVA_LIBRARY \
   BUILD_HOST_JAVA_LIBRARY \
   BUILD_HOST_PREBUILT \
   BUILD_JAVA_LIBRARY \
@@ -39,6 +37,8 @@
 OBSOLETE_BUILD_MODULE_TYPES :=$= \
   BUILD_AUX_EXECUTABLE \
   BUILD_AUX_STATIC_LIBRARY \
+  BUILD_HOST_DALVIK_JAVA_LIBRARY \
+  BUILD_HOST_DALVIK_STATIC_JAVA_LIBRARY \
   BUILD_HOST_FUZZ_TEST \
   BUILD_HOST_NATIVE_TEST \
   BUILD_HOST_SHARED_TEST_LIBRARY \
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
deleted file mode 100644
index 5eeb8ac..0000000
--- a/core/host_dalvik_java_library.mk
+++ /dev/null
@@ -1,191 +0,0 @@
-#
-# Copyright (C) 2013 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-$(call record-module-type,HOST_DALVIK_JAVA_LIBRARY)
-
-#
-# Rules for building a host dalvik java library. These libraries
-# are meant to be used by a dalvik VM instance running on the host.
-# They will be compiled against libcore and not the host JRE.
-#
-
-ifeq ($(HOST_OS),linux)
-USE_CORE_LIB_BOOTCLASSPATH := true
-
-#######################################
-include $(BUILD_SYSTEM)/host_java_library_common.mk
-#######################################
-
-full_classes_turbine_jar := $(intermediates.COMMON)/classes-turbine.jar
-full_classes_header_jarjar := $(intermediates.COMMON)/classes-header-jarjar.jar
-full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar
-full_classes_compiled_jar := $(intermediates.COMMON)/classes-full-debug.jar
-full_classes_combined_jar := $(intermediates.COMMON)/classes-combined.jar
-full_classes_jarjar_jar := $(intermediates.COMMON)/classes-jarjar.jar
-full_classes_jar := $(intermediates.COMMON)/classes.jar
-built_dex := $(intermediates.COMMON)/classes.dex
-java_source_list_file := $(intermediates.COMMON)/java-source-list
-
-LOCAL_INTERMEDIATE_TARGETS += \
-    $(full_classes_turbine_jar) \
-    $(full_classes_compiled_jar) \
-    $(full_classes_combined_jar) \
-    $(full_classes_jarjar_jar) \
-    $(full_classes_jar) \
-    $(built_dex) \
-    $(java_source_list_file)
-
-# See comment in java.mk
-ifndef LOCAL_CHECKED_MODULE
-ifeq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
-LOCAL_CHECKED_MODULE := $(full_classes_compiled_jar)
-else
-LOCAL_CHECKED_MODULE := $(built_dex)
-endif
-endif
-
-#######################################
-include $(BUILD_SYSTEM)/base_rules.mk
-#######################################
-java_sources := $(addprefix $(LOCAL_PATH)/, $(filter %.java,$(LOCAL_SRC_FILES))) \
-                $(filter %.java,$(LOCAL_GENERATED_SOURCES))
-all_java_sources := $(java_sources)
-
-include $(BUILD_SYSTEM)/java_common.mk
-
-include $(BUILD_SYSTEM)/sdk_check.mk
-
-$(cleantarget): PRIVATE_CLEAN_FILES += $(intermediates.COMMON)
-
-# List of dependencies for anything that needs all java sources in place
-java_sources_deps := \
-    $(java_sources) \
-    $(java_resource_sources) \
-    $(LOCAL_SRCJARS) \
-    $(LOCAL_ADDITIONAL_DEPENDENCIES)
-
-$(java_source_list_file): $(java_sources_deps)
-	$(write-java-source-list)
-
-# TODO(b/143658984): goma can't handle the --system argument to javac.
-#$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
-$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
-$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
-$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
-$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
-$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
-$(full_classes_compiled_jar): PRIVATE_SRCJARS := $(LOCAL_SRCJARS)
-$(full_classes_compiled_jar): PRIVATE_SRCJAR_LIST_FILE := $(intermediates.COMMON)/srcjar-list
-$(full_classes_compiled_jar): PRIVATE_SRCJAR_INTERMEDIATES_DIR := $(intermediates.COMMON)/srcjars
-$(full_classes_compiled_jar): \
-    $(java_source_list_file) \
-    $(java_sources_deps) \
-    $(full_java_header_libs) \
-    $(full_java_bootclasspath_libs) \
-    $(full_java_system_modules_deps) \
-    $(annotation_processor_deps) \
-    $(NORMALIZE_PATH) \
-    $(JAR_ARGS) \
-    $(ZIPSYNC) \
-    $(SOONG_ZIP) \
-    | $(SOONG_JAVAC_WRAPPER)
-	$(transform-host-java-to-dalvik-package)
-
-ifneq ($(TURBINE_ENABLED),false)
-
-$(full_classes_turbine_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
-$(full_classes_turbine_jar): PRIVATE_SRCJARS := $(LOCAL_SRCJARS)
-$(full_classes_turbine_jar): \
-    $(java_source_list_file) \
-    $(java_sources_deps) \
-    $(full_java_header_libs) \
-    $(full_java_bootclasspath_libs) \
-    $(NORMALIZE_PATH) \
-    $(JAR_ARGS) \
-    $(ZIPTIME) \
-    | $(TURBINE) \
-    $(MERGE_ZIPS)
-	$(transform-java-to-header.jar)
-
-.KATI_RESTAT: $(full_classes_turbine_jar)
-
-# Run jarjar before generate classes-header.jar if necessary.
-ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
-$(full_classes_header_jarjar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
-$(full_classes_header_jarjar): $(full_classes_turbine_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
-	$(call transform-jarjar)
-else
-full_classes_header_jarjar := $(full_classes_turbine_jar)
-endif
-
-$(eval $(call copy-one-file,$(full_classes_header_jarjar),$(full_classes_header_jar)))
-
-endif # TURBINE_ENABLED != false
-
-$(full_classes_combined_jar): PRIVATE_DONT_DELETE_JAR_META_INF := $(LOCAL_DONT_DELETE_JAR_META_INF)
-$(full_classes_combined_jar): $(full_classes_compiled_jar) \
-                              $(jar_manifest_file) \
-                              $(full_static_java_libs)  | $(MERGE_ZIPS)
-	$(if $(PRIVATE_JAR_MANIFEST), $(hide) sed -e "s/%BUILD_NUMBER%/$(BUILD_NUMBER_FROM_FILE)/" \
-            $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf)
-	$(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
-            $(if $(PRIVATE_DONT_DELETE_JAR_META_INF),,-stripDir META-INF -zipToNotStrip $<) \
-            $@ $< $(PRIVATE_STATIC_JAVA_LIBRARIES)
-
-# Run jarjar if necessary, otherwise just copy the file.
-ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
-$(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
-$(full_classes_jarjar_jar): $(full_classes_combined_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
-	$(call transform-jarjar)
-else
-full_classes_jarjar_jar := $(full_classes_combined_jar)
-endif
-
-$(eval $(call copy-one-file,$(full_classes_jarjar_jar),$(full_classes_jar)))
-
-ifeq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
-# No dex; all we want are the .class files with resources.
-$(LOCAL_BUILT_MODULE) : $(java_resource_sources)
-$(LOCAL_BUILT_MODULE) : $(full_classes_jar)
-	@echo "host Static Jar: $(PRIVATE_MODULE) ($@)"
-	$(copy-file-to-target)
-
-else # !LOCAL_IS_STATIC_JAVA_LIBRARY
-$(built_dex): PRIVATE_INTERMEDIATES_DIR := $(intermediates.COMMON)
-$(built_dex): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
-$(built_dex): $(full_classes_jar) $(DX) $(ZIP2ZIP)
-	$(transform-classes.jar-to-dex)
-
-$(LOCAL_BUILT_MODULE): PRIVATE_DEX_FILE := $(built_dex)
-$(LOCAL_BUILT_MODULE): PRIVATE_SOURCE_ARCHIVE := $(full_classes_jarjar_jar)
-$(LOCAL_BUILT_MODULE): $(MERGE_ZIPS) $(SOONG_ZIP) $(ZIP2ZIP)
-$(LOCAL_BUILT_MODULE): $(built_dex) $(java_resource_sources)
-	@echo "Host Jar: $(PRIVATE_MODULE) ($@)"
-	rm -rf $@.parts
-	mkdir -p $@.parts
-	$(call create-dex-jar,$@.parts/dex.zip,$(PRIVATE_DEX_FILE))
-	$(call extract-resources-jar,$@.parts/res.zip,$(PRIVATE_SOURCE_ARCHIVE))
-	$(MERGE_ZIPS) -j $@ $@.parts/dex.zip $@.parts/res.zip
-	rm -rf $@.parts
-
-endif # !LOCAL_IS_STATIC_JAVA_LIBRARY
-
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_DEFAULT_APP_TARGET_SDK := $(call module-target-sdk-version)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SDK_VERSION := $(call module-sdk-version)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MIN_SDK_VERSION := $(call codename-or-sdk-to-sdk,$(call module-min-sdk-version))
-
-USE_CORE_LIB_BOOTCLASSPATH :=
-
-endif
diff --git a/core/host_dalvik_static_java_library.mk b/core/host_dalvik_static_java_library.mk
deleted file mode 100644
index 78faf73..0000000
--- a/core/host_dalvik_static_java_library.mk
+++ /dev/null
@@ -1,28 +0,0 @@
-#
-# Copyright (C) 2013 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-$(call record-module-type,HOST_DALVIK_STATIC_JAVA_LIBRARY)
-
-#
-# Rules for building a host dalvik static java library.
-# These libraries will be compiled against libcore and not the host
-# JRE.
-#
-LOCAL_UNINSTALLABLE_MODULE := true
-LOCAL_IS_STATIC_JAVA_LIBRARY := true
-
-include $(BUILD_SYSTEM)/host_dalvik_java_library.mk
-
-LOCAL_IS_STATIC_JAVA_LIBRARY :=
diff --git a/core/java.mk b/core/java.mk
index a29f820..01951c0 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -494,13 +494,13 @@
   $(built_dex_intermediate): PRIVATE_EXTRA_INPUT_JAR := $(extra_input_jar)
   $(built_dex_intermediate): PRIVATE_PROGUARD_FLAGS := $(legacy_proguard_flags) $(common_proguard_flags) $(LOCAL_PROGUARD_FLAGS)
   $(built_dex_intermediate): PRIVATE_PROGUARD_DICTIONARY := $(proguard_dictionary)
-  $(built_dex_intermediate) : $(full_classes_pre_proguard_jar) $(extra_input_jar) $(my_proguard_sdk_raise) $(common_proguard_flag_files) $(legacy_proguard_lib_deps) $(R8_COMPAT_PROGUARD) $(LOCAL_PROGUARD_FLAGS_DEPS)
+  $(built_dex_intermediate) : $(full_classes_pre_proguard_jar) $(extra_input_jar) $(my_proguard_sdk_raise) $(common_proguard_flag_files) $(legacy_proguard_lib_deps) $(R8) $(LOCAL_PROGUARD_FLAGS_DEPS)
 	$(transform-jar-to-dex-r8)
 else # !LOCAL_PROGUARD_ENABLED
   $(built_dex_intermediate): .KATI_NINJA_POOL := $(D8_NINJA_POOL)
   $(built_dex_intermediate): PRIVATE_D8_LIBS := $(full_java_bootclasspath_libs) $(full_shared_java_header_libs)
   $(built_dex_intermediate): $(full_java_bootclasspath_libs) $(full_shared_java_header_libs)
-  $(built_dex_intermediate): $(full_classes_pre_proguard_jar) $(DX) $(ZIP2ZIP)
+  $(built_dex_intermediate): $(full_classes_pre_proguard_jar) $(D8) $(ZIP2ZIP)
 	$(transform-classes.jar-to-dex)
 endif
 
diff --git a/core/main.mk b/core/main.mk
index e0efdad..2cfea45 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -931,11 +931,11 @@
     $(eval my_deps := $(call get-all-shared-libs-deps,$(m)))\
     $(foreach dep,$(my_deps),\
       $(foreach f,$(ALL_MODULES.$(dep).HOST_SHARED_LIBRARY_FILES),\
-        $(if $(filter $(suite),device-tests general-tests),\
+        $(if $(filter $(suite),device-tests general-tests art-host-tests host-unit-tests),\
           $(eval my_testcases := $(HOST_OUT_TESTCASES)),\
           $(eval my_testcases := $$(COMPATIBILITY_TESTCASES_OUT_$(suite))))\
         $(eval target := $(my_testcases)/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\
-        $(if $(strip $(ALL_TARGETS.$(target).META_LIC)),,$(eval ALL_TARGETS.$(target).META_LIC:=$(module_license_metadata)))\
+        $(if $(strip $(ALL_TARGETS.$(target).META_LIC)),,$(call declare-copy-target-license-metadata,$(target),$(f)))\
         $(eval COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES := \
           $$(COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES) $(f):$(target))\
         $(eval COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES := \
@@ -1936,10 +1936,6 @@
 sdk: $(ALL_SDK_TARGETS)
 $(call dist-for-goals,sdk, \
     $(ALL_SDK_TARGETS) \
-    $(SYMBOLS_ZIP) \
-    $(SYMBOLS_MAPPING) \
-    $(COVERAGE_ZIP) \
-    $(APPCOMPAT_ZIP) \
     $(INSTALLED_BUILD_PROP_TARGET) \
 )
 endif
diff --git a/core/notice_files.mk b/core/notice_files.mk
index c05d4ea..84523af 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -127,7 +127,8 @@
 ifdef my_register_name
   module_license_metadata := $(call local-intermediates-dir)/$(my_register_name).meta_lic
 
-  $(foreach target,$(ALL_MODULES.$(my_register_name).BUILT) $(ALL_MODULES.$(my_register_name).INSTALLED) $(my_test_data) $(my_test_config),\
+  $(foreach target,$(ALL_MODULES.$(my_register_name).BUILT) $(ALL_MODULES.$(my_register_name).INSTALLED) $(foreach bi,$(LOCAL_SOONG_BUILT_INSTALLED),$(call word-colon,1,$(bi))) \
+      $(my_test_data) $(my_test_config),\
     $(eval ALL_TARGETS.$(target).META_LIC := $(module_license_metadata)))
 
   ALL_MODULES.$(my_register_name).META_LIC := $(strip $(ALL_MODULES.$(my_register_name).META_LIC) $(module_license_metadata))
diff --git a/core/product-graph.mk b/core/product-graph.mk
index 379110e..4a44837 100644
--- a/core/product-graph.mk
+++ b/core/product-graph.mk
@@ -25,7 +25,7 @@
 		$(if $(filter $(p),$(_all_products_visited)),, \
 			$(p) \
 			$(eval _all_products_visited += $(p)) \
-			$(call all-products-inner, $(PRODUCTS.$(strip $(p)).INHERITS_FROM))
+			$(call gather-all-makefiles-for-current-product-inner, $(PRODUCTS.$(strip $(p)).INHERITS_FROM))
 		) \
 	)
 endef
diff --git a/core/product_config.mk b/core/product_config.mk
index 1e74fa9..540289a 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -210,7 +210,6 @@
 # Dedup, extract product names, etc.
 product_paths := $(sort $(product_paths))
 all_named_products := $(sort $(call _first,$(product_paths),:))
-all_product_makefiles := $(sort $(call _second,$(product_paths),:))
 current_product_makefile := $(call _second,$(filter $(TARGET_PRODUCT):%,$(product_paths)),:)
 COMMON_LUNCH_CHOICES := $(sort $(common_lunch_choices))
 
@@ -230,7 +229,6 @@
 
 ifneq (,$(filter $(TARGET_PRODUCT),$(products_using_starlark_config)))
   RBC_PRODUCT_CONFIG := true
-  RBC_BOARD_CONFIG := true
 endif
 
 ifndef RBC_PRODUCT_CONFIG
@@ -274,8 +272,6 @@
 ############################################################################
 
 current_product_makefile :=
-all_product_makefiles :=
-all_product_configs :=
 
 #############################################################################
 # Quick check and assign default values
diff --git a/core/product_config.rbc b/core/product_config.rbc
index 7ee3dc7..7a5e501 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -536,8 +536,11 @@
     """If from file exists, returns [from:to] pair."""
     value = path_pair.split(":", 2)
 
+    if value[0].find('*') != -1:
+        fail("copy_if_exists: input file cannot contain *")
+
     # Check that l[0] exists
-    return [":".join(value)] if rblf_file_exists(value[0]) else []
+    return [":".join(value)] if rblf_wildcard(value[0]) else []
 
 def _enforce_product_packages_exist(handle, pkg_string_or_list=[]):
     """Makes including non-existent modules in PRODUCT_PACKAGES an error."""
@@ -552,10 +555,6 @@
     _setdefault(handle, "PRODUCT_DEX_PREOPT_MODULE_CONFIGS")
     handle.cfg["PRODUCT_DEX_PREOPT_MODULE_CONFIGS"] += [m + "=" + config for m in modules]
 
-def _file_wildcard_exists(file_pattern):
-    """Return True if there are files matching given bash pattern."""
-    return len(rblf_wildcard(file_pattern)) > 0
-
 def _find_and_copy(pattern, from_dir, to_dir):
     """Return a copy list for the files matching the pattern."""
     return sorted([("%s/%s:%s/%s" % (from_dir, f, to_dir, f))
@@ -605,6 +604,27 @@
                 break
     return res
 
+def _first_word(input):
+    """Equivalent to the GNU make function $(firstword)."""
+    input = __words(input)
+    if len(input) == 0:
+        return ""
+    return input[0]
+
+def _last_word(input):
+    """Equivalent to the GNU make function $(lastword)."""
+    input = __words(input)
+    l = len(input)
+    if l == 0:
+        return ""
+    return input[l-1]
+
+def _flatten_2d_list(list):
+    result = []
+    for x in list:
+        result += x
+    return result
+
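Expected behavior of the three new helpers, assuming `__words` splits a string on whitespace the way GNU make does:

```python
assert _first_word("alpha beta gamma") == "alpha"
assert _last_word("alpha beta gamma") == "gamma"
assert _first_word("") == "" and _last_word("") == ""
assert _flatten_2d_list([["a", "b"], [], ["c"]]) == ["a", "b", "c"]
```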
 def _dir(paths):
     """Equivalent to the GNU make function $(dir).
 
@@ -771,7 +791,7 @@
     if t == "list":
         s = " ".join(s)
     elif t != "string":
-        fail("Argument to mkstrip must be a string or list.")
+        fail("Argument to mkstrip must be a string or list, got: "+t)
     result = ""
     was_space = False
     for ch in s.strip().elems():
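For reference, `_mkstrip` trims the ends and collapses internal whitespace runs; with the sharpened message above, a bad argument is now reported along with its type name, e.g.:

```python
assert _mkstrip("  foo \t bar  ") == "foo bar"
assert _mkstrip(["foo", "bar"]) == "foo bar"
# _mkstrip(42) fails with: Argument to mkstrip must be a string or list, got: int
```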
@@ -859,12 +879,13 @@
     dir = _dir,
     enforce_product_packages_exist = _enforce_product_packages_exist,
     expand_wildcard = _expand_wildcard,
-    file_exists = rblf_file_exists,
-    file_wildcard_exists = _file_wildcard_exists,
     filter = _filter,
     filter_out = _filter_out,
     find_and_copy = _find_and_copy,
     findstring = _findstring,
+    first_word = _first_word,
+    last_word = _last_word,
+    flatten_2d_list = _flatten_2d_list,
     inherit = _inherit,
     indirect = _indirect,
     mk2rbc_error = _mk2rbc_error,
diff --git a/core/proguard.flags b/core/proguard.flags
index 185275e..aee5271 100644
--- a/core/proguard.flags
+++ b/core/proguard.flags
@@ -15,6 +15,12 @@
 @**.VisibleForTesting *;
 }
 
+# Keep rule for members that are needed solely to keep alive downstream weak
+# references, and could otherwise be removed after tree shaking optimizations.
+-keepclassmembers,allowaccessmodification,allowobfuscation,allowshrinking class * {
+  @com.android.internal.annotations.KeepForWeakReference <fields>;
+}
+
 # Understand the common @Keep annotation from various Android packages:
 #  * android.support.annotation
 #  * androidx.annotation
diff --git a/core/rbe.mk b/core/rbe.mk
index 370d4bd..90328d3 100644
--- a/core/rbe.mk
+++ b/core/rbe.mk
@@ -87,11 +87,11 @@
   endif
 
   ifdef RBE_R8
-    R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=out/soong/host/linux-x86/framework/r8-compat-proguard.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
+    R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/r8-compat-proguard.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
   endif
 
   ifdef RBE_D8
-    D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=out/soong/host/linux-x86/framework/d8.jar --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
+    D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/d8.jar --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
   endif
 
   rbe_dir :=
diff --git a/core/soong_cc_rust_prebuilt.mk b/core/soong_cc_rust_prebuilt.mk
index 07e577a..05b4b6b 100644
--- a/core/soong_cc_rust_prebuilt.mk
+++ b/core/soong_cc_rust_prebuilt.mk
@@ -50,6 +50,28 @@
 # to avoid checkbuilds making an extra copy of every module.
 LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE)
 
+my_check_same_vndk_variants :=
+same_vndk_variants_stamp :=
+ifeq ($(LOCAL_CHECK_SAME_VNDK_VARIANTS),true)
+  ifeq ($(filter hwaddress address, $(SANITIZE_TARGET)),)
+    ifneq ($(CLANG_COVERAGE),true)
+      # Do not compare VNDK variant for special cases e.g. coverage builds.
+      ifneq ($(SKIP_VNDK_VARIANTS_CHECK),true)
+        my_check_same_vndk_variants := true
+        same_vndk_variants_stamp := $(call local-intermediates-dir,,$(LOCAL_2ND_ARCH_VAR_PREFIX))/same_vndk_variants.timestamp
+      endif
+    endif
+  endif
+endif
+
+ifeq ($(my_check_same_vndk_variants),true)
+  # Add the timestamp to the CHECKED list so that `checkbuild` can run it.
+  # Note that because `checkbuild` doesn't check LOCAL_BUILT_MODULE for soong-built modules,
+  # adding the timestamp to LOCAL_BUILT_MODULE isn't enough: it is skipped when the vendor
+  # variant isn't used at all, and that may break downstream trees.
+  LOCAL_ADDITIONAL_CHECKED_MODULE := $(same_vndk_variants_stamp)
+endif
+
 #######################################
 include $(BUILD_SYSTEM)/base_rules.mk
 #######################################
@@ -125,21 +147,7 @@
   endif
 endif
 
-my_check_same_vndk_variants :=
-ifeq ($(LOCAL_CHECK_SAME_VNDK_VARIANTS),true)
-  ifeq ($(filter hwaddress address, $(SANITIZE_TARGET)),)
-    ifneq ($(CLANG_COVERAGE),true)
-        # Do not compare VNDK variant for special cases e.g. coverage builds.
-        ifneq ($(SKIP_VNDK_VARIANTS_CHECK),true)
-            my_check_same_vndk_variants := true
-        endif
-    endif
-  endif
-endif
-
 ifeq ($(my_check_same_vndk_variants),true)
-  same_vndk_variants_stamp := $(intermediates)/same_vndk_variants.timestamp
-
   my_core_register_name := $(subst .vendor,,$(subst .product,,$(my_register_name)))
   my_core_variant_files := $(call module-target-built-files,$(my_core_register_name))
   my_core_shared_lib := $(sort $(filter %.so,$(my_core_variant_files)))
diff --git a/core/soong_config.mk b/core/soong_config.mk
index d03b687..c84676b 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -170,6 +170,8 @@
 $(call add_json_list, RecoverySnapshotDirsExcluded,      $(RECOVERY_SNAPSHOT_DIRS_EXCLUDED))
 $(call add_json_bool, HostFakeSnapshotEnabled,           $(HOST_FAKE_SNAPSHOT_ENABLE))
 
+$(call add_json_bool, MultitreeUpdateMeta,               $(filter true,$(TARGET_MULTITREE_UPDATE_META)))
+
 $(call add_json_bool, Treble_linker_namespaces,          $(filter true,$(PRODUCT_TREBLE_LINKER_NAMESPACES)))
 $(call add_json_bool, Enforce_vintf_manifest,            $(filter true,$(PRODUCT_ENFORCE_VINTF_MANIFEST)))
 
diff --git a/core/tasks/README.dex_preopt_check.md b/core/tasks/README.dex_preopt_check.md
new file mode 100644
index 0000000..b0baa9e
--- /dev/null
+++ b/core/tasks/README.dex_preopt_check.md
@@ -0,0 +1,43 @@
+# `dex_preopt_check`
+
+`dex_preopt_check` is a build-time check to make sure that all system server
+jars are dexpreopted. When the check fails, you will see the following error
+message:
+
+```
+FAILED:
+build/make/core/tasks/dex_preopt_check.mk:13: warning:  Missing compilation artifacts. Dexpreopting is not working for some system server jars
+Offending entries:
+```
+
+Possible causes are:
+
+1.  There is an APEX/SDK mismatch. (E.g., the APEX is built from source while
+    the SDK is built from prebuilt.)
+
+1.  The `systemserverclasspath_fragment` is not added as
+    `systemserverclasspath_fragments` of the corresponding `apex` module, or not
+    added as `exported_systemserverclasspath_fragments` of the corresponding
+    `prebuilt_apex`/`apex_set` module when building from prebuilt.
+
+1.  The expected version of the system server java library is not preferred.
+    (E.g., the `java_import` module has `prefer: false` when building from
+    prebuilt.)
+
+1.  Dexpreopting is disabled for the system server java library. This can be due
+    to various reasons including but not limited to:
+
+    - The java library has `dex_preopt: { enabled: false }` in the Android.bp
+      file.
+
+    - The java library is listed in `DEXPREOPT_DISABLED_MODULES` in a Makefile.
+
+    - The java library is missing `installable: true` in the Android.bp
+      file when building from source.
+
+    - Sanitizer is enabled.
+
+1.  `PRODUCT_SYSTEM_SERVER_JARS`, `PRODUCT_APEX_SYSTEM_SERVER_JARS`,
+    `PRODUCT_STANDALONE_SYSTEM_SERVER_JARS`, or
+    `PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS` has an extra entry that is not
+    needed by the product.
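For orientation, the check in `dex_preopt_check.mk` boils down to a set difference between the expected dexpreopt artifacts and what the product installs; a rough Python equivalent (paths hypothetical):

```python
DEXPREOPT_SYSTEMSERVER_ARTIFACTS = {
    "out/target/product/generic/system/framework/oat/arm64/services.odex",
    "out/target/product/generic/system/framework/oat/arm64/services.vdex",
}
ALL_DEFAULT_INSTALLED_MODULES = {
    "out/target/product/generic/system/framework/oat/arm64/services.odex",
}

offending = sorted(DEXPREOPT_SYSTEMSERVER_ARTIFACTS - ALL_DEFAULT_INSTALLED_MODULES)
if offending:
    raise SystemExit("Missing compilation artifacts. Offending entries:\n" + "\n".join(offending))
```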
diff --git a/core/tasks/dex_preopt_check.mk b/core/tasks/dex_preopt_check.mk
index bfa1ec5..5fd60c8 100644
--- a/core/tasks/dex_preopt_check.mk
+++ b/core/tasks/dex_preopt_check.mk
@@ -12,7 +12,8 @@
   ifneq (,$(filter services,$(PRODUCT_PACKAGES)))
     $(call maybe-print-list-and-error,\
       $(filter-out $(ALL_DEFAULT_INSTALLED_MODULES),$(DEXPREOPT_SYSTEMSERVER_ARTIFACTS)),\
-      Missing compilation artifacts. Dexpreopting is not working for some system server jars \
+      Missing compilation artifacts. Dexpreopting is not working for some system server jars. See \
+      https://cs.android.com/android/platform/superproject/+/master:build/make/core/tasks/README.dex_preopt_check.md \
     )
   endif
 endif
diff --git a/core/tasks/host-unit-tests.mk b/core/tasks/host-unit-tests.mk
index 4453c29..ed2f2a6 100644
--- a/core/tasks/host-unit-tests.mk
+++ b/core/tasks/host-unit-tests.mk
@@ -39,7 +39,7 @@
 	  echo $$shared_lib >> $@-host-libs.list; \
 	done
 	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
-	$(hide) $(SOONG_ZIP) -L 0 -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list \
+	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list \
 	  -P target -C $(PRODUCT_OUT) -l $@-target.list \
 	  -P host/testcases -C $(HOST_OUT) -l $@-host-libs.list
 	rm -f $@.list $@-host.list $@-target.list $@-host-libs.list
diff --git a/core/tasks/multitree.mk b/core/tasks/multitree.mk
new file mode 100644
index 0000000..225477e
--- /dev/null
+++ b/core/tasks/multitree.mk
@@ -0,0 +1,16 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+.PHONY: update-meta
+update-meta: $(SOONG_MULTITREE_METADATA)
diff --git a/core/tasks/tools/vts_package_utils.mk b/core/tasks/tools/vts_package_utils.mk
index f1159b3..06161f0 100644
--- a/core/tasks/tools/vts_package_utils.mk
+++ b/core/tasks/tools/vts_package_utils.mk
@@ -29,6 +29,6 @@
       $(eval my_copy_dest := $(patsubst data/%,DATA/%,\
                                $(patsubst system/%,DATA/%,\
                                    $(patsubst $(PRODUCT_OUT)/%,%,$(ins)))))\
-      $(eval ALL_TARGETS.$(2)/$(my_copy_dest).META_LIC := $(if $(strip $(ALL_MODULES.$(m).META_LIC)),$(ALL_MODULES.$(m).META_LIC),$(ALL_MODULES.$(m).DELAYED_META_LIC)))\
+      $(call declare-copy-target-license-metadata,$(2)/$(my_copy_dest),$(bui))\
       $(bui):$(2)/$(my_copy_dest))))
 endef
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index d129aa4..af7d1c0 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -104,7 +104,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-    PLATFORM_SECURITY_PATCH := 2022-04-05
+    PLATFORM_SECURITY_PATCH := 2022-05-05
 endif
 .KATI_READONLY := PLATFORM_SECURITY_PATCH
 
diff --git a/core/version_util.mk b/core/version_util.mk
index 3a0d4b5..cbfef96 100644
--- a/core/version_util.mk
+++ b/core/version_util.mk
@@ -56,36 +56,34 @@
 # unreleased API level targetable by this branch, not just those that are valid
 # lunch targets for this branch.
 
+PLATFORM_VERSION_CODENAME := $(PLATFORM_VERSION_CODENAME.$(TARGET_PLATFORM_VERSION))
 ifndef PLATFORM_VERSION_CODENAME
-  PLATFORM_VERSION_CODENAME := $(PLATFORM_VERSION_CODENAME.$(TARGET_PLATFORM_VERSION))
-  ifndef PLATFORM_VERSION_CODENAME
-    # PLATFORM_VERSION_CODENAME falls back to TARGET_PLATFORM_VERSION
-    PLATFORM_VERSION_CODENAME := $(TARGET_PLATFORM_VERSION)
-  endif
-
-  # This is all of the *active* development codenames.
-  # This confusing name is needed because
-  # all_codenames has been baked into build.prop for ages.
-  #
-  # Should be either the same as PLATFORM_VERSION_CODENAME or a comma-separated
-  # list of additional codenames after PLATFORM_VERSION_CODENAME.
-  PLATFORM_VERSION_ALL_CODENAMES :=
-
-  # Build a list of all active code names. Avoid duplicates, and stop when we
-  # reach a codename that matches PLATFORM_VERSION_CODENAME (anything beyond
-  # that is not included in our build).
-  _versions_in_target := \
-    $(call find_and_earlier,$(ALL_VERSIONS),$(TARGET_PLATFORM_VERSION))
-  $(foreach version,$(_versions_in_target),\
-    $(eval _codename := $(PLATFORM_VERSION_CODENAME.$(version)))\
-    $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_CODENAMES)),,\
-      $(eval PLATFORM_VERSION_ALL_CODENAMES += $(_codename))))
-
-  # And convert from space separated to comma separated.
-  PLATFORM_VERSION_ALL_CODENAMES := \
-    $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_CODENAMES)))
-
+  # PLATFORM_VERSION_CODENAME falls back to TARGET_PLATFORM_VERSION
+  PLATFORM_VERSION_CODENAME := $(TARGET_PLATFORM_VERSION)
 endif
+
+# This is all of the *active* development codenames.
+# This confusing name is needed because
+# all_codenames has been baked into build.prop for ages.
+#
+# Should be either the same as PLATFORM_VERSION_CODENAME or a comma-separated
+# list of additional codenames after PLATFORM_VERSION_CODENAME.
+PLATFORM_VERSION_ALL_CODENAMES :=
+
+# Build a list of all active code names. Avoid duplicates, and stop when we
+# reach a codename that matches PLATFORM_VERSION_CODENAME (anything beyond
+# that is not included in our build).
+_versions_in_target := \
+  $(call find_and_earlier,$(ALL_VERSIONS),$(TARGET_PLATFORM_VERSION))
+$(foreach version,$(_versions_in_target),\
+  $(eval _codename := $(PLATFORM_VERSION_CODENAME.$(version)))\
+  $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_CODENAMES)),,\
+    $(eval PLATFORM_VERSION_ALL_CODENAMES += $(_codename))))
+
+# And convert from space separated to comma separated.
+PLATFORM_VERSION_ALL_CODENAMES := \
+  $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_CODENAMES)))
+
 .KATI_READONLY := \
   PLATFORM_VERSION_CODENAME \
   PLATFORM_VERSION_ALL_CODENAMES
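The de-indented block behaves as before; it simply runs whether or not PLATFORM_VERSION_CODENAME was preset. A compact Python sketch of the codename list it builds (version data hypothetical):

```python
def all_codenames(all_versions, target_version, codename_of):
    # find_and_earlier: every version up to and including the target
    versions = all_versions[:all_versions.index(target_version) + 1]
    seen = []
    for v in versions:
        c = codename_of(v)
        if c not in seen:  # avoid duplicates; several versions may share a codename
            seen.append(c)
    return ",".join(seen)  # space separated -> comma separated

codenames = {"12.0": "S", "12.1": "Sv2", "13.0": "Tiramisu"}
assert all_codenames(["12.0", "12.1", "13.0"], "13.0", codenames.get) == "S,Sv2,Tiramisu"
```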
diff --git a/envsetup.sh b/envsetup.sh
index e7b8538..c9b1b54 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -395,7 +395,7 @@
     fi
 
     local completion_files=(
-      system/core/adb/adb.bash
+      packages/modules/adb/adb.bash
       system/core/fastboot/fastboot.bash
       tools/asuite/asuite.sh
     )
@@ -403,8 +403,12 @@
     # e.g.
     # ENVSETUP_NO_COMPLETION=adb # -> disable adb completion
     # ENVSETUP_NO_COMPLETION=adb:bit # -> disable adb and bit completion
+    local T=$(gettop)
     for f in ${completion_files[*]}; do
-        if [ -f "$f" ] && should_add_completion "$f"; then
+        f="$T/$f"
+        if [ ! -f "$f" ]; then
+          echo "Warning: completion file $f not found"
+        elif should_add_completion "$f"; then
             . $f
         fi
     done
@@ -454,7 +458,7 @@
     if $(echo "$1" | grep -q '^-') ; then
         # Calls starting with a -- argument are passed directly and the function
         # returns with the lunch.py exit code.
-        build/make/orchestrator/core/lunch.py "$@"
+        build/build/make/orchestrator/core/lunch.py "$@"
         code=$?
         if [[ $code -eq 2 ]] ; then
           echo 1>&2
@@ -465,7 +469,7 @@
         fi
     else
         # All other calls go through the --lunch variant of lunch.py
-        results=($(build/make/orchestrator/core/lunch.py --lunch "$@"))
+        results=($(build/build/make/orchestrator/core/lunch.py --lunch "$@"))
         code=$?
         if [[ $code -eq 2 ]] ; then
           echo 1>&2
@@ -876,7 +880,7 @@
     fi
 
     if [ -z "$product" ]; then
-        product=arm
+        product=arm64
     elif [ $(echo $product | wc -w) -gt 1 ]; then
         echo "banchan: Error: Multiple build archs or products supplied: $products"
         return
@@ -942,6 +946,34 @@
     fi
 }
 
+# TODO: Merge into gettop as part of launching multitree
+function multitree_gettop
+{
+    local TOPFILE=build/build/make/core/envsetup.mk
+    if [ -n "$TOP" -a -f "$TOP/$TOPFILE" ] ; then
+        # The following circumlocution ensures we remove symlinks from TOP.
+        (cd "$TOP"; PWD= /bin/pwd)
+    else
+        if [ -f $TOPFILE ] ; then
+            # The following circumlocution (repeated below as well) ensures
+            # that we record the true directory name and not one that is
+            # faked up with symlink names.
+            PWD= /bin/pwd
+        else
+            local HERE=$PWD
+            local T=
+            while [ \( ! \( -f $TOPFILE \) \) -a \( "$PWD" != "/" \) ]; do
+                \cd ..
+                T=`PWD= /bin/pwd -P`
+            done
+            \cd "$HERE"
+            if [ -f "$T/$TOPFILE" ]; then
+                echo "$T"
+            fi
+        fi
+    fi
+}
+
 function croot()
 {
     local T=$(gettop)
@@ -1065,7 +1097,7 @@
         return;
     fi;
     echo "Setting core limit for $PID to infinite...";
-    adb shell /system/bin/ulimit -p $PID -c unlimited
+    adb shell /system/bin/ulimit -P $PID -c unlimited
 }
 
 # core - send SIGV and pull the core for process
@@ -1824,6 +1856,21 @@
     _wrap_build $(get_make_command "$@") "$@"
 }
 
+function _multitree_lunch_error()
+{
+      >&2 echo "Couldn't locate the top of the tree. Please run \'source build/envsetup.sh\' and multitree_lunch from the root of your workspace."
+}
+
+function multitree_build()
+{
+    if T="$(multitree_gettop)"; then
+      "$T/build/build/orchestrator/core/orchestrator.py" "$@"
+    else
+      _multitree_lunch_error
+      return 1
+    fi
+}
+
 function provision()
 {
     if [ ! "$ANDROID_PRODUCT_OUT" ]; then
diff --git a/finalize_branch_for_release.sh b/finalize_branch_for_release.sh
index 972ada1..c942eb2 100755
--- a/finalize_branch_for_release.sh
+++ b/finalize_branch_for_release.sh
@@ -21,8 +21,9 @@
 
 m check-vndk-list || update-vndk-list.sh # for new versions of AIDL interfaces
 
-# TODO(b/229413853): test while simulating 'rel' for more requirements AIDL_FROZEN_REL=true
-m # test build
+# for now, we simulate the release state for AIDL, but in the future, we would want
+# to actually turn the branch into the REL state and test with that
+AIDL_FROZEN_REL=true m # test build
 
 # Build SDK (TODO)
 # lunch sdk...
diff --git a/orchestrator/README b/orchestrator/README
new file mode 100644
index 0000000..9a1e302
--- /dev/null
+++ b/orchestrator/README
@@ -0,0 +1,8 @@
+DEMO
+
+From the root of the workspace, run:
+
+multitree_lunch build/build/make/orchestrator/test_workspace/combo.mcombo eng
+
+rm -rf out && multitree_build && echo "==== Files ====" && find out -type f
+
diff --git a/orchestrator/core/api_assembly.py b/orchestrator/core/api_assembly.py
new file mode 100644
index 0000000..d7abef7
--- /dev/null
+++ b/orchestrator/core/api_assembly.py
@@ -0,0 +1,156 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import json
+import os
+import sys
+
+import api_assembly_cc
+import ninja_tools
+
+
+ContributionData = collections.namedtuple("ContributionData", ("inner_tree", "json_data"))
+
+def assemble_apis(context, inner_trees):
+    # Find all of the contributions from the inner tree
+    contribution_files_dict = inner_trees.for_each_tree(api_contribution_files_for_inner_tree)
+
+    # Load and validate the contribution files
+    # TODO: Check timestamps and skip unnecessary work
+    contributions = []
+    for tree_key, filenames in contribution_files_dict.items():
+        for filename in filenames:
+            json_data = load_contribution_file(context, filename)
+            if not json_data:
+                continue
+            # TODO: Validate the configs, especially that the domains match what we asked for
+            # from the lunch config.
+            contributions.append(ContributionData(inner_trees.get(tree_key), json_data))
+
+    # Group contributions by language and API surface
+    stub_libraries = collate_contributions(contributions)
+
+    # Initialize the ninja file writer
+    with open(context.out.api_ninja_file(), "w") as ninja_file:
+        ninja = ninja_tools.Ninja(context, ninja_file)
+
+        # Initialize the build file writer
+        build_file = BuildFile() # TODO: parameters?
+
+        # Iterate through all of the stub libraries and generate rules to assemble them
+        # and Android.bp/BUILD files to make those available to inner trees.
+        # TODO: Parallelize? Skip unnecessary work?
+        for stub_library in stub_libraries:
+            STUB_LANGUAGE_HANDLERS[stub_library.language](context, ninja, build_file, stub_library)
+
+        # TODO: Handle host_executables separately or as a StubLibrary language?
+
+        # Finish writing the ninja file
+        ninja.write()
+
+
+def api_contribution_files_for_inner_tree(tree_key, inner_tree, cookie):
+    "Scan an inner_tree's out dir for the api contribution files."
+    directory = inner_tree.out.api_contributions_dir()
+    result = []
+    with os.scandir(directory) as it:
+        for dirent in it:
+            if not dirent.is_file():
+                continue  # skip subdirectories and other non-file entries
+            if dirent.name.endswith(".json"):
+                result.append(os.path.join(directory, dirent.name))
+    return result
+
+
+def load_contribution_file(context, filename):
+    "Load and return the API contribution at filename. On error report error and return None."
+    with open(filename) as f:
+        try:
+            return json.load(f)
+        except json.decoder.JSONDecodeError as ex:
+            # TODO: Error reporting
+            context.errors.error(ex.msg, filename, ex.lineno, ex.colno)
+            return None  # callers skip contributions that fail to load
+
+
+class StubLibraryContribution(object):
+    def __init__(self, inner_tree, api_domain, library_contribution):
+        self.inner_tree = inner_tree
+        self.api_domain = api_domain
+        self.library_contribution = library_contribution
+
+
+class StubLibrary(object):
+    def __init__(self, language, api_surface, api_surface_version, name):
+        self.language = language
+        self.api_surface = api_surface
+        self.api_surface_version = api_surface_version
+        self.name = name
+        self.contributions = []
+
+    def add_contribution(self, contrib):
+        self.contributions.append(contrib)
+
+
+def collate_contributions(contributions):
+    """Take the list of parsed API contribution files, and group targets by API Surface, version,
+    language and library name, and return a StubLibrary object for each of those.
+    """
+    grouped = {}
+    for contribution in contributions:
+        for language in STUB_LANGUAGE_HANDLERS.keys():
+            for library in contribution.json_data.get(language, []):
+                key = (language, contribution.json_data["name"],
+                        contribution.json_data["version"], library["name"])
+                stub_library = grouped.get(key)
+                if not stub_library:
+                    stub_library = StubLibrary(language, contribution.json_data["name"],
+                            contribution.json_data["version"], library["name"])
+                    grouped[key] = stub_library
+                stub_library.add_contribution(StubLibraryContribution(contribution.inner_tree,
+                        contribution.json_data["api_domain"], library))
+    return list(grouped.values())
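For concreteness, a contribution file shaped like what `collate_contributions()` and the language handlers consume might look like this (all values hypothetical):

```python
example_contribution = {
    "name": "publicapi",    # API surface
    "version": 33,          # API surface version
    "api_domain": "system",
    "cc_libraries": [
        {
            "name": "libhypothetical",
            "headers": [
                {"root": "interfaces/libhypothetical/include",
                 "files": ["hypothetical.h"]},
            ],
            "api": "interfaces/libhypothetical/libhypothetical.map.txt",
        },
    ],
}
# collate_contributions() groups this under the key
# ("cc_libraries", "publicapi", 33, "libhypothetical").
```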
+
+
+def assemble_java_api_library(context, ninja, build_file, stub_library):
+    print("assembling java_api_library %s-%s %s from:" % (stub_library.api_surface,
+            stub_library.api_surface_version, stub_library.name))
+    for contrib in stub_library.contributions:
+        print("  %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
+    # TODO: Implement me
+
+
+def assemble_resource_api_library(context, ninja, build_file, stub_library):
+    print("assembling resource_api_library %s-%s %s from:" % (stub_library.api_surface,
+            stub_library.api_surface_version, stub_library.name))
+    for contrib in stub_library.contributions:
+        print("  %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
+    # TODO: Implement me
+
+
+STUB_LANGUAGE_HANDLERS = {
+    "cc_libraries": api_assembly_cc.assemble_cc_api_library,
+    "java_libraries": assemble_java_api_library,
+    "resource_libraries": assemble_resource_api_library,
+}
+
+
+class BuildFile(object):
+    "Abstract generator for Android.bp files and BUILD files."
+    pass
+
+
diff --git a/orchestrator/core/api_assembly_cc.py b/orchestrator/core/api_assembly_cc.py
new file mode 100644
index 0000000..ca9b2a4
--- /dev/null
+++ b/orchestrator/core/api_assembly_cc.py
@@ -0,0 +1,48 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+def assemble_cc_api_library(context, ninja, build_file, stub_library):
+    staging_dir = context.out.api_library_dir(stub_library.api_surface,
+            stub_library.api_surface_version, stub_library.name)
+    work_dir = context.out.api_library_work_dir(stub_library.api_surface,
+            stub_library.api_surface_version, stub_library.name)
+
+    # Generate rules to copy headers
+    includes = []
+    include_dir = os.path.join(staging_dir, "include")
+    for contrib in stub_library.contributions:
+        for headers in contrib.library_contribution["headers"]:
+            root = headers["root"]
+            for file in headers["files"]:
+                # TODO: Deal with collisions of the same name from multiple contributions
+                include = os.path.join(include_dir, file)
+                ninja.add_copy_file(include, os.path.join(contrib.inner_tree.root, root, file))
+                includes.append(include)
+
+    # Generate rule to run ndkstubgen
+
+    # Generate rule to compile stubs to library
+
+    # Generate phony rule to build the library
+    # TODO: This name probably conflicts with something
+    ninja.add_phony("-".join((stub_library.api_surface, str(stub_library.api_surface_version),
+            stub_library.name)), includes)
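+    # (Illustrative: for surface "systemapi", version 1, library "libfoo",
+    # this creates a phony named "systemapi-1-libfoo".)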
+
+    # Generate build files
+
diff --git a/orchestrator/core/api_domain.py b/orchestrator/core/api_domain.py
new file mode 100644
index 0000000..bb7306c
--- /dev/null
+++ b/orchestrator/core/api_domain.py
@@ -0,0 +1,28 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+class ApiDomain(object):
+    def __init__(self, name, tree, product):
+        # Product will be None for modules
+        self.name = name
+        self.tree = tree
+        self.product = product
+
+    def __str__(self):
+        return "ApiDomain(name=\"%s\" tree.root=\"%s\" product=%s)" % (
+                self.name, self.tree.root,
+                "None" if self.product is None else "\"%s\"" % self.product)
+
diff --git a/orchestrator/core/api_export.py b/orchestrator/core/api_export.py
new file mode 100644
index 0000000..2f26b02
--- /dev/null
+++ b/orchestrator/core/api_export.py
@@ -0,0 +1,20 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def export_apis_from_tree(tree_key, inner_tree, cookie):
+    inner_tree.invoke(["export_api_contributions"])
+
+
diff --git a/orchestrator/core/final_packaging.py b/orchestrator/core/final_packaging.py
new file mode 100644
index 0000000..03fe890
--- /dev/null
+++ b/orchestrator/core/final_packaging.py
@@ -0,0 +1,117 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+import sys
+
+import ninja_tools
+import ninja_syntax # Has to be after ninja_tools because of the path hack
+
+def final_packaging(context, inner_trees):
+    """Pull together all of the previously defined rules into the final build stems."""
+
+    with open(context.out.outer_ninja_file(), "w") as ninja_file:
+        ninja = ninja_tools.Ninja(context, ninja_file)
+
+        # Add the api surfaces file
+        ninja.add_subninja(ninja_syntax.Subninja(context.out.api_ninja_file(), chDir=None))
+
+        # For each inner tree
+        for tree in inner_trees.keys():
+            # TODO: Verify that inner_tree.ninja was generated
+
+            # Read and verify file
+            build_targets = read_build_targets_json(context, tree)
+            if not build_targets:
+                continue
+
+            # Generate the ninja and build files for this inner tree
+            generate_cross_domain_build_rules(context, ninja, tree, build_targets)
+
+        # Finish writing the ninja file
+        ninja.write()
+
+
+def read_build_targets_json(context, tree):
+    """Read and validate the build_targets.json file for the given tree."""
+    try:
+        f = open(tree.out.build_targets_file())
+    except FileNotFoundError:
+        # It's allowed not to have any artifacts (e.g. if a tree is a light tree with only APIs)
+        return None
+
+    data = None
+    with f:
+        try:
+            data = json.load(f)
+        except json.decoder.JSONDecodeError as ex:
+            sys.stderr.write("Error parsing file: %s\n" % tree.out.build_targets_file())
+            # TODO: Error reporting
+            raise ex
+
+    # TODO: Better error handling
+    # TODO: Validate json schema
+    return data
+
+
+def generate_cross_domain_build_rules(context, ninja, tree, build_targets):
+    "Generate the ninja and build files for the inner tree."
+    # Include the inner tree's inner_tree.ninja
+    ninja.add_subninja(ninja_syntax.Subninja(tree.out.main_ninja_file(), chDir=tree.root))
+
+    # Generate module rules and files
+    for module in build_targets.get("modules", []):
+        generate_shared_module(context, ninja, tree, module)
+
+    # Generate staging rules
+    staging_dir = context.out.staging_dir()
+    for staged in build_targets.get("staging", []):
+        # TODO: Enforce that dest isn't in disallowed subdir of out or absolute
+        dest = staged["dest"]
+        dest = os.path.join(staging_dir, dest)
+        if "src" in staged and "obj" in staged:
+            context.errors.error("Can't have both \"src\" and \"obj\" tags in \"staging\" entry."
+                    ) # TODO: Filename and line if possible
+        if "src" in staged:
+            ninja.add_copy_file(dest, os.path.join(tree.root, staged["src"]))
+        elif "obj" in staged:
+            ninja.add_copy_file(dest, os.path.join(tree.out.root(), staged["obj"]))
+        ninja.add_global_phony("staging", [dest])
+
+    # Generate dist rules
+    dist_dir = context.out.dist_dir()
+    for disted in build_targets.get("dist", []):
+        # TODO: Enforce that dest isn't absolute
+        dest = disted["dest"]
+        dest = os.path.join(dist_dir, dest)
+        ninja.add_copy_file(dest, os.path.join(tree.root, disted["src"]))
+        ninja.add_global_phony("dist", [dest])
+
+
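+# Illustrative build_targets.json consumed above (assumed schema, inferred
+# from the keys this file reads):
+#   {
+#     "modules": [{"name": "com.android.foo", "type": "apex",
+#                  "file": "out/.../com.android.foo.apex"}],
+#     "staging": [{"dest": "system/apex/foo.apex", "obj": "apex/foo.apex"}],
+#     "dist": [{"dest": "foo.zip", "src": "out/dist/foo.zip"}]
+#   }
+
+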
+def generate_shared_module(context, ninja, tree, module):
+    """Generate ninja rules for the given build_targets.json defined module."""
+    module_name = module["name"]
+    module_type = module["type"]
+    share_dir = context.out.module_share_dir(module_type, module_name)
+    src_file = os.path.join(tree.root, module["file"])
+
+    if module_type == "apex":
+        ninja.add_copy_file(os.path.join(share_dir, module_name + ".apex"), src_file)
+        # TODO: Generate build file
+
+    else:
+        # TODO: Better error handling
+        raise Exception("Invalid module type: %s" % module_type)
diff --git a/orchestrator/core/inner_tree.py b/orchestrator/core/inner_tree.py
new file mode 100644
index 0000000..d348ee7
--- /dev/null
+++ b/orchestrator/core/inner_tree.py
@@ -0,0 +1,193 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import sys
+import textwrap
+
+class InnerTreeKey(object):
+    """Trees are identified uniquely by their root and the TARGET_PRODUCT they will use to build.
+    If a single tree uses two different products, then we won't make assumptions about
+    them sharing _anything_.
+    TODO: This is true for soong. It's more likely that bazel could do analysis for two
+    products at the same time in a single tree, so there's an optimization there to do
+    eventually."""
+    def __init__(self, root, product):
+        self.root = root
+        self.product = product
+
+    def __str__(self):
+        return "TreeKey(root=%s product=%s)" % (enquote(self.root), enquote(self.product))
+
+    def __hash__(self):
+        return hash((self.root, self.product))
+
+    def _cmp(self, other):
+        if self.root < other.root:
+            return -1
+        if self.root > other.root:
+            return 1
+        if self.product == other.product:
+            return 0
+        if self.product is None:
+            return -1
+        if other.product is None:
+            return 1
+        if self.product < other.product:
+            return -1
+        return 1
+
+    def __eq__(self, other):
+        return self._cmp(other) == 0
+
+    def __ne__(self, other):
+        return self._cmp(other) != 0
+
+    def __lt__(self, other):
+        return self._cmp(other) < 0
+
+    def __le__(self, other):
+        return self._cmp(other) <= 0
+
+    def __gt__(self, other):
+        return self._cmp(other) > 0
+
+    def __ge__(self, other):
+        return self._cmp(other) >= 0
+
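+# Ordering note: keys sort by root first, then by product, with a None product
+# ordering before any named product; e.g.
+#   InnerTreeKey("aosp", None) < InnerTreeKey("aosp", "aosp_cf_arm64")
+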
+
+class InnerTree(object):
+    def __init__(self, context, root, product):
+        """Initialize with the inner tree root (relative to the workspace root)"""
+        self.root = root
+        self.product = product
+        self.domains = {}
+        # TODO: Base directory on OUT_DIR
+        out_root = context.out.inner_tree_dir(root)
+        if product:
+            out_root += "_" + product
+        else:
+            out_root += "_unbundled"
+        self.out = OutDirLayout(out_root)
+
+    def __str__(self):
+        return "InnerTree(root=%s product=%s domains=[%s])" % (enquote(self.root),
+                enquote(self.product),
+                " ".join([enquote(d) for d in sorted(self.domains.keys())]))
+
+    def invoke(self, args):
+        """Call the inner tree command for this inner tree. Exits on failure."""
+        # TODO: Build time tracing
+
+        # Validate that there is a .inner_build command to run at the root of the tree
+        # so we can print a good error message
+        inner_build_tool = os.path.join(self.root, ".inner_build")
+        if not os.access(inner_build_tool, os.X_OK):
+            sys.stderr.write(("Unable to execute %s. Is there an inner tree or lunch combo"
+                    + " misconfiguration?\n") % inner_build_tool)
+            sys.exit(1)
+
+        # TODO: This is where we should set up the shared trees
+
+        # Build the command
+        cmd = [inner_build_tool, "--out_dir", self.out.root()]
+        for domain_name in sorted(self.domains.keys()):
+            cmd.append("--api_domain")
+            cmd.append(domain_name)
+        cmd += args
+
+        # Run the command
+        process = subprocess.run(cmd, shell=False)
+
+        # TODO: Probably want better handling of inner tree failures
+        if process.returncode:
+            sys.stderr.write("Build error in inner tree: %s\nstopping multitree build.\n"
+                    % self.root)
+            sys.exit(1)
+
+
+class InnerTrees(object):
+    def __init__(self, trees, domains):
+        self.trees = trees
+        self.domains = domains
+
+    def __str__(self):
+        "Return a debugging dump of this object"
+        return textwrap.dedent("""\
+        InnerTrees {
+            trees: [
+                %(trees)s
+            ]
+            domains: [
+                %(domains)s
+            ]
+        }""" % {
+            "trees": "\n        ".join(sorted([str(t) for t in self.trees.values()])),
+            "domains": "\n        ".join(sorted([str(d) for d in self.domains.values()])),
+        })
+
+
+    def for_each_tree(self, func, cookie=None):
+        """Call func for each of the inner trees once for each product that will be built in it.
+
+        The calls will be in a stable order.
+
+        Return a map of the InnerTreeKey to any results returned from func().
+        """
+        result = {}
+        for key in sorted(self.trees.keys()):
+            result[key] = func(key, self.trees[key], cookie)
+        return result
+
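+    # Usage sketch (as the orchestrator does for each build phase):
+    #   results = inner_trees.for_each_tree(interrogate.interrogate_tree)
+    #   # results maps each InnerTreeKey to the callback's return value
+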
+
+    def get(self, tree_key):
+        """Get an inner tree for tree_key"""
+        return self.trees.get(tree_key)
+
+    def keys(self):
+        "Get the keys for the inner trees in name order."
+        return [self.trees[k] for k in sorted(self.trees.keys())]
+
+
+class OutDirLayout(object):
+    """Encapsulates the logic about the layout of the inner tree out directories.
+    See also context.OutDir for outer tree out dir contents."""
+
+    def __init__(self, root):
+        "Initialize with the root of the OUT_DIR for the inner tree."
+        self._root = root
+
+    def root(self):
+        return self._root
+
+    def tree_info_file(self):
+        return os.path.join(self._root, "tree_info.json")
+
+    def api_contributions_dir(self):
+        return os.path.join(self._root, "api_contributions")
+
+    def build_targets_file(self):
+        return os.path.join(self._root, "build_targets.json")
+
+    def main_ninja_file(self):
+        return os.path.join(self._root, "inner_tree.ninja")
+
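+# Resulting layout under an inner tree's out dir (from the methods above):
+#   <root>/tree_info.json
+#   <root>/api_contributions/*.json
+#   <root>/build_targets.json
+#   <root>/inner_tree.ninja
+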
+
+def enquote(s):
+    return "None" if s is None else "\"%s\"" % s
+
+
diff --git a/orchestrator/core/interrogate.py b/orchestrator/core/interrogate.py
new file mode 100644
index 0000000..9fe769e
--- /dev/null
+++ b/orchestrator/core/interrogate.py
@@ -0,0 +1,29 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+
+def interrogate_tree(tree_key, inner_tree, cookie):
+    inner_tree.invoke(["describe"])
+
+    info_json_filename = inner_tree.out.tree_info_file()
+
+    # TODO: Error handling
+    with open(info_json_filename) as f:
+        info_json = json.load(f)
+
+    # TODO: Check orchestrator protocol
+
diff --git a/orchestrator/core/lunch.py b/orchestrator/core/lunch.py
index 35dac73..a648478 100755
--- a/orchestrator/core/lunch.py
+++ b/orchestrator/core/lunch.py
@@ -24,8 +24,10 @@
 EXIT_STATUS_ERROR = 1
 EXIT_STATUS_NEED_HELP = 2
 
-def FindDirs(path, name, ttl=6):
-    """Search at most ttl directories deep inside path for a directory called name."""
+
+def find_dirs(path, name, ttl=6):
+    """Search at most ttl directories deep inside path for a directory called name
+    and yield directories that match."""
     # The dance with subdirs is so that we recurse in sorted order.
     subdirs = []
     with os.scandir(path) as it:
@@ -40,10 +42,10 @@
                 # Consume filesystem errors, e.g. too many links, permission etc.
                 pass
     for subdir in subdirs:
-        yield from FindDirs(os.path.join(path, subdir), name, ttl-1)
+        yield from find_dirs(os.path.join(path, subdir), name, ttl-1)
 
 
-def WalkPaths(path, matcher, ttl=10):
+def walk_paths(path, matcher, ttl=10):
     """Do a traversal of all files under path yielding each file that matches
     matcher."""
     # First look for files, then recurse into directories as needed.
@@ -62,22 +64,22 @@
                 # Consume filesystem errors, e.g. too many links, permission etc.
                 pass
     for subdir in sorted(subdirs):
-        yield from WalkPaths(os.path.join(path, subdir), matcher, ttl-1)
+        yield from walk_paths(os.path.join(path, subdir), matcher, ttl-1)
 
 
-def FindFile(path, filename):
+def find_file(path, filename):
     """Return a file called filename inside path, no more than ttl levels deep.
 
     Directories are searched alphabetically.
     """
-    for f in WalkPaths(path, lambda x: x == filename):
+    for f in walk_paths(path, lambda x: x == filename):
         return f
 
 
-def FindConfigDirs(workspace_root):
+def find_config_dirs(workspace_root):
     """Find the configuration files in the well known locations inside workspace_root
 
-        <workspace_root>/build/orchestrator/multitree_combos
+        <workspace_root>/build/build/orchestrator/multitree_combos
            (AOSP devices, such as cuttlefish)
 
         <workspace_root>/vendor/**/multitree_combos
@@ -89,29 +91,30 @@
     Directories are returned specifically in this order, so that aosp can't be
     overridden, but vendor overrides device.
     """
+    # TODO: This is not looking in inner trees correctly.
 
     # TODO: When orchestrator is in its own git project remove the "make/" here
-    yield os.path.join(workspace_root, "build/make/orchestrator/multitree_combos")
+    yield os.path.join(workspace_root, "build/build/make/orchestrator/multitree_combos")
 
     dirs = ["vendor", "device"]
     for d in dirs:
-        yield from FindDirs(os.path.join(workspace_root, d), "multitree_combos")
+        yield from find_dirs(os.path.join(workspace_root, d), "multitree_combos")
 
 
-def FindNamedConfig(workspace_root, shortname):
+def find_named_config(workspace_root, shortname):
     """Find the config with the given shortname inside workspace_root.
 
-    Config directories are searched in the order described in FindConfigDirs,
+    Config directories are searched in the order described in find_config_dirs,
     and inside those directories, alphabetically."""
     filename = shortname + ".mcombo"
-    for config_dir in FindConfigDirs(workspace_root):
-        found = FindFile(config_dir, filename)
+    for config_dir in find_config_dirs(workspace_root):
+        found = find_file(config_dir, filename)
         if found:
             return found
     return None
 
 
-def ParseProductVariant(s):
+def parse_product_variant(s):
     """Split a PRODUCT-VARIANT name, or return None if it doesn't match that pattern."""
     split = s.split("-")
     if len(split) != 2:
@@ -119,15 +122,15 @@
     return split
 
 
-def ChooseConfigFromArgs(workspace_root, args):
+def choose_config_from_args(workspace_root, args):
     """Return the config file we should use for the given argument,
     or null if there's no file that matches that."""
     if len(args) == 1:
         # Prefer PRODUCT-VARIANT syntax so if there happens to be a matching
         # file we don't match that.
-        pv = ParseProductVariant(args[0])
+        pv = parse_product_variant(args[0])
         if pv:
-            config = FindNamedConfig(workspace_root, pv[0])
+            config = find_named_config(workspace_root, pv[0])
             if config:
                 return (config, pv[1])
             return None, None
@@ -139,10 +142,12 @@
 
 
 class ConfigException(Exception):
+    ERROR_IDENTIFY = "identify"
     ERROR_PARSE = "parse"
     ERROR_CYCLE = "cycle"
+    ERROR_VALIDATE = "validate"
 
-    def __init__(self, kind, message, locations, line=0):
+    def __init__(self, kind, message, locations=(), line=0):
         """Error thrown when loading and parsing configurations.
 
         Args:
@@ -169,13 +174,13 @@
         self.line = line
 
 
-def LoadConfig(filename):
+def load_config(filename):
     """Load a config, including processing the inherits fields.
 
     Raises:
         ConfigException on errors
     """
-    def LoadAndMerge(fn, visited):
+    def load_and_merge(fn, visited):
         with open(fn) as f:
             try:
                 contents = json.load(f)
@@ -191,34 +196,74 @@
                 if parent in visited:
                     raise ConfigException(ConfigException.ERROR_CYCLE, "Cycle detected in inherits",
                             visited)
-                DeepMerge(inherited_data, LoadAndMerge(parent, [parent,] + visited))
+                deep_merge(inherited_data, load_and_merge(parent, [parent,] + visited))
             # Then merge inherited_data into contents, but what's already there will win.
-            DeepMerge(contents, inherited_data)
+            deep_merge(contents, inherited_data)
             contents.pop("inherits", None)
         return contents
-    return LoadAndMerge(filename, [filename,])
+    return load_and_merge(filename, [filename,])
 
 
-def DeepMerge(merged, addition):
+def deep_merge(merged, addition):
     """Merge all fields of addition into merged. Pre-existing fields win."""
     for k, v in addition.items():
         if k in merged:
             if isinstance(v, dict) and isinstance(merged[k], dict):
-                DeepMerge(merged[k], v)
+                deep_merge(merged[k], v)
         else:
             merged[k] = v
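+# Example: pre-existing fields win, and nested dicts merge recursively:
+#   merged = {"a": 1, "d": {"x": 1}}
+#   deep_merge(merged, {"a": 2, "b": 3, "d": {"x": 9, "y": 2}})
+#   # merged is now {"a": 1, "b": 3, "d": {"x": 1, "y": 2}}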
 
 
-def Lunch(args):
+def make_config_header(config_file, config, variant):
+    def make_table(rows):
+        maxcols = max([len(row) for row in rows])
+        widths = [0] * maxcols
+        for row in rows:
+            for i in range(len(row)):
+                widths[i] = max(widths[i], len(row[i]))
+        text = []
+        for row in rows:
+            rowtext = []
+            for i in range(len(row)):
+                cell = row[i]
+                rowtext.append(str(cell))
+                rowtext.append(" " * (widths[i] - len(cell)))
+                rowtext.append("  ")
+            text.append("".join(rowtext))
+        return "\n".join(text)
+
+    trees = [("Component", "Path", "Product"),
+             ("---------", "----", "-------")]
+    def add_config_tuple(trees, entry, name):
+        if entry:
+            trees.append((name, entry.get("tree"), entry.get("product", "")))
+    add_config_tuple(trees, config.get("system"), "system")
+    add_config_tuple(trees, config.get("vendor"), "vendor")
+    for k, v in config.get("modules", {}).items():
+        add_config_tuple(trees, v, k)
+
+    return """========================================
+TARGET_BUILD_COMBO=%(TARGET_BUILD_COMBO)s
+TARGET_BUILD_VARIANT=%(TARGET_BUILD_VARIANT)s
+
+%(trees)s
+========================================\n""" % {
+        "TARGET_BUILD_COMBO": config_file,
+        "TARGET_BUILD_VARIANT": variant,
+        "trees": make_table(trees),
+    }
+
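+# Rendered sample (illustrative values):
+#   ========================================
+#   TARGET_BUILD_COMBO=vendor/aa/bb/multitree_combos/v.mcombo
+#   TARGET_BUILD_VARIANT=eng
+#
+#   Component  Path    Product
+#   ---------  ----    -------
+#   system     master  aosp_cf_arm64
+#   ========================================
+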
+
+def do_lunch(args):
     """Handle the lunch command."""
-    # Check that we're at the top of a multitree workspace
-    # TODO: Choose the right sentinel file
-    if not os.path.exists("build/make/orchestrator"):
+    # Check that we're at the top of a multitree workspace by seeing if this script exists.
+    if not os.path.exists("build/build/make/orchestrator/core/lunch.py"):
         sys.stderr.write("ERROR: lunch.py must be run from the root of a multi-tree workspace\n")
         return EXIT_STATUS_ERROR
 
     # Choose the config file
-    config_file, variant = ChooseConfigFromArgs(".", args)
+    config_file, variant = choose_config_from_args(".", args)
 
     if config_file == None:
         sys.stderr.write("Can't find lunch combo file for: %s\n" % " ".join(args))
@@ -229,7 +274,7 @@
 
     # Parse the config file
     try:
-        config = LoadConfig(config_file)
+        config = load_config(config_file)
     except ConfigException as ex:
         sys.stderr.write(str(ex))
         return EXIT_STATUS_ERROR
@@ -244,47 +289,81 @@
     sys.stdout.write("%s\n" % config_file)
     sys.stdout.write("%s\n" % variant)
 
+    # Write confirmation message to stderr
+    sys.stderr.write(make_config_header(config_file, config, variant))
+
     return EXIT_STATUS_OK
 
 
-def FindAllComboFiles(workspace_root):
+def find_all_combo_files(workspace_root):
     """Find all .mcombo files in the prescribed locations in the tree."""
-    for dir in FindConfigDirs(workspace_root):
-        for file in WalkPaths(dir, lambda x: x.endswith(".mcombo")):
+    for dir in find_config_dirs(workspace_root):
+        for file in walk_paths(dir, lambda x: x.endswith(".mcombo")):
             yield file
 
 
-def IsFileLunchable(config_file):
+def is_file_lunchable(config_file):
     """Parse config_file, flatten the inheritance, and return whether it can be
     used as a lunch target."""
     try:
-        config = LoadConfig(config_file)
+        config = load_config(config_file)
     except ConfigException as ex:
         sys.stderr.write("%s" % ex)
         return False
     return config.get("lunchable", False)
 
 
-def FindAllLunchable(workspace_root):
+def find_all_lunchable(workspace_root):
     """Find all mcombo files in the tree (rooted at workspace_root) that when
     parsed (and inheritance is flattened) have lunchable: true."""
-    for f in [x for x in FindAllComboFiles(workspace_root) if IsFileLunchable(x)]:
+    for f in [x for x in find_all_combo_files(workspace_root) if is_file_lunchable(x)]:
         yield f
 
 
-def List():
+def load_current_config():
+    """Load, validate and return the config as specified in TARGET_BUILD_COMBO.  Throws
+    ConfigException if there is a problem."""
+
+    # Identify the config file
+    config_file = os.environ.get("TARGET_BUILD_COMBO")
+    if not config_file:
+        raise ConfigException(ConfigException.ERROR_IDENTIFY,
+                "TARGET_BUILD_COMBO not set. Run lunch or pass a combo file.")
+
+    # Parse the config file
+    config = load_config(config_file)
+
+    # Validate the config file
+    if not config.get("lunchable", False):
+        raise ConfigException(ConfigException.ERROR_VALIDATE,
+                "Lunch config file (or inherited files) does not have the 'lunchable'"
+                    + " flag set, which means it is probably not a complete lunch spec.",
+                [config_file,])
+
+    # TODO: Validate that:
+    #   - there are no modules called system or vendor
+    #   - everything has all the required files
+
+    variant = os.environ.get("TARGET_BUILD_VARIANT")
+    if not variant:
+        variant = "eng" # TODO: Is this the right default?
+    # TODO: Validate that variant is user, userdebug or eng
+
+    return config_file, config, variant
+
+def do_list():
     """Handle the --list command."""
-    for f in sorted(FindAllLunchable(".")):
+    for f in sorted(find_all_lunchable(".")):
         print(f)
 
 
-def Print(args):
+def do_print(args):
     """Handle the --print command."""
     # Parse args
     if len(args) == 0:
         config_file = os.environ.get("TARGET_BUILD_COMBO")
         if not config_file:
-            sys.stderr.write("TARGET_BUILD_COMBO not set. Run lunch or pass a combo file.\n")
+            sys.stderr.write("TARGET_BUILD_COMBO not set. Run lunch before building.\n")
             return EXIT_STATUS_NEED_HELP
     elif len(args) == 1:
         config_file = args[0]
@@ -293,7 +372,7 @@
 
     # Parse the config file
     try:
-        config = LoadConfig(config_file)
+        config = load_config(config_file)
     except ConfigException as ex:
         sys.stderr.write(str(ex))
         return EXIT_STATUS_ERROR
@@ -309,15 +388,15 @@
         return EXIT_STATUS_NEED_HELP
 
     if len(argv) == 2 and argv[1] == "--list":
-        List()
+        do_list()
         return EXIT_STATUS_OK
 
     if len(argv) == 2 and argv[1] == "--print":
-        return Print(argv[2:])
+        return do_print(argv[2:])
         return EXIT_STATUS_OK
 
-    if (len(argv) == 2 or len(argv) == 3) and argv[1] == "--lunch":
-        return Lunch(argv[2:])
+    if (len(argv) == 3 or len(argv) == 4) and argv[1] == "--lunch":
+        return do_lunch(argv[2:])
 
     sys.stderr.write("Unknown lunch command: %s\n" % " ".join(argv[1:]))
     return EXIT_STATUS_NEED_HELP
diff --git a/orchestrator/core/ninja_runner.py b/orchestrator/core/ninja_runner.py
new file mode 100644
index 0000000..ab81d66
--- /dev/null
+++ b/orchestrator/core/ninja_runner.py
@@ -0,0 +1,37 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+import sys
+
+def run_ninja(context, targets):
+    """Run ninja.
+    """
+
+    # Construct the command
+    cmd = [
+            context.tools.ninja(),
+            "-f",
+            context.out.outer_ninja_file(),
+        ] + targets
+
+    # Run the command
+    process = subprocess.run(cmd, shell=False)
+
+    # TODO: Probably want better handling of inner tree failures
+    if process.returncode:
+        sys.stderr.write("Build error in outer tree.\nstopping multitree build.\n")
+        sys.exit(1)
+
diff --git a/orchestrator/core/ninja_tools.py b/orchestrator/core/ninja_tools.py
new file mode 100644
index 0000000..16101ea
--- /dev/null
+++ b/orchestrator/core/ninja_tools.py
@@ -0,0 +1,59 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+
+# Workaround for python include path
+_ninja_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), "..", "ninja"))
+if _ninja_dir not in sys.path:
+    sys.path.append(_ninja_dir)
+import ninja_writer
+from ninja_syntax import Variable, BuildAction, Rule, Pool, Subninja, Line
+
+
+class Ninja(ninja_writer.Writer):
+    """Some higher level constructs on top of raw ninja writing.
+    TODO: Not sure where these should be."""
+    def __init__(self, context, file):
+        super(Ninja, self).__init__(file)
+        self._context = context
+        self._did_copy_file = False
+        self._phonies = {}
+
+    def add_copy_file(self, copy_to, copy_from):
+        if not self._did_copy_file:
+            self._did_copy_file = True
+            rule = Rule("copy_file")
+            rule.add_variable("command", "mkdir -p ${out_dir} && " + self._context.tools.acp()
+                    + " -f ${in} ${out}")
+            self.add_rule(rule)
+        build_action = BuildAction(copy_to, "copy_file", inputs=[copy_from,],
+                implicits=[self._context.tools.acp()])
+        build_action.add_variable("out_dir", os.path.dirname(copy_to))
+        self.add_build_action(build_action)
+
+    def add_global_phony(self, name, deps):
+        """Add a phony target where there are multiple places that will want to add to
+        the same phony. If you can, to save memory, use add_phony instead of this function."""
+        if not isinstance(deps, (list, tuple)):
+            raise Exception("Assertion failed: bad type of deps: %s" % type(deps))
+        self._phonies.setdefault(name, []).extend(deps)
+
+    def write(self):
+        for phony, deps in self._phonies.items():
+            self.add_phony(phony, deps)
+        super(Ninja, self).write()
+
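+# Usage sketch (mirrors final_packaging.py):
+#   ninja = Ninja(context, ninja_file)
+#   ninja.add_copy_file("out/staging/a.txt", "tree/a.txt")
+#   ninja.add_global_phony("staging", ["out/staging/a.txt"])
+#   ninja.write()  # emits accumulated phonies before finishing the file
+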
+
diff --git a/orchestrator/core/orchestrator.py b/orchestrator/core/orchestrator.py
new file mode 100755
index 0000000..508f73a
--- /dev/null
+++ b/orchestrator/core/orchestrator.py
@@ -0,0 +1,119 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import sys
+
+sys.dont_write_bytecode = True
+import api_assembly
+import api_domain
+import api_export
+import final_packaging
+import inner_tree
+import tree_analysis
+import interrogate
+import lunch
+import ninja_runner
+import utils
+
+EXIT_STATUS_OK = 0
+EXIT_STATUS_ERROR = 1
+
+API_DOMAIN_SYSTEM = "system"
+API_DOMAIN_VENDOR = "vendor"
+API_DOMAIN_MODULE = "module"
+
+def process_config(context, lunch_config):
+    """Returns a InnerTrees object based on the configuration requested in the lunch config."""
+    def add(domain_name, tree_root, product):
+        tree_key = inner_tree.InnerTreeKey(tree_root, product)
+        if tree_key in trees:
+            tree = trees[tree_key]
+        else:
+            tree = inner_tree.InnerTree(context, tree_root, product)
+            trees[tree_key] = tree
+        domain = api_domain.ApiDomain(domain_name, tree, product)
+        domains[domain_name] = domain
+        tree.domains[domain_name] = domain
+
+    trees = {}
+    domains = {}
+
+    system_entry = lunch_config.get("system")
+    if system_entry:
+        add(API_DOMAIN_SYSTEM, system_entry["tree"], system_entry["product"])
+
+    vendor_entry = lunch_config.get("vendor")
+    if vendor_entry:
+        add(API_DOMAIN_VENDOR, vendor_entry["tree"], vendor_entry["product"])
+
+    for module_name, module_entry in lunch_config.get("modules", []).items():
+        add(module_name, module_entry["tree"], None)
+
+    return inner_tree.InnerTrees(trees, domains)
+
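+# Illustrative lunch config consumed above (assumed mcombo schema):
+#   {
+#     "lunchable": true,
+#     "system": {"tree": "aosp", "product": "aosp_cf_arm64_phone"},
+#     "vendor": {"tree": "vendor", "product": "cf_arm64_phone"},
+#     "modules": {"com.android.bionic": {"tree": "module-tree"}}
+#   }
+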
+
+def build():
+    # Choose the out directory, set up error handling, etc.
+    context = utils.Context(utils.choose_out_dir(), utils.Errors(sys.stderr))
+
+    # Read the lunch config file
+    try:
+        config_file, config, variant = lunch.load_current_config()
+    except lunch.ConfigException as ex:
+        sys.stderr.write("%s\n" % ex)
+        return EXIT_STATUS_ERROR
+    sys.stdout.write(lunch.make_config_header(config_file, config, variant))
+
+    # Construct the trees and domains dicts
+    inner_trees = process_config(context, config)
+
+    # 1. Interrogate the trees
+    inner_trees.for_each_tree(interrogate.interrogate_tree)
+    # TODO: Detect bazel-only mode
+
+    # 2a. API Export
+    inner_trees.for_each_tree(api_export.export_apis_from_tree)
+
+    # 2b. API Surface Assembly
+    api_assembly.assemble_apis(context, inner_trees)
+
+    # 3a. Inner tree analysis
+    tree_analysis.analyze_trees(context, inner_trees)
+
+    # 3b. Final Packaging Rules
+    final_packaging.final_packaging(context, inner_trees)
+
+    # 4. Build Execution
+    # TODO: Decide what we want the UX for selecting targets to be across
+    # branches... since there are very likely to be conflicting soong short
+    # names.
+    print("Running ninja...")
+    targets = ["staging", "system"]
+    ninja_runner.run_ninja(context, targets)
+
+    # Success!
+    return EXIT_STATUS_OK
+
+def main(argv):
+    return build()
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv))
+
+
+# vim: sts=4:ts=4:sw=4
diff --git a/orchestrator/core/test_lunch.py b/orchestrator/core/test_lunch.py
index 3c39493..2d85d05 100755
--- a/orchestrator/core/test_lunch.py
+++ b/orchestrator/core/test_lunch.py
@@ -23,73 +23,73 @@
 class TestStringMethods(unittest.TestCase):
 
     def test_find_dirs(self):
-        self.assertEqual([x for x in lunch.FindDirs("test/configs", "multitree_combos")], [
+        self.assertEqual([x for x in lunch.find_dirs("test/configs", "multitree_combos")], [
                     "test/configs/build/make/orchestrator/multitree_combos",
                     "test/configs/device/aa/bb/multitree_combos",
                     "test/configs/vendor/aa/bb/multitree_combos"])
 
     def test_find_file(self):
         # Finds the one in device first because this is searching from the root,
-        # not using FindNamedConfig.
-        self.assertEqual(lunch.FindFile("test/configs", "v.mcombo"),
+        # not using find_named_config.
+        self.assertEqual(lunch.find_file("test/configs", "v.mcombo"),
                    "test/configs/device/aa/bb/multitree_combos/v.mcombo")
 
     def test_find_config_dirs(self):
-        self.assertEqual([x for x in lunch.FindConfigDirs("test/configs")], [
+        self.assertEqual([x for x in lunch.find_config_dirs("test/configs")], [
                     "test/configs/build/make/orchestrator/multitree_combos",
                     "test/configs/vendor/aa/bb/multitree_combos",
                     "test/configs/device/aa/bb/multitree_combos"])
 
     def test_find_named_config(self):
         # Inside build/orchestrator, overriding device and vendor
-        self.assertEqual(lunch.FindNamedConfig("test/configs", "b"),
+        self.assertEqual(lunch.find_named_config("test/configs", "b"),
                     "test/configs/build/make/orchestrator/multitree_combos/b.mcombo")
 
         # Nested dir inside a combo dir
-        self.assertEqual(lunch.FindNamedConfig("test/configs", "nested"),
+        self.assertEqual(lunch.find_named_config("test/configs", "nested"),
                     "test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo")
 
         # Inside vendor, overriding device
-        self.assertEqual(lunch.FindNamedConfig("test/configs", "v"),
+        self.assertEqual(lunch.find_named_config("test/configs", "v"),
                     "test/configs/vendor/aa/bb/multitree_combos/v.mcombo")
 
         # Inside device
-        self.assertEqual(lunch.FindNamedConfig("test/configs", "d"),
+        self.assertEqual(lunch.find_named_config("test/configs", "d"),
                     "test/configs/device/aa/bb/multitree_combos/d.mcombo")
 
         # Make sure we don't look too deep (for performance)
-        self.assertIsNone(lunch.FindNamedConfig("test/configs", "too_deep"))
+        self.assertIsNone(lunch.find_named_config("test/configs", "too_deep"))
 
 
     def test_choose_config_file(self):
         # Empty string argument
-        self.assertEqual(lunch.ChooseConfigFromArgs("test/configs", [""]),
+        self.assertEqual(lunch.choose_config_from_args("test/configs", [""]),
                     (None, None))
 
         # A PRODUCT-VARIANT name
-        self.assertEqual(lunch.ChooseConfigFromArgs("test/configs", ["v-eng"]),
+        self.assertEqual(lunch.choose_config_from_args("test/configs", ["v-eng"]),
                     ("test/configs/vendor/aa/bb/multitree_combos/v.mcombo", "eng"))
 
         # A PRODUCT-VARIANT name that conflicts with a file
-        self.assertEqual(lunch.ChooseConfigFromArgs("test/configs", ["b-eng"]),
+        self.assertEqual(lunch.choose_config_from_args("test/configs", ["b-eng"]),
                     ("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"))
 
         # A PRODUCT-VARIANT that doesn't exist
-        self.assertEqual(lunch.ChooseConfigFromArgs("test/configs", ["z-user"]),
+        self.assertEqual(lunch.choose_config_from_args("test/configs", ["z-user"]),
                     (None, None))
 
         # An explicit file
-        self.assertEqual(lunch.ChooseConfigFromArgs("test/configs",
+        self.assertEqual(lunch.choose_config_from_args("test/configs",
                         ["test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"]),
                     ("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"))
 
         # An explicit file that doesn't exist
-        self.assertEqual(lunch.ChooseConfigFromArgs("test/configs",
+        self.assertEqual(lunch.choose_config_from_args("test/configs",
                         ["test/configs/doesnt_exist.mcombo", "eng"]),
                     (None, None))
 
         # An explicit file without a variant should fail
-        self.assertEqual(lunch.ChooseConfigFromArgs("test/configs",
+        self.assertEqual(lunch.choose_config_from_args("test/configs",
                         ["test/configs/build/make/orchestrator/multitree_combos/b.mcombo"]),
                     ("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", None))
 
@@ -97,12 +97,12 @@
     def test_config_cycles(self):
         # Test that we catch cycles
         with self.assertRaises(lunch.ConfigException) as context:
-            lunch.LoadConfig("test/configs/parsing/cycles/1.mcombo")
+            lunch.load_config("test/configs/parsing/cycles/1.mcombo")
         self.assertEqual(context.exception.kind, lunch.ConfigException.ERROR_CYCLE)
 
     def test_config_merge(self):
         # Test the merge logic
-        self.assertEqual(lunch.LoadConfig("test/configs/parsing/merge/1.mcombo"), {
+        self.assertEqual(lunch.load_config("test/configs/parsing/merge/1.mcombo"), {
                             "in_1": "1",
                             "in_1_2": "1",
                             "merged": {"merged_1": "1",
@@ -119,7 +119,7 @@
                         })
 
     def test_list(self):
-        self.assertEqual(sorted(lunch.FindAllLunchable("test/configs")),
+        self.assertEqual(sorted(lunch.find_all_lunchable("test/configs")),
                 ["test/configs/build/make/orchestrator/multitree_combos/b.mcombo"])
 
 if __name__ == "__main__":
diff --git a/orchestrator/core/tree_analysis.py b/orchestrator/core/tree_analysis.py
new file mode 100644
index 0000000..052cad6
--- /dev/null
+++ b/orchestrator/core/tree_analysis.py
@@ -0,0 +1,24 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def analyze_trees(context, inner_trees):
+    inner_trees.for_each_tree(run_analysis)
+
+def run_analysis(tree_key, inner_tree, cookie):
+    inner_tree.invoke(["analyze"])
+
diff --git a/orchestrator/core/utils.py b/orchestrator/core/utils.py
new file mode 100644
index 0000000..41310e0
--- /dev/null
+++ b/orchestrator/core/utils.py
@@ -0,0 +1,141 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import platform
+
+class Context(object):
+    """Mockable container for global state."""
+    def __init__(self, out_root, errors):
+        self.out = OutDir(out_root)
+        self.errors = errors
+        self.tools = HostTools()
+
+class TestContext(Context):
+    "Context for testing. The real Context is manually constructed in orchestrator.py."
+
+    def __init__(self, test_work_dir, test_name):
+        super(TestContext, self).__init__(os.path.join(test_work_dir, test_name),
+                Errors(None))
+
+
+class OutDir(object):
+    """Encapsulates the logic about the out directory at the outer-tree level.
+    See also inner_tree.OutDirLayout for inner tree out dir contents."""
+
+    def __init__(self, root):
+        "Initialize with the root of the OUT_DIR for the outer tree."
+        self._out_root = root
+        self._intermediates = "intermediates"
+
+    def root(self):
+        return self._out_root
+
+    def inner_tree_dir(self, tree_root):
+        """Root directory for inner tree inside the out dir."""
+        return os.path.join(self._out_root, "trees", tree_root)
+
+    def api_ninja_file(self):
+        """The ninja file that assembles API surfaces."""
+        return os.path.join(self._out_root, "api_surfaces.ninja")
+
+    def api_library_dir(self, surface, version, library):
+        """Directory for all the contents of a library inside an API surface, including
+        the build files.  Any intermediates should go in api_library_work_dir."""
+        return os.path.join(self._out_root, "api_surfaces", surface, str(version), library)
+
+    def api_library_work_dir(self, surface, version, library):
+        """Intermediates / scratch directory for library inside an API surface."""
+        return os.path.join(self._out_root, self._intermediates, "api_surfaces", surface,
+                str(version), library)
+
+    def outer_ninja_file(self):
+        return os.path.join(self._out_root, "multitree.ninja")
+
+    def module_share_dir(self, module_type, module_name):
+        return os.path.join(self._out_root, "shared", module_type, module_name)
+
+    def staging_dir(self):
+        return os.path.join(self._out_root, "staging")
+
+    def dist_dir(self):
+        "The DIST_DIR provided or out/dist" # TODO: Look at DIST_DIR
+        return os.path.join(self._out_root, "dist")
+
+class Errors(object):
+    """Class for reporting and tracking errors."""
+    def __init__(self, stream):
+        """Initialize Error reporter with a file-like object."""
+        self._stream = stream
+        self._all = []
+
+    def error(self, message, file=None, line=None, col=None):
+        """Record the error message."""
+        s = ""
+        if file:
+            s += str(file)
+            s += ":"
+        if line:
+            s += str(line)
+            s += ":"
+        if col:
+            s += str(col)
+            s += ":"
+        if s:
+            s += " "
+        s += str(message)
+        if s[-1] != "\n":
+            s += "\n"
+        self._all.append(s)
+        if self._stream:
+            self._stream.write(s)
+
+    def had_error(self):
+        """Return if there were any errors reported."""
+        return len(self._all)
+
+    def get_errors(self):
+        """Get all errors that were reported."""
+        return self._all
+
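+# Usage sketch:
+#   errors = Errors(sys.stderr)
+#   errors.error("expected string", file="a.json", line=3, col=7)
+#   # writes and records "a.json:3:7: expected string\n"
+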
+
+class HostTools(object):
+    def __init__(self):
+        if platform.system() == "Linux":
+            self._arch = "linux-x86"
+        else:
+            raise Exception("Orchestrator running on an unknown system: %s" % platform.system())
+
+        # Some of these are called a lot, so pre-compute the strings to save memory
+        self._prebuilts = os.path.join("build", "prebuilts", "build-tools", self._arch, "bin")
+        self._acp = os.path.join(self._prebuilts, "acp")
+        self._ninja = os.path.join(self._prebuilts, "ninja")
+
+    def acp(self):
+        return self._acp
+
+    def ninja(self):
+        return self._ninja
+
+
+def choose_out_dir():
+    """Get the root of the out dir, either from the environment or by picking
+    a default."""
+    result = os.environ.get("OUT_DIR")
+    if result:
+        return result
+    else:
+        return "out"
diff --git a/orchestrator/demo/buffet_helper.py b/orchestrator/demo/buffet_helper.py
new file mode 100644
index 0000000..fa29aeb
--- /dev/null
+++ b/orchestrator/demo/buffet_helper.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python3
+import os
+import sys
+import yaml
+
+from hierarchy import parse_hierarchy
+
+
+def main():
+  if len(sys.argv) != 2:
+    print('usage: %s target' % sys.argv[0])
+    exit(1)
+
+  args = sys.argv[1].split('-')
+  if len(args) != 2:
+    print('target format: {target}-{variant}')
+    exit(1)
+
+  target, variant = args
+
+  if variant not in ['eng', 'user', 'userdebug']:
+    print('unknown variant "%s": expected "eng", "user" or "userdebug"' %
+          variant)
+    exit(1)
+
+  build_top = os.getenv('BUFFET_BUILD_TOP')
+  if not build_top:
+    print('BUFFET_BUILD_TOP is not set; did you run envsetup.sh correctly?')
+    exit(1)
+
+  hierarchy_map = parse_hierarchy(build_top)
+
+  if target not in hierarchy_map:
+    raise RuntimeError(
+        "unknown target '%s': couldn't find the target. Supported targets are: %s"
+        % (target, list(hierarchy_map.keys())))
+
+  hierarchy = [target]
+  while hierarchy_map[hierarchy[-1]]:
+    hierarchy.append(hierarchy_map[hierarchy[-1]])
+
+  print('Target hierarchy for %s: %s' % (target, hierarchy))
+
+
+if __name__ == '__main__':
+  main()
diff --git a/orchestrator/demo/build_helper.py b/orchestrator/demo/build_helper.py
new file mode 100644
index 0000000..c481f80
--- /dev/null
+++ b/orchestrator/demo/build_helper.py
@@ -0,0 +1,367 @@
+#!/usr/bin/env python3
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import copy
+import hierarchy
+import json
+import logging
+import filecmp
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+import collect_metadata
+import utils
+
+BUILD_CMD_TO_ALL = (
+  'clean',
+  'installclean',
+  'update-meta',
+)
+BUILD_ALL_EXEMPTION = (
+  'art',
+)
+
+def get_supported_product(ctx, supported_products):
+  hierarchy_map = hierarchy.parse_hierarchy(ctx.build_top())
+  target = ctx.target_product()
+
+  while target not in supported_products:
+    if target not in hierarchy_map:
+      return None
+    target = hierarchy_map[target]
+  return target
+
+
+def parse_goals(ctx, metadata, goals):
+  """Parse goals and returns a map from each component to goals.
+
+    e.g.
+
+    "m main art timezone:foo timezone:bar" will return the following dict: {
+        "main": {"all"},
+        "art": {"all"},
+        "timezone": {"foo", "bar"},
+    }
+  """
+  # for now, goal should look like:
+  # {component} or {component}:{subgoal}
+
+  ret = collections.defaultdict(set)
+
+  for goal in goals:
+    # check if the command is for all components
+    if goal in BUILD_CMD_TO_ALL:
+      ret['all'].add(goal)
+      continue
+
+    # should be {component} or {component}:{subgoal}
+    try:
+      component, subgoal = goal.split(':') if ':' in goal else (goal, 'all')
+    except ValueError:
+      raise RuntimeError(
+          'unknown goal: %s: should be {component} or {component}:{subgoal}' %
+          goal)
+    if component not in metadata:
+      raise RuntimeError('unknown goal: %s: component %s not found' %
+                         (goal, component))
+    if not get_supported_product(ctx, metadata[component]['lunch_targets']):
+      raise RuntimeError("can't find matching target. Supported targets are: " +
+                         str(metadata[component]['lunch_targets']))
+
+    ret[component].add(subgoal)
+
+  return ret
+
+
+def find_cycle(metadata):
+  """ Finds a cyclic dependency among components.
+
+  This is for debugging.
+  """
+  visited = set()
+  parent_node = dict()
+  in_stack = set()
+
+  # Returns a cycle if one is found
+  def dfs(node):
+    # visit_order[visit_time[node] - 1] == node
+    nonlocal visited, parent_node, in_stack
+
+    visited.add(node)
+    in_stack.add(node)
+    if 'deps' not in metadata[node]:
+      in_stack.remove(node)
+      return None
+    for next in metadata[node]['deps']:
+      # We found a cycle (next ~ node) if next is still in the stack
+      if next in in_stack:
+        cycle = [node]
+        while cycle[-1] != next:
+          cycle.append(parent_node[cycle[-1]])
+        return cycle
+
+      # Else, continue searching
+      if next in visited:
+        continue
+
+      parent_node[next] = node
+      result = dfs(next)
+      if result:
+        return result
+
+    in_stack.remove(node)
+    return None
+
+  for component in metadata:
+    if component in visited:
+      continue
+
+    result = dfs(component)
+    if result:
+      return result
+
+  return None
+
+
+def topological_sort_components(metadata):
+  """ Performs topological sort on components.
+
+  If A depends on B, B appears first.
+  """
+  # If A depends on B, we want B to appear before A. But the graph in metadata
+  # is represented as A -> B (B in metadata[A]['deps']). So we sort in the
+  # reverse order, and then reverse the result again to get the desired order.
+  indegree = collections.defaultdict(int)
+  for component in metadata:
+    if 'deps' not in metadata[component]:
+      continue
+    for dep in metadata[component]['deps']:
+      indegree[dep] += 1
+
+  component_queue = collections.deque()
+  for component in metadata:
+    if indegree[component] == 0:
+      component_queue.append(component)
+
+  result = []
+  while component_queue:
+    component = component_queue.popleft()
+    result.append(component)
+    if 'deps' not in metadata[component]:
+      continue
+    for dep in metadata[component]['deps']:
+      indegree[dep] -= 1
+      if indegree[dep] == 0:
+        component_queue.append(dep)
+
+  # If topological sort fails, there must be a cycle.
+  if len(result) != len(metadata):
+    cycle = find_cycle(metadata)
+    raise RuntimeError('circular dependency found among metadata: %s' % cycle)
+
+  return result[::-1]
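+
+# For illustration (hypothetical metadata): if A depends on B and B depends
+# on C, topological_sort_components(metadata) returns ['C', 'B', 'A'], so a
+# component appears after everything it depends on.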
+
+
+def add_dependency_goals(ctx, metadata, component, goals):
+  """ Adds goals that given component depends on."""
+  # For now, let's just add "all"
+  # TODO: add detailed goals (e.g. API build rules, library build rules, etc.)
+  if 'deps' not in metadata[component]:
+    return
+
+  for dep in metadata[component]['deps']:
+    goals[dep].add('all')
+
+
+def sorted_goals_with_dependencies(ctx, metadata, parsed_goals):
+  """ Analyzes the dependency graph among components, adds build commands for
+
+  dependencies, and then sorts the goals.
+
+  Returns a list of tuples: (component_name, set of subgoals).
+  Builds should be run in the list's order.
+  """
+  # TODO(inseob@): after topological sort, some components may be built in
+  # parallel.
+
+  topological_order = topological_sort_components(metadata)
+  combined_goals = copy.deepcopy(parsed_goals)
+
+  # Add build rules for each component's dependencies
+  # We do this in reverse order, so it can be transitive.
+  # e.g. if A depends on B and B depends on C, and we build A,
+  # C should also be built, in addition to B.
+  for component in topological_order[::-1]:
+    if component in combined_goals:
+      add_dependency_goals(ctx, metadata, component, combined_goals)
+
+  ret = []
+  for component in ['all'] + topological_order:
+    if component in combined_goals:
+      ret.append((component, combined_goals[component]))
+
+  return ret
+
+
+def run_build(ctx, metadata, component, subgoals):
+  build_cmd = metadata[component]['build_cmd']
+  out_dir = metadata[component]['out_dir']
+  default_goals = ''
+  if 'default_goals' in metadata[component]:
+    default_goals = metadata[component]['default_goals']
+
+  if 'all' in subgoals:
+    goal = default_goals
+  else:
+    goal = ' '.join(subgoals)
+
+  build_vars = ''
+  if 'update-meta' in subgoals:
+    build_vars = 'TARGET_MULTITREE_UPDATE_META=true'
+  # TODO(inseob@): shell escape
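+  # For illustration, with a hypothetical aosp_arm64-userdebug lunch choice
+  # and hypothetical default goals of "droid", the composed command would be:
+  #   source build/envsetup.sh && lunch aosp_arm64-userdebug && m droid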
+  cmd = [
+      '/bin/bash', '-c',
+      'source build/envsetup.sh && lunch %s-%s && %s %s %s' %
+      (get_supported_product(ctx, metadata[component]['lunch_targets']),
+       ctx.target_build_variant(), build_vars, build_cmd, goal)
+  ]
+  logging.debug('cwd: ' + metadata[component]['path'])
+  logging.debug('running build: ' + str(cmd))
+
+  subprocess.run(cmd, cwd=metadata[component]['path'], check=True)
+
+
+def run_build_all(ctx, metadata, subgoals):
+  for component in metadata:
+    if component in BUILD_ALL_EXEMPTION:
+      continue
+    run_build(ctx, metadata, component, subgoals)
+
+
+def find_components(metadata, predicate):
+  for component in metadata:
+    if predicate(component):
+      yield component
+
+
+def import_filegroups(metadata, component, exporting_component, target_file_pairs):
+  imported_filegroup_dir = os.path.join(metadata[component]['path'], 'imported', exporting_component)
+
+  bp_content = ''
+  for name, outpaths in target_file_pairs:
+    bp_content += ('filegroup {{\n'
+                   '    name: "{fname}",\n'
+                   '    srcs: [\n'.format(fname=name))
+    for outpath in outpaths:
+      bp_content += '        "{outfile}",\n'.format(outfile=os.path.basename(outpath))
+    bp_content += ('    ],\n'
+                   '}\n')
+
+  with tempfile.TemporaryDirectory() as tmp_dir:
+    with open(os.path.join(tmp_dir, 'Android.bp'), 'w') as fout:
+      fout.write(bp_content)
+    for _, outpaths in target_file_pairs:
+      for outpath in outpaths:
+        os.symlink(os.path.join(metadata[exporting_component]['path'], outpath),
+                   os.path.join(tmp_dir, os.path.basename(outpath)))
+    cmp_result = filecmp.dircmp(tmp_dir, imported_filegroup_dir)
+    if os.path.exists(imported_filegroup_dir) and len(
+        cmp_result.left_only) + len(cmp_result.right_only) + len(
+            cmp_result.diff_files) == 0:
+      # Files are identical; nothing needs to be rewritten
+      logging.info(
+          'imported files exist and the contents are identical: {} -> {}'
+          .format(component, exporting_component))
+      return
+    logging.info('creating symlinks for imported files: {} -> {}'.format(
+        component, exporting_component))
+    shutil.rmtree(imported_filegroup_dir, ignore_errors=True)
+    os.makedirs(os.path.dirname(imported_filegroup_dir), exist_ok=True)
+    # Copy rather than move so TemporaryDirectory can still clean up tmp_dir.
+    shutil.copytree(tmp_dir, imported_filegroup_dir, symlinks=True)
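+
+# For illustration, a hypothetical pair ('my_files', ['gen/a.txt']) produces
+# an Android.bp entry like:
+#   filegroup {
+#       name: "my_files",
+#       srcs: [
+#           "a.txt",
+#       ],
+#   }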
+
+
+def prepare_build(metadata, component):
+  imported_dir = os.path.join(metadata[component]['path'], 'imported')
+  if utils.META_DEPS not in metadata[component]:
+    if os.path.exists(imported_dir):
+      logging.debug('remove {}'.format(imported_dir))
+      shutil.rmtree(imported_dir)
+    return
+
+  imported_components = set()
+  for exp_comp in metadata[component][utils.META_DEPS]:
+    if utils.META_FILEGROUP in metadata[component][utils.META_DEPS][exp_comp]:
+      filegroups = metadata[component][utils.META_DEPS][exp_comp][utils.META_FILEGROUP]
+      target_file_pairs = []
+      for name in filegroups:
+        target_file_pairs.append((name, filegroups[name]))
+      import_filegroups(metadata, component, exp_comp, target_file_pairs)
+      imported_components.add(exp_comp)
+
+  # Remove directories that are not generated this time.
+  if os.path.exists(imported_dir):
+    if len(imported_components) == 0:
+      shutil.rmtree(imported_dir)
+    else:
+      for remove_target in set(os.listdir(imported_dir)) - imported_components:
+        logging.info('remove unnecessary imported dir: {}'.format(remove_target))
+        shutil.rmtree(os.path.join(imported_dir, remove_target))
+
+
+def main():
+  # set_logging_config takes a -v style verbosity count; 2 maps to DEBUG.
+  utils.set_logging_config(2)
+  ctx = utils.get_build_context()
+
+  logging.info('collecting metadata')
+
+  goals = sys.argv[1:]
+  if not goals:
+    logging.debug('empty goals. defaults to main')
+    goals = ['main']
+
+  logging.debug('goals: ' + str(goals))
+
+  # Force update the metadata for the 'update-meta' build
+  metadata_collector = collect_metadata.MetadataCollector(
+      ctx.components_top(), ctx.out_dir(),
+      collect_metadata.COMPONENT_METADATA_DIR,
+      collect_metadata.COMPONENT_METADATA_FILE,
+      force_update='update-meta' in goals)
+  metadata_collector.collect()
+
+  metadata = metadata_collector.get_metadata()
+  logging.debug('metadata: ' + str(metadata))
+
+  parsed_goals = parse_goals(ctx, metadata, goals)
+  logging.debug('parsed goals: ' + str(parsed_goals))
+
+  sorted_goals = sorted_goals_with_dependencies(ctx, metadata, parsed_goals)
+  logging.debug('sorted goals with deps: ' + str(sorted_goals))
+
+  for component, subgoals in sorted_goals:
+    if component == 'all':
+      run_build_all(ctx, metadata, subgoals)
+      continue
+    prepare_build(metadata, component)
+    run_build(ctx, metadata, component, subgoals)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/orchestrator/demo/collect_metadata.py b/orchestrator/demo/collect_metadata.py
new file mode 100755
index 0000000..148167d
--- /dev/null
+++ b/orchestrator/demo/collect_metadata.py
@@ -0,0 +1,428 @@
+#!/usr/bin/env python3
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import copy
+import json
+import logging
+import os
+import sys
+import yaml
+from collections import defaultdict
+from typing import (
+  List,
+  Set,
+)
+
+import utils
+
+# SKIP_COMPONENT_SEARCH = (
+#    'tools',
+# )
+COMPONENT_METADATA_DIR = '.repo'
+COMPONENT_METADATA_FILE = 'treeinfo.yaml'
+GENERATED_METADATA_FILE = 'metadata.json'
+COMBINED_METADATA_FILENAME = 'multitree_meta.json'
+
+
+class Dep(object):
+  def __init__(self, name, component, deps_type):
+    self.name = name
+    self.component = component
+    self.type = deps_type
+    self.out_paths = list()
+
+
+class ExportedDep(Dep):
+  def __init__(self, name, component, deps_type):
+    super().__init__(name, component, deps_type)
+
+  def set_output_paths(self, output_paths: list):
+    self.out_paths = output_paths
+
+
+class ImportedDep(Dep):
+  required_type_map = {
+    # import type: (required type, get imported module list)
+    utils.META_FILEGROUP: (utils.META_MODULES, True),
+  }
+
+  def __init__(self, name, component, deps_type, import_map):
+    super().__init__(name, component, deps_type)
+    self.exported_deps: Set[ExportedDep] = set()
+    self.imported_modules: List[str] = list()
+    self.required_type = deps_type
+    get_imported_module = False
+    if deps_type in ImportedDep.required_type_map:
+      self.required_type, get_imported_module = ImportedDep.required_type_map[deps_type]
+    if get_imported_module:
+      self.imported_modules = import_map[name]
+    else:
+      self.imported_modules.append(name)
+
+  def verify_and_add(self, exported: ExportedDep):
+    if self.required_type != exported.type:
+      raise RuntimeError(
+          '{comp} component imports {module} as {imp_type} but it is exported as {exp_type}.'
+          .format(comp=self.component, module=exported.name, imp_type=self.required_type, exp_type=exported.type))
+    self.exported_deps.add(exported)
+    self.out_paths.extend(exported.out_paths)
+    # Remove duplicates. We can't use set(), which is not JSON serializable.
+    self.out_paths = list(dict.fromkeys(self.out_paths))
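+    # e.g. ['a.so', 'b.so', 'a.so'] -> ['a.so', 'b.so'], preserving order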
+
+
+class MetadataCollector(object):
+  """Visit all component directories and collect the metadata from them.
+
+Example of metadata:
+==========
+build_cmd: m    # build command for this component. 'm' if omitted
+out_dir: out    # out dir of this component. 'out' if omitted
+exports:
+  libraries:
+    - name: libopenjdkjvm
+    - name: libopenjdkjvmd
+      build_cmd: mma      # build command for libopenjdkjvmd if specified
+      out_dir: out/soong  # out dir for libopenjdkjvmd if specified
+    - name: libctstiagent
+  APIs:
+    - api1
+    - api2
+imports:
+  libraries:
+    - lib1
+    - lib2
+  APIs:
+    - import_api1
+    - import_api2
+lunch_targets:
+  - arm64
+  - x86_64
+"""
+
+  def __init__(self, component_top, out_dir, meta_dir, meta_file, force_update=False):
+    if not os.path.exists(out_dir):
+      os.makedirs(out_dir)
+
+    self.__component_top = component_top
+    self.__out_dir = out_dir
+    self.__metadata_path = os.path.join(meta_dir, meta_file)
+    self.__combined_metadata_path = os.path.join(self.__out_dir,
+                                                 COMBINED_METADATA_FILENAME)
+    self.__force_update = force_update
+
+    self.__metadata = dict()
+    self.__map_exports = dict()
+    self.__component_set = set()
+
+  def collect(self):
+    """ Read precomputed combined metadata from the json file.
+
+    If any components have updated their metadata, update the metadata
+    information and the json file.
+    """
+    timestamp = self.__restore_metadata()
+    if timestamp and os.path.getmtime(__file__) > timestamp:
+      logging.info('Updating the metadata because the orchestrator has changed')
+      self.__force_update = True
+    self.__collect_from_components(timestamp)
+
+  def get_metadata(self):
+    """ Returns collected metadata from all components"""
+    if not self.__metadata:
+      logging.warning('Metadata is empty')
+    return copy.deepcopy(self.__metadata)
+
+  def __collect_from_components(self, timestamp):
+    """ Read metadata from all components
+
+    If any components have newer metadata files or are removed, update the
+    combined metadata.
+    """
+    metadata_updated = False
+    for component in os.listdir(self.__component_top):
+      # if component in SKIP_COMPONENT_SEARCH:
+      #     continue
+      if self.__read_component_metadata(timestamp, component):
+        metadata_updated = True
+      if self.__read_generated_metadata(timestamp, component):
+        metadata_updated = True
+
+    deleted_components = set()
+    for meta in self.__metadata:
+      if meta not in self.__component_set:
+        logging.info('Component {} is removed'.format(meta))
+        deleted_components.add(meta)
+        metadata_updated = True
+    for meta in deleted_components:
+      del self.__metadata[meta]
+
+    if metadata_updated:
+      self.__update_dependencies()
+      self.__store_metadata()
+      logging.info('Metadata updated')
+
+  def __read_component_metadata(self, timestamp, component):
+    """ Search for the metadata file from a component.
+
+    If the metadata is modified, read the file and update the metadata.
+    """
+    component_path = os.path.join(self.__component_top, component)
+    metadata_file = os.path.join(component_path, self.__metadata_path)
+    logging.info(
+        'Reading a metadata file from {} component ...'.format(component))
+    if not os.path.isfile(metadata_file):
+      logging.warning('Metadata file {} not found!'.format(metadata_file))
+      return False
+
+    self.__component_set.add(component)
+    if not self.__force_update and timestamp and timestamp > os.path.getmtime(metadata_file):
+      logging.info('... yaml not changed. Skip')
+      return False
+
+    with open(metadata_file) as f:
+      meta = yaml.load(f, Loader=yaml.SafeLoader)
+
+    meta['path'] = component_path
+    if utils.META_BUILDCMD not in meta:
+      meta[utils.META_BUILDCMD] = utils.DEFAULT_BUILDCMD
+    if utils.META_OUTDIR not in meta:
+      meta[utils.META_OUTDIR] = utils.DEFAULT_OUTDIR
+
+    if utils.META_IMPORTS not in meta:
+      meta[utils.META_IMPORTS] = defaultdict(dict)
+    if utils.META_EXPORTS not in meta:
+      meta[utils.META_EXPORTS] = defaultdict(dict)
+
+    self.__metadata[component] = meta
+    return True
+
+  def __read_generated_metadata(self, timestamp, component):
+    """ Read a metadata gerated by 'update-meta' build command from the soong build system
+
+    Soong generate the metadata that has the information of import/export module/files.
+    Build orchestrator read the generated metadata to collect the dependency information.
+
+    Generated metadata has the following format:
+    {
+      "Imported": {
+        "FileGroups": {
+          "<name_of_filegroup>": [
+            "<exported_module_name>",
+            ...
+          ],
+          ...
+        }
+      },
+      "Exported": {
+        "<exported_module_name>": [
+          "<output_file_path>",
+          ...
+        ],
+        ...
+      }
+    }
+    """
+    if component not in self.__component_set:
+      # skip reading generated metadata if the component metadata file was missing
+      return False
+    component_out = os.path.join(self.__component_top, component, self.__metadata[component][utils.META_OUTDIR])
+    generated_metadata_file = os.path.join(component_out, 'soong', 'multitree', GENERATED_METADATA_FILE)
+    if not os.path.isfile(generated_metadata_file):
+      logging.info('... Soong did not generate the metadata file. Skip')
+      return False
+    if not self.__force_update and timestamp and timestamp > os.path.getmtime(generated_metadata_file):
+      logging.info('... Soong generated metadata not changed. Skip')
+      return False
+
+    with open(generated_metadata_file, 'r') as gen_meta_json:
+      try:
+        gen_metadata = json.load(gen_meta_json)
+      except json.decoder.JSONDecodeError:
+        logging.warning('JSONDecodeError!!!: skip reading the {} file'.format(
+            generated_metadata_file))
+        return False
+
+    if utils.SOONG_IMPORTED in gen_metadata:
+      imported = gen_metadata[utils.SOONG_IMPORTED]
+      if utils.SOONG_IMPORTED_FILEGROUPS in imported:
+        self.__metadata[component][utils.META_IMPORTS][utils.META_FILEGROUP] = imported[utils.SOONG_IMPORTED_FILEGROUPS]
+    if utils.SOONG_EXPORTED in gen_metadata:
+      self.__metadata[component][utils.META_EXPORTS][utils.META_MODULES] = gen_metadata[utils.SOONG_EXPORTED]
+
+    return True
+
+  def __update_export_map(self):
+    """ Read metadata of all components and update the export map
+
+    'libraries' and 'APIs' are special exproted types that are provided manually
+    from the .yaml metadata files. These need to be replaced with the implementation
+    in soong gerated metadata.
+    The export type 'module' is generated from the soong build system from the modules
+    with 'export: true' property. This export type includes a dictionary with module
+    names as keys and their output files as values. These output files will be used as
+    prebuilt sources when generating the imported modules.
+    """
+    self.__map_exports = dict()
+    for comp in self.__metadata:
+      if utils.META_EXPORTS not in self.__metadata[comp]:
+        continue
+      exports = self.__metadata[comp][utils.META_EXPORTS]
+
+      for export_type in exports:
+        for module in exports[export_type]:
+          if export_type == utils.META_LIBS:
+            name = module[utils.META_LIB_NAME]
+          else:
+            name = module
+
+          if name in self.__map_exports:
+            raise RuntimeError(
+                'Exported libs conflict!!!: "{name}" in the {comp} component is already exported by the {prev} component.'
+                .format(name=name, comp=comp, prev=self.__map_exports[name][utils.EXP_COMPONENT]))
+          exported_deps = ExportedDep(name, comp, export_type)
+          if export_type == utils.META_MODULES:
+            exported_deps.set_output_paths(exports[export_type][module])
+          self.__map_exports[name] = exported_deps
+
+  def __verify_and_add_dependencies(self, component):
+    """ Search all imported items from the export_map.
+
+    If any imported items are not provided by the other components, report
+    an error.
+    Otherwise, add the component dependency and update the exported information to the
+    import maps.
+    """
+    def verify_and_add_dependencies(imported_dep: ImportedDep):
+      for module in imported_dep.imported_modules:
+        if module not in self.__map_exports:
+          raise RuntimeError(
+              'Imported item not found!!!: Imported module "{module}" in the {comp} component is not exported from any other components.'
+              .format(module=module, comp=imported_dep.component))
+        imported_dep.verify_and_add(self.__map_exports[module])
+
+        deps = self.__metadata[component][utils.META_DEPS]
+        exp_comp = self.__map_exports[module].component
+        if exp_comp not in deps:
+          deps[exp_comp] = defaultdict(defaultdict)
+        deps[exp_comp][imported_dep.type][imported_dep.name] = imported_dep.out_paths
+
+    self.__metadata[component][utils.META_DEPS] = defaultdict()
+    imports = self.__metadata[component][utils.META_IMPORTS]
+    for import_type in imports:
+      for module in imports[import_type]:
+        verify_and_add_dependencies(ImportedDep(module, component, import_type, imports[import_type]))
+
+  def __check_imports(self):
+    """ Search the export map to find the component to import libraries or APIs.
+
+    Update the 'deps' field that includes the dependent components.
+    """
+    for component in self.__metadata:
+      self.__verify_and_add_dependencies(component)
+      if utils.META_DEPS in self.__metadata[component]:
+        logging.debug('{comp} depends on {list} components'.format(
+            comp=component, list=self.__metadata[component][utils.META_DEPS]))
+
+  def __update_dependencies(self):
+    """ Generate a dependency graph for the components
+
+    Update __map_exports and the dependency graph with the maps.
+    """
+    self.__update_export_map()
+    self.__check_imports()
+
+  def __store_metadata(self):
+    """ Store the __metadata dictionary as json format"""
+    with open(self.__combined_metadata_path, 'w') as json_file:
+      json.dump(self.__metadata, json_file, indent=2)
+
+  def __restore_metadata(self):
+    """ Read the stored json file and return the time stamps of the
+
+        metadata file.
+        """
+    if not os.path.exists(self.__combined_metadata_path):
+      return None
+
+    with open(self.__combined_metadata_path, 'r') as json_file:
+      try:
+        self.__metadata = json.load(json_file)
+      except json.decoder.JSONDecodeError:
+        logging.warning('JSONDecodeError!!!: skip reading the {} file'.format(
+            self.__combined_metadata_path))
+        return None
+
+    logging.info('Metadata restored from {}'.format(
+        self.__combined_metadata_path))
+    self.__update_export_map()
+    return os.path.getmtime(self.__combined_metadata_path)
+
+
+def get_args():
+
+  def check_dir(path):
+    if os.path.exists(path) and os.path.isdir(path):
+      return os.path.normpath(path)
+    else:
+      raise argparse.ArgumentTypeError('"{}" is not a directory'.format(path))
+
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--component-top',
+      help='Scan all components under this directory.',
+      default=os.path.join(os.path.dirname(__file__), '../../../components'),
+      type=check_dir)
+  parser.add_argument(
+      '--meta-file',
+      help='Name of the metadata file.',
+      default=COMPONENT_METADATA_FILE,
+      type=str)
+  parser.add_argument(
+      '--meta-dir',
+      help='Each component has the metadata in this directory.',
+      default=COMPONENT_METADATA_DIR,
+      type=str)
+  parser.add_argument(
+      '--out-dir',
+      help='Out dir for the outer tree. The orchestrator stores the collected metadata in this directory.',
+      default=os.path.join(os.path.dirname(__file__), '../../../out'),
+      type=os.path.normpath)
+  parser.add_argument(
+      '--force',
+      '-f',
+      action='store_true',
+      help='Force to collect metadata',
+  )
+  parser.add_argument(
+      '--verbose',
+      '-v',
+      help='Increase output verbosity, e.g. "-v", "-vv".',
+      action='count',
+      default=0)
+  return parser.parse_args()
+
+
+def main():
+  args = get_args()
+  utils.set_logging_config(args.verbose)
+
+  metadata_collector = MetadataCollector(args.component_top, args.out_dir,
+                                         args.meta_dir, args.meta_file, args.force)
+  metadata_collector.collect()
+
+
+if __name__ == '__main__':
+  main()
diff --git a/orchestrator/demo/envsetup.sh b/orchestrator/demo/envsetup.sh
new file mode 100644
index 0000000..902a37c
--- /dev/null
+++ b/orchestrator/demo/envsetup.sh
@@ -0,0 +1,48 @@
+#!/bin/bash
+
+function buffet()
+{
+    local product variant selection
+    if [[ $# -ne 1 ]]; then
+        echo "usage: buffet [target]" >&2
+        return 1
+    fi
+
+    selection=$1
+    product=${selection%%-*} # Trim everything after first dash
+    variant=${selection#*-} # Trim everything up to first dash
+
+    if [ -z "$product" ]
+    then
+        echo
+        echo "Invalid lunch combo: $selection"
+        return 1
+    fi
+
+    if [ -z "$variant" ]
+    then
+        if [[ "$product" =~ .*_(eng|user|userdebug) ]]
+        then
+            echo "Did you mean -${product/*_/}? (dash instead of underscore)"
+        fi
+        return 1
+    fi
+
+    BUFFET_BUILD_TOP=$(pwd) python3 tools/build/orchestrator/buffet_helper.py $1 || return 1
+
+    export BUFFET_BUILD_TOP=$(pwd)
+    export BUFFET_COMPONENTS_TOP=$BUFFET_BUILD_TOP/components
+    export BUFFET_TARGET_PRODUCT=$product
+    export BUFFET_TARGET_BUILD_VARIANT=$variant
+    export BUFFET_TARGET_BUILD_TYPE=release
+}
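+# Example usage (assuming the product/variant pair passes buffet_helper.py):
+#   buffet aosp_arm64-userdebug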
+
+function m()
+{
+    if [ -z "$BUFFET_BUILD_TOP" ]
+    then
+        echo "Run \"buffet [target]\" first"
+        return 1
+    fi
+    python3 "$BUFFET_BUILD_TOP/tools/build/orchestrator/build_helper.py" "$@"
+}
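+
+# Example: "m main art timezone:foo" asks build_helper.py to build the main
+# and art components plus the "foo" subgoal of the timezone component.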
diff --git a/orchestrator/demo/hierarchy.py b/orchestrator/demo/hierarchy.py
new file mode 100644
index 0000000..ae1825c
--- /dev/null
+++ b/orchestrator/demo/hierarchy.py
@@ -0,0 +1,79 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import yaml
+
+
+def parse_hierarchy(build_top):
+  """Parse build hierarchy file from given build top directory, and returns a dict from child targets to parent targets.
+
+  Example of hierarchy file:
+  ==========
+  aosp_arm64:
+  - armv8
+  - aosp_cf_arm64_phone
+
+  armv8:
+  - aosp_oriole
+  - aosp_sunfish
+
+  aosp_oriole:
+  - oriole
+
+  aosp_sunfish:
+  - sunfish
+
+  oriole:
+  # leaf
+
+  sunfish:
+  # leaf
+  ==========
+
+  If we parse this yaml, we get a dict looking like:
+
+  {
+      "sunfish": "aosp_sunfish",
+      "oriole": "aosp_oriole",
+      "aosp_oriole": "armv8",
+      "aosp_sunfish": "armv8",
+      "armv8": "aosp_arm64",
+      "aosp_cf_arm64_phone": "aosp_arm64",
+      "aosp_arm64": None, # no parent
+  }
+  """
+  metadata_path = os.path.join(build_top, 'tools', 'build', 'hierarchy.yaml')
+  if not os.path.isfile(metadata_path):
+    raise RuntimeError("target metadata file %s doesn't exist" % metadata_path)
+
+  with open(metadata_path, 'r') as f:
+    hierarchy_yaml = yaml.load(f, Loader=yaml.SafeLoader)
+
+  hierarchy_map = dict()
+
+  for parent_target, child_targets in hierarchy_yaml.items():
+    if not child_targets:
+      # leaf
+      continue
+    for child_target in child_targets:
+      hierarchy_map[child_target] = parent_target
+
+  for parent_target in hierarchy_yaml:
+    # targets with no parent
+    if parent_target not in hierarchy_map:
+      hierarchy_map[parent_target] = None
+
+  return hierarchy_map
diff --git a/orchestrator/demo/hierarchy.yaml b/orchestrator/demo/hierarchy.yaml
new file mode 100644
index 0000000..cc6de4d
--- /dev/null
+++ b/orchestrator/demo/hierarchy.yaml
@@ -0,0 +1,37 @@
+# hierarchy of targets
+
+aosp_arm64:
+- armv8
+- aosp_cf_arm64_phone
+
+armv8:
+- mainline_modules_arm64
+
+mainline_modules_arm64:
+- aosp_oriole
+- aosp_sunfish
+- aosp_raven
+
+aosp_oriole:
+- oriole
+
+aosp_sunfish:
+- sunfish
+
+aosp_raven:
+- raven
+
+oriole:
+# leaf
+
+sunfish:
+# leaf
+
+raven:
+# leaf
+
+aosp_cf_arm64_phone:
+- cf_arm64_phone
+
+cf_arm64_phone:
+# leaf
diff --git a/orchestrator/demo/utils.py b/orchestrator/demo/utils.py
new file mode 100644
index 0000000..5dbbe4a
--- /dev/null
+++ b/orchestrator/demo/utils.py
@@ -0,0 +1,89 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import logging
+import os
+
+# default build configuration for each component
+DEFAULT_BUILDCMD = 'm'
+DEFAULT_OUTDIR = 'out'
+
+# yaml fields
+META_BUILDCMD = 'build_cmd'
+META_OUTDIR = 'out_dir'
+META_EXPORTS = 'exports'
+META_IMPORTS = 'imports'
+META_TARGETS = 'lunch_targets'
+META_DEPS = 'deps'
+# fields under 'exports' and 'imports'
+META_LIBS = 'libraries'
+META_APIS = 'APIs'
+META_FILEGROUP = 'filegroup'
+META_MODULES = 'modules'
+# fields under 'libraries'
+META_LIB_NAME = 'name'
+
+# fields for generated metadata file
+SOONG_IMPORTED = 'Imported'
+SOONG_IMPORTED_FILEGROUPS = 'FileGroups'
+SOONG_EXPORTED = 'Exported'
+
+# export map items
+EXP_COMPONENT = 'component'
+EXP_TYPE = 'type'
+EXP_OUTPATHS = 'outpaths'
+
+class BuildContext:
+
+  def __init__(self):
+    self._build_top = os.getenv('BUFFET_BUILD_TOP')
+    # Check build_top before deriving paths from it; os.path.join(None, ...)
+    # would raise a TypeError instead of the friendly error below.
+    if not self._build_top:
+      raise RuntimeError("Can't find root. Did you run buffet?")
+
+    self._components_top = os.getenv('BUFFET_COMPONENTS_TOP')
+    self._target_product = os.getenv('BUFFET_TARGET_PRODUCT')
+    self._target_build_variant = os.getenv('BUFFET_TARGET_BUILD_VARIANT')
+    self._target_build_type = os.getenv('BUFFET_TARGET_BUILD_TYPE')
+    self._out_dir = os.path.join(self._build_top, 'out')
+
+  def build_top(self):
+    return self._build_top
+
+  def components_top(self):
+    return self._components_top
+
+  def target_product(self):
+    return self._target_product
+
+  def target_build_variant(self):
+    return self._target_build_variant
+
+  def target_build_type(self):
+    return self._target_build_type
+
+  def out_dir(self):
+    return self._out_dir
+
+
+def get_build_context():
+  return BuildContext()
+
+
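+# Maps a -v/-vv style verbosity count to a logging level:
+# 0 -> WARNING, 1 -> INFO, 2 (or more) -> DEBUG.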
+def set_logging_config(verbose_level):
+  verbose_map = (logging.WARNING, logging.INFO, logging.DEBUG)
+  verbosity = min(verbose_level, 2)
+  logging.basicConfig(
+      format='%(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
+      level=verbose_map[verbosity])
diff --git a/orchestrator/inner_build/common.py b/orchestrator/inner_build/common.py
new file mode 100644
index 0000000..382844b
--- /dev/null
+++ b/orchestrator/inner_build/common.py
@@ -0,0 +1,60 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import sys
+
+def _parse_arguments(argv):
+    """Return an argparse options object."""
+    argv = argv[1:]
+    # Top-level parser
+    parser = argparse.ArgumentParser(prog=".inner_build")
+
+    parser.add_argument("--out_dir", action="store", required=True,
+            help="root of the output directory for this inner tree's API contributions")
+
+    parser.add_argument("--api_domain", action="append", required=True,
+            help="which API domains are to be built in this inner tree")
+
+    subparsers = parser.add_subparsers(required=True, dest="command",
+            help="subcommands")
+
+    # inner_build describe command
+    describe_parser = subparsers.add_parser("describe",
+            help="describe the capabilities of this inner tree's build system")
+
+    # inner_build export_api_contributions command
+    export_parser = subparsers.add_parser("export_api_contributions",
+            help="export the API contributions of this inner tree")
+
+    # inner_build analyze command
+    analyze_parser = subparsers.add_parser("analyze",
+            help="main build analysis for this inner tree")
+
+    # Parse the arguments
+    return parser.parse_args(argv)
+
+
+class Commands(object):
+    def Run(self, argv):
+        """Parse the command arguments and call the corresponding subcommand method on
+        this object.
+
+        Raises AttributeError if the method for the command wasn't found.
+        """
+        args = _parse_arguments(argv)
+        return getattr(self, args.command)(args)
+
diff --git a/orchestrator/inner_build/inner_build_demo.py b/orchestrator/inner_build/inner_build_demo.py
new file mode 100755
index 0000000..264739b
--- /dev/null
+++ b/orchestrator/inner_build/inner_build_demo.py
@@ -0,0 +1,110 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import textwrap
+
+sys.dont_write_bytecode = True
+import common
+
+def mkdirs(path):
+    try:
+        os.makedirs(path)
+    except FileExistsError:
+        pass
+
+
+class InnerBuildSoong(common.Commands):
+    def describe(self, args):
+        mkdirs(args.out_dir)
+
+        with open(os.path.join(args.out_dir, "tree_info.json"), "w") as f:
+            f.write(textwrap.dedent("""\
+            {
+                "requires_ninja": true,
+                "orchestrator_protocol_version": 1
+            }"""))
+
+    def export_api_contributions(self, args):
+        contributions_dir = os.path.join(args.out_dir, "api_contributions")
+        mkdirs(contributions_dir)
+
+        if "system" in args.api_domain:
+            with open(os.path.join(contributions_dir, "api_a-1.json"), "w") as f:
+                # 'name: android' is android.jar
+                f.write(textwrap.dedent("""\
+                {
+                    "name": "api_a",
+                    "version": 1,
+                    "api_domain": "system",
+                    "cc_libraries": [
+                        {
+                            "name": "libhello1",
+                            "headers": [
+                                {
+                                    "root": "build/build/make/orchestrator/test_workspace/inner_tree_1",
+                                    "files": [
+                                        "hello1.h"
+                                    ]
+                                }
+                            ],
+                            "api": [
+                                "build/build/make/orchestrator/test_workspace/inner_tree_1/libhello1"
+                            ]
+                        }
+                    ]
+                }"""))
+
+    def analyze(self, args):
+        if "system" in args.api_domain:
+            # Nothing to export in this demo
+            # Write a fake inner_tree.ninja; what the inner tree would have generated
+            with open(os.path.join(args.out_dir, "inner_tree.ninja"), "w") as f:
+                # TODO: Note that this uses paths relative to the workspace, not the inner tree,
+                # for demo purposes until we get the ninja chdir change in.
+                f.write(textwrap.dedent("""\
+                    rule compile_c
+                        command = mkdir -p ${out_dir} && g++ -c ${cflags} -o ${out} ${in}
+                    rule link_so
+                        command = mkdir -p ${out_dir} && gcc -shared -o ${out} ${in}
+                    build %(OUT_DIR)s/libhello1/hello1.o: compile_c build/build/make/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
+                        out_dir = %(OUT_DIR)s/libhello1
+                        cflags = -Ibuild/build/make/orchestrator/test_workspace/inner_tree_1/libhello1/include
+                    build %(OUT_DIR)s/libhello1/libhello1.so: link_so %(OUT_DIR)s/libhello1/hello1.o
+                        out_dir = %(OUT_DIR)s/libhello1
+                    build system: phony %(OUT_DIR)s/libhello1/libhello1.so
+                """ % { "OUT_DIR": args.out_dir }))
+            with open(os.path.join(args.out_dir, "build_targets.json"), "w") as f:
+                f.write(textwrap.dedent("""\
+                {
+                    "staging": [
+                        {
+                            "dest": "staging/system/lib/libhello1.so",
+                            "obj": "libhello1/libhello1.so"
+                        }
+                    ]
+                }""" % { "OUT_DIR": args.out_dir }))
+
+def main(argv):
+    return InnerBuildSoong().Run(argv)
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv))
+
+
+# vim: sts=4:ts=4:sw=4
diff --git a/orchestrator/inner_build/inner_build_soong.py b/orchestrator/inner_build/inner_build_soong.py
new file mode 100755
index 0000000..a653dcc
--- /dev/null
+++ b/orchestrator/inner_build/inner_build_soong.py
@@ -0,0 +1,37 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import sys
+
+sys.dont_write_bytecode = True
+import common
+
+class InnerBuildSoong(common.Commands):
+    def describe(self, args):
+        pass
+
+
+    def export_api_contributions(self, args):
+        pass
+
+
+def main(argv):
+    return InnerBuildSoong().Run(argv)
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv))
diff --git a/orchestrator/multitree_combos/aosp_cf_arm64_phone.mcombo b/orchestrator/multitree_combos/aosp_cf_arm64_phone.mcombo
new file mode 100644
index 0000000..0790226
--- /dev/null
+++ b/orchestrator/multitree_combos/aosp_cf_arm64_phone.mcombo
@@ -0,0 +1,16 @@
+{
+    "lunchable": true,
+    "system": {
+        "tree": "master",
+        "product": "aosp_cf_arm64_phone"
+    },
+    "vendor": {
+        "tree": "master",
+        "product": "aosp_cf_arm64_phone"
+    },
+    "modules": {
+        "com.android.bionic": {
+            "tree": "sc-mainline-prod"
+        }
+    }
+}
diff --git a/orchestrator/ninja/ninja_syntax.py b/orchestrator/ninja/ninja_syntax.py
new file mode 100644
index 0000000..df97b68
--- /dev/null
+++ b/orchestrator/ninja/ninja_syntax.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from abc import ABC, abstractmethod
+
+from collections.abc import Iterator
+from typing import List
+
+TAB = "  "
+
+class Node(ABC):
+  '''An abstract class that can be serialized to a ninja file
+  All other ninja-serializable classes inherit from this class'''
+
+  @abstractmethod
+  def stream(self) -> Iterator[str]:
+    pass
+
+class Variable(Node):
+  '''A ninja variable that can be reused across build actions
+  https://ninja-build.org/manual.html#_variables'''
+
+  def __init__(self, name:str, value:str, indent=0):
+    self.name = name
+    self.value = value
+    self.indent = indent
+
+  def stream(self) -> Iterator[str]:
+    indent = TAB * self.indent
+    yield f"{indent}{self.name} = {self.value}"
+
+class RuleException(Exception):
+  pass
+
+# Ninja rules recognize a limited set of variables
+# https://ninja-build.org/manual.html#ref_rule
+# Keep this list sorted
+RULE_VARIABLES = ["command",
+                  "depfile",
+                  "deps",
+                  "description",
+                  "dyndep",
+                  "generator",
+                  "msvc_deps_prefix",
+                  "restat",
+                  "rspfile",
+                  "rspfile_content"]
+
+class Rule(Node):
+  '''A shorthand for a command line that can be reused
+  https://ninja-build.org/manual.html#_rules'''
+
+  def __init__(self, name:str):
+    self.name = name
+    self.variables = []
+
+  def add_variable(self, name: str, value: str):
+    if name not in RULE_VARIABLES:
+      raise RuleException(f"{name} is not a recognized variable in a ninja rule")
+
+    self.variables.append(Variable(name=name, value=value, indent=1))
+
+  def stream(self) -> Iterator[str]:
+    self._validate_rule()
+
+    yield f"rule {self.name}"
+    # Yield rule variables sorted by `name`
+    for var in sorted(self.variables, key=lambda x: x.name):
+      # variables yield a single item, next() is sufficient
+      yield next(var.stream())
+
+  def _validate_rule(self):
+    # command is a required variable in a ninja rule
+    self._assert_variable_is_not_empty(variable_name="command")
+
+  def _assert_variable_is_not_empty(self, variable_name: str):
+    if not any(var.name == variable_name for var in self.variables):
+      raise RuleException(f"{variable_name} is required in a ninja rule")
+
+class BuildActionException(Exception):
+  pass
+
+class BuildAction(Node):
+  '''Describes the dependency edge between inputs and output
+  https://ninja-build.org/manual.html#_build_statements'''
+
+  def __init__(self, output: str, rule: str, inputs: List[str]=None, implicits: List[str]=None, order_only: List[str]=None):
+    self.output = output
+    self.rule = rule
+    self.inputs = self._as_list(inputs)
+    self.implicits = self._as_list(implicits)
+    self.order_only = self._as_list(order_only)
+    self.variables = []
+
+  def add_variable(self, name: str, value: str):
+    '''Variables limited to the scope of this build action'''
+    self.variables.append(Variable(name=name, value=value, indent=1))
+
+  def stream(self) -> Iterator[str]:
+    self._validate()
+
+    build_statement = f"build {self.output}: {self.rule}"
+    if len(self.inputs) > 0:
+      build_statement += " "
+      build_statement += " ".join(self.inputs)
+    if len(self.implicits) > 0:
+      build_statement += " | "
+      build_statement += " ".join(self.implicits)
+    if len(self.order_only) > 0:
+      build_statement += " || "
+      build_statement += " ".join(self.order_only)
+    yield build_statement
+    # Yield variables sorted by `name`
+    for var in sorted(self.variables, key=lambda x: x.name):
+      # variables yield a single item, next() is sufficient
+      yield next(var.stream())
+
+  def _validate(self):
+    if not self.output:
+      raise BuildActionException("Output is required in a ninja build statement")
+    if not self.rule:
+      raise BuildActionException("Rule is required in a ninja build statement")
+
+  def _as_list(self, list_like):
+    if list_like is None:
+      return []
+    if isinstance(list_like, list):
+      return list_like
+    return [list_like]
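+
+# For illustration, BuildAction("out.o", "cc", inputs=["in.c"],
+# implicits=["hdr.h"]) streams the single line:
+#   build out.o: cc in.c | hdr.h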
+
+class Pool(Node):
+  '''https://ninja-build.org/manual.html#ref_pool'''
+
+  def __init__(self, name: str, depth: int):
+    self.name = name
+    self.depth = Variable(name="depth", value=depth, indent=1)
+
+  def stream(self) -> Iterator[str]:
+    yield f"pool {self.name}"
+    yield next(self.depth.stream())
+
+class Subninja(Node):
+
+  def __init__(self, subninja: str, chdir: str):
+    self.subninja = subninja
+    self.chdir = chdir
+
+  # TODO(spandandas): Update the syntax when aosp/2064612 lands
+  def stream(self) -> Iterator[str]:
+    yield f"subninja {self.subninja}"
+
+class Line(Node):
+  '''Generic class that can be used for comments/newlines/default_target etc'''
+
+  def __init__(self, value:str):
+    self.value = value
+
+  def stream(self) -> Iterator[str]:
+    yield self.value
diff --git a/orchestrator/ninja/ninja_writer.py b/orchestrator/ninja/ninja_writer.py
new file mode 100644
index 0000000..9e80b4b
--- /dev/null
+++ b/orchestrator/ninja/ninja_writer.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ninja_syntax import Variable, BuildAction, Rule, Pool, Subninja, Line
+
+# TODO: Format the output according to a configurable width variable
+# This will ensure that the generated content fits on a screen and does not
+# require horizontal scrolling
+class Writer:
+
+  def __init__(self, file):
+    self.file = file
+    self.nodes = []  # list of ninja_syntax.Node objects
+
+  def add_variable(self, variable: Variable):
+    self.nodes.append(variable)
+
+  def add_rule(self, rule: Rule):
+    self.nodes.append(rule)
+
+  def add_build_action(self, build_action: BuildAction):
+    self.nodes.append(build_action)
+
+  def add_pool(self, pool: Pool):
+    self.nodes.append(pool)
+
+  def add_comment(self, comment: str):
+    self.nodes.append(Line(value=f"# {comment}"))
+
+  def add_default(self, default: str):
+    self.nodes.append(Line(value=f"default {default}"))
+
+  def add_newline(self):
+    self.nodes.append(Line(value=""))
+
+  def add_subninja(self, subninja: Subninja):
+    self.nodes.append(subninja)
+
+  def add_phony(self, name, deps):
+    build_action = BuildAction(name, "phony", inputs=deps)
+    self.add_build_action(build_action)
+
+  def write(self):
+    for node in self.nodes:
+      for line in node.stream():
+        print(line, file=self.file)
diff --git a/orchestrator/ninja/test_ninja_syntax.py b/orchestrator/ninja/test_ninja_syntax.py
new file mode 100644
index 0000000..d922fd2
--- /dev/null
+++ b/orchestrator/ninja/test_ninja_syntax.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from ninja_syntax import Variable, Rule, RuleException, BuildAction, BuildActionException, Pool
+
+class TestVariable(unittest.TestCase):
+
+  def test_assignment(self):
+    variable = Variable(name="key", value="value")
+    self.assertEqual("key = value", next(variable.stream()))
+    variable = Variable(name="key", value="value with spaces")
+    self.assertEqual("key = value with spaces", next(variable.stream()))
+    variable = Variable(name="key", value="$some_other_variable")
+    self.assertEqual("key = $some_other_variable", next(variable.stream()))
+
+  def test_indentation(self):
+    variable = Variable(name="key", value="value", indent=0)
+    self.assertEqual("key = value", next(variable.stream()))
+    variable = Variable(name="key", value="value", indent=1)
+    self.assertEqual("  key = value", next(variable.stream()))
+
+class TestRule(unittest.TestCase):
+
+  def test_rulename_comes_first(self):
+    rule = Rule(name="myrule")
+    rule.add_variable("command", "/bin/bash echo")
+    self.assertEqual("rule myrule", next(rule.stream()))
+
+  def test_command_is_a_required_variable(self):
+    rule = Rule(name="myrule")
+    with self.assertRaises(RuleException):
+      next(rule.stream())
+
+  def test_bad_rule_variable(self):
+    rule = Rule(name="myrule")
+    with self.assertRaises(RuleException):
+      rule.add_variable(name="unrecognize_rule_variable", value="value")
+
+  def test_rule_variables_are_indented(self):
+    rule = Rule(name="myrule")
+    rule.add_variable("command", "/bin/bash echo")
+    stream = rule.stream()
+    self.assertEqual("rule myrule", next(stream)) # top-level rule should not be indented
+    self.assertEqual("  command = /bin/bash echo", next(stream))
+
+  def test_rule_variables_are_sorted(self):
+    rule = Rule(name="myrule")
+    rule.add_variable("description", "Adding description before command")
+    rule.add_variable("command", "/bin/bash echo")
+    stream = rule.stream()
+    self.assertEqual("rule myrule", next(stream)) # rule always comes first
+    self.assertEqual("  command = /bin/bash echo", next(stream))
+    self.assertEqual("  description = Adding description before command", next(stream))
+
+class TestBuildAction(unittest.TestCase):
+
+  def test_no_inputs(self):
+    build = BuildAction(output="out", rule="phony")
+    stream = build.stream()
+    self.assertEqual("build out: phony", next(stream))
+    # Empty output
+    build = BuildAction(output="", rule="phony")
+    with self.assertRaises(BuildActionException):
+      next(build.stream())
+    # Empty rule
+    build = BuildAction(output="out", rule="")
+    with self.assertRaises(BuildActionException):
+      next(build.stream())
+
+  def test_inputs(self):
+    build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"])
+    self.assertEqual("build out: cat input1 input2", next(build.stream()))
+    build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"], implicits=["implicits1", "implicits2"], order_only=["order_only1", "order_only2"])
+    self.assertEqual("build out: cat input1 input2 | implicits1 implicits2 || order_only1 order_only2", next(build.stream()))
+
+  def test_variables(self):
+    build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"])
+    build.add_variable(name="myvar", value="myval")
+    stream = build.stream()
+    next(stream)
+    self.assertEqual("  myvar = myval", next(stream))
+
+class TestPool(unittest.TestCase):
+
+  def test_pool(self):
+    pool = Pool(name="mypool", depth=10)
+    stream = pool.stream()
+    self.assertEqual("pool mypool", next(stream))
+    self.assertEqual("  depth = 10", next(stream))
+
+if __name__ == "__main__":
+  unittest.main()
diff --git a/orchestrator/ninja/test_ninja_writer.py b/orchestrator/ninja/test_ninja_writer.py
new file mode 100644
index 0000000..703dd4d
--- /dev/null
+++ b/orchestrator/ninja/test_ninja_writer.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from io import StringIO
+
+from ninja_writer import Writer
+from ninja_syntax import Variable, Rule, BuildAction
+
+class TestWriter(unittest.TestCase):
+
+  def test_simple_writer(self):
+    with StringIO() as f:
+      writer = Writer(f)
+      writer.add_variable(Variable(name="cflags", value="-Wall"))
+      writer.add_newline()
+      cc = Rule(name="cc")
+      cc.add_variable(name="command", value="gcc $cflags -c $in -o $out")
+      writer.add_rule(cc)
+      writer.add_newline()
+      build_action = BuildAction(output="foo.o", rule="cc", inputs=["foo.c"])
+      writer.add_build_action(build_action)
+      writer.write()
+      self.assertEqual('''cflags = -Wall
+
+rule cc
+  command = gcc $cflags -c $in -o $out
+
+build foo.o: cc foo.c
+''', f.getvalue())
+
+  def test_comment(self):
+    with StringIO() as f:
+      writer = Writer(f)
+      writer.add_comment("This is a comment in a ninja file")
+      writer.write()
+      self.assertEqual("# This is a comment in a ninja file\n", f.getvalue())
+
+if __name__ == "__main__":
+  unittest.main()
diff --git a/orchestrator/test_workspace/combo.mcombo b/orchestrator/test_workspace/combo.mcombo
new file mode 100644
index 0000000..8200dc0
--- /dev/null
+++ b/orchestrator/test_workspace/combo.mcombo
@@ -0,0 +1,17 @@
+{
+    "lunchable": true,
+    "system": {
+        "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1",
+        "product": "test_product1"
+    },
+    "vendor": {
+        "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1",
+        "product": "test_product2"
+    },
+    "modules": {
+        "module_1": {
+            "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1"
+        }
+    }
+}
+
diff --git a/orchestrator/test_workspace/inner_tree_1/.inner_build b/orchestrator/test_workspace/inner_tree_1/.inner_build
new file mode 120000
index 0000000..d8f235f
--- /dev/null
+++ b/orchestrator/test_workspace/inner_tree_1/.inner_build
@@ -0,0 +1 @@
+../../inner_build/inner_build_demo.py
\ No newline at end of file
diff --git a/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c b/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
new file mode 100644
index 0000000..1415082
--- /dev/null
+++ b/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
@@ -0,0 +1,8 @@
+#include <stdio.h>
+
+#include "hello1.h"
+
+void hello1(void) {
+    printf("hello1");
+}
+
diff --git a/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h b/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h
new file mode 100644
index 0000000..0309c1c
--- /dev/null
+++ b/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h
@@ -0,0 +1,4 @@
+#pragma once
+
+extern "C" void hello1(void);
+
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 05ddfe5..2c2b5a9 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -24,7 +24,7 @@
     android.hidl.manager-V1.0-java \
     android.hidl.memory@1.0-impl \
     android.hidl.memory@1.0-impl.vendor \
-    android.system.suspend@1.0-service \
+    android.system.suspend-service \
     android.test.base \
     android.test.mock \
     android.test.runner \
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 851a2cb..5695803 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -76,7 +76,11 @@
     com.android.media:service-media-s \
     com.android.permission:service-permission \
 
-PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION += art/build/boot/boot-image-profile.txt
+# Use $(wildcard) to avoid referencing the profile in thin manifests that don't have the
+# art project.
+ifneq (,$(wildcard art))
+  PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION += art/build/boot/boot-image-profile.txt
+endif
 
 # List of jars on the platform that system_server loads dynamically using separate classloaders.
 # Keep the list sorted library names.
diff --git a/target/product/generic_ramdisk.mk b/target/product/generic_ramdisk.mk
index fb0370e..c7dcd60 100644
--- a/target/product/generic_ramdisk.mk
+++ b/target/product/generic_ramdisk.mk
@@ -22,10 +22,7 @@
 # Ramdisk
 PRODUCT_PACKAGES += \
     init_first_stage \
-    e2fsck.ramdisk \
-    fsck.f2fs.ramdisk \
-    tune2fs.ramdisk \
-    snapuserd.ramdisk \
+    snapuserd_ramdisk \
 
 # Debug ramdisk
 PRODUCT_PACKAGES += \
diff --git a/target/product/sdk.mk b/target/product/sdk.mk
index 96d8cc9..fa7e1ad 100644
--- a/target/product/sdk.mk
+++ b/target/product/sdk.mk
@@ -14,8 +14,11 @@
 # limitations under the License.
 #
 
-# Don't modify this file - It's just an alias!
+# This is a simple product that configures the minimum amount
+# needed to build the SDK (without the emulator).
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_phone_armv7.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/languages_default.mk)
 
 PRODUCT_NAME := sdk
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := mainline_x86
diff --git a/target/product/virtual_ab_ota/android_t_baseline.mk b/target/product/virtual_ab_ota/android_t_baseline.mk
index 18e08e4..716c8e0 100644
--- a/target/product/virtual_ab_ota/android_t_baseline.mk
+++ b/target/product/virtual_ab_ota/android_t_baseline.mk
@@ -38,15 +38,3 @@
 PRODUCT_PACKAGES += \
     snapuserd \
 
-# For dedicated recovery partitions, we need to include snapuserd
-# For GKI devices, BOARD_USES_RECOVERY_AS_BOOT is empty, but
-# so is BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT.
-ifdef BUILDING_RECOVERY_IMAGE
-ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-ifneq ($(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT),true)
-PRODUCT_PACKAGES += \
-    snapuserd.recovery
-endif
-endif
-endif
-
diff --git a/tests/run.rbc b/tests/run.rbc
index 2d35e85..c6dfeba 100644
--- a/tests/run.rbc
+++ b/tests/run.rbc
@@ -43,7 +43,7 @@
 
 assert_eq("", rblf.mkstrip(" \n \t    "))
 assert_eq("a b c", rblf.mkstrip("  a b   \n  c \t"))
-assert_eq(1, rblf.mkstrip(1))
+assert_eq("1", rblf.mkstrip("1 "))
 
 assert_eq("b1 b2", rblf.mksubst("a", "b", "a1 a2"))
 assert_eq(["b1", "x2"], rblf.mksubst("a", "b", ["a1", "x2"]))
@@ -81,6 +81,19 @@
 assert_eq(cwd+"/foo/bar "+cwd+"/foo/baz", rblf.abspath("foo/bar foo/baz"))
 assert_eq("/baz", rblf.abspath("/../../../../../../../../../../../../../../../../baz"))
 
+assert_eq("foo", rblf.first_word("foo bar"))
+assert_eq("foo", rblf.first_word(["foo", "bar"]))
+assert_eq("", rblf.first_word(""))
+assert_eq("", rblf.first_word([]))
+assert_eq("bar", rblf.last_word("foo bar"))
+assert_eq("bar", rblf.last_word(["foo", "bar"]))
+assert_eq("", rblf.last_word(""))
+assert_eq("", rblf.last_word([]))
+
+assert_eq(["foo", "bar"], rblf.flatten_2d_list([["foo", "bar"]]))
+assert_eq(["foo", "bar"], rblf.flatten_2d_list([["foo"], ["bar"]]))
+assert_eq([], rblf.flatten_2d_list([]))
+
 assert_eq(
     ["build/make/tests/board.rbc", "build/make/tests/board_input_vars.rbc"],
     rblf.expand_wildcard("build/make/tests/board*.rbc")
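
The new assertions pin down the semantics of the first_word, last_word, and
flatten_2d_list builtins: each accepts either a space-separated string or a
list, and returns an empty value for empty input. A minimal Python sketch of
those semantics (hypothetical stand-ins, not the real Starlark implementation):

    def first_word(value):
        # Accepts a space-separated string or a list; returns "" when empty.
        words = value.split() if isinstance(value, str) else list(value)
        return words[0] if words else ""

    def last_word(value):
        words = value.split() if isinstance(value, str) else list(value)
        return words[-1] if words else ""

    def flatten_2d_list(lst):
        # [["foo"], ["bar"]] -> ["foo", "bar"]
        return [item for inner in lst for item in inner]

    assert first_word("foo bar") == "foo" and first_word([]) == ""
    assert last_word(["foo", "bar"]) == "bar" and last_word("") == ""
    assert flatten_2d_list([["foo"], ["bar"]]) == ["foo", "bar"]
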
diff --git a/tools/rbcrun/host.go b/tools/rbcrun/host.go
index c6e89f0..32afa45 100644
--- a/tools/rbcrun/host.go
+++ b/tools/rbcrun/host.go
@@ -20,6 +20,7 @@
 	"os"
 	"os/exec"
 	"path/filepath"
+	"sort"
 	"strings"
 
 	"go.starlark.net/starlark"
@@ -111,19 +112,6 @@
 	return e.globals, e.err
 }
 
-// fileExists returns True if file with given name exists.
-func fileExists(_ *starlark.Thread, b *starlark.Builtin, args starlark.Tuple,
-	kwargs []starlark.Tuple) (starlark.Value, error) {
-	var path string
-	if err := starlark.UnpackPositionalArgs(b.Name(), args, kwargs, 1, &path); err != nil {
-		return starlark.None, err
-	}
-	if _, err := os.Stat(path); err != nil {
-		return starlark.False, nil
-	}
-	return starlark.True, nil
-}
-
 // wildcard(pattern, top=None) expands shell's glob pattern. If 'top' is present,
 // the 'top/pattern' is globbed and then 'top/' prefix is removed.
 func wildcard(_ *starlark.Thread, b *starlark.Builtin, args starlark.Tuple,
@@ -150,6 +138,10 @@
 			files[i] = strings.TrimPrefix(files[i], prefix)
 		}
 	}
+	// Kati uses glob(3) with no flags, which means it's sorted
+	// because GLOB_NOSORT is not passed. Go's glob is not
+	// guaranteed to sort the results.
+	sort.Strings(files)
 	return makeStringList(files), nil
 }
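
The added sort makes the Go implementation match kati, whose glob(3) results
come back sorted because GLOB_NOSORT is not passed. The same normalization
sketched in Python (illustrative; the real builtin is the Go function above):

    import glob

    def wildcard(pattern, top=None):
        # Expand the glob, optionally under 'top', and sort so the result
        # does not depend on filesystem enumeration order.
        if top:
            prefix = top + "/"
            files = [f[len(prefix):] for f in glob.glob(prefix + pattern)]
        else:
            files = glob.glob(pattern)
        return sorted(files)
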
 
@@ -269,8 +261,6 @@
 		"struct":   starlark.NewBuiltin("struct", starlarkstruct.Make),
 		"rblf_cli": structFromEnv(env),
 		"rblf_env": structFromEnv(os.Environ()),
-		// To convert makefile's $(wildcard foo)
-		"rblf_file_exists": starlark.NewBuiltin("rblf_file_exists", fileExists),
 		// To convert find-copy-subdir and product-copy-files-by pattern
 		"rblf_find_files": starlark.NewBuiltin("rblf_find_files", find),
 		// To convert makefile's $(shell cmd)
diff --git a/tools/rbcrun/testdata/file_ops.star b/tools/rbcrun/testdata/file_ops.star
index 50e39bf..2ee78fc 100644
--- a/tools/rbcrun/testdata/file_ops.star
+++ b/tools/rbcrun/testdata/file_ops.star
@@ -4,9 +4,6 @@
 
 def test():
     myname = "file_ops.star"
-    assert.true(rblf_file_exists("."), "./ exists ")
-    assert.true(rblf_file_exists(myname), "the file %s does exist" % myname)
-    assert.true(not rblf_file_exists("no_such_file"), "the file no_such_file does not exist")
     files = rblf_wildcard("*.star")
     assert.true(myname in files, "expected %s in  %s" % (myname, files))
     files = rblf_wildcard("*.star", rblf_env.TEST_DATA_DIR)
diff --git a/tools/rbcrun/testdata/module1.star b/tools/rbcrun/testdata/module1.star
index 913fb7d..be04f75 100644
--- a/tools/rbcrun/testdata/module1.star
+++ b/tools/rbcrun/testdata/module1.star
@@ -2,6 +2,6 @@
 load("assert.star", "assert")
 
 # Make sure that builtins are defined for the loaded module, too
-assert.true(rblf_file_exists("module1.star"))
-assert.true(not rblf_file_exists("no_such file"))
+assert.true(rblf_wildcard("module1.star"))
+assert.true(not rblf_wildcard("no_such file"))
 test = "module1"
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 3f13a4a..941edc6 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -54,7 +54,7 @@
 class ApexApkSigner(object):
   """Class to sign the apk files and other files in an apex payload image and repack the apex"""
 
-  def __init__(self, apex_path, key_passwords, codename_to_api_level_map, avbtool=None, sign_tool=None, fsverity_tool=None):
+  def __init__(self, apex_path, key_passwords, codename_to_api_level_map, avbtool=None, sign_tool=None):
     self.apex_path = apex_path
     if not key_passwords:
       self.key_passwords = dict()
@@ -65,9 +65,8 @@
         OPTIONS.search_path, "bin", "debugfs_static")
     self.avbtool = avbtool if avbtool else "avbtool"
     self.sign_tool = sign_tool
-    self.fsverity_tool = fsverity_tool if fsverity_tool else "fsverity"
 
-  def ProcessApexFile(self, apk_keys, payload_key, signing_args=None, is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None):
+  def ProcessApexFile(self, apk_keys, payload_key, signing_args=None):
     """Scans and signs the payload files and repack the apex
 
     Args:
@@ -85,14 +84,10 @@
                 self.debugfs_path, 'list', self.apex_path]
     entries_names = common.RunAndCheckOutput(list_cmd).split()
     apk_entries = [name for name in entries_names if name.endswith('.apk')]
-    sepolicy_entries = []
-    if is_sepolicy:
-      sepolicy_entries = [name for name in entries_names if
-          name.startswith('./etc/SEPolicy') and name.endswith('.zip')]
 
     # No need to sign and repack, return the original apex path.
-    if not apk_entries and not sepolicy_entries and self.sign_tool is None:
-      logger.info('No payload (apk or zip) file to sign in %s', self.apex_path)
+    if not apk_entries and self.sign_tool is None:
+      logger.info('No apk file to sign in %s', self.apex_path)
       return self.apex_path
 
     for entry in apk_entries:
@@ -106,16 +101,15 @@
         logger.warning('Apk path does not contain the intended directory name:'
                        ' %s', entry)
 
-    payload_dir, has_signed_content = self.ExtractApexPayloadAndSignContents(apk_entries,
-        apk_keys, payload_key, sepolicy_entries, sepolicy_key, sepolicy_cert, signing_args)
+    payload_dir, has_signed_content = self.ExtractApexPayloadAndSignContents(
+        apk_entries, apk_keys, payload_key, signing_args)
     if not has_signed_content:
       logger.info('No content has been signed in %s', self.apex_path)
       return self.apex_path
 
     return self.RepackApexPayload(payload_dir, payload_key, signing_args)
 
-  def ExtractApexPayloadAndSignContents(self, apk_entries, apk_keys, payload_key,
-  sepolicy_entries, sepolicy_key, sepolicy_cert, signing_args):
+  def ExtractApexPayloadAndSignContents(self, apk_entries, apk_keys, payload_key, signing_args):
     """Extracts the payload image and signs the containing apk files."""
     if not os.path.exists(self.debugfs_path):
       raise ApexSigningError(
@@ -147,11 +141,6 @@
           codename_to_api_level_map=self.codename_to_api_level_map)
       has_signed_content = True
 
-    for entry in sepolicy_entries:
-      sepolicy_key = sepolicy_key if sepolicy_key else payload_key
-      self.SignSePolicy(payload_dir, entry, sepolicy_key, sepolicy_cert)
-      has_signed_content = True
-
     if self.sign_tool:
       logger.info('Signing payload contents in apex %s with %s', self.apex_path, self.sign_tool)
       # Pass avbtool to the custom signing tool
@@ -165,36 +154,6 @@
 
     return payload_dir, has_signed_content
 
-  def SignSePolicy(self, payload_dir, sepolicy_zip, sepolicy_key, sepolicy_cert):
-    sepolicy_sig = sepolicy_zip + '.sig'
-    sepolicy_fsv_sig = sepolicy_zip + '.fsv_sig'
-
-    policy_zip_path = os.path.join(payload_dir, sepolicy_zip)
-    sig_out_path = os.path.join(payload_dir, sepolicy_sig)
-    sig_old = sig_out_path + '.old'
-    if os.path.exists(sig_out_path):
-      os.rename(sig_out_path, sig_old)
-    sign_cmd = ['openssl', 'dgst', '-sign', sepolicy_key, '-keyform', 'PEM', '-sha256',
-        '-out', sig_out_path, '-binary', policy_zip_path]
-    common.RunAndCheckOutput(sign_cmd)
-    if os.path.exists(sig_old):
-      os.remove(sig_old)
-
-    if not sepolicy_cert:
-      logger.info('No cert provided for SEPolicy, skipping fsverity sign')
-      return
-
-    fsv_sig_out_path = os.path.join(payload_dir, sepolicy_fsv_sig)
-    fsv_sig_old = fsv_sig_out_path + '.old'
-    if os.path.exists(fsv_sig_out_path):
-      os.rename(fsv_sig_out_path, fsv_sig_old)
-
-    fsverity_cmd = [self.fsverity_tool, 'sign', policy_zip_path, fsv_sig_out_path,
-        '--key=' + sepolicy_key, '--cert=' + sepolicy_cert]
-    common.RunAndCheckOutput(fsverity_cmd)
-    if os.path.exists(fsv_sig_old):
-      os.remove(fsv_sig_old)
-
   def RepackApexPayload(self, payload_dir, payload_key, signing_args=None):
     """Rebuilds the apex file with the updated payload directory."""
     apex_dir = common.MakeTempDir()
@@ -365,9 +324,7 @@
 
 def SignUncompressedApex(avbtool, apex_file, payload_key, container_key,
                          container_pw, apk_keys, codename_to_api_level_map,
-                         no_hashtree, signing_args=None, sign_tool=None,
-                         is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None,
-                         fsverity_tool=None):
+                         no_hashtree, signing_args=None, sign_tool=None):
   """Signs the current uncompressed APEX with the given payload/container keys.
 
   Args:
@@ -380,10 +337,6 @@
     no_hashtree: Don't include hashtree in the signed APEX.
     signing_args: Additional args to be passed to the payload signer.
     sign_tool: A tool to sign the contents of the APEX.
-    is_sepolicy: Indicates if the apex is a sepolicy.apex
-    sepolicy_key: Key to sign a sepolicy zip.
-    sepolicy_cert: Cert to sign a sepolicy zip.
-    fsverity_tool: fsverity path to sign sepolicy zip.
 
   Returns:
     The path to the signed APEX file.
@@ -392,9 +345,8 @@
   # the apex file after signing.
   apk_signer = ApexApkSigner(apex_file, container_pw,
                              codename_to_api_level_map,
-                             avbtool, sign_tool, fsverity_tool)
-  apex_file = apk_signer.ProcessApexFile(
-      apk_keys, payload_key, signing_args, is_sepolicy, sepolicy_key, sepolicy_cert)
+                             avbtool, sign_tool)
+  apex_file = apk_signer.ProcessApexFile(apk_keys, payload_key, signing_args)
 
   # 2a. Extract and sign the APEX_PAYLOAD_IMAGE entry with the given
   # payload_key.
@@ -448,9 +400,7 @@
 
 def SignCompressedApex(avbtool, apex_file, payload_key, container_key,
                        container_pw, apk_keys, codename_to_api_level_map,
-                       no_hashtree, signing_args=None, sign_tool=None,
-                       is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None,
-                       fsverity_tool=None):
+                       no_hashtree, signing_args=None, sign_tool=None):
   """Signs the current compressed APEX with the given payload/container keys.
 
   Args:
@@ -462,10 +412,6 @@
     codename_to_api_level_map: A dict that maps from codename to API level.
     no_hashtree: Don't include hashtree in the signed APEX.
     signing_args: Additional args to be passed to the payload signer.
-    is_sepolicy: Indicates if the apex is a sepolicy.apex
-    sepolicy_key: Key to sign a sepolicy zip.
-    sepolicy_cert: Cert to sign a sepolicy zip.
-    fsverity_tool: fsverity path to sign sepolicy zip.
 
   Returns:
     The path to the signed APEX file.
@@ -492,11 +438,7 @@
       codename_to_api_level_map,
       no_hashtree,
       signing_args,
-      sign_tool,
-      is_sepolicy,
-      sepolicy_key,
-      sepolicy_cert,
-      fsverity_tool)
+      sign_tool)
 
   # 3. Compress signed original apex.
   compressed_apex_file = common.MakeTempFile(prefix='apex-container-',
@@ -524,8 +466,7 @@
 
 def SignApex(avbtool, apex_data, payload_key, container_key, container_pw,
              apk_keys, codename_to_api_level_map,
-             no_hashtree, signing_args=None, sign_tool=None,
-             is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None, fsverity_tool=None):
+             no_hashtree, signing_args=None, sign_tool=None):
   """Signs the current APEX with the given payload/container keys.
 
   Args:
@@ -537,9 +478,6 @@
     codename_to_api_level_map: A dict that maps from codename to API level.
     no_hashtree: Don't include hashtree in the signed APEX.
     signing_args: Additional args to be passed to the payload signer.
-    sepolicy_key: Key to sign a sepolicy zip.
-    sepolicy_cert: Cert to sign a sepolicy zip.
-    fsverity_tool: fsverity path to sign sepolicy zip.
 
   Returns:
     The path to the signed APEX file.
@@ -565,11 +503,7 @@
           no_hashtree=no_hashtree,
           apk_keys=apk_keys,
           signing_args=signing_args,
-          sign_tool=sign_tool,
-          is_sepolicy=is_sepolicy,
-          sepolicy_key=sepolicy_key,
-          sepolicy_cert=sepolicy_cert,
-          fsverity_tool=fsverity_tool)
+          sign_tool=sign_tool)
     elif apex_type == 'COMPRESSED':
       return SignCompressedApex(
           avbtool,
@@ -581,11 +515,7 @@
           no_hashtree=no_hashtree,
           apk_keys=apk_keys,
           signing_args=signing_args,
-          sign_tool=sign_tool,
-          is_sepolicy=is_sepolicy,
-          sepolicy_key=sepolicy_key,
-          sepolicy_cert=sepolicy_cert,
-          fsverity_tool=fsverity_tool)
+          sign_tool=sign_tool)
     else:
       # TODO(b/172912232): support signing compressed apex
       raise ApexInfoError('Unsupported apex type {}'.format(apex_type))
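
With the sepolicy path removed, an apex is repacked only when its payload
contains .apk entries or a custom sign_tool is configured; otherwise the
original file is returned untouched. The decision in isolation (needs_repack
is a hypothetical helper, not part of apex_utils):

    def needs_repack(entry_names, sign_tool):
        # Mirrors the early return in ProcessApexFile: sign and repack only
        # if there are apk payload entries or a custom signing tool is set.
        apk_entries = [n for n in entry_names if n.endswith('.apk')]
        return bool(apk_entries) or sign_tool is not None

    assert not needs_repack(['./lib64/libfoo.so'], None)
    assert needs_repack(['./app/Foo/Foo.apk'], None)
    assert needs_repack(['./lib64/libfoo.so'], 'custom_tool')
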
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 7fdf4ba..9567fdc 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -331,6 +331,14 @@
     if compressor:
       build_command.extend(["-z", compressor])
 
+    compress_hints = None
+    if "erofs_default_compress_hints" in prop_dict:
+      compress_hints = prop_dict["erofs_default_compress_hints"]
+    if "erofs_compress_hints" in prop_dict:
+      compress_hints = prop_dict["erofs_compress_hints"]
+    if compress_hints:
+      build_command.extend(["--compress-hints", compress_hints])
+
     build_command.extend(["--mount-point", prop_dict["mount_point"]])
     if target_out:
       build_command.extend(["--product-out", target_out])
@@ -652,6 +660,7 @@
   common_props = (
       "extfs_sparse_flag",
       "erofs_default_compressor",
+      "erofs_default_compress_hints",
       "erofs_pcluster_size",
       "erofs_share_dup_blocks",
       "erofs_sparse_flag",
@@ -706,6 +715,7 @@
       (True, "{}_base_fs_file", "base_fs_file"),
       (True, "{}_disable_sparse", "disable_sparse"),
       (True, "{}_erofs_compressor", "erofs_compressor"),
+      (True, "{}_erofs_compress_hints", "erofs_compress_hints"),
       (True, "{}_erofs_pcluster_size", "erofs_pcluster_size"),
       (True, "{}_erofs_share_dup_blocks", "erofs_share_dup_blocks"),
       (True, "{}_extfs_inode_count", "extfs_inode_count"),
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 5f74e2b..caa4641 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -97,7 +97,6 @@
     self.stash_threshold = 0.8
     self.logfile = None
     self.host_tools = {}
-    self.sepolicy_name = 'sepolicy.apex'
 
 
 OPTIONS = Options()
@@ -455,6 +454,11 @@
     return vabc_enabled
 
   @property
+  def is_android_r(self):
+    system_prop = self.info_dict.get("system.build.prop")
+    return system_prop and system_prop.GetProp("ro.build.version.release") == "11"
+
+  @property
   def is_vabc_xor(self):
     vendor_prop = self.info_dict.get("vendor.build.prop")
     vabc_xor_enabled = vendor_prop and \
@@ -725,7 +729,7 @@
   GZ = 2
 
 
-def _GetRamdiskFormat(info_dict):
+def GetRamdiskFormat(info_dict):
   if info_dict.get('lz4_ramdisks') == 'true':
     ramdisk_format = RamdiskFormat.LZ4
   else:
@@ -834,7 +838,7 @@
 
   # Load recovery fstab if applicable.
   d["fstab"] = _FindAndLoadRecoveryFstab(d, input_file, read_helper)
-  ramdisk_format = _GetRamdiskFormat(d)
+  ramdisk_format = GetRamdiskFormat(d)
 
   # Tries to load the build props for all partitions with care_map, including
   # system and vendor.
@@ -1188,10 +1192,14 @@
     return " ".join(sorted(combined))
 
   if (framework_dict.get("use_dynamic_partitions") !=
-          "true") or (vendor_dict.get("use_dynamic_partitions") != "true"):
+        "true") or (vendor_dict.get("use_dynamic_partitions") != "true"):
     raise ValueError("Both dictionaries must have use_dynamic_partitions=true")
 
   merged_dict = {"use_dynamic_partitions": "true"}
+  # For key-value pairs that are identical in both dicts, copy them to the merged dict
+  for key in vendor_dict.keys():
+    if key in framework_dict and framework_dict[key] == vendor_dict[key]:
+      merged_dict[key] = vendor_dict[key]
 
   merged_dict["dynamic_partition_list"] = uniq_concat(
       framework_dict.get("dynamic_partition_list", ""),
@@ -1575,7 +1583,7 @@
   img = tempfile.NamedTemporaryFile()
 
   if has_ramdisk:
-    ramdisk_format = _GetRamdiskFormat(info_dict)
+    ramdisk_format = GetRamdiskFormat(info_dict)
     ramdisk_img = _MakeRamdisk(sourcedir, fs_config_file,
                                ramdisk_format=ramdisk_format)
 
@@ -1856,7 +1864,7 @@
 
   img = tempfile.NamedTemporaryFile()
 
-  ramdisk_format = _GetRamdiskFormat(info_dict)
+  ramdisk_format = GetRamdiskFormat(info_dict)
   ramdisk_img = _MakeRamdisk(sourcedir, ramdisk_format=ramdisk_format)
 
   # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
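
The new loop in the dynamic-partition dict merge copies every key whose value
is identical in the framework and vendor dictionaries, so settings the two
builds agree on survive the merge. The effect in isolation (example keys only):

    framework_dict = {"use_dynamic_partitions": "true", "ab_update": "true"}
    vendor_dict = {"use_dynamic_partitions": "true", "ab_update": "true",
                   "vendor_only_key": "1"}

    merged_dict = {"use_dynamic_partitions": "true"}
    # Carry over key-value pairs that agree in both inputs.
    for key in vendor_dict:
        if key in framework_dict and framework_dict[key] == vendor_dict[key]:
            merged_dict[key] = vendor_dict[key]

    assert merged_dict == {"use_dynamic_partitions": "true",
                           "ab_update": "true"}
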
diff --git a/tools/releasetools/merge/merge_utils.py b/tools/releasetools/merge/merge_utils.py
index f623ad2..e253b02 100644
--- a/tools/releasetools/merge/merge_utils.py
+++ b/tools/releasetools/merge/merge_utils.py
@@ -100,20 +100,16 @@
   has_error = False
 
   # Check that partitions only come from one input.
-  for partition in _FRAMEWORK_PARTITIONS.union(_VENDOR_PARTITIONS):
-    image_path = 'IMAGES/{}.img'.format(partition.lower().replace('/', ''))
-    in_framework = (
-        any(item.startswith(partition) for item in OPTIONS.framework_item_list)
-        or image_path in OPTIONS.framework_item_list)
-    in_vendor = (
-        any(item.startswith(partition) for item in OPTIONS.vendor_item_list) or
-        image_path in OPTIONS.vendor_item_list)
-    if in_framework and in_vendor:
-      logger.error(
-          'Cannot extract items from %s for both the framework and vendor'
-          ' builds. Please ensure only one merge config item list'
-          ' includes %s.', partition, partition)
-      has_error = True
+  framework_partitions = ItemListToPartitionSet(OPTIONS.framework_item_list)
+  vendor_partitions = ItemListToPartitionSet(OPTIONS.vendor_item_list)
+  from_both = framework_partitions.intersection(vendor_partitions)
+  if from_both:
+    logger.error(
+        'Cannot extract items from the same partition in both the '
+        'framework and vendor builds. Please ensure only one merge config '
+        'item list (or inferred list) includes each partition: %s' %
+        ','.join(from_both))
+    has_error = True
 
   if any([
       key in OPTIONS.framework_misc_info_keys
@@ -131,7 +127,8 @@
 # system partition). The following regex matches this and extracts the
 # partition name.
 
-_PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/\*$')
+_PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/.*$')
+_IMAGE_PARTITION_PATTERN = re.compile(r'^IMAGES/(.*)\.img$')
 
 
 def ItemListToPartitionSet(item_list):
@@ -154,62 +151,89 @@
   partition_set = set()
 
   for item in item_list:
-    partition_match = _PARTITION_ITEM_PATTERN.search(item.strip())
-    partition_tag = partition_match.group(
-        1).lower() if partition_match else None
-
-    if partition_tag:
-      partition_set.add(partition_tag)
+    for pattern in (_PARTITION_ITEM_PATTERN, _IMAGE_PARTITION_PATTERN):
+      partition_match = pattern.search(item.strip())
+      if partition_match:
+        partition = partition_match.group(1).lower()
+        # These directories in target-files are not actual partitions.
+        if partition not in ('meta', 'images'):
+          partition_set.add(partition)
 
   return partition_set
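
Relaxing the item pattern from ^([A-Z_]+)/\*$ to ^([A-Z_]+)/.*$ and adding the
IMAGES pattern means both directory items and image-file items now contribute
to the partition set, with 'meta' and 'images' filtered out because those
target-files directories are not real partitions. For example:

    import re

    _PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/.*$')
    _IMAGE_PARTITION_PATTERN = re.compile(r'^IMAGES/(.*)\.img$')

    # 'SYSTEM/system_ext/*' now matches (the old pattern required a literal
    # trailing '/*'), and 'IMAGES/vendor.img' maps to the vendor partition.
    assert _PARTITION_ITEM_PATTERN.match('SYSTEM/system_ext/*').group(1) == 'SYSTEM'
    assert _IMAGE_PARTITION_PATTERN.match('IMAGES/vendor.img').group(1) == 'vendor'
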
 
 
 # Partitions that are grabbed from the framework partial build by default.
 _FRAMEWORK_PARTITIONS = {
-    'system', 'product', 'system_ext', 'system_other', 'root', 'system_dlkm'
-}
-# Partitions that are grabbed from the vendor partial build by default.
-_VENDOR_PARTITIONS = {
-    'vendor', 'odm', 'oem', 'boot', 'vendor_boot', 'recovery',
-    'prebuilt_images', 'radio', 'data', 'vendor_dlkm', 'odm_dlkm'
+    'system', 'product', 'system_ext', 'system_other', 'root', 'system_dlkm',
+    'vbmeta_system'
 }
 
 
 def InferItemList(input_namelist, framework):
-  item_list = []
+  item_set = set()
 
-  # Some META items are grabbed from partial builds directly.
+  # Some META items are always grabbed from partial builds directly.
   # Others are combined in merge_meta.py.
   if framework:
-    item_list.extend([
+    item_set.update([
         'META/liblz4.so',
         'META/postinstall_config.txt',
         'META/update_engine_config.txt',
         'META/zucchini_config.txt',
     ])
   else:  # vendor
-    item_list.extend([
+    item_set.update([
         'META/kernel_configs.txt',
         'META/kernel_version.txt',
         'META/otakeys.txt',
+        'META/pack_radioimages.txt',
         'META/releasetools.py',
-        'OTA/android-info.txt',
     ])
 
   # Grab a set of items for the expected partitions in the partial build.
-  for partition in (_FRAMEWORK_PARTITIONS if framework else _VENDOR_PARTITIONS):
-    for namelist in input_namelist:
-      if namelist.startswith('%s/' % partition.upper()):
-        fs_config_prefix = '' if partition == 'system' else '%s_' % partition
-        item_list.extend([
-            '%s/*' % partition.upper(),
-            'IMAGES/%s.img' % partition,
-            'IMAGES/%s.map' % partition,
-            'META/%sfilesystem_config.txt' % fs_config_prefix,
-        ])
-        break
+  seen_partitions = []
+  for namelist in input_namelist:
+    if namelist.endswith('/'):
+      continue
 
-  return sorted(item_list)
+    partition = namelist.split('/')[0].lower()
+
+    # META items are grabbed above, or merged later.
+    if partition == 'meta':
+      continue
+
+    if partition == 'images':
+      image_partition, extension = os.path.splitext(os.path.basename(namelist))
+      if image_partition == 'vbmeta':
+        # Always regenerate vbmeta.img since it depends on hash information
+        # from both builds.
+        continue
+      if extension in ('.img', '.map'):
+        # Include image files in IMAGES/* if the partition comes from
+        # the expected set.
+        if (framework and image_partition in _FRAMEWORK_PARTITIONS) or (
+            not framework and image_partition not in _FRAMEWORK_PARTITIONS):
+          item_set.add(namelist)
+      elif not framework:
+        # Include all miscellaneous non-image files in IMAGES/* from
+        # the vendor build.
+        item_set.add(namelist)
+      continue
+
+    # Skip already-visited partitions.
+    if partition in seen_partitions:
+      continue
+    seen_partitions.append(partition)
+
+    if (framework and partition in _FRAMEWORK_PARTITIONS) or (
+        not framework and partition not in _FRAMEWORK_PARTITIONS):
+      fs_config_prefix = '' if partition == 'system' else '%s_' % partition
+      item_set.update([
+          '%s/*' % partition.upper(),
+          'META/%sfilesystem_config.txt' % fs_config_prefix,
+      ])
+
+  return sorted(item_set)
 
 
 def InferFrameworkMiscInfoKeys(input_namelist):
@@ -223,8 +247,8 @@
   ]
 
   for partition in _FRAMEWORK_PARTITIONS:
-    for namelist in input_namelist:
-      if namelist.startswith('%s/' % partition.upper()):
+    for partition_dir in ('%s/' % partition.upper(), 'SYSTEM/%s/' % partition):
+      if partition_dir in input_namelist:
         fs_type_prefix = '' if partition == 'system' else '%s_' % partition
         keys.extend([
             'avb_%s_hashtree_enable' % partition,
diff --git a/tools/releasetools/merge/test_merge_utils.py b/tools/releasetools/merge/test_merge_utils.py
index 1949050..eceb734 100644
--- a/tools/releasetools/merge/test_merge_utils.py
+++ b/tools/releasetools/merge/test_merge_utils.py
@@ -108,20 +108,27 @@
 
   def test_ItemListToPartitionSet(self):
     item_list = [
+        'IMAGES/system_ext.img',
         'META/apexkeys.txt',
         'META/apkcerts.txt',
         'META/filesystem_config.txt',
         'PRODUCT/*',
         'SYSTEM/*',
-        'SYSTEM_EXT/*',
+        'SYSTEM/system_ext/*',
     ]
     partition_set = merge_utils.ItemListToPartitionSet(item_list)
     self.assertEqual(set(['product', 'system', 'system_ext']), partition_set)
 
   def test_InferItemList_Framework(self):
     zip_namelist = [
+        'IMAGES/product.img',
+        'IMAGES/product.map',
+        'IMAGES/system.img',
+        'IMAGES/system.map',
         'SYSTEM/my_system_file',
         'PRODUCT/my_product_file',
+        # Device does not use a separate system_ext partition.
+        'SYSTEM/system_ext/system_ext_file',
     ]
 
     item_list = merge_utils.InferItemList(zip_namelist, framework=True)
@@ -147,37 +154,55 @@
     zip_namelist = [
         'VENDOR/my_vendor_file',
         'ODM/my_odm_file',
+        'IMAGES/odm.img',
+        'IMAGES/odm.map',
+        'IMAGES/vendor.img',
+        'IMAGES/vendor.map',
+        'IMAGES/my_custom_image.img',
+        'IMAGES/my_custom_file.txt',
+        'IMAGES/vbmeta.img',
+        'CUSTOM_PARTITION/my_custom_file',
+        # Leftover framework pieces that shouldn't be grabbed.
+        'IMAGES/system.img',
+        'SYSTEM/system_file',
     ]
 
     item_list = merge_utils.InferItemList(zip_namelist, framework=False)
 
     expected_vendor_item_list = [
+        'CUSTOM_PARTITION/*',
+        'IMAGES/my_custom_file.txt',
+        'IMAGES/my_custom_image.img',
         'IMAGES/odm.img',
         'IMAGES/odm.map',
         'IMAGES/vendor.img',
         'IMAGES/vendor.map',
+        'META/custom_partition_filesystem_config.txt',
         'META/kernel_configs.txt',
         'META/kernel_version.txt',
         'META/odm_filesystem_config.txt',
         'META/otakeys.txt',
+        'META/pack_radioimages.txt',
         'META/releasetools.py',
         'META/vendor_filesystem_config.txt',
         'ODM/*',
-        'OTA/android-info.txt',
         'VENDOR/*',
     ]
     self.assertEqual(item_list, expected_vendor_item_list)
 
   def test_InferFrameworkMiscInfoKeys(self):
     zip_namelist = [
-        'SYSTEM/my_system_file',
-        'SYSTEM_EXT/my_system_ext_file',
+        'PRODUCT/',
+        'SYSTEM/',
+        'SYSTEM/system_ext/',
     ]
 
     keys = merge_utils.InferFrameworkMiscInfoKeys(zip_namelist)
 
     expected_keys = [
         'ab_update',
+        'avb_product_add_hashtree_footer_args',
+        'avb_product_hashtree_enable',
         'avb_system_add_hashtree_footer_args',
         'avb_system_ext_add_hashtree_footer_args',
         'avb_system_ext_hashtree_enable',
@@ -186,10 +211,13 @@
         'avb_vbmeta_system_algorithm',
         'avb_vbmeta_system_key_path',
         'avb_vbmeta_system_rollback_index_location',
+        'building_product_image',
         'building_system_ext_image',
         'building_system_image',
         'default_system_dev_certificate',
         'fs_type',
+        'product_disable_sparse',
+        'product_fs_type',
         'system_disable_sparse',
         'system_ext_disable_sparse',
         'system_ext_fs_type',
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 522d489..5384699 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -1068,10 +1068,11 @@
         pre_partition_state, post_partition_state):
   assert pre_partition_state is not None
   partition_timestamps = {}
-  for part in pre_partition_state:
-    partition_timestamps[part.partition_name] = part.version
   for part in post_partition_state:
-    partition_timestamps[part.partition_name] = \
+    partition_timestamps[part.partition_name] = part.version
+  for part in pre_partition_state:
+    if part.partition_name in partition_timestamps:
+      partition_timestamps[part.partition_name] = \
         max(part.version, partition_timestamps[part.partition_name])
   return [
       "--partition_timestamps",
@@ -1145,6 +1146,14 @@
       logger.info("Either source or target does not support VABC, disabling.")
       OPTIONS.disable_vabc = True
 
+    # Virtual AB Compression was introduced in Android S. VABC was later
+    # backported to Android R, but verity support was not, so if VABC is
+    # used and the source build is Android R, disable verity computation.
+    if not OPTIONS.disable_vabc and source_info.is_android_r:
+      OPTIONS.disable_verity_computation = True
+      OPTIONS.disable_fec_computation = True
+
   else:
     assert "ab_partitions" in OPTIONS.info_dict, \
         "META/ab_partitions.txt is required for ab_update."
@@ -1208,6 +1217,8 @@
         metadata.postcondition.partition_state)
 
   if not ota_utils.IsZucchiniCompatible(source_file, target_file):
+    logger.warning(
+        "Builds doesn't support zucchini, or source/target don't have compatible zucchini versions. Disabling zucchini.")
     OPTIONS.enable_zucchini = False
 
   additional_args += ["--enable_zucchini",
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 5d403dc..ef1dca2 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -22,7 +22,8 @@
 import ota_metadata_pb2
 from common import (ZipDelete, ZipClose, OPTIONS, MakeTempFile,
                     ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
-                    SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps)
+                    SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps,
+                    GetRamdiskFormat)
 
 logger = logging.getLogger(__name__)
 
@@ -371,15 +372,18 @@
     for partition in PARTITIONS_WITH_BUILD_PROP:
       partition_prop_key = "{}.build.prop".format(partition)
       input_file = info_dict[partition_prop_key].input_file
+      ramdisk = GetRamdiskFormat(info_dict)
       if isinstance(input_file, zipfile.ZipFile):
         with zipfile.ZipFile(input_file.filename, allowZip64=True) as input_zip:
           info_dict[partition_prop_key] = \
               PartitionBuildProps.FromInputFile(input_zip, partition,
-                                                placeholder_values)
+                                                placeholder_values,
+                                                ramdisk)
       else:
         info_dict[partition_prop_key] = \
             PartitionBuildProps.FromInputFile(input_file, partition,
-                                              placeholder_values)
+                                              placeholder_values,
+                                              ramdisk)
     info_dict["build.prop"] = info_dict["system.build.prop"]
     build_info_set.add(BuildInfo(info_dict, default_build_info.oem_dicts))
 
@@ -693,6 +697,7 @@
       if os.path.exists(entry_path):
         with open(entry_path, "r") as fp:
           return fp.read()
-      else:
-        return ""
-  return ReadEntry(source_file, _ZUCCHINI_CONFIG_ENTRY_NAME) == ReadEntry(target_file, _ZUCCHINI_CONFIG_ENTRY_NAME)
+    return False
+  sourceEntry = ReadEntry(source_file, _ZUCCHINI_CONFIG_ENTRY_NAME)
+  targetEntry = ReadEntry(target_file, _ZUCCHINI_CONFIG_ENTRY_NAME)
+  return sourceEntry and targetEntry and sourceEntry == targetEntry
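
A missing zucchini config entry now reads as False rather than an empty
string, so two builds that both lack the config no longer compare as
compatible. The resulting rule in isolation (hypothetical helper):

    def zucchini_compatible(source_entry, target_entry):
        # Entries hold the config file contents, or False when absent.
        return bool(source_entry and target_entry
                    and source_entry == target_entry)

    assert not zucchini_compatible(False, False)  # neither build has a config
    assert not zucchini_compatible("v1", "v2")    # configs differ
    assert zucchini_compatible("v1", "v1")
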
diff --git a/tools/releasetools/sign_apex.py b/tools/releasetools/sign_apex.py
index a68f1ec..6926467 100755
--- a/tools/releasetools/sign_apex.py
+++ b/tools/releasetools/sign_apex.py
@@ -42,15 +42,6 @@
 
   --sign_tool <sign_tool>
       Optional flag that specifies a custom signing tool for the contents of the apex.
-
-  --sepolicy_key <key>
-      Optional flag that specifies the sepolicy signing key, defaults to payload_key.
-
-  --sepolicy_cert <cert>
-      Optional flag that specifies the sepolicy signing cert.
-
-  --fsverity_tool <path>
-      Optional flag that specifies the path to fsverity tool to sign SEPolicy, defaults to fsverity.
 """
 
 import logging
@@ -61,12 +52,10 @@
 import common
 
 logger = logging.getLogger(__name__)
-OPTIONS = common.OPTIONS
 
 
 def SignApexFile(avbtool, apex_file, payload_key, container_key, no_hashtree,
-                 apk_keys=None, signing_args=None, codename_to_api_level_map=None, sign_tool=None,
-                 sepolicy_key=None, sepolicy_cert=None, fsverity_tool=None):
+                 apk_keys=None, signing_args=None, codename_to_api_level_map=None, sign_tool=None):
   """Signs the given apex file."""
   with open(apex_file, 'rb') as input_fp:
     apex_data = input_fp.read()
@@ -81,11 +70,7 @@
       no_hashtree=no_hashtree,
       apk_keys=apk_keys,
       signing_args=signing_args,
-      sign_tool=sign_tool,
-      is_sepolicy=apex_file.endswith(OPTIONS.sepolicy_name),
-      sepolicy_key=sepolicy_key,
-      sepolicy_cert=sepolicy_cert,
-      fsverity_tool=fsverity_tool)
+      sign_tool=sign_tool)
 
 
 def main(argv):
@@ -121,12 +106,6 @@
         options['extra_apks'].update({n: key})
     elif o == '--sign_tool':
       options['sign_tool'] = a
-    elif o == '--sepolicy_key':
-      options['sepolicy_key'] = a
-    elif o == '--sepolicy_cert':
-      options['sepolicy_cert'] = a
-    elif o == '--fsverity_tool':
-      options['fsverity_tool'] = a
     else:
       return False
     return True
@@ -142,9 +121,6 @@
           'payload_key=',
           'extra_apks=',
           'sign_tool=',
-          'sepolicy_key=',
-          'sepolicy_cert=',
-          'fsverity_tool='
       ],
       extra_option_handler=option_handler)
 
@@ -165,10 +141,7 @@
       signing_args=options.get('payload_extra_args'),
       codename_to_api_level_map=options.get(
           'codename_to_api_level_map', {}),
-      sign_tool=options.get('sign_tool', None),
-      sepolicy_key=options.get('sepolicy_key', None),
-      sepolicy_cert=options.get('sepolicy_cert', None),
-      fsverity_tool=options.get('fsverity_tool', None))
+      sign_tool=options.get('sign_tool', None))
   shutil.copyfile(signed_apex, args[1])
   logger.info("done.")
 
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index f363afd..27e9dfb 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -137,15 +137,6 @@
   --android_jar_path <path>
       Path to the android.jar to repack the apex file.
 
-  --sepolicy_key <key>
-      Optional flag that specifies the sepolicy signing key, defaults to payload_key for the sepolicy.apex.
-
-  --sepolicy_cert <cert>
-      Optional flag that specifies the sepolicy signing cert.
-
-  --fsverity_tool <path>
-      Optional flag that specifies the path to fsverity tool to sign SEPolicy, defaults to fsverity.
-
   --allow_gsi_debug_sepolicy
       Allow the existence of the file 'userdebug_plat_sepolicy.cil' under
       (/system/system_ext|/system_ext)/etc/selinux.
@@ -205,9 +196,6 @@
 OPTIONS.android_jar_path = None
 OPTIONS.vendor_partitions = set()
 OPTIONS.vendor_otatools = None
-OPTIONS.sepolicy_key = None
-OPTIONS.sepolicy_cert = None
-OPTIONS.fsverity_tool = None
 OPTIONS.allow_gsi_debug_sepolicy = False
 
 
@@ -247,8 +235,6 @@
 def IsApexFile(filename):
   return filename.endswith(".apex") or filename.endswith(".capex")
 
-def IsSepolicyApex(filename):
-  return filename.endswith(OPTIONS.sepolicy_name)
 
 def GetApexFilename(filename):
   name = os.path.basename(filename)
@@ -271,24 +257,6 @@
 
   return certmap
 
-def GetSepolicyKeys(keys_info):
-  """Gets SEPolicy signing keys applying overrides from command line options.
-
-  Args:
-    keys_info: A dict that maps from the SEPolicy APEX filename to a tuple of
-    (sepolicy_key, sepolicy_cert, fsverity_tool).
-
-  Returns:
-    A dict that contains the updated APEX key mapping, which should be used for
-    the current signing.
-  """
-  for name in keys_info:
-      (sepolicy_key, sepolicy_cert, fsverity_tool) = keys_info[name]
-      sepolicy_key = OPTIONS.sepolicy_key if OPTIONS.sepolicy_key else sepolicy_key
-      sepolicy_cert = OPTIONS.sepolicy_cert if OPTIONS.sepolicy_cert else sepolicy_cert
-      fsverity_tool = OPTIONS.fsverity_tool if OPTIONS.fsverity_tool else fsverity_tool
-      keys_info[name] = (sepolicy_key, sepolicy_cert, fsverity_tool)
-  return keys_info
 
 def GetApexKeys(keys_info, key_map):
   """Gets APEX payload and container signing keys by applying the mapping rules.
@@ -551,7 +519,7 @@
 def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
                        apk_keys, apex_keys, key_passwords,
                        platform_api_level, codename_to_api_level_map,
-                       compressed_extension, sepolicy_keys):
+                       compressed_extension):
   # maxsize measures the maximum filename length, including the ones to be
   # skipped.
   try:
@@ -619,17 +587,6 @@
         print("           : %-*s payload   (%s)" % (
             maxsize, name, payload_key))
 
-        sepolicy_key = None
-        sepolicy_cert = None
-        fsverity_tool = None
-
-        if IsSepolicyApex(name):
-          (sepolicy_key, sepolicy_cert, fsverity_tool) = sepolicy_keys[name]
-          print("           : %-*s sepolicy key   (%s)" % (
-            maxsize, name, sepolicy_key))
-          print("           : %-*s sepolicy cert  (%s)" % (
-            maxsize, name, sepolicy_cert))
-
         signed_apex = apex_utils.SignApex(
             misc_info['avb_avbtool'],
             data,
@@ -640,11 +597,7 @@
             codename_to_api_level_map,
             no_hashtree=None,  # Let apex_util determine if hash tree is needed
             signing_args=OPTIONS.avb_extra_args.get('apex'),
-            sign_tool=sign_tool,
-            is_sepolicy=IsSepolicyApex(name),
-            sepolicy_key=sepolicy_key,
-            sepolicy_cert=sepolicy_cert,
-            fsverity_tool=fsverity_tool)
+            sign_tool=sign_tool)
         common.ZipWrite(output_tf_zip, signed_apex, filename)
 
       else:
@@ -1254,24 +1207,20 @@
 def ReadApexKeysInfo(tf_zip):
   """Parses the APEX keys info from a given target-files zip.
 
-  Given a target-files ZipFile, parses the META/apexkeys.txt entry and returns
-  two dicts, the first one contains the mapping from APEX names
-  (e.g. com.android.tzdata) to a tuple of (payload_key, container_key,
-  sign_tool). The second one maps the sepolicy APEX name to a tuple containing
-  (sepolicy_key, sepolicy_cert, fsverity_tool).
+  Given a target-files ZipFile, parses the META/apexkeys.txt entry and returns a
+  dict that contains the mapping from APEX names (e.g. com.android.tzdata) to a
+  tuple of (payload_key, container_key, sign_tool).
 
   Args:
     tf_zip: The input target_files ZipFile (already open).
 
   Returns:
-    name : (payload_key, container_key, sign_tool)
+    A dict mapping APEX names to (payload_key, container_key, sign_tool):
       - payload_key contains the path to the payload signing key
       - container_key contains the path to the container signing key
       - sign_tool is an apex-specific signing tool for its payload contents
-    name : (sepolicy_key, sepolicy_cert, fsverity_tool)
   """
   keys = {}
-  sepolicy_keys = {}
   for line in tf_zip.read('META/apexkeys.txt').decode().split('\n'):
     line = line.strip()
     if not line:
@@ -1282,9 +1231,6 @@
         r'private_key="(?P<PAYLOAD_PRIVATE_KEY>.*)"\s+'
         r'container_certificate="(?P<CONTAINER_CERT>.*)"\s+'
         r'container_private_key="(?P<CONTAINER_PRIVATE_KEY>.*?)"'
-        r'(\s+sepolicy_key="(?P<SEPOLICY_KEY>.*?)")?'
-        r'(\s+sepolicy_certificate="(?P<SEPOLICY_CERT>.*?)")?'
-        r'(\s+fsverity_tool="(?P<FSVERITY_TOOL>.*?)")?'
         r'(\s+partition="(?P<PARTITION>.*?)")?'
         r'(\s+sign_tool="(?P<SIGN_TOOL>.*?)")?$',
         line)
@@ -1313,18 +1259,12 @@
             container_private_key, OPTIONS.private_key_suffix):
       container_key = container_cert[:-len(OPTIONS.public_key_suffix)]
     else:
-      raise ValueError("Failed to parse container keys: \n{} **** {}".format(container_cert, container_private_key))
+      raise ValueError("Failed to parse container keys: \n{}".format(line))
 
     sign_tool = matches.group("SIGN_TOOL")
     keys[name] = (payload_private_key, container_key, sign_tool)
 
-    if IsSepolicyApex(name):
-      sepolicy_key = matches.group('SEPOLICY_KEY')
-      sepolicy_cert = matches.group('SEPOLICY_CERT')
-      fsverity_tool = matches.group('FSVERITY_TOOL')
-      sepolicy_keys[name] = (sepolicy_key, sepolicy_cert, fsverity_tool)
-
-  return keys, sepolicy_keys
+  return keys
 
 
 def BuildVendorPartitions(output_zip_path):
@@ -1415,7 +1355,8 @@
       img_file_path = "IMAGES/{}.img".format(p)
       map_file_path = "IMAGES/{}.map".format(p)
       common.ZipWrite(output_zip, os.path.join(vendor_tempdir, img_file_path), img_file_path)
-      common.ZipWrite(output_zip, os.path.join(vendor_tempdir, map_file_path), map_file_path)
+      if os.path.exists(os.path.join(vendor_tempdir, map_file_path)):
+        common.ZipWrite(output_zip, os.path.join(vendor_tempdir, map_file_path), map_file_path)
     # copy recovery.img, boot.img, recovery patch & install.sh
     if OPTIONS.rebuild_recovery:
       recovery_img = "IMAGES/recovery.img"
@@ -1541,12 +1482,6 @@
       OPTIONS.vendor_otatools = a
     elif o == "--vendor_partitions":
       OPTIONS.vendor_partitions = set(a.split(","))
-    elif o == '--sepolicy_key':
-      OPTIONS.sepolicy_key = a
-    elif o == '--sepolicy_cert':
-      OPTIONS.sepolicy_cert = a
-    elif o == '--fsverity_tool':
-      OPTIONS.fsverity_tool = a
     elif o == "--allow_gsi_debug_sepolicy":
       OPTIONS.allow_gsi_debug_sepolicy = True
     else:
@@ -1601,9 +1536,6 @@
           "gki_signing_extra_args=",
           "vendor_partitions=",
           "vendor_otatools=",
-          "sepolicy_key=",
-          "sepolicy_cert=",
-          "fsverity_tool=",
           "allow_gsi_debug_sepolicy",
       ],
       extra_option_handler=option_handler)
@@ -1626,9 +1558,8 @@
   apk_keys_info, compressed_extension = common.ReadApkCerts(input_zip)
   apk_keys = GetApkCerts(apk_keys_info)
 
-  apex_keys_info, sepolicy_keys_info = ReadApexKeysInfo(input_zip)
+  apex_keys_info = ReadApexKeysInfo(input_zip)
   apex_keys = GetApexKeys(apex_keys_info, apk_keys)
-  sepolicy_keys = GetSepolicyKeys(sepolicy_keys_info)
 
   # TODO(xunchang) check for the apks inside the apex files, and abort early if
   # the keys are not available.
@@ -1646,7 +1577,7 @@
   ProcessTargetFiles(input_zip, output_zip, misc_info,
                      apk_keys, apex_keys, key_passwords,
                      platform_api_level, codename_to_api_level_map,
-                     compressed_extension, sepolicy_keys)
+                     compressed_extension)
 
   common.ZipClose(input_zip)
   common.ZipClose(output_zip)
diff --git a/tools/releasetools/test_sign_apex.py b/tools/releasetools/test_sign_apex.py
index c344e22..8470f20 100644
--- a/tools/releasetools/test_sign_apex.py
+++ b/tools/releasetools/test_sign_apex.py
@@ -71,21 +71,3 @@
         False,
         codename_to_api_level_map={'S': 31, 'Tiramisu' : 32})
     self.assertTrue(os.path.exists(signed_apex))
-
-  @test_utils.SkipIfExternalToolsUnavailable()
-  def test_SignApexWithSepolicy(self):
-    test_apex = os.path.join(self.testdata_dir, 'sepolicy.apex')
-    payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
-    container_key = os.path.join(self.testdata_dir, 'testkey')
-    sepolicy_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
-    sepolicy_cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
-    signed_test_apex = sign_apex.SignApexFile(
-        'avbtool',
-        test_apex,
-        payload_key,
-        container_key,
-        False,
-        None,
-        sepolicy_key=sepolicy_key,
-        sepolicy_cert=sepolicy_cert)
-    self.assertTrue(os.path.exists(signed_test_apex))
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 144a3cd..0f13add 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -476,7 +476,7 @@
       target_files_zip.writestr('META/apexkeys.txt', self.APEX_KEYS_TXT)
 
     with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+      keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
         'apex.apexd_test.apex': (
@@ -486,7 +486,6 @@
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
             'build/make/target/product/security/testkey', None),
         }, keys_info)
-    self.assertEqual({}, sepolicy_keys_info)
 
   def test_ReadApexKeysInfo_mismatchingContainerKeys(self):
     # Mismatching payload public / private keys.
@@ -516,7 +515,7 @@
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
     with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+      keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
         'apex.apexd_test.apex': (
@@ -526,7 +525,6 @@
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
             'build/make/target/product/security/testkey', None),
         }, keys_info)
-    self.assertEqual({}, sepolicy_keys_info)
 
   def test_ReadApexKeysInfo_missingPayloadPublicKey(self):
     # Invalid lines will be skipped.
@@ -540,7 +538,7 @@
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
     with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+      keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
         'apex.apexd_test.apex': (
@@ -550,7 +548,6 @@
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
             'build/make/target/product/security/testkey', None),
         }, keys_info)
-    self.assertEqual({}, sepolicy_keys_info)
 
   def test_ReadApexKeysInfo_presignedKeys(self):
     apex_keys = self.APEX_KEYS_TXT + (
@@ -564,7 +561,7 @@
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
     with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+      keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
         'apex.apexd_test.apex': (
@@ -574,7 +571,6 @@
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
             'build/make/target/product/security/testkey', None),
         }, keys_info)
-    self.assertEqual({}, sepolicy_keys_info)
 
   def test_ReadApexKeysInfo_presignedKeys(self):
     apex_keys = self.APEX_KEYS_TXT + (
@@ -588,7 +584,7 @@
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
     with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+      keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
         'apex.apexd_test.apex': (
@@ -598,72 +594,6 @@
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
             'build/make/target/product/security/testkey', None),
         }, keys_info)
-    self.assertEqual({}, sepolicy_keys_info)
-
-  def test_ReadApexKeysInfo_withSepolicyKeys(self):
-    apex_keys = self.APEX_KEYS_TXT + (
-        'name="sepolicy.apex" '
-        'public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" '
-        'private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" '
-        'container_certificate="build/make/target/product/security/testkey.x509.pem" '
-        'container_private_key="build/make/target/product/security/testkey.pk8" '
-        'sepolicy_key="build/make/target/product/security/testkey.key" '
-        'sepolicy_certificate="build/make/target/product/security/testkey.x509.pem" '
-        'fsverity_tool="fsverity"')
-    target_files = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
-      target_files_zip.writestr('META/apexkeys.txt', apex_keys)
-
-    with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
-
-    self.assertEqual({
-        'apex.apexd_test.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
-            'build/make/target/product/security/testkey', None),
-        'apex.apexd_test_different_app.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
-            'build/make/target/product/security/testkey', None),
-        'sepolicy.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
-            'build/make/target/product/security/testkey', None),
-        }, keys_info)
-    self.assertEqual({'sepolicy.apex': (
-            'build/make/target/product/security/testkey.key',
-            'build/make/target/product/security/testkey.x509.pem',
-            'fsverity'),
-        }, sepolicy_keys_info)
-
-  def test_ReadApexKeysInfo_withSepolicyApex(self):
-    apex_keys = self.APEX_KEYS_TXT + (
-        'name="sepolicy.apex" '
-        'public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" '
-        'private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" '
-        'container_certificate="build/make/target/product/security/testkey.x509.pem" '
-        'container_private_key="build/make/target/product/security/testkey.pk8" ')
-    target_files = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
-      target_files_zip.writestr('META/apexkeys.txt', apex_keys)
-
-    with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
-
-    self.assertEqual({
-        'apex.apexd_test.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
-            'build/make/target/product/security/testkey', None),
-        'apex.apexd_test_different_app.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
-            'build/make/target/product/security/testkey', None),
-        'sepolicy.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
-            'build/make/target/product/security/testkey', None),
-        }, keys_info)
-    self.assertEqual({'sepolicy.apex': (
-            None,
-            None,
-            None),
-        }, sepolicy_keys_info)
 
   def test_ReplaceGkiSigningKey(self):
     common.OPTIONS.gki_signing_key = 'release_gki_key'
diff --git a/tools/releasetools/testdata/sepolicy.apex b/tools/releasetools/testdata/sepolicy.apex
deleted file mode 100644
index f7d267d..0000000
--- a/tools/releasetools/testdata/sepolicy.apex
+++ /dev/null
Binary files differ
diff --git a/tools/signapk/src/com/android/signapk/SignApk.java b/tools/signapk/src/com/android/signapk/SignApk.java
index c127dbe..36a220c 100644
--- a/tools/signapk/src/com/android/signapk/SignApk.java
+++ b/tools/signapk/src/com/android/signapk/SignApk.java
@@ -901,7 +901,7 @@
      * Tries to load a JSE Provider by class name. This is for custom PrivateKey
      * types that might be stored in PKCS#11-like storage.
      */
-    private static void loadProviderIfNecessary(String providerClassName) {
+    private static void loadProviderIfNecessary(String providerClassName, String providerArg) {
         if (providerClassName == null) {
             return;
         }
@@ -920,27 +920,41 @@
             return;
         }
 
-        Constructor<?> constructor = null;
-        for (Constructor<?> c : klass.getConstructors()) {
-            if (c.getParameterTypes().length == 0) {
-                constructor = c;
-                break;
+        Constructor<?> constructor;
+        Object o = null;
+        if (providerArg == null) {
+            try {
+                constructor = klass.getConstructor();
+                o = constructor.newInstance();
+            } catch (ReflectiveOperationException e) {
+                e.printStackTrace();
+                System.err.println("Unable to instantiate " + providerClassName
+                        + " with a zero-arg constructor");
+                System.exit(1);
+            }
+        } else {
+            try {
+                constructor = klass.getConstructor(String.class);
+                o = constructor.newInstance(providerArg);
+            } catch (ReflectiveOperationException e) {
+                // This is expected from JDK 9+; the single-arg constructor accepting the
+                // configuration has been replaced with a configure(String) method to be invoked
+                // after instantiating the Provider with the zero-arg constructor.
+                try {
+                    constructor = klass.getConstructor();
+                    o = constructor.newInstance();
+                    // The configure method will return either the modified Provider or a new
+                    // Provider if this one cannot be configured in-place.
+                    o = klass.getMethod("configure", String.class).invoke(o, providerArg);
+                } catch (ReflectiveOperationException roe) {
+                    roe.printStackTrace();
+                    System.err.println("Unable to instantiate " + providerClassName
+                            + " with the provided argument " + providerArg);
+                    System.exit(1);
+                }
             }
         }
-        if (constructor == null) {
-            System.err.println("No zero-arg constructor found for " + providerClassName);
-            System.exit(1);
-            return;
-        }
 
-        final Object o;
-        try {
-            o = constructor.newInstance();
-        } catch (Exception e) {
-            e.printStackTrace();
-            System.exit(1);
-            return;
-        }
         if (!(o instanceof Provider)) {
             System.err.println("Not a Provider class: " + providerClassName);
             System.exit(1);
@@ -1049,6 +1063,7 @@
                            "[-a <alignment>] " +
                            "[--align-file-size] " +
                            "[-providerClass <className>] " +
+                           "[-providerArg <configureArg>] " +
                            "[-loadPrivateKeysFromKeyStore <keyStoreName>]" +
                            "[-keyStorePin <pin>]" +
                            "[--min-sdk-version <n>] " +
@@ -1073,6 +1088,7 @@
 
         boolean signWholeFile = false;
         String providerClass = null;
+        String providerArg = null;
         String keyStoreName = null;
         String keyStorePin = null;
         int alignment = 4;
@@ -1093,6 +1109,12 @@
                 }
                 providerClass = args[++argstart];
                 ++argstart;
+            } else if ("-providerArg".equals(args[argstart])) {
+                if (argstart + 1 >= args.length) {
+                    usage();
+                }
+                providerArg = args[++argstart];
+                ++argstart;
             } else if ("-loadPrivateKeysFromKeyStore".equals(args[argstart])) {
                 if (argstart + 1 >= args.length) {
                     usage();
@@ -1153,7 +1175,7 @@
             System.exit(2);
         }
 
-        loadProviderIfNecessary(providerClass);
+        loadProviderIfNecessary(providerClass, providerArg);
 
         String inputFilename = args[numArgsExcludeV4FilePath - 2];
         String outputFilename = args[numArgsExcludeV4FilePath - 1];
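
The new flag pairs with -providerClass for providers that, since JDK 9, are configured through Provider.configure(String) instead of a single-String constructor (SunPKCS11 being the typical case). A minimal sketch of an invocation exercising both flags; the signapk.jar location, pkcs11.cfg, and the key pair names are placeholders, and only the two flags come from this change:

import subprocess

def sign_apk_with_pkcs11(input_apk, output_apk):
    # Hypothetical wrapper; jar path, config file, and key pair are
    # placeholders. -providerArg reaches Provider.configure() on JDK 9+.
    subprocess.run([
        'java', '-jar', 'signapk.jar',
        '-providerClass', 'sun.security.pkcs11.SunPKCS11',
        '-providerArg', 'pkcs11.cfg',
        'cert.x509.pem', 'key.pk8',
        input_apk, output_apk,
    ], check=True)
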
diff --git a/tools/warn/html_writer.py b/tools/warn/html_writer.py
index 3fa822a..46ba253 100644
--- a/tools/warn/html_writer.py
+++ b/tools/warn/html_writer.py
@@ -56,6 +56,7 @@
 
 from __future__ import print_function
 import csv
+import datetime
 import html
 import sys
 
@@ -258,7 +259,7 @@
 
 
 def dump_stats(writer, warn_patterns):
-  """Dump some stats about total number of warnings and such."""
+  """Dump some stats about total number of warnings and date."""
 
   known = 0
   skipped = 0
@@ -279,6 +280,8 @@
   if total < 1000:
     extra_msg = ' (low count may indicate incremental build)'
   writer('Total number of warnings: <b>' + str(total) + '</b>' + extra_msg)
+  date_time_str = datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')
+  writer('<p>(generated on ' + date_time_str + ')')
 
 
 # New base table of warnings, [severity, warn_id, project, warning_message]
@@ -662,15 +665,26 @@
   var warningsOfFiles = {};
   var warningsOfDirs = {};
   var subDirs = {};
-  function addOneWarning(map, key) {
-    map[key] = 1 + ((key in map) ? map[key] : 0);
+  function addOneWarning(map, key, type, unique) {
+    function increaseCounter(idx) {
+      map[idx] = 1 + ((idx in map) ? map[idx] : 0);
+    }
+    increaseCounter(key);
+    if (type != "") {
+      increaseCounter(type + " " + key);
+      if (unique) {
+        increaseCounter(type + " *");
+      }
+    }
   }
   for (var i = 0; i < numWarnings; i++) {
-    var file = WarningMessages[i].replace(/:.*/, "");
-    addOneWarning(warningsOfFiles, file);
+    var message = WarningMessages[i];
+    var file = message.replace(/:.*/, "");
+    var warningType = message.endsWith("]") ? message.replace(/.*\[/, "[") : "";
+    addOneWarning(warningsOfFiles, file, warningType, true);
     var dirs = file.split("/");
     var dir = dirs[0];
-    addOneWarning(warningsOfDirs, dir);
+    addOneWarning(warningsOfDirs, dir, warningType, true);
     for (var d = 1; d < dirs.length - 1; d++) {
       var subDir = dir + "/" + dirs[d];
       if (!(dir in subDirs)) {
@@ -678,7 +692,7 @@
       }
       subDirs[dir][subDir] = 1;
       dir = subDir;
-      addOneWarning(warningsOfDirs, dir);
+      addOneWarning(warningsOfDirs, dir, warningType, false);
     }
   }
   var minDirWarnings = numWarnings*(LimitPercentWarnings/100);
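
With the extra type and unique arguments, each warning now bumps up to three counters: the plain file or directory key, a "[type] key" entry, and a "[type] *" per-type total counted once per file or directory. A small Python sketch of the same keying scheme, with an illustrative warning line:

from collections import defaultdict

def add_one_warning(counts, key, warning_type, unique):
    # Mirrors the JS addOneWarning: plain key, "[type] key", "[type] *".
    counts[key] += 1
    if warning_type:
        counts[warning_type + ' ' + key] += 1
        if unique:
            counts[warning_type + ' *'] += 1

counts = defaultdict(int)
message = 'external/foo/bar.c:42:5: warning: unused variable [-Wunused-variable]'
file_path = message.split(':')[0]
warning_type = '[' + message.rsplit('[', 1)[1] if message.endswith(']') else ''
add_one_warning(counts, file_path, warning_type, True)
# counts: {'external/foo/bar.c': 1,
#          '[-Wunused-variable] external/foo/bar.c': 1,
#          '[-Wunused-variable] *': 1}
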
@@ -725,27 +739,33 @@
         document.getElementById(divName));
     table.draw(view, {allowHtml: true, alternatingRowStyle: true});
   }
-  addTable("Directory", "top_dirs_table", TopDirs, "selectDir");
-  addTable("File", "top_files_table", TopFiles, "selectFile");
+  addTable("[Warning Type] Directory", "top_dirs_table", TopDirs, "selectDir");
+  addTable("[Warning Type] File", "top_files_table", TopFiles, "selectFile");
 }
 function selectDirFile(idx, rows, dirFile) {
   if (rows.length <= idx) {
     return;
   }
   var name = rows[idx][2];
+  var type = "";
+  if (name.startsWith("[")) {
+    type = " " + name.replace(/ .*/, "");
+    name = name.replace(/.* /, "");
+  }
   var spanName = "selected_" + dirFile + "_name";
-  document.getElementById(spanName).innerHTML = name;
+  document.getElementById(spanName).innerHTML = name + type;
   var divName = "selected_" + dirFile + "_warnings";
   var numWarnings = rows[idx][1].v;
   var prefix = name.replace(/\\.\\.\\.$/, "");
   var data = new google.visualization.DataTable();
-  data.addColumn('string', numWarnings + ' warnings in ' + name);
+  data.addColumn('string', numWarnings + type + ' warnings in ' + name);
   var getWarningMessage = (FlagPlatform == "chrome")
         ? ((x) => addURLToLine(WarningMessages[Warnings[x][2]],
                                WarningLinks[Warnings[x][3]]))
         : ((x) => addURL(WarningMessages[Warnings[x][2]]));
   for (var i = 0; i < Warnings.length; i++) {
-    if (WarningMessages[Warnings[i][2]].startsWith(prefix)) {
+    if ((prefix.startsWith("*") || WarningMessages[Warnings[i][2]].startsWith(prefix)) &&
+        (type == "" || WarningMessages[Warnings[i][2]].endsWith(type))) {
       data.addRow([getWarningMessage(i)]);
     }
   }
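
selectDirFile undoes that keying: a selected row name such as "[-Wunused-variable] external" is split back into the type filter and the path prefix, and "*" rows match every path. A rough Python equivalent of the split, with an assumed row name:

def split_row_name(name):
    # Approximates the JS: "[-W...] path" -> (type, path); plain names pass
    # through. Paths are assumed not to contain spaces.
    if name.startswith('['):
        warning_type, _, path = name.partition(' ')
        return warning_type, path
    return '', name

print(split_row_name('[-Wunused-variable] external'))  # ('[-Wunused-variable]', 'external')
print(split_row_name('external'))                      # ('', 'external')
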
@@ -827,14 +847,14 @@
   def section2():
     dump_dir_file_section(
         writer, 'directory', 'top_dirs_table',
-        'Directories with at least ' +
-        str(LIMIT_PERCENT_WARNINGS) + '% warnings')
+        'Directories/Warnings with at least ' +
+        str(LIMIT_PERCENT_WARNINGS) + '% of all cases')
   def section3():
     dump_dir_file_section(
         writer, 'file', 'top_files_table',
-        'Files with at least ' +
-        str(LIMIT_PERCENT_WARNINGS) + '% or ' +
-        str(LIMIT_WARNINGS_PER_FILE) + ' warnings')
+        'Files/Warnings with at least ' +
+        str(LIMIT_PERCENT_WARNINGS) + '% of all or ' +
+        str(LIMIT_WARNINGS_PER_FILE) + ' cases')
   def section4():
     writer('<script>')
     emit_js_data(writer, flags, warning_messages, warning_links,
diff --git a/tools/warn/warn_common.py b/tools/warn/warn_common.py
index 61c8676..aa68313 100755
--- a/tools/warn/warn_common.py
+++ b/tools/warn/warn_common.py
@@ -64,6 +64,10 @@
 from . import tidy_warn_patterns as tidy_patterns
 
 
+# The location of this file is used to guess the root of the Android source tree.
+THIS_FILE_PATH = 'build/make/tools/warn/warn_common.py'
+
+
 def parse_args(use_google3):
   """Define and parse the args. Return the parse_args() result."""
   parser = argparse.ArgumentParser(
@@ -217,17 +221,27 @@
   return link
 
 
-def find_warn_py_and_android_root(path):
-  """Return android source root path if warn.py is found."""
+def find_this_file_and_android_root(path):
+  """Return android source root path if this file is found."""
   parts = path.split('/')
   for idx in reversed(range(2, len(parts))):
     root_path = '/'.join(parts[:idx])
     # Android root directory should contain this script.
-    if os.path.exists(root_path + '/build/make/tools/warn.py'):
+    if os.path.exists(root_path + '/' + THIS_FILE_PATH):
       return root_path
   return ''
 
 
+def find_android_root_top_dirs(root_dir):
+  """Return a list of directories under the root_dir, if it exists."""
+  # Root directory should contain at least build/make and build/soong.
+  if (not os.path.isdir(root_dir + '/build/make') or
+      not os.path.isdir(root_dir + '/build/soong')):
+    return None
+  return list(filter(lambda d: os.path.isdir(root_dir + '/' + d),
+                     os.listdir(root_dir)))
+
+
 def find_android_root(buildlog):
   """Guess android source root from common prefix of file paths."""
   # Use the longest common prefix of the absolute file paths
@@ -239,8 +253,8 @@
     # We want to find android_root of a local build machine.
    # Do not use RBE warning lines, which have the '/b/f/w/' path prefix.
     # Do not use /tmp/ file warnings.
-    if warning_pattern.match(line) and (
-        '/b/f/w' not in line and not line.startswith('/tmp/')):
+    if ('/b/f/w' not in line and not line.startswith('/tmp/') and
+        warning_pattern.match(line)):
       warning_lines.append(line)
       count += 1
       if count > 9999:
@@ -249,17 +263,26 @@
       # the source tree root.
       if count < 100:
         path = os.path.normpath(re.sub(':.*$', '', line))
-        android_root = find_warn_py_and_android_root(path)
+        android_root = find_this_file_and_android_root(path)
         if android_root:
-          return android_root
+          return android_root, find_android_root_top_dirs(android_root)
   # Do not use common prefix of a small number of paths.
+  android_root = ''
   if count > 10:
     # pytype: disable=wrong-arg-types
     root_path = os.path.commonprefix(warning_lines)
     # pytype: enable=wrong-arg-types
     if len(root_path) > 2 and root_path[len(root_path) - 1] == '/':
-      return root_path[:-1]
-  return ''
+      android_root = root_path[:-1]
+  if android_root and os.path.isdir(android_root):
+    return android_root, find_android_root_top_dirs(android_root)
+  # When the build.log file is moved to a different machine where
+  # android_root is not found, use the location of this script
+  # to find the Android source tree subdirectories.
+  if __file__.endswith('/' + THIS_FILE_PATH):
+    script_root = __file__.replace('/' + THIS_FILE_PATH, '')
+    return android_root, find_android_root_top_dirs(script_root)
+  return android_root, None
 
 
 def remove_android_root_prefix(path, android_root):
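
find_android_root now returns a pair, and the top-directory list is derived from whichever root can be found: the one recovered from warning paths, the common path prefix, or, failing those, the location of this script. A condensed sketch of that fallback order under an assumed directory layout:

import os

THIS_FILE_PATH = 'build/make/tools/warn/warn_common.py'

def top_dirs_or_none(root_dir):
    # Same check as find_android_root_top_dirs: an Android root must
    # contain both build/make and build/soong.
    if not (os.path.isdir(os.path.join(root_dir, 'build/make')) and
            os.path.isdir(os.path.join(root_dir, 'build/soong'))):
        return None
    return [d for d in os.listdir(root_dir)
            if os.path.isdir(os.path.join(root_dir, d))]

def guess_top_dirs(android_root, script_path):
    if android_root and os.path.isdir(android_root):
        return top_dirs_or_none(android_root)        # root found locally
    if script_path.endswith('/' + THIS_FILE_PATH):   # fall back to the script
        return top_dirs_or_none(script_path[:-len('/' + THIS_FILE_PATH)])
    return None
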
@@ -310,8 +333,6 @@
   warning_pattern = re.compile(chrome_warning_pattern)
 
   # Collect all unique warning lines
-  # Remove the duplicated warnings save ~8% of time when parsing
-  # one typical build log than before
   unique_warnings = dict()
   for line in infile:
     if warning_pattern.match(line):
@@ -353,8 +374,7 @@
   target_product = 'unknown'
   target_variant = 'unknown'
   build_id = 'unknown'
-  use_rbe = False
-  android_root = find_android_root(infile)
+  android_root, root_top_dirs = find_android_root(infile)
   infile.seek(0)
 
   # rustc warning messages have two lines that should be combined:
@@ -367,24 +387,39 @@
   # C/C++ compiler warning messages have line and column numbers:
   #     some/path/file.c:line_number:column_number: warning: description
   warning_pattern = re.compile('(^[^ ]*/[^ ]*: warning: .*)|(^warning: .*)')
-  warning_without_file = re.compile('^warning: .*')
   rustc_file_position = re.compile('^[ ]+--> [^ ]*/[^ ]*:[0-9]+:[0-9]+')
 
-  # If RBE was used, try to reclaim some warning lines mixed with some
-  # leading chars from other concurrent job's stderr output .
+  # If RBE was used, try to reclaim some warning lines (from stdout)
+  # that contain leading characters from stderr.
   # The leading characters can be any character, including digits and spaces.
-  # It's impossible to correctly identify the starting point of the source
-  # file path without the file directory name knowledge.
-  # Here we can only be sure to recover lines containing "/b/f/w/".
-  rbe_warning_pattern = re.compile('.*/b/f/w/[^ ]*: warning: .*')
 
-   # Collect all unique warning lines
-  # Remove the duplicated warnings save ~8% of time when parsing
-  # one typical build log than before
+  # If a warning line's source file path contains the special RBE prefix
+  # /b/f/w/, we can remove all leading chars up to and including the "/b/f/w/".
+  bfw_warning_pattern = re.compile('.*/b/f/w/([^ ]*: warning: .*)')
+
+  # When android_root is known and available, we find its top directories
+  # and remove all leading chars before a top directory name.
+  # We assume that the leading chars from stderr do not contain "/".
+  # For example,
+  #   10external/...
+  #   12 warningsexternal/...
+  #   413 warningexternal/...
+  #   5 warnings generatedexternal/...
+  #   Suppressed 1000 warnings (packages/modules/...
+  if root_top_dirs:
+    extra_warning_pattern = re.compile(
+        '^.[^/]*((' + '|'.join(root_top_dirs) +
+        ')/[^ ]*: warning: .*)')
+  else:
+    extra_warning_pattern = re.compile('^[^/]* ([^ /]*/[^ ]*: warning: .*)')
+
+  # Collect all unique warning lines
   unique_warnings = dict()
+  checked_warning_lines = dict()
   line_counter = 0
   prev_warning = ''
   for line in infile:
+    line_counter += 1
     if prev_warning:
       if rustc_file_position.match(line):
         # must be a rustc warning, combine 2 lines into one warning
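
The comment block in this hunk explains why a simple anchor cannot recover these lines: stderr prefixes can be glued directly onto the path. A quick check of the reclaim regex against the sample prefixes, with an assumed top-directory list:

import re

root_top_dirs = ['external', 'packages']  # assumed for this sketch
extra_warning_pattern = re.compile(
    '^.[^/]*((' + '|'.join(root_top_dirs) + ')/[^ ]*: warning: .*)')

for line in ('12 warningsexternal/foo/bar.c:1:2: warning: unused x',
             '413 warningexternal/foo/baz.c:3:4: warning: shadowed y'):
    result = extra_warning_pattern.search(line)
    if result is not None:
        print(result.group(1))  # reclaimed 'external/...: warning: ...' line
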
@@ -399,14 +434,31 @@
           prev_warning, flags, android_root, unique_warnings)
       prev_warning = ''
 
-    if use_rbe and rbe_warning_pattern.match(line):
-      cleaned_up_line = re.sub('.*/b/f/w/', '', line)
-      unique_warnings = add_normalized_line_to_warnings(
-          cleaned_up_line, flags, android_root, unique_warnings)
+    # re.match is slow when several warning line patterns are tried
+    # against long input lines like "TIMEOUT: ...".
+    # We save significant time by skipping non-warning lines.
+    # But do not skip the first 100 lines, because we want to
+    # catch build variables.
+    if line_counter > 100 and line.find('warning: ') < 0:
       continue
 
+    # A large clean build output can contain up to 90% duplicated
+    # "warning:" lines. Skipping them quickly can speed up
+    # this for-loop 3X to 5X.
+    if line in checked_warning_lines:
+      continue
+    checked_warning_lines[line] = True
+
+    # Clean up extra prefix that could be introduced when RBE was used.
+    if '/b/f/w/' in line:
+      result = bfw_warning_pattern.search(line)
+    else:
+      result = extra_warning_pattern.search(line)
+    if result is not None:
+      line = result.group(1)
+
     if warning_pattern.match(line):
-      if warning_without_file.match(line):
+      if line.startswith('warning: '):
         # save this line and combine it with the next line
         prev_warning = line
       else:
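
The two skips above are pure fast paths: a substring test replaces the regex for non-warning lines after the first 100, and a seen-line set drops exact duplicates before any normalization. A minimal sketch of the filtering, independent of the rest of the parser:

def iter_candidate_warnings(lines):
    # Yields only lines worth running the warning regexes on.
    checked_warning_lines = set()
    for line_counter, line in enumerate(lines, start=1):
        if line_counter > 100 and 'warning: ' not in line:
            continue  # cheap substring test instead of re.match
        if line in checked_warning_lines:
            continue  # duplicates dominate large clean build logs
        checked_warning_lines.add(line)
        yield line
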
@@ -416,7 +468,6 @@
 
     if line_counter < 100:
       # save a little bit of time by only doing this for the first few lines
-      line_counter += 1
       result = re.search('(?<=^PLATFORM_VERSION=).*', line)
       if result is not None:
         platform_version = result.group(0)
@@ -433,13 +484,6 @@
       if result is not None:
         build_id = result.group(0)
         continue
-      result = re.search('(?<=^TOP=).*', line)
-      if result is not None:
-        android_root = result.group(1)
-        continue
-      if re.search('USE_RBE=', line) is not None:
-        use_rbe = True
-        continue
 
   if android_root:
     new_unique_warnings = dict()