Merge "Put javac, d8 and r8 rules into RBE ninja pool"
diff --git a/core/Makefile b/core/Makefile
index e9bca77..4081f4a 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -641,26 +641,34 @@
 	$(hide) $(SOONG_TO_CONVERT_SCRIPT) $< >$@
 $(call dist-for-goals,droidcore-unbundled,$(SOONG_TO_CONVERT))
 
+$(PRODUCT_OUT)/product_packages.txt:
+	@rm -f $@
+	echo "" > $@
+	$(foreach x,$(PRODUCT_PACKAGES),echo $(x) >> $@$(newline))
+
 MK2BP_CATALOG_SCRIPT := build/make/tools/mk2bp_catalog.py
+PRODUCT_PACKAGES_TXT := $(PRODUCT_OUT)/product_packages.txt
 MK2BP_REMAINING_HTML := $(PRODUCT_OUT)/mk2bp_remaining.html
 $(MK2BP_REMAINING_HTML): PRIVATE_CODE_SEARCH_BASE_URL := "https://cs.android.com/android/platform/superproject/+/master:"
-$(MK2BP_REMAINING_HTML): $(SOONG_CONV_DATA) $(MK2BP_CATALOG_SCRIPT)
+$(MK2BP_REMAINING_HTML): $(SOONG_CONV_DATA) $(MK2BP_CATALOG_SCRIPT) $(PRODUCT_PACKAGES_TXT)
 	@rm -f $@
 	$(hide) $(MK2BP_CATALOG_SCRIPT) \
 		--device=$(TARGET_DEVICE) \
+		--product-packages=$(PRODUCT_PACKAGES_TXT) \
 		--title="Remaining Android.mk files for $(TARGET_DEVICE)-$(TARGET_BUILD_VARIANT)" \
 		--codesearch=$(PRIVATE_CODE_SEARCH_BASE_URL) \
-		--out_dir="$(OUT_DIR)" \
+		--out-dir="$(OUT_DIR)" \
 		--mode=html \
 		> $@
 $(call dist-for-goals,droidcore-unbundled,$(MK2BP_REMAINING_HTML))
 
 MK2BP_REMAINING_CSV := $(PRODUCT_OUT)/mk2bp_remaining.csv
-$(MK2BP_REMAINING_CSV): $(SOONG_CONV_DATA) $(MK2BP_CATALOG_SCRIPT)
+$(MK2BP_REMAINING_CSV): $(SOONG_CONV_DATA) $(MK2BP_CATALOG_SCRIPT) $(PRODUCT_PACKAGES_TXT)
 	@rm -f $@
 	$(hide) $(MK2BP_CATALOG_SCRIPT) \
 		--device=$(TARGET_DEVICE) \
-		--out_dir="$(OUT_DIR)" \
+		--product-packages=$(PRODUCT_PACKAGES_TXT) \
+		--out-dir="$(OUT_DIR)" \
 		--mode=csv \
 		> $@
 $(call dist-for-goals,droidcore-unbundled,$(MK2BP_REMAINING_CSV))
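
The new product_packages.txt is written one PRODUCT_PACKAGES entry per line (the foreach/echo rule above also emits a leading blank line). As a rough illustration of what a consumer such as mk2bp_catalog.py's new --product-packages flag presumably reads, here is a minimal, hypothetical Python reader; it is not the script's actual code.

    # Hypothetical reader for product_packages.txt: one package name per line,
    # skipping blank lines such as the leading one from `echo "" > $@`.
    def read_product_packages(path):
        with open(path) as f:
            return {line.strip() for line in f if line.strip()}
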
@@ -1514,48 +1522,7 @@
 notice_files: $(1)
 endef
 
-# Create the rule to combine the files into text and html/xml forms
-# $(1) - xml_excluded_system_product_odm_vendor_dlkm_odm_dlkm|
-#        xml_excluded_vendor_product_odm_vendor_dlkm_odm_dlkm|
-#        xml_product|xml_odm|xml_system_ext|xml_system|xml_vendor_dlkm|
-#        xml_odm_dlkm|html
-# $(2) - Plain text output file
-# $(3) - HTML/XML output file
-# $(4) - File title
-# $(5) - Directory to use.  Notice files are all $(5)/src.  Other
-#		 directories in there will be used for scratch
-# $(6) - Dependencies for the output files
-# $(7) - Directories to exclude
-#
-# The algorithm here is that we go collect a hash for each of the notice
-# files and write the names of the files that match that hash.  Then
-# to generate the real files, we go print out all of the files and their
-# hashes.
-#
-# These rules are fairly complex, so they depend on this makefile so if
-# it changes, they'll run again.
-#
-# TODO: We could clean this up so that we just record the locations of the
-# original notice files instead of making rules to copy them somwehere.
-# Then we could traverse that without quite as much bash drama.
-define combine-notice-files
-$(2): PRIVATE_MESSAGE := $(4)
-$(2): PRIVATE_DIR := $(5)
-$(2): .KATI_IMPLICIT_OUTPUTS := $(3)
-$(2): $(6) $(BUILD_SYSTEM)/Makefile build/make/tools/generate-notice-files.py
-	build/make/tools/generate-notice-files.py --text-output $(2) $(foreach xdir, $(7), -e $(xdir) )\
-	    $(if $(filter $(1),xml_excluded_vendor_product_odm_vendor_dlkm_odm_dlkm),-e vendor -e product -e system_ext -e odm -e vendor_dlkm -e odm_dlkm --xml-output, \
-	      $(if $(filter $(1),xml_excluded_system_product_odm_vendor_dlkm_odm_dlkm),-e system -e product -e system_ext -e odm -e vendor_dlkm -e odm_dlkm --xml-output, \
-	        $(if $(filter $(1),xml_product),-i product --xml-output, \
-	          $(if $(filter $(1),xml_system_ext),-i system_ext --xml-output, \
-	            $(if $(filter $(1),xml_system),-i system --xml-output, \
-	              $(if $(filter $(1),xml_odm),-i odm --xml-output, \
-	                $(if $(filter $(1),xml_vendor_dlkm),-i vendor_dlkm --xml-output, \
-	                  $(if $(filter $(1),xml_odm_dlkm),-i odm_dlkm --xml-output, \
-	                    --html-output)))))))) $(3) \
-	    -t $$(PRIVATE_MESSAGE) $$(foreach dir,$$(sort $$(PRIVATE_DIR)), -s $$(dir)/src)
-notice_files: $(2) $(3)
-endef
+$(KATI_obsolete_var combine-notice-files, To create notice files use xml-notice-rule, html-notice-rule, or text-notice-rule.)
 
 # Notice file logic isn't relevant for TARGET_BUILD_APPS
 ifndef TARGET_BUILD_APPS
@@ -6158,25 +6125,39 @@
 # -----------------------------------------------------------------
 # A zip of the symbols directory.  Keep the full paths to make it
 # more obvious where these files came from.
+# Also produces a textproto containing mappings from elf IDs to symbols
+# filenames, which will allow finding the appropriate symbols file to
+# deobfuscate a stack trace frame.
 #
+
 name := $(TARGET_PRODUCT)
 ifeq ($(TARGET_BUILD_TYPE),debug)
   name := $(name)_debug
 endif
-name := $(name)-symbols-$(FILE_NAME_TAG)
 
-SYMBOLS_ZIP := $(PRODUCT_OUT)/$(name).zip
+# The path to the zip file containing binaries with symbols.
+SYMBOLS_ZIP := $(PRODUCT_OUT)/$(name)-symbols-$(FILE_NAME_TAG).zip
+# The path to a file containing mappings from elf IDs to filenames.
+SYMBOLS_MAPPING := $(PRODUCT_OUT)/$(name)-symbols-mapping-$(FILE_NAME_TAG).textproto
+.KATI_READONLY := SYMBOLS_ZIP SYMBOLS_MAPPING
 # For apps_only build we'll establish the dependency later in build/make/core/main.mk.
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
 $(SYMBOLS_ZIP): $(INTERNAL_ALLIMAGES_FILES) $(updater_dep)
 endif
 $(SYMBOLS_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,symbols)/filelist
-$(SYMBOLS_ZIP): $(SOONG_ZIP)
+$(SYMBOLS_ZIP): PRIVATE_MAPPING_PACKAGING_DIR := $(call intermediates-dir-for,PACKAGING,elf_symbol_mapping)
+$(SYMBOLS_ZIP): $(SOONG_ZIP) $(SYMBOLS_MAP)
 	@echo "Package symbols: $@"
 	$(hide) rm -rf $@ $(PRIVATE_LIST_FILE)
-	$(hide) mkdir -p $(dir $@) $(TARGET_OUT_UNSTRIPPED) $(dir $(PRIVATE_LIST_FILE))
+	$(hide) mkdir -p $(TARGET_OUT_UNSTRIPPED) $(dir $(PRIVATE_LIST_FILE)) $(PRIVATE_MAPPING_PACKAGING_DIR)
+	# Find all of the files in the symbols directory and zip them into the symbols zip.
 	$(hide) find -L $(TARGET_OUT_UNSTRIPPED) -type f | sort >$(PRIVATE_LIST_FILE)
 	$(hide) $(SOONG_ZIP) --ignore_missing_files -d -o $@ -C $(OUT_DIR)/.. -l $(PRIVATE_LIST_FILE)
+	# Find all of the files in the symbols mapping directory and merge them into the symbols mapping textproto.
+	$(hide) find -L $(PRIVATE_MAPPING_PACKAGING_DIR) -type f | sort >$(PRIVATE_LIST_FILE)
+	$(hide) $(SYMBOLS_MAP) -merge $(SYMBOLS_MAPPING) -ignore_missing_files @$(PRIVATE_LIST_FILE)
+$(SYMBOLS_ZIP): .KATI_IMPLICIT_OUTPUTS := $(SYMBOLS_MAPPING)
+
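
The recipe above re-uses PRIVATE_LIST_FILE to collect the per-binary .textproto mappings (written by the copy rules added in core/definitions.mk below) and hands that list to symbols_map -merge. The sketch below shows the general "read a file list, combine the entries it names" idea in Python; it is only a conceptual stand-in for the real symbols_map host tool, and the plain concatenation is an assumption, not its actual merge format.

    # Conceptual sketch only; the real merge is performed by the symbols_map
    # host tool and its textproto format is not reproduced here.
    import sys

    def merge_mappings(list_file, out_file):
        # The @filelist argument names one per-binary mapping file per line.
        with open(list_file) as f:
            mapping_files = [line.strip() for line in f if line.strip()]
        with open(out_file, "w") as out:
            for path in sorted(mapping_files):
                with open(path) as m:
                    out.write(m.read())  # naively concatenate the per-file entries

    if __name__ == "__main__":
        merge_mappings(sys.argv[1], sys.argv[2])
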
 # -----------------------------------------------------------------
 # A zip of the coverage directory.
 #
@@ -6269,17 +6250,35 @@
 
 #------------------------------------------------------------------
 # A zip of Proguard obfuscation dictionary files.
+# Also produces a textproto containing mappings from the hashes of the
+# dictionary contents (which are also stored in the dex files on the
+# devices) to the filename of the proguard dictionary, which will allow
+# finding the appropriate dictionary to deobfuscate a stack trace frame.
 #
+
+# The path to the zip file containing proguard dictionaries.
 PROGUARD_DICT_ZIP := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict-$(FILE_NAME_TAG).zip
+# The path to the zip file containing mappings from dictionary hashes to filenames.
+PROGUARD_DICT_MAPPING := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict-mapping-$(FILE_NAME_TAG).textproto
+.KATI_READONLY := PROGUARD_DICT_ZIP PROGUARD_DICT_MAPPING
 # For apps_only build we'll establish the dependency later in build/make/core/main.mk.
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
 $(PROGUARD_DICT_ZIP): $(INTERNAL_ALLIMAGES_FILES) $(updater_dep)
 endif
 $(PROGUARD_DICT_ZIP): PRIVATE_PACKAGING_DIR := $(call intermediates-dir-for,PACKAGING,proguard_dictionary)
-$(PROGUARD_DICT_ZIP): $(SOONG_ZIP)
+$(PROGUARD_DICT_ZIP): PRIVATE_MAPPING_PACKAGING_DIR := $(call intermediates-dir-for,PACKAGING,proguard_dictionary_mapping)
+$(PROGUARD_DICT_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,proguard_dictionary_filelist)/filelist
+$(PROGUARD_DICT_ZIP): $(SOONG_ZIP) $(SYMBOLS_MAP)
 	@echo "Packaging Proguard obfuscation dictionary files."
-	mkdir -p $(dir $@) $(PRIVATE_PACKAGING_DIR)
-	$(SOONG_ZIP) --ignore_missing_files -d -o $@ -C $(PRIVATE_PACKAGING_DIR) -P out/target/common/obj -D $(PRIVATE_PACKAGING_DIR)
+	rm -rf $@ $(PRIVATE_LIST_FILE)
+	mkdir -p $(PRIVATE_PACKAGING_DIR) $(PRIVATE_MAPPING_PACKAGING_DIR) $(dir $(PRIVATE_LIST_FILE))
+	# Zip all of the files in the proguard dictionary directory.
+	$(SOONG_ZIP) --ignore_missing_files -d -o $@ -C $(PRIVATE_PACKAGING_DIR) -D $(PRIVATE_PACKAGING_DIR)
+	# Find all of the files in the proguard dictionary mapping directory and merge them into the mapping textproto.
+	# Strip the PRIVATE_PACKAGING_DIR off the filenames to match soong_zip's -C argument.
+	$(hide) find -L $(PRIVATE_MAPPING_PACKAGING_DIR) -type f | sort >$(PRIVATE_LIST_FILE)
+	$(SYMBOLS_MAP) -merge $(PROGUARD_DICT_MAPPING) -strip_prefix $(PRIVATE_PACKAGING_DIR)/ -ignore_missing_files @$(PRIVATE_LIST_FILE)
+$(PROGUARD_DICT_ZIP): .KATI_IMPLICIT_OUTPUTS := $(PROGUARD_DICT_MAPPING)
 
 #------------------------------------------------------------------
 # A zip of Proguard usage files.
diff --git a/core/board_config.mk b/core/board_config.mk
index 72a8044..dc50a68 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -254,7 +254,7 @@
   endif
 
   $(shell build/soong/scripts/update_out $(OUT_DIR)/rbc/rbc_board_config_results.mk \
-    $(OUT_DIR)/rbcrun RBC_OUT="make,global" $(OUT_DIR)/rbc/boardlauncher.rbc)
+    $(OUT_DIR)/rbcrun RBC_OUT="make" $(OUT_DIR)/rbc/boardlauncher.rbc)
   ifneq ($(.SHELLSTATUS),0)
     $(error board configuration runner failed: $(.SHELLSTATUS))
   endif
diff --git a/core/config.mk b/core/config.mk
index 42799ac..bd3d1a0 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -635,6 +635,7 @@
 OTA_FROM_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/ota_from_target_files$(HOST_EXECUTABLE_SUFFIX)
 SPARSE_IMG := $(HOST_OUT_EXECUTABLES)/sparse_img$(HOST_EXECUTABLE_SUFFIX)
 CHECK_PARTITION_SIZES := $(HOST_OUT_EXECUTABLES)/check_partition_sizes$(HOST_EXECUTABLE_SUFFIX)
+SYMBOLS_MAP := $(HOST_OUT_EXECUTABLES)/symbols_map
 
 PROGUARD_HOME := external/proguard
 PROGUARD := $(PROGUARD_HOME)/bin/proguard.sh
diff --git a/core/definitions.mk b/core/definitions.mk
index 77804e8..0d72473 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -586,10 +586,11 @@
 ## License metadata build rule for my_register_name $(1)
 ###########################################################
 define license-metadata-rule
-$(foreach meta_lic, $(subst //,/,$(ALL_MODULES.$(1).DELAYED_META_LIC)),$(call _license-metadata-rule,$(1),$(meta_lic)))
-$(call notice-rule,$(1))
+$(foreach meta_lic, $(ALL_MODULES.$(1).DELAYED_META_LIC),$(call _license-metadata-rule,$(1),$(meta_lic)))
 endef
 
+$(KATI_obsolete_var notice-rule, This function has been removed)
+
 define _license-metadata-rule
 $(strip $(eval _srcs := $(strip $(foreach d,$(ALL_MODULES.$(1).NOTICE_DEPS),$(if $(strip $(ALL_MODULES.$(call word-colon,1,$(d)).INSTALLED)), $(ALL_MODULES.$(call word-colon,1,$(d)).INSTALLED),$(if $(strip $(ALL_MODULES.$(call word-colon,1,$(d)).BUILT)), $(ALL_MODULES.$(call word-colon,1,$(d)).BUILT), $(call word-colon,1,$d)))))))
 $(strip $(eval _deps := $(sort $(filter-out $(2)%,\
@@ -648,26 +649,6 @@
 	  -o $$@
 endef
 
-define notice-rule
-$(strip $(eval _mifs := $(sort $(ALL_MODULES.$(1).MODULE_INSTALLED_FILENAMES))))
-$(strip $(eval _infs := $(sort $(ALL_MODULES.$(1).INSTALLED_NOTICE_FILE))))
-
-# Emit each installed notice file rule if it references the current module
-$(if $(_infs),$(foreach inf,$(_infs),
-$(if $(strip $(filter $(1),$(INSTALLED_NOTICE_FILES.$(inf).MODULE))),
-$(strip $(eval _mif := $(firstword $(foreach m,$(_mifs),$(if $(filter %/src/$(m).txt,$(inf)),$(m))))))
-
-$(inf): PRIVATE_INSTALLED_MODULE := $(_mif)
-$(inf) : PRIVATE_NOTICES := $(sort $(foreach n,$(_notices),$(call word-colon,1,$(n) )))
-
-$(inf): $(foreach n,$(_notices),$(call word-colon,1,$(n)) )
-	@echo Notice file: $$< -- $$@
-	mkdir -p $$(dir $$@)
-	awk 'FNR==1 && NR > 1 {print "\n"} {print}' $$(PRIVATE_NOTICES) > $$@
-
-)))
-
-endef
 
 ###########################################################
 ## License metadata build rule for non-module target $(1)
@@ -3173,6 +3154,50 @@
 fi
 endef
 
+# Copy an unstripped binary to the symbols directory while also extracting
+# a hash mapping to the mapping directory.
+# $(1): unstripped intermediates file
+# $(2): path in symbols directory
+define copy-unstripped-elf-file-with-mapping
+$(call _copy-symbols-file-with-mapping,$(1),$(2),\
+  elf,$(patsubst $(TARGET_OUT_UNSTRIPPED)/%,$(call intermediates-dir-for,PACKAGING,elf_symbol_mapping)/%,$(2).textproto))
+endef
+
+# Copy an R8 dictionary to the packaging directory while also extracting
+# a hash mapping to the mapping directory.
+# $(1): proguard dictionary intermediates file
+# $(2): path in packaging directory
+# $(3): path in mappings packaging directory
+define copy-r8-dictionary-file-with-mapping
+$(call _copy-symbols-file-with-mapping,$(1),$(2),r8,$(3))
+endef
+
+# Copy an unstripped binary or R8 dictionary to its destination directory
+# while also extracting a hash mapping to the mapping directory.
+# $(1): intermediates file to copy
+# $(2): destination path (symbols or packaging directory)
+# $(3): file type (elf or r8)
+# $(4): path in the mappings directory
+define _copy-symbols-file-with-mapping
+$(2): .KATI_IMPLICIT_OUTPUTS := $(4)
+$(2): $(SYMBOLS_MAP)
+$(2): $(1)
+	@echo "Copy symbols with mapping: $$@"
+	$$(copy-file-to-target)
+	$(SYMBOLS_MAP) -$(strip $(3)) $(2) -write_if_changed $(4)
+.KATI_RESTAT: $(2)
+endef
+
+# Returns the directory to copy proguard dictionaries into
+define local-proguard-dictionary-directory
+$(call intermediates-dir-for,PACKAGING,proguard_dictionary)/out/target/common/obj/$(LOCAL_MODULE_CLASS)/$(LOCAL_MODULE)_intermediates
+endef
+
+# Returns the directory to copy proguard dictionary mappings into
+define local-proguard-dictionary-mapping-directory
+$(call intermediates-dir-for,PACKAGING,proguard_dictionary_mapping)/out/target/common/obj/$(LOCAL_MODULE_CLASS)/$(LOCAL_MODULE)_intermediates
+endef
+
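
For the ELF case, copy-unstripped-elf-file-with-mapping derives the mapping path by rewriting the $(TARGET_OUT_UNSTRIPPED) prefix of the symbols path into the elf_symbol_mapping packaging directory and appending .textproto. A small Python sketch of that path rewrite follows; both directory roots are placeholder values, not the real build paths.

    # Sketch of the patsubst in copy-unstripped-elf-file-with-mapping.
    # The two roots below are placeholders for illustration only.
    def elf_mapping_path(symbols_path,
                         unstripped_root="out/symbols",
                         mapping_root="out/obj/PACKAGING/elf_symbol_mapping"):
        assert symbols_path.startswith(unstripped_root + "/")
        rel = symbols_path[len(unstripped_root) + 1:]
        return "%s/%s.textproto" % (mapping_root, rel)

    # e.g. elf_mapping_path("out/symbols/system/bin/foo")
    #   -> "out/obj/PACKAGING/elf_symbol_mapping/system/bin/foo.textproto"
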
 
 ###########################################################
 ## Commands to call R8
diff --git a/core/dumpconfig.mk b/core/dumpconfig.mk
index 9b1f2c2..640fe10 100644
--- a/core/dumpconfig.mk
+++ b/core/dumpconfig.mk
@@ -117,7 +117,6 @@
 	9 \
 	LOCAL_PATH \
 	MAKEFILE_LIST \
-	PARENT_PRODUCT_FILES \
 	current_mk \
 	_eiv_ev \
 	_eiv_i \
diff --git a/core/dynamic_binary.mk b/core/dynamic_binary.mk
index 52d7ddc..0d2cd7f 100644
--- a/core/dynamic_binary.mk
+++ b/core/dynamic_binary.mk
@@ -55,9 +55,7 @@
 endif
 symbolic_input := $(inject_module)
 symbolic_output := $(my_unstripped_path)/$(my_installed_module_stem)
-$(symbolic_output) : $(symbolic_input)
-	@echo "target Symbolic: $(PRIVATE_MODULE) ($@)"
-	$(copy-file-to-target)
+$(eval $(call copy-unstripped-elf-file-with-mapping,$(symbolic_input),$(symbolic_output)))
 
 ###########################################################
 ## Store breakpad symbols
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 62ea1b6..049a4d6 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -334,8 +334,9 @@
   USER \
   SOONG_% \
   PRODUCT_COPY_OUT_%)\
+$(file >$(OUT_DIR)/dump-variables-rbc-temp.txt,$(subst $(space),$(newline),$(filter-out $(_dump_variables_rbc_excluded),$(.VARIABLES))))
 $(file >$(1),\
-$(foreach v, $(shell echo $(filter-out $(_dump_variables_rbc_excluded),$(.VARIABLES)) | tr ' ' '\n' | grep -he "^[A-Z][A-Z0-9_]*$$"),\
+$(foreach v, $(shell grep -he "^[A-Z][A-Z0-9_]*$$" $(OUT_DIR)/dump-variables-rbc-temp.txt),\
 $(v) := $(strip $($(v)))$(newline))\
 $(foreach ns,$(SOONG_CONFIG_NAMESPACES),\
 $(foreach v,$(SOONG_CONFIG_$(ns)),\
diff --git a/core/main.mk b/core/main.mk
index 72958da..7a12bf3 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -935,6 +935,7 @@
           $(eval my_testcases := $(HOST_OUT_TESTCASES)),\
           $(eval my_testcases := $$(COMPATIBILITY_TESTCASES_OUT_$(suite))))\
         $(eval target := $(my_testcases)/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\
+        $(if $(strip $(ALL_TARGETS.$(target).META_LIC)),,$(eval ALL_TARGETS.$(target).META_LIC:=$(module_license_metadata)))\
         $(eval COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES := \
           $$(COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES) $(f):$(target))\
         $(eval COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES := \
@@ -1234,33 +1235,9 @@
 #   See the select-bitness-of-required-modules definition.
 # $(1): product makefile
 
-# TODO(asmundak):
-# `product-installed-files` and `host-installed-files` macros below used to
-# call `get-product-var` directly to obtain per-file configuration variable
-# values (the value of variable FOO is fetched from PRODUCT.<product-makefile>.FOO).
-# Starlark-based configuration does not maintain per-file variable variable
-# values. To work around this problem, we utilize the fact that
-# `product-installed-files` and `host-installed-files` are called only in
-# two places:
-# 1. For the top-level product makefile (in this file). In this case
-#    $(call get-product-var <product>, FOO) is the same as $(FOO) as the
-#    product configuration has been run already. Therefore we define
-#    _product-var macro to pick the values directly from product config
-#    variables when using Starlark-based configuration.
-# 2. To check the path requirements (in artifact_path_requirements.mk).
-#    Starlark-based configuration does not perform this check at the moment.
-# In the longer run most of the logic of this file will be moved to the
-# Starlark.
-
-ifndef RBC_PRODUCT_CONFIG
 define _product-var
   $(call get-product-var,$(1),$(2))
 endef
-else
-define _product-var
-  $(call $(2))
-endef
-endif
 
 define product-installed-files
   $(eval _pif_modules := \
@@ -1375,7 +1352,7 @@
 
   # Verify the artifact path requirements made by included products.
   is_asan := $(if $(filter address,$(SANITIZE_TARGET)),true)
-  ifeq (,$(or $(is_asan),$(DISABLE_ARTIFACT_PATH_REQUIREMENTS),$(RBC_PRODUCT_CONFIG),$(RBC_BOARD_CONFIG)))
+  ifeq (,$(or $(is_asan),$(DISABLE_ARTIFACT_PATH_REQUIREMENTS)))
     include $(BUILD_SYSTEM)/artifact_path_requirements.mk
   endif
 else
@@ -1483,12 +1460,6 @@
 # fix-notice-deps replaces those unadorned module names with every built variant.
 $(call fix-notice-deps)
 
-# Create a license metadata rule per module. Could happen in base_rules.mk or
-# notice_files.mk; except, it has to happen after fix-notice-deps to avoid
-# missing dependency errors.
-$(call build-license-metadata)
-
-
 # These are additional goals that we build, in order to make sure that there
 # is as little code as possible in the tree that doesn't build.
 modules_to_check := $(foreach m,$(ALL_MODULES),$(ALL_MODULES.$(m).CHECKED))
@@ -1761,16 +1732,20 @@
   endif
 
   $(PROGUARD_DICT_ZIP) : $(apps_only_installed_files)
-  $(call dist-for-goals,apps_only, $(PROGUARD_DICT_ZIP))
+  $(call dist-for-goals,apps_only, $(PROGUARD_DICT_ZIP) $(PROGUARD_DICT_MAPPING))
+  $(call declare-container-license-deps,$(PROGUARD_DICT_ZIP),$(apps_only_installed_files),$(PRODUCT_OUT)/:/)
 
   $(PROGUARD_USAGE_ZIP) : $(apps_only_installed_files)
   $(call dist-for-goals,apps_only, $(PROGUARD_USAGE_ZIP))
+  $(call declare-container-license-deps,$(PROGUARD_USAGE_ZIP),$(apps_only_installed_files),$(PRODUCT_OUT)/:/)
 
   $(SYMBOLS_ZIP) : $(apps_only_installed_files)
-  $(call dist-for-goals,apps_only, $(SYMBOLS_ZIP))
+  $(call dist-for-goals,apps_only, $(SYMBOLS_ZIP) $(SYMBOLS_MAPPING))
+  $(call declare-container-license-deps,$(SYMBOLS_ZIP),$(apps_only_installed_files),$(PRODUCT_OUT)/:/)
 
   $(COVERAGE_ZIP) : $(apps_only_installed_files)
   $(call dist-for-goals,apps_only, $(COVERAGE_ZIP))
+  $(call declare-container-license-deps,$(COVERAGE_ZIP),$(apps_only_installed_files),$(PRODUCT_OUT)/:/)
 
 apps_only: $(unbundled_build_modules)
 
@@ -1818,7 +1793,9 @@
     $(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET) \
     $(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET) \
     $(SYMBOLS_ZIP) \
+    $(SYMBOLS_MAPPING) \
     $(PROGUARD_DICT_ZIP) \
+    $(PROGUARD_DICT_MAPPING) \
     $(PROGUARD_USAGE_ZIP) \
     $(COVERAGE_ZIP) \
     $(INSTALLED_FILES_FILE) \
@@ -1922,6 +1899,8 @@
 	$(hide) mkdir -p $(dir $@)
 	$(hide) $(APICHECK_COMMAND) --input-api-jar $< --api-xml $@
 
+  $(foreach xml,$(sort $(api_xmls)),$(call declare-1p-target,$(xml),))
+
   $(call dist-for-goals, dist_files, $(api_xmls))
   api_xmls :=
 
@@ -1962,6 +1941,7 @@
 $(call dist-for-goals,sdk, \
     $(ALL_SDK_TARGETS) \
     $(SYMBOLS_ZIP) \
+    $(SYMBOLS_MAPPING) \
     $(COVERAGE_ZIP) \
     $(APPCOMPAT_ZIP) \
     $(INSTALLED_BUILD_PROP_TARGET) \
@@ -2033,6 +2013,11 @@
   droidcore: ${APEX_ALLOWED_DEPS_CHECK}
 endif
 
+# Create a license metadata rule per module. Could happen in base_rules.mk or
+# notice_files.mk; except, it has to happen after fix-notice-deps to avoid
+# missing dependency errors.
+$(call build-license-metadata)
+
 $(call dist-write-file,$(KATI_PACKAGE_MK_DIR)/dist.mk)
 
 $(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] writing build rules ...)
diff --git a/core/notice_files.mk b/core/notice_files.mk
index 8b2dade..c05d4ea 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -157,102 +157,4 @@
 ALL_MODULES.$(my_register_name).NOTICES := $(ALL_MODULES.$(my_register_name).NOTICES) $(notice_file)
 endif
 
-# This relies on the name of the directory in PRODUCT_OUT matching where
-# it's installed on the target - i.e. system, data, etc.  This does
-# not work for root and isn't exact, but it's probably good enough for
-# compliance.
-# Includes the leading slash
-ifdef LOCAL_INSTALLED_MODULE
-  module_installed_filename := $(patsubst $(PRODUCT_OUT)/%,%,$(LOCAL_INSTALLED_MODULE))
-else
-  # This module isn't installable
-  ifneq ($(filter  STATIC_LIBRARIES RLIB_LIBRARIES PROC_MACRO_LIBRARIES HEADER_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
-    # Stick the static libraries with the dynamic libraries.
-    # We can't use xxx_OUT_STATIC_LIBRARIES because it points into
-    # device-obj or host-obj.
-    module_installed_filename := \
-        $(patsubst $(PRODUCT_OUT)/%,%,$($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT_SHARED_LIBRARIES))/$(notdir $(LOCAL_BUILT_MODULE))
-  else ifeq ($(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
-    # Shared modules may be uninstallable(e.g. TARGET_SKIP_CURRENT_VNDK=true)
-    module_installed_filename :=
-  else
-    ifeq ($(LOCAL_MODULE_CLASS),JAVA_LIBRARIES)
-      # Stick the static java libraries with the regular java libraries.
-      module_leaf := $(notdir $(LOCAL_BUILT_MODULE))
-      # javalib.jar is the default name for the build module (and isn't meaningful)
-      # If that's what we have, substitute the module name instead.  These files
-      # aren't included on the device, so this name is synthetic anyway.
-      # Extra path "static" is added to try to avoid name conflict between the notice file of
-      # this 'uninstallable' Java module and the notice file for another 'installable' Java module
-      # whose stem is the same as this module's name.
-      ifneq ($(filter javalib.jar,$(module_leaf)),)
-        module_leaf := static/$(LOCAL_MODULE).jar
-      endif
-      module_installed_filename := \
-          $(patsubst $(PRODUCT_OUT)/%,%,$($(my_prefix)OUT_JAVA_LIBRARIES))/$(module_leaf)
-    else ifneq ($(filter ETC DATA,$(LOCAL_MODULE_CLASS)),)
-      # ETC and DATA modules may be uninstallable, yet still have a NOTICE file.
-      # e.g. apex components
-      module_installed_filename :=
-    else ifneq (,$(and $(filter %.sdk,$(LOCAL_MODULE)),$(filter $(patsubst %.sdk,%,$(LOCAL_MODULE)),$(SOONG_SDK_VARIANT_MODULES))))
-      # Soong produces uninstallable *.sdk shared libraries for embedding in APKs.
-      module_installed_filename := \
-          $(patsubst $(PRODUCT_OUT)/%,%,$($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT_SHARED_LIBRARIES))/$(notdir $(LOCAL_BUILT_MODULE))
-    endif # JAVA_LIBRARIES
-  endif # STATIC_LIBRARIES
-endif
-
-ifdef module_installed_filename
-
-# In case it's actually a host file
-module_installed_filename := $(patsubst $(HOST_OUT)/%,%,$(module_installed_filename))
-module_installed_filename := $(patsubst $(HOST_CROSS_OUT)/%,%,$(module_installed_filename))
-
-installed_notice_file := $($(my_prefix)OUT_NOTICE_FILES)/src/$(module_installed_filename).txt
-
-$(installed_notice_file): $(module_license_metadata)
-
-ifdef my_register_name
-ALL_MODULES.$(my_register_name).INSTALLED_NOTICE_FILE := $(ALL_MODULES.$(my_register_name).INSTALLED_NOTICE_FILE) $(installed_notice_file)
-ALL_MODULES.$(my_register_name).MODULE_INSTALLED_FILENAMES := $(ALL_MODULES.$(my_register_name).MODULE_INSTALLED_FILENAMES) $(module_installed_filename)
-INSTALLED_NOTICE_FILES.$(installed_notice_file).MODULE := $(my_register_name)
-else
-$(installed_notice_file): PRIVATE_INSTALLED_MODULE := $(module_installed_filename)
-$(installed_notice_file) : PRIVATE_NOTICES := $(sort $(foreach n,$(notice_file),$(if $(filter %:%,$(n)), $(call word-colon,1,$(n)), $(n))))
-
-$(installed_notice_file): $(foreach n,$(notice_file),$(if $(filter %:%,$(n)), $(call word-colon,1,$(n)), $(n)))
-	@echo Notice file: $< -- $@
-	$(hide) mkdir -p $(dir $@)
-	$(hide) awk 'FNR==1 && NR > 1 {print "\n"} {print}' $(PRIVATE_NOTICES) > $@
-endif
-
-ifdef LOCAL_INSTALLED_MODULE
-# Make LOCAL_INSTALLED_MODULE depend on NOTICE files if they exist
-# libraries so they get installed along with it.  Make it an order-only
-# dependency so we don't re-install a module when the NOTICE changes.
-$(LOCAL_INSTALLED_MODULE): | $(installed_notice_file)
-endif
-
-# To facilitate collecting NOTICE files for apps_only build,
-# we install the NOTICE file even if a module gets built but not installed,
-# because shared jni libraries won't be installed to the system image.
-ifdef TARGET_BUILD_APPS
-# for static Java libraries, we don't need to even build LOCAL_BUILT_MODULE,
-# but just javalib.jar in the common intermediate dir.
-ifeq ($(LOCAL_MODULE_CLASS),JAVA_LIBRARIES)
-$(intermediates.COMMON)/javalib.jar : | $(installed_notice_file)
-else
-$(LOCAL_BUILT_MODULE): | $(installed_notice_file)
-endif  # JAVA_LIBRARIES
-endif  # TARGET_BUILD_APPS
-
-endif  # module_installed_filename
 endif  # notice_file
-
-# Create a predictable, phony target to build this notice file.
-# Define it even if the notice file doesn't exist so that other
-# modules can depend on it.
-notice_target := NOTICE-$(if \
-    $(LOCAL_IS_HOST_MODULE),HOST$(if $(my_host_cross),_CROSS,),TARGET)-$(LOCAL_MODULE_CLASS)-$(LOCAL_MODULE)
-.PHONY: $(notice_target)
-$(notice_target): $(installed_notice_file)
diff --git a/core/product-graph.mk b/core/product-graph.mk
index 6d51db1..63e9040 100644
--- a/core/product-graph.mk
+++ b/core/product-graph.mk
@@ -17,7 +17,7 @@
 # the sort also acts as a strip to remove the single space entries that creep in because of the evals
 define gather-all-products
 $(eval _all_products_visited := )\
-$(sort $(call all-products-inner, $(PARENT_PRODUCT_FILES)))
+$(sort $(call all-products-inner, $(PRODUCTS)))
 endef
 
 define all-products-inner
@@ -78,7 +78,6 @@
 $(products_graph): PRIVATE_PRODUCTS_FILTER := $(products_list)
 
 $(products_graph): $(this_makefile)
-ifeq (,$(RBC_PRODUCT_CONFIG)$(RBC_NO_PRODUCT_GRAPH)$(RBC_BOARD_CONFIG))
 	@echo Product graph DOT: $@ for $(PRIVATE_PRODUCTS_FILTER)
 	$(hide) echo 'digraph {' > $@.in
 	$(hide) echo 'graph [ ratio=.5 ];' >> $@.in
@@ -87,20 +86,9 @@
 	$(foreach p,$(PRIVATE_PRODUCTS),$(call emit-product-node-props,$(p),$@.in))
 	$(hide) echo '}' >> $@.in
 	$(hide) build/make/tools/filter-product-graph.py $(PRIVATE_PRODUCTS_FILTER) < $@.in > $@
-else
-	@echo RBC_PRODUCT_CONFIG and RBC_NO_PRODUCT_GRAPH should be unset to generate product graph
-	false
-endif
-
-ifeq (,$(RBC_PRODUCT_CONFIG)$(RBC_NO_PRODUCT_GRAPH)$(RBC_BOARD_CONFIG))
 
 .PHONY: product-graph
 product-graph: $(products_graph)
 	@echo Product graph .dot file: $(products_graph)
 	@echo Command to convert to pdf: dot -Tpdf -Nshape=box -o $(OUT_DIR)/products.pdf $(products_graph)
 	@echo Command to convert to svg: dot -Tsvg -Nshape=box -o $(OUT_DIR)/products.svg $(products_graph)
-else
-.PHONY: product-graph
-	@echo RBC_PRODUCT_CONFIG and RBC_NO_PRODUCT_GRAPH should be unset to generate product graph
-	false
-endif
diff --git a/core/product.mk b/core/product.mk
index 1f304cd..f316114 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -396,12 +396,11 @@
 # $(1): product to inherit
 #
 # To be called from product makefiles, and is later evaluated during the import-nodes
-# call below. It does three things:
+# call below. It does the following:
 #  1. Inherits all of the variables from $1.
 #  2. Records the inheritance in the .INHERITS_FROM variable
-#  3. Records the calling makefile in PARENT_PRODUCT_FILES
 #
-# (2) and (3) can be used together to reconstruct the include hierarchy
+# (2) and the PRODUCTS variable can be used together to reconstruct the include hierarchy
 # See e.g. product-graph.mk for an example of this.
 #
 define inherit-product
@@ -416,7 +415,6 @@
     $(eval current_mk := $(strip $(word 1,$(_include_stack)))) \
     $(eval inherit_var := PRODUCTS.$(current_mk).INHERITS_FROM) \
     $(eval $(inherit_var) := $(sort $($(inherit_var)) $(np))) \
-    $(eval PARENT_PRODUCT_FILES := $(sort $(PARENT_PRODUCT_FILES) $(current_mk))) \
     $(call dump-inherit,$(strip $(word 1,$(_include_stack))),$(1)) \
     $(call dump-config-vals,$(current_mk),inherit))
 endef
diff --git a/core/product_config.mk b/core/product_config.mk
index be4aded..939a022 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -257,16 +257,16 @@
     $(error product configuration converter failed: $(.SHELLSTATUS))
   endif
   include $(OUT_DIR)/rbc/rbc_product_config_results.mk
-  PRODUCTS += $(current_product_makefile)
 endif
 endif  # Import all or just the current product makefile
 
-ifndef RBC_PRODUCT_CONFIG
 # Quick check
 $(check-all-products)
-endif
 
-ifeq ($(SKIP_ARTIFACT_PATH_REQUIREMENT_PRODUCTS_CHECK),)
+# This step was already handled in the RBC product configuration.
+# Since the equivalent Starlark code will not add the partial products to
+# the PRODUCTS variable, it's ok for them to be set before check-all-products.
+ifeq ($(RBC_PRODUCT_CONFIG)$(SKIP_ARTIFACT_PATH_REQUIREMENT_PRODUCTS_CHECK),)
 # Import all the products that have made artifact path requirements, so that we can verify
 # the artifacts they produce.
 # These are imported after check-all-products because some of them might not be real products.
@@ -284,7 +284,6 @@
 $(dump-products)
 endif
 
-ifndef RBC_PRODUCT_CONFIG
 # Convert a short name like "sooner" into the path to the product
 # file defining that product.
 #
@@ -297,9 +296,6 @@
 ############################################################################
 # Strip and assign the PRODUCT_ variables.
 $(call strip-product-vars)
-else
-INTERNAL_PRODUCT := $(current_product_makefile)
-endif
 
 current_product_makefile :=
 all_product_makefiles :=
diff --git a/core/product_config.rbc b/core/product_config.rbc
index 0187251..11064f3 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -70,14 +70,7 @@
 
 def _printvars(state):
     """Prints configuration and global variables."""
-    (globals, cfg, globals_base) = state
-    for attr, val in sorted(cfg.items()):
-        __print_attr(attr, val)
-    if _options.print_globals:
-        print()
-        _printglobals(globals, globals_base)
-
-def _printglobals(globals, globals_base):
+    (globals, globals_base) = state
     for attr, val in sorted(globals.items()):
         if attr == _soong_config_namespaces_key:
             __print_attr("SOONG_CONFIG_NAMESPACES", val.keys())
@@ -164,6 +157,13 @@
         handle = __h_new()
         pcm(globals, handle)
 
+        if handle.artifact_path_requirements:
+            globals["PRODUCTS."+name+".mk.ARTIFACT_PATH_REQUIREMENTS"] = handle.artifact_path_requirements
+            globals["PRODUCTS."+name+".mk.ARTIFACT_PATH_ALLOWED_LIST"] = handle.artifact_path_allowed_list
+            globals["PRODUCTS."+name+".mk.ARTIFACT_PATH_REQUIREMENT_IS_RELAXED"] = "true" if handle.artifact_path_requirement_is_relaxed[0] else ""
+            globals.setdefault("ARTIFACT_PATH_REQUIREMENT_PRODUCTS", [])
+            globals["ARTIFACT_PATH_REQUIREMENT_PRODUCTS"] += [name+".mk"]
+
         # Now we know everything about this PCM, record it in 'configs'.
         children = handle.inherited_modules
         if _options.trace_modules:
@@ -218,7 +218,23 @@
         _percolate_inherited(configs, pcm_name, cfg, children_names)
         configs[pcm_name] = pcm, cfg, children_names, True
 
-    return (globals, configs[top_pcm_name][1], globals_base)
+        if (pcm_name + ".mk") in globals.get("ARTIFACT_PATH_REQUIREMENT_PRODUCTS", []):
+            for var, val in cfg.items():
+                globals["PRODUCTS."+pcm_name+".mk."+var] = val
+
+    # Copy product config variables from the cfg dictionary to the
+    # PRODUCTS.<top_level_makefile_name>.<var_name> global variables.
+    for var, val in configs[top_pcm_name][1].items():
+        globals["PRODUCTS."+top_pcm_name+".mk."+var] = val
+
+    # Record inheritance hierarchy in PRODUCTS.<file>.INHERITS_FROM variables.
+    # This is required for m product-graph.
+    for config in configs:
+        if len(configs[config][2]) > 0:
+            globals["PRODUCTS."+config+".mk.INHERITS_FROM"] = sorted([x + ".mk" for x in configs[config][2]])
+    globals["PRODUCTS"] = __words(globals.get("PRODUCTS", [])) + [top_pcm_name + ".mk"]
+
+    return (globals, globals_base)
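
With this change _product_configuration exports the top-level configuration as PRODUCTS.<makefile>.<variable> globals and maintains PRODUCTS and the .INHERITS_FROM entries, so the Make side (product-graph.mk, the artifact path checks) keeps seeing the variable names it expects. A toy Python illustration of that naming scheme, with made-up values:

    # Toy illustration of the PRODUCTS.<file>.mk.<var> globals built above.
    cfg = {"PRODUCT_NAME": "aosp_arm64", "PRODUCT_PACKAGES": ["libfoo"]}
    top_pcm_name = "aosp_arm64"
    exported = {"PRODUCTS." + top_pcm_name + ".mk." + var: val
                for var, val in cfg.items()}
    exported["PRODUCTS"] = [top_pcm_name + ".mk"]
    # -> {"PRODUCTS.aosp_arm64.mk.PRODUCT_NAME": "aosp_arm64", ...}
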
 
 
 def _dictionary_difference(a, b):
@@ -237,7 +253,14 @@
     input_variables_init(globals_base, h_base)
     input_variables_init(globals, h)
     board_config_init(globals, h)
-    return (globals, _dictionary_difference(h.cfg, h_base.cfg), globals_base)
+
+    # Board configuration files aren't really supposed to change
+    # product configuration variables, but some do. You lose the
+    # inheritance features of the product config variables if you do.
+    for var, value in _dictionary_difference(h.cfg, h_base.cfg).items():
+        globals[var] = value
+
+    return (globals, globals_base)
 
 
 def _substitute_inherited(configs, pcm_name, cfg):
@@ -407,7 +430,10 @@
     return struct(
         cfg = dict(),
         inherited_modules = dict(),
-        default_list_value = list()
+        default_list_value = list(),
+        artifact_path_requirements = list(),
+        artifact_path_allowed_list = list(),
+        artifact_path_requirement_is_relaxed = [False], # as a list so that we can reassign it
     )
 
 def __h_cfg(handle):
@@ -552,13 +578,17 @@
     """
     return " ".join([__base(w) for w in __words(paths)])
 
-def _require_artifacts_in_path(paths, allowed_paths):
-    """TODO."""
-    pass
+def _require_artifacts_in_path(handle, paths, allowed_paths):
+    """Equivalent to require-artifacts-in-path in Make."""
+    handle.artifact_path_requirements.clear()
+    handle.artifact_path_requirements.extend(__words(paths))
+    handle.artifact_path_allowed_list.clear()
+    handle.artifact_path_allowed_list.extend(__words(allowed_paths))
 
-def _require_artifacts_in_path_relaxed(paths, allowed_paths):
-    """TODO."""
-    pass
+def _require_artifacts_in_path_relaxed(handle, paths, allowed_paths):
+    """Equivalent to require-artifacts-in-path-relaxed in Make."""
+    _require_artifacts_in_path(handle, paths, allowed_paths)
+    handle.artifact_path_requirement_is_relaxed[0] = True
 
 def _expand_wildcard(pattern):
     """Expands shell wildcard pattern."""
@@ -676,6 +706,20 @@
     return out_words if type(s) == "list" else " ".join(out_words)
 
 
+def _mksort(input):
+    """Emulate Make's sort.
+
+    This is unique from a regular sort in that it also strips
+    the input, and removes duplicate words from the input.
+    """
+    input = sorted(__words(input))
+    result = []
+    for w in input:
+        if len(result) == 0 or result[-1] != w:
+            result.append(w)
+    return result
+
+
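
_mksort mirrors GNU Make's $(sort), which both sorts and deduplicates its words. A quick self-check of the same logic in plain Python (the Starlark above behaves the same way for string input):

    # Same algorithm as _mksort above: sort the words, drop adjacent duplicates.
    def mksort(s):
        result = []
        for w in sorted(s.split()):
            if not result or result[-1] != w:
                result.append(w)
        return result

    assert mksort("c a b a") == ["a", "b", "c"]  # matches $(sort c a b a)
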
 def _mkstrip(s):
     """Emulates Make's strip.
 
@@ -726,7 +770,6 @@
     """Returns struct containing runtime global settings."""
     settings = dict(
         format = "pretty",
-        print_globals = False,
         rearrange = "",
         trace_modules = False,
         trace_variables = [],
@@ -740,7 +783,8 @@
         elif x == "pretty" or x == "make":
             settings["format"] = x
         elif x == "global":
-            settings["print_globals"] = True
+            # TODO: Remove this, kept for backwards compatibility
+            pass
         elif x != "":
             fail("RBC_OUT: got %s, should be one of: [pretty|make] [sort|unique]" % x)
     for x in getattr(rblf_cli, "RBC_DEBUG", "").split(","):
@@ -785,11 +829,11 @@
     mkerror = _mkerror,
     mkpatsubst = _mkpatsubst,
     mkwarning = _mkwarning,
+    mksort = _mksort,
     mkstrip = _mkstrip,
     mksubst = _mksubst,
     notdir = _notdir,
     printvars = _printvars,
-    printglobals = _printglobals,
     product_configuration = _product_configuration,
     board_configuration = _board_configuration,
     product_copy_files_by_pattern = _product_copy_files_by_pattern,
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index 006e1dc..d771d22 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -100,18 +100,24 @@
 endif
 
 ifdef LOCAL_SOONG_PROGUARD_DICT
+  my_proguard_dictionary_directory := $(local-proguard-dictionary-directory)
+  my_proguard_dictionary_mapping_directory := $(local-proguard-dictionary-mapping-directory)
   $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
     $(intermediates.COMMON)/proguard_dictionary))
-  $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
-    $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary))
+  $(eval $(call copy-r8-dictionary-file-with-mapping,\
+    $(LOCAL_SOONG_PROGUARD_DICT),\
+    $(my_proguard_dictionary_directory)/proguard_dictionary,\
+    $(my_proguard_dictionary_mapping_directory)/proguard_dictionary.textproto))
   $(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),\
-    $(call local-packaging-dir,proguard_dictionary)/classes.jar))
+    $(my_proguard_dictionary_directory)/classes.jar))
   $(call add-dependency,$(LOCAL_BUILT_MODULE),\
     $(intermediates.COMMON)/proguard_dictionary)
   $(call add-dependency,$(LOCAL_BUILT_MODULE),\
-    $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary)
+    $(my_proguard_dictionary_directory)/proguard_dictionary)
   $(call add-dependency,$(LOCAL_BUILT_MODULE),\
-    $(call local-packaging-dir,proguard_dictionary)/classes.jar)
+    $(my_proguard_dictionary_mapping_directory)/proguard_dictionary.textproto)
+  $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+    $(my_proguard_dictionary_directory)/classes.jar)
 endif
 
 ifdef LOCAL_SOONG_PROGUARD_USAGE_ZIP
diff --git a/core/soong_cc_rust_prebuilt.mk b/core/soong_cc_rust_prebuilt.mk
index ca52374..7a177ff 100644
--- a/core/soong_cc_rust_prebuilt.mk
+++ b/core/soong_cc_rust_prebuilt.mk
@@ -184,7 +184,7 @@
       # drop /root as /root is mounted as /
       my_unstripped_path := $(patsubst $(TARGET_OUT_UNSTRIPPED)/root/%,$(TARGET_OUT_UNSTRIPPED)/%, $(my_unstripped_path))
       symbolic_output := $(my_unstripped_path)/$(my_installed_module_stem)
-      $(eval $(call copy-one-file,$(LOCAL_SOONG_UNSTRIPPED_BINARY),$(symbolic_output)))
+      $(eval $(call copy-unstripped-elf-file-with-mapping,$(LOCAL_SOONG_UNSTRIPPED_BINARY),$(symbolic_output)))
       $(LOCAL_BUILT_MODULE): | $(symbolic_output)
 
       ifeq ($(BREAKPAD_GENERATE_SYMBOLS),true)
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 916dfbb..32675f2 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -227,6 +227,8 @@
 $(call add_json_list, PackageNameOverrides,              $(PRODUCT_PACKAGE_NAME_OVERRIDES))
 $(call add_json_list, CertificateOverrides,              $(PRODUCT_CERTIFICATE_OVERRIDES))
 
+$(call add_json_str, ApexGlobalMinSdkVersionOverride,    $(APEX_GLOBAL_MIN_SDK_VERSION_OVERRIDE))
+
 $(call add_json_bool, EnforceSystemCertificate,          $(filter true,$(ENFORCE_SYSTEM_CERTIFICATE)))
 $(call add_json_list, EnforceSystemCertificateAllowList, $(ENFORCE_SYSTEM_CERTIFICATE_ALLOW_LIST))
 
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index b819cdc..a8f475f 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -62,18 +62,24 @@
 endif
 
 ifdef LOCAL_SOONG_PROGUARD_DICT
+  my_proguard_dictionary_directory := $(local-proguard-dictionary-directory)
+  my_proguard_dictionary_mapping_directory := $(local-proguard-dictionary-mapping-directory)
   $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
     $(intermediates.COMMON)/proguard_dictionary))
-  $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
-    $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary))
+  $(eval $(call copy-r8-dictionary-file-with-mapping,\
+    $(LOCAL_SOONG_PROGUARD_DICT),\
+    $(my_proguard_dictionary_directory)/proguard_dictionary,\
+    $(my_proguard_dictionary_mapping_directory)/proguard_dictionary.textproto))
   $(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),\
-    $(call local-packaging-dir,proguard_dictionary)/classes.jar))
+    $(my_proguard_dictionary_directory)/classes.jar))
   $(call add-dependency,$(common_javalib.jar),\
     $(intermediates.COMMON)/proguard_dictionary)
   $(call add-dependency,$(common_javalib.jar),\
-    $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary)
+    $(my_proguard_dictionary_directory)/proguard_dictionary)
   $(call add-dependency,$(common_javalib.jar),\
-    $(call local-packaging-dir,proguard_dictionary)/classes.jar)
+    $(my_proguard_dictionary_mapping_directory)/proguard_dictionary.textproto)
+  $(call add-dependency,$(common_javalib.jar),\
+    $(my_proguard_dictionary_directory)/classes.jar)
 endif
 
 ifdef LOCAL_SOONG_PROGUARD_USAGE_ZIP
diff --git a/core/tasks/art-host-tests.mk b/core/tasks/art-host-tests.mk
index b9a349d..2af1ded 100644
--- a/core/tasks/art-host-tests.mk
+++ b/core/tasks/art-host-tests.mk
@@ -42,4 +42,7 @@
 art-host-tests: $(art_host_tests_zip)
 $(call dist-for-goals, art-host-tests, $(art_host_tests_zip))
 
+$(call declare-1p-container,$(art_host_tests_zip),)
+$(call declare-container-license-deps,$(art_host_tests_zip),$(COMPATIBILITY.art-host-tests.FILES) $(my_host_shared_lib_for_art_host_tests),$(PRODUCT_OUT)/:/)
+
 tests: art-host-tests
diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk
index 3f84668..c282268 100644
--- a/core/tasks/cts.mk
+++ b/core/tasks/cts.mk
@@ -198,6 +198,13 @@
 $(call dist-for-goals, cts-api-coverage, $(cts-combined-coverage-report):cts-combined-coverage-report.html)
 $(call dist-for-goals, cts-api-coverage, $(cts-combined-xml-coverage-report):cts-combined-coverage-report.xml)
 
+ALL_TARGETS.$(cts-test-coverage-report).META_LIC:=$(module_license_metadata)
+ALL_TARGETS.$(cts-system-api-coverage-report).META_LIC:=$(module_license_metadata)
+ALL_TARGETS.$(cts-system-api-xml-coverage-report).META_LIC:=$(module_license_metadata)
+ALL_TARGETS.$(cts-verifier-coverage-report).META_LIC:=$(module_license_metadata)
+ALL_TARGETS.$(cts-combined-coverage-report).META_LIC:=$(module_license_metadata)
+ALL_TARGETS.$(cts-combined-xml-coverage-report).META_LIC:=$(module_license_metadata)
+
 # Arguments;
 #  1 - Name of the report printed out on the screen
 #  2 - List of apk files that will be scanned to generate the report
diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk
index 73fad7c..3196f52 100644
--- a/core/tasks/device-tests.mk
+++ b/core/tasks/device-tests.mk
@@ -55,4 +55,7 @@
 device-tests: $(device-tests-zip)
 $(call dist-for-goals, device-tests, $(device-tests-zip) $(device-tests-list-zip) $(device-tests-configs-zip) $(device_tests_host_shared_libs_zip))
 
+$(call declare-1p-container,$(device-tests-zip),)
+$(call declare-container-license-deps,$(device-tests-zip),$(COMPATIBILITY.device-tests.FILES) $(my_host_shared_lib_for_device_tests),$(PRODUCT_OUT)/:/)
+
 tests: device-tests
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index a820a28..0daf446 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -78,6 +78,9 @@
 general-tests: $(general_tests_zip)
 $(call dist-for-goals, general-tests, $(general_tests_zip) $(general_tests_list_zip) $(general_tests_configs_zip) $(general_tests_host_shared_libs_zip))
 
+$(call declare-1p-container,$(general_tests_zip),)
+$(call declare-container-license-deps,$(general_tests_zip),$(COMPATIBILITY.general-tests.FILES) $(general_tests_tools) $(my_host_shared_lib_for_general_tests),$(PRODUCT_OUT)/:/)
+
 intermediates_dir :=
 general_tests_tools :=
 general_tests_zip :=
diff --git a/core/tasks/host-unit-tests.mk b/core/tasks/host-unit-tests.mk
index 755b589..ed2f2a6 100644
--- a/core/tasks/host-unit-tests.mk
+++ b/core/tasks/host-unit-tests.mk
@@ -47,4 +47,7 @@
 host-unit-tests: $(host_unit_tests_zip)
 $(call dist-for-goals, host-unit-tests, $(host_unit_tests_zip))
 
+$(call declare-1p-container,$(host_unit_tests_zip),)
+$(call declare-container-license-deps,$(host_unit_tests_zip),$(COMPATIBILITY.host-unit-tests.FILES) $(my_host_shared_lib_for_host_unit_tests),$(PRODUCT_OUT)/:/)
+
 tests: host-unit-tests
diff --git a/core/tasks/oem_image.mk b/core/tasks/oem_image.mk
index a847b9d..134be01 100644
--- a/core/tasks/oem_image.mk
+++ b/core/tasks/oem_image.mk
@@ -43,4 +43,7 @@
 oem_image : $(INSTALLED_OEMIMAGE_TARGET)
 $(call dist-for-goals, oem_image, $(INSTALLED_OEMIMAGE_TARGET))
 
+$(call declare-1p-container,$(INSTALLED_OEMIMAGE_TARGET),)
+$(call declare-container-license-deps,$(INSTALLED_OEMIMAGE_TARGET),$(INTERNAL_USERIMAGE_DEPS) $(INTERNAL_OEMIMAGE_FILES),$(INSTALLED_OEMIMAGE_TARGET):)
+
 endif  # oem_image in $(MAKECMDGOALS)
diff --git a/core/tasks/owners.mk b/core/tasks/owners.mk
index 6f32aaf..806b8ee 100644
--- a/core/tasks/owners.mk
+++ b/core/tasks/owners.mk
@@ -31,3 +31,5 @@
 owners : $(owners_zip)
 
 $(call dist-for-goals, general-tests, $(owners_zip))
+
+$(call declare-0p-target,$(owners_zip))
diff --git a/core/tasks/test_mapping.mk b/core/tasks/test_mapping.mk
index da64cab..0b0c93c 100644
--- a/core/tasks/test_mapping.mk
+++ b/core/tasks/test_mapping.mk
@@ -36,3 +36,5 @@
 test_mapping : $(test_mappings_zip)
 
 $(call dist-for-goals, dist_files test_mapping,$(test_mappings_zip))
+
+$(call declare-1p-target,$(test_mappings_zip),)
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index 3b348bd..add580d 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -80,6 +80,7 @@
   $(MERGE_ZIPS) \
   $(SOONG_ZIP) \
   $(host_shared_libs) \
+  $(test_suite_extra_deps) \
 
 compatibility_zip_resources := $(out_dir)/tools $(out_dir)/testcases $(out_dir)/lib $(out_dir)/lib64
 
@@ -120,8 +121,10 @@
 	$(SOONG_ZIP) -d -o $(PRIVATE_tests_list_zip) -j -f $(PRIVATE_tests_list)
 	rm -f $(PRIVATE_tests_list)
 
+$(call declare-0p-target,$(compatibility_tests_list_zip),)
+
 $(call declare-1p-container,$(compatibility_zip),)
-$(call declare-container-license-deps,$(compatibility_zip),$(compatibility_zip_deps) $(test_suite_jdk),$(out_dir)/:/)
+$(call declare-container-license-deps,$(compatibility_zip),$(compatibility_zip_deps) $(test_suite_jdk), $(out_dir)/:/)
 
 $(eval $(call html-notice-rule,$(test_suite_notice_html),"Test suites","Notices for files contained in the test suites filesystem image:",$(compatibility_zip),$(compatibility_zip)))
 $(eval $(call text-notice-rule,$(test_suite_notice_txt),"Test suites","Notices for files contained in the test suites filesystem image:",$(compatibility_zip),$(compatibility_zip)))
@@ -139,3 +142,4 @@
 test_suite_jdk :=
 test_suite_jdk_dir :=
 host_shared_libs :=
+test_suite_extra_deps :=
diff --git a/core/tasks/tools/vts_package_utils.mk b/core/tasks/tools/vts_package_utils.mk
index 47bf29c..f1159b3 100644
--- a/core/tasks/tools/vts_package_utils.mk
+++ b/core/tasks/tools/vts_package_utils.mk
@@ -29,5 +29,6 @@
       $(eval my_copy_dest := $(patsubst data/%,DATA/%,\
                                $(patsubst system/%,DATA/%,\
                                    $(patsubst $(PRODUCT_OUT)/%,%,$(ins)))))\
+      $(eval ALL_TARGETS.$(2)/$(my_copy_dest).META_LIC := $(if $(strip $(ALL_MODULES.$(m).META_LIC)),$(ALL_MODULES.$(m).META_LIC),$(ALL_MODULES.$(m).DELAYED_META_LIC)))\
       $(bui):$(2)/$(my_copy_dest))))
 endef
diff --git a/core/tasks/tradefed-tests-list.mk b/core/tasks/tradefed-tests-list.mk
index bcbdfcf..61bf136 100644
--- a/core/tasks/tradefed-tests-list.mk
+++ b/core/tasks/tradefed-tests-list.mk
@@ -35,4 +35,6 @@
 tradefed-tests-list : $(tradefed_tests_list_zip)
 $(call dist-for-goals, tradefed-tests-list, $(tradefed_tests_list_zip))
 
+$(call declare-1p-target,$(tradefed_tests_list_zip),)
+
 tests: tradefed-tests-list
diff --git a/core/tasks/vts-core-tests.mk b/core/tasks/vts-core-tests.mk
index 3c838b5..befde87 100644
--- a/core/tasks/vts-core-tests.mk
+++ b/core/tasks/vts-core-tests.mk
@@ -35,14 +35,14 @@
 
 copy_kernel_tests := $(call copy-many-files,$(kernel_test_copy_pairs))
 
+test_suite_extra_deps := $(copy_kernel_tests)
+
 # PHONY target to be used to build and test `vts_kernel_tests` without building full vts
 .PHONY: vts_kernel_tests
 vts_kernel_tests: $(copy_kernel_tests)
 
 include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
 
-$(compatibility_zip): $(copy_kernel_tests)
-
 .PHONY: vts
 vts: $(compatibility_zip) $(compatibility_tests_list_zip)
 $(call dist-for-goals, vts, $(compatibility_zip) $(compatibility_tests_list_zip))
diff --git a/core/tasks/with-license.mk b/core/tasks/with-license.mk
index 469ad76..d41e77a 100644
--- a/core/tasks/with-license.mk
+++ b/core/tasks/with-license.mk
@@ -37,6 +37,10 @@
 		RADIO/bootloader.img:bootloader.img RADIO/radio.img:radio.img \
 		IMAGES/*.img:. OTA/android-info.txt:android-info.txt
 endif
+
+$(call declare-1p-container,$(license_image_input_zip),build)
+$(call declare-container-deps,$(license_image_input_zip),$(BUILT_TARGET_FILES_PACKAGE))
+
 with_license_zip := $(PRODUCT_OUT)/$(name).sh
 $(with_license_zip): PRIVATE_NAME := $(name)
 $(with_license_zip): PRIVATE_INPUT_ZIP := $(license_image_input_zip)
@@ -48,3 +52,7 @@
 		$(PRIVATE_INPUT_ZIP) $(PRIVATE_NAME) $(PRIVATE_VENDOR_BLOBS_LICENSE)
 with-license : $(with_license_zip)
 $(call dist-for-goals, with-license, $(with_license_zip))
+
+$(call declare-1p-container,$(with_license_zip),)
+$(call declare-container-license-deps,$(with_license_zip),$(license_image_input_zip),$(with_license_zip):)
+
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 038b9c4..c8b381d 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -98,7 +98,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-    PLATFORM_SECURITY_PATCH := 2022-03-05
+    PLATFORM_SECURITY_PATCH := 2022-04-05
 endif
 .KATI_READONLY := PLATFORM_SECURITY_PATCH
 
diff --git a/envsetup.sh b/envsetup.sh
index 6b04eed..e7b8538 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -425,6 +425,61 @@
     complete -F _complete_android_module_names m
 }
 
+function multitree_lunch_help()
+{
+    echo "usage: lunch PRODUCT-VARIANT" 1>&2
+    echo "    Set up android build environment based on a product short name and variant" 1>&2
+    echo 1>&2
+    echo "lunch COMBO_FILE VARIANT" 1>&2
+    echo "    Set up android build environment based on a specific lunch combo file" 1>&2
+    echo "    and variant." 1>&2
+    echo 1>&2
+    echo "lunch --print [CONFIG]" 1>&2
+    echo "    Print the contents of a configuration.  If CONFIG is supplied, that config" 1>&2
+    echo "    will be flattened and printed.  If CONFIG is not supplied, the currently" 1>&2
+    echo "    selected config will be printed.  Returns 0 on success or nonzero on error." 1>&2
+    echo 1>&2
+    echo "lunch --list" 1>&2
+    echo "    List all possible combo files available in the current tree" 1>&2
+    echo 1>&2
+    echo "lunch --help" 1>&2
+    echo "lunch -h" 1>&2
+    echo "    Prints this message." 1>&2
+}
+
+function multitree_lunch()
+{
+    local code
+    local results
+    if $(echo "$1" | grep -q '^-') ; then
+        # Calls starting with a -- argument are passed directly and the function
+        # returns with the lunch.py exit code.
+        build/make/orchestrator/core/lunch.py "$@"
+        code=$?
+        if [[ $code -eq 2 ]] ; then
+          echo 1>&2
+          multitree_lunch_help
+          return $code
+        elif [[ $code -ne 0 ]] ; then
+          return $code
+        fi
+    else
+        # All other calls go through the --lunch variant of lunch.py
+        results=($(build/make/orchestrator/core/lunch.py --lunch "$@"))
+        code=$?
+        if [[ $code -eq 2 ]] ; then
+          echo 1>&2
+          multitree_lunch_help
+          return $code
+        elif [[ $code -ne 0 ]] ; then
+          return $code
+        fi
+
+        export TARGET_BUILD_COMBO=${results[0]}
+        export TARGET_BUILD_VARIANT=${results[1]}
+    fi
+}
+
 function choosetype()
 {
     echo "Build type choices are:"
@@ -1727,17 +1782,15 @@
 # Convenience entry point (like m) to use Bazel in AOSP.
 function b()
 (
-    local skip_tests=$(echo "$@" | grep -ow -- "--skip-soong-tests")
-    local bazel_args=(${@/--skip-soong-tests/})
     # Generate BUILD, bzl files into the synthetic Bazel workspace (out/soong/workspace).
-    _trigger_build "all-modules" bp2build USE_BAZEL_ANALYSIS= $skip_tests || return 1
+    _trigger_build "all-modules" bp2build USE_BAZEL_ANALYSIS= || return 1
     # Then, run Bazel using the synthetic workspace as the --package_path.
-    if [[ -z "$bazel_args" ]]; then
+    if [[ -z "$@" ]]; then
         # If there are no args, show help.
         bazel help
     else
         # Else, always run with the bp2build configuration, which sets Bazel's package path to the synthetic workspace.
-        bazel $bazel_args --config=bp2build
+        bazel "$@" --config=bp2build
     fi
 )
 
diff --git a/orchestrator/core/lunch.py b/orchestrator/core/lunch.py
new file mode 100755
index 0000000..35dac73
--- /dev/null
+++ b/orchestrator/core/lunch.py
@@ -0,0 +1,329 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import glob
+import json
+import os
+import sys
+
+EXIT_STATUS_OK = 0
+EXIT_STATUS_ERROR = 1
+EXIT_STATUS_NEED_HELP = 2
+
+def FindDirs(path, name, ttl=6):
+    """Search at most ttl directories deep inside path for a directory called name."""
+    # The dance with subdirs is so that we recurse in sorted order.
+    subdirs = []
+    with os.scandir(path) as it:
+        for dirent in sorted(it, key=lambda x: x.name):
+            try:
+                if dirent.is_dir():
+                    if dirent.name == name:
+                        yield os.path.join(path, dirent.name)
+                    elif ttl > 0:
+                        subdirs.append(dirent.name)
+            except OSError:
+                # Consume filesystem errors, e.g. too many links, permission etc.
+                pass
+    for subdir in subdirs:
+        yield from FindDirs(os.path.join(path, subdir), name, ttl-1)
+
+
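
FindDirs is a generator: matches in the current directory are yielded first, then each subdirectory is searched in sorted order, recursing at most ttl levels. A short usage sketch (the search root is a placeholder for a real workspace):

    # Usage sketch; "." stands in for a real workspace root.
    for combo_dir in FindDirs(".", "multitree_combos", ttl=6):
        print(combo_dir)
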
+def WalkPaths(path, matcher, ttl=10):
+    """Do a traversal of all files under path yielding each file that matches
+    matcher."""
+    # First look for files, then recurse into directories as needed.
+    # The dance with subdirs is so that we recurse in sorted order.
+    subdirs = []
+    with os.scandir(path) as it:
+        for dirent in sorted(it, key=lambda x: x.name):
+            try:
+                if dirent.is_file():
+                    if matcher(dirent.name):
+                        yield os.path.join(path, dirent.name)
+                if dirent.is_dir():
+                    if ttl > 0:
+                        subdirs.append(dirent.name)
+            except OSError:
+                # Ignore filesystem errors, e.g. too many symlinks or permission denied.
+                pass
+    for subdir in sorted(subdirs):
+        yield from WalkPaths(os.path.join(path, subdir), matcher, ttl-1)
+
+
+def FindFile(path, filename):
+    """Return a file called filename inside path, no more than ttl levels deep.
+
+    Directories are searched alphabetically.
+    """
+    for f in WalkPaths(path, lambda x: x == filename):
+        return f
+
+
+def FindConfigDirs(workspace_root):
+    """Find the configuration files in the well known locations inside workspace_root
+
+        <workspace_root>/build/orchestrator/multitree_combos
+           (AOSP devices, such as cuttlefish)
+
+        <workspace_root>/vendor/**/multitree_combos
+            (specific to a vendor and not open sourced)
+
+        <workspace_root>/device/**/multitree_combos
+            (specific to a vendor and are open sourced)
+
+    Directories are returned specifically in this order, so that aosp can't be
+    overridden, but vendor overrides device.
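+
+    For example (paths illustrative only), if both
+        <workspace_root>/vendor/oem1/multitree_combos/foo.mcombo
+        <workspace_root>/device/oem1/multitree_combos/foo.mcombo
+    exist, the vendor directory is yielded first, so the vendor copy wins.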
+    """
+
+    # TODO: When orchestrator is in its own git project remove the "make/" here
+    yield os.path.join(workspace_root, "build/make/orchestrator/multitree_combos")
+
+    dirs = ["vendor", "device"]
+    for d in dirs:
+        yield from FindDirs(os.path.join(workspace_root, d), "multitree_combos")
+
+
+def FindNamedConfig(workspace_root, shortname):
+    """Find the config with the given shortname inside workspace_root.
+
+    Config directories are searched in the order described in FindConfigDirs,
+    and inside those directories, alphabetically."""
+    filename = shortname + ".mcombo"
+    for config_dir in FindConfigDirs(workspace_root):
+        found = FindFile(config_dir, filename)
+        if found:
+            return found
+    return None
+
+
+def ParseProductVariant(s):
+    """Split a PRODUCT-VARIANT name, or return None if it doesn't match that pattern."""
+    split = s.split("-")
+    if len(split) != 2:
+        return None
+    return split
+
+
+def ChooseConfigFromArgs(workspace_root, args):
+    """Return the config file we should use for the given argument,
+    or null if there's no file that matches that."""
+    if len(args) == 1:
+        # Prefer PRODUCT-VARIANT syntax so if there happens to be a matching
+        # file we don't match that.
+        pv = ParseProductVariant(args[0])
+        if pv:
+            config = FindNamedConfig(workspace_root, pv[0])
+            if config:
+                return (config, pv[1])
+            return None, None
+    # Look for a specifically named file
+    if args and os.path.isfile(args[0]):
+        return (args[0], args[1] if len(args) > 1 else None)
+    # That file didn't exist, return that we didn't find it.
+    return None, None
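+# Illustrative examples of the two accepted argument forms (paths hypothetical):
+#   ChooseConfigFromArgs(root, ["v-eng"])                   -> (".../multitree_combos/v.mcombo", "eng")
+#   ChooseConfigFromArgs(root, ["path/to/x.mcombo", "eng"]) -> ("path/to/x.mcombo", "eng")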
+
+
+class ConfigException(Exception):
+    ERROR_PARSE = "parse"
+    ERROR_CYCLE = "cycle"
+
+    def __init__(self, kind, message, locations, line=0):
+        """Error thrown when loading and parsing configurations.
+
+        Args:
+            kind: One of the ERROR_* constants describing what went wrong
+            message: Error message to display to user
+            locations: List of filenames of the include history.  Index 0 is
+                       the location where the actual error occurred
+            line: Line number inside locations[0], or 0 if not applicable
+        """
+        if len(locations):
+            s = locations[0]
+            if line:
+                s += ":"
+                s += str(line)
+            s += ": "
+        else:
+            s = ""
+        s += message
+        if len(locations):
+            for loc in locations[1:]:
+                s += "\n        included from %s" % loc
+        super().__init__(s)
+        self.kind = kind
+        self.message = message
+        self.locations = locations
+        self.line = line
+
+
+def LoadConfig(filename):
+    """Load a config, including processing the inherits fields.
+
+    Raises:
+        ConfigException on errors
+    """
+    def LoadAndMerge(fn, visited):
+        with open(fn) as f:
+            try:
+                contents = json.load(f)
+            except json.decoder.JSONDecodeError as ex:
+                raise ConfigException(ConfigException.ERROR_PARSE, ex.msg, visited, ex.lineno)
+            # Merge all the parents into one dict, with a first-parent-wins policy
+            inherited_data = {}
+            for parent in contents.get("inherits", []):
+                if parent in visited:
+                    raise ConfigException(ConfigException.ERROR_CYCLE, "Cycle detected in inherits",
+                            visited)
+                DeepMerge(inherited_data, LoadAndMerge(parent, [parent,] + visited))
+            # Then merge inherited_data into contents, but what's already there will win.
+            DeepMerge(contents, inherited_data)
+            contents.pop("inherits", None)
+        return contents
+    return LoadAndMerge(filename, [filename,])
+
+
+def DeepMerge(merged, addition):
+    """Merge all fields of addition into merged. Pre-existing fields win."""
+    for k, v in addition.items():
+        if k in merged:
+            if isinstance(v, dict) and isinstance(merged[k], dict):
+                DeepMerge(merged[k], v)
+        else:
+            merged[k] = v
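+# Sketch of the inherit/merge semantics above (file names and values illustrative):
+#   child.mcombo:  {"inherits": ["parent.mcombo"], "k": "child", "d": {"a": 1}}
+#   parent.mcombo: {"k": "parent", "d": {"b": 2}}
+#   LoadConfig("child.mcombo") -> {"k": "child", "d": {"a": 1, "b": 2}}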
+
+
+def Lunch(args):
+    """Handle the lunch command."""
+    # Check that we're at the top of a multitree workspace
+    # TODO: Choose the right sentinel file
+    if not os.path.exists("build/make/orchestrator"):
+        sys.stderr.write("ERROR: lunch.py must be run from the root of a multi-tree workspace\n")
+        return EXIT_STATUS_ERROR
+
+    # Choose the config file
+    config_file, variant = ChooseConfigFromArgs(".", args)
+
+    if config_file is None:
+        sys.stderr.write("Can't find lunch combo file for: %s\n" % " ".join(args))
+        return EXIT_STATUS_NEED_HELP
+    if variant is None:
+        sys.stderr.write("Can't find variant for: %s\n" % " ".join(args))
+        return EXIT_STATUS_NEED_HELP
+
+    # Parse the config file
+    try:
+        config = LoadConfig(config_file)
+    except ConfigException as ex:
+        sys.stderr.write(str(ex))
+        return EXIT_STATUS_ERROR
+
+    # Warn if the lunchable bit isn't set, because this is probably not a usable config
+    if not config.get("lunchable", False):
+        sys.stderr.write("%s: Lunch config file (or inherited files) does not have the 'lunchable'"
+                % config_file)
+        sys.stderr.write(" flag set, which means it is probably not a complete lunch spec.\n")
+
+    # All the validation has passed, so print the name of the file and the variant
+    sys.stdout.write("%s\n" % config_file)
+    sys.stdout.write("%s\n" % variant)
+
+    return EXIT_STATUS_OK
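+# On success, Lunch() prints two lines that the shell wrapper reads into
+# TARGET_BUILD_COMBO and TARGET_BUILD_VARIANT, e.g. (illustrative):
+#   build/make/orchestrator/multitree_combos/test.mcombo
+#   eng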
+
+
+def FindAllComboFiles(workspace_root):
+    """Find all .mcombo files in the prescribed locations in the tree."""
+    for config_dir in FindConfigDirs(workspace_root):
+        yield from WalkPaths(config_dir, lambda x: x.endswith(".mcombo"))
+
+
+def IsFileLunchable(config_file):
+    """Parse config_file, flatten the inheritance, and return whether it can be
+    used as a lunch target."""
+    try:
+        config = LoadConfig(config_file)
+    except ConfigException as ex:
+        sys.stderr.write("%s" % ex)
+        return False
+    return config.get("lunchable", False)
+
+
+def FindAllLunchable(workspace_root):
+    """Find all mcombo files in the tree (rooted at workspace_root) that when
+    parsed (and inheritance is flattened) have lunchable: true."""
+    for f in FindAllComboFiles(workspace_root):
+        if IsFileLunchable(f):
+            yield f
+
+
+def List():
+    """Handle the --list command."""
+    for f in sorted(FindAllLunchable(".")):
+        print(f)
+
+
+def Print(args):
+    """Handle the --print command."""
+    # Parse args
+    if len(args) == 0:
+        config_file = os.environ.get("TARGET_BUILD_COMBO")
+        if not config_file:
+            sys.stderr.write("TARGET_BUILD_COMBO not set. Run lunch or pass a combo file.\n")
+            return EXIT_STATUS_NEED_HELP
+    elif len(args) == 1:
+        config_file = args[0]
+    else:
+        return EXIT_STATUS_NEED_HELP
+
+    # Parse the config file
+    try:
+        config = LoadConfig(config_file)
+    except ConfigException as ex:
+        sys.stderr.write(str(ex))
+        return EXIT_STATUS_ERROR
+
+    # Print the config in json form
+    json.dump(config, sys.stdout, indent=4)
+
+    return EXIT_STATUS_OK
+
+
+def main(argv):
+    if len(argv) < 2 or argv[1] == "-h" or argv[1] == "--help":
+        return EXIT_STATUS_NEED_HELP
+
+    if len(argv) == 2 and argv[1] == "--list":
+        List()
+        return EXIT_STATUS_OK
+
+    if len(argv) == 2 and argv[1] == "--print":
+        return Print(argv[2:])
+        return EXIT_STATUS_OK
+
+    if (len(argv) == 2 or len(argv) == 3) and argv[1] == "--lunch":
+        return Lunch(argv[2:])
+
+    sys.stderr.write("Unknown lunch command: %s\n" % " ".join(argv[1:]))
+    return EXIT_STATUS_NEED_HELP
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv))
+
+
+# vim: sts=4:ts=4:sw=4
diff --git a/orchestrator/core/test/configs/another/bad.mcombo b/orchestrator/core/test/configs/another/bad.mcombo
new file mode 100644
index 0000000..0967ef4
--- /dev/null
+++ b/orchestrator/core/test/configs/another/bad.mcombo
@@ -0,0 +1 @@
+{}
diff --git a/orchestrator/core/test/configs/another/dir/a b/orchestrator/core/test/configs/another/dir/a
new file mode 100644
index 0000000..7898192
--- /dev/null
+++ b/orchestrator/core/test/configs/another/dir/a
@@ -0,0 +1 @@
+a
diff --git a/orchestrator/core/test/configs/b-eng b/orchestrator/core/test/configs/b-eng
new file mode 100644
index 0000000..eceb3f3
--- /dev/null
+++ b/orchestrator/core/test/configs/b-eng
@@ -0,0 +1 @@
+INVALID FILE
diff --git a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/b.mcombo b/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/b.mcombo
new file mode 100644
index 0000000..8cc8370
--- /dev/null
+++ b/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/b.mcombo
@@ -0,0 +1,3 @@
+{
+    "lunchable": "true"
+}
diff --git a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo b/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo
new file mode 100644
index 0000000..0967ef4
--- /dev/null
+++ b/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo
@@ -0,0 +1 @@
+{}
diff --git a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/not_a_combo.txt b/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/not_a_combo.txt
new file mode 100644
index 0000000..f9805f2
--- /dev/null
+++ b/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/not_a_combo.txt
@@ -0,0 +1 @@
+not a combo file
diff --git a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/b.mcombo b/orchestrator/core/test/configs/device/aa/bb/multitree_combos/b.mcombo
new file mode 100644
index 0000000..0967ef4
--- /dev/null
+++ b/orchestrator/core/test/configs/device/aa/bb/multitree_combos/b.mcombo
@@ -0,0 +1 @@
+{}
diff --git a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/d.mcombo b/orchestrator/core/test/configs/device/aa/bb/multitree_combos/d.mcombo
new file mode 100644
index 0000000..0967ef4
--- /dev/null
+++ b/orchestrator/core/test/configs/device/aa/bb/multitree_combos/d.mcombo
@@ -0,0 +1 @@
+{}
diff --git a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/v.mcombo b/orchestrator/core/test/configs/device/aa/bb/multitree_combos/v.mcombo
new file mode 100644
index 0000000..0967ef4
--- /dev/null
+++ b/orchestrator/core/test/configs/device/aa/bb/multitree_combos/v.mcombo
@@ -0,0 +1 @@
+{}
diff --git a/orchestrator/core/test/configs/device/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo b/orchestrator/core/test/configs/device/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/orchestrator/core/test/configs/device/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo
diff --git a/orchestrator/core/test/configs/parsing/cycles/1.mcombo b/orchestrator/core/test/configs/parsing/cycles/1.mcombo
new file mode 100644
index 0000000..ab8fe33
--- /dev/null
+++ b/orchestrator/core/test/configs/parsing/cycles/1.mcombo
@@ -0,0 +1,5 @@
+{
+    "inherits": [
+        "test/configs/parsing/cycles/2.mcombo"
+    ]
+}
diff --git a/orchestrator/core/test/configs/parsing/cycles/2.mcombo b/orchestrator/core/test/configs/parsing/cycles/2.mcombo
new file mode 100644
index 0000000..2b774d0
--- /dev/null
+++ b/orchestrator/core/test/configs/parsing/cycles/2.mcombo
@@ -0,0 +1,6 @@
+{
+    "inherits": [
+        "test/configs/parsing/cycles/3.mcombo"
+    ]
+}
+
diff --git a/orchestrator/core/test/configs/parsing/cycles/3.mcombo b/orchestrator/core/test/configs/parsing/cycles/3.mcombo
new file mode 100644
index 0000000..41b629b
--- /dev/null
+++ b/orchestrator/core/test/configs/parsing/cycles/3.mcombo
@@ -0,0 +1,6 @@
+{
+    "inherits": [
+        "test/configs/parsing/cycles/1.mcombo"
+    ]
+}
+
diff --git a/orchestrator/core/test/configs/parsing/merge/1.mcombo b/orchestrator/core/test/configs/parsing/merge/1.mcombo
new file mode 100644
index 0000000..a5a57d7
--- /dev/null
+++ b/orchestrator/core/test/configs/parsing/merge/1.mcombo
@@ -0,0 +1,13 @@
+{
+    "inherits": [
+        "test/configs/parsing/merge/2.mcombo",
+        "test/configs/parsing/merge/3.mcombo"
+    ],
+    "in_1": "1",
+    "in_1_2": "1",
+    "merged": {
+        "merged_1": "1",
+        "merged_1_2": "1"
+    },
+    "dict_1": { "a" : "b" }
+}
diff --git a/orchestrator/core/test/configs/parsing/merge/2.mcombo b/orchestrator/core/test/configs/parsing/merge/2.mcombo
new file mode 100644
index 0000000..00963e2
--- /dev/null
+++ b/orchestrator/core/test/configs/parsing/merge/2.mcombo
@@ -0,0 +1,12 @@
+{
+    "in_1_2": "2",
+    "in_2": "2",
+    "in_2_3": "2",
+    "merged": {
+        "merged_1_2": "2",
+        "merged_2": "2",
+        "merged_2_3": "2"
+    },
+    "dict_2": { "a" : "b" }
+}
+
diff --git a/orchestrator/core/test/configs/parsing/merge/3.mcombo b/orchestrator/core/test/configs/parsing/merge/3.mcombo
new file mode 100644
index 0000000..5fc9d90
--- /dev/null
+++ b/orchestrator/core/test/configs/parsing/merge/3.mcombo
@@ -0,0 +1,10 @@
+{
+    "in_3": "3",
+    "in_2_3": "3",
+    "merged": {
+        "merged_3": "3",
+        "merged_2_3": "3"
+    },
+    "dict_3": { "a" : "b" }
+}
+
diff --git a/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/b.mcombo b/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/b.mcombo
new file mode 100644
index 0000000..0967ef4
--- /dev/null
+++ b/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/b.mcombo
@@ -0,0 +1 @@
+{}
diff --git a/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/v.mcombo b/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/v.mcombo
new file mode 100644
index 0000000..0967ef4
--- /dev/null
+++ b/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/v.mcombo
@@ -0,0 +1 @@
+{}
diff --git a/orchestrator/core/test/configs/vendor/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo b/orchestrator/core/test/configs/vendor/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/orchestrator/core/test/configs/vendor/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo
diff --git a/orchestrator/core/test_lunch.py b/orchestrator/core/test_lunch.py
new file mode 100755
index 0000000..3c39493
--- /dev/null
+++ b/orchestrator/core/test_lunch.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2008 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import unittest
+
+sys.dont_write_bytecode = True
+import lunch
+
+class TestStringMethods(unittest.TestCase):
+
+    def test_find_dirs(self):
+        self.assertEqual([x for x in lunch.FindDirs("test/configs", "multitree_combos")], [
+                    "test/configs/build/make/orchestrator/multitree_combos",
+                    "test/configs/device/aa/bb/multitree_combos",
+                    "test/configs/vendor/aa/bb/multitree_combos"])
+
+    def test_find_file(self):
+        # Finds the one in device first because this is searching from the root,
+        # not using FindNamedConfig.
+        self.assertEqual(lunch.FindFile("test/configs", "v.mcombo"),
+                   "test/configs/device/aa/bb/multitree_combos/v.mcombo")
+
+    def test_find_config_dirs(self):
+        self.assertEqual([x for x in lunch.FindConfigDirs("test/configs")], [
+                    "test/configs/build/make/orchestrator/multitree_combos",
+                    "test/configs/vendor/aa/bb/multitree_combos",
+                    "test/configs/device/aa/bb/multitree_combos"])
+
+    def test_find_named_config(self):
+        # Inside build/orchestrator, overriding device and vendor
+        self.assertEqual(lunch.FindNamedConfig("test/configs", "b"),
+                    "test/configs/build/make/orchestrator/multitree_combos/b.mcombo")
+
+        # Nested dir inside a combo dir
+        self.assertEqual(lunch.FindNamedConfig("test/configs", "nested"),
+                    "test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo")
+
+        # Inside vendor, overriding device
+        self.assertEqual(lunch.FindNamedConfig("test/configs", "v"),
+                    "test/configs/vendor/aa/bb/multitree_combos/v.mcombo")
+
+        # Inside device
+        self.assertEqual(lunch.FindNamedConfig("test/configs", "d"),
+                    "test/configs/device/aa/bb/multitree_combos/d.mcombo")
+
+        # Make sure we don't look too deep (for performance)
+        self.assertIsNone(lunch.FindNamedConfig("test/configs", "too_deep"))
+
+
+    def test_choose_config_file(self):
+        # Empty string argument
+        self.assertEqual(lunch.ChooseConfigFromArgs("test/configs", [""]),
+                    (None, None))
+
+        # A PRODUCT-VARIANT name
+        self.assertEqual(lunch.ChooseConfigFromArgs("test/configs", ["v-eng"]),
+                    ("test/configs/vendor/aa/bb/multitree_combos/v.mcombo", "eng"))
+
+        # A PRODUCT-VARIANT name that conflicts with a file
+        self.assertEqual(lunch.ChooseConfigFromArgs("test/configs", ["b-eng"]),
+                    ("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"))
+
+        # A PRODUCT-VARIANT that doesn't exist
+        self.assertEqual(lunch.ChooseConfigFromArgs("test/configs", ["z-user"]),
+                    (None, None))
+
+        # An explicit file
+        self.assertEqual(lunch.ChooseConfigFromArgs("test/configs",
+                        ["test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"]),
+                    ("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"))
+
+        # An explicit file that doesn't exist
+        self.assertEqual(lunch.ChooseConfigFromArgs("test/configs",
+                        ["test/configs/doesnt_exist.mcombo", "eng"]),
+                    (None, None))
+
+        # An explicit file without a variant returns a None variant (Lunch rejects it later)
+        self.assertEqual(lunch.ChooseConfigFromArgs("test/configs",
+                        ["test/configs/build/make/orchestrator/multitree_combos/b.mcombo"]),
+                    ("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", None))
+
+
+    def test_config_cycles(self):
+        # Test that we catch cycles
+        with self.assertRaises(lunch.ConfigException) as context:
+            lunch.LoadConfig("test/configs/parsing/cycles/1.mcombo")
+        self.assertEqual(context.exception.kind, lunch.ConfigException.ERROR_CYCLE)
+
+    def test_config_merge(self):
+        # Test the merge logic
+        self.assertEqual(lunch.LoadConfig("test/configs/parsing/merge/1.mcombo"), {
+                            "in_1": "1",
+                            "in_1_2": "1",
+                            "merged": {"merged_1": "1",
+                                "merged_1_2": "1",
+                                "merged_2": "2",
+                                "merged_2_3": "2",
+                                "merged_3": "3"},
+                            "dict_1": {"a": "b"},
+                            "in_2": "2",
+                            "in_2_3": "2",
+                            "dict_2": {"a": "b"},
+                            "in_3": "3",
+                            "dict_3": {"a": "b"}
+                        })
+
+    def test_list(self):
+        self.assertEqual(sorted(lunch.FindAllLunchable("test/configs")),
+                ["test/configs/build/make/orchestrator/multitree_combos/b.mcombo"])
+
+if __name__ == "__main__":
+    unittest.main()
+
+# vim: sts=4:ts=4:sw=4
diff --git a/orchestrator/multitree_combos/test.mcombo b/orchestrator/multitree_combos/test.mcombo
new file mode 100644
index 0000000..3ad0717
--- /dev/null
+++ b/orchestrator/multitree_combos/test.mcombo
@@ -0,0 +1,16 @@
+{
+    "lunchable": true,
+    "system": {
+        "tree": "inner_tree_system",
+        "product": "system_lunch_product"
+    },
+    "vendor": {
+        "tree": "inner_tree_vendor",
+        "product": "vendor_lunch_product"
+    },
+    "modules": {
+        "com.android.something": {
+            "tree": "inner_tree_module"
+        }
+    }
+}
diff --git a/target/board/Android.mk b/target/board/Android.mk
index 142270e..baa3d3a 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -30,6 +30,8 @@
 	$(hide) echo "" > $@
 endif
 
+$(call declare-0p-target,$(INSTALLED_ANDROID_INFO_TXT_TARGET))
+
 # Copy compatibility metadata to the device.
 
 # Device Manifest
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index 8e062ba..45cd515 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -11,8 +11,11 @@
 # This flag is set by mainline but isn't desired for GSI.
 BOARD_USES_SYSTEM_OTHER_ODEX :=
 
-# system.img is always ext4 and non-sparsed.
+# system.img is ext4/erofs and non-sparsed.
+GSI_FILE_SYSTEM_TYPE ?= ext4
+BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE := $(GSI_FILE_SYSTEM_TYPE)
 TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
+TARGET_USERIMAGES_SPARSE_EROFS_DISABLED := true
 
 # GSI also includes make_f2fs to support userdata parition in f2fs
 # for some devices
diff --git a/tests/run.rbc b/tests/run.rbc
index 82a5e72..58cc4d6 100644
--- a/tests/run.rbc
+++ b/tests/run.rbc
@@ -32,6 +32,9 @@
         fail("Expected '%s', got '%s'" % (expected, actual))
 
 # Unit tests for non-trivial runtime functions
+assert_eq(["a", "b", "c"], rblf.mksort("b a    c c"))
+assert_eq(["a", "b", "c"], rblf.mksort(["b", "a", "c", "c"]))
+
 assert_eq("", rblf.mkstrip(" \n \t    "))
 assert_eq("a b c", rblf.mkstrip("  a b   \n  c \t"))
 assert_eq(1, rblf.mkstrip(1))
diff --git a/tools/event_log_tags.bzl b/tools/event_log_tags.bzl
index 3766da4..35305ae 100644
--- a/tools/event_log_tags.bzl
+++ b/tools/event_log_tags.bzl
@@ -1,7 +1,6 @@
 """Event log tags generation rule"""
 
 load("@bazel_skylib//lib:paths.bzl", "paths")
-load("@rules_android//rules:rules.bzl", "android_library")
 
 def _event_log_tags_impl(ctx):
     out_files = []
@@ -22,7 +21,7 @@
         )
     return [DefaultInfo(files = depset(out_files))]
 
-_event_log_tags = rule(
+event_log_tags = rule(
     implementation = _event_log_tags_impl,
     attrs = {
         "srcs": attr.label_list(allow_files = [".logtags"], mandatory = True),
@@ -34,14 +33,3 @@
         ),
     },
 )
-
-def event_log_tags(name, srcs):
-    _event_log_tags(
-        name = name + "_gen_logtags",
-        srcs = srcs,
-    )
-
-    android_library(
-        name = name,
-        srcs = [name + "_gen_logtags"],
-    )
diff --git a/tools/mk2bp_catalog.py b/tools/mk2bp_catalog.py
index c2afb9b..3fc6236 100755
--- a/tools/mk2bp_catalog.py
+++ b/tools/mk2bp_catalog.py
@@ -308,19 +308,31 @@
     print("""<th class="Count Warning">%s</th>""" % analyzer.title)
   print("      </tr>")
 
+# Get all modules in $(PRODUCT_PACKAGES) and their transitive deps
+def get_module_product_packages_plus_deps(initial_modules, result, soong_data):
+  for module in initial_modules:
+    if module in result:
+      continue
+    result.add(module)
+    if module in soong_data.deps:
+      get_module_product_packages_plus_deps(soong_data.deps[module], result, soong_data)
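+# Illustrative example: with soong_data.deps == {"A": ["B"], "B": ["C"]} and
+# initial_modules == ["A"], result ends up containing {"A", "B", "C"}.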
+
 def main():
   parser = argparse.ArgumentParser(description="Info about remaining Android.mk files.")
   parser.add_argument("--device", type=str, required=True,
                       help="TARGET_DEVICE")
+  parser.add_argument("--product-packages", type=argparse.FileType('r'),
+                      default=None,
+                      help="PRODUCT_PACKAGES")
   parser.add_argument("--title", type=str,
                       help="page title")
   parser.add_argument("--codesearch", type=str,
                       default="https://cs.android.com/android/platform/superproject/+/master:",
                       help="page title")
-  parser.add_argument("--out_dir", type=str,
+  parser.add_argument("--out-dir", type=str,
                       default=None,
                       help="Equivalent of $OUT_DIR, which will also be checked if"
-                        + " --out_dir is unset. If neither is set, default is"
+                        + " --out-dir is unset. If neither is set, default is"
                         + " 'out'.")
   parser.add_argument("--mode", type=str,
                       default="html",
@@ -354,16 +366,25 @@
       continue
     all_makefiles[filename] = Makefile(filename)
 
+  # Get all the modules in $(PRODUCT_PACKAGES) and the corresponding deps
+  product_package_modules_plus_deps = set()
+  if args.product_packages:
+    product_package_top_modules = args.product_packages.read().strip().split('\n')
+    get_module_product_packages_plus_deps(product_package_top_modules, product_package_modules_plus_deps, soong)
+
   if args.mode == "html":
-    HtmlProcessor(args=args, soong=soong, all_makefiles=all_makefiles).execute()
+    HtmlProcessor(args=args, soong=soong, all_makefiles=all_makefiles,
+        product_packages_modules=product_package_modules_plus_deps).execute()
   elif args.mode == "csv":
-    CsvProcessor(args=args, soong=soong, all_makefiles=all_makefiles).execute()
+    CsvProcessor(args=args, soong=soong, all_makefiles=all_makefiles,
+        product_packages_modules=product_package_modules_plus_deps).execute()
 
 class HtmlProcessor(object):
-  def __init__(self, args, soong, all_makefiles):
+  def __init__(self, args, soong, all_makefiles, product_packages_modules):
     self.args = args
     self.soong = soong
     self.all_makefiles = all_makefiles
+    self.product_packages_modules = product_packages_modules
     self.annotations = Annotations()
 
   def execute(self):
@@ -376,6 +397,8 @@
     modules_by_partition = dict()
     partitions = set()
     for installed, module in self.soong.installed.items():
+      if len(self.product_packages_modules) > 0 and module not in self.product_packages_modules:
+        continue
       partition = get_partition_from_installed(HOST_OUT_ROOT, PRODUCT_OUT, installed)
       modules_by_partition.setdefault(partition, []).append(module)
       partitions.add(partition)
@@ -985,10 +1008,11 @@
       return "";
 
 class CsvProcessor(object):
-  def __init__(self, args, soong, all_makefiles):
+  def __init__(self, args, soong, all_makefiles, product_packages_modules):
     self.args = args
     self.soong = soong
     self.all_makefiles = all_makefiles
+    self.product_packages_modules = product_packages_modules
 
   def execute(self):
     csvout = csv.writer(sys.stdout)
@@ -1004,6 +1028,8 @@
     for filename in sorted(self.all_makefiles.keys()):
       makefile = self.all_makefiles[filename]
       for module in self.soong.reverse_makefiles[filename]:
+        if len(self.product_packages_modules) > 0 and module not in self.product_packages_modules:
+          continue
         row = [filename, module]
         # Partitions
         row.append(";".join(sorted(set([get_partition_from_installed(HOST_OUT_ROOT, PRODUCT_OUT,
diff --git a/tools/mk2bp_partition.py b/tools/mk2bp_partition.py
new file mode 100644
index 0000000..30c1135
--- /dev/null
+++ b/tools/mk2bp_partition.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python3
+
+"""
+The complete list of the remaining Make files in each partition for all lunch targets
+
+How to run?
+python3 $(path-to-file)/mk2bp_partition.py
+"""
+
+from pathlib import Path
+
+import csv
+import datetime
+import os
+import shutil
+import subprocess
+import sys
+import time
+
+def get_top():
+  path = '.'
+  while not os.path.isfile(os.path.join(path, 'build/soong/soong_ui.bash')):
+    if os.path.abspath(path) == '/':
+      sys.exit('Could not find android source tree root.')
+    path = os.path.join(path, '..')
+  return os.path.abspath(path)
+
+# Get the value of a build variable
+def get_build_var(variable, product, build_variant):
+  """Returns the result of the shell command get_build_var."""
+  env = {
+      **os.environ,
+      'TARGET_PRODUCT': product if product else '',
+      'TARGET_BUILD_VARIANT': build_variant if build_variant else '',
+  }
+  return subprocess.run([
+      'build/soong/soong_ui.bash',
+      '--dumpvar-mode',
+      variable
+  ], check=True, capture_output=True, env=env, text=True).stdout.strip()
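+# Illustrative call: get_build_var("PRODUCT_OUT", "aosp_arm64", "userdebug")
+# returns a path along the lines of "out/target/product/<device>".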
+
+def get_make_file_partitions():
+    lunch_targets = set(get_build_var("all_named_products", "", "").split())
+    total_lunch_targets = len(lunch_targets)
+    makefile_by_partition = dict()
+    partitions = set()
+    current_count = 0
+    start_time = time.time()
+    # Targets that cannot be built by running `m`
+    broken_targets = {"mainline_sdk", "ndk"}
+    for lunch_target in sorted(lunch_targets):
+        current_count += 1
+        current_time = time.time()
+        print(current_count, "/", total_lunch_targets, lunch_target, datetime.timedelta(seconds=current_time - start_time))
+        if lunch_target in broken_targets:
+            continue
+        installed_product_out = get_build_var("PRODUCT_OUT", lunch_target, "userdebug")
+        filename = os.path.join(installed_product_out, "mk2bp_remaining.csv")
+        copy_filename = os.path.join(installed_product_out, lunch_target + "_mk2bp_remaining.csv")
+        # Only generate the .csv if it does not already exist
+        if not os.path.exists(copy_filename):
+            bash_cmd = "bash build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=" + lunch_target
+            bash_cmd += " TARGET_BUILD_VARIANT=userdebug " + filename
+            subprocess.run(bash_cmd, shell=True, text=True, check=True, stdout=subprocess.DEVNULL)
+            # Keep a copy of the .csv so a later run does not overwrite it
+            shutil.copyfile(filename, copy_filename)
+
+        # open mk2bp_remaining.csv file
+        with open(copy_filename, "r") as csvfile:
+            reader = csv.reader(csvfile, delimiter=",", quotechar='"')
+            # skip the header row
+            next(reader, None)
+            for row in reader:
+                # read partition information
+                partition = row[2]
+                makefile_by_partition.setdefault(partition, set()).add(row[0])
+                partitions.add(partition)
+
+    # write merged make file list for each partition into a csv file
+    installed_path = Path(installed_product_out).parents[0].as_posix()
+    csv_path = installed_path + "/mk2bp_partition.csv"
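+    # The block below writes rows like the following (paths and counts illustrative):
+    #   system,2
+    #   foo/Android.mk
+    #   bar/Android.mk
+    #   vendor,1
+    #   baz/Android.mk
+    #   The total count of make files is ,3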
+    with open(csv_path, "wt") as csvfile:
+        writer = csv.writer(csvfile, delimiter=",")
+        count_makefile = 0
+        for partition in sorted(partitions):
+            number_file = len(makefile_by_partition[partition])
+            count_makefile += number_file
+            writer.writerow([partition, number_file])
+            for makefile in sorted(makefile_by_partition[partition]):
+                writer.writerow([makefile])
+        row = ["The total count of make files is ", count_makefile]
+        writer.writerow(row)
+
+def main():
+    os.chdir(get_top())
+    get_make_file_partitions()
+
+if __name__ == "__main__":
+    main()