Merge "Fix "posix_spawn: Argument list too long" issue"
diff --git a/core/Makefile b/core/Makefile
index e9bca77..c19c7eb 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -641,26 +641,34 @@
 	$(hide) $(SOONG_TO_CONVERT_SCRIPT) $< >$@
 $(call dist-for-goals,droidcore-unbundled,$(SOONG_TO_CONVERT))
 
+$(PRODUCT_OUT)/product_packages.txt:
+	@rm -f $@
+	echo "" > $@
+	$(foreach x,$(PRODUCT_PACKAGES),echo $(x) >> $@$(newline))
+
 MK2BP_CATALOG_SCRIPT := build/make/tools/mk2bp_catalog.py
+PRODUCT_PACKAGES_TXT := $(PRODUCT_OUT)/product_packages.txt
 MK2BP_REMAINING_HTML := $(PRODUCT_OUT)/mk2bp_remaining.html
 $(MK2BP_REMAINING_HTML): PRIVATE_CODE_SEARCH_BASE_URL := "https://cs.android.com/android/platform/superproject/+/master:"
-$(MK2BP_REMAINING_HTML): $(SOONG_CONV_DATA) $(MK2BP_CATALOG_SCRIPT)
+$(MK2BP_REMAINING_HTML): $(SOONG_CONV_DATA) $(MK2BP_CATALOG_SCRIPT) $(PRODUCT_PACKAGES_TXT)
 	@rm -f $@
 	$(hide) $(MK2BP_CATALOG_SCRIPT) \
 		--device=$(TARGET_DEVICE) \
+		--product-packages=$(PRODUCT_PACKAGES_TXT) \
 		--title="Remaining Android.mk files for $(TARGET_DEVICE)-$(TARGET_BUILD_VARIANT)" \
 		--codesearch=$(PRIVATE_CODE_SEARCH_BASE_URL) \
-		--out_dir="$(OUT_DIR)" \
+		--out-dir="$(OUT_DIR)" \
 		--mode=html \
 		> $@
 $(call dist-for-goals,droidcore-unbundled,$(MK2BP_REMAINING_HTML))
 
 MK2BP_REMAINING_CSV := $(PRODUCT_OUT)/mk2bp_remaining.csv
-$(MK2BP_REMAINING_CSV): $(SOONG_CONV_DATA) $(MK2BP_CATALOG_SCRIPT)
+$(MK2BP_REMAINING_CSV): $(SOONG_CONV_DATA) $(MK2BP_CATALOG_SCRIPT) $(PRODUCT_PACKAGES_TXT)
 	@rm -f $@
 	$(hide) $(MK2BP_CATALOG_SCRIPT) \
 		--device=$(TARGET_DEVICE) \
-		--out_dir="$(OUT_DIR)" \
+		--product-packages=$(PRODUCT_PACKAGES_TXT) \
+		--out-dir="$(OUT_DIR)" \
 		--mode=csv \
 		> $@
 $(call dist-for-goals,droidcore-unbundled,$(MK2BP_REMAINING_CSV))
@@ -6158,25 +6166,39 @@
 # -----------------------------------------------------------------
 # A zip of the symbols directory.  Keep the full paths to make it
 # more obvious where these files came from.
+# Also produces a textproto containing mappings from elf IDs to symbols
+# filename, which will allow finding the appropriate symbols to deobfuscate
+# a stack trace frame.
 #
+
 name := $(TARGET_PRODUCT)
 ifeq ($(TARGET_BUILD_TYPE),debug)
   name := $(name)_debug
 endif
-name := $(name)-symbols-$(FILE_NAME_TAG)
 
-SYMBOLS_ZIP := $(PRODUCT_OUT)/$(name).zip
+# The path to the zip file containing binaries with symbols.
+SYMBOLS_ZIP := $(PRODUCT_OUT)/$(name)-symbols-$(FILE_NAME_TAG).zip
+# The path to a file containing mappings from elf IDs to filenames.
+SYMBOLS_MAPPING := $(PRODUCT_OUT)/$(name)-symbols-mapping-$(FILE_NAME_TAG).textproto
+.KATI_READONLY := SYMBOLS_ZIP SYMBOLS_MAPPING
 # For apps_only build we'll establish the dependency later in build/make/core/main.mk.
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
 $(SYMBOLS_ZIP): $(INTERNAL_ALLIMAGES_FILES) $(updater_dep)
 endif
 $(SYMBOLS_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,symbols)/filelist
-$(SYMBOLS_ZIP): $(SOONG_ZIP)
+$(SYMBOLS_ZIP): PRIVATE_MAPPING_PACKAGING_DIR := $(call intermediates-dir-for,PACKAGING,elf_symbol_mapping)
+$(SYMBOLS_ZIP): $(SOONG_ZIP) $(SYMBOLS_MAP)
 	@echo "Package symbols: $@"
 	$(hide) rm -rf $@ $(PRIVATE_LIST_FILE)
-	$(hide) mkdir -p $(dir $@) $(TARGET_OUT_UNSTRIPPED) $(dir $(PRIVATE_LIST_FILE))
+	$(hide) mkdir -p $(TARGET_OUT_UNSTRIPPED) $(dir $(PRIVATE_LIST_FILE)) $(PRIVATE_MAPPING_PACKAGING_DIR)
+	# Find all of the files in the symbols directory and zip them into the symbols zip.
 	$(hide) find -L $(TARGET_OUT_UNSTRIPPED) -type f | sort >$(PRIVATE_LIST_FILE)
 	$(hide) $(SOONG_ZIP) --ignore_missing_files -d -o $@ -C $(OUT_DIR)/.. -l $(PRIVATE_LIST_FILE)
+	# Find all of the files in the symbols mapping directory and merge them into the symbols mapping textproto.
+	$(hide) find -L $(PRIVATE_MAPPING_PACKAGING_DIR) -type f | sort >$(PRIVATE_LIST_FILE)
+	$(hide) $(SYMBOLS_MAP) -merge $(SYMBOLS_MAPPING) -ignore_missing_files @$(PRIVATE_LIST_FILE)
+$(SYMBOLS_ZIP): .KATI_IMPLICIT_OUTPUTS := $(SYMBOLS_MAPPING)
+
 # -----------------------------------------------------------------
 # A zip of the coverage directory.
 #
@@ -6269,17 +6291,35 @@
 
 #------------------------------------------------------------------
 # A zip of Proguard obfuscation dictionary files.
+# Also produces a textproto containing mappings from the hashes of the
+# dictionary contents (which are also stored in the dex files on the
+# devices) to the filename of the proguard dictionary, which will allow
+# finding the appropriate dictionary to deobfuscate a stack trace frame.
 #
+
+# The path to the zip file containing proguard dictionaries.
 PROGUARD_DICT_ZIP := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict-$(FILE_NAME_TAG).zip
+# The path to the zip file containing mappings from dictionary hashes to filenames.
+PROGUARD_DICT_MAPPING := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict-mapping-$(FILE_NAME_TAG).textproto
+.KATI_READONLY := PROGUARD_DICT_ZIP PROGUARD_DICT_MAPPING
 # For apps_only build we'll establish the dependency later in build/make/core/main.mk.
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
 $(PROGUARD_DICT_ZIP): $(INTERNAL_ALLIMAGES_FILES) $(updater_dep)
 endif
 $(PROGUARD_DICT_ZIP): PRIVATE_PACKAGING_DIR := $(call intermediates-dir-for,PACKAGING,proguard_dictionary)
-$(PROGUARD_DICT_ZIP): $(SOONG_ZIP)
+$(PROGUARD_DICT_ZIP): PRIVATE_MAPPING_PACKAGING_DIR := $(call intermediates-dir-for,PACKAGING,proguard_dictionary_mapping)
+$(PROGUARD_DICT_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,proguard_dictionary_filelist)/filelist
+$(PROGUARD_DICT_ZIP): $(SOONG_ZIP) $(SYMBOLS_MAP)
 	@echo "Packaging Proguard obfuscation dictionary files."
-	mkdir -p $(dir $@) $(PRIVATE_PACKAGING_DIR)
-	$(SOONG_ZIP) --ignore_missing_files -d -o $@ -C $(PRIVATE_PACKAGING_DIR) -P out/target/common/obj -D $(PRIVATE_PACKAGING_DIR)
+	rm -rf $@ $(PRIVATE_LIST_FILE)
+	mkdir -p $(PRIVATE_PACKAGING_DIR) $(PRIVATE_MAPPING_PACKAGING_DIR) $(dir $(PRIVATE_LIST_FILE))
+	# Zip all of the files in the proguard dictionary directory.
+	$(SOONG_ZIP) --ignore_missing_files -d -o $@ -C $(PRIVATE_PACKAGING_DIR) -D $(PRIVATE_PACKAGING_DIR)
+	# Find all of the files in the proguard dictionary mapping directory and merge them into the mapping textproto.
+	# Strip the PRIVATE_PACKAGING_DIR off the filenames to match soong_zip's -C argument.
+	$(hide) find -L $(PRIVATE_MAPPING_PACKAGING_DIR) -type f | sort >$(PRIVATE_LIST_FILE)
+	$(SYMBOLS_MAP) -merge $(PROGUARD_DICT_MAPPING) -strip_prefix $(PRIVATE_PACKAGING_DIR)/ -ignore_missing_files @$(PRIVATE_LIST_FILE)
+$(PROGUARD_DICT_ZIP): .KATI_IMPLICIT_OUTPUTS := $(PROGUARD_DICT_MAPPING)
 
 #------------------------------------------------------------------
 # A zip of Proguard usage files.
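
For context on the commit title: passing the full $(PRODUCT_PACKAGES) list on the command line can exceed the kernel's argument-size limit and fail with "posix_spawn: Argument list too long", so the list is written to product_packages.txt and only the file path is passed to mk2bp_catalog.py. Below is a minimal sketch of the consuming side, assuming a newline-separated package list; it is illustrative only, not the actual mk2bp_catalog.py code.

#!/usr/bin/env python3
# Sketch: accept the package list as a file instead of as command-line
# arguments, sidestepping ARG_MAX ("posix_spawn: Argument list too long").
import argparse

def parse_args():
    parser = argparse.ArgumentParser()
    # Same pattern as mk2bp_catalog.py's --product-packages flag.
    parser.add_argument("--product-packages", type=argparse.FileType("r"),
                        default=None)
    return parser.parse_args()

def read_product_packages(handle):
    if handle is None:
        return set()
    # One package name per line; blank lines (like the leading echo "") are ignored.
    return {line.strip() for line in handle if line.strip()}

if __name__ == "__main__":
    args = parse_args()
    print(len(read_product_packages(args.product_packages)), "packages")
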
diff --git a/core/board_config.mk b/core/board_config.mk
index 72a8044..dc50a68 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -254,7 +254,7 @@
   endif
 
   $(shell build/soong/scripts/update_out $(OUT_DIR)/rbc/rbc_board_config_results.mk \
-    $(OUT_DIR)/rbcrun RBC_OUT="make,global" $(OUT_DIR)/rbc/boardlauncher.rbc)
+    $(OUT_DIR)/rbcrun RBC_OUT="make" $(OUT_DIR)/rbc/boardlauncher.rbc)
   ifneq ($(.SHELLSTATUS),0)
     $(error board configuration runner failed: $(.SHELLSTATUS))
   endif
diff --git a/core/config.mk b/core/config.mk
index e2bdcbd..205a2fb 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -620,6 +620,7 @@
 OTA_FROM_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/ota_from_target_files$(HOST_EXECUTABLE_SUFFIX)
 SPARSE_IMG := $(HOST_OUT_EXECUTABLES)/sparse_img$(HOST_EXECUTABLE_SUFFIX)
 CHECK_PARTITION_SIZES := $(HOST_OUT_EXECUTABLES)/check_partition_sizes$(HOST_EXECUTABLE_SUFFIX)
+SYMBOLS_MAP := $(HOST_OUT_EXECUTABLES)/symbols_map
 
 PROGUARD_HOME := external/proguard
 PROGUARD := $(PROGUARD_HOME)/bin/proguard.sh
diff --git a/core/definitions.mk b/core/definitions.mk
index 77804e8..7308a38 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -3173,6 +3173,50 @@
 fi
 endef
 
+# Copy an unstripped binary to the symbols directory while also extracting
+# a hash mapping to the mapping directory.
+# $(1): unstripped intermediates file
+# $(2): path in symbols directory
+define copy-unstripped-elf-file-with-mapping
+$(call _copy-symbols-file-with-mapping,$(1),$(2),\
+  elf,$(patsubst $(TARGET_OUT_UNSTRIPPED)/%,$(call intermediates-dir-for,PACKAGING,elf_symbol_mapping)/%,$(2).textproto))
+endef
+
+# Copy an R8 dictionary to the packaging directory while also extracting
+# a hash mapping to the mapping directory.
+# $(1): unstripped intermediates file
+# $(2): path in packaging directory
+# $(3): path in mappings packaging directory
+define copy-r8-dictionary-file-with-mapping
+$(call _copy-symbols-file-with-mapping,$(1),$(2),r8,$(3))
+endef
+
+# Copy an unstripped binary or R8 dictionary to the symbols directory
+# while also extracting a hash mapping to the mapping directory.
+# $(1): unstripped intermediates file
+# $(2): path in symbols directory
+# $(3): file type (elf or r8)
+# $(4): path in the mappings directory
+define _copy-symbols-file-with-mapping
+$(2): .KATI_IMPLICIT_OUTPUTS := $(4)
+$(2): $(SYMBOLS_MAP)
+$(2): $(1)
+	@echo "Copy symbols with mapping: $$@"
+	$$(copy-file-to-target)
+	$(SYMBOLS_MAP) -$(strip $(3)) $(2) -write_if_changed $(4)
+.KATI_RESTAT: $(2)
+endef
+
+# Returns the directory to copy proguard dictionaries into
+define local-proguard-dictionary-directory
+$(call intermediates-dir-for,PACKAGING,proguard_dictionary)/out/target/common/obj/$(LOCAL_MODULE_CLASS)/$(LOCAL_MODULE)_intermediates
+endef
+
+# Returns the directory to copy proguard dictionary mappings into
+define local-proguard-dictionary-mapping-directory
+$(call intermediates-dir-for,PACKAGING,proguard_dictionary_mapping)/out/target/common/obj/$(LOCAL_MODULE_CLASS)/$(LOCAL_MODULE)_intermediates
+endef
+
 
 ###########################################################
 ## Commands to call R8
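
The new _copy-symbols-file-with-mapping macro copies each unstripped binary or R8 dictionary and, via $(SYMBOLS_MAP), writes a small per-file mapping (an elf build ID or dictionary hash pointing at the copied file); the packaging rules earlier in this change then merge those per-file mappings into one textproto per build. The Python sketch below only illustrates that merge-then-lookup idea; the JSON layout is hypothetical, since the real data is textproto handled by the symbols_map host tool.

# Hypothetical illustration of merging per-file ID->path mappings and looking
# up the right symbols file for a stack-trace frame. The real build stores
# textproto files and merges them with the symbols_map tool.
import json
from pathlib import Path

def merge_mappings(mapping_dir):
    merged = {}
    for path in sorted(Path(mapping_dir).rglob("*.json")):
        # Each small file maps one identifier to one copied filename.
        merged.update(json.loads(path.read_text()))
    return merged

def find_symbols_file(merged, identifier):
    # identifier: an elf build ID, or the hash of an R8 dictionary.
    return merged.get(identifier)

if __name__ == "__main__":
    demo = {"buildid-abc123": "symbols/system/lib64/libdemo.so"}
    print(find_symbols_file(demo, "buildid-abc123"))
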
diff --git a/core/dumpconfig.mk b/core/dumpconfig.mk
index 9b1f2c2..640fe10 100644
--- a/core/dumpconfig.mk
+++ b/core/dumpconfig.mk
@@ -117,7 +117,6 @@
 	9 \
 	LOCAL_PATH \
 	MAKEFILE_LIST \
-	PARENT_PRODUCT_FILES \
 	current_mk \
 	_eiv_ev \
 	_eiv_i \
diff --git a/core/dynamic_binary.mk b/core/dynamic_binary.mk
index 52d7ddc..0d2cd7f 100644
--- a/core/dynamic_binary.mk
+++ b/core/dynamic_binary.mk
@@ -55,9 +55,7 @@
 endif
 symbolic_input := $(inject_module)
 symbolic_output := $(my_unstripped_path)/$(my_installed_module_stem)
-$(symbolic_output) : $(symbolic_input)
-	@echo "target Symbolic: $(PRIVATE_MODULE) ($@)"
-	$(copy-file-to-target)
+$(eval $(call copy-unstripped-elf-file-with-mapping,$(symbolic_input),$(symbolic_output)))
 
 ###########################################################
 ## Store breakpad symbols
diff --git a/core/main.mk b/core/main.mk
index 72958da..929e052 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -1234,33 +1234,9 @@
 #   See the select-bitness-of-required-modules definition.
 # $(1): product makefile
 
-# TODO(asmundak):
-# `product-installed-files` and `host-installed-files` macros below used to
-# call `get-product-var` directly to obtain per-file configuration variable
-# values (the value of variable FOO is fetched from PRODUCT.<product-makefile>.FOO).
-# Starlark-based configuration does not maintain per-file variable variable
-# values. To work around this problem, we utilize the fact that
-# `product-installed-files` and `host-installed-files` are called only in
-# two places:
-# 1. For the top-level product makefile (in this file). In this case
-#    $(call get-product-var <product>, FOO) is the same as $(FOO) as the
-#    product configuration has been run already. Therefore we define
-#    _product-var macro to pick the values directly from product config
-#    variables when using Starlark-based configuration.
-# 2. To check the path requirements (in artifact_path_requirements.mk).
-#    Starlark-based configuration does not perform this check at the moment.
-# In the longer run most of the logic of this file will be moved to the
-# Starlark.
-
-ifndef RBC_PRODUCT_CONFIG
 define _product-var
   $(call get-product-var,$(1),$(2))
 endef
-else
-define _product-var
-  $(call $(2))
-endef
-endif
 
 define product-installed-files
   $(eval _pif_modules := \
@@ -1375,7 +1351,7 @@
 
   # Verify the artifact path requirements made by included products.
   is_asan := $(if $(filter address,$(SANITIZE_TARGET)),true)
-  ifeq (,$(or $(is_asan),$(DISABLE_ARTIFACT_PATH_REQUIREMENTS),$(RBC_PRODUCT_CONFIG),$(RBC_BOARD_CONFIG)))
+  ifeq (,$(or $(is_asan),$(DISABLE_ARTIFACT_PATH_REQUIREMENTS)))
     include $(BUILD_SYSTEM)/artifact_path_requirements.mk
   endif
 else
@@ -1761,13 +1737,13 @@
   endif
 
   $(PROGUARD_DICT_ZIP) : $(apps_only_installed_files)
-  $(call dist-for-goals,apps_only, $(PROGUARD_DICT_ZIP))
+  $(call dist-for-goals,apps_only, $(PROGUARD_DICT_ZIP) $(PROGUARD_DICT_MAPPING))
 
   $(PROGUARD_USAGE_ZIP) : $(apps_only_installed_files)
   $(call dist-for-goals,apps_only, $(PROGUARD_USAGE_ZIP))
 
   $(SYMBOLS_ZIP) : $(apps_only_installed_files)
-  $(call dist-for-goals,apps_only, $(SYMBOLS_ZIP))
+  $(call dist-for-goals,apps_only, $(SYMBOLS_ZIP) $(SYMBOLS_MAPPING))
 
   $(COVERAGE_ZIP) : $(apps_only_installed_files)
   $(call dist-for-goals,apps_only, $(COVERAGE_ZIP))
@@ -1818,7 +1794,9 @@
     $(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET) \
     $(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET) \
     $(SYMBOLS_ZIP) \
+    $(SYMBOLS_MAPPING) \
     $(PROGUARD_DICT_ZIP) \
+    $(PROGUARD_DICT_MAPPING) \
     $(PROGUARD_USAGE_ZIP) \
     $(COVERAGE_ZIP) \
     $(INSTALLED_FILES_FILE) \
@@ -1962,6 +1940,7 @@
 $(call dist-for-goals,sdk, \
     $(ALL_SDK_TARGETS) \
     $(SYMBOLS_ZIP) \
+    $(SYMBOLS_MAPPING) \
     $(COVERAGE_ZIP) \
     $(APPCOMPAT_ZIP) \
     $(INSTALLED_BUILD_PROP_TARGET) \
diff --git a/core/product-graph.mk b/core/product-graph.mk
index 6d51db1..63e9040 100644
--- a/core/product-graph.mk
+++ b/core/product-graph.mk
@@ -17,7 +17,7 @@
 # the sort also acts as a strip to remove the single space entries that creep in because of the evals
 define gather-all-products
 $(eval _all_products_visited := )\
-$(sort $(call all-products-inner, $(PARENT_PRODUCT_FILES)))
+$(sort $(call all-products-inner, $(PRODUCTS)))
 endef
 
 define all-products-inner
@@ -78,7 +78,6 @@
 $(products_graph): PRIVATE_PRODUCTS_FILTER := $(products_list)
 
 $(products_graph): $(this_makefile)
-ifeq (,$(RBC_PRODUCT_CONFIG)$(RBC_NO_PRODUCT_GRAPH)$(RBC_BOARD_CONFIG))
 	@echo Product graph DOT: $@ for $(PRIVATE_PRODUCTS_FILTER)
 	$(hide) echo 'digraph {' > $@.in
 	$(hide) echo 'graph [ ratio=.5 ];' >> $@.in
@@ -87,20 +86,9 @@
 	$(foreach p,$(PRIVATE_PRODUCTS),$(call emit-product-node-props,$(p),$@.in))
 	$(hide) echo '}' >> $@.in
 	$(hide) build/make/tools/filter-product-graph.py $(PRIVATE_PRODUCTS_FILTER) < $@.in > $@
-else
-	@echo RBC_PRODUCT_CONFIG and RBC_NO_PRODUCT_GRAPH should be unset to generate product graph
-	false
-endif
-
-ifeq (,$(RBC_PRODUCT_CONFIG)$(RBC_NO_PRODUCT_GRAPH)$(RBC_BOARD_CONFIG))
 
 .PHONY: product-graph
 product-graph: $(products_graph)
 	@echo Product graph .dot file: $(products_graph)
 	@echo Command to convert to pdf: dot -Tpdf -Nshape=box -o $(OUT_DIR)/products.pdf $(products_graph)
 	@echo Command to convert to svg: dot -Tsvg -Nshape=box -o $(OUT_DIR)/products.svg $(products_graph)
-else
-.PHONY: product-graph
-	@echo RBC_PRODUCT_CONFIG and RBC_NO_PRODUCT_GRAPH should be unset to generate product graph
-	false
-endif
diff --git a/core/product.mk b/core/product.mk
index 1f304cd..f316114 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -396,12 +396,11 @@
 # $(1): product to inherit
 #
 # To be called from product makefiles, and is later evaluated during the import-nodes
-# call below. It does three things:
+# call below. It does the following:
 #  1. Inherits all of the variables from $1.
 #  2. Records the inheritance in the .INHERITS_FROM variable
-#  3. Records the calling makefile in PARENT_PRODUCT_FILES
 #
-# (2) and (3) can be used together to reconstruct the include hierarchy
+# (2) and the PRODUCTS variable can be used together to reconstruct the include hierarchy
 # See e.g. product-graph.mk for an example of this.
 #
 define inherit-product
@@ -416,7 +415,6 @@
     $(eval current_mk := $(strip $(word 1,$(_include_stack)))) \
     $(eval inherit_var := PRODUCTS.$(current_mk).INHERITS_FROM) \
     $(eval $(inherit_var) := $(sort $($(inherit_var)) $(np))) \
-    $(eval PARENT_PRODUCT_FILES := $(sort $(PARENT_PRODUCT_FILES) $(current_mk))) \
     $(call dump-inherit,$(strip $(word 1,$(_include_stack))),$(1)) \
     $(call dump-config-vals,$(current_mk),inherit))
 endef
diff --git a/core/product_config.mk b/core/product_config.mk
index be4aded..939a022 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -257,16 +257,16 @@
     $(error product configuration converter failed: $(.SHELLSTATUS))
   endif
   include $(OUT_DIR)/rbc/rbc_product_config_results.mk
-  PRODUCTS += $(current_product_makefile)
 endif
 endif  # Import all or just the current product makefile
 
-ifndef RBC_PRODUCT_CONFIG
 # Quick check
 $(check-all-products)
-endif
 
-ifeq ($(SKIP_ARTIFACT_PATH_REQUIREMENT_PRODUCTS_CHECK),)
+# This step was already handled in the RBC product configuration.
+# Since the equivalent Starlark code will not add the partial products to
+# the PRODUCTS variable, it's ok for them to be set before check-all-products.
+ifeq ($(RBC_PRODUCT_CONFIG)$(SKIP_ARTIFACT_PATH_REQUIREMENT_PRODUCTS_CHECK),)
 # Import all the products that have made artifact path requirements, so that we can verify
 # the artifacts they produce.
 # These are imported after check-all-products because some of them might not be real products.
@@ -284,7 +284,6 @@
 $(dump-products)
 endif
 
-ifndef RBC_PRODUCT_CONFIG
 # Convert a short name like "sooner" into the path to the product
 # file defining that product.
 #
@@ -297,9 +296,6 @@
 ############################################################################
 # Strip and assign the PRODUCT_ variables.
 $(call strip-product-vars)
-else
-INTERNAL_PRODUCT := $(current_product_makefile)
-endif
 
 current_product_makefile :=
 all_product_makefiles :=
diff --git a/core/product_config.rbc b/core/product_config.rbc
index 0187251..11064f3 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -70,14 +70,7 @@
 
 def _printvars(state):
     """Prints configuration and global variables."""
-    (globals, cfg, globals_base) = state
-    for attr, val in sorted(cfg.items()):
-        __print_attr(attr, val)
-    if _options.print_globals:
-        print()
-        _printglobals(globals, globals_base)
-
-def _printglobals(globals, globals_base):
+    (globals, globals_base) = state
     for attr, val in sorted(globals.items()):
         if attr == _soong_config_namespaces_key:
             __print_attr("SOONG_CONFIG_NAMESPACES", val.keys())
@@ -164,6 +157,13 @@
         handle = __h_new()
         pcm(globals, handle)
 
+        if handle.artifact_path_requirements:
+            globals["PRODUCTS."+name+".mk.ARTIFACT_PATH_REQUIREMENTS"] = handle.artifact_path_requirements
+            globals["PRODUCTS."+name+".mk.ARTIFACT_PATH_ALLOWED_LIST"] = handle.artifact_path_allowed_list
+            globals["PRODUCTS."+name+".mk.ARTIFACT_PATH_REQUIREMENT_IS_RELAXED"] = "true" if handle.artifact_path_requirement_is_relaxed[0] else ""
+            globals.setdefault("ARTIFACT_PATH_REQUIREMENT_PRODUCTS", [])
+            globals["ARTIFACT_PATH_REQUIREMENT_PRODUCTS"] += [name+".mk"]
+
         # Now we know everything about this PCM, record it in 'configs'.
         children = handle.inherited_modules
         if _options.trace_modules:
@@ -218,7 +218,23 @@
         _percolate_inherited(configs, pcm_name, cfg, children_names)
         configs[pcm_name] = pcm, cfg, children_names, True
 
-    return (globals, configs[top_pcm_name][1], globals_base)
+        if (pcm_name + ".mk") in globals.get("ARTIFACT_PATH_REQUIREMENT_PRODUCTS", []):
+            for var, val in cfg.items():
+                globals["PRODUCTS."+pcm_name+".mk."+var] = val
+
+    # Copy product config variables from the cfg dictionary to the
+    # PRODUCTS.<top_level_makefile_name>.<var_name> global variables.
+    for var, val in configs[top_pcm_name][1].items():
+        globals["PRODUCTS."+top_pcm_name+".mk."+var] = val
+
+    # Record inheritance hierarchy in PRODUCTS.<file>.INHERITS_FROM variables.
+    # This is required for m product-graph.
+    for config in configs:
+        if len(configs[config][2]) > 0:
+            globals["PRODUCTS."+config+".mk.INHERITS_FROM"] = sorted([x + ".mk" for x in configs[config][2]])
+    globals["PRODUCTS"] = __words(globals.get("PRODUCTS", [])) + [top_pcm_name + ".mk"]
+
+    return (globals, globals_base)
 
 
 def _dictionary_difference(a, b):
@@ -237,7 +253,14 @@
     input_variables_init(globals_base, h_base)
     input_variables_init(globals, h)
     board_config_init(globals, h)
-    return (globals, _dictionary_difference(h.cfg, h_base.cfg), globals_base)
+
+    # Board configuration files aren't really supposed to change
+    # product configuration variables, but some do. You lose the
+    # inheritance features of the product config variables if you do.
+    for var, value in _dictionary_difference(h.cfg, h_base.cfg).items():
+        globals[var] = value
+
+    return (globals, globals_base)
 
 
 def _substitute_inherited(configs, pcm_name, cfg):
@@ -407,7 +430,10 @@
     return struct(
         cfg = dict(),
         inherited_modules = dict(),
-        default_list_value = list()
+        default_list_value = list(),
+        artifact_path_requirements = list(),
+        artifact_path_allowed_list = list(),
+        artifact_path_requirement_is_relaxed = [False], # as a list so that we can reassign it
     )
 
 def __h_cfg(handle):
@@ -552,13 +578,17 @@
     """
     return " ".join([__base(w) for w in __words(paths)])
 
-def _require_artifacts_in_path(paths, allowed_paths):
-    """TODO."""
-    pass
+def _require_artifacts_in_path(handle, paths, allowed_paths):
+    """Equivalent to require-artifacts-in-path in Make."""
+    handle.artifact_path_requirements.clear()
+    handle.artifact_path_requirements.extend(__words(paths))
+    handle.artifact_path_allowed_list.clear()
+    handle.artifact_path_allowed_list.extend(__words(allowed_paths))
 
-def _require_artifacts_in_path_relaxed(paths, allowed_paths):
-    """TODO."""
-    pass
+def _require_artifacts_in_path_relaxed(handle, paths, allowed_paths):
+    """Equivalent to require-artifacts-in-path-relaxed in Make."""
+    _require_artifacts_in_path(handle, paths, allowed_paths)
+    handle.artifact_path_requirement_is_relaxed[0] = True
 
 def _expand_wildcard(pattern):
     """Expands shell wildcard pattern."""
@@ -676,6 +706,20 @@
     return out_words if type(s) == "list" else " ".join(out_words)
 
 
+def _mksort(input):
+    """Emulate Make's sort.
+
+    This differs from a regular sort in that it also strips
+    the input and removes duplicate words.
+    """
+    input = sorted(__words(input))
+    result = []
+    for w in input:
+        if len(result) == 0 or result[-1] != w:
+            result.append(w)
+    return result
+
+
 def _mkstrip(s):
     """Emulates Make's strip.
 
@@ -726,7 +770,6 @@
     """Returns struct containing runtime global settings."""
     settings = dict(
         format = "pretty",
-        print_globals = False,
         rearrange = "",
         trace_modules = False,
         trace_variables = [],
@@ -740,7 +783,8 @@
         elif x == "pretty" or x == "make":
             settings["format"] = x
         elif x == "global":
-            settings["print_globals"] = True
+            # TODO: Remove this, kept for backwards compatibility
+            pass
         elif x != "":
             fail("RBC_OUT: got %s, should be one of: [pretty|make] [sort|unique]" % x)
     for x in getattr(rblf_cli, "RBC_DEBUG", "").split(","):
@@ -785,11 +829,11 @@
     mkerror = _mkerror,
     mkpatsubst = _mkpatsubst,
     mkwarning = _mkwarning,
+    mksort = _mksort,
     mkstrip = _mkstrip,
     mksubst = _mksubst,
     notdir = _notdir,
     printvars = _printvars,
-    printglobals = _printglobals,
     product_configuration = _product_configuration,
     board_configuration = _board_configuration,
     product_copy_files_by_pattern = _product_copy_files_by_pattern,
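
Note the handle layout added in __h_new: Starlark structs cannot have their fields reassigned, so artifact_path_requirement_is_relaxed is a one-element list used as a mutable cell, and _require_artifacts_in_path mutates the handle's lists in place rather than rebinding them. A plain-Python sketch of the same pattern (namedtuple fields are likewise immutable) follows.

# Sketch of the mutable-cell pattern: the record's fields can't be rebound,
# but the lists they hold can be mutated in place.
from collections import namedtuple

Handle = namedtuple("Handle", ["artifact_path_requirements",
                               "artifact_path_allowed_list",
                               "artifact_path_requirement_is_relaxed"])

def new_handle():
    return Handle([], [], [False])  # [False] acts as a writable cell

def require_artifacts_in_path(handle, paths, allowed_paths):
    handle.artifact_path_requirements.clear()
    handle.artifact_path_requirements.extend(paths.split())
    handle.artifact_path_allowed_list.clear()
    handle.artifact_path_allowed_list.extend(allowed_paths.split())

def require_artifacts_in_path_relaxed(handle, paths, allowed_paths):
    require_artifacts_in_path(handle, paths, allowed_paths)
    handle.artifact_path_requirement_is_relaxed[0] = True

h = new_handle()
require_artifacts_in_path_relaxed(h, "system/ product/", "system/etc/allowed.txt")
print(h.artifact_path_requirements, h.artifact_path_requirement_is_relaxed[0])
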
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index 006e1dc..d771d22 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -100,18 +100,24 @@
 endif
 
 ifdef LOCAL_SOONG_PROGUARD_DICT
+  my_proguard_dictionary_directory := $(local-proguard-dictionary-directory)
+  my_proguard_dictionary_mapping_directory := $(local-proguard-dictionary-mapping-directory)
   $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
     $(intermediates.COMMON)/proguard_dictionary))
-  $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
-    $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary))
+  $(eval $(call copy-r8-dictionary-file-with-mapping,\
+    $(LOCAL_SOONG_PROGUARD_DICT),\
+    $(my_proguard_dictionary_directory)/proguard_dictionary,\
+    $(my_proguard_dictionary_mapping_directory)/proguard_dictionary.textproto))
   $(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),\
-    $(call local-packaging-dir,proguard_dictionary)/classes.jar))
+    $(my_proguard_dictionary_directory)/classes.jar))
   $(call add-dependency,$(LOCAL_BUILT_MODULE),\
     $(intermediates.COMMON)/proguard_dictionary)
   $(call add-dependency,$(LOCAL_BUILT_MODULE),\
-    $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary)
+    $(my_proguard_dictionary_directory)/proguard_dictionary)
   $(call add-dependency,$(LOCAL_BUILT_MODULE),\
-    $(call local-packaging-dir,proguard_dictionary)/classes.jar)
+    $(my_proguard_dictionary_mapping_directory)/proguard_dictionary.textproto)
+  $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+    $(my_proguard_dictionary_directory)/classes.jar)
 endif
 
 ifdef LOCAL_SOONG_PROGUARD_USAGE_ZIP
diff --git a/core/soong_cc_rust_prebuilt.mk b/core/soong_cc_rust_prebuilt.mk
index ca52374..7a177ff 100644
--- a/core/soong_cc_rust_prebuilt.mk
+++ b/core/soong_cc_rust_prebuilt.mk
@@ -184,7 +184,7 @@
       # drop /root as /root is mounted as /
       my_unstripped_path := $(patsubst $(TARGET_OUT_UNSTRIPPED)/root/%,$(TARGET_OUT_UNSTRIPPED)/%, $(my_unstripped_path))
       symbolic_output := $(my_unstripped_path)/$(my_installed_module_stem)
-      $(eval $(call copy-one-file,$(LOCAL_SOONG_UNSTRIPPED_BINARY),$(symbolic_output)))
+      $(eval $(call copy-unstripped-elf-file-with-mapping,$(LOCAL_SOONG_UNSTRIPPED_BINARY),$(symbolic_output)))
       $(LOCAL_BUILT_MODULE): | $(symbolic_output)
 
       ifeq ($(BREAKPAD_GENERATE_SYMBOLS),true)
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index b819cdc..a8f475f 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -62,18 +62,24 @@
 endif
 
 ifdef LOCAL_SOONG_PROGUARD_DICT
+  my_proguard_dictionary_directory := $(local-proguard-dictionary-directory)
+  my_proguard_dictionary_mapping_directory := $(local-proguard-dictionary-mapping-directory)
   $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
     $(intermediates.COMMON)/proguard_dictionary))
-  $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
-    $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary))
+  $(eval $(call copy-r8-dictionary-file-with-mapping,\
+    $(LOCAL_SOONG_PROGUARD_DICT),\
+    $(my_proguard_dictionary_directory)/proguard_dictionary,\
+    $(my_proguard_dictionary_mapping_directory)/proguard_dictionary.textproto))
   $(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),\
-    $(call local-packaging-dir,proguard_dictionary)/classes.jar))
+    $(my_proguard_dictionary_directory)/classes.jar))
   $(call add-dependency,$(common_javalib.jar),\
     $(intermediates.COMMON)/proguard_dictionary)
   $(call add-dependency,$(common_javalib.jar),\
-    $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary)
+    $(my_proguard_dictionary_directory)/proguard_dictionary)
   $(call add-dependency,$(common_javalib.jar),\
-    $(call local-packaging-dir,proguard_dictionary)/classes.jar)
+    $(my_proguard_dictionary_mapping_directory)/proguard_dictionary.textproto)
+  $(call add-dependency,$(common_javalib.jar),\
+    $(my_proguard_dictionary_directory)/classes.jar)
 endif
 
 ifdef LOCAL_SOONG_PROGUARD_USAGE_ZIP
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index 3b348bd..a7910ae 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -80,6 +80,7 @@
   $(MERGE_ZIPS) \
   $(SOONG_ZIP) \
   $(host_shared_libs) \
+  $(test_suite_extra_deps) \
 
 compatibility_zip_resources := $(out_dir)/tools $(out_dir)/testcases $(out_dir)/lib $(out_dir)/lib64
 
@@ -121,7 +122,7 @@
 	rm -f $(PRIVATE_tests_list)
 
 $(call declare-1p-container,$(compatibility_zip),)
-$(call declare-container-license-deps,$(compatibility_zip),$(compatibility_zip_deps) $(test_suite_jdk),$(out_dir)/:/)
+$(call declare-container-license-deps,$(compatibility_zip),$(compatibility_zip_deps) $(test_suite_jdk), $(out_dir)/:/)
 
 $(eval $(call html-notice-rule,$(test_suite_notice_html),"Test suites","Notices for files contained in the test suites filesystem image:",$(compatibility_zip),$(compatibility_zip)))
 $(eval $(call text-notice-rule,$(test_suite_notice_txt),"Test suites","Notices for files contained in the test suites filesystem image:",$(compatibility_zip),$(compatibility_zip)))
@@ -139,3 +140,4 @@
 test_suite_jdk :=
 test_suite_jdk_dir :=
 host_shared_libs :=
+test_suite_extra_deps :=
diff --git a/core/tasks/tools/vts_package_utils.mk b/core/tasks/tools/vts_package_utils.mk
index 47bf29c..f1159b3 100644
--- a/core/tasks/tools/vts_package_utils.mk
+++ b/core/tasks/tools/vts_package_utils.mk
@@ -29,5 +29,6 @@
       $(eval my_copy_dest := $(patsubst data/%,DATA/%,\
                                $(patsubst system/%,DATA/%,\
                                    $(patsubst $(PRODUCT_OUT)/%,%,$(ins)))))\
+      $(eval ALL_TARGETS.$(2)/$(my_copy_dest).META_LIC := $(if $(strip $(ALL_MODULES.$(m).META_LIC)),$(ALL_MODULES.$(m).META_LIC),$(ALL_MODULES.$(m).DELAYED_META_LIC)))\
       $(bui):$(2)/$(my_copy_dest))))
 endef
diff --git a/core/tasks/vts-core-tests.mk b/core/tasks/vts-core-tests.mk
index 3c838b5..befde87 100644
--- a/core/tasks/vts-core-tests.mk
+++ b/core/tasks/vts-core-tests.mk
@@ -35,14 +35,14 @@
 
 copy_kernel_tests := $(call copy-many-files,$(kernel_test_copy_pairs))
 
+test_suite_extra_deps := $(copy_kernel_tests)
+
 # PHONY target to be used to build and test `vts_kernel_tests` without building full vts
 .PHONY: vts_kernel_tests
 vts_kernel_tests: $(copy_kernel_tests)
 
 include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
 
-$(compatibility_zip): $(copy_kernel_tests)
-
 .PHONY: vts
 vts: $(compatibility_zip) $(compatibility_tests_list_zip)
 $(call dist-for-goals, vts, $(compatibility_zip) $(compatibility_tests_list_zip))
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 038b9c4..c8b381d 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -98,7 +98,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-    PLATFORM_SECURITY_PATCH := 2022-03-05
+    PLATFORM_SECURITY_PATCH := 2022-04-05
 endif
 .KATI_READONLY := PLATFORM_SECURITY_PATCH
 
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index 8e062ba..45cd515 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -11,8 +11,11 @@
 # This flag is set by mainline but isn't desired for GSI.
 BOARD_USES_SYSTEM_OTHER_ODEX :=
 
-# system.img is always ext4 and non-sparsed.
+# system.img is ext4/erofs and non-sparsed.
+GSI_FILE_SYSTEM_TYPE ?= ext4
+BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE := $(GSI_FILE_SYSTEM_TYPE)
 TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
+TARGET_USERIMAGES_SPARSE_EROFS_DISABLED := true
 
 # GSI also includes make_f2fs to support userdata parition in f2fs
 # for some devices
diff --git a/tests/run.rbc b/tests/run.rbc
index 82a5e72..58cc4d6 100644
--- a/tests/run.rbc
+++ b/tests/run.rbc
@@ -32,6 +32,9 @@
         fail("Expected '%s', got '%s'" % (expected, actual))
 
 # Unit tests for non-trivial runtime functions
+assert_eq(["a", "b", "c"], rblf.mksort("b a    c c"))
+assert_eq(["a", "b", "c"], rblf.mksort(["b", "a", "c", "c"]))
+
 assert_eq("", rblf.mkstrip(" \n \t    "))
 assert_eq("a b c", rblf.mkstrip("  a b   \n  c \t"))
 assert_eq(1, rblf.mkstrip(1))
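
For reference, the behavior asserted above, restated in plain Python (the Starlark implementation in product_config.rbc is semantically the same): rblf.mksort splits a string input into words, sorts, drops duplicates, and always returns a list.

def mksort(value):
    # Make's $(sort): split into words, sort, and remove duplicates.
    words = value.split() if isinstance(value, str) else list(value)
    result = []
    for word in sorted(words):
        if not result or result[-1] != word:
            result.append(word)
    return result

assert mksort("b a    c c") == ["a", "b", "c"]
assert mksort(["b", "a", "c", "c"]) == ["a", "b", "c"]
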
diff --git a/tools/mk2bp_catalog.py b/tools/mk2bp_catalog.py
index c2afb9b..3fc6236 100755
--- a/tools/mk2bp_catalog.py
+++ b/tools/mk2bp_catalog.py
@@ -308,19 +308,31 @@
     print("""<th class="Count Warning">%s</th>""" % analyzer.title)
   print("      </tr>")
 
+# Get all modules in $(PRODUCT_PACKAGES) and their corresponding deps
+def get_module_product_packages_plus_deps(initial_modules, result, soong_data):
+  for module in initial_modules:
+    if module in result:
+      continue
+    result.add(module)
+    if module in soong_data.deps:
+      get_module_product_packages_plus_deps(soong_data.deps[module], result, soong_data)
+
 def main():
   parser = argparse.ArgumentParser(description="Info about remaining Android.mk files.")
   parser.add_argument("--device", type=str, required=True,
                       help="TARGET_DEVICE")
+  parser.add_argument("--product-packages", type=argparse.FileType('r'),
+                      default=None,
+                      help="PRODUCT_PACKAGES")
   parser.add_argument("--title", type=str,
                       help="page title")
   parser.add_argument("--codesearch", type=str,
                       default="https://cs.android.com/android/platform/superproject/+/master:",
                       help="page title")
-  parser.add_argument("--out_dir", type=str,
+  parser.add_argument("--out-dir", type=str,
                       default=None,
                       help="Equivalent of $OUT_DIR, which will also be checked if"
-                        + " --out_dir is unset. If neither is set, default is"
+                        + " --out-dir is unset. If neither is set, default is"
                         + " 'out'.")
   parser.add_argument("--mode", type=str,
                       default="html",
@@ -354,16 +366,25 @@
       continue
     all_makefiles[filename] = Makefile(filename)
 
+  # Get all the modules in $(PRODUCT_PACKAGES) and their corresponding deps
+  product_package_modules_plus_deps = set()
+  if args.product_packages:
+    product_package_top_modules = args.product_packages.read().strip().split('\n')
+    get_module_product_packages_plus_deps(product_package_top_modules, product_package_modules_plus_deps, soong)
+
   if args.mode == "html":
-    HtmlProcessor(args=args, soong=soong, all_makefiles=all_makefiles).execute()
+    HtmlProcessor(args=args, soong=soong, all_makefiles=all_makefiles,
+        product_packages_modules=product_package_modules_plus_deps).execute()
   elif args.mode == "csv":
-    CsvProcessor(args=args, soong=soong, all_makefiles=all_makefiles).execute()
+    CsvProcessor(args=args, soong=soong, all_makefiles=all_makefiles,
+        product_packages_modules=product_package_modules_plus_deps).execute()
 
 class HtmlProcessor(object):
-  def __init__(self, args, soong, all_makefiles):
+  def __init__(self, args, soong, all_makefiles, product_packages_modules):
     self.args = args
     self.soong = soong
     self.all_makefiles = all_makefiles
+    self.product_packages_modules = product_packages_modules
     self.annotations = Annotations()
 
   def execute(self):
@@ -376,6 +397,8 @@
     modules_by_partition = dict()
     partitions = set()
     for installed, module in self.soong.installed.items():
+      if len(self.product_packages_modules) > 0 and module not in self.product_packages_modules:
+        continue
       partition = get_partition_from_installed(HOST_OUT_ROOT, PRODUCT_OUT, installed)
       modules_by_partition.setdefault(partition, []).append(module)
       partitions.add(partition)
@@ -985,10 +1008,11 @@
       return "";
 
 class CsvProcessor(object):
-  def __init__(self, args, soong, all_makefiles):
+  def __init__(self, args, soong, all_makefiles, product_packages_modules):
     self.args = args
     self.soong = soong
     self.all_makefiles = all_makefiles
+    self.product_packages_modules = product_packages_modules
 
   def execute(self):
     csvout = csv.writer(sys.stdout)
@@ -1004,6 +1028,8 @@
     for filename in sorted(self.all_makefiles.keys()):
       makefile = self.all_makefiles[filename]
       for module in self.soong.reverse_makefiles[filename]:
+        if len(self.product_packages_modules) > 0 and module not in self.product_packages_modules:
+          continue
         row = [filename, module]
         # Partitions
         row.append(";".join(sorted(set([get_partition_from_installed(HOST_OUT_ROOT, PRODUCT_OUT,
diff --git a/tools/mk2bp_partition.py b/tools/mk2bp_partition.py
new file mode 100644
index 0000000..30c1135
--- /dev/null
+++ b/tools/mk2bp_partition.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python3
+
+"""
+The complete list of the remaining Make files in each partition for all lunch targets
+
+How to run?
+python3 $(path-to-file)/mk2bp_partition.py
+"""
+
+from pathlib import Path
+
+import csv
+import datetime
+import os
+import shutil
+import subprocess
+import sys
+import time
+
+def get_top():
+  path = '.'
+  while not os.path.isfile(os.path.join(path, 'build/soong/soong_ui.bash')):
+    if os.path.abspath(path) == '/':
+      sys.exit('Could not find android source tree root.')
+    path = os.path.join(path, '..')
+  return os.path.abspath(path)
+
+# Get the value of a build variable.
+def get_build_var(variable, product, build_variant):
+  """Returns the result of the shell command get_build_var."""
+  env = {
+      **os.environ,
+      'TARGET_PRODUCT': product if product else '',
+      'TARGET_BUILD_VARIANT': build_variant if build_variant else '',
+  }
+  return subprocess.run([
+      'build/soong/soong_ui.bash',
+      '--dumpvar-mode',
+      variable
+  ], check=True, capture_output=True, env=env, text=True).stdout.strip()
+
+def get_make_file_partitions():
+    lunch_targets = set(get_build_var("all_named_products", "", "").split())
+    total_lunch_targets = len(lunch_targets)
+    makefile_by_partition = dict()
+    partitions = set()
+    current_count = 0
+    start_time = time.time()
+    # lunch targets for which `m` cannot be run
+    broken_targets = {"mainline_sdk", "ndk"}
+    for lunch_target in sorted(lunch_targets):
+        current_count += 1
+        current_time = time.time()
+        print(current_count, "/", total_lunch_targets, lunch_target, datetime.timedelta(seconds=current_time - start_time))
+        if lunch_target in broken_targets:
+            continue
+        installed_product_out = get_build_var("PRODUCT_OUT", lunch_target, "userdebug")
+        filename = os.path.join(installed_product_out, "mk2bp_remaining.csv")
+        copy_filename = os.path.join(installed_product_out, lunch_target + "_mk2bp_remaining.csv")
+        # only generate the .csv if it does not already exist
+        if not os.path.exists(copy_filename):
+            bash_cmd = "bash build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=" + lunch_target
+            bash_cmd += " TARGET_BUILD_VARIANT=userdebug " + filename
+            subprocess.run(bash_cmd, shell=True, text=True, check=True, stdout=subprocess.DEVNULL)
+            # make a per-target copy of the .csv so a later build for another
+            # target does not overwrite it
+            shutil.copyfile(filename, copy_filename)
+
+        # open mk2bp_remaining.csv file
+        with open(copy_filename, "r") as csvfile:
+            reader = csv.reader(csvfile, delimiter=",", quotechar='"')
+            # bypass the header row
+            next(reader, None)
+            for row in reader:
+                # read partition information
+                partition = row[2]
+                makefile_by_partition.setdefault(partition, set()).add(row[0])
+                partitions.add(partition)
+
+    # write merged make file list for each partition into a csv file
+    installed_path = Path(installed_product_out).parents[0].as_posix()
+    csv_path = installed_path + "/mk2bp_partition.csv"
+    with open(csv_path, "wt") as csvfile:
+        writer = csv.writer(csvfile, delimiter=",")
+        count_makefile = 0
+        for partition in sorted(partitions):
+            number_file = len(makefile_by_partition[partition])
+            count_makefile += number_file
+            writer.writerow([partition, number_file])
+            for makefile in sorted(makefile_by_partition[partition]):
+                writer.writerow([makefile])
+        row = ["The total count of make files is ", count_makefile]
+        writer.writerow(row)
+
+def main():
+    os.chdir(get_top())
+    get_make_file_partitions()
+
+if __name__ == "__main__":
+    main()