Merge "finalize_branch_for_release.sh: avoid envsetup.sh"
diff --git a/core/Makefile b/core/Makefile
index 3246f58..86324cb 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -694,8 +694,8 @@
 	@rm -f $@
 	echo "# Modules using -Wno-error" >> $@
 	for m in $(sort $(SOONG_MODULES_USING_WNO_ERROR) $(MODULES_USING_WNO_ERROR)); do echo $$m >> $@; done
-	echo "# Modules added default -Wall" >> $@
-	for m in $(sort $(SOONG_MODULES_ADDED_WALL) $(MODULES_ADDED_WALL)); do echo $$m >> $@; done
+	echo "# Modules that allow warnings" >> $@
+	for m in $(sort $(SOONG_MODULES_WARNINGS_ALLOWED) $(MODULES_WARNINGS_ALLOWED)); do echo $$m >> $@; done
 
 $(call declare-0p-target,$(WALL_WERROR))
 
@@ -1824,6 +1824,7 @@
 define add-common-ro-flags-to-image-props
 $(eval _var := $(call to-upper,$(1)))
 $(if $(BOARD_$(_var)IMAGE_EROFS_COMPRESSOR),$(hide) echo "$(1)_erofs_compressor=$(BOARD_$(_var)IMAGE_EROFS_COMPRESSOR)" >> $(2))
+$(if $(BOARD_$(_var)IMAGE_EROFS_COMPRESS_HINTS),$(hide) echo "$(1)_erofs_compress_hints=$(BOARD_$(_var)IMAGE_EROFS_COMPRESS_HINTS)" >> $(2))
 $(if $(BOARD_$(_var)IMAGE_EROFS_PCLUSTER_SIZE),$(hide) echo "$(1)_erofs_pcluster_size=$(BOARD_$(_var)IMAGE_EROFS_PCLUSTER_SIZE)" >> $(2))
 $(if $(BOARD_$(_var)IMAGE_EXTFS_INODE_COUNT),$(hide) echo "$(1)_extfs_inode_count=$(BOARD_$(_var)IMAGE_EXTFS_INODE_COUNT)" >> $(2))
 $(if $(BOARD_$(_var)IMAGE_EXTFS_RSV_PCT),$(hide) echo "$(1)_extfs_rsv_pct=$(BOARD_$(_var)IMAGE_EXTFS_RSV_PCT)" >> $(2))
@@ -1909,6 +1910,7 @@
 $(if $(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG),$(hide) echo "squashfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG)" >> $(1))
 $(if $(INTERNAL_USERIMAGES_SPARSE_F2FS_FLAG),$(hide) echo "f2fs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_F2FS_FLAG)" >> $(1))
 $(if $(BOARD_EROFS_COMPRESSOR),$(hide) echo "erofs_default_compressor=$(BOARD_EROFS_COMPRESSOR)" >> $(1))
+$(if $(BOARD_EROFS_COMPRESS_HINTS),$(hide) echo "erofs_default_compress_hints=$(BOARD_EROFS_COMPRESS_HINTS)" >> $(1))
 $(if $(BOARD_EROFS_PCLUSTER_SIZE),$(hide) echo "erofs_pcluster_size=$(BOARD_EROFS_PCLUSTER_SIZE)" >> $(1))
 $(if $(BOARD_EROFS_SHARE_DUP_BLOCKS),$(hide) echo "erofs_share_dup_blocks=$(BOARD_EROFS_SHARE_DUP_BLOCKS)" >> $(1))
 $(if $(BOARD_EROFS_USE_LEGACY_COMPRESSION),$(hide) echo "erofs_use_legacy_compression=$(BOARD_EROFS_USE_LEGACY_COMPRESSION)" >> $(1))
@@ -3504,7 +3506,7 @@
 $(eval $(call copy-one-file,$(BOARD_PREBUILT_VENDORIMAGE),$(INSTALLED_VENDORIMAGE_TARGET)))
 $(if $(strip $(ALL_TARGETS.$(INSTALLED_VENDORIMAGE_TARGET).META_LIC)),,\
     $(if $(strip $(ALL_TARGETS.$(BOARD_PREBUILT_VENDORIMAGE).META_LIC)),\
-        $(eval ALL_TARGETS.$(INSTALLED_VENDORIMAGE_TARGET).META_LIC:=$(ALL_TARGETS.$(BOARD_PREBUILT_VENDORIMAGE).META_LIC)),\
+        $(call declare-copy-target-license-metadata,$(INSTALLED_VENDORIMAGE_TARGET),$(BOARD_PREBUILT_VENDORIMAGE)),\
         $(call declare-license-metadata,$(INSTALLED_VENDORIMAGE_TARGET),legacy_proprietary,proprietary,,"Vendor Image",vendor)))
 endif
 
@@ -5236,7 +5238,7 @@
 endif # BOARD_AVB_VBMETA_SYSTEM
 ifneq (,$(strip $(BOARD_AVB_VBMETA_VENDOR)))
 	$(hide) echo "avb_vbmeta_vendor=$(BOARD_AVB_VBMETA_VENDOR)" >> $@
-	$(hide) echo "avb_vbmeta_vendor_args=$(BOARD_AVB_MAKE_VBMETA_SYSTEM_IMAGE_ARGS)" >> $@
+	$(hide) echo "avb_vbmeta_vendor_args=$(BOARD_AVB_MAKE_VBMETA_VENDOR_IMAGE_ARGS)" >> $@
 	$(hide) echo "avb_vbmeta_vendor_key_path=$(BOARD_AVB_VBMETA_VENDOR_KEY_PATH)" >> $@
 	$(hide) echo "avb_vbmeta_vendor_algorithm=$(BOARD_AVB_VBMETA_VENDOR_ALGORITHM)" >> $@
 	$(hide) echo "avb_vbmeta_vendor_rollback_index_location=$(BOARD_AVB_VBMETA_VENDOR_ROLLBACK_INDEX_LOCATION)" >> $@
@@ -5963,6 +5965,8 @@
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(INSTALLED_PVMFWIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
 	$(hide) cp $(INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET) $(zip_root)/PREBUILT_IMAGES/
+	$(hide) mkdir -p $(zip_root)/PVMFW
+	$(hide) cp $(PREBUILT_PVMFWIMAGE_TARGET) $(zip_root)/PVMFW/
 endif
 ifdef BOARD_PREBUILT_BOOTLOADER
 	$(hide) mkdir -p $(zip_root)/IMAGES
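For orientation: `add-common-ro-flags-to-image-props` and the global block above only append `key=value` lines to the image-properties dictionary file that the releasetools image builder consumes later. A minimal hedged sketch of that file format (function name and keys invented here; the real parser lives under `tools/releasetools`):

```python
# Minimal sketch of parsing a build_image prop dictionary ("key=value" lines).
# Function name and example keys are illustrative, not the actual releasetools API.
def parse_prop_dict(path):
    props = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            if line and not line.startswith("#"):
                key, _, value = line.partition("=")
                props[key.strip()] = value.strip()
    return props

# With the hunks above, the dictionary may now contain entries such as
# "erofs_default_compress_hints" or "vendor_erofs_compress_hints".
```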
diff --git a/core/OWNERS b/core/OWNERS
index dae34ff..980186c 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -1,4 +1,4 @@
-per-file dex_preopt*.* = ngeoffray@google.com,skvadrik@google.com
+per-file *dex_preopt*.* = ngeoffray@google.com,skvadrik@google.com
 per-file verify_uses_libraries.sh = ngeoffray@google.com,skvadrik@google.com
 
 # For version updates
diff --git a/core/binary.mk b/core/binary.mk
index 665270e..3f32fa9 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -1506,7 +1506,7 @@
         ifeq (,$(strip $(call find_warning_allowed_projects,$(LOCAL_PATH))))
           my_cflags := -Wall -Werror $(my_cflags)
         else
-          $(eval MODULES_ADDED_WALL := $(MODULES_ADDED_WALL) $(LOCAL_MODULE_MAKEFILE):$(LOCAL_MODULE))
+          $(eval MODULES_WARNINGS_ALLOWED := $(MODULES_WARNINGS_ALLOWED) $(LOCAL_MODULE_MAKEFILE):$(LOCAL_MODULE))
           my_cflags := -Wall $(my_cflags)
         endif
       endif
diff --git a/core/definitions.mk b/core/definitions.mk
index e424bc2..c5423e7 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -41,6 +41,9 @@
 ALL_NON_MODULES:=
 NON_MODULES_WITHOUT_LICENSE_METADATA:=
 
+# List of copied targets that need license metadata copied.
+ALL_COPIED_TARGETS:=
+
 # Full paths to targets that should be added to the "make droid"
 # set of installed targets.
 ALL_DEFAULT_INSTALLED_MODULES:=
@@ -583,6 +586,19 @@
 endef
 
 ###########################################################
+## Record a target $(1) copied from one or more other targets $(2) that will
+## need license metadata.
+###########################################################
+define declare-copy-target-license-metadata
+$(strip $(if $(filter $(OUT_DIR)%,$(2)),$(eval _dir:=$(call license-metadata-dir))\
+  $(eval _tgt:=$(strip $(1)))\
+  $(eval _meta := $(call append-path,$(_dir),$(patsubst $(OUT_DIR)%,out%,$(_tgt).meta_lic)))\
+  $(eval ALL_COPIED_TARGETS.$(_tgt).SOURCES := $(ALL_COPIED_TARGETS.$(_tgt).SOURCES) $(filter $(OUT_DIR)%,$(2)))\
+  $(eval ALL_COPIED_TARGETS += $(_tgt)),\
+  $(eval ALL_TARGETS.$(1).META_LIC:=$(module_license_metadata))))
+endef
+
+###########################################################
 ## License metadata build rule for my_register_name $(1)
 ###########################################################
 define license-metadata-rule
@@ -661,13 +677,6 @@
 $(strip $(eval _notices := $(sort $(ALL_NON_MODULES.$(_tgt).NOTICES))))
 $(strip $(eval _path := $(sort $(ALL_NON_MODULES.$(_tgt).PATH))))
 $(strip $(eval _install_map := $(ALL_NON_MODULES.$(_tgt).ROOT_MAPPINGS)))
-$(strip $(eval \
-  $$(foreach d,$(strip $(ALL_NON_MODULES.$(_tgt).DEPENDENCIES)), \
-    $$(if $$(strip $$(ALL_TARGETS.$$(d).META_LIC)), \
-      , \
-      $$(eval NON_MODULES_WITHOUT_LICENSE_METADATA += $$(d))) \
-  )) \
-)
 
 $(_meta): PRIVATE_KINDS := $(sort $(ALL_NON_MODULES.$(_tgt).LICENSE_KINDS))
 $(_meta): PRIVATE_CONDITIONS := $(sort $(ALL_NON_MODULES.$(_tgt).LICENSE_CONDITIONS))
@@ -705,6 +714,60 @@
 endef
 
 ###########################################################
+## Record missing dependencies for non-module target $(1)
+###########################################################
+define record-missing-non-module-dependencies
+$(strip $(eval _tgt := $(strip $(1))))
+$(strip $(foreach d,$(strip $(ALL_NON_MODULES.$(_tgt).DEPENDENCIES)), \
+  $(if $(strip $(ALL_TARGETS.$(d).META_LIC)), \
+    , \
+    $(eval NON_MODULES_WITHOUT_LICENSE_METADATA += $(d))) \
+))
+endef
+
+###########################################################
+## License metadata build rule for copied target $(1)
+###########################################################
+define copied-target-license-metadata-rule
+$(if $(strip $(ALL_TARGETS.$(1).META_LIC)),,$(call _copied-target-license-metadata-rule,$(1)))
+endef
+
+define _copied-target-license-metadata-rule
+$(strip $(eval _dir := $(call license-metadata-dir)))
+$(strip $(eval _meta := $(call append-path,$(_dir),$(patsubst $(OUT_DIR)%,out%,$(1).meta_lic))))
+$(strip $(eval ALL_TARGETS.$(1).META_LIC:=$(_meta)))
+$(strip $(eval _dep:=))
+$(strip $(foreach s,$(ALL_COPIED_TARGETS.$(1).SOURCES),\
+  $(eval _dmeta:=$(ALL_TARGETS.$(s).META_LIC))\
+  $(if $(filter 0p,$(_dmeta)),\
+    $(if $(filter-out 0p,$(_dep)),,$(eval ALL_TARGETS.$(1).META_LIC:=0p)),\
+    $(if $(_dep),\
+      $(if $(filter-out $(_dep),$(_dmeta)),$(error cannot copy target from multiple modules: $(1) from $(_dep) and $(_dmeta))),\
+      $(eval _dep:=$(_dmeta))))))
+$(strip $(if $(strip $(_dep)),,$(error cannot copy target from unknown module: $(1) from $(ALL_COPIED_TARGETS.$(1).SOURCES))))
+
+ifneq (0p,$(ALL_TARGETS.$(1).META_LIC))
+$(_meta): PRIVATE_DEST_TARGET := $(1)
+$(_meta): PRIVATE_SOURCE_TARGETS := $(ALL_COPIED_TARGETS.$(1).SOURCES)
+$(_meta): PRIVATE_SOURCE_METADATA := $(_dep)
+$(_meta): PRIVATE_ARGUMENT_FILE := $(call intermediates-dir-for,PACKAGING,copynotice)/$(_meta)/arguments
+$(_meta) : $(_dep)
+	rm -f $$@
+	mkdir -p $$(dir $$@)
+	mkdir -p $$(dir $$(PRIVATE_ARGUMENT_FILE))
+	$$(call dump-words-to-file,\
+	    $$(addprefix -i ,$$(PRIVATE_DEST_TARGET))\
+	    $$(addprefix -s ,$$(PRIVATE_SOURCE_TARGETS))\
+	    $$(addprefix -d ,$$(PRIVATE_SOURCE_METADATA)),\
+	    $$(PRIVATE_ARGUMENT_FILE))
+	OUT_DIR=$(OUT_DIR) $(COPY_LICENSE_METADATA) \
+	  @$$(PRIVATE_ARGUMENT_FILE) \
+	  -o $$@
+
+endif
+endef
+
+###########################################################
 ## Declare the license metadata for non-module target $(1).
 ##
 ## $(2) -- license kinds e.g. SPDX-license-identifier-Apache-2.0
@@ -919,6 +982,8 @@
   ) \
   $(foreach t,$(sort $(ALL_NON_MODULES)),$(eval $(call non-module-license-metadata-rule,$(t)))) \
   $(foreach m,$(sort $(ALL_MODULES)),$(eval $(call license-metadata-rule,$(m)))) \
+  $(foreach t,$(sort $(ALL_COPIED_TARGETS)),$(eval $(call copied-target-license-metadata-rule,$(t)))) \
+  $(foreach t,$(sort $(ALL_NON_MODULES)),$(call record-missing-non-module-dependencies,$(t))) \
   $(eval $(call report-missing-licenses-rule)) \
   $(eval $(call report-all-notice-library-names-rule)) \
   $(eval $(call build-all-license-metadata-rule)))
@@ -3402,11 +3467,11 @@
 define create-suite-dependencies
 $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
   $(eval $(if $(strip $(module_license_metadata)),\
-    $$(foreach f,$$(my_compat_dist_$(suite)),$$(eval ALL_TARGETS.$$(call word-colon,2,$$(f)).META_LIC := $(module_license_metadata))),\
+    $$(foreach f,$$(my_compat_dist_$(suite)),$$(call declare-copy-target-license-metadata,$$(call word-colon,2,$$(f)),$$(call word-colon,1,$$(f)))),\
     $$(eval my_test_data += $$(foreach f,$$(my_compat_dist_$(suite)), $$(call word-colon,2,$$(f)))) \
   )) \
   $(eval $(if $(strip $(module_license_metadata)),\
-    $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(eval ALL_TARGETS.$$(call word-colon,2,$$(f)).META_LIC := $(module_license_metadata))),\
+    $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call declare-copy-target-license-metadata,$$(call word-colon,2,$$(f)),$$(call word-colon,1,$$(f)))),\
     $$(eval my_test_config += $$(foreach f,$$(my_compat_dist_config_$(suite)), $$(call word-colon,2,$$(f)))) \
   )) \
   $(if $(filter $(suite),$(ALL_COMPATIBILITY_SUITES)),,\
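The `_copied-target-license-metadata-rule` above is dense Make; as a rough Python model of its resolution step (function and dict names invented here, not part of the build system), a copied target resolves to `0p` when all of its sources are public-domain, and otherwise every non-`0p` source must agree on a single `.meta_lic` file:

```python
# Rough, hedged model of the source-resolution logic in
# _copied-target-license-metadata-rule; names are invented for illustration.
def resolve_copied_target(target, sources, meta_lic_of):
    """Return "0p" or the single source .meta_lic that the copy inherits."""
    dep = ""
    saw_only_0p = True
    for src in sources:
        dmeta = meta_lic_of.get(src, "")   # "" means the source is unknown
        if dmeta == "0p":
            continue                       # 0p sources don't constrain the result
        saw_only_0p = False
        if dep and dmeta and dmeta != dep:
            raise ValueError("cannot copy target from multiple modules: %s from %s and %s"
                             % (target, dep, dmeta))
        dep = dep or dmeta
    if saw_only_0p:
        return "0p"
    if not dep:
        raise ValueError("cannot copy target from unknown module: %s from %s"
                         % (target, sources))
    return dep
```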
diff --git a/core/dex_preopt_config_merger.py b/core/dex_preopt_config_merger.py
index 4efcc17..401e8a8 100755
--- a/core/dex_preopt_config_merger.py
+++ b/core/dex_preopt_config_merger.py
@@ -31,6 +31,7 @@
 
 import json
 from collections import OrderedDict
+import os
 import sys
 
 
@@ -42,8 +43,9 @@
   # Read all JSON configs.
   cfgs = []
   for arg in sys.argv[1:]:
-    with open(arg, 'r') as f:
-      cfgs.append(json.load(f, object_pairs_hook=OrderedDict))
+    if os.stat(arg).st_size != 0:
+      with open(arg, 'r') as f:
+        cfgs.append(json.load(f, object_pairs_hook=OrderedDict))
 
   # The first config is the dexpreopted library/app, the rest are its
   # <uses-library> dependencies.
@@ -88,6 +90,33 @@
       clcs2.append(clc)
     clc_map2[sdk_ver] = clcs2
 
+  # Go over all uses-libraries in dependency dexpreopt.config files (these don't
+  # have to be uses-libraries themselves, they can be e.g. transitive static
+  # library dependencies) and merge their CLC into the current one.
+  for ulib, cfg in uses_libs.items():
+    any_sdk_ver = 'any' # not interested in compatibility libraries
+    clcs = cfg['ClassLoaderContexts'].get(any_sdk_ver, [])
+
+    # If the dependency is a uses-library itself, its uses-library dependencies
+    # are added as a subcontext, so don't add them to top-level CLC.
+    dep_is_a_uses_lib = False
+    for clc2 in clc_map2[any_sdk_ver]:
+      if clc2['Name'] == cfg['ProvidesUsesLibrary']:
+        dep_is_a_uses_lib = True
+    if dep_is_a_uses_lib:
+      continue
+
+    # Check if CLC for these libraries is already present (avoid duplicates).
+    # Don't bother optimizing quadratic loop, since CLC is typically small.
+    for clc in clcs:
+      already_in_clc = False
+      for clc2 in clc_map2[any_sdk_ver]:
+        if clc2['Name'] == clc['Name']:
+          already_in_clc = True
+          break
+      if not already_in_clc:
+        clc_map2[any_sdk_ver].append(clc)
+
   # Overwrite the original class loader context with the patched one.
   cfg0['ClassLoaderContexts'] = clc_map2
 
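A standalone toy run of the dedup logic in the new merging block above (data shapes simplified to just `Name`; real CLC entries carry more fields):

```python
# Toy illustration of merging a dependency's CLC entries without duplicates.
clc_map2 = {"any": [{"Name": "libA"}]}
dep_clcs = [{"Name": "libA"}, {"Name": "libB"}]  # from a dependency's config

for clc in dep_clcs:
    if not any(existing["Name"] == clc["Name"] for existing in clc_map2["any"]):
        clc_map2["any"].append(clc)

print(clc_map2)  # {'any': [{'Name': 'libA'}, {'Name': 'libB'}]}
```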
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 216168b..9d6436f 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -110,7 +110,8 @@
 # Local module variables and functions used in dexpreopt and manifest_check.
 ################################################################################
 
-my_filtered_optional_uses_libraries := $(filter-out $(INTERNAL_PLATFORM_MISSING_USES_LIBRARIES), \
+my_dexpreopt_libs_required := $(LOCAL_USES_LIBRARIES)
+my_dexpreopt_libs_optional := $(filter-out $(INTERNAL_PLATFORM_MISSING_USES_LIBRARIES), \
   $(LOCAL_OPTIONAL_USES_LIBRARIES))
 
 # TODO(b/132357300): This may filter out too much, as PRODUCT_PACKAGES doesn't
@@ -120,8 +121,7 @@
 # to load dexpreopt code on device. We should fix this, either by deferring
 # dependency computation until the full list of product packages is known, or
 # by adding product-specific lists of missing libraries.
-my_filtered_optional_uses_libraries := $(filter $(PRODUCT_PACKAGES), \
-  $(my_filtered_optional_uses_libraries))
+my_dexpreopt_libs_optional := $(filter $(PRODUCT_PACKAGES), $(my_dexpreopt_libs_optional))
 
 ifeq ($(LOCAL_MODULE_CLASS),APPS)
   # compatibility libraries are added to class loader context of an app only if
@@ -146,10 +146,6 @@
   my_dexpreopt_libs_compat :=
 endif
 
-my_dexpreopt_libs := \
-  $(LOCAL_USES_LIBRARIES) \
-  $(my_filtered_optional_uses_libraries)
-
 # Module dexpreopt.config depends on dexpreopt.config files of each
 # <uses-library> dependency, because these libraries may be processed after
 # the current module by Make (there's no topological order), so the dependency
@@ -157,11 +153,12 @@
 # this dexpreopt.config is generated. So it's necessary to add file-level
 # dependencies between dexpreopt.config files.
 my_dexpreopt_dep_configs := $(foreach lib, \
-  $(filter-out $(my_dexpreopt_libs_compat),$(LOCAL_USES_LIBRARIES) $(my_filtered_optional_uses_libraries)), \
+  $(filter-out $(my_dexpreopt_libs_compat),$(my_dexpreopt_libs_required) $(my_dexpreopt_libs_optional)), \
   $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,)/dexpreopt.config)
 
 # 1: SDK version
 # 2: list of libraries
+# 3: boolean, true if optional, else required
 #
 # Make does not process modules in topological order wrt. <uses-library>
 # dependencies, therefore we cannot rely on variables to get the information
@@ -180,12 +177,48 @@
   $(foreach lib, $(2),\
     $(call add_json_map_anon) \
     $(call add_json_str, Name, $(lib)) \
+    $(call add_json_bool, Optional, $(filter true,$(3))) \
     $(call add_json_str, Host, $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar) \
     $(call add_json_str, Device, /system/framework/$(lib).jar) \
     $(call add_json_val, Subcontexts, null) \
     $(call end_json_map)) \
   $(call end_json_array)
 
+my_dexpreopt_archs :=
+my_dexpreopt_images :=
+my_dexpreopt_images_deps :=
+my_dexpreopt_image_locations_on_host :=
+my_dexpreopt_image_locations_on_device :=
+# Infix can be 'boot' or 'art'. Soong creates a set of variables for Make, one
+# for each boot image (primary and the framework extension). The only reason why
+# the primary image is exposed to Make is testing (art gtests) and benchmarking
+# (art golem benchmarks). Install rules that use those variables are in
+# dex_preopt_libart.mk. Here for dexpreopt purposes the infix is always 'boot'.
+my_dexpreopt_infix := boot
+
+ifdef LOCAL_DEX_PREOPT
+  ifeq (,$(filter PRESIGNED,$(LOCAL_CERTIFICATE)))
+    # Store uncompressed dex files preopted in /system
+    ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true)
+      ifeq ($(call install-on-system-other, $(my_module_path)),)
+        LOCAL_UNCOMPRESS_DEX := true
+      endif  # install-on-system-other
+    else  # BOARD_USES_SYSTEM_OTHER_ODEX
+      LOCAL_UNCOMPRESS_DEX := true
+    endif
+  endif
+  my_create_dexpreopt_config := true
+endif
+
+# dexpreopt is disabled when TARGET_BUILD_UNBUNDLED_IMAGE is true,
+# but dexpreopt config files are required to dexpreopt in post-processing.
+ifeq ($(TARGET_BUILD_UNBUNDLED_IMAGE),true)
+  my_create_dexpreopt_config := true
+endif
+
+# This is needed for both <uses-library> check and dexpreopt command.
+my_dexpreopt_config := $(intermediates)/dexpreopt.config
+
 ################################################################################
 # Verify <uses-library> coherence between the build system and the manifest.
 ################################################################################
@@ -238,7 +271,13 @@
     $(LOCAL_OPTIONAL_USES_LIBRARIES))
   my_relax_check_arg := $(if $(filter true,$(RELAX_USES_LIBRARY_CHECK)), \
     --enforce-uses-libraries-relax,)
-  my_dexpreopt_config_args := $(patsubst %,--dexpreopt-config %,$(my_dexpreopt_dep_configs))
+
+  my_dexpreopt_config_deps := $(my_dexpreopt_dep_configs)
+  my_dexpreopt_config_args := $(patsubst %,--dexpreopt-dep-config %,$(my_dexpreopt_dep_configs))
+  ifeq ($(my_create_dexpreopt_config), true)
+    my_dexpreopt_config_deps += $(my_dexpreopt_config)
+    my_dexpreopt_config_args += --dexpreopt-config $(my_dexpreopt_config)
+  endif
 
   my_enforced_uses_libraries := $(intermediates.COMMON)/enforce_uses_libraries.status
   $(my_enforced_uses_libraries): PRIVATE_USES_LIBRARIES := $(my_uses_libs_args)
@@ -247,7 +286,7 @@
   $(my_enforced_uses_libraries): PRIVATE_RELAX_CHECK := $(my_relax_check_arg)
   $(my_enforced_uses_libraries): $(AAPT)
   $(my_enforced_uses_libraries): $(my_verify_script)
-  $(my_enforced_uses_libraries): $(my_dexpreopt_dep_configs)
+  $(my_enforced_uses_libraries): $(my_dexpreopt_config_deps)
   $(my_enforced_uses_libraries): $(my_manifest_or_apk)
 	@echo Verifying uses-libraries: $<
 	rm -f $@
@@ -267,39 +306,6 @@
 # Dexpreopt command.
 ################################################################################
 
-my_dexpreopt_archs :=
-my_dexpreopt_images :=
-my_dexpreopt_images_deps :=
-my_dexpreopt_image_locations_on_host :=
-my_dexpreopt_image_locations_on_device :=
-# Infix can be 'boot' or 'art'. Soong creates a set of variables for Make, one
-# for each boot image (primary and the framework extension). The only reason why
-# the primary image is exposed to Make is testing (art gtests) and benchmarking
-# (art golem benchmarks). Install rules that use those variables are in
-# dex_preopt_libart.mk. Here for dexpreopt purposes the infix is always 'boot'.
-my_dexpreopt_infix := boot
-my_create_dexpreopt_config :=
-
-ifdef LOCAL_DEX_PREOPT
-  ifeq (,$(filter PRESIGNED,$(LOCAL_CERTIFICATE)))
-    # Store uncompressed dex files preopted in /system
-    ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true)
-      ifeq ($(call install-on-system-other, $(my_module_path)),)
-        LOCAL_UNCOMPRESS_DEX := true
-      endif  # install-on-system-other
-    else  # BOARD_USES_SYSTEM_OTHER_ODEX
-      LOCAL_UNCOMPRESS_DEX := true
-    endif
-  endif
-  my_create_dexpreopt_config := true
-endif
-
-# dexpreopt is disabled when TARGET_BUILD_UNBUNDLED_IMAGE is true,
-# but dexpreopt config files are required to dexpreopt in post-processing.
-ifeq ($(TARGET_BUILD_UNBUNDLED_IMAGE),true)
-  my_create_dexpreopt_config := true
-endif
-
 ifeq ($(my_create_dexpreopt_config), true)
   ifeq ($(LOCAL_MODULE_CLASS),JAVA_LIBRARIES)
     my_module_multilib := $(LOCAL_MULTILIB)
@@ -389,10 +395,11 @@
   $(call add_json_bool, EnforceUsesLibraries,           $(filter true,$(LOCAL_ENFORCE_USES_LIBRARIES)))
   $(call add_json_str,  ProvidesUsesLibrary,            $(firstword $(LOCAL_PROVIDES_USES_LIBRARY) $(LOCAL_MODULE)))
   $(call add_json_map,  ClassLoaderContexts)
-  $(call add_json_class_loader_context, any, $(my_dexpreopt_libs))
-  $(call add_json_class_loader_context,  28, $(my_dexpreopt_libs_compat_28))
-  $(call add_json_class_loader_context,  29, $(my_dexpreopt_libs_compat_29))
-  $(call add_json_class_loader_context,  30, $(my_dexpreopt_libs_compat_30))
+  $(call add_json_class_loader_context, any, $(my_dexpreopt_libs_required),)
+  $(call add_json_class_loader_context, any, $(my_dexpreopt_libs_optional),true)
+  $(call add_json_class_loader_context,  28, $(my_dexpreopt_libs_compat_28),)
+  $(call add_json_class_loader_context,  29, $(my_dexpreopt_libs_compat_29),)
+  $(call add_json_class_loader_context,  30, $(my_dexpreopt_libs_compat_30),)
   $(call end_json_map)
   $(call add_json_list, Archs,                          $(my_dexpreopt_archs))
   $(call add_json_list, DexPreoptImages,                $(my_dexpreopt_images))
@@ -407,7 +414,6 @@
 
   $(call json_end)
 
-  my_dexpreopt_config := $(intermediates)/dexpreopt.config
   my_dexpreopt_config_for_postprocessing := $(PRODUCT_OUT)/dexpreopt_config/$(LOCAL_MODULE)_dexpreopt.config
   my_dexpreopt_config_merger := $(BUILD_SYSTEM)/dex_preopt_config_merger.py
 
@@ -466,7 +472,7 @@
   my_dexpreopt_deps := $(my_dex_jar)
   my_dexpreopt_deps += $(if $(my_process_profile),$(LOCAL_DEX_PREOPT_PROFILE))
   my_dexpreopt_deps += \
-    $(foreach lib, $(my_dexpreopt_libs) $(my_dexpreopt_libs_compat), \
+    $(foreach lib, $(my_dexpreopt_libs_required) $(my_dexpreopt_libs_optional) $(my_dexpreopt_libs_compat), \
       $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar)
   my_dexpreopt_deps += $(my_dexpreopt_images_deps)
   my_dexpreopt_deps += $(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)
@@ -506,4 +512,4 @@
   my_dexpreopt_zip :=
   my_dexpreopt_config_for_postprocessing :=
 endif # LOCAL_DEX_PREOPT
-endif # my_create_dexpreopt_config
\ No newline at end of file
+endif # my_create_dexpreopt_config
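The visible effect of splitting `my_dexpreopt_libs` into required and optional lists is in the module's generated dexpreopt.config: each class loader context entry now carries an `Optional` flag. A hedged sketch of the resulting shape, with invented library names and paths:

```python
# Illustrative only: the ClassLoaderContexts section of a generated
# dexpreopt.config after this change. Names and paths are invented.
class_loader_contexts = {
    "any": [
        {   # from my_dexpreopt_libs_required (LOCAL_USES_LIBRARIES)
            "Name": "org.apache.http.legacy",
            "Optional": False,
            "Host": "out/target/common/obj/JAVA_LIBRARIES/org.apache.http.legacy_intermediates/javalib.jar",
            "Device": "/system/framework/org.apache.http.legacy.jar",
            "Subcontexts": None,
        },
        {   # from my_dexpreopt_libs_optional (filtered LOCAL_OPTIONAL_USES_LIBRARIES)
            "Name": "com.example.optional.lib",
            "Optional": True,
            "Host": "out/target/common/obj/JAVA_LIBRARIES/com.example.optional.lib_intermediates/javalib.jar",
            "Device": "/system/framework/com.example.optional.lib.jar",
            "Subcontexts": None,
        },
    ],
}
```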
diff --git a/core/main.mk b/core/main.mk
index 78f38f3..2cfea45 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -935,7 +935,7 @@
           $(eval my_testcases := $(HOST_OUT_TESTCASES)),\
           $(eval my_testcases := $$(COMPATIBILITY_TESTCASES_OUT_$(suite))))\
         $(eval target := $(my_testcases)/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\
-        $(if $(strip $(ALL_TARGETS.$(target).META_LIC)),,$(eval ALL_TARGETS.$(target).META_LIC:=$(module_license_metadata)))\
+        $(if $(strip $(ALL_TARGETS.$(target).META_LIC)),,$(call declare-copy-target-license-metadata,$(target),$(f)))\
         $(eval COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES := \
           $$(COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES) $(f):$(target))\
         $(eval COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES := \
diff --git a/core/notice_files.mk b/core/notice_files.mk
index c05d4ea..84523af 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -127,7 +127,8 @@
 ifdef my_register_name
   module_license_metadata := $(call local-intermediates-dir)/$(my_register_name).meta_lic
 
-  $(foreach target,$(ALL_MODULES.$(my_register_name).BUILT) $(ALL_MODULES.$(my_register_name).INSTALLED) $(my_test_data) $(my_test_config),\
+  $(foreach target,$(ALL_MODULES.$(my_register_name).BUILT) $(ALL_MODULES.$(my_register_name).INSTALLED) $(foreach bi,$(LOCAL_SOONG_BUILT_INSTALLED),$(call word-colon,1,$(bi))) \
+      $(my_test_data) $(my_test_config),\
     $(eval ALL_TARGETS.$(target).META_LIC := $(module_license_metadata)))
 
   ALL_MODULES.$(my_register_name).META_LIC := $(strip $(ALL_MODULES.$(my_register_name).META_LIC) $(module_license_metadata))
diff --git a/core/rbe.mk b/core/rbe.mk
index 370d4bd..90328d3 100644
--- a/core/rbe.mk
+++ b/core/rbe.mk
@@ -87,11 +87,11 @@
   endif
 
   ifdef RBE_R8
-    R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=out/soong/host/linux-x86/framework/r8-compat-proguard.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
+    R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/r8-compat-proguard.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
   endif
 
   ifdef RBE_D8
-    D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=out/soong/host/linux-x86/framework/d8.jar --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
+    D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/d8.jar --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
   endif
 
   rbe_dir :=
diff --git a/core/soong_cc_rust_prebuilt.mk b/core/soong_cc_rust_prebuilt.mk
index 07e577a..05b4b6b 100644
--- a/core/soong_cc_rust_prebuilt.mk
+++ b/core/soong_cc_rust_prebuilt.mk
@@ -50,6 +50,28 @@
 # to avoid checkbuilds making an extra copy of every module.
 LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE)
 
+my_check_same_vndk_variants :=
+same_vndk_variants_stamp :=
+ifeq ($(LOCAL_CHECK_SAME_VNDK_VARIANTS),true)
+  ifeq ($(filter hwaddress address, $(SANITIZE_TARGET)),)
+    ifneq ($(CLANG_COVERAGE),true)
+      # Do not compare VNDK variant for special cases e.g. coverage builds.
+      ifneq ($(SKIP_VNDK_VARIANTS_CHECK),true)
+        my_check_same_vndk_variants := true
+        same_vndk_variants_stamp := $(call local-intermediates-dir,,$(LOCAL_2ND_ARCH_VAR_PREFIX))/same_vndk_variants.timestamp
+      endif
+    endif
+  endif
+endif
+
+ifeq ($(my_check_same_vndk_variants),true)
+  # Add the timestamp to the CHECKED list so that `checkbuild` can run it.
+  # Note that adding the timestamp to LOCAL_BUILT_MODULE isn't enough, because `checkbuild`
+  # doesn't check LOCAL_BUILT_MODULE for soong-built modules; it is skipped when the vendor
+  # variant isn't used at all, which may break downstream trees.
+  LOCAL_ADDITIONAL_CHECKED_MODULE := $(same_vndk_variants_stamp)
+endif
+
 #######################################
 include $(BUILD_SYSTEM)/base_rules.mk
 #######################################
@@ -125,21 +147,7 @@
   endif
 endif
 
-my_check_same_vndk_variants :=
-ifeq ($(LOCAL_CHECK_SAME_VNDK_VARIANTS),true)
-  ifeq ($(filter hwaddress address, $(SANITIZE_TARGET)),)
-    ifneq ($(CLANG_COVERAGE),true)
-        # Do not compare VNDK variant for special cases e.g. coverage builds.
-        ifneq ($(SKIP_VNDK_VARIANTS_CHECK),true)
-            my_check_same_vndk_variants := true
-        endif
-    endif
-  endif
-endif
-
 ifeq ($(my_check_same_vndk_variants),true)
-  same_vndk_variants_stamp := $(intermediates)/same_vndk_variants.timestamp
-
   my_core_register_name := $(subst .vendor,,$(subst .product,,$(my_register_name)))
   my_core_variant_files := $(call module-target-built-files,$(my_core_register_name))
   my_core_shared_lib := $(sort $(filter %.so,$(my_core_variant_files)))
diff --git a/core/tasks/README.dex_preopt_check.md b/core/tasks/README.dex_preopt_check.md
new file mode 100644
index 0000000..b0baa9e
--- /dev/null
+++ b/core/tasks/README.dex_preopt_check.md
@@ -0,0 +1,43 @@
+# `dex_preopt_check`
+
+`dex_preopt_check` is a build-time check to make sure that all system server
+jars are dexpreopted. When the check fails, you will see the following error
+message:
+
+```
+FAILED:
+build/make/core/tasks/dex_preopt_check.mk:13: warning:  Missing compilation artifacts. Dexpreopting is not working for some system server jars
+Offending entries:
+```
+
+Possible causes are:
+
+1.  There is an APEX/SDK mismatch. (E.g., the APEX is built from source while
+    the SDK is built from prebuilt.)
+
+1.  The `systemserverclasspath_fragment` is not added as
+    `systemserverclasspath_fragments` of the corresponding `apex` module, or not
+    added as `exported_systemserverclasspath_fragments` of the corresponding
+    `prebuilt_apex`/`apex_set` module when building from prebuilt.
+
+1.  The expected version of the system server java library is not preferred.
+    (E.g., the `java_import` module has `prefer: false` when building from
+    prebuilt.)
+
+1.  Dexpreopting is disabled for the system server java library. This can
+    happen for various reasons, including but not limited to:
+
+    - The java library has `dex_preopt: { enabled: false }` in the Android.bp
+      file.
+
+    - The java library is listed in `DEXPREOPT_DISABLED_MODULES` in a Makefile.
+
+    - The java library is missing `installable: true` in the Android.bp
+      file when building from source.
+
+    - Sanitizer is enabled.
+
+1.  `PRODUCT_SYSTEM_SERVER_JARS`, `PRODUCT_APEX_SYSTEM_SERVER_JARS`,
+    `PRODUCT_STANDALONE_SYSTEM_SERVER_JARS`, or
+    `PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS` has an extra entry that is not
+    needed by the product.
diff --git a/core/tasks/dex_preopt_check.mk b/core/tasks/dex_preopt_check.mk
index bfa1ec5..5fd60c8 100644
--- a/core/tasks/dex_preopt_check.mk
+++ b/core/tasks/dex_preopt_check.mk
@@ -12,7 +12,8 @@
   ifneq (,$(filter services,$(PRODUCT_PACKAGES)))
     $(call maybe-print-list-and-error,\
       $(filter-out $(ALL_DEFAULT_INSTALLED_MODULES),$(DEXPREOPT_SYSTEMSERVER_ARTIFACTS)),\
-      Missing compilation artifacts. Dexpreopting is not working for some system server jars \
+      Missing compilation artifacts. Dexpreopting is not working for some system server jars. See \
+      https://cs.android.com/android/platform/superproject/+/master:build/make/core/tasks/README.dex_preopt_check.md \
     )
   endif
 endif
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index 20a1694..aecb4a8 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -25,7 +25,7 @@
 LOCAL_MODULE_STEM := $(my_package_name).zip
 LOCAL_UNINSTALLABLE_MODULE := true
 include $(BUILD_SYSTEM)/base_rules.mk
-my_staging_dir := $(intermediates)
+my_staging_dir := $(intermediates)/staging
 my_package_zip := $(LOCAL_BUILT_MODULE)
 
 my_built_modules := $(foreach p,$(my_copy_pairs),$(call word-colon,1,$(p)))
@@ -92,17 +92,18 @@
 endif
 
 $(my_package_zip): PRIVATE_COPY_PAIRS := $(my_copy_pairs)
+$(my_package_zip): PRIVATE_STAGING_DIR := $(my_staging_dir)
 $(my_package_zip): PRIVATE_PICKUP_FILES := $(my_pickup_files)
-$(my_package_zip) : $(my_built_modules)
+$(my_package_zip) : $(my_built_modules) $(SOONG_ZIP)
 	@echo "Package $@"
-	@rm -rf $(dir $@) && mkdir -p $(dir $@)
+	@rm -rf $(PRIVATE_STAGING_DIR) && mkdir -p $(PRIVATE_STAGING_DIR)
 	$(foreach p, $(PRIVATE_COPY_PAIRS),\
 	  $(eval pair := $(subst :,$(space),$(p)))\
 	  mkdir -p $(dir $(word 2,$(pair))) && \
 	  cp -Rf $(word 1,$(pair)) $(word 2,$(pair)) && ) true
 	$(hide) $(foreach f, $(PRIVATE_PICKUP_FILES),\
-	  cp -RfL $(f) $(dir $@) && ) true
-	$(hide) cd $(dir $@) && zip -rqX $(notdir $@) *
+	  cp -RfL $(f) $(PRIVATE_STAGING_DIR) && ) true
+	$(hide) $(SOONG_ZIP) -o $@ -C $(PRIVATE_STAGING_DIR) -D $(PRIVATE_STAGING_DIR)
 
 my_makefile :=
 my_staging_dir :=
diff --git a/core/tasks/tools/vts_package_utils.mk b/core/tasks/tools/vts_package_utils.mk
index f1159b3..06161f0 100644
--- a/core/tasks/tools/vts_package_utils.mk
+++ b/core/tasks/tools/vts_package_utils.mk
@@ -29,6 +29,6 @@
       $(eval my_copy_dest := $(patsubst data/%,DATA/%,\
                                $(patsubst system/%,DATA/%,\
                                    $(patsubst $(PRODUCT_OUT)/%,%,$(ins)))))\
-      $(eval ALL_TARGETS.$(2)/$(my_copy_dest).META_LIC := $(if $(strip $(ALL_MODULES.$(m).META_LIC)),$(ALL_MODULES.$(m).META_LIC),$(ALL_MODULES.$(m).DELAYED_META_LIC)))\
+      $(call declare-copy-target-license-metadata,$(2)/$(my_copy_dest),$(bui))\
       $(bui):$(2)/$(my_copy_dest))))
 endef
diff --git a/envsetup.sh b/envsetup.sh
index b079d41..c9b1b54 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -403,7 +403,9 @@
     # e.g.
     # ENVSETUP_NO_COMPLETION=adb # -> disable adb completion
     # ENVSETUP_NO_COMPLETION=adb:bit # -> disable adb and bit completion
+    local T=$(gettop)
     for f in ${completion_files[*]}; do
+        f="$T/$f"
         if [ ! -f "$f" ]; then
           echo "Warning: completion file $f not found"
         elif should_add_completion "$f"; then
@@ -878,7 +880,7 @@
     fi
 
     if [ -z "$product" ]; then
-        product=arm
+        product=arm64
     elif [ $(echo $product | wc -w) -gt 1 ]; then
         echo "banchan: Error: Multiple build archs or products supplied: $products"
         return
@@ -1095,7 +1097,7 @@
         return;
     fi;
     echo "Setting core limit for $PID to infinite...";
-    adb shell /system/bin/ulimit -p $PID -c unlimited
+    adb shell /system/bin/ulimit -P $PID -c unlimited
 }
 
 # core - send SIGV and pull the core for process
diff --git a/orchestrator/README b/orchestrator/README
index ce6f5c3..9a1e302 100644
--- a/orchestrator/README
+++ b/orchestrator/README
@@ -2,6 +2,7 @@
 
 from the root of the workspace
 
-ln -fs ../build/build/orchestrator/inner_build/inner_build_demo.py master/.inner_build
-ln -fs ../build/build/orchestrator/inner_build/inner_build_demo.py sc-mainline-prod/.inner_build
+multitree_lunch build/build/make/orchestrator/test_workspace/combo.mcombo eng
+
+rm -rf out && multitree_build && echo "==== Files ====" && find out -type f
 
diff --git a/orchestrator/core/api_assembly.py b/orchestrator/core/api_assembly.py
index d87a83d..d7abef7 100644
--- a/orchestrator/core/api_assembly.py
+++ b/orchestrator/core/api_assembly.py
@@ -14,11 +14,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import collections
 import json
 import os
+import sys
 
-def assemble_apis(inner_trees):
+import api_assembly_cc
+import ninja_tools
 
+
+ContributionData = collections.namedtuple("ContributionData", ("inner_tree", "json_data"))
+
+def assemble_apis(context, inner_trees):
     # Find all of the contributions from the inner tree
     contribution_files_dict = inner_trees.for_each_tree(api_contribution_files_for_inner_tree)
 
@@ -27,25 +34,33 @@
     contributions = []
     for tree_key, filenames in contribution_files_dict.items():
         for filename in filenames:
-            contribution_data = load_contribution_file(filename)
-            if not contribution_data:
+            json_data = load_contribution_file(context, filename)
+            if not json_data:
                 continue
             # TODO: Validate the configs, especially that the domains match what we asked for
             # from the lunch config.
-            contributions.append(contribution_data)
+            contributions.append(ContributionData(inner_trees.get(tree_key), json_data))
 
     # Group contributions by language and API surface
     stub_libraries = collate_contributions(contributions)
 
-    # Iterate through all of the stub libraries and generate rules to assemble them
-    # and Android.bp/BUILD files to make those available to inner trees.
-    # TODO: Parallelize? Skip unnecessary work?
-    ninja_file = NinjaFile() # TODO: parameters?
-    build_file = BuildFile() # TODO: parameters?
-    for stub_library in stub_libraries:
-        STUB_LANGUAGE_HANDLERS[stub_library.language](ninja_file, build_file, stub_library)
+    # Initialize the ninja file writer
+    with open(context.out.api_ninja_file(), "w") as ninja_file:
+        ninja = ninja_tools.Ninja(context, ninja_file)
 
-    # TODO: Handle host_executables separately or as a StubLibrary language?
+        # Initialize the build file writer
+        build_file = BuildFile() # TODO: parameters?
+
+        # Iterate through all of the stub libraries and generate rules to assemble them
+        # and Android.bp/BUILD files to make those available to inner trees.
+        # TODO: Parallelize? Skip unnecessary work?
+        for stub_library in stub_libraries:
+            STUB_LANGUAGE_HANDLERS[stub_library.language](context, ninja, build_file, stub_library)
+
+        # TODO: Handle host_executables separately or as a StubLibrary language?
+
+        # Finish writing the ninja file
+        ninja.write()
 
 
 def api_contribution_files_for_inner_tree(tree_key, inner_tree, cookie):
@@ -61,18 +76,20 @@
     return result
 
 
-def load_contribution_file(filename):
+def load_contribution_file(context, filename):
     "Load and return the API contribution at filename. On error report error and return None."
     with open(filename) as f:
         try:
             return json.load(f)
         except json.decoder.JSONDecodeError as ex:
             # TODO: Error reporting
+            context.errors.error(ex.msg, filename, ex.lineno, ex.colno)
             raise ex
 
 
 class StubLibraryContribution(object):
-    def __init__(self, api_domain, library_contribution):
+    def __init__(self, inner_tree, api_domain, library_contribution):
+        self.inner_tree = inner_tree
         self.api_domain = api_domain
         self.library_contribution = library_contribution
 
@@ -96,54 +113,42 @@
     grouped = {}
     for contribution in contributions:
         for language in STUB_LANGUAGE_HANDLERS.keys():
-            for library in contribution.get(language, []):
-                key = (language, contribution["name"], contribution["version"], library["name"])
+            for library in contribution.json_data.get(language, []):
+                key = (language, contribution.json_data["name"],
+                        contribution.json_data["version"], library["name"])
                 stub_library = grouped.get(key)
                 if not stub_library:
-                    stub_library = StubLibrary(language, contribution["name"],
-                            contribution["version"], library["name"])
+                    stub_library = StubLibrary(language, contribution.json_data["name"],
+                            contribution.json_data["version"], library["name"])
                     grouped[key] = stub_library
-                stub_library.add_contribution(StubLibraryContribution(
-                        contribution["api_domain"], library))
+                stub_library.add_contribution(StubLibraryContribution(contribution.inner_tree,
+                        contribution.json_data["api_domain"], library))
     return list(grouped.values())
 
 
-def assemble_cc_api_library(ninja_file, build_file, stub_library):
-    print("assembling cc_api_library %s-%s %s from:" % (stub_library.api_surface, stub_library.api_surface_version,
-            stub_library.name))
+def assemble_java_api_library(context, ninja, build_file, stub_library):
+    print("assembling java_api_library %s-%s %s from:" % (stub_library.api_surface,
+            stub_library.api_surface_version, stub_library.name))
     for contrib in stub_library.contributions:
         print("  %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
     # TODO: Implement me
 
 
-def assemble_java_api_library(ninja_file, build_file, stub_library):
-    print("assembling java_api_library %s-%s %s from:" % (stub_library.api_surface, stub_library.api_surface_version,
-            stub_library.name))
-    for contrib in stub_library.contributions:
-        print("  %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
-    # TODO: Implement me
-
-
-def assemble_resource_api_library(ninja_file, build_file, stub_library):
-    print("assembling resource_api_library %s-%s %s from:" % (stub_library.api_surface, stub_library.api_surface_version,
-            stub_library.name))
+def assemble_resource_api_library(context, ninja, build_file, stub_library):
+    print("assembling resource_api_library %s-%s %s from:" % (stub_library.api_surface,
+            stub_library.api_surface_version, stub_library.name))
     for contrib in stub_library.contributions:
         print("  %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
     # TODO: Implement me
 
 
 STUB_LANGUAGE_HANDLERS = {
-    "cc_libraries": assemble_cc_api_library,
+    "cc_libraries": api_assembly_cc.assemble_cc_api_library,
     "java_libraries": assemble_java_api_library,
     "resource_libraries": assemble_resource_api_library,
 }
 
 
-class NinjaFile(object):
-    "Generator for build actions and dependencies."
-    pass
-
-
 class BuildFile(object):
     "Abstract generator for Android.bp files and BUILD files."
     pass
diff --git a/orchestrator/core/api_assembly_cc.py b/orchestrator/core/api_assembly_cc.py
new file mode 100644
index 0000000..ca9b2a4
--- /dev/null
+++ b/orchestrator/core/api_assembly_cc.py
@@ -0,0 +1,48 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+def assemble_cc_api_library(context, ninja, build_file, stub_library):
+    staging_dir = context.out.api_library_dir(stub_library.api_surface,
+            stub_library.api_surface_version, stub_library.name)
+    work_dir = context.out.api_library_work_dir(stub_library.api_surface,
+            stub_library.api_surface_version, stub_library.name)
+
+    # Generate rules to copy headers
+    includes = []
+    include_dir = os.path.join(staging_dir, "include")
+    for contrib in stub_library.contributions:
+        for headers in contrib.library_contribution["headers"]:
+            root = headers["root"]
+            for file in headers["files"]:
+                # TODO: Deal with collisions of the same name from multiple contributions
+                include = os.path.join(include_dir, file)
+                ninja.add_copy_file(include, os.path.join(contrib.inner_tree.root, root, file))
+                includes.append(include)
+
+    # Generate rule to run ndkstubgen
+
+
+    # Generate rule to compile stubs to library
+
+    # Generate phony rule to build the library
+    # TODO: This name probably conflicts with something
+    ninja.add_phony("-".join((stub_library.api_surface, str(stub_library.api_surface_version),
+            stub_library.name)), includes)
+
+    # Generate build files
+
diff --git a/orchestrator/core/final_packaging.py b/orchestrator/core/final_packaging.py
new file mode 100644
index 0000000..03fe890
--- /dev/null
+++ b/orchestrator/core/final_packaging.py
@@ -0,0 +1,117 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+import sys
+
+import ninja_tools
+import ninja_syntax # Has to be after ninja_tools because of the path hack
+
+def final_packaging(context, inner_trees):
+    """Pull together all of the previously defined rules into the final build stems."""
+
+    with open(context.out.outer_ninja_file(), "w") as ninja_file:
+        ninja = ninja_tools.Ninja(context, ninja_file)
+
+        # Add the api surfaces file
+        ninja.add_subninja(ninja_syntax.Subninja(context.out.api_ninja_file(), chDir=None))
+
+        # For each inner tree
+        for tree in inner_trees.keys():
+            # TODO: Verify that inner_tree.ninja was generated
+
+            # Read and verify file
+            build_targets = read_build_targets_json(context, tree)
+            if not build_targets:
+                continue
+
+            # Generate the ninja and build files for this inner tree
+            generate_cross_domain_build_rules(context, ninja, tree, build_targets)
+
+        # Finish writing the ninja file
+        ninja.write()
+
+
+def read_build_targets_json(context, tree):
+    """Read and validate the build_targets.json file for the given tree."""
+    try:
+        f = open(tree.out.build_targets_file())
+    except FileNotFoundError:
+        # It's allowed not to have any artifacts (e.g. if a tree is a light tree with only APIs)
+        return None
+
+    data = None
+    with f:
+        try:
+            data = json.load(f)
+        except json.decoder.JSONDecodeError as ex:
+            sys.stderr.write("Error parsing file: %s\n" % tree.out.build_targets_file())
+            # TODO: Error reporting
+            raise ex
+
+    # TODO: Better error handling
+    # TODO: Validate json schema
+    return data
+
+
+def generate_cross_domain_build_rules(context, ninja, tree, build_targets):
+    "Generate the ninja and build files for the inner tree."
+    # Include the inner tree's inner_tree.ninja
+    ninja.add_subninja(ninja_syntax.Subninja(tree.out.main_ninja_file(), chDir=tree.root))
+
+    # Generate module rules and files
+    for module in build_targets.get("modules", []):
+        generate_shared_module(context, ninja, tree, module)
+
+    # Generate staging rules
+    staging_dir = context.out.staging_dir()
+    for staged in build_targets.get("staging", []):
+        # TODO: Enforce that dest isn't in disallowed subdir of out or absolute
+        dest = staged["dest"]
+        dest = os.path.join(staging_dir, dest)
+        if "src" in staged and "obj" in staged:
+            context.errors.error("Can't have both \"src\" and \"obj\" tags in \"staging\" entry."
+                    ) # TODO: Filename and line if possible
+        if "src" in staged:
+            ninja.add_copy_file(dest, os.path.join(tree.root, staged["src"]))
+        elif "obj" in staged:
+            ninja.add_copy_file(dest, os.path.join(tree.out.root(), staged["obj"]))
+        ninja.add_global_phony("staging", [dest])
+
+    # Generate dist rules
+    dist_dir = context.out.dist_dir()
+    for disted in build_targets.get("dist", []):
+        # TODO: Enforce that dest absolute
+        dest = disted["dest"]
+        dest = os.path.join(dist_dir, dest)
+        ninja.add_copy_file(dest, os.path.join(tree.root, disted["src"]))
+        ninja.add_global_phony("dist", [dest])
+
+
+def generate_shared_module(context, ninja, tree, module):
+    """Generate ninja rules for the given build_targets.json defined module."""
+    module_name = module["name"]
+    module_type = module["type"]
+    share_dir = context.out.module_share_dir(module_type, module_name)
+    src_file = os.path.join(tree.root, module["file"])
+
+    if module_type == "apex":
+        ninja.add_copy_file(os.path.join(share_dir, module_name + ".apex"), src_file)
+        # TODO: Generate build file
+
+    else:
+        # TODO: Better error handling
+        raise Exception("Invalid module type: %s" % module)
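Reading `generate_cross_domain_build_rules` back, a tree's build_targets.json is expected to look roughly like the following (a hedged reconstruction; the source itself still has TODOs for schema validation, and all names here are invented):

```python
# Illustrative build_targets.json contents, expressed as a Python dict.
build_targets = {
    "modules": [
        # "apex" is the only type generate_shared_module accepts today.
        {"name": "com.example.hypothetical", "type": "apex",
         "file": "out/dist/com.example.hypothetical.apex"},
    ],
    "staging": [
        # exactly one of "src" (tree source) or "obj" (tree out dir) per entry
        {"dest": "system/etc/example.rc", "src": "device/example/example.rc"},
        {"dest": "system/lib64/libexample.so", "obj": "obj/libexample.so"},
    ],
    "dist": [
        {"dest": "logs/build.trace.gz", "src": "out/build.trace.gz"},
    ],
}
```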
diff --git a/orchestrator/core/inner_tree.py b/orchestrator/core/inner_tree.py
index cdb0d85..d348ee7 100644
--- a/orchestrator/core/inner_tree.py
+++ b/orchestrator/core/inner_tree.py
@@ -36,33 +36,53 @@
     def __hash__(self):
         return hash((self.root, self.product))
 
+    def _cmp(self, other):
+        if self.root < other.root:
+            return -1
+        if self.root > other.root:
+            return 1
+        if self.product == other.product:
+            return 0
+        if self.product is None:
+            return -1
+        if other.product is None:
+            return 1
+        if self.product < other.product:
+            return -1
+        return 1
+
     def __eq__(self, other):
-        return (self.root == other.root and self.product == other.product)
+        return self._cmp(other) == 0
 
     def __ne__(self, other):
-        return not self.__eq__(other)
+        return self._cmp(other) != 0
 
     def __lt__(self, other):
-        return (self.root, self.product) < (other.root, other.product)
+        return self._cmp(other) < 0
 
     def __le__(self, other):
-        return (self.root, self.product) <= (other.root, other.product)
+        return self._cmp(other) <= 0
 
     def __gt__(self, other):
-        return (self.root, self.product) > (other.root, other.product)
+        return self._cmp(other) > 0
 
     def __ge__(self, other):
-        return (self.root, self.product) >= (other.root, other.product)
+        return self._cmp(other) >= 0
 
 
 class InnerTree(object):
-    def __init__(self, root, product):
+    def __init__(self, context, root, product):
         """Initialize with the inner tree root (relative to the workspace root)"""
         self.root = root
         self.product = product
         self.domains = {}
         # TODO: Base directory on OUT_DIR
-        self.out = OutDirLayout(os.path.join("out", "trees", root))
+        out_root = context.out.inner_tree_dir(root)
+        if product:
+            out_root += "_" + product
+        else:
+            out_root += "_unbundled"
+        self.out = OutDirLayout(out_root)
 
     def __str__(self):
         return "InnerTree(root=%s product=%s domains=[%s])" % (enquote(self.root),
@@ -134,7 +154,19 @@
         return result
 
 
+    def get(self, tree_key):
+        """Get an inner tree for tree_key"""
+        return self.trees.get(tree_key)
+
+    def keys(self):
+        "Get the keys for the inner trees in name order."
+        return [self.trees[k] for k in sorted(self.trees.keys())]
+
+
 class OutDirLayout(object):
+    """Encapsulates the logic about the layout of the inner tree out directories.
+    See also context.OutDir for outer tree out dir contents."""
+
     def __init__(self, root):
         "Initialize with the root of the OUT_DIR for the inner tree."
         self._root = root
@@ -148,6 +180,12 @@
     def api_contributions_dir(self):
         return os.path.join(self._root, "api_contributions")
 
+    def build_targets_file(self):
+        return os.path.join(self._root, "build_targets.json")
+
+    def main_ninja_file(self):
+        return os.path.join(self._root, "inner_tree.ninja")
+
 
 def enquote(s):
     return "None" if s is None else "\"%s\"" % s
diff --git a/orchestrator/core/ninja_runner.py b/orchestrator/core/ninja_runner.py
new file mode 100644
index 0000000..ab81d66
--- /dev/null
+++ b/orchestrator/core/ninja_runner.py
@@ -0,0 +1,37 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+import sys
+
+def run_ninja(context, targets):
+    """Run ninja.
+    """
+
+    # Construct the command
+    cmd = [
+            context.tools.ninja(),
+            "-f",
+            context.out.outer_ninja_file(),
+        ] + targets
+
+    # Run the command
+    process = subprocess.run(cmd, shell=False)
+
+    # TODO: Probably want better handling of inner tree failures
+    if process.returncode:
+        sys.stderr.write("Build error in outer tree.\nstopping multitree build.\n")
+        sys.exit(1)
+
diff --git a/orchestrator/core/ninja_tools.py b/orchestrator/core/ninja_tools.py
new file mode 100644
index 0000000..16101ea
--- /dev/null
+++ b/orchestrator/core/ninja_tools.py
@@ -0,0 +1,59 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+
+# Workaround for python include path
+_ninja_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), "..", "ninja"))
+if _ninja_dir not in sys.path:
+    sys.path.append(_ninja_dir)
+import ninja_writer
+from ninja_syntax import Variable, BuildAction, Rule, Pool, Subninja, Line
+
+
+class Ninja(ninja_writer.Writer):
+    """Some higher level constructs on top of raw ninja writing.
+    TODO: Not sure where these should be."""
+    def __init__(self, context, file):
+        super(Ninja, self).__init__(file)
+        self._context = context
+        self._did_copy_file = False
+        self._phonies = {}
+
+    def add_copy_file(self, copy_to, copy_from):
+        if not self._did_copy_file:
+            self._did_copy_file = True
+            rule = Rule("copy_file")
+            rule.add_variable("command", "mkdir -p ${out_dir} && " + self._context.tools.acp()
+                    + " -f ${in} ${out}")
+            self.add_rule(rule)
+        build_action = BuildAction(copy_to, "copy_file", inputs=[copy_from,],
+                implicits=[self._context.tools.acp()])
+        build_action.add_variable("out_dir", os.path.dirname(copy_to))
+        self.add_build_action(build_action)
+
+    def add_global_phony(self, name, deps):
+        """Add a phony target where there are multiple places that will want to add to
+        the same phony. If you can, to save memory, use add_phony instead of this function."""
+        if type(deps) not in (list, tuple):
+            raise Exception("Assertion failed: bad type of deps: %s" % type(deps))
+        self._phonies.setdefault(name, []).extend(deps)
+
+    def write(self):
+        for phony, deps in self._phonies.items():
+            self.add_phony(phony, deps)
+        super(Ninja, self).write()
+
+
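Usage of this wrapper, roughly as the new api_assembly and final_packaging code drives it. A hedged sketch with a stubbed context (the stub classes and paths are invented; `Ninja` is the class defined above):

```python
import io

class _StubTools:
    def acp(self):
        # Assumed path to the acp host tool; the real value comes from context.tools.
        return "prebuilts/build-tools/linux-x86/bin/acp"

class _StubContext:
    tools = _StubTools()

buf = io.StringIO()
ninja = Ninja(_StubContext(), buf)  # Ninja as defined in ninja_tools.py above
ninja.add_copy_file("out/api/include/foo.h", "tree1/include/foo.h")
ninja.add_global_phony("staging", ["out/staging/system/etc/foo.rc"])
ninja.add_global_phony("staging", ["out/staging/system/lib64/libfoo.so"])
ninja.write()  # emits the copy_file rule, build actions, then one merged phony
print(buf.getvalue())
```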
diff --git a/orchestrator/core/orchestrator.py b/orchestrator/core/orchestrator.py
index e99c956..508f73a 100755
--- a/orchestrator/core/orchestrator.py
+++ b/orchestrator/core/orchestrator.py
@@ -22,9 +22,13 @@
 import api_assembly
 import api_domain
 import api_export
+import final_packaging
 import inner_tree
+import tree_analysis
 import interrogate
 import lunch
+import ninja_runner
+import utils
 
 EXIT_STATUS_OK = 0
 EXIT_STATUS_ERROR = 1
@@ -33,14 +37,14 @@
 API_DOMAIN_VENDOR = "vendor"
 API_DOMAIN_MODULE = "module"
 
-def process_config(lunch_config):
+def process_config(context, lunch_config):
     """Returns a InnerTrees object based on the configuration requested in the lunch config."""
     def add(domain_name, tree_root, product):
         tree_key = inner_tree.InnerTreeKey(tree_root, product)
         if tree_key in trees:
             tree = trees[tree_key]
         else:
-            tree = inner_tree.InnerTree(tree_root, product)
+            tree = inner_tree.InnerTree(context, tree_root, product)
             trees[tree_key] = tree
         domain = api_domain.ApiDomain(domain_name, tree, product)
         domains[domain_name] = domain
@@ -64,11 +68,10 @@
 
 
 def build():
-    #
-    # Load lunch combo
-    #
+    # Choose the out directory, set up error handling, etc.
+    context = utils.Context(utils.choose_out_dir(), utils.Errors(sys.stderr))
 
-    # Read the config file
+    # Read the lunch config file
     try:
         config_file, config, variant = lunch.load_current_config()
     except lunch.ConfigException as ex:
@@ -77,40 +80,33 @@
     sys.stdout.write(lunch.make_config_header(config_file, config, variant))
 
     # Construct the trees and domains dicts
-    inner_trees = process_config(config)
+    inner_trees = process_config(context, config)
 
-    #
     # 1. Interrogate the trees
-    #
     inner_trees.for_each_tree(interrogate.interrogate_tree)
     # TODO: Detect bazel-only mode
 
-    #
     # 2a. API Export
-    #
     inner_trees.for_each_tree(api_export.export_apis_from_tree)
 
-    #
     # 2b. API Surface Assembly
-    #
-    api_assembly.assemble_apis(inner_trees)
+    api_assembly.assemble_apis(context, inner_trees)
 
-    #
-    # 3a. API Domain Analysis
-    #
+    # 3a. Inner tree analysis
+    tree_analysis.analyze_trees(context, inner_trees)
 
-    #
     # 3b. Final Packaging Rules
-    #
+    final_packaging.final_packaging(context, inner_trees)
 
-    #
     # 4. Build Execution
-    #
+    # TODO: Decide what we want the UX for selecting targets to be across
+    # branches... since there are very likely to be conflicting soong short
+    # names.
+    print("Running ninja...")
+    targets = ["staging", "system"]
+    ninja_runner.run_ninja(context, targets)
 
-
-    #
     # Success!
-    #
     return EXIT_STATUS_OK
 
 def main(argv):
diff --git a/orchestrator/core/tree_analysis.py b/orchestrator/core/tree_analysis.py
new file mode 100644
index 0000000..052cad6
--- /dev/null
+++ b/orchestrator/core/tree_analysis.py
@@ -0,0 +1,20 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def analyze_trees(context, inner_trees):
+    inner_trees.for_each_tree(run_analysis)
+
+def run_analysis(tree_key, inner_tree, cookie):
+    inner_tree.invoke(["analyze"])
diff --git a/orchestrator/core/utils.py b/orchestrator/core/utils.py
new file mode 100644
index 0000000..41310e0
--- /dev/null
+++ b/orchestrator/core/utils.py
@@ -0,0 +1,141 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import platform
+
+class Context(object):
+    """Mockable container for global state."""
+    def __init__(self, out_root, errors):
+        self.out = OutDir(out_root)
+        self.errors = errors
+        self.tools = HostTools()
+
+class TestContext(Context):
+    "Context for testing. The real Context is manually constructed in orchestrator.py."
+
+    def __init__(self, test_work_dir, test_name):
+        super(TestContext, self).__init__(os.path.join(test_work_dir, test_name),
+                Errors(None))
+
+
+class OutDir(object):
+    """Encapsulates the logic about the out directory at the outer-tree level.
+    See also inner_tree.OutDirLayout for inner tree out dir contents."""
+
+    def __init__(self, root):
+        "Initialize with the root of the OUT_DIR for the outer tree."
+        self._out_root = root
+        self._intermediates = "intermediates"
+
+    def root(self):
+        return self._out_root
+
+    def inner_tree_dir(self, tree_root):
+        """Root directory for inner tree inside the out dir."""
+        return os.path.join(self._out_root, "trees", tree_root)
+
+    def api_ninja_file(self):
+        """The ninja file that assembles API surfaces."""
+        return os.path.join(self._out_root, "api_surfaces.ninja")
+
+    def api_library_dir(self, surface, version, library):
+        """Directory for all the contents of a library inside an API surface, including
+        the build files.  Any intermediates should go in api_library_work_dir."""
+        return os.path.join(self._out_root, "api_surfaces", surface, str(version), library)
+
+    def api_library_work_dir(self, surface, version, library):
+        """Intermediates / scratch directory for library inside an API surface."""
+        return os.path.join(self._out_root, self._intermediates, "api_surfaces", surface,
+                str(version), library)
+
+    def outer_ninja_file(self):
+        return os.path.join(self._out_root, "multitree.ninja")
+
+    def module_share_dir(self, module_type, module_name):
+        return os.path.join(self._out_root, "shared", module_type, module_name)
+
+    def staging_dir(self):
+        return os.path.join(self._out_root, "staging")
+
+    def dist_dir(self):
+        "The DIST_DIR provided or out/dist" # TODO: Look at DIST_DIR
+        return os.path.join(self._out_root, "dist")
+
+class Errors(object):
+    """Class for reporting and tracking errors."""
+    def __init__(self, stream):
+        """Initialize Error reporter with a file-like object."""
+        self._stream = stream
+        self._all = []
+
+    def error(self, message, file=None, line=None, col=None):
+        """Record the error message."""
+        s = ""
+        if file:
+            s += str(file)
+            s += ":"
+        if line:
+            s += str(line)
+            s += ":"
+        if col:
+            s += str(col)
+            s += ":"
+        if s:
+            s += " "
+        s += str(message)
+        if s[-1] != "\n":
+            s += "\n"
+        self._all.append(s)
+        if self._stream:
+            self._stream.write(s)
+
+    def had_error(self):
+        """Return if there were any errors reported."""
+        return len(self._all) != 0
+
+    def get_errors(self):
+        """Get all errors that were reported."""
+        return self._all
+
+
+class HostTools(object):
+    def __init__(self):
+        if platform.system() == "Linux":
+            self._arch = "linux-x86"
+        else:
+            raise Exception("Orchestrator running on an unknown system: %s" % platform.system())
+
+        # Some of these are called a lot, so pre-compute the strings to save memory
+        self._prebuilts = os.path.join("build", "prebuilts", "build-tools", self._arch, "bin")
+        self._acp = os.path.join(self._prebuilts, "acp")
+        self._ninja = os.path.join(self._prebuilts, "ninja")
+
+    def acp(self):
+        return self._acp
+
+    def ninja(self):
+        return self._ninja
+
+
+def choose_out_dir():
+    """Get the root of the out dir, either from the environment or by picking
+    a default."""
+    result = os.environ.get("OUT_DIR")
+    if result:
+        return result
+    else:
+        return "out"
diff --git a/orchestrator/inner_build/common.py b/orchestrator/inner_build/common.py
index 6919e04..382844b 100644
--- a/orchestrator/inner_build/common.py
+++ b/orchestrator/inner_build/common.py
@@ -40,6 +40,10 @@
     export_parser = subparsers.add_parser("export_api_contributions",
             help="export the API contributions of this inner tree")
 
+    # create the parser for the "analyze" command
+    analyze_parser = subparsers.add_parser("analyze",
+            help="main build analysis for this inner tree")
+
     # Parse the arguments
     return parser.parse_args(argv)
 
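How the parsed subcommand reaches a method like analyze() is not part of this hunk; a plausible sketch, assuming the subparsers were created with add_subparsers(dest="command") and that the inner-build entry point dispatches by name:

    import sys

    class InnerBuildDemoSketch:
        def Run(self, argv):
            args = parse_args(argv[1:])
            # Hypothetical getattr dispatch: "analyze" -> self.analyze(args).
            handler = getattr(self, args.command, None)
            if handler is None:
                sys.stderr.write("unknown command: %s\n" % args.command)
                return 1
            return handler(args) or 0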
diff --git a/orchestrator/inner_build/inner_build_demo.py b/orchestrator/inner_build/inner_build_demo.py
index 9aafb4d..264739b 100755
--- a/orchestrator/inner_build/inner_build_demo.py
+++ b/orchestrator/inner_build/inner_build_demo.py
@@ -44,93 +44,60 @@
         mkdirs(contributions_dir)
 
         if "system" in args.api_domain:
-            with open(os.path.join(contributions_dir, "public_api-1.json"), "w") as f:
+            with open(os.path.join(contributions_dir, "api_a-1.json"), "w") as f:
                 # 'name: android' is android.jar
                 f.write(textwrap.dedent("""\
                 {
-                    "name": "public_api",
+                    "name": "api_a",
                     "version": 1,
                     "api_domain": "system",
                     "cc_libraries": [
                         {
-                            "name": "libhwui",
+                            "name": "libhello1",
                             "headers": [
                                 {
-                                    "root": "frameworks/base/libs/hwui/apex/include",
+                                    "root": "build/build/make/orchestrator/test_workspace/inner_tree_1",
                                     "files": [
-                                        "android/graphics/jni_runtime.h",
-                                        "android/graphics/paint.h",
-                                        "android/graphics/matrix.h",
-                                        "android/graphics/canvas.h",
-                                        "android/graphics/renderthread.h",
-                                        "android/graphics/bitmap.h",
-                                        "android/graphics/region.h"
+                                        "hello1.h"
                                     ]
                                 }
                             ],
                             "api": [
-                                "frameworks/base/libs/hwui/libhwui.map.txt"
-                            ]
-                        }
-                    ],
-                    "java_libraries": [
-                        {
-                            "name": "android",
-                            "api": [
-                                "frameworks/base/core/api/current.txt"
-                            ]
-                        }
-                    ],
-                    "resource_libraries": [
-                        {
-                            "name": "android",
-                            "api": "frameworks/base/core/res/res/values/public.xml"
-                        }
-                    ],
-                    "host_executables": [
-                        {
-                            "name": "aapt2",
-                            "binary": "out/host/bin/aapt2",
-                            "runfiles": [
-                                "../lib/todo.so"
-                            ]
-                        }
-                    ]
-                }"""))
-        elif "com.android.bionic" in args.api_domain:
-            with open(os.path.join(contributions_dir, "public_api-1.json"), "w") as f:
-                # 'name: android' is android.jar
-                f.write(textwrap.dedent("""\
-                {
-                    "name": "public_api",
-                    "version": 1,
-                    "api_domain": "system",
-                    "cc_libraries": [
-                        {
-                            "name": "libc",
-                            "headers": [
-                                {
-                                    "root": "bionic/libc/include",
-                                    "files": [
-                                        "stdio.h",
-                                        "sys/klog.h"
-                                    ]
-                                }
-                            ],
-                            "api": "bionic/libc/libc.map.txt"
-                        }
-                    ],
-                    "java_libraries": [
-                        {
-                            "name": "android",
-                            "api": [
-                                "frameworks/base/libs/hwui/api/current.txt"
+                                "build/build/make/orchestrator/test_workspace/inner_tree_1/libhello1"
                             ]
                         }
                     ]
                 }"""))
 
-
+    def analyze(self, args):
+        if "system" in args.api_domain:
+            # Nothing to analyze in this demo; just write a fake inner_tree.ninja
+            # standing in for what a real inner tree's analysis would generate.
+            with open(os.path.join(args.out_dir, "inner_tree.ninja"), "w") as f:
+                # TODO: Note that this uses paths relative to the workspace, not the inner tree,
+                # for demo purposes until we get the ninja chdir change in.
+                f.write(textwrap.dedent("""\
+                    rule compile_c
+                        command = mkdir -p ${out_dir} && g++ -c ${cflags} -o ${out} ${in}
+                    rule link_so
+                        command = mkdir -p ${out_dir} && gcc -shared -o ${out} ${in}
+                    build %(OUT_DIR)s/libhello1/hello1.o: compile_c build/build/make/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
+                        out_dir = %(OUT_DIR)s/libhello1
+                        cflags = -Ibuild/build/make/orchestrator/test_workspace/inner_tree_1/libhello1/include
+                    build %(OUT_DIR)s/libhello1/libhello1.so: link_so %(OUT_DIR)s/libhello1/hello1.o
+                        out_dir = %(OUT_DIR)s/libhello1
+                    build system: phony %(OUT_DIR)s/libhello1/libhello1.so
+                """ % { "OUT_DIR": args.out_dir }))
+            with open(os.path.join(args.out_dir, "build_targets.json"), "w") as f:
+                f.write(textwrap.dedent("""\
+                {
+                    "staging": [
+                        {
+                            "dest": "staging/system/lib/libhello1.so",
+                            "obj": "libhello1/libhello1.so"
+                        }
+                    ]
+                }""" % { "OUT_DIR": args.out_dir }))
 
 def main(argv):
     return InnerBuildSoong().Run(argv)
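On the orchestrator side, final_packaging presumably consumes the build_targets.json written above; a hypothetical sketch of reading its "staging" entries (the dest/obj keys match the demo output, everything else is illustrative):

    import json
    import os

    def read_staging_entries(out_dir):
        """Yield (dest, obj) pairs from an inner tree's build_targets.json."""
        with open(os.path.join(out_dir, "build_targets.json")) as f:
            targets = json.load(f)
        for entry in targets.get("staging", []):
            yield entry["dest"], entry["obj"]

    for dest, obj in read_staging_entries("out/trees/inner_tree_1"):
        print("stage %s -> %s" % (obj, dest))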
diff --git a/orchestrator/ninja/ninja_syntax.py b/orchestrator/ninja/ninja_syntax.py
index 328c99c..df97b68 100644
--- a/orchestrator/ninja/ninja_syntax.py
+++ b/orchestrator/ninja/ninja_syntax.py
@@ -159,7 +159,7 @@
     self.chDir = chDir
 
   # TODO(spandandas): Update the syntax when aosp/2064612 lands
-  def stream() -> Iterator[str]:
+  def stream(self) -> Iterator[str]:
     yield f"subninja {self.subninja}"
 
 class Line(Node):
diff --git a/orchestrator/ninja/ninja_writer.py b/orchestrator/ninja/ninja_writer.py
index e3070bb..9e80b4b 100644
--- a/orchestrator/ninja/ninja_writer.py
+++ b/orchestrator/ninja/ninja_writer.py
@@ -49,6 +49,10 @@
   def add_subninja(self, subninja: Subninja):
     self.nodes.append(subninja)
 
+  def add_phony(self, name, deps):
+    build_action = BuildAction(name, "phony", inputs=deps)
+    self.add_build_action(build_action)
+
   def write(self):
     for node in self.nodes:
       for line in node.stream():
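For reference, a sketch of what add_phony emits, assuming Writer is constructed with a file-like object and that BuildAction streams standard ninja syntax for phony edges:

    import io

    buf = io.StringIO()
    w = Writer(buf)
    w.add_phony("system", ["out/libhello1/libhello1.so"])
    w.write()
    # Expected output, modulo whitespace:
    #   build system: phony out/libhello1/libhello1.so
    print(buf.getvalue())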
diff --git a/orchestrator/test_workspace/combo.mcombo b/orchestrator/test_workspace/combo.mcombo
new file mode 100644
index 0000000..8200dc0
--- /dev/null
+++ b/orchestrator/test_workspace/combo.mcombo
@@ -0,0 +1,17 @@
+{
+    "lunchable": true,
+    "system": {
+        "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1",
+        "product": "test_product1"
+    },
+    "vendor": {
+        "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1",
+        "product": "test_product2"
+    },
+    "modules": {
+        "module_1": {
+            "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1"
+        }
+    }
+}
+
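A minimal sketch of reading this combo file (the real parsing lives in lunch.py; only the keys visible above are assumed):

    import json

    with open("build/build/make/orchestrator/test_workspace/combo.mcombo") as f:
        combo = json.load(f)

    # system and vendor map directly to (tree, product) pairs...
    for domain in ("system", "vendor"):
        cfg = combo[domain]
        print(domain, cfg["tree"], cfg["product"])

    # ...while modules nest one level deeper and carry no product here.
    for name, cfg in combo.get("modules", {}).items():
        print("module", name, cfg["tree"])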
diff --git a/orchestrator/test_workspace/inner_tree_1/.inner_build b/orchestrator/test_workspace/inner_tree_1/.inner_build
new file mode 120000
index 0000000..d8f235f
--- /dev/null
+++ b/orchestrator/test_workspace/inner_tree_1/.inner_build
@@ -0,0 +1 @@
+../../inner_build/inner_build_demo.py
\ No newline at end of file
diff --git a/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c b/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
new file mode 100644
index 0000000..1415082
--- /dev/null
+++ b/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
@@ -0,0 +1,8 @@
+#include <stdio.h>
+
+#include "hello1.h"
+
+void hello1(void) {
+    printf("hello1");
+}
+
diff --git a/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h b/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h
new file mode 100644
index 0000000..0309c1c
--- /dev/null
+++ b/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h
@@ -0,0 +1,8 @@
+#pragma once
+
+extern "C" void hello1(void);
+
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 05ddfe5..2c2b5a9 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -24,7 +24,7 @@
     android.hidl.manager-V1.0-java \
     android.hidl.memory@1.0-impl \
     android.hidl.memory@1.0-impl.vendor \
-    android.system.suspend@1.0-service \
+    android.system.suspend-service \
     android.test.base \
     android.test.mock \
     android.test.runner \
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 851a2cb..5695803 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -76,7 +76,11 @@
     com.android.media:service-media-s \
     com.android.permission:service-permission \
 
-PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION += art/build/boot/boot-image-profile.txt
+# Use $(wildcard) to avoid referencing the profile in thin manifests that don't have the
+# art project.
+ifneq (,$(wildcard art))
+  PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION += art/build/boot/boot-image-profile.txt
+endif
 
 # List of jars on the platform that system_server loads dynamically using separate classloaders.
 # Keep the list sorted library names.
diff --git a/target/product/generic_ramdisk.mk b/target/product/generic_ramdisk.mk
index fb0370e..c7dcd60 100644
--- a/target/product/generic_ramdisk.mk
+++ b/target/product/generic_ramdisk.mk
@@ -22,10 +22,7 @@
 # Ramdisk
 PRODUCT_PACKAGES += \
     init_first_stage \
-    e2fsck.ramdisk \
-    fsck.f2fs.ramdisk \
-    tune2fs.ramdisk \
-    snapuserd.ramdisk \
+    snapuserd_ramdisk \
 
 # Debug ramdisk
 PRODUCT_PACKAGES += \
diff --git a/target/product/sdk.mk b/target/product/sdk.mk
index 96d8cc9..fa7e1ad 100644
--- a/target/product/sdk.mk
+++ b/target/product/sdk.mk
@@ -14,8 +14,11 @@
 # limitations under the License.
 #
 
-# Don't modify this file - It's just an alias!
+# This is a simple product that configures the minimum amount
+# needed to build the SDK (without the emulator).
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_phone_armv7.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/languages_default.mk)
 
 PRODUCT_NAME := sdk
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := mainline_x86
diff --git a/target/product/virtual_ab_ota/android_t_baseline.mk b/target/product/virtual_ab_ota/android_t_baseline.mk
index 18e08e4..716c8e0 100644
--- a/target/product/virtual_ab_ota/android_t_baseline.mk
+++ b/target/product/virtual_ab_ota/android_t_baseline.mk
@@ -38,15 +38,3 @@
 PRODUCT_PACKAGES += \
     snapuserd \
 
-# For dedicated recovery partitions, we need to include snapuserd
-# For GKI devices, BOARD_USES_RECOVERY_AS_BOOT is empty, but
-# so is BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT.
-ifdef BUILDING_RECOVERY_IMAGE
-ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-ifneq ($(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT),true)
-PRODUCT_PACKAGES += \
-    snapuserd.recovery
-endif
-endif
-endif
-
diff --git a/tools/compliance/Android.bp b/tools/compliance/Android.bp
index 7a6c4ba..225f3a5 100644
--- a/tools/compliance/Android.bp
+++ b/tools/compliance/Android.bp
@@ -20,14 +20,20 @@
 blueprint_go_binary {
     name: "compliance_checkshare",
     srcs: ["cmd/checkshare/checkshare.go"],
-    deps: ["compliance-module"],
+    deps: [
+        "compliance-module",
+        "soong-response",
+    ],
     testSrcs: ["cmd/checkshare/checkshare_test.go"],
 }
 
 blueprint_go_binary {
     name: "compliancenotice_bom",
     srcs: ["cmd/bom/bom.go"],
-    deps: ["compliance-module"],
+    deps: [
+        "compliance-module",
+        "soong-response",
+    ],
     testSrcs: ["cmd/bom/bom_test.go"],
 }
 
@@ -44,21 +50,30 @@
 blueprint_go_binary {
     name: "compliance_listshare",
     srcs: ["cmd/listshare/listshare.go"],
-    deps: ["compliance-module"],
+    deps: [
+        "compliance-module",
+        "soong-response",
+    ],
     testSrcs: ["cmd/listshare/listshare_test.go"],
 }
 
 blueprint_go_binary {
     name: "compliance_dumpgraph",
     srcs: ["cmd/dumpgraph/dumpgraph.go"],
-    deps: ["compliance-module"],
+    deps: [
+        "compliance-module",
+        "soong-response",
+    ],
     testSrcs: ["cmd/dumpgraph/dumpgraph_test.go"],
 }
 
 blueprint_go_binary {
     name: "compliance_dumpresolutions",
     srcs: ["cmd/dumpresolutions/dumpresolutions.go"],
-    deps: ["compliance-module"],
+    deps: [
+        "compliance-module",
+        "soong-response",
+    ],
     testSrcs: ["cmd/dumpresolutions/dumpresolutions_test.go"],
 }
 
@@ -68,6 +83,7 @@
     deps: [
         "compliance-module",
         "blueprint-deptools",
+        "soong-response",
     ],
     testSrcs: ["cmd/htmlnotice/htmlnotice_test.go"],
 }
@@ -75,7 +91,10 @@
 blueprint_go_binary {
     name: "compliance_rtrace",
     srcs: ["cmd/rtrace/rtrace.go"],
-    deps: ["compliance-module"],
+    deps: [
+        "compliance-module",
+        "soong-response",
+    ],
     testSrcs: ["cmd/rtrace/rtrace_test.go"],
 }
 
@@ -85,6 +104,7 @@
     deps: [
         "compliance-module",
         "blueprint-deptools",
+        "soong-response",
     ],
     testSrcs: ["cmd/textnotice/textnotice_test.go"],
 }
@@ -95,6 +115,7 @@
     deps: [
         "compliance-module",
         "blueprint-deptools",
+        "soong-response",
     ],
     testSrcs: ["cmd/xmlnotice/xmlnotice_test.go"],
 }
diff --git a/tools/compliance/cmd/bom/bom.go b/tools/compliance/cmd/bom/bom.go
index b613a1f..187f828 100644
--- a/tools/compliance/cmd/bom/bom.go
+++ b/tools/compliance/cmd/bom/bom.go
@@ -24,13 +24,11 @@
 	"path/filepath"
 	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 )
 
 var (
-	outputFile  = flag.String("o", "-", "Where to write the bill of materials. (default stdout)")
-	stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
 	failNoLicenses    = fmt.Errorf("No licenses found")
 )
@@ -55,22 +53,10 @@
 	return installPath
 }
 
-func init() {
-	flag.Usage = func() {
-		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
-
-Outputs a bill of materials. i.e. the list of installed paths.
-
-Options:
-`, filepath.Base(os.Args[0]))
-		flag.PrintDefaults()
-	}
-}
-
 // newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
 	var f multiString
-	flag.Var(&f, name, usage)
+	flags.Var(&f, name, usage)
 	return &f
 }
 
@@ -81,16 +67,52 @@
 func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
 
 func main() {
-	flag.Parse()
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs a bill of materials. i.e. the list of installed paths.
+
+Options:
+`, filepath.Base(os.Args[0]))
+		flags.PrintDefaults()
+	}
+
+	outputFile := flags.String("o", "-", "Where to write the bill of materials. (default stdout)")
+	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
 	if len(*outputFile) == 0 {
-		flag.Usage()
+		flags.Usage()
 		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
 		os.Exit(2)
 	} else {
@@ -118,10 +140,10 @@
 
 	ctx := &context{ofile, os.Stderr, compliance.FS, *stripPrefix}
 
-	err := billOfMaterials(ctx, flag.Args()...)
+	err := billOfMaterials(ctx, flags.Args()...)
 	if err != nil {
 		if err == failNoneRequested {
-			flag.Usage()
+			flags.Usage()
 		}
 		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		os.Exit(1)
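The same @response-file convention recurs in every tool below: an argument beginning with "@" names a file whose whitespace-separated tokens replace it on the command line. A Python sketch of the idea (the real soong response parser also handles quoting, which a plain split() does not):

    import sys

    def expand_rsp_args(argv):
        """Replace each @file argument with the tokens read from that file."""
        expanded = []
        for arg in argv:
            if arg.startswith("@"):
                with open(arg[1:]) as f:
                    expanded.extend(f.read().split())
            else:
                expanded.append(arg)
        return expanded

    print(expand_rsp_args(sys.argv[1:]))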
diff --git a/tools/compliance/cmd/checkshare/checkshare.go b/tools/compliance/cmd/checkshare/checkshare.go
index 73bdcb5..f7b4cd2 100644
--- a/tools/compliance/cmd/checkshare/checkshare.go
+++ b/tools/compliance/cmd/checkshare/checkshare.go
@@ -15,6 +15,7 @@
 package main
 
 import (
+	"bytes"
 	"flag"
 	"fmt"
 	"io"
@@ -22,31 +23,12 @@
 	"os"
 	"path/filepath"
 	"sort"
+	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 )
 
-func init() {
-	flag.Usage = func() {
-		fmt.Fprintf(os.Stderr, `Usage: %s file.meta_lic {file.meta_lic...}
-
-Reports on stderr any targets where policy says that the source both
-must and must not be shared. The error report indicates the target, the
-license condition that has a source privacy policy, and the license
-condition that has a source sharing policy.
-
-Any given target may appear multiple times with different combinations
-of conflicting license conditions.
-
-If all the source code that policy says must be shared may be shared,
-outputs "PASS" to stdout and exits with status 0.
-
-If policy says any source must both be shared and not be shared,
-outputs "FAIL" to stdout and exits with status 1.
-`, filepath.Base(os.Args[0]))
-	}
-}
-
 var (
 	failConflicts     = fmt.Errorf("conflicts")
 	failNoneRequested = fmt.Errorf("\nNo metadata files requested")
@@ -61,24 +43,105 @@
 func (l byError) Less(i, j int) bool { return l[i].Error() < l[j].Error() }
 
 func main() {
-	flag.Parse()
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {-o outfile} file.meta_lic {file.meta_lic...}
+
+Reports on stderr any targets where policy says that the source both
+must and must not be shared. The error report indicates the target, the
+license condition that has a source privacy policy, and the license
+condition that has a source sharing policy.
+
+Any given target may appear multiple times with different combinations
+of conflicting license conditions.
+
+If all the source code that policy says must be shared may be shared,
+outputs "PASS" to stdout and exits with status 0.
+
+If policy says any source must both be shared and not be shared,
+outputs "FAIL" to stdout and exits with status 1.
+`, filepath.Base(os.Args[0]))
+		flags.PrintDefaults()
+	}
+
+	outputFile := flags.String("o", "-", "Where to write the output. (default stdout)")
+
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
-	err := checkShare(os.Stdout, os.Stderr, compliance.FS, flag.Args()...)
+	if len(*outputFile) == 0 {
+		flags.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
+	}
+
+	var ofile io.Writer
+	ofile = os.Stdout
+	var obuf *bytes.Buffer
+	if *outputFile != "-" {
+		obuf = &bytes.Buffer{}
+		ofile = obuf
+	}
+
+	err := checkShare(ofile, os.Stderr, compliance.FS, flags.Args()...)
 	if err != nil {
 		if err != failConflicts {
 			if err == failNoneRequested {
-				flag.Usage()
+				flags.Usage()
 			}
 			fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		}
 		os.Exit(1)
 	}
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+			os.Exit(1)
+		}
+	}
 	os.Exit(0)
 }
 
@@ -92,7 +155,7 @@
 	// Read the license graph from the license metadata files (*.meta_lic).
 	licenseGraph, err := compliance.ReadLicenseGraph(rootFS, stderr, files)
 	if err != nil {
-		return fmt.Errorf("Unable to read license metadata file(s) %q: %w\n", files, err)
+		return fmt.Errorf("Unable to read license metadata file(s) %q from %q: %w\n", files, os.Getenv("PWD"), err)
 	}
 	if licenseGraph == nil {
 		return failNoLicenses
diff --git a/tools/compliance/cmd/dumpgraph/dumpgraph.go b/tools/compliance/cmd/dumpgraph/dumpgraph.go
index 32a3fc4..5625779 100644
--- a/tools/compliance/cmd/dumpgraph/dumpgraph.go
+++ b/tools/compliance/cmd/dumpgraph/dumpgraph.go
@@ -15,6 +15,7 @@
 package main
 
 import (
+	"bytes"
 	"flag"
 	"fmt"
 	"io"
@@ -24,14 +25,11 @@
 	"sort"
 	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 )
 
 var (
-	graphViz        = flag.Bool("dot", false, "Whether to output graphviz (i.e. dot) format.")
-	labelConditions = flag.Bool("label_conditions", false, "Whether to label target nodes with conditions.")
-	stripPrefix     = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
 	failNoLicenses    = fmt.Errorf("No licenses found")
 )
@@ -55,8 +53,44 @@
 	return installPath
 }
 
-func init() {
-	flag.Usage = func() {
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
+	var f multiString
+	flags.Var(&f, name, usage)
+	return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
 		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
 
 Outputs space-separated Target Dependency Annotations tuples for each
@@ -70,42 +104,68 @@
 
 Options:
 `, filepath.Base(os.Args[0]))
-		flag.PrintDefaults()
+		flags.PrintDefaults()
 	}
-}
 
-// newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
-	var f multiString
-	flag.Var(&f, name, usage)
-	return &f
-}
+	graphViz := flags.Bool("dot", false, "Whether to output graphviz (i.e. dot) format.")
+	labelConditions := flags.Bool("label_conditions", false, "Whether to label target nodes with conditions.")
+	outputFile := flags.String("o", "-", "Where to write the output. (default stdout)")
+	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
 
-// multiString implements the flag `Value` interface for multiple strings.
-type multiString []string
-
-func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
-func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
-
-func main() {
-	flag.Parse()
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
+	if len(*outputFile) == 0 {
+		flags.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
+	}
+
+	var ofile io.Writer
+	ofile = os.Stdout
+	var obuf *bytes.Buffer
+	if *outputFile != "-" {
+		obuf = &bytes.Buffer{}
+		ofile = obuf
+	}
+
 	ctx := &context{*graphViz, *labelConditions, *stripPrefix}
 
-	err := dumpGraph(ctx, os.Stdout, os.Stderr, compliance.FS, flag.Args()...)
+	err := dumpGraph(ctx, ofile, os.Stderr, compliance.FS, flags.Args()...)
 	if err != nil {
 		if err == failNoneRequested {
-			flag.Usage()
+			flags.Usage()
 		}
 		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		os.Exit(1)
 	}
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+			os.Exit(1)
+		}
+	}
 	os.Exit(0)
 }
 
diff --git a/tools/compliance/cmd/dumpresolutions/dumpresolutions.go b/tools/compliance/cmd/dumpresolutions/dumpresolutions.go
index d02c238..dc0cf88 100644
--- a/tools/compliance/cmd/dumpresolutions/dumpresolutions.go
+++ b/tools/compliance/cmd/dumpresolutions/dumpresolutions.go
@@ -15,6 +15,7 @@
 package main
 
 import (
+	"bytes"
 	"flag"
 	"fmt"
 	"io"
@@ -24,15 +25,11 @@
 	"sort"
 	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 )
 
 var (
-	conditions      = newMultiString("c", "License condition to resolve. (may be given multiple times)")
-	graphViz        = flag.Bool("dot", false, "Whether to output graphviz (i.e. dot) format.")
-	labelConditions = flag.Bool("label_conditions", false, "Whether to label target nodes with conditions.")
-	stripPrefix     = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
 	failNoLicenses    = fmt.Errorf("No licenses found")
 )
@@ -57,8 +54,44 @@
 	return installPath
 }
 
-func init() {
-	flag.Usage = func() {
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
+	var f multiString
+	flags.Var(&f, name, usage)
+	return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
 		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
 
 Outputs a space-separated Target ActsOn Origin Condition tuple for each
@@ -75,32 +108,52 @@
 
 Options:
 `, filepath.Base(os.Args[0]))
-		flag.PrintDefaults()
+		flags.PrintDefaults()
 	}
-}
 
-// newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
-	var f multiString
-	flag.Var(&f, name, usage)
-	return &f
-}
+	conditions := newMultiString(flags, "c", "License condition to resolve. (may be given multiple times)")
+	graphViz := flags.Bool("dot", false, "Whether to output graphviz (i.e. dot) format.")
+	labelConditions := flags.Bool("label_conditions", false, "Whether to label target nodes with conditions.")
+	outputFile := flags.String("o", "-", "Where to write the output. (default stdout)")
+	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
 
-// multiString implements the flag `Value` interface for multiple strings.
-type multiString []string
-
-func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
-func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
-
-func main() {
-	flag.Parse()
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
+	if len(*outputFile) == 0 {
+		flags.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
+	}
+
+	var ofile io.Writer
+	ofile = os.Stdout
+	var obuf *bytes.Buffer
+	if *outputFile != "-" {
+		obuf = &bytes.Buffer{}
+		ofile = obuf
+	}
+
 	lcs := make([]compliance.LicenseCondition, 0, len(*conditions))
 	for _, name := range *conditions {
 		lcs = append(lcs, compliance.RecognizedConditionNames[name])
@@ -111,14 +164,21 @@
 		labelConditions: *labelConditions,
 		stripPrefix:     *stripPrefix,
 	}
-	_, err := dumpResolutions(ctx, os.Stdout, os.Stderr, compliance.FS, flag.Args()...)
+	_, err := dumpResolutions(ctx, ofile, os.Stderr, compliance.FS, flags.Args()...)
 	if err != nil {
 		if err == failNoneRequested {
-			flag.Usage()
+			flags.Usage()
 		}
 		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		os.Exit(1)
 	}
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+			os.Exit(1)
+		}
+	}
 	os.Exit(0)
 }
 
diff --git a/tools/compliance/cmd/htmlnotice/htmlnotice.go b/tools/compliance/cmd/htmlnotice/htmlnotice.go
index e98b272..1a49610 100644
--- a/tools/compliance/cmd/htmlnotice/htmlnotice.go
+++ b/tools/compliance/cmd/htmlnotice/htmlnotice.go
@@ -26,19 +26,13 @@
 	"path/filepath"
 	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 
 	"github.com/google/blueprint/deptools"
 )
 
 var (
-	outputFile  = flag.String("o", "-", "Where to write the NOTICE text file. (default stdout)")
-	depsFile    = flag.String("d", "", "Where to write the deps file")
-	includeTOC  = flag.Bool("toc", true, "Whether to include a table of contents.")
-	product     = flag.String("product", "", "The name of the product for which the notice is generated.")
-	stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-	title       = flag.String("title", "", "The title of the notice file.")
-
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
 	failNoLicenses    = fmt.Errorf("No licenses found")
 )
@@ -70,23 +64,10 @@
 	return installPath
 }
 
-func init() {
-	flag.Usage = func() {
-		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
-
-Outputs an html NOTICE.html or gzipped NOTICE.html.gz file if the -o filename
-ends with ".gz".
-
-Options:
-`, filepath.Base(os.Args[0]))
-		flag.PrintDefaults()
-	}
-}
-
 // newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
 	var f multiString
-	flag.Var(&f, name, usage)
+	flags.Var(&f, name, usage)
 	return &f
 }
 
@@ -97,16 +78,57 @@
 func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
 
 func main() {
-	flag.Parse()
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs an html NOTICE.html or gzipped NOTICE.html.gz file if the -o filename
+ends with ".gz".
+
+Options:
+`, filepath.Base(os.Args[0]))
+		flags.PrintDefaults()
+	}
+
+	outputFile := flags.String("o", "-", "Where to write the NOTICE text file. (default stdout)")
+	depsFile := flags.String("d", "", "Where to write the deps file")
+	includeTOC := flags.Bool("toc", true, "Whether to include a table of contents.")
+	product := flags.String("product", "", "The name of the product for which the notice is generated.")
+	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+	title := flags.String("title", "", "The title of the notice file.")
+
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
 	if len(*outputFile) == 0 {
-		flag.Usage()
+		flags.Usage()
 		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
 		os.Exit(2)
 	} else {
@@ -143,10 +165,10 @@
 
 	ctx := &context{ofile, os.Stderr, compliance.FS, *includeTOC, *product, *stripPrefix, *title, &deps}
 
-	err := htmlNotice(ctx, flag.Args()...)
+	err := htmlNotice(ctx, flags.Args()...)
 	if err != nil {
 		if err == failNoneRequested {
-			flag.Usage()
+			flags.Usage()
 		}
 		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		os.Exit(1)
diff --git a/tools/compliance/cmd/listshare/listshare.go b/tools/compliance/cmd/listshare/listshare.go
index 7f4038b..31bd1b2 100644
--- a/tools/compliance/cmd/listshare/listshare.go
+++ b/tools/compliance/cmd/listshare/listshare.go
@@ -15,6 +15,7 @@
 package main
 
 import (
+	"bytes"
 	"flag"
 	"fmt"
 	"io"
@@ -24,12 +25,41 @@
 	"sort"
 	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 )
 
-func init() {
-	flag.Usage = func() {
-		fmt.Fprintf(os.Stderr, `Usage: %s file.meta_lic {file.meta_lic...}
+var (
+	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
+	failNoLicenses    = fmt.Errorf("No licenses found")
+)
+
+func main() {
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {-o outfile} file.meta_lic {file.meta_lic...}
 
 Outputs a csv file with 1 project per line in the first field followed
 by target:condition pairs describing why the project must be shared.
@@ -39,30 +69,61 @@
 restricted (e.g. GPL) or reciprocal (e.g. MPL).
 `, filepath.Base(os.Args[0]))
 	}
-}
 
-var (
-	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
-	failNoLicenses    = fmt.Errorf("No licenses found")
-)
+	outputFile := flags.String("o", "-", "Where to write the list of projects to share. (default stdout)")
 
-func main() {
-	flag.Parse()
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
-	err := listShare(os.Stdout, os.Stderr, compliance.FS, flag.Args()...)
+	if len(*outputFile) == 0 {
+		flags.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
+	}
+
+	var ofile io.Writer
+	ofile = os.Stdout
+	var obuf *bytes.Buffer
+	if *outputFile != "-" {
+		obuf = &bytes.Buffer{}
+		ofile = obuf
+	}
+
+	err := listShare(ofile, os.Stderr, compliance.FS, flags.Args()...)
 	if err != nil {
 		if err == failNoneRequested {
-			flag.Usage()
+			flags.Usage()
 		}
 		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		os.Exit(1)
 	}
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+			os.Exit(1)
+		}
+	}
 	os.Exit(0)
 }
 
@@ -76,7 +137,7 @@
 	// Read the license graph from the license metadata files (*.meta_lic).
 	licenseGraph, err := compliance.ReadLicenseGraph(rootFS, stderr, files)
 	if err != nil {
-		return fmt.Errorf("Unable to read license metadata file(s) %q: %v\n", files, err)
+		return fmt.Errorf("Unable to read license metadata file(s) %q from %q: %v\n", files, os.Getenv("PWD"), err)
 	}
 	if licenseGraph == nil {
 		return failNoLicenses
diff --git a/tools/compliance/cmd/rtrace/rtrace.go b/tools/compliance/cmd/rtrace/rtrace.go
index 91171c4..667cdce 100644
--- a/tools/compliance/cmd/rtrace/rtrace.go
+++ b/tools/compliance/cmd/rtrace/rtrace.go
@@ -15,6 +15,7 @@
 package main
 
 import (
+	"bytes"
 	"flag"
 	"fmt"
 	"io"
@@ -24,21 +25,19 @@
 	"sort"
 	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 )
 
 var (
-	sources         = newMultiString("rtrace", "Projects or metadata files to trace back from. (required; multiple allowed)")
-	stripPrefix     = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
 	failNoSources     = fmt.Errorf("\nNo projects or metadata files to trace back from")
 	failNoLicenses    = fmt.Errorf("No licenses found")
 )
 
 type context struct {
-	sources         []string
-	stripPrefix     []string
+	sources     []string
+	stripPrefix []string
 }
 
 func (ctx context) strip(installPath string) string {
@@ -54,8 +53,44 @@
 	return installPath
 }
 
-func init() {
-	flag.Usage = func() {
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
+	var f multiString
+	flags.Var(&f, name, usage)
+	return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
 		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
 
 Outputs a space-separated Target ActsOn Origin Condition tuple for each
@@ -72,50 +107,75 @@
 
 Options:
 `, filepath.Base(os.Args[0]))
-		flag.PrintDefaults()
+		flags.PrintDefaults()
 	}
-}
 
-// newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
-	var f multiString
-	flag.Var(&f, name, usage)
-	return &f
-}
+	outputFile := flags.String("o", "-", "Where to write the output. (default stdout)")
+	sources := newMultiString(flags, "rtrace", "Projects or metadata files to trace back from. (required; multiple allowed)")
+	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
 
-// multiString implements the flag `Value` interface for multiple strings.
-type multiString []string
-
-func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
-func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
-
-func main() {
-	flag.Parse()
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
 	if len(*sources) == 0 {
-		flag.Usage()
+		flags.Usage()
 		fmt.Fprintf(os.Stderr, "\nMust specify at least 1 --rtrace source.\n")
 		os.Exit(2)
 	}
 
-	ctx := &context{
-		sources:         *sources,
-		stripPrefix:     *stripPrefix,
+	if len(*outputFile) == 0 {
+		flags.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
 	}
-	_, err := traceRestricted(ctx, os.Stdout, os.Stderr, compliance.FS, flag.Args()...)
+
+	var ofile io.Writer
+	ofile = os.Stdout
+	var obuf *bytes.Buffer
+	if *outputFile != "-" {
+		obuf = &bytes.Buffer{}
+		ofile = obuf
+	}
+
+	ctx := &context{
+		sources:     *sources,
+		stripPrefix: *stripPrefix,
+	}
+	_, err := traceRestricted(ctx, ofile, os.Stderr, compliance.FS, flags.Args()...)
 	if err != nil {
 		if err == failNoneRequested {
-			flag.Usage()
+			flags.Usage()
 		}
 		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		os.Exit(1)
 	}
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+			os.Exit(1)
+		}
+	}
 	os.Exit(0)
 }
 
diff --git a/tools/compliance/cmd/shippedlibs/shippedlibs.go b/tools/compliance/cmd/shippedlibs/shippedlibs.go
index 9d25dd3..add6dd6 100644
--- a/tools/compliance/cmd/shippedlibs/shippedlibs.go
+++ b/tools/compliance/cmd/shippedlibs/shippedlibs.go
@@ -39,9 +39,6 @@
 	rootFS fs.FS
 }
 
-func init() {
-}
-
 func main() {
 	var expandedArgs []string
 	for _, arg := range os.Args[1:] {
diff --git a/tools/compliance/cmd/textnotice/textnotice.go b/tools/compliance/cmd/textnotice/textnotice.go
index cfa0859..9beaf58 100644
--- a/tools/compliance/cmd/textnotice/textnotice.go
+++ b/tools/compliance/cmd/textnotice/textnotice.go
@@ -25,18 +25,13 @@
 	"path/filepath"
 	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 
 	"github.com/google/blueprint/deptools"
 )
 
 var (
-	outputFile  = flag.String("o", "-", "Where to write the NOTICE text file. (default stdout)")
-	depsFile    = flag.String("d", "", "Where to write the deps file")
-	product     = flag.String("product", "", "The name of the product for which the notice is generated.")
-	stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-	title       = flag.String("title", "", "The title of the notice file.")
-
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
 	failNoLicenses    = fmt.Errorf("No licenses found")
 )
@@ -67,22 +62,10 @@
 	return installPath
 }
 
-func init() {
-	flag.Usage = func() {
-		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
-
-Outputs a text NOTICE file.
-
-Options:
-`, filepath.Base(os.Args[0]))
-		flag.PrintDefaults()
-	}
-}
-
 // newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
 	var f multiString
-	flag.Var(&f, name, usage)
+	flags.Var(&f, name, usage)
 	return &f
 }
 
@@ -93,16 +76,55 @@
 func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
 
 func main() {
-	flag.Parse()
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs a text NOTICE file.
+
+Options:
+`, filepath.Base(os.Args[0]))
+		flags.PrintDefaults()
+	}
+
+	outputFile := flags.String("o", "-", "Where to write the NOTICE text file. (default stdout)")
+	depsFile := flags.String("d", "", "Where to write the deps file")
+	product := flags.String("product", "", "The name of the product for which the notice is generated.")
+	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+	title := flags.String("title", "", "The title of the notice file.")
+
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
 	if len(*outputFile) == 0 {
-		flag.Usage()
+		flags.Usage()
 		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
 		os.Exit(2)
 	} else {
@@ -139,10 +161,10 @@
 
 	ctx := &context{ofile, os.Stderr, compliance.FS, *product, *stripPrefix, *title, &deps}
 
-	err := textNotice(ctx, flag.Args()...)
+	err := textNotice(ctx, flags.Args()...)
 	if err != nil {
 		if err == failNoneRequested {
-			flag.Usage()
+			flags.Usage()
 		}
 		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		os.Exit(1)
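
The textnotice and xmlnotice changes above follow one pattern: expand any
"@file" response-file arguments first, then parse flags from a command-local
flag.FlagSet instead of the global flag package. A minimal Python sketch of
the response-file half of that technique, using argparse's built-in
fromfile_prefix_chars; the flag names and file contents are illustrative,
and unlike response.ReadRspFile this line-per-argument form does no quote
handling:

    import argparse
    import os
    import tempfile

    parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
    parser.add_argument('-o', default='-', help='where to write the NOTICE file')
    # action='append' plays the role of the multiString flag above.
    parser.add_argument('--strip_prefix', action='append', default=[],
                        help='prefix to remove from paths (multiple allowed)')
    parser.add_argument('files', nargs='+', help='file.meta_lic inputs')

    # Write a response file holding one argument per line, then pass it as
    # "@<path>" so the parser expands it in place before parsing.
    with tempfile.NamedTemporaryFile('w', suffix='.rsp', delete=False) as rsp:
        rsp.write('--strip_prefix\nout/target/\na.meta_lic\nb.meta_lic\n')
    args = parser.parse_args(['-o', 'NOTICE.txt', '@' + rsp.name])
    os.unlink(rsp.name)
    print(args.strip_prefix, args.files)  # ['out/target/'] ['a.meta_lic', 'b.meta_lic']
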
diff --git a/tools/compliance/cmd/xmlnotice/xmlnotice.go b/tools/compliance/cmd/xmlnotice/xmlnotice.go
index 84859d7..2097b7c 100644
--- a/tools/compliance/cmd/xmlnotice/xmlnotice.go
+++ b/tools/compliance/cmd/xmlnotice/xmlnotice.go
@@ -26,18 +26,13 @@
 	"path/filepath"
 	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 
 	"github.com/google/blueprint/deptools"
 )
 
 var (
-	outputFile  = flag.String("o", "-", "Where to write the NOTICE xml or xml.gz file. (default stdout)")
-	depsFile    = flag.String("d", "", "Where to write the deps file")
-	product     = flag.String("product", "", "The name of the product for which the notice is generated.")
-	stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-	title       = flag.String("title", "", "The title of the notice file.")
-
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
 	failNoLicenses    = fmt.Errorf("No licenses found")
 )
@@ -68,23 +63,10 @@
 	return installPath
 }
 
-func init() {
-	flag.Usage = func() {
-		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
-
-Outputs an xml NOTICE.xml or gzipped NOTICE.xml.gz file if the -o filename ends
-with ".gz".
-
-Options:
-`, filepath.Base(os.Args[0]))
-		flag.PrintDefaults()
-	}
-}
-
 // newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
 	var f multiString
-	flag.Var(&f, name, usage)
+	flags.Var(&f, name, usage)
 	return &f
 }
 
@@ -95,16 +77,56 @@
 func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
 
 func main() {
-	flag.Parse()
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs an xml NOTICE.xml or gzipped NOTICE.xml.gz file if the -o filename ends
+with ".gz".
+
+Options:
+`, filepath.Base(os.Args[0]))
+		flags.PrintDefaults()
+	}
+
+	outputFile := flags.String("o", "-", "Where to write the NOTICE xml or xml.gz file. (default stdout)")
+	depsFile := flags.String("d", "", "Where to write the deps file")
+	product := flags.String("product", "", "The name of the product for which the notice is generated.")
+	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+	title := flags.String("title", "", "The title of the notice file.")
+
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
 	if len(*outputFile) == 0 {
-		flag.Usage()
+		flags.Usage()
 		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
 		os.Exit(2)
 	} else {
@@ -141,10 +163,10 @@
 
 	ctx := &context{ofile, os.Stderr, compliance.FS, *product, *stripPrefix, *title, &deps}
 
-	err := xmlNotice(ctx, flag.Args()...)
+	err := xmlNotice(ctx, flags.Args()...)
 	if err != nil {
 		if err == failNoneRequested {
-			flag.Usage()
+			flags.Usage()
 		}
 		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		os.Exit(1)
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 3f13a4a..941edc6 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -54,7 +54,7 @@
 class ApexApkSigner(object):
   """Class to sign the apk files and other files in an apex payload image and repack the apex"""
 
-  def __init__(self, apex_path, key_passwords, codename_to_api_level_map, avbtool=None, sign_tool=None, fsverity_tool=None):
+  def __init__(self, apex_path, key_passwords, codename_to_api_level_map, avbtool=None, sign_tool=None):
     self.apex_path = apex_path
     if not key_passwords:
       self.key_passwords = dict()
@@ -65,9 +65,8 @@
         OPTIONS.search_path, "bin", "debugfs_static")
     self.avbtool = avbtool if avbtool else "avbtool"
     self.sign_tool = sign_tool
-    self.fsverity_tool = fsverity_tool if fsverity_tool else "fsverity"
 
-  def ProcessApexFile(self, apk_keys, payload_key, signing_args=None, is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None):
+  def ProcessApexFile(self, apk_keys, payload_key, signing_args=None):
     """Scans and signs the payload files and repack the apex
 
     Args:
@@ -85,14 +84,10 @@
                 self.debugfs_path, 'list', self.apex_path]
     entries_names = common.RunAndCheckOutput(list_cmd).split()
     apk_entries = [name for name in entries_names if name.endswith('.apk')]
-    sepolicy_entries = []
-    if is_sepolicy:
-      sepolicy_entries = [name for name in entries_names if
-          name.startswith('./etc/SEPolicy') and name.endswith('.zip')]
 
     # No need to sign and repack; return the original apex path.
-    if not apk_entries and not sepolicy_entries and self.sign_tool is None:
-      logger.info('No payload (apk or zip) file to sign in %s', self.apex_path)
+    if not apk_entries and self.sign_tool is None:
+      logger.info('No apk file to sign in %s', self.apex_path)
       return self.apex_path
 
     for entry in apk_entries:
@@ -106,16 +101,15 @@
         logger.warning('Apk path does not contain the intended directory name:'
                        ' %s', entry)
 
-    payload_dir, has_signed_content = self.ExtractApexPayloadAndSignContents(apk_entries,
-        apk_keys, payload_key, sepolicy_entries, sepolicy_key, sepolicy_cert, signing_args)
+    payload_dir, has_signed_content = self.ExtractApexPayloadAndSignContents(
+        apk_entries, apk_keys, payload_key, signing_args)
     if not has_signed_content:
       logger.info('No contents have been signed in %s', self.apex_path)
       return self.apex_path
 
     return self.RepackApexPayload(payload_dir, payload_key, signing_args)
 
-  def ExtractApexPayloadAndSignContents(self, apk_entries, apk_keys, payload_key,
-  sepolicy_entries, sepolicy_key, sepolicy_cert, signing_args):
+  def ExtractApexPayloadAndSignContents(self, apk_entries, apk_keys, payload_key, signing_args):
     """Extracts the payload image and signs the containing apk files."""
     if not os.path.exists(self.debugfs_path):
       raise ApexSigningError(
@@ -147,11 +141,6 @@
           codename_to_api_level_map=self.codename_to_api_level_map)
       has_signed_content = True
 
-    for entry in sepolicy_entries:
-      sepolicy_key = sepolicy_key if sepolicy_key else payload_key
-      self.SignSePolicy(payload_dir, entry, sepolicy_key, sepolicy_cert)
-      has_signed_content = True
-
     if self.sign_tool:
       logger.info('Signing payload contents in apex %s with %s', self.apex_path, self.sign_tool)
       # Pass avbtool to the custom signing tool
@@ -165,36 +154,6 @@
 
     return payload_dir, has_signed_content
 
-  def SignSePolicy(self, payload_dir, sepolicy_zip, sepolicy_key, sepolicy_cert):
-    sepolicy_sig = sepolicy_zip + '.sig'
-    sepolicy_fsv_sig = sepolicy_zip + '.fsv_sig'
-
-    policy_zip_path = os.path.join(payload_dir, sepolicy_zip)
-    sig_out_path = os.path.join(payload_dir, sepolicy_sig)
-    sig_old = sig_out_path + '.old'
-    if os.path.exists(sig_out_path):
-      os.rename(sig_out_path, sig_old)
-    sign_cmd = ['openssl', 'dgst', '-sign', sepolicy_key, '-keyform', 'PEM', '-sha256',
-        '-out', sig_out_path, '-binary', policy_zip_path]
-    common.RunAndCheckOutput(sign_cmd)
-    if os.path.exists(sig_old):
-      os.remove(sig_old)
-
-    if not sepolicy_cert:
-      logger.info('No cert provided for SEPolicy, skipping fsverity sign')
-      return
-
-    fsv_sig_out_path = os.path.join(payload_dir, sepolicy_fsv_sig)
-    fsv_sig_old = fsv_sig_out_path + '.old'
-    if os.path.exists(fsv_sig_out_path):
-      os.rename(fsv_sig_out_path, fsv_sig_old)
-
-    fsverity_cmd = [self.fsverity_tool, 'sign', policy_zip_path, fsv_sig_out_path,
-        '--key=' + sepolicy_key, '--cert=' + sepolicy_cert]
-    common.RunAndCheckOutput(fsverity_cmd)
-    if os.path.exists(fsv_sig_old):
-      os.remove(fsv_sig_old)
-
   def RepackApexPayload(self, payload_dir, payload_key, signing_args=None):
     """Rebuilds the apex file with the updated payload directory."""
     apex_dir = common.MakeTempDir()
@@ -365,9 +324,7 @@
 
 def SignUncompressedApex(avbtool, apex_file, payload_key, container_key,
                          container_pw, apk_keys, codename_to_api_level_map,
-                         no_hashtree, signing_args=None, sign_tool=None,
-                         is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None,
-                         fsverity_tool=None):
+                         no_hashtree, signing_args=None, sign_tool=None):
   """Signs the current uncompressed APEX with the given payload/container keys.
 
   Args:
@@ -380,10 +337,6 @@
     no_hashtree: Don't include hashtree in the signed APEX.
     signing_args: Additional args to be passed to the payload signer.
     sign_tool: A tool to sign the contents of the APEX.
-    is_sepolicy: Indicates if the apex is a sepolicy.apex
-    sepolicy_key: Key to sign a sepolicy zip.
-    sepolicy_cert: Cert to sign a sepolicy zip.
-    fsverity_tool: fsverity path to sign sepolicy zip.
 
   Returns:
     The path to the signed APEX file.
@@ -392,9 +345,8 @@
   # the apex file after signing.
   apk_signer = ApexApkSigner(apex_file, container_pw,
                              codename_to_api_level_map,
-                             avbtool, sign_tool, fsverity_tool)
-  apex_file = apk_signer.ProcessApexFile(
-      apk_keys, payload_key, signing_args, is_sepolicy, sepolicy_key, sepolicy_cert)
+                             avbtool, sign_tool)
+  apex_file = apk_signer.ProcessApexFile(apk_keys, payload_key, signing_args)
 
   # 2a. Extract and sign the APEX_PAYLOAD_IMAGE entry with the given
   # payload_key.
@@ -448,9 +400,7 @@
 
 def SignCompressedApex(avbtool, apex_file, payload_key, container_key,
                        container_pw, apk_keys, codename_to_api_level_map,
-                       no_hashtree, signing_args=None, sign_tool=None,
-                       is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None,
-                       fsverity_tool=None):
+                       no_hashtree, signing_args=None, sign_tool=None):
   """Signs the current compressed APEX with the given payload/container keys.
 
   Args:
@@ -462,10 +412,6 @@
     codename_to_api_level_map: A dict that maps from codename to API level.
     no_hashtree: Don't include hashtree in the signed APEX.
     signing_args: Additional args to be passed to the payload signer.
-    is_sepolicy: Indicates if the apex is a sepolicy.apex
-    sepolicy_key: Key to sign a sepolicy zip.
-    sepolicy_cert: Cert to sign a sepolicy zip.
-    fsverity_tool: fsverity path to sign sepolicy zip.
 
   Returns:
     The path to the signed APEX file.
@@ -492,11 +438,7 @@
       codename_to_api_level_map,
       no_hashtree,
       signing_args,
-      sign_tool,
-      is_sepolicy,
-      sepolicy_key,
-      sepolicy_cert,
-      fsverity_tool)
+      sign_tool)
 
   # 3. Compress signed original apex.
   compressed_apex_file = common.MakeTempFile(prefix='apex-container-',
@@ -524,8 +466,7 @@
 
 def SignApex(avbtool, apex_data, payload_key, container_key, container_pw,
              apk_keys, codename_to_api_level_map,
-             no_hashtree, signing_args=None, sign_tool=None,
-             is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None, fsverity_tool=None):
+             no_hashtree, signing_args=None, sign_tool=None):
   """Signs the current APEX with the given payload/container keys.
 
   Args:
@@ -537,9 +478,6 @@
     codename_to_api_level_map: A dict that maps from codename to API level.
     no_hashtree: Don't include hashtree in the signed APEX.
     signing_args: Additional args to be passed to the payload signer.
-    sepolicy_key: Key to sign a sepolicy zip.
-    sepolicy_cert: Cert to sign a sepolicy zip.
-    fsverity_tool: fsverity path to sign sepolicy zip.
 
   Returns:
     The path to the signed APEX file.
@@ -565,11 +503,7 @@
           no_hashtree=no_hashtree,
           apk_keys=apk_keys,
           signing_args=signing_args,
-          sign_tool=sign_tool,
-          is_sepolicy=is_sepolicy,
-          sepolicy_key=sepolicy_key,
-          sepolicy_cert=sepolicy_cert,
-          fsverity_tool=fsverity_tool)
+          sign_tool=sign_tool)
     elif apex_type == 'COMPRESSED':
       return SignCompressedApex(
           avbtool,
@@ -581,11 +515,7 @@
           no_hashtree=no_hashtree,
           apk_keys=apk_keys,
           signing_args=signing_args,
-          sign_tool=sign_tool,
-          is_sepolicy=is_sepolicy,
-          sepolicy_key=sepolicy_key,
-          sepolicy_cert=sepolicy_cert,
-          fsverity_tool=fsverity_tool)
+          sign_tool=sign_tool)
     else:
       # TODO(b/172912232): support signing compressed apex
       raise ApexInfoError('Unsupported apex type {}'.format(apex_type))
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 7fdf4ba..9567fdc 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -331,6 +331,14 @@
     if compressor:
       build_command.extend(["-z", compressor])
 
+    compress_hints = None
+    if "erofs_default_compress_hints" in prop_dict:
+      compress_hints = prop_dict["erofs_default_compress_hints"]
+    if "erofs_compress_hints" in prop_dict:
+      compress_hints = prop_dict["erofs_compress_hints"]
+    if compress_hints:
+      build_command.extend(["--compress-hints", compress_hints])
+
     build_command.extend(["--mount-point", prop_dict["mount_point"]])
     if target_out:
       build_command.extend(["--product-out", target_out])
@@ -652,6 +660,7 @@
   common_props = (
       "extfs_sparse_flag",
       "erofs_default_compressor",
+      "erofs_default_compress_hints",
       "erofs_pcluster_size",
       "erofs_share_dup_blocks",
       "erofs_sparse_flag",
@@ -706,6 +715,7 @@
       (True, "{}_base_fs_file", "base_fs_file"),
       (True, "{}_disable_sparse", "disable_sparse"),
       (True, "{}_erofs_compressor", "erofs_compressor"),
+      (True, "{}_erofs_compress_hints", "erofs_compress_hints"),
       (True, "{}_erofs_pcluster_size", "erofs_pcluster_size"),
       (True, "{}_erofs_share_dup_blocks", "erofs_share_dup_blocks"),
       (True, "{}_extfs_inode_count", "extfs_inode_count"),
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index cff7542..caa4641 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -97,7 +97,6 @@
     self.stash_threshold = 0.8
     self.logfile = None
     self.host_tools = {}
-    self.sepolicy_name = 'sepolicy.apex'
 
 
 OPTIONS = Options()
diff --git a/tools/releasetools/merge/OWNERS b/tools/releasetools/merge/OWNERS
index 9012e3a..0eddee2 100644
--- a/tools/releasetools/merge/OWNERS
+++ b/tools/releasetools/merge/OWNERS
@@ -1,3 +1,4 @@
-danielnorman@google.com
+deyaoren@google.com
+haamed@google.com
 jgalmes@google.com
 rseymour@google.com
diff --git a/tools/releasetools/sign_apex.py b/tools/releasetools/sign_apex.py
index a68f1ec..6926467 100755
--- a/tools/releasetools/sign_apex.py
+++ b/tools/releasetools/sign_apex.py
@@ -42,15 +42,6 @@
 
   --sign_tool <sign_tool>
       Optional flag that specifies a custom signing tool for the contents of the apex.
-
-  --sepolicy_key <key>
-      Optional flag that specifies the sepolicy signing key, defaults to payload_key.
-
-  --sepolicy_cert <cert>
-      Optional flag that specifies the sepolicy signing cert.
-
-  --fsverity_tool <path>
-      Optional flag that specifies the path to fsverity tool to sign SEPolicy, defaults to fsverity.
 """
 
 import logging
@@ -61,12 +52,10 @@
 import common
 
 logger = logging.getLogger(__name__)
-OPTIONS = common.OPTIONS
 
 
 def SignApexFile(avbtool, apex_file, payload_key, container_key, no_hashtree,
-                 apk_keys=None, signing_args=None, codename_to_api_level_map=None, sign_tool=None,
-                 sepolicy_key=None, sepolicy_cert=None, fsverity_tool=None):
+                 apk_keys=None, signing_args=None, codename_to_api_level_map=None, sign_tool=None):
   """Signs the given apex file."""
   with open(apex_file, 'rb') as input_fp:
     apex_data = input_fp.read()
@@ -81,11 +70,7 @@
       no_hashtree=no_hashtree,
       apk_keys=apk_keys,
       signing_args=signing_args,
-      sign_tool=sign_tool,
-      is_sepolicy=apex_file.endswith(OPTIONS.sepolicy_name),
-      sepolicy_key=sepolicy_key,
-      sepolicy_cert=sepolicy_cert,
-      fsverity_tool=fsverity_tool)
+      sign_tool=sign_tool)
 
 
 def main(argv):
@@ -121,12 +106,6 @@
         options['extra_apks'].update({n: key})
     elif o == '--sign_tool':
       options['sign_tool'] = a
-    elif o == '--sepolicy_key':
-      options['sepolicy_key'] = a
-    elif o == '--sepolicy_cert':
-      options['sepolicy_cert'] = a
-    elif o == '--fsverity_tool':
-      options['fsverity_tool'] = a
     else:
       return False
     return True
@@ -142,9 +121,6 @@
           'payload_key=',
           'extra_apks=',
           'sign_tool=',
-          'sepolicy_key=',
-          'sepolicy_cert=',
-          'fsverity_tool='
       ],
       extra_option_handler=option_handler)
 
@@ -165,10 +141,7 @@
       signing_args=options.get('payload_extra_args'),
       codename_to_api_level_map=options.get(
           'codename_to_api_level_map', {}),
-      sign_tool=options.get('sign_tool', None),
-      sepolicy_key=options.get('sepolicy_key', None),
-      sepolicy_cert=options.get('sepolicy_cert', None),
-      fsverity_tool=options.get('fsverity_tool', None))
+      sign_tool=options.get('sign_tool', None))
   shutil.copyfile(signed_apex, args[1])
   logger.info("done.")
 
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index f363afd..27e9dfb 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -137,15 +137,6 @@
   --android_jar_path <path>
       Path to the android.jar to repack the apex file.
 
-  --sepolicy_key <key>
-      Optional flag that specifies the sepolicy signing key, defaults to payload_key for the sepolicy.apex.
-
-  --sepolicy_cert <cert>
-      Optional flag that specifies the sepolicy signing cert.
-
-  --fsverity_tool <path>
-      Optional flag that specifies the path to fsverity tool to sign SEPolicy, defaults to fsverity.
-
   --allow_gsi_debug_sepolicy
       Allow the existence of the file 'userdebug_plat_sepolicy.cil' under
       (/system/system_ext|/system_ext)/etc/selinux.
@@ -205,9 +196,6 @@
 OPTIONS.android_jar_path = None
 OPTIONS.vendor_partitions = set()
 OPTIONS.vendor_otatools = None
-OPTIONS.sepolicy_key = None
-OPTIONS.sepolicy_cert = None
-OPTIONS.fsverity_tool = None
 OPTIONS.allow_gsi_debug_sepolicy = False
 
 
@@ -247,8 +235,6 @@
 def IsApexFile(filename):
   return filename.endswith(".apex") or filename.endswith(".capex")
 
-def IsSepolicyApex(filename):
-  return filename.endswith(OPTIONS.sepolicy_name)
 
 def GetApexFilename(filename):
   name = os.path.basename(filename)
@@ -271,24 +257,6 @@
 
   return certmap
 
-def GetSepolicyKeys(keys_info):
-  """Gets SEPolicy signing keys applying overrides from command line options.
-
-  Args:
-    keys_info: A dict that maps from the SEPolicy APEX filename to a tuple of
-    (sepolicy_key, sepolicy_cert, fsverity_tool).
-
-  Returns:
-    A dict that contains the updated APEX key mapping, which should be used for
-    the current signing.
-  """
-  for name in keys_info:
-      (sepolicy_key, sepolicy_cert, fsverity_tool) = keys_info[name]
-      sepolicy_key = OPTIONS.sepolicy_key if OPTIONS.sepolicy_key else sepolicy_key
-      sepolicy_cert = OPTIONS.sepolicy_cert if OPTIONS.sepolicy_cert else sepolicy_cert
-      fsverity_tool = OPTIONS.fsverity_tool if OPTIONS.fsverity_tool else fsverity_tool
-      keys_info[name] = (sepolicy_key, sepolicy_cert, fsverity_tool)
-  return keys_info
 
 def GetApexKeys(keys_info, key_map):
   """Gets APEX payload and container signing keys by applying the mapping rules.
@@ -551,7 +519,7 @@
 def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
                        apk_keys, apex_keys, key_passwords,
                        platform_api_level, codename_to_api_level_map,
-                       compressed_extension, sepolicy_keys):
+                       compressed_extension):
   # maxsize measures the maximum filename length, including the ones to be
   # skipped.
   try:
@@ -619,17 +587,6 @@
         print("           : %-*s payload   (%s)" % (
             maxsize, name, payload_key))
 
-        sepolicy_key = None
-        sepolicy_cert = None
-        fsverity_tool = None
-
-        if IsSepolicyApex(name):
-          (sepolicy_key, sepolicy_cert, fsverity_tool) = sepolicy_keys[name]
-          print("           : %-*s sepolicy key   (%s)" % (
-            maxsize, name, sepolicy_key))
-          print("           : %-*s sepolicy cert  (%s)" % (
-            maxsize, name, sepolicy_cert))
-
         signed_apex = apex_utils.SignApex(
             misc_info['avb_avbtool'],
             data,
@@ -640,11 +597,7 @@
             codename_to_api_level_map,
             no_hashtree=None,  # Let apex_utils determine if hash tree is needed
             signing_args=OPTIONS.avb_extra_args.get('apex'),
-            sign_tool=sign_tool,
-            is_sepolicy=IsSepolicyApex(name),
-            sepolicy_key=sepolicy_key,
-            sepolicy_cert=sepolicy_cert,
-            fsverity_tool=fsverity_tool)
+            sign_tool=sign_tool)
         common.ZipWrite(output_tf_zip, signed_apex, filename)
 
       else:
@@ -1254,24 +1207,20 @@
 def ReadApexKeysInfo(tf_zip):
   """Parses the APEX keys info from a given target-files zip.
 
-  Given a target-files ZipFile, parses the META/apexkeys.txt entry and returns
-  two dicts, the first one contains the mapping from APEX names
-  (e.g. com.android.tzdata) to a tuple of (payload_key, container_key,
-  sign_tool). The second one maps the sepolicy APEX name to a tuple containing
-  (sepolicy_key, sepolicy_cert, fsverity_tool).
+  Given a target-files ZipFile, parses the META/apexkeys.txt entry and returns a
+  dict that contains the mapping from APEX names (e.g. com.android.tzdata) to a
+  tuple of (payload_key, container_key, sign_tool).
 
   Args:
     tf_zip: The input target_files ZipFile (already open).
 
   Returns:
-    name : (payload_key, container_key, sign_tool)
+    (payload_key, container_key, sign_tool):
       - payload_key contains the path to the payload signing key
       - container_key contains the path to the container signing key
       - sign_tool is an apex-specific signing tool for its payload contents
-    name : (sepolicy_key, sepolicy_cert, fsverity_tool)
   """
   keys = {}
-  sepolicy_keys = {}
   for line in tf_zip.read('META/apexkeys.txt').decode().split('\n'):
     line = line.strip()
     if not line:
@@ -1282,9 +1231,6 @@
         r'private_key="(?P<PAYLOAD_PRIVATE_KEY>.*)"\s+'
         r'container_certificate="(?P<CONTAINER_CERT>.*)"\s+'
         r'container_private_key="(?P<CONTAINER_PRIVATE_KEY>.*?)"'
-        r'(\s+sepolicy_key="(?P<SEPOLICY_KEY>.*?)")?'
-        r'(\s+sepolicy_certificate="(?P<SEPOLICY_CERT>.*?)")?'
-        r'(\s+fsverity_tool="(?P<FSVERITY_TOOL>.*?)")?'
         r'(\s+partition="(?P<PARTITION>.*?)")?'
         r'(\s+sign_tool="(?P<SIGN_TOOL>.*?)")?$',
         line)
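
After this change the apexkeys.txt pattern keeps only the payload, container,
partition, and sign_tool groups. A standalone sketch of matching one line;
the leading name/public_key groups are not shown in the hunk and are
reconstructed here from the usual apexkeys.txt layout, and the sample values
are invented:

    import re

    APEX_KEY_RE = re.compile(
        r'^name="(?P<NAME>.*)"\s+'
        r'public_key="(?P<PAYLOAD_PUBLIC_KEY>.*)"\s+'
        r'private_key="(?P<PAYLOAD_PRIVATE_KEY>.*)"\s+'
        r'container_certificate="(?P<CONTAINER_CERT>.*)"\s+'
        r'container_private_key="(?P<CONTAINER_PRIVATE_KEY>.*?)"'
        r'(\s+partition="(?P<PARTITION>.*?)")?'
        r'(\s+sign_tool="(?P<SIGN_TOOL>.*?)")?$')

    line = ('name="com.android.example.apex" '
            'public_key="example.avbpubkey" '
            'private_key="example.pem" '
            'container_certificate="testkey.x509.pem" '
            'container_private_key="testkey.pk8" '
            'sign_tool="sign_me"')
    m = APEX_KEY_RE.match(line)
    print(m.group('NAME'), m.group('SIGN_TOOL'))  # com.android.example.apex sign_me
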
@@ -1313,18 +1259,12 @@
             container_private_key, OPTIONS.private_key_suffix):
       container_key = container_cert[:-len(OPTIONS.public_key_suffix)]
     else:
-      raise ValueError("Failed to parse container keys: \n{} **** {}".format(container_cert, container_private_key))
+      raise ValueError("Failed to parse container keys: \n{}".format(line))
 
     sign_tool = matches.group("SIGN_TOOL")
     keys[name] = (payload_private_key, container_key, sign_tool)
 
-    if IsSepolicyApex(name):
-      sepolicy_key = matches.group('SEPOLICY_KEY')
-      sepolicy_cert = matches.group('SEPOLICY_CERT')
-      fsverity_tool = matches.group('FSVERITY_TOOL')
-      sepolicy_keys[name] = (sepolicy_key, sepolicy_cert, fsverity_tool)
-
-  return keys, sepolicy_keys
+  return keys
 
 
 def BuildVendorPartitions(output_zip_path):
@@ -1415,7 +1355,8 @@
       img_file_path = "IMAGES/{}.img".format(p)
       map_file_path = "IMAGES/{}.map".format(p)
       common.ZipWrite(output_zip, os.path.join(vendor_tempdir, img_file_path), img_file_path)
-      common.ZipWrite(output_zip, os.path.join(vendor_tempdir, map_file_path), map_file_path)
+      if os.path.exists(os.path.join(vendor_tempdir, map_file_path)):
+        common.ZipWrite(output_zip, os.path.join(vendor_tempdir, map_file_path), map_file_path)
     # copy recovery.img, boot.img, recovery patch & install.sh
     if OPTIONS.rebuild_recovery:
       recovery_img = "IMAGES/recovery.img"
@@ -1541,12 +1482,6 @@
       OPTIONS.vendor_otatools = a
     elif o == "--vendor_partitions":
       OPTIONS.vendor_partitions = set(a.split(","))
-    elif o == '--sepolicy_key':
-      OPTIONS.sepolicy_key = a
-    elif o == '--sepolicy_cert':
-      OPTIONS.sepolicy_cert = a
-    elif o == '--fsverity_tool':
-      OPTIONS.fsverity_tool = a
     elif o == "--allow_gsi_debug_sepolicy":
       OPTIONS.allow_gsi_debug_sepolicy = True
     else:
@@ -1601,9 +1536,6 @@
           "gki_signing_extra_args=",
           "vendor_partitions=",
           "vendor_otatools=",
-          "sepolicy_key=",
-          "sepolicy_cert=",
-          "fsverity_tool=",
           "allow_gsi_debug_sepolicy",
       ],
       extra_option_handler=option_handler)
@@ -1626,9 +1558,8 @@
   apk_keys_info, compressed_extension = common.ReadApkCerts(input_zip)
   apk_keys = GetApkCerts(apk_keys_info)
 
-  apex_keys_info, sepolicy_keys_info = ReadApexKeysInfo(input_zip)
+  apex_keys_info = ReadApexKeysInfo(input_zip)
   apex_keys = GetApexKeys(apex_keys_info, apk_keys)
-  sepolicy_keys = GetSepolicyKeys(sepolicy_keys_info)
 
   # TODO(xunchang) check for the apks inside the apex files, and abort early if
   # the keys are not available.
@@ -1646,7 +1577,7 @@
   ProcessTargetFiles(input_zip, output_zip, misc_info,
                      apk_keys, apex_keys, key_passwords,
                      platform_api_level, codename_to_api_level_map,
-                     compressed_extension, sepolicy_keys)
+                     compressed_extension)
 
   common.ZipClose(input_zip)
   common.ZipClose(output_zip)
diff --git a/tools/releasetools/test_sign_apex.py b/tools/releasetools/test_sign_apex.py
index c344e22..8470f20 100644
--- a/tools/releasetools/test_sign_apex.py
+++ b/tools/releasetools/test_sign_apex.py
@@ -71,21 +71,3 @@
         False,
         codename_to_api_level_map={'S': 31, 'Tiramisu' : 32})
     self.assertTrue(os.path.exists(signed_apex))
-
-  @test_utils.SkipIfExternalToolsUnavailable()
-  def test_SignApexWithSepolicy(self):
-    test_apex = os.path.join(self.testdata_dir, 'sepolicy.apex')
-    payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
-    container_key = os.path.join(self.testdata_dir, 'testkey')
-    sepolicy_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
-    sepolicy_cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
-    signed_test_apex = sign_apex.SignApexFile(
-        'avbtool',
-        test_apex,
-        payload_key,
-        container_key,
-        False,
-        None,
-        sepolicy_key=sepolicy_key,
-        sepolicy_cert=sepolicy_cert)
-    self.assertTrue(os.path.exists(signed_test_apex))
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 144a3cd..0f13add 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -476,7 +476,7 @@
       target_files_zip.writestr('META/apexkeys.txt', self.APEX_KEYS_TXT)
 
     with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+      keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
         'apex.apexd_test.apex': (
@@ -486,7 +486,6 @@
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
             'build/make/target/product/security/testkey', None),
         }, keys_info)
-    self.assertEqual({}, sepolicy_keys_info)
 
   def test_ReadApexKeysInfo_mismatchingContainerKeys(self):
     # Mismatching payload public / private keys.
@@ -516,7 +515,7 @@
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
     with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+      keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
         'apex.apexd_test.apex': (
@@ -526,7 +525,6 @@
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
             'build/make/target/product/security/testkey', None),
         }, keys_info)
-    self.assertEqual({}, sepolicy_keys_info)
 
   def test_ReadApexKeysInfo_missingPayloadPublicKey(self):
     # Invalid lines will be skipped.
@@ -540,7 +538,7 @@
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
     with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+      keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
         'apex.apexd_test.apex': (
@@ -550,7 +548,6 @@
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
             'build/make/target/product/security/testkey', None),
         }, keys_info)
-    self.assertEqual({}, sepolicy_keys_info)
 
   def test_ReadApexKeysInfo_presignedKeys(self):
     apex_keys = self.APEX_KEYS_TXT + (
@@ -564,7 +561,7 @@
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
     with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+      keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
         'apex.apexd_test.apex': (
@@ -574,7 +571,6 @@
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
             'build/make/target/product/security/testkey', None),
         }, keys_info)
-    self.assertEqual({}, sepolicy_keys_info)
 
   def test_ReadApexKeysInfo_presignedKeys(self):
     apex_keys = self.APEX_KEYS_TXT + (
@@ -588,7 +584,7 @@
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
     with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+      keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
         'apex.apexd_test.apex': (
@@ -598,72 +594,6 @@
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
             'build/make/target/product/security/testkey', None),
         }, keys_info)
-    self.assertEqual({}, sepolicy_keys_info)
-
-  def test_ReadApexKeysInfo_withSepolicyKeys(self):
-    apex_keys = self.APEX_KEYS_TXT + (
-        'name="sepolicy.apex" '
-        'public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" '
-        'private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" '
-        'container_certificate="build/make/target/product/security/testkey.x509.pem" '
-        'container_private_key="build/make/target/product/security/testkey.pk8" '
-        'sepolicy_key="build/make/target/product/security/testkey.key" '
-        'sepolicy_certificate="build/make/target/product/security/testkey.x509.pem" '
-        'fsverity_tool="fsverity"')
-    target_files = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
-      target_files_zip.writestr('META/apexkeys.txt', apex_keys)
-
-    with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
-
-    self.assertEqual({
-        'apex.apexd_test.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
-            'build/make/target/product/security/testkey', None),
-        'apex.apexd_test_different_app.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
-            'build/make/target/product/security/testkey', None),
-        'sepolicy.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
-            'build/make/target/product/security/testkey', None),
-        }, keys_info)
-    self.assertEqual({'sepolicy.apex': (
-            'build/make/target/product/security/testkey.key',
-            'build/make/target/product/security/testkey.x509.pem',
-            'fsverity'),
-        }, sepolicy_keys_info)
-
-  def test_ReadApexKeysInfo_withSepolicyApex(self):
-    apex_keys = self.APEX_KEYS_TXT + (
-        'name="sepolicy.apex" '
-        'public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" '
-        'private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" '
-        'container_certificate="build/make/target/product/security/testkey.x509.pem" '
-        'container_private_key="build/make/target/product/security/testkey.pk8" ')
-    target_files = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
-      target_files_zip.writestr('META/apexkeys.txt', apex_keys)
-
-    with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
-
-    self.assertEqual({
-        'apex.apexd_test.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
-            'build/make/target/product/security/testkey', None),
-        'apex.apexd_test_different_app.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
-            'build/make/target/product/security/testkey', None),
-        'sepolicy.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
-            'build/make/target/product/security/testkey', None),
-        }, keys_info)
-    self.assertEqual({'sepolicy.apex': (
-            None,
-            None,
-            None),
-        }, sepolicy_keys_info)
 
   def test_ReplaceGkiSigningKey(self):
     common.OPTIONS.gki_signing_key = 'release_gki_key'
diff --git a/tools/releasetools/testdata/sepolicy.apex b/tools/releasetools/testdata/sepolicy.apex
deleted file mode 100644
index f7d267d..0000000
--- a/tools/releasetools/testdata/sepolicy.apex
+++ /dev/null
Binary files differ
diff --git a/tools/warn/html_writer.py b/tools/warn/html_writer.py
index 3fa822a..46ba253 100644
--- a/tools/warn/html_writer.py
+++ b/tools/warn/html_writer.py
@@ -56,6 +56,7 @@
 
 from __future__ import print_function
 import csv
+import datetime
 import html
 import sys
 
@@ -258,7 +259,7 @@
 
 
 def dump_stats(writer, warn_patterns):
-  """Dump some stats about total number of warnings and such."""
+  """Dump some stats about total number of warnings and date."""
 
   known = 0
   skipped = 0
@@ -279,6 +280,8 @@
   if total < 1000:
     extra_msg = ' (low count may indicate incremental build)'
   writer('Total number of warnings: <b>' + str(total) + '</b>' + extra_msg)
+  date_time_str = datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')
+  writer('<p>(generated on ' + date_time_str + ')')
 
 
 # New base table of warnings, [severity, warn_id, project, warning_message]
@@ -662,15 +665,26 @@
   var warningsOfFiles = {};
   var warningsOfDirs = {};
   var subDirs = {};
-  function addOneWarning(map, key) {
-    map[key] = 1 + ((key in map) ? map[key] : 0);
+  function addOneWarning(map, key, type, unique) {
+    function increaseCounter(idx) {
+      map[idx] = 1 + ((idx in map) ? map[idx] : 0);
+    }
+    increaseCounter(key);
+    if (type != "") {
+      increaseCounter(type + " " + key);
+      if (unique) {
+        increaseCounter(type + " *");
+      }
+    }
   }
   for (var i = 0; i < numWarnings; i++) {
-    var file = WarningMessages[i].replace(/:.*/, "");
-    addOneWarning(warningsOfFiles, file);
+    var message = WarningMessages[i];
+    var file = message.replace(/:.*/, "");
+    var warningType = message.endsWith("]") ? message.replace(/.*\[/, "[") : "";
+    addOneWarning(warningsOfFiles, file, warningType, true);
     var dirs = file.split("/");
     var dir = dirs[0];
-    addOneWarning(warningsOfDirs, dir);
+    addOneWarning(warningsOfDirs, dir, warningType, true);
     for (var d = 1; d < dirs.length - 1; d++) {
       var subDir = dir + "/" + dirs[d];
       if (!(dir in subDirs)) {
@@ -678,7 +692,7 @@
       }
       subDirs[dir][subDir] = 1;
       dir = subDir;
-      addOneWarning(warningsOfDirs, dir);
+      addOneWarning(warningsOfDirs, dir, warningType, false);
     }
   }
   var minDirWarnings = numWarnings*(LimitPercentWarnings/100);
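
The reworked addOneWarning above buckets each warning three ways: by path, by
"[type] path", and by a per-type "[type] *" total; nested subdirectories skip
the "*" bucket so the directory map counts each warning only once. A compact
Python rendering of the same bookkeeping for the file map only (the message
strings are invented):

    from collections import Counter

    def tally(messages):
        counts = Counter()
        for msg in messages:
            path = msg.split(':', 1)[0]
            # A trailing "[...]" names the warning type, as in the JS above.
            has_type = msg.endswith(']') and '[' in msg
            wtype = '[' + msg.rsplit('[', 1)[1] if has_type else ''
            counts[path] += 1
            if wtype:
                counts[wtype + ' ' + path] += 1
                counts[wtype + ' *'] += 1
        return counts

    msgs = ['a/b.c:1:2: warning: unused variable x [-Wunused-variable]',
            'a/b.c:9:1: warning: unused variable y [-Wunused-variable]']
    print(tally(msgs)['[-Wunused-variable] *'])  # 2
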
@@ -725,27 +739,33 @@
         document.getElementById(divName));
     table.draw(view, {allowHtml: true, alternatingRowStyle: true});
   }
-  addTable("Directory", "top_dirs_table", TopDirs, "selectDir");
-  addTable("File", "top_files_table", TopFiles, "selectFile");
+  addTable("[Warning Type] Directory", "top_dirs_table", TopDirs, "selectDir");
+  addTable("[Warning Type] File", "top_files_table", TopFiles, "selectFile");
 }
 function selectDirFile(idx, rows, dirFile) {
   if (rows.length <= idx) {
     return;
   }
   var name = rows[idx][2];
+  var type = "";
+  if (name.startsWith("[")) {
+    type = " " + name.replace(/ .*/, "");
+    name = name.replace(/.* /, "");
+  }
   var spanName = "selected_" + dirFile + "_name";
-  document.getElementById(spanName).innerHTML = name;
+  document.getElementById(spanName).innerHTML = name + type;
   var divName = "selected_" + dirFile + "_warnings";
   var numWarnings = rows[idx][1].v;
   var prefix = name.replace(/\\.\\.\\.$/, "");
   var data = new google.visualization.DataTable();
-  data.addColumn('string', numWarnings + ' warnings in ' + name);
+  data.addColumn('string', numWarnings + type + ' warnings in ' + name);
   var getWarningMessage = (FlagPlatform == "chrome")
         ? ((x) => addURLToLine(WarningMessages[Warnings[x][2]],
                                WarningLinks[Warnings[x][3]]))
         : ((x) => addURL(WarningMessages[Warnings[x][2]]));
   for (var i = 0; i < Warnings.length; i++) {
-    if (WarningMessages[Warnings[i][2]].startsWith(prefix)) {
+    if ((prefix.startsWith("*") || WarningMessages[Warnings[i][2]].startsWith(prefix)) &&
+        (type == "" || WarningMessages[Warnings[i][2]].endsWith(type))) {
       data.addRow([getWarningMessage(i)]);
     }
   }
@@ -827,14 +847,14 @@
   def section2():
     dump_dir_file_section(
         writer, 'directory', 'top_dirs_table',
-        'Directories with at least ' +
-        str(LIMIT_PERCENT_WARNINGS) + '% warnings')
+        'Directories/Warnings with at least ' +
+        str(LIMIT_PERCENT_WARNINGS) + '% of all cases')
   def section3():
     dump_dir_file_section(
         writer, 'file', 'top_files_table',
-        'Files with at least ' +
-        str(LIMIT_PERCENT_WARNINGS) + '% or ' +
-        str(LIMIT_WARNINGS_PER_FILE) + ' warnings')
+        'Files/Warnings with at least ' +
+        str(LIMIT_PERCENT_WARNINGS) + '% of all or ' +
+        str(LIMIT_WARNINGS_PER_FILE) + ' cases')
   def section4():
     writer('<script>')
     emit_js_data(writer, flags, warning_messages, warning_links,
diff --git a/tools/warn/warn_common.py b/tools/warn/warn_common.py
index 61c8676..aa68313 100755
--- a/tools/warn/warn_common.py
+++ b/tools/warn/warn_common.py
@@ -64,6 +64,10 @@
 from . import tidy_warn_patterns as tidy_patterns
 
 
+# Location of this file is used to guess the root of the Android source tree.
+THIS_FILE_PATH = 'build/make/tools/warn/warn_common.py'
+
+
 def parse_args(use_google3):
   """Define and parse the args. Return the parse_args() result."""
   parser = argparse.ArgumentParser(
@@ -217,17 +221,27 @@
   return link
 
 
-def find_warn_py_and_android_root(path):
-  """Return android source root path if warn.py is found."""
+def find_this_file_and_android_root(path):
+  """Return android source root path if this file is found."""
   parts = path.split('/')
   for idx in reversed(range(2, len(parts))):
     root_path = '/'.join(parts[:idx])
     # Android root directory should contain this script.
-    if os.path.exists(root_path + '/build/make/tools/warn.py'):
+    if os.path.exists(root_path + '/' + THIS_FILE_PATH):
       return root_path
   return ''
 
 
+def find_android_root_top_dirs(root_dir):
+  """Return a list of directories under the root_dir, if it exists."""
+  # Root directory should contain at least build/make and build/soong.
+  if (not os.path.isdir(root_dir + '/build/make') or
+      not os.path.isdir(root_dir + '/build/soong')):
+    return None
+  return list(filter(lambda d: os.path.isdir(root_dir + '/' + d),
+                     os.listdir(root_dir)))
+
+
 def find_android_root(buildlog):
   """Guess android source root from common prefix of file paths."""
   # Use the longest common prefix of the absolute file paths
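
find_this_file_and_android_root walks a warning's path from its deepest
prefix upward until some prefix contains this script, which marks the source
root; find_android_root_top_dirs then lists the root's top-level directories.
A self-contained demo of the walk against a throwaway tree (real inputs are
file paths taken from warning lines):

    import os
    import tempfile

    THIS_FILE_PATH = 'build/make/tools/warn/warn_common.py'

    def guess_root(path):
        parts = path.split('/')
        for idx in reversed(range(2, len(parts))):
            root = '/'.join(parts[:idx])
            if os.path.exists(root + '/' + THIS_FILE_PATH):
                return root
        return ''

    root = tempfile.mkdtemp()
    os.makedirs(os.path.join(root, os.path.dirname(THIS_FILE_PATH)))
    open(os.path.join(root, THIS_FILE_PATH), 'w').close()
    print(guess_root(root + '/frameworks/base/core/Foo.java') == root)  # True
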
@@ -239,8 +253,8 @@
     # We want to find android_root of a local build machine.
     # Do not use RBE warning lines, which have the '/b/f/w/' path prefix.
     # Do not use /tmp/ file warnings.
-    if warning_pattern.match(line) and (
-        '/b/f/w' not in line and not line.startswith('/tmp/')):
+    if ('/b/f/w' not in line and not line.startswith('/tmp/') and
+        warning_pattern.match(line)):
       warning_lines.append(line)
       count += 1
       if count > 9999:
@@ -249,17 +263,26 @@
       # the source tree root.
       if count < 100:
         path = os.path.normpath(re.sub(':.*$', '', line))
-        android_root = find_warn_py_and_android_root(path)
+        android_root = find_this_file_and_android_root(path)
         if android_root:
-          return android_root
+          return android_root, find_android_root_top_dirs(android_root)
   # Do not use common prefix of a small number of paths.
+  android_root = ''
   if count > 10:
     # pytype: disable=wrong-arg-types
     root_path = os.path.commonprefix(warning_lines)
     # pytype: enable=wrong-arg-types
     if len(root_path) > 2 and root_path[len(root_path) - 1] == '/':
-      return root_path[:-1]
-  return ''
+      android_root = root_path[:-1]
+  if android_root and os.path.isdir(android_root):
+    return android_root, find_android_root_top_dirs(android_root)
+  # When the build.log file is moved to a different machine where
+  # android_root is not found, use the location of this script
+  # to find the Android source tree's top-level subdirectories.
+  if __file__.endswith('/' + THIS_FILE_PATH):
+    script_root = __file__.replace('/' + THIS_FILE_PATH, '')
+    return android_root, find_android_root_top_dirs(script_root)
+  return android_root, None
 
 
 def remove_android_root_prefix(path, android_root):
@@ -310,8 +333,6 @@
   warning_pattern = re.compile(chrome_warning_pattern)
 
   # Collect all unique warning lines
-  # Remove the duplicated warnings save ~8% of time when parsing
-  # one typical build log than before
   unique_warnings = dict()
   for line in infile:
     if warning_pattern.match(line):
@@ -353,8 +374,7 @@
   target_product = 'unknown'
   target_variant = 'unknown'
   build_id = 'unknown'
-  use_rbe = False
-  android_root = find_android_root(infile)
+  android_root, root_top_dirs = find_android_root(infile)
   infile.seek(0)
 
   # rustc warning messages have two lines that should be combined:
@@ -367,24 +387,39 @@
   # C/C++ compiler warning messages have line and column numbers:
   #     some/path/file.c:line_number:column_number: warning: description
   warning_pattern = re.compile('(^[^ ]*/[^ ]*: warning: .*)|(^warning: .*)')
-  warning_without_file = re.compile('^warning: .*')
   rustc_file_position = re.compile('^[ ]+--> [^ ]*/[^ ]*:[0-9]+:[0-9]+')
 
-  # If RBE was used, try to reclaim some warning lines mixed with some
-  # leading chars from other concurrent job's stderr output .
+  # If RBE was used, try to reclaim some warning lines (from stdout)
+  # that contain leading characters from stderr.
   # The leading characters can be any character, including digits and spaces.
-  # It's impossible to correctly identify the starting point of the source
-  # file path without the file directory name knowledge.
-  # Here we can only be sure to recover lines containing "/b/f/w/".
-  rbe_warning_pattern = re.compile('.*/b/f/w/[^ ]*: warning: .*')
 
-   # Collect all unique warning lines
-  # Remove the duplicated warnings save ~8% of time when parsing
-  # one typical build log than before
+  # If a warning line's source file path contains the special RBE prefix
+  # /b/f/w/, we can remove all leading chars up to and including the "/b/f/w/".
+  bfw_warning_pattern = re.compile('.*/b/f/w/([^ ]*: warning: .*)')
+
+  # When android_root is known and available, we find its top directories
+  # and remove all leading chars before a top directory name.
+  # We assume that the leading chars from stderr do not contain "/".
+  # For example,
+  #   10external/...
+  #   12 warningsexternal/...
+  #   413 warningexternal/...
+  #   5 warnings generatedexternal/...
+  #   Suppressed 1000 warnings (packages/modules/...
+  if root_top_dirs:
+    extra_warning_pattern = re.compile(
+        '^.[^/]*((' + '|'.join(root_top_dirs) +
+        ')/[^ ]*: warning: .*)')
+  else:
+    extra_warning_pattern = re.compile('^[^/]* ([^ /]*/[^ ]*: warning: .*)')
+
+  # Collect all unique warning lines
   unique_warnings = dict()
+  checked_warning_lines = dict()
   line_counter = 0
   prev_warning = ''
   for line in infile:
+    line_counter += 1
     if prev_warning:
       if rustc_file_position.match(line):
         # must be a rustc warning, combine 2 lines into one warning
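
The two recovery patterns above can be exercised in isolation. In this sketch
the mangled input lines are invented: stderr noise glued onto a real warning
line, once behind the RBE /b/f/w/ prefix and once behind a top-directory name
(root_top_dirs here is a stand-in list):

    import re

    bfw_warning_pattern = re.compile('.*/b/f/w/([^ ]*: warning: .*)')
    root_top_dirs = ['external', 'frameworks', 'packages']  # stand-in
    extra_warning_pattern = re.compile(
        '^.[^/]*((' + '|'.join(root_top_dirs) + ')/[^ ]*: warning: .*)')

    rbe_line = '12 /b/f/w/external/foo/a.c:3:1: warning: unused variable'
    glued_line = '5 warnings generatedexternal/foo/a.c:3:1: warning: unused variable'
    print(bfw_warning_pattern.search(rbe_line).group(1))
    print(extra_warning_pattern.search(glued_line).group(1))
    # Both print: external/foo/a.c:3:1: warning: unused variable
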
@@ -399,14 +434,31 @@
           prev_warning, flags, android_root, unique_warnings)
       prev_warning = ''
 
-    if use_rbe and rbe_warning_pattern.match(line):
-      cleaned_up_line = re.sub('.*/b/f/w/', '', line)
-      unique_warnings = add_normalized_line_to_warnings(
-          cleaned_up_line, flags, android_root, unique_warnings)
+    # re.match is slow, with several warning line patterns and
+    # long input lines like "TIMEOUT: ...".
+    # We save significant time by skipping non-warning lines.
+    # But do not skip the first 100 lines, because we want to
+    # catch build variables.
+    if line_counter > 100 and line.find('warning: ') < 0:
       continue
 
+    # A large clean-build log can be up to 90% duplicated
+    # "warning:" lines. Skipping those quickly can speed up
+    # this for-loop 3X to 5X.
+    if line in checked_warning_lines:
+      continue
+    checked_warning_lines[line] = True
+
+    # Clean up an extra prefix that may have been introduced when RBE was used.
+    if '/b/f/w/' in line:
+      result = bfw_warning_pattern.search(line)
+    else:
+      result = extra_warning_pattern.search(line)
+    if result is not None:
+      line = result.group(1)
+
     if warning_pattern.match(line):
-      if warning_without_file.match(line):
+      if line.startswith('warning: '):
         # save this line and combine it with the next line
         prev_warning = line
       else:
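
The two fast paths above stand on their own: past the first 100 lines (kept
whole to catch build variables), drop lines without "warning: " before any
regex work, and drop exact duplicates via a seen-set (the patch uses a dict,
which is equivalent here). Input lines are invented:

    def candidate_warning_lines(lines):
        seen = set()
        for line_counter, line in enumerate(lines, start=1):
            # Cheap substring test instead of re.match on every line.
            if line_counter > 100 and 'warning: ' not in line:
                continue
            # Clean builds repeat identical warning lines; skip them fast.
            if line in seen:
                continue
            seen.add(line)
            yield line

    log = ['PLATFORM_VERSION=UpsideDownCake'] + \
          ['a.c:1:1: warning: unused variable'] * 1000
    print(len(list(candidate_warning_lines(log))))  # 2
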
@@ -416,7 +468,6 @@
 
     if line_counter < 100:
       # save a little bit of time by only doing this for the first few lines
-      line_counter += 1
       result = re.search('(?<=^PLATFORM_VERSION=).*', line)
       if result is not None:
         platform_version = result.group(0)
@@ -433,13 +484,6 @@
       if result is not None:
         build_id = result.group(0)
         continue
-      result = re.search('(?<=^TOP=).*', line)
-      if result is not None:
-        android_root = result.group(1)
-        continue
-      if re.search('USE_RBE=', line) is not None:
-        use_rbe = True
-        continue
 
   if android_root:
     new_unique_warnings = dict()