Merge "Improve bazel action status mnemonics"
diff --git a/Android.bp b/Android.bp
index 7c50047..42a8e5c 100644
--- a/Android.bp
+++ b/Android.bp
@@ -50,6 +50,7 @@
     name: "device_kernel_headers",
     vendor: true,
     recovery_available: true,
+    min_sdk_version: "apex_inherit",
 }
 
 cc_genrule {
@@ -60,6 +61,9 @@
         linux_bionic: {
             enabled: true,
         },
+        linux_musl: {
+            enabled: false,
+        },
         linux_glibc: {
             enabled: false,
         },
@@ -81,6 +85,9 @@
         linux_bionic: {
             enabled: true,
         },
+        linux_musl: {
+            enabled: false,
+        },
         linux_glibc: {
             enabled: false,
         },
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..caa2a95
--- /dev/null
+++ b/METADATA
@@ -0,0 +1 @@
+name: "Android"
diff --git a/OWNERS b/OWNERS
index 937a243..0662016 100644
--- a/OWNERS
+++ b/OWNERS
@@ -2,11 +2,13 @@
 # approving build related projects.
 
 # AMER
-ahumesky@google.com
+agespino@google.com
 alexmarquez@google.com
 asmundak@google.com
 ccross@android.com
+colefaust@google.com
 cparsons@google.com
+dacek@google.com
 delmerico@google.com
 dwillemsen@google.com
 eakammer@google.com
@@ -16,13 +18,12 @@
 spandandas@google.com
 tradical@google.com
 usta@google.com
+vinhdaitran@google.com
 weiwli@google.com
 yudiliu@google.com
-yuntaoxu@google.com
 
 # APAC
 jingwen@google.com
-ruperts@google.com
 
 # EMEA
 hansson@google.com
diff --git a/android/Android.bp b/android/Android.bp
index d3540b2..87b021f 100644
--- a/android/Android.bp
+++ b/android/Android.bp
@@ -8,6 +8,7 @@
     deps: [
         "blueprint",
         "blueprint-bootstrap",
+        "blueprint-metrics",
         "sbox_proto",
         "soong",
         "soong-android-soongconfig",
@@ -18,6 +19,7 @@
         "soong-shared",
         "soong-starlark-format",
         "soong-ui-metrics_proto",
+        "soong-android-allowlists",
 
         "golang-protobuf-proto",
         "golang-protobuf-encoding-prototext",
@@ -35,6 +37,7 @@
         "bazel_handler.go",
         "bazel_paths.go",
         "config.go",
+        "config_bp2build.go",
         "csuite_config.go",
         "deapexer.go",
         "defaults.go",
@@ -95,6 +98,7 @@
         "bazel_handler_test.go",
         "bazel_test.go",
         "config_test.go",
+        "config_bp2build_test.go",
         "csuite_config_test.go",
         "defaults_test.go",
         "depset_test.go",
diff --git a/android/allowlists/Android.bp b/android/allowlists/Android.bp
new file mode 100644
index 0000000..05cffc1
--- /dev/null
+++ b/android/allowlists/Android.bp
@@ -0,0 +1,25 @@
+// Copyright 2022 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+bootstrap_go_package {
+    name: "soong-android-allowlists",
+    pkgPath: "android/soong/android/allowlists",
+    srcs: [
+        "allowlists.go",
+    ],
+}
diff --git a/android/allowlists/allowlists.go b/android/allowlists/allowlists.go
new file mode 100644
index 0000000..4fd4f13
--- /dev/null
+++ b/android/allowlists/allowlists.go
@@ -0,0 +1,418 @@
+// Copyright 2022 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package allowlists
+
+// Configuration to decide if modules in a directory should default to true/false for bp2build_available
+type Bp2BuildConfig map[string]BazelConversionConfigEntry
+type BazelConversionConfigEntry int
+
+const (
+	// iota + 1 ensures that the int value is not 0 when used in the Bp2buildAllowlist map,
+	// which can also mean that the key doesn't exist in a lookup.
+
+	// all modules in this package and subpackages default to bp2build_available: true.
+	// allows modules to opt-out.
+	Bp2BuildDefaultTrueRecursively BazelConversionConfigEntry = iota + 1
+
+	// all modules in this package (not recursively) default to bp2build_available: true.
+	// allows modules to opt-out.
+	Bp2BuildDefaultTrue
+
+	// all modules in this package (not recursively) default to bp2build_available: false.
+	// allows modules to opt-in.
+	Bp2BuildDefaultFalse
+)
+
+var (
+	Bp2buildDefaultConfig = Bp2BuildConfig{
+		"art/libartpalette":                     Bp2BuildDefaultTrueRecursively,
+		"art/libdexfile":                        Bp2BuildDefaultTrueRecursively,
+		"art/runtime":                           Bp2BuildDefaultTrueRecursively,
+		"art/tools":                             Bp2BuildDefaultTrue,
+		"bionic":                                Bp2BuildDefaultTrueRecursively,
+		"bootable/recovery/tools/recovery_l10n": Bp2BuildDefaultTrue,
+		"build/bazel/examples/soong_config_variables":        Bp2BuildDefaultTrueRecursively,
+		"build/bazel/examples/apex/minimal":                  Bp2BuildDefaultTrueRecursively,
+		"build/make/tools/signapk":                           Bp2BuildDefaultTrue,
+		"build/make/target/product/security":                 Bp2BuildDefaultTrue,
+		"build/soong":                                        Bp2BuildDefaultTrue,
+		"build/soong/cc/libbuildversion":                     Bp2BuildDefaultTrue, // Skip tests subdir
+		"build/soong/cc/ndkstubgen":                          Bp2BuildDefaultTrue,
+		"build/soong/cc/symbolfile":                          Bp2BuildDefaultTrue,
+		"build/soong/linkerconfig":                           Bp2BuildDefaultTrueRecursively,
+		"build/soong/scripts":                                Bp2BuildDefaultTrueRecursively,
+		"cts/common/device-side/nativetesthelper/jni":        Bp2BuildDefaultTrueRecursively,
+		"development/apps/DevelopmentSettings":               Bp2BuildDefaultTrue,
+		"development/apps/Fallback":                          Bp2BuildDefaultTrue,
+		"development/apps/WidgetPreview":                     Bp2BuildDefaultTrue,
+		"development/samples/BasicGLSurfaceView":             Bp2BuildDefaultTrue,
+		"development/samples/BluetoothChat":                  Bp2BuildDefaultTrue,
+		"development/samples/BrokenKeyDerivation":            Bp2BuildDefaultTrue,
+		"development/samples/Compass":                        Bp2BuildDefaultTrue,
+		"development/samples/ContactManager":                 Bp2BuildDefaultTrue,
+		"development/samples/FixedGridLayout":                Bp2BuildDefaultTrue,
+		"development/samples/HelloEffects":                   Bp2BuildDefaultTrue,
+		"development/samples/Home":                           Bp2BuildDefaultTrue,
+		"development/samples/HoneycombGallery":               Bp2BuildDefaultTrue,
+		"development/samples/JetBoy":                         Bp2BuildDefaultTrue,
+		"development/samples/KeyChainDemo":                   Bp2BuildDefaultTrue,
+		"development/samples/LceDemo":                        Bp2BuildDefaultTrue,
+		"development/samples/LunarLander":                    Bp2BuildDefaultTrue,
+		"development/samples/MultiResolution":                Bp2BuildDefaultTrue,
+		"development/samples/MultiWindow":                    Bp2BuildDefaultTrue,
+		"development/samples/NotePad":                        Bp2BuildDefaultTrue,
+		"development/samples/Obb":                            Bp2BuildDefaultTrue,
+		"development/samples/RSSReader":                      Bp2BuildDefaultTrue,
+		"development/samples/ReceiveShareDemo":               Bp2BuildDefaultTrue,
+		"development/samples/SearchableDictionary":           Bp2BuildDefaultTrue,
+		"development/samples/SipDemo":                        Bp2BuildDefaultTrue,
+		"development/samples/SkeletonApp":                    Bp2BuildDefaultTrue,
+		"development/samples/Snake":                          Bp2BuildDefaultTrue,
+		"development/samples/SpellChecker/":                  Bp2BuildDefaultTrueRecursively,
+		"development/samples/ThemedNavBarKeyboard":           Bp2BuildDefaultTrue,
+		"development/samples/ToyVpn":                         Bp2BuildDefaultTrue,
+		"development/samples/TtsEngine":                      Bp2BuildDefaultTrue,
+		"development/samples/USB/AdbTest":                    Bp2BuildDefaultTrue,
+		"development/samples/USB/MissileLauncher":            Bp2BuildDefaultTrue,
+		"development/samples/VoiceRecognitionService":        Bp2BuildDefaultTrue,
+		"development/samples/VoicemailProviderDemo":          Bp2BuildDefaultTrue,
+		"development/samples/WiFiDirectDemo":                 Bp2BuildDefaultTrue,
+		"development/sdk":                                    Bp2BuildDefaultTrueRecursively,
+		"external/arm-optimized-routines":                    Bp2BuildDefaultTrueRecursively,
+		"external/auto/android-annotation-stubs":             Bp2BuildDefaultTrueRecursively,
+		"external/auto/common":                               Bp2BuildDefaultTrueRecursively,
+		"external/auto/service":                              Bp2BuildDefaultTrueRecursively,
+		"external/boringssl":                                 Bp2BuildDefaultTrueRecursively,
+		"external/bouncycastle":                              Bp2BuildDefaultTrue,
+		"external/brotli":                                    Bp2BuildDefaultTrue,
+		"external/conscrypt":                                 Bp2BuildDefaultTrue,
+		"external/e2fsprogs":                                 Bp2BuildDefaultTrueRecursively,
+		"external/error_prone":                               Bp2BuildDefaultTrueRecursively,
+		"external/fmtlib":                                    Bp2BuildDefaultTrueRecursively,
+		"external/google-benchmark":                          Bp2BuildDefaultTrueRecursively,
+		"external/googletest":                                Bp2BuildDefaultTrueRecursively,
+		"external/gwp_asan":                                  Bp2BuildDefaultTrueRecursively,
+		"external/icu":                                       Bp2BuildDefaultTrueRecursively,
+		"external/icu/android_icu4j":                         Bp2BuildDefaultFalse, // java rules incomplete
+		"external/icu/icu4j":                                 Bp2BuildDefaultFalse, // java rules incomplete
+		"external/javapoet":                                  Bp2BuildDefaultTrueRecursively,
+		"external/jemalloc_new":                              Bp2BuildDefaultTrueRecursively,
+		"external/jsoncpp":                                   Bp2BuildDefaultTrueRecursively,
+		"external/libcap":                                    Bp2BuildDefaultTrueRecursively,
+		"external/libcxx":                                    Bp2BuildDefaultTrueRecursively,
+		"external/libcxxabi":                                 Bp2BuildDefaultTrueRecursively,
+		"external/libevent":                                  Bp2BuildDefaultTrueRecursively,
+		"external/libpng":                                    Bp2BuildDefaultTrueRecursively,
+		"external/lz4/lib":                                   Bp2BuildDefaultTrue,
+		"external/lzma/C":                                    Bp2BuildDefaultTrueRecursively,
+		"external/mdnsresponder":                             Bp2BuildDefaultTrueRecursively,
+		"external/minijail":                                  Bp2BuildDefaultTrueRecursively,
+		"external/pcre":                                      Bp2BuildDefaultTrueRecursively,
+		"external/protobuf":                                  Bp2BuildDefaultTrueRecursively,
+		"external/python/six":                                Bp2BuildDefaultTrueRecursively,
+		"external/scudo":                                     Bp2BuildDefaultTrueRecursively,
+		"external/selinux/libselinux":                        Bp2BuildDefaultTrueRecursively,
+		"external/selinux/libsepol":                          Bp2BuildDefaultTrueRecursively,
+		"external/zlib":                                      Bp2BuildDefaultTrueRecursively,
+		"external/zstd":                                      Bp2BuildDefaultTrueRecursively,
+		"frameworks/base/media/tests/MediaDump":              Bp2BuildDefaultTrue,
+		"frameworks/base/startop/apps/test":                  Bp2BuildDefaultTrue,
+		"frameworks/base/tests/appwidgets/AppWidgetHostTest": Bp2BuildDefaultTrueRecursively,
+		"frameworks/native/libs/adbd_auth":                   Bp2BuildDefaultTrueRecursively,
+		"frameworks/native/opengl/tests/gl2_cameraeye":       Bp2BuildDefaultTrue,
+		"frameworks/native/opengl/tests/gl2_java":            Bp2BuildDefaultTrue,
+		"frameworks/native/opengl/tests/testLatency":         Bp2BuildDefaultTrue,
+		"frameworks/native/opengl/tests/testPauseResume":     Bp2BuildDefaultTrue,
+		"frameworks/native/opengl/tests/testViewport":        Bp2BuildDefaultTrue,
+		"frameworks/proto_logging/stats/stats_log_api_gen":   Bp2BuildDefaultTrueRecursively,
+		"libnativehelper":                                    Bp2BuildDefaultTrueRecursively,
+		"packages/apps/DevCamera":                            Bp2BuildDefaultTrue,
+		"packages/apps/HTMLViewer":                           Bp2BuildDefaultTrue,
+		"packages/apps/Protips":                              Bp2BuildDefaultTrue,
+		"packages/modules/StatsD/lib/libstatssocket":         Bp2BuildDefaultTrueRecursively,
+		"packages/modules/adb":                               Bp2BuildDefaultTrue,
+		"packages/modules/adb/apex":                          Bp2BuildDefaultTrue,
+		"packages/modules/adb/crypto":                        Bp2BuildDefaultTrueRecursively,
+		"packages/modules/adb/libs":                          Bp2BuildDefaultTrueRecursively,
+		"packages/modules/adb/pairing_auth":                  Bp2BuildDefaultTrueRecursively,
+		"packages/modules/adb/pairing_connection":            Bp2BuildDefaultTrueRecursively,
+		"packages/modules/adb/proto":                         Bp2BuildDefaultTrueRecursively,
+		"packages/modules/adb/tls":                           Bp2BuildDefaultTrueRecursively,
+		"packages/providers/MediaProvider/tools/dialogs":     Bp2BuildDefaultTrue,
+		"packages/screensavers/Basic":                        Bp2BuildDefaultTrue,
+		"packages/services/Car/tests/SampleRearViewCamera":   Bp2BuildDefaultTrue,
+		"prebuilts/clang/host/linux-x86":                     Bp2BuildDefaultTrueRecursively,
+		"prebuilts/tools/common/m2":                          Bp2BuildDefaultTrue,
+		"system/apex":                                        Bp2BuildDefaultFalse, // TODO(b/207466993): flaky failures
+		"system/apex/proto":                                  Bp2BuildDefaultTrueRecursively,
+		"system/apex/libs":                                   Bp2BuildDefaultTrueRecursively,
+		"system/core/debuggerd":                              Bp2BuildDefaultTrueRecursively,
+		"system/core/diagnose_usb":                           Bp2BuildDefaultTrueRecursively,
+		"system/core/libasyncio":                             Bp2BuildDefaultTrue,
+		"system/core/libcrypto_utils":                        Bp2BuildDefaultTrueRecursively,
+		"system/core/libcutils":                              Bp2BuildDefaultTrueRecursively,
+		"system/core/libpackagelistparser":                   Bp2BuildDefaultTrueRecursively,
+		"system/core/libprocessgroup":                        Bp2BuildDefaultTrue,
+		"system/core/libprocessgroup/cgrouprc":               Bp2BuildDefaultTrue,
+		"system/core/libprocessgroup/cgrouprc_format":        Bp2BuildDefaultTrue,
+		"system/core/libsystem":                              Bp2BuildDefaultTrueRecursively,
+		"system/core/libutils":                               Bp2BuildDefaultTrueRecursively,
+		"system/core/libvndksupport":                         Bp2BuildDefaultTrueRecursively,
+		"system/core/property_service/libpropertyinfoparser": Bp2BuildDefaultTrueRecursively,
+		"system/libbase":                                     Bp2BuildDefaultTrueRecursively,
+		"system/libprocinfo":                                 Bp2BuildDefaultTrue,
+		"system/libziparchive":                               Bp2BuildDefaultTrueRecursively,
+		"system/logging/liblog":                              Bp2BuildDefaultTrueRecursively,
+		"system/sepolicy/apex":                               Bp2BuildDefaultTrueRecursively,
+		"system/timezone/apex":                               Bp2BuildDefaultTrueRecursively,
+		"system/timezone/output_data":                        Bp2BuildDefaultTrueRecursively,
+		"system/unwinding/libbacktrace":                      Bp2BuildDefaultTrueRecursively,
+		"system/unwinding/libunwindstack":                    Bp2BuildDefaultTrueRecursively,
+		"tools/apksig":                                       Bp2BuildDefaultTrue,
+		"tools/platform-compat/java/android/compat":          Bp2BuildDefaultTrueRecursively,
+	}
+
+	Bp2buildKeepExistingBuildFile = map[string]bool{
+		// This is actually build/bazel/build.BAZEL symlinked to ./BUILD
+		".":/*recursive = */ false,
+
+		// build/bazel/examples/apex/... BUILD files should be generated, so
+		// build/bazel is not recursive. Instead list each subdirectory under
+		// build/bazel explicitly.
+		"build/bazel":/* recursive = */ false,
+		"build/bazel/ci/dist":/* recursive = */ false,
+		"build/bazel/examples/android_app":/* recursive = */ true,
+		"build/bazel/examples/java":/* recursive = */ true,
+		"build/bazel/bazel_skylib":/* recursive = */ true,
+		"build/bazel/rules":/* recursive = */ true,
+		"build/bazel/rules_cc":/* recursive = */ true,
+		"build/bazel/scripts":/* recursive = */ true,
+		"build/bazel/tests":/* recursive = */ true,
+		"build/bazel/platforms":/* recursive = */ true,
+		"build/bazel/product_variables":/* recursive = */ true,
+		"build/bazel/vendor/google":/* recursive = */ true,
+		"build/bazel_common_rules":/* recursive = */ true,
+		// build/make/tools/signapk BUILD file is generated, so build/make/tools is not recursive.
+		"build/make/tools":/* recursive = */ false,
+		"build/pesto":/* recursive = */ true,
+
+		// external/bazelbuild-rules_android/... is needed by mixed builds, otherwise mixed builds analysis fails
+		// e.g. ERROR: Analysis of target '@soong_injection//mixed_builds:buildroot' failed
+		"external/bazelbuild-rules_android":/* recursive = */ true,
+		"external/bazel-skylib":/* recursive = */ true,
+		"external/guava":/* recursive = */ true,
+		"external/jsr305":/* recursive = */ true,
+		"frameworks/ex/common":/* recursive = */ true,
+
+		"packages/apps/Music":/* recursive = */ true,
+		"packages/apps/QuickSearchBox":/* recursive = */ true,
+		"packages/apps/WallpaperPicker":/* recursive = */ false,
+
+		"prebuilts/bundletool":/* recursive = */ true,
+		"prebuilts/gcc":/* recursive = */ true,
+		"prebuilts/build-tools":/* recursive = */ false,
+		"prebuilts/jdk/jdk11":/* recursive = */ false,
+		"prebuilts/sdk":/* recursive = */ false,
+		"prebuilts/sdk/current/extras/app-toolkit":/* recursive = */ false,
+		"prebuilts/sdk/current/support":/* recursive = */ false,
+		"prebuilts/sdk/tools":/* recursive = */ false,
+		"prebuilts/r8":/* recursive = */ false,
+	}
+
+	Bp2buildModuleAlwaysConvertList = []string{
+		//external/avb
+		"avbtool",
+		"libavb",
+		"avb_headers",
+
+		//external/fec
+		"libfec_rs",
+
+		//system/core/libsparse
+		"libsparse",
+
+		//system/extras/ext4_utils
+		"libext4_utils",
+
+		//system/extras/libfec
+		"libfec",
+
+		//system/extras/squashfs_utils
+		"libsquashfs_utils",
+
+		//system/extras/verity/fec
+		"fec",
+
+		//packages/apps/Car/libs/car-ui-lib/car-ui-androidx
+		// genrule dependencies for java_imports
+		"car-ui-androidx-annotation-nodeps",
+		"car-ui-androidx-collection-nodeps",
+		"car-ui-androidx-core-common-nodeps",
+		"car-ui-androidx-lifecycle-common-nodeps",
+		"car-ui-androidx-constraintlayout-solver-nodeps",
+	}
+
+	Bp2buildModuleTypeAlwaysConvertList = []string{
+		"java_import",
+		"java_import_host",
+	}
+
+	Bp2buildModuleDoNotConvertList = []string{
+		// cc bugs
+		"libsepol",                                  // TODO(b/207408632): Unsupported case of .l sources in cc library rules
+		"libactivitymanager_aidl",                   // TODO(b/207426160): Unsupported use of aidl sources (via Dactivity_manager_procstate_aidl) in a cc_library
+		"gen-kotlin-build-file.py",                  // TODO(b/198619163) module has same name as source
+		"libgtest_ndk_c++", "libgtest_main_ndk_c++", // TODO(b/201816222): Requires sdk_version support.
+		"linkerconfig", "mdnsd", // TODO(b/202876379): has arch-variant static_executable
+		"linker",       // TODO(b/228316882): cc_binary uses link_crt
+		"libdebuggerd", // TODO(b/228314770): support product variable-specific header_libs
+		"versioner",    // TODO(b/228313961):  depends on prebuilt shared library libclang-cpp_host as a shared library, which does not supply expected providers for a shared library
+
+		// java bugs
+		"libbase_ndk", // TODO(b/186826477): fails to link libctscamera2_jni for device (required for CtsCameraTestCases)
+
+		// python protos
+		"libprotobuf-python",                           // TODO(b/196084681): contains .proto sources
+		"apex_build_info_proto", "apex_manifest_proto", // TODO(b/196084681): a python lib with proto sources
+		"linker_config_proto", // TODO(b/196084681): contains .proto sources
+
+		// genrule incompatibilities
+		"brotli-fuzzer-corpus",                                       // TODO(b/202015218): outputs are in location incompatible with bazel genrule handling.
+		"platform_tools_properties", "build_tools_source_properties", // TODO(b/203369847): multiple genrules in the same package creating the same file
+
+		// aar support
+		"prebuilt_car-ui-androidx-core-common",         // TODO(b/224773339), genrule dependency creates an .aar, not a .jar
+		"prebuilt_platform-robolectric-4.4-prebuilt",   // aosp/1999250, needs .aar support in Jars
+		"prebuilt_platform-robolectric-4.5.1-prebuilt", // aosp/1999250, needs .aar support in Jars
+
+		// path property for filegroups
+		"conscrypt",                        // TODO(b/210751803), we don't handle path property for filegroups
+		"conscrypt-for-host",               // TODO(b/210751803), we don't handle path property for filegroups
+		"host-libprotobuf-java-full",       // TODO(b/210751803), we don't handle path property for filegroups
+		"libprotobuf-internal-protos",      // TODO(b/210751803), we don't handle path property for filegroups
+		"libprotobuf-internal-python-srcs", // TODO(b/210751803), we don't handle path property for filegroups
+		"libprotobuf-java-full",            // TODO(b/210751803), we don't handle path property for filegroups
+		"libprotobuf-java-util-full",       // TODO(b/210751803), we don't handle path property for filegroups
+
+		// go deps:
+		"analyze_bcpf",                                                                               // depends on bpmodify a blueprint_go_binary.
+		"apex-protos",                                                                                // depends on soong_zip, a go binary
+		"generated_android_icu4j_src_files", "generated_android_icu4j_test_files", "icu4c_test_data", // depends on unconverted modules: soong_zip
+		"host_bionic_linker_asm",                                                  // depends on extract_linker, a go binary.
+		"host_bionic_linker_script",                                               // depends on extract_linker, a go binary.
+		"libc_musl_sysroot_bionic_arch_headers",                                   // depends on soong_zip
+		"libc_musl_sysroot_bionic_headers",                                        // 218405924, depends on soong_zip and generates duplicate srcs
+		"libc_musl_sysroot_libc++_headers", "libc_musl_sysroot_libc++abi_headers", // depends on soong_zip, zip2zip
+		"robolectric-sqlite4java-native", // depends on soong_zip, a go binary
+		"robolectric_tzdata",             // depends on soong_zip, a go binary
+
+		// rust support
+		"libtombstoned_client_rust_bridge_code", "libtombstoned_client_wrapper", // rust conversions are not supported
+
+		// unconverted deps
+		"CarHTMLViewer",                // depends on unconverted modules android.car-stubs, car-ui-lib
+		"abb",                          // depends on unconverted modules: libcmd, libbinder
+		"adb",                          // depends on unconverted modules: AdbWinApi, libandroidfw, libopenscreen-discovery, libopenscreen-platform-impl, libusb, bin2c_fastdeployagent, AdbWinUsbApi
+		"android_icu4j_srcgen",         // depends on unconverted modules: currysrc
+		"android_icu4j_srcgen_binary",  // depends on unconverted modules: android_icu4j_srcgen, currysrc
+		"apex_manifest_proto_java",     // b/210751803, depends on libprotobuf-java-full
+		"art-script",                   // depends on unconverted modules: dalvikvm, dex2oat
+		"bin2c_fastdeployagent",        // depends on unconverted modules: deployagent
+		"chkcon", "sefcontext_compile", // depends on unconverted modules: libsepol
+		"com.android.runtime",                                        // depends on unconverted modules: bionic-linker-config, linkerconfig
+		"conv_linker_config",                                         // depends on unconverted modules: linker_config_proto
+		"currysrc",                                                   // depends on unconverted modules: currysrc_org.eclipse, guavalib, jopt-simple-4.9
+		"dex2oat-script",                                             // depends on unconverted modules: dex2oat
+		"generated_android_icu4j_resources",                          // depends on unconverted modules: android_icu4j_srcgen_binary, soong_zip
+		"generated_android_icu4j_test_resources",                     // depends on unconverted modules: android_icu4j_srcgen_binary, soong_zip
+		"host-libprotobuf-java-nano",                                 // b/220869005, depends on libprotobuf-java-nano
+		"libadb_host",                                                // depends on unconverted modules: AdbWinApi, libopenscreen-discovery, libopenscreen-platform-impl, libusb
+		"libart",                                                     // depends on unconverted modules: apex-info-list-tinyxml, libtinyxml2, libnativeloader-headers, heapprofd_client_api, art_operator_srcs, libcpu_features, libodrstatslog, libelffile, art_cmdlineparser_headers, cpp-define-generator-definitions, libdexfile, libnativebridge, libnativeloader, libsigchain, libartbase, libprofile, cpp-define-generator-asm-support
+		"libart-runtime-gtest",                                       // depends on unconverted modules: libgtest_isolated, libart-compiler, libdexfile, libprofile, libartbase, libartbase-art-gtest
+		"libart_headers",                                             // depends on unconverted modules: art_libartbase_headers
+		"libartd",                                                    // depends on unconverted modules: art_operator_srcs, libcpu_features, libodrstatslog, libelffiled, art_cmdlineparser_headers, cpp-define-generator-definitions, libdexfiled, libnativebridge, libnativeloader, libsigchain, libartbased, libprofiled, cpp-define-generator-asm-support, apex-info-list-tinyxml, libtinyxml2, libnativeloader-headers, heapprofd_client_api
+		"libartd-runtime-gtest",                                      // depends on unconverted modules: libgtest_isolated, libartd-compiler, libdexfiled, libprofiled, libartbased, libartbased-art-gtest
+		"libdebuggerd_handler",                                       // depends on unconverted module libdebuggerd_handler_core
+		"libdebuggerd_handler_core", "libdebuggerd_handler_fallback", // depends on unconverted module libdebuggerd
+		"libdexfile",                                              // depends on unconverted modules: dexfile_operator_srcs, libartbase, libartpalette,
+		"libdexfile_static",                                       // depends on unconverted modules: libartbase, libdexfile
+		"libdexfiled",                                             // depends on unconverted modules: dexfile_operator_srcs, libartbased, libartpalette
+		"libfastdeploy_host",                                      // depends on unconverted modules: libandroidfw, libusb, AdbWinApi
+		"libgmock_main_ndk",                                       // depends on unconverted modules: libgtest_ndk_c++
+		"libgmock_ndk",                                            // depends on unconverted modules: libgtest_ndk_c++
+		"libnativehelper_lazy_mts_jni", "libnativehelper_mts_jni", // depends on unconverted modules: libnativetesthelper_jni, libgmock_ndk
+		"libnativetesthelper_jni",   // depends on unconverted modules: libgtest_ndk_c++
+		"libprotobuf-java-nano",     // b/220869005, depends on non-public_current SDK
+		"libstatslog",               // depends on unconverted modules: libstatspull, statsd-aidl-ndk, libbinder_ndk
+		"libstatslog_art",           // depends on unconverted modules: statslog_art.cpp, statslog_art.h
+		"linker_reloc_bench_main",   // depends on unconverted modules: liblinker_reloc_bench_*
+		"pbtombstone", "crash_dump", // depends on libdebuggerd, libunwindstack
+		"robolectric-sqlite4java-0.282",             // depends on unconverted modules: robolectric-sqlite4java-import, robolectric-sqlite4java-native
+		"static_crasher",                            // depends on unconverted modules: libdebuggerd_handler
+		"stats-log-api-gen",                         // depends on unconverted modules: libstats_proto_host
+		"statslog.cpp", "statslog.h", "statslog.rs", // depends on unconverted modules: stats-log-api-gen
+		"statslog_art.cpp", "statslog_art.h", "statslog_header.rs", // depends on unconverted modules: stats-log-api-gen
+		"timezone-host",       // depends on unconverted modules: art.module.api.annotations
+		"truth-host-prebuilt", // depends on unconverted modules: truth-prebuilt
+		"truth-prebuilt",      // depends on unconverted modules: asm-7.0, guava
+
+		// b/215723302; awaiting tz{data,_version} to then rename targets conflicting with srcs
+		"tzdata",
+		"tz_version",
+	}
+
+	Bp2buildCcLibraryStaticOnlyList = []string{}
+
+	MixedBuildsDisabledList = []string{
+		"art_libdexfile_dex_instruction_list_header", // breaks libart_mterp.armng, header not found
+
+		"libbrotli",               // http://b/198585397, ld.lld: error: bionic/libc/arch-arm64/generic/bionic/memmove.S:95:(.text+0x10): relocation R_AARCH64_CONDBR19 out of range: -1404176 is not in [-1048576, 1048575]; references __memcpy
+		"minijail_constants_json", // http://b/200899432, bazel-built cc_genrule does not work in mixed build when it is a dependency of another soong module.
+
+		"cap_names.h",                                  // TODO(b/204913827) runfiles need to be handled in mixed builds
+		"libcap",                                       // TODO(b/204913827) runfiles need to be handled in mixed builds
+		"libprotobuf-cpp-full", "libprotobuf-cpp-lite", // Unsupported product&vendor suffix. b/204811222 and b/204810610.
+
+		// Depends on libprotobuf-cpp-*
+		"libadb_pairing_connection",
+		"libadb_pairing_connection_static",
+		"libadb_pairing_server", "libadb_pairing_server_static",
+
+		// TODO(b/204811222) support suffix in cc_binary
+		"acvp_modulewrapper",
+		"android.hardware.media.c2@1.0-service-v4l2",
+		"app_process",
+		"bar_test",
+		"bench_cxa_atexit",
+		"bench_noop",
+		"bench_noop_nostl",
+		"bench_noop_static",
+		"boringssl_self_test",
+		"boringssl_self_test_vendor",
+		"bssl",
+		"cavp",
+		"crash_dump",
+		"crasher",
+		"libcxx_test_template",
+		"linker",
+		"memory_replay",
+		"native_bridge_guest_linker",
+		"native_bridge_stub_library_defaults",
+		"noop",
+		"simpleperf_ndk",
+		"toybox-static",
+		"zlib_bench",
+	}
+)
diff --git a/android/androidmk.go b/android/androidmk.go
index 72b6584..5c715b4 100644
--- a/android/androidmk.go
+++ b/android/androidmk.go
@@ -148,6 +148,14 @@
 	// without worrying about the variables being mixed up in the actual mk file.
 	// 3. Makes troubleshooting and spotting errors easier.
 	entryOrder []string
+
+	// Provides data typically stored by Context objects that are commonly needed by
+	// AndroidMkEntries objects.
+	entryContext AndroidMkEntriesContext
+}
+
+type AndroidMkEntriesContext interface {
+	Config() Config
 }
 
 type AndroidMkExtraEntriesContext interface {
@@ -408,10 +416,19 @@
 				}
 			}
 
+			ext := filepath.Ext(dest)
+			suffix := ""
 			if dist.Suffix != nil {
-				ext := filepath.Ext(dest)
-				suffix := *dist.Suffix
-				dest = strings.TrimSuffix(dest, ext) + suffix + ext
+				suffix = *dist.Suffix
+			}
+
+			productString := ""
+			if dist.Append_artifact_with_product != nil && *dist.Append_artifact_with_product {
+				productString = fmt.Sprintf("_%s", a.entryContext.Config().DeviceProduct())
+			}
+
+			if suffix != "" || productString != "" {
+				dest = strings.TrimSuffix(dest, ext) + suffix + productString + ext
 			}
 
 			if dist.Dir != nil {
@@ -478,6 +495,7 @@
 }
 
 func (a *AndroidMkEntries) fillInEntries(ctx fillInEntriesContext, mod blueprint.Module) {
+	a.entryContext = ctx
 	a.EntryMap = make(map[string][]string)
 	amod := mod.(Module)
 	base := amod.base()
@@ -560,7 +578,7 @@
 			}
 		}
 
-		if !base.InRamdisk() && !base.InVendorRamdisk() {
+		if !base.InVendorRamdisk() {
 			a.AddPaths("LOCAL_FULL_INIT_RC", base.initRcPaths)
 		}
 		if len(base.vintfFragmentsPaths) > 0 {
diff --git a/android/androidmk_test.go b/android/androidmk_test.go
index ecfb008..caf11f1 100644
--- a/android/androidmk_test.go
+++ b/android/androidmk_test.go
@@ -148,6 +148,9 @@
 		FixtureRegisterWithContext(func(ctx RegistrationContext) {
 			ctx.RegisterModuleType("custom", customModuleFactory)
 		}),
+		FixtureModifyProductVariables(func(variables FixtureProductVariables) {
+			variables.DeviceProduct = proptools.StringPtr("bar")
+		}),
 		FixtureWithRootAndroidBp(bp),
 	).RunTest(t)
 
@@ -400,6 +403,25 @@
 			},
 		})
 
+	testHelper(t, "append-artifact-with-product", `
+			custom {
+				name: "foo",
+				dist: {
+					targets: ["my_goal"],
+					append_artifact_with_product: true,
+				}
+			}
+`, &distContributions{
+		copiesForGoals: []*copiesForGoals{
+			{
+				goals: "my_goal",
+				copies: []distCopy{
+					distCopyForTest("one.out", "one_bar.out"),
+				},
+			},
+		},
+	})
+
 	testHelper(t, "dists-with-tag", `
 			custom {
 				name: "foo",
diff --git a/android/apex.go b/android/apex.go
index cf1bcfe..b127f74 100644
--- a/android/apex.go
+++ b/android/apex.go
@@ -113,9 +113,6 @@
 	for _, sdk := range i.RequiredSdks {
 		name += "_" + sdk.Name + "_" + sdk.Version
 	}
-	if i.UsePlatformApis {
-		name += "_private"
-	}
 	return name
 }
 
@@ -546,10 +543,9 @@
 			merged[index].InApexModules = append(merged[index].InApexModules, apexInfo.InApexModules...)
 			merged[index].ApexContents = append(merged[index].ApexContents, apexInfo.ApexContents...)
 			merged[index].Updatable = merged[index].Updatable || apexInfo.Updatable
-			if merged[index].UsePlatformApis != apexInfo.UsePlatformApis {
-				panic(fmt.Errorf("variants having different UsePlatformApis can't be merged"))
-			}
-			merged[index].UsePlatformApis = apexInfo.UsePlatformApis
+			// Using platform APIs is allowed for this module only when all APEXes
+			// containing the module have `use_platform_apis: true`.
+			merged[index].UsePlatformApis = merged[index].UsePlatformApis && apexInfo.UsePlatformApis
 		} else {
 			seen[mergedName] = len(merged)
 			apexInfo.ApexVariationName = mergedName
diff --git a/android/apex_test.go b/android/apex_test.go
index 60a639b..1e2f3bd 100644
--- a/android/apex_test.go
+++ b/android/apex_test.go
@@ -118,17 +118,30 @@
 			},
 		},
 		{
-			name: "don't merge different UsePlatformApis",
+			name: "merge different UsePlatformApis but don't allow using platform api",
 			in: []ApexInfo{
 				{"foo", FutureApiLevel, false, false, nil, []string{"foo"}, []string{"foo"}, nil, NotForPrebuiltApex},
 				{"bar", FutureApiLevel, false, true, nil, []string{"bar"}, []string{"bar"}, nil, NotForPrebuiltApex},
 			},
 			wantMerged: []ApexInfo{
-				{"apex10000_private", FutureApiLevel, false, true, nil, []string{"bar"}, []string{"bar"}, nil, NotForPrebuiltApex},
-				{"apex10000", FutureApiLevel, false, false, nil, []string{"foo"}, []string{"foo"}, nil, NotForPrebuiltApex},
+				{"apex10000", FutureApiLevel, false, false, nil, []string{"bar", "foo"}, []string{"bar", "foo"}, nil, NotForPrebuiltApex},
 			},
 			wantAliases: [][2]string{
-				{"bar", "apex10000_private"},
+				{"bar", "apex10000"},
+				{"foo", "apex10000"},
+			},
+		},
+		{
+			name: "merge same UsePlatformApis and allow using platform api",
+			in: []ApexInfo{
+				{"foo", FutureApiLevel, false, true, nil, []string{"foo"}, []string{"foo"}, nil, NotForPrebuiltApex},
+				{"bar", FutureApiLevel, false, true, nil, []string{"bar"}, []string{"bar"}, nil, NotForPrebuiltApex},
+			},
+			wantMerged: []ApexInfo{
+				{"apex10000", FutureApiLevel, false, true, nil, []string{"bar", "foo"}, []string{"bar", "foo"}, nil, NotForPrebuiltApex},
+			},
+			wantAliases: [][2]string{
+				{"bar", "apex10000"},
 				{"foo", "apex10000"},
 			},
 		},
diff --git a/android/api_levels.go b/android/api_levels.go
index 926d297..8163894 100644
--- a/android/api_levels.go
+++ b/android/api_levels.go
@@ -18,6 +18,9 @@
 	"encoding/json"
 	"fmt"
 	"strconv"
+
+	"android/soong/bazel"
+	"android/soong/starlark_fmt"
 )
 
 func init() {
@@ -321,6 +324,7 @@
 			"Q":     29,
 			"R":     30,
 			"S":     31,
+			"S-V2":  32,
 		}
 
 		// TODO: Differentiate "current" and "future".
@@ -364,6 +368,7 @@
 			"Q":     29,
 			"R":     30,
 			"S":     31,
+			"S-V2":  32,
 		}
 		for i, codename := range config.PlatformVersionActiveCodenames() {
 			apiLevelsMap[codename] = previewAPILevelBase + i
@@ -378,3 +383,21 @@
 	apiLevelsJson := GetApiLevelsJson(ctx)
 	createApiLevelsJson(ctx, apiLevelsJson, apiLevelsMap)
 }
+
+func printApiLevelsStarlarkDict(config Config) string {
+	apiLevelsMap := GetApiLevelsMap(config)
+	valDict := make(map[string]string, len(apiLevelsMap))
+	for k, v := range apiLevelsMap {
+		valDict[k] = strconv.Itoa(v)
+	}
+	return starlark_fmt.PrintDict(valDict, 0)
+}
+
+func StarlarkApiLevelConfigs(config Config) string {
+	return fmt.Sprintf(bazel.GeneratedBazelFileWarning+`
+_api_levels = %s
+
+api_levels = _api_levels
+`, printApiLevelsStarlarkDict(config),
+	)
+}
diff --git a/android/arch.go b/android/arch.go
index a719cf3..f732a7d 100644
--- a/android/arch.go
+++ b/android/arch.go
@@ -22,6 +22,7 @@
 	"strings"
 
 	"android/soong/bazel"
+	"android/soong/starlark_fmt"
 
 	"github.com/google/blueprint"
 	"github.com/google/blueprint/bootstrap"
@@ -829,7 +830,7 @@
 	const maxArchTypeNameSize = 500
 
 	// Convert the type to a new set of types that contains only the arch-specific properties
-	// (those that are tagged with `android:"arch_specific"`), and sharded into multiple types
+	// (those that are tagged with `android:"arch_variant"`), and sharded into multiple types
 	// to keep the runtime-generated names under the limit.
 	propShards, _ := proptools.FilterPropertyStructSharded(props, maxArchTypeNameSize, filterArchStruct)
 
@@ -864,6 +865,10 @@
 				archVariant := variantReplacer.Replace(archVariant)
 				variants = append(variants, proptools.FieldNameForProperty(archVariant))
 			}
+			for _, cpuVariant := range cpuVariants[arch] {
+				cpuVariant := variantReplacer.Replace(cpuVariant)
+				variants = append(variants, proptools.FieldNameForProperty(cpuVariant))
+			}
 			for _, feature := range archFeatures[arch] {
 				feature := variantReplacer.Replace(feature)
 				variants = append(variants, proptools.FieldNameForProperty(feature))
@@ -904,6 +909,7 @@
 			"Glibc",
 			"Musl",
 			"Linux",
+			"Host_linux",
 			"Not_windows",
 			"Arm_on_x86",
 			"Arm_on_x86_64",
@@ -925,6 +931,12 @@
 						targets = append(targets, target)
 					}
 				}
+				if os.Linux() && os.Class == Host {
+					target := "Host_linux_" + archType.Name
+					if !InList(target, targets) {
+						targets = append(targets, target)
+					}
+				}
 				if os.Bionic() {
 					target := "Bionic_" + archType.Name
 					if !InList(target, targets) {
@@ -1157,6 +1169,14 @@
 				}
 			}
 
+			if os.Linux() && os.Class == Host {
+				field := "Host_linux"
+				prefix := "target.host_linux"
+				if linuxProperties, ok := getChildPropertyStruct(ctx, targetProp, field, prefix); ok {
+					mergePropertyStruct(ctx, genProps, linuxProperties)
+				}
+			}
+
 			if os.Bionic() {
 				field := "Bionic"
 				prefix := "target.bionic"
@@ -1179,14 +1199,6 @@
 				if bionicProperties, ok := getChildPropertyStruct(ctx, targetProp, field, prefix); ok {
 					mergePropertyStruct(ctx, genProps, bionicProperties)
 				}
-
-				// Special case:  to ease the transition from glibc to musl, apply linux_glibc
-				// properties (which has historically mean host linux) to musl variants.
-				field = "Linux_glibc"
-				prefix = "target.linux_glibc"
-				if bionicProperties, ok := getChildPropertyStruct(ctx, targetProp, field, prefix); ok {
-					mergePropertyStruct(ctx, genProps, bionicProperties)
-				}
 			}
 
 			// Handle target OS properties in the form:
@@ -1406,14 +1418,6 @@
 			if osArchProperties, ok := getChildPropertyStruct(ctx, targetProp, field, userFriendlyField); ok {
 				result = append(result, osArchProperties)
 			}
-
-			// Special case:  to ease the transition from glibc to musl, apply linux_glibc
-			// properties (which has historically mean host linux) to musl variants.
-			field = "Linux_glibc_" + archType.Name
-			userFriendlyField = "target.linux_glibc_" + archType.Name
-			if osArchProperties, ok := getChildPropertyStruct(ctx, targetProp, field, userFriendlyField); ok {
-				result = append(result, osArchProperties)
-			}
 		}
 	}
 
@@ -1514,23 +1518,32 @@
 	targets := make(map[OsType][]Target)
 	var targetErr error
 
-	addTarget := func(os OsType, archName string, archVariant, cpuVariant *string, abi []string,
-		nativeBridgeEnabled NativeBridgeSupport, nativeBridgeHostArchName *string,
-		nativeBridgeRelativePath *string) {
+	type targetConfig struct {
+		os                       OsType
+		archName                 string
+		archVariant              *string
+		cpuVariant               *string
+		abi                      []string
+		nativeBridgeEnabled      NativeBridgeSupport
+		nativeBridgeHostArchName *string
+		nativeBridgeRelativePath *string
+	}
+
+	addTarget := func(target targetConfig) {
 		if targetErr != nil {
 			return
 		}
 
-		arch, err := decodeArch(os, archName, archVariant, cpuVariant, abi)
+		arch, err := decodeArch(target.os, target.archName, target.archVariant, target.cpuVariant, target.abi)
 		if err != nil {
 			targetErr = err
 			return
 		}
-		nativeBridgeRelativePathStr := String(nativeBridgeRelativePath)
-		nativeBridgeHostArchNameStr := String(nativeBridgeHostArchName)
+		nativeBridgeRelativePathStr := String(target.nativeBridgeRelativePath)
+		nativeBridgeHostArchNameStr := String(target.nativeBridgeHostArchName)
 
 		// Use guest arch as relative install path by default
-		if nativeBridgeEnabled && nativeBridgeRelativePathStr == "" {
+		if target.nativeBridgeEnabled && nativeBridgeRelativePathStr == "" {
 			nativeBridgeRelativePathStr = arch.ArchType.String()
 		}
 
@@ -1538,11 +1551,11 @@
 		// the currently configured build machine (either because the OS is different or because of
 		// the unsupported arch)
 		hostCross := false
-		if os.Class == Host {
+		if target.os.Class == Host {
 			var osSupported bool
-			if os == config.BuildOS {
+			if target.os == config.BuildOS {
 				osSupported = true
-			} else if config.BuildOS.Linux() && os.Linux() {
+			} else if config.BuildOS.Linux() && target.os.Linux() {
 				// LinuxBionic and Linux are compatible
 				osSupported = true
 			} else {
@@ -1564,11 +1577,11 @@
 			}
 		}
 
-		targets[os] = append(targets[os],
+		targets[target.os] = append(targets[target.os],
 			Target{
-				Os:                       os,
+				Os:                       target.os,
 				Arch:                     arch,
-				NativeBridge:             nativeBridgeEnabled,
+				NativeBridge:             target.nativeBridgeEnabled,
 				NativeBridgeHostArchName: nativeBridgeHostArchNameStr,
 				NativeBridgeRelativePath: nativeBridgeRelativePathStr,
 				HostCross:                hostCross,
@@ -1580,11 +1593,11 @@
 	}
 
 	// The primary host target, which must always exist.
-	addTarget(config.BuildOS, *variables.HostArch, nil, nil, nil, NativeBridgeDisabled, nil, nil)
+	addTarget(targetConfig{os: config.BuildOS, archName: *variables.HostArch, nativeBridgeEnabled: NativeBridgeDisabled})
 
 	// An optional secondary host target.
 	if variables.HostSecondaryArch != nil && *variables.HostSecondaryArch != "" {
-		addTarget(config.BuildOS, *variables.HostSecondaryArch, nil, nil, nil, NativeBridgeDisabled, nil, nil)
+		addTarget(targetConfig{os: config.BuildOS, archName: *variables.HostSecondaryArch, nativeBridgeEnabled: NativeBridgeDisabled})
 	}
 
 	// Optional cross-compiled host targets, generally Windows.
@@ -1599,45 +1612,65 @@
 		}
 
 		// The primary cross-compiled host target.
-		addTarget(crossHostOs, *variables.CrossHostArch, nil, nil, nil, NativeBridgeDisabled, nil, nil)
+		addTarget(targetConfig{os: crossHostOs, archName: *variables.CrossHostArch, nativeBridgeEnabled: NativeBridgeDisabled})
 
 		// An optional secondary cross-compiled host target.
 		if variables.CrossHostSecondaryArch != nil && *variables.CrossHostSecondaryArch != "" {
-			addTarget(crossHostOs, *variables.CrossHostSecondaryArch, nil, nil, nil, NativeBridgeDisabled, nil, nil)
+			addTarget(targetConfig{os: crossHostOs, archName: *variables.CrossHostSecondaryArch, nativeBridgeEnabled: NativeBridgeDisabled})
 		}
 	}
 
 	// Optional device targets
 	if variables.DeviceArch != nil && *variables.DeviceArch != "" {
 		// The primary device target.
-		addTarget(Android, *variables.DeviceArch, variables.DeviceArchVariant,
-			variables.DeviceCpuVariant, variables.DeviceAbi, NativeBridgeDisabled, nil, nil)
+		addTarget(targetConfig{
+			os:                  Android,
+			archName:            *variables.DeviceArch,
+			archVariant:         variables.DeviceArchVariant,
+			cpuVariant:          variables.DeviceCpuVariant,
+			abi:                 variables.DeviceAbi,
+			nativeBridgeEnabled: NativeBridgeDisabled,
+		})
 
 		// An optional secondary device target.
 		if variables.DeviceSecondaryArch != nil && *variables.DeviceSecondaryArch != "" {
-			addTarget(Android, *variables.DeviceSecondaryArch,
-				variables.DeviceSecondaryArchVariant, variables.DeviceSecondaryCpuVariant,
-				variables.DeviceSecondaryAbi, NativeBridgeDisabled, nil, nil)
+			addTarget(targetConfig{
+				os:                  Android,
+				archName:            *variables.DeviceSecondaryArch,
+				archVariant:         variables.DeviceSecondaryArchVariant,
+				cpuVariant:          variables.DeviceSecondaryCpuVariant,
+				abi:                 variables.DeviceSecondaryAbi,
+				nativeBridgeEnabled: NativeBridgeDisabled,
+			})
 		}
 
 		// An optional NativeBridge device target.
 		if variables.NativeBridgeArch != nil && *variables.NativeBridgeArch != "" {
-			addTarget(Android, *variables.NativeBridgeArch,
-				variables.NativeBridgeArchVariant, variables.NativeBridgeCpuVariant,
-				variables.NativeBridgeAbi, NativeBridgeEnabled, variables.DeviceArch,
-				variables.NativeBridgeRelativePath)
+			addTarget(targetConfig{
+				os:                       Android,
+				archName:                 *variables.NativeBridgeArch,
+				archVariant:              variables.NativeBridgeArchVariant,
+				cpuVariant:               variables.NativeBridgeCpuVariant,
+				abi:                      variables.NativeBridgeAbi,
+				nativeBridgeEnabled:      NativeBridgeEnabled,
+				nativeBridgeHostArchName: variables.DeviceArch,
+				nativeBridgeRelativePath: variables.NativeBridgeRelativePath,
+			})
 		}
 
 		// An optional secondary NativeBridge device target.
 		if variables.DeviceSecondaryArch != nil && *variables.DeviceSecondaryArch != "" &&
 			variables.NativeBridgeSecondaryArch != nil && *variables.NativeBridgeSecondaryArch != "" {
-			addTarget(Android, *variables.NativeBridgeSecondaryArch,
-				variables.NativeBridgeSecondaryArchVariant,
-				variables.NativeBridgeSecondaryCpuVariant,
-				variables.NativeBridgeSecondaryAbi,
-				NativeBridgeEnabled,
-				variables.DeviceSecondaryArch,
-				variables.NativeBridgeSecondaryRelativePath)
+			addTarget(targetConfig{
+				os:                       Android,
+				archName:                 *variables.NativeBridgeSecondaryArch,
+				archVariant:              variables.NativeBridgeSecondaryArchVariant,
+				cpuVariant:               variables.NativeBridgeSecondaryCpuVariant,
+				abi:                      variables.NativeBridgeSecondaryAbi,
+				nativeBridgeEnabled:      NativeBridgeEnabled,
+				nativeBridgeHostArchName: variables.DeviceSecondaryArch,
+				nativeBridgeRelativePath: variables.NativeBridgeSecondaryRelativePath,
+			})
 		}
 	}
 
@@ -1697,11 +1730,11 @@
 }
 
 // decodeArchSettings converts a list of archConfigs into a list of Targets for the given OsType.
-func decodeArchSettings(os OsType, archConfigs []archConfig) ([]Target, error) {
+func decodeAndroidArchSettings(archConfigs []archConfig) ([]Target, error) {
 	var ret []Target
 
 	for _, config := range archConfigs {
-		arch, err := decodeArch(os, config.arch, &config.archVariant,
+		arch, err := decodeArch(Android, config.arch, &config.archVariant,
 			&config.cpuVariant, config.abi)
 		if err != nil {
 			return nil, err
@@ -1741,6 +1774,18 @@
 		a.CpuVariant = ""
 	}
 
+	if a.ArchVariant != "" {
+		if validArchVariants := archVariants[archType]; !InList(a.ArchVariant, validArchVariants) {
+			return Arch{}, fmt.Errorf("[%q] unknown arch variant %q, supported variants: %q", archType, a.ArchVariant, validArchVariants)
+		}
+	}
+
+	if a.CpuVariant != "" {
+		if validCpuVariants := cpuVariants[archType]; !InList(a.CpuVariant, validCpuVariants) {
+			return Arch{}, fmt.Errorf("[%q] unknown cpu variant %q, supported variants: %q", archType, a.CpuVariant, validCpuVariants)
+		}
+	}
+
 	// Filter empty ABIs out of the list.
 	for i := 0; i < len(a.Abi); i++ {
 		if a.Abi[i] == "" {
@@ -1749,14 +1794,9 @@
 		}
 	}
 
-	if a.ArchVariant == "" {
-		// Set ArchFeatures from the default arch features.
-		if featureMap, ok := defaultArchFeatureMap[os]; ok {
-			a.ArchFeatures = featureMap[archType]
-		}
-	} else {
-		// Set ArchFeatures from the arch type.
-		if featureMap, ok := archFeatureMap[archType]; ok {
+	// Set ArchFeatures from the arch type for Android OS; other OSes do not specify features.
+	if os == Android {
+		if featureMap, ok := androidArchFeatureMap[archType]; ok {
 			a.ArchFeatures = featureMap[a.ArchVariant]
 		}
 	}
@@ -2086,6 +2126,7 @@
 	linuxStructs := getTargetStructs(ctx, archProperties, "Linux")
 	bionicStructs := getTargetStructs(ctx, archProperties, "Bionic")
 	hostStructs := getTargetStructs(ctx, archProperties, "Host")
+	hostLinuxStructs := getTargetStructs(ctx, archProperties, "Host_linux")
 	hostNotWindowsStructs := getTargetStructs(ctx, archProperties, "Not_windows")
 
 	// For android, linux, ...
@@ -2106,6 +2147,9 @@
 		if os.Bionic() {
 			osStructs = append(osStructs, bionicStructs...)
 		}
+		if os.Linux() && os.Class == Host {
+			osStructs = append(osStructs, hostLinuxStructs...)
+		}
 
 		if os == LinuxMusl {
 			osStructs = append(osStructs, getTargetStructs(ctx, archProperties, "Musl")...)
@@ -2138,6 +2182,16 @@
 				targetStructs := getTargetStructs(ctx, archProperties, targetField)
 				osArchStructs = append(osArchStructs, targetStructs...)
 			}
+			if os == LinuxMusl {
+				targetField := "Musl_" + arch.Name
+				targetStructs := getTargetStructs(ctx, archProperties, targetField)
+				osArchStructs = append(osArchStructs, targetStructs...)
+			}
+			if os == Linux {
+				targetField := "Glibc_" + arch.Name
+				targetStructs := getTargetStructs(ctx, archProperties, targetField)
+				osArchStructs = append(osArchStructs, targetStructs...)
+			}
 
 			targetField := GetCompoundTargetField(os, arch)
 			targetName := fmt.Sprintf("%s_%s", os.Name, arch.Name)
@@ -2194,3 +2248,40 @@
 
 	return value
 }
+
+func printArchTypeStarlarkDict(dict map[ArchType][]string) string {
+	valDict := make(map[string]string, len(dict))
+	for k, v := range dict {
+		valDict[k.String()] = starlark_fmt.PrintStringList(v, 1)
+	}
+	return starlark_fmt.PrintDict(valDict, 0)
+}
+
+func printArchTypeNestedStarlarkDict(dict map[ArchType]map[string][]string) string {
+	valDict := make(map[string]string, len(dict))
+	for k, v := range dict {
+		valDict[k.String()] = starlark_fmt.PrintStringListDict(v, 1)
+	}
+	return starlark_fmt.PrintDict(valDict, 0)
+}
+
+func StarlarkArchConfigurations() string {
+	return fmt.Sprintf(`
+_arch_to_variants = %s
+
+_arch_to_cpu_variants = %s
+
+_arch_to_features = %s
+
+_android_arch_feature_for_arch_variant = %s
+
+arch_to_variants = _arch_to_variants
+arch_to_cpu_variants = _arch_to_cpu_variants
+arch_to_features = _arch_to_features
+android_arch_feature_for_arch_variants = _android_arch_feature_for_arch_variant
+`, printArchTypeStarlarkDict(archVariants),
+		printArchTypeStarlarkDict(cpuVariants),
+		printArchTypeStarlarkDict(archFeatures),
+		printArchTypeNestedStarlarkDict(androidArchFeatureMap),
+	)
+}
diff --git a/android/arch_list.go b/android/arch_list.go
index d68a0d1..cbf8e7a 100644
--- a/android/arch_list.go
+++ b/android/arch_list.go
@@ -14,46 +14,18 @@
 
 package android
 
-import "fmt"
-
 var archVariants = map[ArchType][]string{
 	Arm: {
 		"armv7-a",
 		"armv7-a-neon",
 		"armv8-a",
 		"armv8-2a",
-		"cortex-a7",
-		"cortex-a8",
-		"cortex-a9",
-		"cortex-a15",
-		"cortex-a53",
-		"cortex-a53-a57",
-		"cortex-a55",
-		"cortex-a72",
-		"cortex-a73",
-		"cortex-a75",
-		"cortex-a76",
-		"krait",
-		"kryo",
-		"kryo385",
-		"exynos-m1",
-		"exynos-m2",
 	},
 	Arm64: {
-		"armv8_a",
-		"armv8_a_branchprot",
-		"armv8_2a",
+		"armv8-a",
+		"armv8-a-branchprot",
+		"armv8-2a",
 		"armv8-2a-dotprod",
-		"cortex-a53",
-		"cortex-a55",
-		"cortex-a72",
-		"cortex-a73",
-		"cortex-a75",
-		"cortex-a76",
-		"kryo",
-		"kryo385",
-		"exynos-m1",
-		"exynos-m2",
 	},
 	X86: {
 		"amberlake",
@@ -87,6 +59,41 @@
 	},
 }
 
+var cpuVariants = map[ArchType][]string{
+	Arm: {
+		"cortex-a7",
+		"cortex-a8",
+		"cortex-a9",
+		"cortex-a15",
+		"cortex-a53",
+		"cortex-a53.a57",
+		"cortex-a55",
+		"cortex-a72",
+		"cortex-a73",
+		"cortex-a75",
+		"cortex-a76",
+		"krait",
+		"kryo",
+		"kryo385",
+		"exynos-m1",
+		"exynos-m2",
+	},
+	Arm64: {
+		"cortex-a53",
+		"cortex-a55",
+		"cortex-a72",
+		"cortex-a73",
+		"cortex-a75",
+		"cortex-a76",
+		"kryo",
+		"kryo385",
+		"exynos-m1",
+		"exynos-m2",
+	},
+	X86:    {},
+	X86_64: {},
+}
+
 var archFeatures = map[ArchType][]string{
 	Arm: {
 		"neon",
@@ -119,7 +126,7 @@
 	},
 }
 
-var archFeatureMap = map[ArchType]map[string][]string{
+var androidArchFeatureMap = map[ArchType]map[string][]string{
 	Arm: {
 		"armv7-a-neon": {
 			"neon",
@@ -270,6 +277,13 @@
 		},
 	},
 	X86_64: {
+		"" /*default */ : {
+			"ssse3",
+			"sse4",
+			"sse4_1",
+			"sse4_2",
+			"popcnt",
+		},
 		"amberlake": {
 			"ssse3",
 			"sse4",
@@ -389,23 +403,3 @@
 		},
 	},
 }
-
-var defaultArchFeatureMap = map[OsType]map[ArchType][]string{}
-
-// RegisterDefaultArchVariantFeatures is called by files that define Toolchains to specify the
-// arch features that are available for the default arch variant.  It must be called from an
-// init() function.
-func RegisterDefaultArchVariantFeatures(os OsType, arch ArchType, features ...string) {
-	checkCalledFromInit()
-
-	for _, feature := range features {
-		if !InList(feature, archFeatures[arch]) {
-			panic(fmt.Errorf("Invalid feature %q for arch %q variant \"\"", feature, arch))
-		}
-	}
-
-	if defaultArchFeatureMap[os] == nil {
-		defaultArchFeatureMap[os] = make(map[ArchType][]string)
-	}
-	defaultArchFeatureMap[os][arch] = features
-}
diff --git a/android/arch_test.go b/android/arch_test.go
index a828321..dd0b115 100644
--- a/android/arch_test.go
+++ b/android/arch_test.go
@@ -491,11 +491,9 @@
 			arch: {
 				arm: {
 					a:  ["arm"],
-					armv7_a_neon: { a: ["armv7_a_neon"] },
 				},
 				arm64: {
 					a:  ["arm64"],
-					armv8_a: { a: ["armv8_a"] },
 				},
 				x86: { a:  ["x86"] },
 				x86_64: { a:  ["x86_64"] },
@@ -512,6 +510,7 @@
 				musl: { a:  ["musl"] },
 				linux_bionic: { a:  ["linux_bionic"] },
 				linux: { a:  ["linux"] },
+				host_linux: { a: ["host_linux"] },
 				linux_glibc: { a:  ["linux_glibc"] },
 				linux_musl: { a:  ["linux_musl"] },
 				windows: { a:  ["windows"], enabled: true },
@@ -552,12 +551,12 @@
 				{
 					module:   "foo",
 					variant:  "android_arm64_armv8-a",
-					property: []string{"root", "linux", "bionic", "android", "android64", "arm64", "armv8_a", "lib64", "android_arm64"},
+					property: []string{"root", "linux", "bionic", "android", "android64", "arm64", "lib64", "android_arm64"},
 				},
 				{
 					module:   "foo",
 					variant:  "android_arm_armv7-a-neon",
-					property: []string{"root", "linux", "bionic", "android", "android64", "arm", "armv7_a_neon", "lib32", "android_arm"},
+					property: []string{"root", "linux", "bionic", "android", "android64", "arm", "lib32", "android_arm"},
 				},
 			},
 		},
@@ -568,12 +567,12 @@
 				{
 					module:   "foo",
 					variant:  "linux_glibc_x86_64",
-					property: []string{"root", "host", "linux", "glibc", "linux_glibc", "not_windows", "x86_64", "lib64", "linux_x86_64", "linux_glibc_x86_64"},
+					property: []string{"root", "host", "linux", "host_linux", "glibc", "linux_glibc", "not_windows", "x86_64", "lib64", "linux_x86_64", "linux_glibc_x86_64"},
 				},
 				{
 					module:   "foo",
 					variant:  "linux_glibc_x86",
-					property: []string{"root", "host", "linux", "glibc", "linux_glibc", "not_windows", "x86", "lib32", "linux_x86", "linux_glibc_x86"},
+					property: []string{"root", "host", "linux", "host_linux", "glibc", "linux_glibc", "not_windows", "x86", "lib32", "linux_x86", "linux_glibc_x86"},
 				},
 			},
 		},
@@ -607,12 +606,12 @@
 				{
 					module:   "foo",
 					variant:  "linux_musl_x86_64",
-					property: []string{"root", "host", "linux", "musl", "linux_glibc", "linux_musl", "not_windows", "x86_64", "lib64", "linux_x86_64", "linux_musl_x86_64", "linux_glibc_x86_64"},
+					property: []string{"root", "host", "linux", "host_linux", "musl", "linux_musl", "not_windows", "x86_64", "lib64", "linux_x86_64", "linux_musl_x86_64"},
 				},
 				{
 					module:   "foo",
 					variant:  "linux_musl_x86",
-					property: []string{"root", "host", "linux", "musl", "linux_glibc", "linux_musl", "not_windows", "x86", "lib32", "linux_x86", "linux_musl_x86", "linux_glibc_x86"},
+					property: []string{"root", "host", "linux", "host_linux", "musl", "linux_musl", "not_windows", "x86", "lib32", "linux_x86", "linux_musl_x86"},
 				},
 			},
 		},
diff --git a/android/bazel.go b/android/bazel.go
index f4fc038..4ef8d78 100644
--- a/android/bazel.go
+++ b/android/bazel.go
@@ -24,6 +24,15 @@
 
 	"github.com/google/blueprint"
 	"github.com/google/blueprint/proptools"
+
+	"android/soong/android/allowlists"
+)
+
+const (
+	// A sentinel value to be used as a key in Bp2BuildConfig for modules with
+	// no package path. This is also the module dir for top level Android.bp
+	// modules.
+	Bp2BuildTopLevel = "."
 )
 
 type bazelModuleProperties struct {
@@ -87,7 +96,7 @@
 	HandcraftedLabel() string
 	GetBazelLabel(ctx BazelConversionPathContext, module blueprint.Module) string
 	ShouldConvertWithBp2build(ctx BazelConversionContext) bool
-	shouldConvertWithBp2build(ctx BazelConversionContext, module blueprint.Module) bool
+	shouldConvertWithBp2build(ctx bazelOtherModuleContext, module blueprint.Module) bool
 	GetBazelBuildFileContents(c Config, path, name string) (string, error)
 	ConvertWithBp2build(ctx TopDownMutatorContext)
 
@@ -161,443 +170,164 @@
 	return "" // no label for unconverted module
 }
 
-// Configuration to decide if modules in a directory should default to true/false for bp2build_available
-type Bp2BuildConfig map[string]BazelConversionConfigEntry
-type BazelConversionConfigEntry int
+type bp2BuildConversionAllowlist struct {
+	// Configure modules in these directories to enable bp2build_available: true or false by default.
+	defaultConfig allowlists.Bp2BuildConfig
 
-const (
-	// A sentinel value to be used as a key in Bp2BuildConfig for modules with
-	// no package path. This is also the module dir for top level Android.bp
-	// modules.
-	BP2BUILD_TOPLEVEL = "."
-
-	// iota + 1 ensures that the int value is not 0 when used in the Bp2buildAllowlist map,
-	// which can also mean that the key doesn't exist in a lookup.
-
-	// all modules in this package and subpackages default to bp2build_available: true.
-	// allows modules to opt-out.
-	Bp2BuildDefaultTrueRecursively BazelConversionConfigEntry = iota + 1
-
-	// all modules in this package (not recursively) default to bp2build_available: true.
-	// allows modules to opt-out.
-	Bp2BuildDefaultTrue
-
-	// all modules in this package (not recursively) default to bp2build_available: false.
-	// allows modules to opt-in.
-	Bp2BuildDefaultFalse
-)
-
-var (
 	// Keep any existing BUILD files (and do not generate new BUILD files) for these directories
 	// in the synthetic Bazel workspace.
-	bp2buildKeepExistingBuildFile = map[string]bool{
-		// This is actually build/bazel/build.BAZEL symlinked to ./BUILD
-		".":/*recursive = */ false,
+	keepExistingBuildFile map[string]bool
 
-		// build/bazel/examples/apex/... BUILD files should be generated, so
-		// build/bazel is not recursive. Instead list each subdirectory under
-		// build/bazel explicitly.
-		"build/bazel":/* recursive = */ false,
-		"build/bazel/ci/dist":/* recursive = */ false,
-		"build/bazel/examples/android_app":/* recursive = */ true,
-		"build/bazel/examples/java":/* recursive = */ true,
-		"build/bazel/bazel_skylib":/* recursive = */ true,
-		"build/bazel/rules":/* recursive = */ true,
-		"build/bazel/rules_cc":/* recursive = */ true,
-		"build/bazel/scripts":/* recursive = */ true,
-		"build/bazel/tests":/* recursive = */ true,
-		"build/bazel/platforms":/* recursive = */ true,
-		"build/bazel/product_variables":/* recursive = */ true,
-		"build/bazel_common_rules":/* recursive = */ true,
-		// build/make/tools/signapk BUILD file is generated, so build/make/tools is not recursive.
-		"build/make/tools":/* recursive = */ false,
-		"build/pesto":/* recursive = */ true,
+	// Per-module allowlist to always opt modules in to both bp2build and mixed builds.
+	// These modules are usually in directories with many other modules that are not ready for
+	// conversion.
+	//
+	// A module can either be in this list or have its directory allowlisted entirely
+	// in bp2buildDefaultConfig, but not both at the same time.
+	moduleAlwaysConvert map[string]bool
 
-		// external/bazelbuild-rules_android/... is needed by mixed builds, otherwise mixed builds analysis fails
-		// e.g. ERROR: Analysis of target '@soong_injection//mixed_builds:buildroot' failed
-		"external/bazelbuild-rules_android":/* recursive = */ true,
-		"external/bazel-skylib":/* recursive = */ true,
-		"external/guava":/* recursive = */ true,
-		"external/jsr305":/* recursive = */ true,
-		"frameworks/ex/common":/* recursive = */ true,
-
-		"packages/apps/Music":/* recursive = */ true,
-		"packages/apps/QuickSearchBox":/* recursive = */ true,
-		"packages/apps/WallpaperPicker":/* recursive = */ false,
-
-		"prebuilts/gcc":/* recursive = */ true,
-		"prebuilts/build-tools":/* recursive = */ false,
-		"prebuilts/sdk":/* recursive = */ false,
-		"prebuilts/sdk/current/extras/app-toolkit":/* recursive = */ false,
-		"prebuilts/sdk/current/support":/* recursive = */ false,
-		"prebuilts/sdk/tools":/* recursive = */ false,
-		"prebuilts/r8":/* recursive = */ false,
-	}
-
-	// Configure modules in these directories to enable bp2build_available: true or false by default.
-	bp2buildDefaultConfig = Bp2BuildConfig{
-		"art/libartpalette":                     Bp2BuildDefaultTrueRecursively,
-		"art/libdexfile":                        Bp2BuildDefaultTrueRecursively,
-		"art/runtime":                           Bp2BuildDefaultTrueRecursively,
-		"art/tools":                             Bp2BuildDefaultTrue,
-		"bionic":                                Bp2BuildDefaultTrueRecursively,
-		"bootable/recovery/tools/recovery_l10n": Bp2BuildDefaultTrue,
-		"build/bazel/examples/soong_config_variables":        Bp2BuildDefaultTrueRecursively,
-		"build/bazel/examples/apex/minimal":                  Bp2BuildDefaultTrueRecursively,
-		"build/make/tools/signapk":                           Bp2BuildDefaultTrue,
-		"build/soong":                                        Bp2BuildDefaultTrue,
-		"build/soong/cc/libbuildversion":                     Bp2BuildDefaultTrue, // Skip tests subdir
-		"build/soong/cc/ndkstubgen":                          Bp2BuildDefaultTrue,
-		"build/soong/cc/symbolfile":                          Bp2BuildDefaultTrue,
-		"build/soong/linkerconfig":                           Bp2BuildDefaultTrueRecursively,
-		"build/soong/scripts":                                Bp2BuildDefaultTrueRecursively,
-		"cts/common/device-side/nativetesthelper/jni":        Bp2BuildDefaultTrueRecursively,
-		"development/apps/DevelopmentSettings":               Bp2BuildDefaultTrue,
-		"development/apps/Fallback":                          Bp2BuildDefaultTrue,
-		"development/apps/WidgetPreview":                     Bp2BuildDefaultTrue,
-		"development/samples/BasicGLSurfaceView":             Bp2BuildDefaultTrue,
-		"development/samples/BluetoothChat":                  Bp2BuildDefaultTrue,
-		"development/samples/BrokenKeyDerivation":            Bp2BuildDefaultTrue,
-		"development/samples/Compass":                        Bp2BuildDefaultTrue,
-		"development/samples/ContactManager":                 Bp2BuildDefaultTrue,
-		"development/samples/FixedGridLayout":                Bp2BuildDefaultTrue,
-		"development/samples/HelloEffects":                   Bp2BuildDefaultTrue,
-		"development/samples/Home":                           Bp2BuildDefaultTrue,
-		"development/samples/HoneycombGallery":               Bp2BuildDefaultTrue,
-		"development/samples/JetBoy":                         Bp2BuildDefaultTrue,
-		"development/samples/KeyChainDemo":                   Bp2BuildDefaultTrue,
-		"development/samples/LceDemo":                        Bp2BuildDefaultTrue,
-		"development/samples/LunarLander":                    Bp2BuildDefaultTrue,
-		"development/samples/MultiResolution":                Bp2BuildDefaultTrue,
-		"development/samples/MultiWindow":                    Bp2BuildDefaultTrue,
-		"development/samples/NotePad":                        Bp2BuildDefaultTrue,
-		"development/samples/Obb":                            Bp2BuildDefaultTrue,
-		"development/samples/RSSReader":                      Bp2BuildDefaultTrue,
-		"development/samples/ReceiveShareDemo":               Bp2BuildDefaultTrue,
-		"development/samples/SearchableDictionary":           Bp2BuildDefaultTrue,
-		"development/samples/SipDemo":                        Bp2BuildDefaultTrue,
-		"development/samples/SkeletonApp":                    Bp2BuildDefaultTrue,
-		"development/samples/Snake":                          Bp2BuildDefaultTrue,
-		"development/samples/SpellChecker/":                  Bp2BuildDefaultTrueRecursively,
-		"development/samples/ThemedNavBarKeyboard":           Bp2BuildDefaultTrue,
-		"development/samples/ToyVpn":                         Bp2BuildDefaultTrue,
-		"development/samples/TtsEngine":                      Bp2BuildDefaultTrue,
-		"development/samples/USB/AdbTest":                    Bp2BuildDefaultTrue,
-		"development/samples/USB/MissileLauncher":            Bp2BuildDefaultTrue,
-		"development/samples/VoiceRecognitionService":        Bp2BuildDefaultTrue,
-		"development/samples/VoicemailProviderDemo":          Bp2BuildDefaultTrue,
-		"development/samples/WiFiDirectDemo":                 Bp2BuildDefaultTrue,
-		"development/sdk":                                    Bp2BuildDefaultTrueRecursively,
-		"external/arm-optimized-routines":                    Bp2BuildDefaultTrueRecursively,
-		"external/auto/common":                               Bp2BuildDefaultTrueRecursively,
-		"external/auto/service":                              Bp2BuildDefaultTrueRecursively,
-		"external/boringssl":                                 Bp2BuildDefaultTrueRecursively,
-		"external/bouncycastle":                              Bp2BuildDefaultTrue,
-		"external/brotli":                                    Bp2BuildDefaultTrue,
-		"external/conscrypt":                                 Bp2BuildDefaultTrue,
-		"external/error_prone":                               Bp2BuildDefaultTrueRecursively,
-		"external/fmtlib":                                    Bp2BuildDefaultTrueRecursively,
-		"external/google-benchmark":                          Bp2BuildDefaultTrueRecursively,
-		"external/googletest":                                Bp2BuildDefaultTrueRecursively,
-		"external/gwp_asan":                                  Bp2BuildDefaultTrueRecursively,
-		"external/icu":                                       Bp2BuildDefaultTrueRecursively,
-		"external/icu/android_icu4j":                         Bp2BuildDefaultFalse, // java rules incomplete
-		"external/icu/icu4j":                                 Bp2BuildDefaultFalse, // java rules incomplete
-		"external/javapoet":                                  Bp2BuildDefaultTrueRecursively,
-		"external/jemalloc_new":                              Bp2BuildDefaultTrueRecursively,
-		"external/jsoncpp":                                   Bp2BuildDefaultTrueRecursively,
-		"external/libcap":                                    Bp2BuildDefaultTrueRecursively,
-		"external/libcxx":                                    Bp2BuildDefaultTrueRecursively,
-		"external/libcxxabi":                                 Bp2BuildDefaultTrueRecursively,
-		"external/libevent":                                  Bp2BuildDefaultTrueRecursively,
-		"external/libpng":                                    Bp2BuildDefaultTrueRecursively,
-		"external/lz4/lib":                                   Bp2BuildDefaultTrue,
-		"external/lzma/C":                                    Bp2BuildDefaultTrueRecursively,
-		"external/mdnsresponder":                             Bp2BuildDefaultTrueRecursively,
-		"external/minijail":                                  Bp2BuildDefaultTrueRecursively,
-		"external/pcre":                                      Bp2BuildDefaultTrueRecursively,
-		"external/protobuf":                                  Bp2BuildDefaultTrueRecursively,
-		"external/python/six":                                Bp2BuildDefaultTrueRecursively,
-		"external/scudo":                                     Bp2BuildDefaultTrueRecursively,
-		"external/selinux/libselinux":                        Bp2BuildDefaultTrueRecursively,
-		"external/selinux/libsepol":                          Bp2BuildDefaultTrueRecursively,
-		"external/zlib":                                      Bp2BuildDefaultTrueRecursively,
-		"external/zstd":                                      Bp2BuildDefaultTrueRecursively,
-		"frameworks/base/media/tests/MediaDump":              Bp2BuildDefaultTrue,
-		"frameworks/base/startop/apps/test":                  Bp2BuildDefaultTrue,
-		"frameworks/native/libs/adbd_auth":                   Bp2BuildDefaultTrueRecursively,
-		"frameworks/native/opengl/tests/gl2_cameraeye":       Bp2BuildDefaultTrue,
-		"frameworks/native/opengl/tests/gl2_java":            Bp2BuildDefaultTrue,
-		"frameworks/native/opengl/tests/testLatency":         Bp2BuildDefaultTrue,
-		"frameworks/native/opengl/tests/testPauseResume":     Bp2BuildDefaultTrue,
-		"frameworks/native/opengl/tests/testViewport":        Bp2BuildDefaultTrue,
-		"frameworks/proto_logging/stats/stats_log_api_gen":   Bp2BuildDefaultTrueRecursively,
-		"libnativehelper":                                    Bp2BuildDefaultTrueRecursively,
-		"packages/apps/DevCamera":                            Bp2BuildDefaultTrue,
-		"packages/apps/HTMLViewer":                           Bp2BuildDefaultTrue,
-		"packages/apps/Protips":                              Bp2BuildDefaultTrue,
-		"packages/modules/StatsD/lib/libstatssocket":         Bp2BuildDefaultTrueRecursively,
-		"packages/modules/adb":                               Bp2BuildDefaultTrue,
-		"packages/modules/adb/apex":                          Bp2BuildDefaultTrue,
-		"packages/modules/adb/crypto":                        Bp2BuildDefaultTrueRecursively,
-		"packages/modules/adb/libs":                          Bp2BuildDefaultTrueRecursively,
-		"packages/modules/adb/pairing_auth":                  Bp2BuildDefaultTrueRecursively,
-		"packages/modules/adb/pairing_connection":            Bp2BuildDefaultTrueRecursively,
-		"packages/modules/adb/proto":                         Bp2BuildDefaultTrueRecursively,
-		"packages/modules/adb/tls":                           Bp2BuildDefaultTrueRecursively,
-		"packages/providers/MediaProvider/tools/dialogs":     Bp2BuildDefaultTrue,
-		"packages/screensavers/Basic":                        Bp2BuildDefaultTrue,
-		"packages/services/Car/tests/SampleRearViewCamera":   Bp2BuildDefaultTrue,
-		"prebuilts/clang/host/linux-x86":                     Bp2BuildDefaultTrueRecursively,
-		"prebuilts/tools/common/m2":                          Bp2BuildDefaultTrue,
-		"system/apex":                                        Bp2BuildDefaultFalse, // TODO(b/207466993): flaky failures
-		"system/apex/proto":                                  Bp2BuildDefaultTrueRecursively,
-		"system/apex/libs":                                   Bp2BuildDefaultTrueRecursively,
-		"system/core/debuggerd":                              Bp2BuildDefaultTrueRecursively,
-		"system/core/diagnose_usb":                           Bp2BuildDefaultTrueRecursively,
-		"system/core/libasyncio":                             Bp2BuildDefaultTrue,
-		"system/core/libcrypto_utils":                        Bp2BuildDefaultTrueRecursively,
-		"system/core/libcutils":                              Bp2BuildDefaultTrueRecursively,
-		"system/core/libpackagelistparser":                   Bp2BuildDefaultTrueRecursively,
-		"system/core/libprocessgroup":                        Bp2BuildDefaultTrue,
-		"system/core/libprocessgroup/cgrouprc":               Bp2BuildDefaultTrue,
-		"system/core/libprocessgroup/cgrouprc_format":        Bp2BuildDefaultTrue,
-		"system/core/libsystem":                              Bp2BuildDefaultTrueRecursively,
-		"system/core/libutils":                               Bp2BuildDefaultTrueRecursively,
-		"system/core/libvndksupport":                         Bp2BuildDefaultTrueRecursively,
-		"system/core/property_service/libpropertyinfoparser": Bp2BuildDefaultTrueRecursively,
-		"system/libbase":                                     Bp2BuildDefaultTrueRecursively,
-		"system/libprocinfo":                                 Bp2BuildDefaultTrue,
-		"system/libziparchive":                               Bp2BuildDefaultTrueRecursively,
-		"system/logging/liblog":                              Bp2BuildDefaultTrueRecursively,
-		"system/sepolicy/apex":                               Bp2BuildDefaultTrueRecursively,
-		"system/timezone/apex":                               Bp2BuildDefaultTrueRecursively,
-		"system/timezone/output_data":                        Bp2BuildDefaultTrueRecursively,
-		"system/unwinding/libbacktrace":                      Bp2BuildDefaultTrueRecursively,
-		"system/unwinding/libunwindstack":                    Bp2BuildDefaultTrueRecursively,
-		"tools/apksig":                                       Bp2BuildDefaultTrue,
-		"tools/platform-compat/java/android/compat":          Bp2BuildDefaultTrueRecursively,
-	}
+	// Per-module-type allowlist to always opt modules in to both bp2build and mixed builds
+	// when their module type matches one of the listed types.
+	moduleTypeAlwaysConvert map[string]bool
 
 	// Per-module denylist to always opt modules out of both bp2build and mixed builds.
-	bp2buildModuleDoNotConvertList = []string{
-		"libnativehelper_compat_libc", // Broken compile: implicit declaration of function 'strerror_r' is invalid in C99
-
-		"libart",                             // depends on unconverted modules: art_operator_srcs, libodrstatslog, libelffile, art_cmdlineparser_headers, cpp-define-generator-definitions, libcpu_features, libdexfile, libartpalette, libbacktrace, libnativebridge, libnativeloader, libsigchain, libunwindstack, libartbase, libprofile, cpp-define-generator-asm-support, apex-info-list-tinyxml, libtinyxml2, libnativeloader-headers, libstatssocket, heapprofd_client_api
-		"libart-runtime-gtest",               // depends on unconverted modules: libgtest_isolated, libart-compiler, libdexfile, libprofile, libartbase, libbacktrace, libartbase-art-gtest
-		"libart_headers",                     // depends on unconverted modules: art_libartbase_headers
-		"libartd",                            // depends on unconverted modules: apex-info-list-tinyxml, libtinyxml2, libnativeloader-headers, libstatssocket, heapprofd_client_api, art_operator_srcs, libodrstatslog, libelffiled, art_cmdlineparser_headers, cpp-define-generator-definitions, libcpu_features, libdexfiled, libartpalette, libbacktrace, libnativebridge, libnativeloader, libsigchain, libunwindstack, libartbased, libprofiled, cpp-define-generator-asm-support
-		"libartd-runtime-gtest",              // depends on unconverted modules: libgtest_isolated, libartd-compiler, libdexfiled, libprofiled, libartbased, libbacktrace, libartbased-art-gtest
-		"libstatslog_art",                    // depends on unconverted modules: statslog_art.cpp, statslog_art.h
-		"statslog_art.h", "statslog_art.cpp", // depends on unconverted modules: stats-log-api-gen
-
-		"libandroid_runtime_lazy", // depends on unconverted modules: libbinder_headers
-		"libcmd",                  // depends on unconverted modules: libbinder
-
-		"libdexfile_support_static",                                                       // Depends on unconverted module: libdexfile_external_headers
-		"libunwindstack_local", "libunwindstack_utils", "libc_malloc_debug", "libfdtrack", // Depends on unconverted module: libunwindstack
-
-		"libdexfile_support",          // TODO(b/210546943): Enabled based on product variables.
-		"libdexfile_external_headers", // TODO(b/210546943): Enabled based on product variables.
-
-		"libunwindstack",                // Depends on unconverted module libdexfile_support.
-		"libnativehelper_compat_libc++", // Broken compile: implicit declaration of function 'strerror_r' is invalid in C99
-
-		"chkcon", "sefcontext_compile", // depends on unconverted modules: libsepol
-
-		"libsepol", // TODO(b/207408632): Unsupported case of .l sources in cc library rules
-
-		"gen-kotlin-build-file.py", // module has same name as source
-
-		"libactivitymanager_aidl", // TODO(b/207426160): Depends on activity_manager_procstate_aidl, which is an aidl filegroup.
-
-		"libnativehelper_lazy_mts_jni", "libnativehelper_mts_jni", // depends on unconverted modules: libgmock_ndk
-		"libnativetesthelper_jni", "libgmock_main_ndk", "libgmock_ndk", // depends on unconverted module: libgtest_ndk_c++
-
-		"statslog-framework-java-gen", "statslog.cpp", "statslog.h", "statslog.rs", "statslog_header.rs", // depends on unconverted modules: stats-log-api-gen
-
-		"stats-log-api-gen", // depends on unconverted modules: libstats_proto_host, libprotobuf-cpp-full
-
-		"libstatslog", // depends on unconverted modules: statslog.cpp, statslog.h, ...
-
-		"cmd",                                                        // depends on unconverted module packagemanager_aidl-cpp, of unsupported type aidl_interface
-		"servicedispatcher",                                          // depends on unconverted module android.debug_aidl, of unsupported type aidl_interface
-		"libutilscallstack",                                          // depends on unconverted module libbacktrace
-		"libbacktrace",                                               // depends on unconverted module libunwindstack
-		"libdebuggerd_handler",                                       // depends on unconverted module libdebuggerd_handler_core
-		"libdebuggerd_handler_core", "libdebuggerd_handler_fallback", // depends on unconverted module libdebuggerd
-		"unwind_for_offline", // depends on unconverted module libunwindstack_utils
-		"libdebuggerd",       // depends on unconverted modules libdexfile_support, libunwindstack, gwp_asan_crash_handler, libtombstone_proto, libprotobuf-cpp-lite
-		"libdexfile_static",  // depends on libartpalette, libartbase, libdexfile, which are of unsupported type: art_cc_library.
-
-		"static_crasher", // depends on unconverted modules: libdebuggerd_handler
-
-		"pbtombstone", "crash_dump", // depends on libdebuggerd, libunwindstack
-
-		"libbase_ndk", // http://b/186826477, fails to link libctscamera2_jni for device (required for CtsCameraTestCases)
-
-		"libprotobuf-internal-protos",      // b/210751803, we don't handle path property for filegroups
-		"libprotobuf-internal-python-srcs", // b/210751803, we don't handle path property for filegroups
-		"libprotobuf-java-full",            // b/210751803, we don't handle path property for filegroups
-		"host-libprotobuf-java-full",       // b/210751803, we don't handle path property for filegroups
-		"libprotobuf-java-util-full",       // b/210751803, we don't handle path property for filegroups
-
-		"conscrypt",          // b/210751803, we don't handle path property for filegroups
-		"conscrypt-for-host", // b/210751803, we don't handle path property for filegroups
-
-		"host-libprotobuf-java-lite",  // b/217236083, java_library cannot have deps without srcs
-		"host-libprotobuf-java-micro", // b/217236083, java_library cannot have deps without srcs
-		"host-libprotobuf-java-nano",  // b/217236083, java_library cannot have deps without srcs
-		"error_prone_core",            // b/217236083, java_library cannot have deps without srcs
-		"bouncycastle-host",           // b/217236083, java_library cannot have deps without srcs
-
-		"apex_manifest_proto_java", // b/215230097, we don't handle .proto files in java_library srcs attribute
-
-		"libc_musl_sysroot_bionic_arch_headers", // b/218405924, depends on soong_zip
-		"libc_musl_sysroot_bionic_headers",      // b/218405924, depends on soong_zip and generates duplicate srcs
-
-		// python protos
-		"libprotobuf-python",                           // contains .proto sources
-		"conv_linker_config",                           // depends on linker_config_proto, a python lib with proto sources
-		"apex_build_info_proto", "apex_manifest_proto", // a python lib with proto sources
-		"linker_config_proto", // contains .proto sources
-
-		"brotli-fuzzer-corpus", // b/202015218: outputs are in location incompatible with bazel genrule handling.
-
-		// b/203369847: multiple genrules in the same package creating the same file
-		// //development/sdk/...
-		"platform_tools_properties",
-		"build_tools_source_properties",
-
-		// APEX support
-		"com.android.runtime", // depends on unconverted modules: bionic-linker-config, linkerconfig
-
-		"libgtest_ndk_c++",      // b/201816222: Requires sdk_version support.
-		"libgtest_main_ndk_c++", // b/201816222: Requires sdk_version support.
-
-		"abb",                     // depends on unconverted modules: libcmd, libbinder
-		"adb",                     // depends on unconverted modules: AdbWinApi, libadb_host, libandroidfw, libapp_processes_protos_full, libfastdeploy_host, libopenscreen-discovery, libopenscreen-platform-impl, libusb, bin2c_fastdeployagent, AdbWinUsbApi
-		"libadb_host",             // depends on unconverted modules: libopenscreen-discovery, libopenscreen-platform-impl, libusb, AdbWinApi
-		"libfastdeploy_host",      // depends on unconverted modules: libandroidfw, libusb, AdbWinApi
-		"linker",                  // depends on unconverted modules: libdebuggerd_handler_fallback
-		"linker_reloc_bench_main", // depends on unconverted modules: liblinker_reloc_bench_*
-		"versioner",               // depends on unconverted modules: libclang_cxx_host, libLLVM_host, of unsupported type llvm_host_prebuilt_library_shared
-
-		"linkerconfig", // http://b/202876379 has arch-variant static_executable
-		"mdnsd",        // http://b/202876379 has arch-variant static_executable
-
-		"CarHTMLViewer", // depends on unconverted modules android.car-stubs, car-ui-lib
-
-		"libdexfile",  // depends on unconverted modules: dexfile_operator_srcs, libartbase, libartpalette,
-		"libdexfiled", // depends on unconverted modules: dexfile_operator_srcs, libartbased, libartpalette
-
-		// go deps:
-		"apex-protos",                                                                                // depends on soong_zip, a go binary
-		"generated_android_icu4j_src_files", "generated_android_icu4j_test_files", "icu4c_test_data", // depends on unconverted modules: soong_zip
-		"host_bionic_linker_asm",         // depends on extract_linker, a go binary.
-		"host_bionic_linker_script",      // depends on extract_linker, a go binary.
-		"robolectric-sqlite4java-native", // depends on soong_zip, a go binary
-		"robolectric_tzdata",             // depends on soong_zip, a go binary
-
-		"android_icu4j_srcgen_binary", // Bazel build error: deps not allowed without srcs; move to runtime_deps
-		"core-icu4j-for-host",         // Bazel build error: deps not allowed without srcs; move to runtime_deps
-
-		// java deps
-		"android_icu4j_srcgen",          // depends on unconverted modules: currysrc
-		"bin2c_fastdeployagent",         // depends on deployagent, a java binary
-		"currysrc",                      // depends on unconverted modules: currysrc_org.eclipse, guavalib, jopt-simple-4.9
-		"robolectric-sqlite4java-0.282", // depends on unconverted modules: robolectric-sqlite4java-import, robolectric-sqlite4java-native
-		"timezone-host",                 // depends on unconverted modules: art.module.api.annotations
-		"truth-host-prebuilt",           // depends on unconverted modules: truth-prebuilt
-		"truth-prebuilt",                // depends on unconverted modules: asm-7.0, guava
-
-		"generated_android_icu4j_resources",      // depends on unconverted modules: android_icu4j_srcgen_binary, soong_zip
-		"generated_android_icu4j_test_resources", // depends on unconverted modules: android_icu4j_srcgen_binary, soong_zip
-
-		"art-script",     // depends on unconverted modules: dalvikvm, dex2oat
-		"dex2oat-script", // depends on unconverted modules: dex2oat
-
-		"error_prone_checkerframework_dataflow_nullaway", // TODO(b/219908977): "Error in fail: deps not allowed without srcs; move to runtime_deps?"
-	}
+	moduleDoNotConvert map[string]bool
 
 	// Per-module denylist of cc_library modules to only generate the static
 	// variant if their shared variant isn't ready or buildable by Bazel.
-	bp2buildCcLibraryStaticOnlyList = []string{}
+	ccLibraryStaticOnly map[string]bool
 
 	// Per-module denylist to opt modules out of mixed builds. Such modules will
 	// still be generated via bp2build.
-	mixedBuildsDisabledList = []string{
-		"art_libdexfile_dex_instruction_list_header", // breaks libart_mterp.armng, header not found
+	mixedBuildsDisabled map[string]bool
+}
 
-		"libbrotli",               // http://b/198585397, ld.lld: error: bionic/libc/arch-arm64/generic/bionic/memmove.S:95:(.text+0x10): relocation R_AARCH64_CONDBR19 out of range: -1404176 is not in [-1048576, 1048575]; references __memcpy
-		"minijail_constants_json", // http://b/200899432, bazel-built cc_genrule does not work in mixed build when it is a dependency of another soong module.
-
-		"cap_names.h",                                  // TODO(b/204913827) runfiles need to be handled in mixed builds
-		"libcap",                                       // TODO(b/204913827) runfiles need to be handled in mixed builds
-		"libprotobuf-cpp-full", "libprotobuf-cpp-lite", // Unsupported product&vendor suffix. b/204811222 and b/204810610.
-
-		// Depends on libprotobuf-cpp-*
-		"libadb_pairing_connection",
-		"libadb_pairing_connection_static",
-		"libadb_pairing_server", "libadb_pairing_server_static",
-
-		// TODO(b/204811222) support suffix in cc_binary
-		"acvp_modulewrapper",
-		"android.hardware.media.c2@1.0-service-v4l2",
-		"app_process",
-		"bar_test",
-		"bench_cxa_atexit",
-		"bench_noop",
-		"bench_noop_nostl",
-		"bench_noop_static",
-		"boringssl_self_test",
-		"boringssl_self_test_vendor",
-		"bssl",
-		"cavp",
-		"crash_dump",
-		"crasher",
-		"libcxx_test_template",
-		"linker",
-		"memory_replay",
-		"native_bridge_guest_linker",
-		"native_bridge_stub_library_defaults",
-		"noop",
-		"simpleperf_ndk",
-		"toybox-static",
-		"zlib_bench",
-	}
-
-	// Used for quicker lookups
-	bp2buildModuleDoNotConvert  = map[string]bool{}
-	bp2buildCcLibraryStaticOnly = map[string]bool{}
-	mixedBuildsDisabled         = map[string]bool{}
-)
-
-func init() {
-	for _, moduleName := range bp2buildModuleDoNotConvertList {
-		bp2buildModuleDoNotConvert[moduleName] = true
-	}
-
-	for _, moduleName := range bp2buildCcLibraryStaticOnlyList {
-		bp2buildCcLibraryStaticOnly[moduleName] = true
-	}
-
-	for _, moduleName := range mixedBuildsDisabledList {
-		mixedBuildsDisabled[moduleName] = true
+// NewBp2BuildAllowlist creates a new, empty bp2BuildConversionAllowlist
+// which can be populated using the builder-pattern Set* methods.
+func NewBp2BuildAllowlist() bp2BuildConversionAllowlist {
+	return bp2BuildConversionAllowlist{
+		allowlists.Bp2BuildConfig{},
+		map[string]bool{},
+		map[string]bool{},
+		map[string]bool{},
+		map[string]bool{},
+		map[string]bool{},
+		map[string]bool{},
 	}
 }
 
+// SetDefaultConfig copies the entries from defaultConfig into the allowlist
+func (a bp2BuildConversionAllowlist) SetDefaultConfig(defaultConfig allowlists.Bp2BuildConfig) bp2BuildConversionAllowlist {
+	if a.defaultConfig == nil {
+		a.defaultConfig = allowlists.Bp2BuildConfig{}
+	}
+	for k, v := range defaultConfig {
+		a.defaultConfig[k] = v
+	}
+
+	return a
+}
+
+// SetKeepExistingBuildFile copies the entries from keepExistingBuildFile into the allowlist
+func (a bp2BuildConversionAllowlist) SetKeepExistingBuildFile(keepExistingBuildFile map[string]bool) bp2BuildConversionAllowlist {
+	if a.keepExistingBuildFile == nil {
+		a.keepExistingBuildFile = map[string]bool{}
+	}
+	for k, v := range keepExistingBuildFile {
+		a.keepExistingBuildFile[k] = v
+	}
+
+	return a
+}
+
+// SetModuleAlwaysConvertList copies the entries from moduleAlwaysConvert into the allowlist
+func (a bp2BuildConversionAllowlist) SetModuleAlwaysConvertList(moduleAlwaysConvert []string) bp2BuildConversionAllowlist {
+	if a.moduleAlwaysConvert == nil {
+		a.moduleAlwaysConvert = map[string]bool{}
+	}
+	for _, m := range moduleAlwaysConvert {
+		a.moduleAlwaysConvert[m] = true
+	}
+
+	return a
+}
+
+// SetModuleTypeAlwaysConvertList copies the entries from moduleTypeAlwaysConvert into the allowlist
+func (a bp2BuildConversionAllowlist) SetModuleTypeAlwaysConvertList(moduleTypeAlwaysConvert []string) bp2BuildConversionAllowlist {
+	if a.moduleTypeAlwaysConvert == nil {
+		a.moduleTypeAlwaysConvert = map[string]bool{}
+	}
+	for _, m := range moduleTypeAlwaysConvert {
+		a.moduleTypeAlwaysConvert[m] = true
+	}
+
+	return a
+}
+
+// SetModuleDoNotConvertList copies the entries from moduleDoNotConvert into the allowlist
+func (a bp2BuildConversionAllowlist) SetModuleDoNotConvertList(moduleDoNotConvert []string) bp2BuildConversionAllowlist {
+	if a.moduleDoNotConvert == nil {
+		a.moduleDoNotConvert = map[string]bool{}
+	}
+	for _, m := range moduleDoNotConvert {
+		a.moduleDoNotConvert[m] = true
+	}
+
+	return a
+}
+
+// SetCcLibraryStaticOnlyList copies the entries from ccLibraryStaticOnly into the allowlist
+func (a bp2BuildConversionAllowlist) SetCcLibraryStaticOnlyList(ccLibraryStaticOnly []string) bp2BuildConversionAllowlist {
+	if a.ccLibraryStaticOnly == nil {
+		a.ccLibraryStaticOnly = map[string]bool{}
+	}
+	for _, m := range ccLibraryStaticOnly {
+		a.ccLibraryStaticOnly[m] = true
+	}
+
+	return a
+}
+
+// SetMixedBuildsDisabledList copies the entries from mixedBuildsDisabled into the allowlist
+func (a bp2BuildConversionAllowlist) SetMixedBuildsDisabledList(mixedBuildsDisabled []string) bp2BuildConversionAllowlist {
+	if a.mixedBuildsDisabled == nil {
+		a.mixedBuildsDisabled = map[string]bool{}
+	}
+	for _, m := range mixedBuildsDisabled {
+		a.mixedBuildsDisabled[m] = true
+	}
+
+	return a
+}
+
+var bp2buildAllowlist = NewBp2BuildAllowlist().
+	SetDefaultConfig(allowlists.Bp2buildDefaultConfig).
+	SetKeepExistingBuildFile(allowlists.Bp2buildKeepExistingBuildFile).
+	SetModuleAlwaysConvertList(allowlists.Bp2buildModuleAlwaysConvertList).
+	SetModuleTypeAlwaysConvertList(allowlists.Bp2buildModuleTypeAlwaysConvertList).
+	SetModuleDoNotConvertList(allowlists.Bp2buildModuleDoNotConvertList).
+	SetCcLibraryStaticOnlyList(allowlists.Bp2buildCcLibraryStaticOnlyList).
+	SetMixedBuildsDisabledList(allowlists.MixedBuildsDisabledList)
+
+// GenerateCcLibraryStaticOnly returns whether a cc_library module should only
+// generate a static version of itself based on the current global configuration.
 func GenerateCcLibraryStaticOnly(moduleName string) bool {
-	return bp2buildCcLibraryStaticOnly[moduleName]
+	return bp2buildAllowlist.ccLibraryStaticOnly[moduleName]
 }
 
+// ShouldKeepExistingBuildFileForDir returns whether an existing BUILD file should be
+// added to the build symlink forest based on the current global configuration.
 func ShouldKeepExistingBuildFileForDir(dir string) bool {
-	if _, ok := bp2buildKeepExistingBuildFile[dir]; ok {
+	return shouldKeepExistingBuildFileForDir(bp2buildAllowlist, dir)
+}
+
+func shouldKeepExistingBuildFileForDir(allowlist bp2BuildConversionAllowlist, dir string) bool {
+	if _, ok := allowlist.keepExistingBuildFile[dir]; ok {
 		// Exact dir match
 		return true
 	}
 	// Check if subtree match
-	for prefix, recursive := range bp2buildKeepExistingBuildFile {
+	for prefix, recursive := range allowlist.keepExistingBuildFile {
 		if recursive {
 			if strings.HasPrefix(dir, prefix+"/") {
 				return true
@@ -631,7 +361,7 @@
 		// variants of a cc_library.
 		return false
 	}
-	return !mixedBuildsDisabled[ctx.Module().Name()]
+	return !bp2buildAllowlist.mixedBuildsDisabled[ctx.Module().Name()]
 }
 
 // ConvertedToBazel returns whether this module has been converted (with bp2build or manually) to Bazel.
@@ -643,32 +373,74 @@
 	return b.shouldConvertWithBp2build(ctx, module) || b.HasHandcraftedLabel()
 }
 
-// ShouldConvertWithBp2build returns whether the given BazelModuleBase should be converted with bp2build.
+// ShouldConvertWithBp2build returns whether the given BazelModuleBase should be converted with bp2build
 func (b *BazelModuleBase) ShouldConvertWithBp2build(ctx BazelConversionContext) bool {
 	return b.shouldConvertWithBp2build(ctx, ctx.Module())
 }
 
-func (b *BazelModuleBase) shouldConvertWithBp2build(ctx BazelConversionContext, module blueprint.Module) bool {
-	if bp2buildModuleDoNotConvert[module.Name()] {
-		return false
-	}
+type bazelOtherModuleContext interface {
+	ModuleErrorf(format string, args ...interface{})
+	Config() Config
+	OtherModuleType(m blueprint.Module) string
+	OtherModuleName(m blueprint.Module) string
+	OtherModuleDir(m blueprint.Module) string
+}
 
+func (b *BazelModuleBase) shouldConvertWithBp2build(ctx bazelOtherModuleContext, module blueprint.Module) bool {
 	if !b.bazelProps().Bazel_module.CanConvertToBazel {
 		return false
 	}
 
+	propValue := b.bazelProperties.Bazel_module.Bp2build_available
 	packagePath := ctx.OtherModuleDir(module)
-	config := ctx.Config().bp2buildPackageConfig
+
+	// Modules in unit tests that are enabled in the allowlist by type or name
+	// trigger this conditional because unit tests run under the "." package path.
+	isTestModule := packagePath == Bp2BuildTopLevel && proptools.BoolDefault(propValue, false)
+	if isTestModule {
+		return true
+	}
+
+	moduleName := module.Name()
+	allowlist := ctx.Config().bp2buildPackageConfig
+	moduleNameAllowed := allowlist.moduleAlwaysConvert[moduleName]
+	moduleTypeAllowed := allowlist.moduleTypeAlwaysConvert[ctx.OtherModuleType(module)]
+	allowlistConvert := moduleNameAllowed || moduleTypeAllowed
+	if moduleNameAllowed && moduleTypeAllowed {
+		ctx.ModuleErrorf("A module cannot be in moduleAlwaysConvert and also be in moduleTypeAlwaysConvert")
+		return false
+	}
+
+	if allowlist.moduleDoNotConvert[moduleName] {
+		if moduleNameAllowed {
+			ctx.ModuleErrorf("a module cannot be in moduleDoNotConvert and also be in moduleAlwaysConvert")
+		}
+		return false
+	}
+
+	if allowlistConvert && shouldKeepExistingBuildFileForDir(allowlist, packagePath) {
+		if moduleNameAllowed {
+			ctx.ModuleErrorf("A module cannot be in a directory listed in keepExistingBuildFile"+
+				" and also be in moduleAlwaysConvert. Directory: '%s'", packagePath)
+			return false
+		}
+	}
 
 	// This is a tristate value: true, false, or unset.
-	propValue := b.bazelProperties.Bazel_module.Bp2build_available
-	if bp2buildDefaultTrueRecursively(packagePath, config) {
+	if ok, directoryPath := bp2buildDefaultTrueRecursively(packagePath, allowlist.defaultConfig); ok {
+		if moduleNameAllowed {
+			ctx.ModuleErrorf("A module cannot be in a directory marked Bp2BuildDefaultTrue"+
+				" or Bp2BuildDefaultTrueRecursively and also be in moduleAlwaysConvert. Directory: '%s'",
+				directoryPath)
+			return false
+		}
+
 		// Allow modules to explicitly opt-out.
 		return proptools.BoolDefault(propValue, true)
 	}
 
 	// Allow modules to explicitly opt-in.
-	return proptools.BoolDefault(propValue, false)
+	return proptools.BoolDefault(propValue, allowlistConvert)
 }
 
 // bp2buildDefaultTrueRecursively checks that the package contains a prefix from the
@@ -681,14 +453,16 @@
 //
 // This function will also return false if the package doesn't match anything in
 // the config.
-func bp2buildDefaultTrueRecursively(packagePath string, config Bp2BuildConfig) bool {
-	ret := false
-
+//
+// This function will also return the allowlist entry which caused a particular
+// package to be enabled. Since packages can be enabled via a recursive declaration,
+// the path returned will not always be the same as the one provided.
+func bp2buildDefaultTrueRecursively(packagePath string, config allowlists.Bp2BuildConfig) (bool, string) {
 	// Check if the package path has an exact match in the config.
-	if config[packagePath] == Bp2BuildDefaultTrue || config[packagePath] == Bp2BuildDefaultTrueRecursively {
-		return true
-	} else if config[packagePath] == Bp2BuildDefaultFalse {
-		return false
+	if config[packagePath] == allowlists.Bp2BuildDefaultTrue || config[packagePath] == allowlists.Bp2BuildDefaultTrueRecursively {
+		return true, packagePath
+	} else if config[packagePath] == allowlists.Bp2BuildDefaultFalse {
+		return false, packagePath
 	}
 
 	// If not, check for the config recursively.
@@ -696,15 +470,15 @@
 	// e.g. for x/y/z, iterate over x, x/y, then x/y/z, taking the final value from the allowlist.
 	for _, part := range strings.Split(packagePath, "/") {
 		packagePrefix += part
-		if config[packagePrefix] == Bp2BuildDefaultTrueRecursively {
+		if config[packagePrefix] == allowlists.Bp2BuildDefaultTrueRecursively {
 			// package contains this prefix and this prefix should convert all modules
-			return true
+			return true, packagePrefix
 		}
 		// Continue to the next part of the package dir.
 		packagePrefix += "/"
 	}
 
-	return ret
+	return false, packagePath
 }
 
 // GetBazelBuildFileContents returns the file contents of a hand-crafted BUILD file if available or
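
For illustration, a minimal sketch of how the builder-style allowlist above is assembled and then consulted, written as if it were package-internal code in package android; the directory and module names here are hypothetical, and the real lists live in android/soong/android/allowlists:

    // Assemble an allowlist from explicit lists, mirroring the bp2buildAllowlist variable above.
    allowlist := NewBp2BuildAllowlist().
        SetDefaultConfig(allowlists.Bp2BuildConfig{
            "external/examplelib": allowlists.Bp2BuildDefaultTrueRecursively, // hypothetical directory
        }).
        SetModuleDoNotConvertList([]string{"example_broken_module"}) // hypothetical module name

    // The recursive default applies to subpackages; the second return value is the
    // allowlist prefix that matched, which may be shorter than the queried path.
    ok, matchedPrefix := bp2buildDefaultTrueRecursively("external/examplelib/sub/pkg", allowlist.defaultConfig)
    // ok == true, matchedPrefix == "external/examplelib"
    _, _ = ok, matchedPrefix
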
diff --git a/android/bazel_handler.go b/android/bazel_handler.go
index cb45f2a..6b2be69 100644
--- a/android/bazel_handler.go
+++ b/android/bazel_handler.go
@@ -50,8 +50,8 @@
 
 // Portion of cquery map key to describe target configuration.
 type configKey struct {
-	archType ArchType
-	osType   OsType
+	arch   string
+	osType OsType
 }
 
 // Map key to describe bazel cquery requests.
@@ -73,7 +73,7 @@
 }
 
 type BazelContext interface {
-	// The below methods involve queuing cquery requests to be later invoked
+	// The methods below involve queuing cquery requests to be later invoked
 	// by bazel. If any of these methods return (_, false), then the request
 	// has been queued to be run later.
 
@@ -561,7 +561,7 @@
     return id_string + ">>" + %s(target)
 `
 
-	for requestType, _ := range requestTypeToCqueryIdEntries {
+	for requestType := range requestTypeToCqueryIdEntries {
 		labelMapName := requestType.Name() + "_Labels"
 		functionName := requestType.Name() + "_Fn"
 		labelRegistrationMapSection += fmt.Sprintf(mapDeclarationFormatString,
@@ -664,7 +664,12 @@
 	if err != nil {
 		return err
 	}
-
+	if metricsDir := context.paths.BazelMetricsDir(); metricsDir != "" {
+		err = os.MkdirAll(metricsDir, 0777)
+		if err != nil {
+			return err
+		}
+	}
 	err = ioutil.WriteFile(filepath.Join(soongInjectionPath, "WORKSPACE.bazel"), []byte{}, 0666)
 	if err != nil {
 		return err
@@ -716,9 +721,9 @@
 		}
 	}
 
-	for val, _ := range context.requests {
+	for val := range context.requests {
 		if cqueryResult, ok := cqueryResults[getCqueryId(val)]; ok {
-			context.results[val] = string(cqueryResult)
+			context.results[val] = cqueryResult
 		} else {
 			return fmt.Errorf("missing result for bazel target %s. query output: [%s], cquery err: [%s]",
 				getCqueryId(val), cqueryOutput, cqueryErr)
@@ -858,7 +863,7 @@
 }
 
 func getConfigString(key cqueryKey) string {
-	arch := key.configKey.archType.Name
+	arch := key.configKey.arch
 	if len(arch) == 0 || arch == "common" {
 		// Use host platform, which is currently hardcoded to be x86_64.
 		arch = "x86_64"
@@ -872,5 +877,9 @@
 }
 
 func GetConfigKey(ctx ModuleContext) configKey {
-	return configKey{archType: ctx.Arch().ArchType, osType: ctx.Os()}
+	return configKey{
+		// use a string because the Arch struct is not a valid map key in Go
+		arch:   ctx.Arch().String(),
+		osType: ctx.Os(),
+	}
 }
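
For illustration, a short sketch of the effect of the configKey change above, again assuming package-internal code; the variant string is just an example of what ctx.Arch().String() can produce:

    // Device variant: the full arch string (arch type plus variant) now keys the cquery
    // request, producing ids like "//foo:bar|arm64_armv8-a|android" (see the updated test below).
    key := configKey{arch: "arm64_armv8-a", osType: Android}

    // An empty or "common" arch string is mapped to "x86_64" inside getConfigString,
    // so arch-agnostic modules are queried against the host platform.
    _ = key
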
diff --git a/android/bazel_handler_test.go b/android/bazel_handler_test.go
index ad5b63b..e5cff90 100644
--- a/android/bazel_handler_test.go
+++ b/android/bazel_handler_test.go
@@ -9,9 +9,9 @@
 
 func TestRequestResultsAfterInvokeBazel(t *testing.T) {
 	label := "//foo:bar"
-	cfg := configKey{Arm64, Android}
+	cfg := configKey{"arm64_armv8-a", Android}
 	bazelContext, _ := testBazelContext(t, map[bazelCommand]string{
-		bazelCommand{command: "cquery", expression: "deps(@soong_injection//mixed_builds:buildroot, 2)"}: `//foo:bar|arm64|android>>out/foo/bar.txt`,
+		bazelCommand{command: "cquery", expression: "deps(@soong_injection//mixed_builds:buildroot, 2)"}: `//foo:bar|arm64_armv8-a|android>>out/foo/bar.txt`,
 	})
 	g, ok := bazelContext.GetOutputFiles(label, cfg)
 	if ok {
diff --git a/android/bazel_paths.go b/android/bazel_paths.go
index f353a9d..fa10f62 100644
--- a/android/bazel_paths.go
+++ b/android/bazel_paths.go
@@ -79,6 +79,7 @@
 	OtherModuleType(m blueprint.Module) string
 	OtherModuleName(m blueprint.Module) string
 	OtherModuleDir(m blueprint.Module) string
+	ModuleErrorf(format string, args ...interface{})
 }
 
 // A subset of the ModuleContext methods which are sufficient to resolve references to paths/deps in
diff --git a/android/bazel_test.go b/android/bazel_test.go
index e5d8fbb..482df2a 100644
--- a/android/bazel_test.go
+++ b/android/bazel_test.go
@@ -13,59 +13,67 @@
 // limitations under the License.
 package android
 
-import "testing"
+import (
+	"android/soong/android/allowlists"
+	"android/soong/bazel"
+	"fmt"
+	"testing"
+
+	"github.com/google/blueprint"
+	"github.com/google/blueprint/proptools"
+)
 
 func TestConvertAllModulesInPackage(t *testing.T) {
 	testCases := []struct {
-		prefixes   Bp2BuildConfig
+		prefixes   allowlists.Bp2BuildConfig
 		packageDir string
 	}{
 		{
-			prefixes: Bp2BuildConfig{
-				"a": Bp2BuildDefaultTrueRecursively,
+			prefixes: allowlists.Bp2BuildConfig{
+				"a": allowlists.Bp2BuildDefaultTrueRecursively,
 			},
 			packageDir: "a",
 		},
 		{
-			prefixes: Bp2BuildConfig{
-				"a/b": Bp2BuildDefaultTrueRecursively,
+			prefixes: allowlists.Bp2BuildConfig{
+				"a/b": allowlists.Bp2BuildDefaultTrueRecursively,
 			},
 			packageDir: "a/b",
 		},
 		{
-			prefixes: Bp2BuildConfig{
-				"a/b":   Bp2BuildDefaultTrueRecursively,
-				"a/b/c": Bp2BuildDefaultTrueRecursively,
+			prefixes: allowlists.Bp2BuildConfig{
+				"a/b":   allowlists.Bp2BuildDefaultTrueRecursively,
+				"a/b/c": allowlists.Bp2BuildDefaultTrueRecursively,
 			},
 			packageDir: "a/b",
 		},
 		{
-			prefixes: Bp2BuildConfig{
-				"a":     Bp2BuildDefaultTrueRecursively,
-				"d/e/f": Bp2BuildDefaultTrueRecursively,
+			prefixes: allowlists.Bp2BuildConfig{
+				"a":     allowlists.Bp2BuildDefaultTrueRecursively,
+				"d/e/f": allowlists.Bp2BuildDefaultTrueRecursively,
 			},
 			packageDir: "a/b",
 		},
 		{
-			prefixes: Bp2BuildConfig{
-				"a":     Bp2BuildDefaultFalse,
-				"a/b":   Bp2BuildDefaultTrueRecursively,
-				"a/b/c": Bp2BuildDefaultFalse,
+			prefixes: allowlists.Bp2BuildConfig{
+				"a":     allowlists.Bp2BuildDefaultFalse,
+				"a/b":   allowlists.Bp2BuildDefaultTrueRecursively,
+				"a/b/c": allowlists.Bp2BuildDefaultFalse,
 			},
 			packageDir: "a/b",
 		},
 		{
-			prefixes: Bp2BuildConfig{
-				"a":     Bp2BuildDefaultTrueRecursively,
-				"a/b":   Bp2BuildDefaultFalse,
-				"a/b/c": Bp2BuildDefaultTrueRecursively,
+			prefixes: allowlists.Bp2BuildConfig{
+				"a":     allowlists.Bp2BuildDefaultTrueRecursively,
+				"a/b":   allowlists.Bp2BuildDefaultFalse,
+				"a/b/c": allowlists.Bp2BuildDefaultTrueRecursively,
 			},
 			packageDir: "a",
 		},
 	}
 
 	for _, test := range testCases {
-		if !bp2buildDefaultTrueRecursively(test.packageDir, test.prefixes) {
+		if ok, _ := bp2buildDefaultTrueRecursively(test.packageDir, test.prefixes); !ok {
 			t.Errorf("Expected to convert all modules in %s based on %v, but failed.", test.packageDir, test.prefixes)
 		}
 	}
@@ -73,62 +81,308 @@
 
 func TestModuleOptIn(t *testing.T) {
 	testCases := []struct {
-		prefixes   Bp2BuildConfig
+		prefixes   allowlists.Bp2BuildConfig
 		packageDir string
 	}{
 		{
-			prefixes: Bp2BuildConfig{
-				"a/b": Bp2BuildDefaultFalse,
+			prefixes: allowlists.Bp2BuildConfig{
+				"a/b": allowlists.Bp2BuildDefaultFalse,
 			},
 			packageDir: "a/b",
 		},
 		{
-			prefixes: Bp2BuildConfig{
-				"a":   Bp2BuildDefaultFalse,
-				"a/b": Bp2BuildDefaultTrueRecursively,
+			prefixes: allowlists.Bp2BuildConfig{
+				"a":   allowlists.Bp2BuildDefaultFalse,
+				"a/b": allowlists.Bp2BuildDefaultTrueRecursively,
 			},
 			packageDir: "a",
 		},
 		{
-			prefixes: Bp2BuildConfig{
-				"a/b": Bp2BuildDefaultTrueRecursively,
+			prefixes: allowlists.Bp2BuildConfig{
+				"a/b": allowlists.Bp2BuildDefaultTrueRecursively,
 			},
 			packageDir: "a", // opt-in by default
 		},
 		{
-			prefixes: Bp2BuildConfig{
-				"a/b/c": Bp2BuildDefaultTrueRecursively,
+			prefixes: allowlists.Bp2BuildConfig{
+				"a/b/c": allowlists.Bp2BuildDefaultTrueRecursively,
 			},
 			packageDir: "a/b",
 		},
 		{
-			prefixes: Bp2BuildConfig{
-				"a":     Bp2BuildDefaultTrueRecursively,
-				"d/e/f": Bp2BuildDefaultTrueRecursively,
+			prefixes: allowlists.Bp2BuildConfig{
+				"a":     allowlists.Bp2BuildDefaultTrueRecursively,
+				"d/e/f": allowlists.Bp2BuildDefaultTrueRecursively,
 			},
 			packageDir: "foo/bar",
 		},
 		{
-			prefixes: Bp2BuildConfig{
-				"a":     Bp2BuildDefaultTrueRecursively,
-				"a/b":   Bp2BuildDefaultFalse,
-				"a/b/c": Bp2BuildDefaultTrueRecursively,
+			prefixes: allowlists.Bp2BuildConfig{
+				"a":     allowlists.Bp2BuildDefaultTrueRecursively,
+				"a/b":   allowlists.Bp2BuildDefaultFalse,
+				"a/b/c": allowlists.Bp2BuildDefaultTrueRecursively,
 			},
 			packageDir: "a/b",
 		},
 		{
-			prefixes: Bp2BuildConfig{
-				"a":     Bp2BuildDefaultFalse,
-				"a/b":   Bp2BuildDefaultTrueRecursively,
-				"a/b/c": Bp2BuildDefaultFalse,
+			prefixes: allowlists.Bp2BuildConfig{
+				"a":     allowlists.Bp2BuildDefaultFalse,
+				"a/b":   allowlists.Bp2BuildDefaultTrueRecursively,
+				"a/b/c": allowlists.Bp2BuildDefaultFalse,
 			},
 			packageDir: "a",
 		},
 	}
 
 	for _, test := range testCases {
-		if bp2buildDefaultTrueRecursively(test.packageDir, test.prefixes) {
+		if ok, _ := bp2buildDefaultTrueRecursively(test.packageDir, test.prefixes); ok {
 			t.Errorf("Expected to allow module opt-in in %s based on %v, but failed.", test.packageDir, test.prefixes)
 		}
 	}
 }
+
+type TestBazelModule struct {
+	bazel.TestModuleInfo
+	BazelModuleBase
+}
+
+var _ blueprint.Module = TestBazelModule{}
+
+func (m TestBazelModule) Name() string {
+	return m.TestModuleInfo.ModuleName
+}
+
+func (m TestBazelModule) GenerateBuildActions(blueprint.ModuleContext) {
+}
+
+type TestBazelConversionContext struct {
+	omc       bazel.OtherModuleTestContext
+	allowlist bp2BuildConversionAllowlist
+	errors    []string
+}
+
+var _ bazelOtherModuleContext = &TestBazelConversionContext{}
+
+func (bcc *TestBazelConversionContext) OtherModuleType(m blueprint.Module) string {
+	return bcc.omc.OtherModuleType(m)
+}
+
+func (bcc *TestBazelConversionContext) OtherModuleName(m blueprint.Module) string {
+	return bcc.omc.OtherModuleName(m)
+}
+
+func (bcc *TestBazelConversionContext) OtherModuleDir(m blueprint.Module) string {
+	return bcc.omc.OtherModuleDir(m)
+}
+
+func (bcc *TestBazelConversionContext) ModuleErrorf(format string, args ...interface{}) {
+	bcc.errors = append(bcc.errors, fmt.Sprintf(format, args...))
+}
+
+func (bcc *TestBazelConversionContext) Config() Config {
+	return Config{
+		&config{
+			bp2buildPackageConfig: bcc.allowlist,
+		},
+	}
+}
+
+var bazelableBazelModuleBase = BazelModuleBase{
+	bazelProperties: properties{
+		Bazel_module: bazelModuleProperties{
+			CanConvertToBazel: true,
+		},
+	},
+}
+
+func TestBp2BuildAllowlist(t *testing.T) {
+	testCases := []struct {
+		description    string
+		shouldConvert  bool
+		expectedErrors []string
+		module         TestBazelModule
+		allowlist      bp2BuildConversionAllowlist
+	}{
+		{
+			description:   "allowlist enables module",
+			shouldConvert: true,
+			module: TestBazelModule{
+				TestModuleInfo: bazel.TestModuleInfo{
+					ModuleName: "foo",
+					Typ:        "rule1",
+					Dir:        "dir1",
+				},
+				BazelModuleBase: bazelableBazelModuleBase,
+			},
+			allowlist: bp2BuildConversionAllowlist{
+				moduleAlwaysConvert: map[string]bool{
+					"foo": true,
+				},
+			},
+		},
+		{
+			description:    "module in name allowlist and type allowlist fails",
+			shouldConvert:  false,
+			expectedErrors: []string{"A module cannot be in moduleAlwaysConvert and also be in moduleTypeAlwaysConvert"},
+			module: TestBazelModule{
+				TestModuleInfo: bazel.TestModuleInfo{
+					ModuleName: "foo",
+					Typ:        "rule1",
+					Dir:        "dir1",
+				},
+				BazelModuleBase: bazelableBazelModuleBase,
+			},
+			allowlist: bp2BuildConversionAllowlist{
+				moduleAlwaysConvert: map[string]bool{
+					"foo": true,
+				},
+				moduleTypeAlwaysConvert: map[string]bool{
+					"rule1": true,
+				},
+			},
+		},
+		{
+			description:    "module in allowlist and denylist fails",
+			shouldConvert:  false,
+			expectedErrors: []string{"a module cannot be in moduleDoNotConvert and also be in moduleAlwaysConvert"},
+			module: TestBazelModule{
+				TestModuleInfo: bazel.TestModuleInfo{
+					ModuleName: "foo",
+					Typ:        "rule1",
+					Dir:        "dir1",
+				},
+				BazelModuleBase: bazelableBazelModuleBase,
+			},
+			allowlist: bp2BuildConversionAllowlist{
+				moduleAlwaysConvert: map[string]bool{
+					"foo": true,
+				},
+				moduleDoNotConvert: map[string]bool{
+					"foo": true,
+				},
+			},
+		},
+		{
+			description:    "module in allowlist and existing BUILD file",
+			shouldConvert:  false,
+			expectedErrors: []string{"A module cannot be in a directory listed in keepExistingBuildFile and also be in moduleAlwaysConvert. Directory: 'existing/build/dir'"},
+			module: TestBazelModule{
+				TestModuleInfo: bazel.TestModuleInfo{
+					ModuleName: "foo",
+					Typ:        "rule1",
+					Dir:        "existing/build/dir",
+				},
+				BazelModuleBase: bazelableBazelModuleBase,
+			},
+			allowlist: bp2BuildConversionAllowlist{
+				moduleAlwaysConvert: map[string]bool{
+					"foo": true,
+				},
+				keepExistingBuildFile: map[string]bool{
+					"existing/build/dir": true,
+				},
+			},
+		},
+		{
+			description:    "module allowlist and enabled directory",
+			shouldConvert:  false,
+			expectedErrors: []string{"A module cannot be in a directory marked Bp2BuildDefaultTrue or Bp2BuildDefaultTrueRecursively and also be in moduleAlwaysConvert. Directory: 'existing/build/dir'"},
+			module: TestBazelModule{
+				TestModuleInfo: bazel.TestModuleInfo{
+					ModuleName: "foo",
+					Typ:        "rule1",
+					Dir:        "existing/build/dir",
+				},
+				BazelModuleBase: bazelableBazelModuleBase,
+			},
+			allowlist: bp2BuildConversionAllowlist{
+				moduleAlwaysConvert: map[string]bool{
+					"foo": true,
+				},
+				defaultConfig: allowlists.Bp2BuildConfig{
+					"existing/build/dir": allowlists.Bp2BuildDefaultTrue,
+				},
+			},
+		},
+		{
+			description:    "module allowlist and enabled subdirectory",
+			shouldConvert:  false,
+			expectedErrors: []string{"A module cannot be in a directory marked Bp2BuildDefaultTrue or Bp2BuildDefaultTrueRecursively and also be in moduleAlwaysConvert. Directory: 'existing/build/dir'"},
+			module: TestBazelModule{
+				TestModuleInfo: bazel.TestModuleInfo{
+					ModuleName: "foo",
+					Typ:        "rule1",
+					Dir:        "existing/build/dir/subdir",
+				},
+				BazelModuleBase: bazelableBazelModuleBase,
+			},
+			allowlist: bp2BuildConversionAllowlist{
+				moduleAlwaysConvert: map[string]bool{
+					"foo": true,
+				},
+				defaultConfig: allowlists.Bp2BuildConfig{
+					"existing/build/dir": allowlists.Bp2BuildDefaultTrueRecursively,
+				},
+			},
+		},
+		{
+			description:   "module enabled in unit test short-circuits other allowlists",
+			shouldConvert: true,
+			module: TestBazelModule{
+				TestModuleInfo: bazel.TestModuleInfo{
+					ModuleName: "foo",
+					Typ:        "rule1",
+					Dir:        ".",
+				},
+				BazelModuleBase: BazelModuleBase{
+					bazelProperties: properties{
+						Bazel_module: bazelModuleProperties{
+							CanConvertToBazel:  true,
+							Bp2build_available: proptools.BoolPtr(true),
+						},
+					},
+				},
+			},
+			allowlist: bp2BuildConversionAllowlist{
+				moduleAlwaysConvert: map[string]bool{
+					"foo": true,
+				},
+				moduleDoNotConvert: map[string]bool{
+					"foo": true,
+				},
+			},
+		},
+	}
+
+	for _, test := range testCases {
+		t.Run(test.description, func(t *testing.T) {
+			bcc := &TestBazelConversionContext{
+				omc: bazel.OtherModuleTestContext{
+					Modules: []bazel.TestModuleInfo{
+						test.module.TestModuleInfo,
+					},
+				},
+				allowlist: test.allowlist,
+			}
+
+			shouldConvert := test.module.shouldConvertWithBp2build(bcc, test.module.TestModuleInfo)
+			if test.shouldConvert != shouldConvert {
+				t.Errorf("Module shouldConvert expected to be: %v, but was: %v", test.shouldConvert, shouldConvert)
+			}
+
+			errorsMatch := true
+			if len(test.expectedErrors) != len(bcc.errors) {
+				errorsMatch = false
+			} else {
+				for i, err := range test.expectedErrors {
+					if err != bcc.errors[i] {
+						errorsMatch = false
+					}
+				}
+			}
+			if !errorsMatch {
+				t.Errorf("Expected errors to be: %v, but were: %v", test.expectedErrors, bcc.errors)
+			}
+		})
+	}
+}
diff --git a/android/config.go b/android/config.go
index 57eff92..9f1fd6a 100644
--- a/android/config.go
+++ b/android/config.go
@@ -158,7 +158,7 @@
 	mockBpList string
 
 	runningAsBp2Build              bool
-	bp2buildPackageConfig          Bp2BuildConfig
+	bp2buildPackageConfig          bp2BuildConversionAllowlist
 	Bp2buildSoongConfigDefinitions soongconfig.Bp2BuildSoongConfigDefinitions
 
 	// If testAllowNonExistentPaths is true then PathForSource and PathForModuleSrc won't error
@@ -350,18 +350,20 @@
 
 	config := &config{
 		productVariables: productVariables{
-			DeviceName:                        stringPtr("test_device"),
-			Platform_sdk_version:              intPtr(30),
-			Platform_sdk_codename:             stringPtr("S"),
-			Platform_version_active_codenames: []string{"S", "Tiramisu"},
-			DeviceSystemSdkVersions:           []string{"14", "15"},
-			Platform_systemsdk_versions:       []string{"29", "30"},
-			AAPTConfig:                        []string{"normal", "large", "xlarge", "hdpi", "xhdpi", "xxhdpi"},
-			AAPTPreferredConfig:               stringPtr("xhdpi"),
-			AAPTCharacteristics:               stringPtr("nosdcard"),
-			AAPTPrebuiltDPI:                   []string{"xhdpi", "xxhdpi"},
-			UncompressPrivAppDex:              boolPtr(true),
-			ShippingApiLevel:                  stringPtr("30"),
+			DeviceName:                          stringPtr("test_device"),
+			DeviceProduct:                       stringPtr("test_product"),
+			Platform_sdk_version:                intPtr(30),
+			Platform_sdk_codename:               stringPtr("S"),
+			Platform_base_sdk_extension_version: intPtr(1),
+			Platform_version_active_codenames:   []string{"S", "Tiramisu"},
+			DeviceSystemSdkVersions:             []string{"14", "15"},
+			Platform_systemsdk_versions:         []string{"29", "30"},
+			AAPTConfig:                          []string{"normal", "large", "xlarge", "hdpi", "xhdpi", "xxhdpi"},
+			AAPTPreferredConfig:                 stringPtr("xhdpi"),
+			AAPTCharacteristics:                 stringPtr("nosdcard"),
+			AAPTPrebuiltDPI:                     []string{"xhdpi", "xxhdpi"},
+			UncompressPrivAppDex:                boolPtr(true),
+			ShippingApiLevel:                    stringPtr("30"),
 		},
 
 		outDir:       buildDir,
@@ -519,7 +521,7 @@
 	}
 
 	if archConfig != nil {
-		androidTargets, err := decodeArchSettings(Android, archConfig)
+		androidTargets, err := decodeAndroidArchSettings(archConfig)
 		if err != nil {
 			return Config{}, err
 		}
@@ -548,7 +550,7 @@
 	}
 
 	config.BazelContext, err = NewBazelContext(config)
-	config.bp2buildPackageConfig = bp2buildDefaultConfig
+	config.bp2buildPackageConfig = bp2buildAllowlist
 
 	return Config{config}, err
 }
@@ -722,6 +724,15 @@
 	return *c.productVariables.DeviceName
 }
 
+// DeviceProduct returns the current product target. There could be multiple of
+// these per device type.
+//
+// NOTE: Do not base conditional logic on this value. It may break product
+//       inheritance.
+func (c *config) DeviceProduct() string {
+	return *c.productVariables.DeviceProduct
+}
+
 func (c *config) DeviceResourceOverlays() []string {
 	return c.productVariables.DeviceResourceOverlays
 }
@@ -742,6 +753,14 @@
 	return String(c.productVariables.Platform_sdk_codename)
 }
 
+func (c *config) PlatformSdkExtensionVersion() int {
+	return *c.productVariables.Platform_sdk_extension_version
+}
+
+func (c *config) PlatformBaseSdkExtensionVersion() int {
+	return *c.productVariables.Platform_base_sdk_extension_version
+}
+
 func (c *config) PlatformSecurityPatch() string {
 	return String(c.productVariables.Platform_security_patch)
 }
@@ -759,7 +778,7 @@
 }
 
 func (c *config) MinSupportedSdkVersion() ApiLevel {
-	return uncheckedFinalApiLevel(16)
+	return uncheckedFinalApiLevel(19)
 }
 
 func (c *config) FinalApiLevels() []ApiLevel {
@@ -1247,6 +1266,10 @@
 	return Bool(c.config.productVariables.ClangCoverage)
 }
 
+func (c *deviceConfig) ClangCoverageContinuousMode() bool {
+	return Bool(c.config.productVariables.ClangCoverageContinuousMode)
+}
+
 func (c *deviceConfig) GcovCoverageEnabled() bool {
 	return Bool(c.config.productVariables.GcovCoverage)
 }
@@ -1337,6 +1360,10 @@
 	return "", false
 }
 
+func (c *deviceConfig) ApexGlobalMinSdkVersionOverride() string {
+	return String(c.config.productVariables.ApexGlobalMinSdkVersionOverride)
+}
+
 func (c *config) IntegerOverflowDisabledForPath(path string) bool {
 	if len(c.productVariables.IntegerOverflowExcludePaths) == 0 {
 		return false
@@ -1642,6 +1669,10 @@
 	return c.config.productVariables.BuildBrokenVendorPropertyNamespace
 }
 
+func (c *deviceConfig) BuildBrokenInputDir(name string) bool {
+	return InList(name, c.config.productVariables.BuildBrokenInputDirModules)
+}
+
 func (c *deviceConfig) RequiresInsecureExecmemForSwiftshader() bool {
 	return c.config.productVariables.RequiresInsecureExecmemForSwiftshader
 }
diff --git a/android/config_bp2build.go b/android/config_bp2build.go
new file mode 100644
index 0000000..748be62
--- /dev/null
+++ b/android/config_bp2build.go
@@ -0,0 +1,487 @@
+// Copyright 2021 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package android
+
+import (
+	"fmt"
+	"reflect"
+	"regexp"
+	"sort"
+	"strings"
+
+	"android/soong/bazel"
+	"android/soong/starlark_fmt"
+
+	"github.com/google/blueprint"
+)
+
+// BazelVarExporter is a collection of configuration variables that can be exported for use in Bazel rules
+type BazelVarExporter interface {
+	// asBazel expands strings of configuration variables into their concrete values
+	asBazel(Config, ExportedStringVariables, ExportedStringListVariables, ExportedConfigDependingVariables) []bazelConstant
+}
+
+// ExportedVariables is a collection of interdependent configuration variables
+type ExportedVariables struct {
+	// Maps containing toolchain variables that are independent of the
+	// environment variables of the build.
+	exportedStringVars         ExportedStringVariables
+	exportedStringListVars     ExportedStringListVariables
+	exportedStringListDictVars ExportedStringListDictVariables
+
+	exportedVariableReferenceDictVars ExportedVariableReferenceDictVariables
+
+	// Maps containing variables that are dependent on the build config.
+	exportedConfigDependingVars ExportedConfigDependingVariables
+
+	pctx PackageContext
+}
+
+// NewExportedVariables creates an empty ExportedVariables struct with non-nil maps
+func NewExportedVariables(pctx PackageContext) ExportedVariables {
+	return ExportedVariables{
+		exportedStringVars:                ExportedStringVariables{},
+		exportedStringListVars:            ExportedStringListVariables{},
+		exportedStringListDictVars:        ExportedStringListDictVariables{},
+		exportedVariableReferenceDictVars: ExportedVariableReferenceDictVariables{},
+		exportedConfigDependingVars:       ExportedConfigDependingVariables{},
+		pctx:                              pctx,
+	}
+}
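+
+// Illustrative usage (hypothetical names, not part of this change): a toolchain
+// package would typically create a single ExportedVariables value and register
+// entries through the Export* helpers below, for example:
+//
+//	exportedVars := NewExportedVariables(pctx)
+//	exportedVars.ExportStringStaticVariable("MyFlag", "-Wall")
+//	exportedVars.ExportStringListStaticVariable("MyFlags", []string{"-Wall", "-Werror"})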
+
+func (ev ExportedVariables) asBazel(config Config,
+	stringVars ExportedStringVariables, stringListVars ExportedStringListVariables, cfgDepVars ExportedConfigDependingVariables) []bazelConstant {
+	ret := []bazelConstant{}
+	ret = append(ret, ev.exportedStringVars.asBazel(config, stringVars, stringListVars, cfgDepVars)...)
+	ret = append(ret, ev.exportedStringListVars.asBazel(config, stringVars, stringListVars, cfgDepVars)...)
+	ret = append(ret, ev.exportedStringListDictVars.asBazel(config, stringVars, stringListVars, cfgDepVars)...)
+	// Note: ExportedVariableReferenceDictVars collections can only contain references to other variables and must be printed last
+	ret = append(ret, ev.exportedVariableReferenceDictVars.asBazel(config, stringVars, stringListVars, cfgDepVars)...)
+	return ret
+}
+
+// ExportStringStaticVariable declares a static string variable and exports it to
+// Bazel's toolchain.
+func (ev ExportedVariables) ExportStringStaticVariable(name string, value string) {
+	ev.pctx.StaticVariable(name, value)
+	ev.exportedStringVars.set(name, value)
+}
+
+// ExportStringListStaticVariable declares a static variable and exports it to
+// Bazel's toolchain.
+func (ev ExportedVariables) ExportStringListStaticVariable(name string, value []string) {
+	ev.pctx.StaticVariable(name, strings.Join(value, " "))
+	ev.exportedStringListVars.set(name, value)
+}
+
+// ExportVariableConfigMethod declares a variable whose value is evaluated at
+// runtime via a function with access to the Config and exports it to Bazel's
+// toolchain.
+func (ev ExportedVariables) ExportVariableConfigMethod(name string, method interface{}) blueprint.Variable {
+	ev.exportedConfigDependingVars.set(name, method)
+	return ev.pctx.VariableConfigMethod(name, method)
+}
+
+// ExportSourcePathVariable declares a static "source path" variable and exports
+// it to Bazel's toolchain.
+func (ev ExportedVariables) ExportSourcePathVariable(name string, value string) {
+	ev.pctx.SourcePathVariable(name, value)
+	ev.exportedStringVars.set(name, value)
+}
+
+// ExportVariableFuncVariable declares a variable whose value is evaluated at
+// runtime via a function and exports it to Bazel's toolchain.
+func (ev ExportedVariables) ExportVariableFuncVariable(name string, f func() string) {
+	ev.exportedConfigDependingVars.set(name, func(config Config) string {
+		return f()
+	})
+	ev.pctx.VariableFunc(name, func(PackageVarContext) string {
+		return f()
+	})
+}
+
+// ExportString only exports a variable to Bazel, but does not declare it in Soong
+func (ev ExportedVariables) ExportString(name string, value string) {
+	ev.exportedStringVars.set(name, value)
+}
+
+// ExportStringList only exports a variable to Bazel, but does not declare it in Soong
+func (ev ExportedVariables) ExportStringList(name string, value []string) {
+	ev.exportedStringListVars.set(name, value)
+}
+
+// ExportStringListDict only exports a variable to Bazel, but does not declare it in Soong
+func (ev ExportedVariables) ExportStringListDict(name string, value map[string][]string) {
+	ev.exportedStringListDictVars.set(name, value)
+}
+
+// ExportVariableReferenceDict only exports a variable to Bazel, but does not declare it in Soong
+func (ev ExportedVariables) ExportVariableReferenceDict(name string, value map[string]string) {
+	ev.exportedVariableReferenceDictVars.set(name, value)
+}
+
+// ExportedConfigDependingVariables is a mapping of variable names to functions
+// of type func(config Config) string which return the runtime-evaluated string
+// value of a particular variable
+type ExportedConfigDependingVariables map[string]interface{}
+
+func (m ExportedConfigDependingVariables) set(k string, v interface{}) {
+	m[k] = v
+}
+
+// Ensure that string s has no characters that would be invalid in the generated bzl file.
+func validateCharacters(s string) string {
+	for _, c := range []string{`\n`, `"`, `\`} {
+		if strings.Contains(s, c) {
+			panic(fmt.Errorf("%s contains illegal character %s", s, c))
+		}
+	}
+	return s
+}
+
+type bazelConstant struct {
+	variableName       string
+	internalDefinition string
+	sortLast           bool
+}
+
+// ExportedStringVariables is a mapping of variable names to string values
+type ExportedStringVariables map[string]string
+
+func (m ExportedStringVariables) set(k string, v string) {
+	m[k] = v
+}
+
+func (m ExportedStringVariables) asBazel(config Config,
+	stringVars ExportedStringVariables, stringListVars ExportedStringListVariables, cfgDepVars ExportedConfigDependingVariables) []bazelConstant {
+	ret := make([]bazelConstant, 0, len(m))
+	for k, variableValue := range m {
+		expandedVar, err := expandVar(config, variableValue, stringVars, stringListVars, cfgDepVars)
+		if err != nil {
+			panic(fmt.Errorf("error expanding config variable %s: %s", k, err))
+		}
+		if len(expandedVar) > 1 {
+			panic(fmt.Errorf("%s expands to more than one string value: %s", variableValue, expandedVar))
+		}
+		ret = append(ret, bazelConstant{
+			variableName:       k,
+			internalDefinition: fmt.Sprintf(`"%s"`, validateCharacters(expandedVar[0])),
+		})
+	}
+	return ret
+}
+
+// ExportedStringListVariables is a mapping of variable names to a list of strings
+type ExportedStringListVariables map[string][]string
+
+func (m ExportedStringListVariables) set(k string, v []string) {
+	m[k] = v
+}
+
+func (m ExportedStringListVariables) asBazel(config Config,
+	stringScope ExportedStringVariables, stringListScope ExportedStringListVariables,
+	exportedVars ExportedConfigDependingVariables) []bazelConstant {
+	ret := make([]bazelConstant, 0, len(m))
+	// For each exported variable, recursively expand elements in the variableValue
+	// list to ensure that interpolated variables are expanded according to their values
+	// in the variable scope.
+	for k, variableValue := range m {
+		var expandedVars []string
+		for _, v := range variableValue {
+			expandedVar, err := expandVar(config, v, stringScope, stringListScope, exportedVars)
+			if err != nil {
+				panic(fmt.Errorf("Error expanding config variable %s=%s: %s", k, v, err))
+			}
+			expandedVars = append(expandedVars, expandedVar...)
+		}
+		// Assign the list as a bzl-private variable; this variable will be exported
+		// out through a constants struct later.
+		ret = append(ret, bazelConstant{
+			variableName:       k,
+			internalDefinition: starlark_fmt.PrintStringList(expandedVars, 0),
+		})
+	}
+	return ret
+}
+
+// ExportedStringListDictVariables is a mapping from variable names to a
+// dictionary which maps keys to lists of strings
+type ExportedStringListDictVariables map[string]map[string][]string
+
+func (m ExportedStringListDictVariables) set(k string, v map[string][]string) {
+	m[k] = v
+}
+
+// Since dictionaries are not supported in Ninja, we do not expand variables for dictionaries
+func (m ExportedStringListDictVariables) asBazel(_ Config, _ ExportedStringVariables,
+	_ ExportedStringListVariables, _ ExportedConfigDependingVariables) []bazelConstant {
+	ret := make([]bazelConstant, 0, len(m))
+	for k, dict := range m {
+		ret = append(ret, bazelConstant{
+			variableName:       k,
+			internalDefinition: starlark_fmt.PrintStringListDict(dict, 0),
+		})
+	}
+	return ret
+}
+
+// ExportedVariableReferenceDictVariables is a mapping from variable names to a
+// dictionary which references previously defined variables. This is used to
+// create a Starlark output such as:
+// 		string_var1 = "string1"
+// 		var_ref_dict_var1 = {
+// 			"key1": string_var1
+// 		}
+// This type of variable collection must be expanded last so that it recognizes
+// previously defined variables.
+type ExportedVariableReferenceDictVariables map[string]map[string]string
+
+func (m ExportedVariableReferenceDictVariables) set(k string, v map[string]string) {
+	m[k] = v
+}
+
+func (m ExportedVariableReferenceDictVariables) asBazel(_ Config, _ ExportedStringVariables,
+	_ ExportedStringListVariables, _ ExportedConfigDependingVariables) []bazelConstant {
+	ret := make([]bazelConstant, 0, len(m))
+	for n, dict := range m {
+		for k, v := range dict {
+			matches, err := variableReference(v)
+			if err != nil {
+				panic(err)
+			} else if !matches.matches {
+				panic(fmt.Errorf("Expected a variable reference, got %q", v))
+			} else if len(matches.fullVariableReference) != len(v) {
+				panic(fmt.Errorf("Expected only a variable reference, got %q", v))
+			}
+			dict[k] = "_" + matches.variable
+		}
+		ret = append(ret, bazelConstant{
+			variableName:       n,
+			internalDefinition: starlark_fmt.PrintDict(dict, 0),
+			sortLast:           true,
+		})
+	}
+	return ret
+}
+
+// BazelToolchainVars expands an ExportedVariables collection and returns a string
+// of formatted Starlark variable definitions.
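+// Each variable becomes a bzl-private "_name = ..." assignment, and a trailing
+// "constants" struct re-exports them; for example, exporting a string variable
+// "a" with value "b" yields (after the generated-file warning):
+//
+//	_a = "b"
+//
+//	constants = struct(
+//	    a = _a,
+//	)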
+func BazelToolchainVars(config Config, exportedVars ExportedVariables) string {
+	results := exportedVars.asBazel(
+		config,
+		exportedVars.exportedStringVars,
+		exportedVars.exportedStringListVars,
+		exportedVars.exportedConfigDependingVars,
+	)
+
+	sort.Slice(results, func(i, j int) bool {
+		if results[i].sortLast != results[j].sortLast {
+			return !results[i].sortLast
+		}
+		return results[i].variableName < results[j].variableName
+	})
+
+	definitions := make([]string, 0, len(results))
+	constants := make([]string, 0, len(results))
+	for _, b := range results {
+		definitions = append(definitions,
+			fmt.Sprintf("_%s = %s", b.variableName, b.internalDefinition))
+		constants = append(constants,
+			fmt.Sprintf("%[1]s%[2]s = _%[2]s,", starlark_fmt.Indention(1), b.variableName))
+	}
+
+	// Build the exported constants struct.
+	ret := bazel.GeneratedBazelFileWarning
+	ret += "\n\n"
+	ret += strings.Join(definitions, "\n\n")
+	ret += "\n\n"
+	ret += "constants = struct(\n"
+	ret += strings.Join(constants, "\n")
+	ret += "\n)"
+
+	return ret
+}
+
+type match struct {
+	matches               bool
+	fullVariableReference string
+	variable              string
+}
+
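+// variableReference reports whether input contains a config variable reference
+// of the form "${name}" or "${config.name}". For example (illustrative),
+// variableReference("${config.foo}-suffix") matches with fullVariableReference
+// "${config.foo}" and variable "foo".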
+func variableReference(input string) (match, error) {
+	// e.g. "${ExternalCflags}"
+	r := regexp.MustCompile(`\${(?:config\.)?([a-zA-Z0-9_]+)}`)
+
+	matches := r.FindStringSubmatch(input)
+	if len(matches) == 0 {
+		return match{}, nil
+	}
+	if len(matches) != 2 {
+		return match{}, fmt.Errorf("Expected to only match 1 subexpression in %s, got %d", input, len(matches)-1)
+	}
+	return match{
+		matches:               true,
+		fullVariableReference: matches[0],
+		// Index 1 of FindStringSubmatch contains the subexpression match
+		// (variable name) of the capture group.
+		variable: matches[1],
+	}, nil
+}
+
+// expandVar recursively expands interpolated variables in the exportedVars scope.
+//
+// Seen variables are tracked to avoid stack overflow from unbounded recursive
+// interpolation. A fresh copy of the seen set is made for each depth-first
+// expansion, which keeps the bookkeeping simple; interpolation stacks are
+// unlikely to be deep (n > 1), so the extra copying is cheap.
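+//
+// As an illustrative example, with stringScope {"x": "${y}"} and stringListScope
+// {"y": ["a", "b"]}, expandVar(config, "${x}", ...) returns ["a b"]: list values
+// are joined with spaces when interpolated into a string context.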
+func expandVar(config Config, toExpand string, stringScope ExportedStringVariables,
+	stringListScope ExportedStringListVariables, exportedVars ExportedConfigDependingVariables) ([]string, error) {
+
+	// Internal recursive function.
+	var expandVarInternal func(string, map[string]bool) (string, error)
+	expandVarInternal = func(toExpand string, seenVars map[string]bool) (string, error) {
+		var ret string
+		remainingString := toExpand
+		for len(remainingString) > 0 {
+			matches, err := variableReference(remainingString)
+			if err != nil {
+				panic(err)
+			}
+			if !matches.matches {
+				return ret + remainingString, nil
+			}
+			matchIndex := strings.Index(remainingString, matches.fullVariableReference)
+			ret += remainingString[:matchIndex]
+			remainingString = remainingString[matchIndex+len(matches.fullVariableReference):]
+
+			variable := matches.variable
+			// toExpand contains a variable.
+			if _, ok := seenVars[variable]; ok {
+				return ret, fmt.Errorf(
+					"Unbounded recursive interpolation of variable: %s", variable)
+			}
+			// Maps are passed by reference, so create a new map for this scope to
+			// prevent variables seen in one depth-first expansion from also being
+			// treated as "seen" in other depth-first traversals.
+			newSeenVars := map[string]bool{}
+			for k := range seenVars {
+				newSeenVars[k] = true
+			}
+			newSeenVars[variable] = true
+			if unexpandedVars, ok := stringListScope[variable]; ok {
+				expandedVars := []string{}
+				for _, unexpandedVar := range unexpandedVars {
+					expandedVar, err := expandVarInternal(unexpandedVar, newSeenVars)
+					if err != nil {
+						return ret, err
+					}
+					expandedVars = append(expandedVars, expandedVar)
+				}
+				ret += strings.Join(expandedVars, " ")
+			} else if unexpandedVar, ok := stringScope[variable]; ok {
+				expandedVar, err := expandVarInternal(unexpandedVar, newSeenVars)
+				if err != nil {
+					return ret, err
+				}
+				ret += expandedVar
+			} else if unevaluatedVar, ok := exportedVars[variable]; ok {
+				evalFunc := reflect.ValueOf(unevaluatedVar)
+				validateVariableMethod(variable, evalFunc)
+				evaluatedResult := evalFunc.Call([]reflect.Value{reflect.ValueOf(config)})
+				evaluatedValue := evaluatedResult[0].Interface().(string)
+				expandedVar, err := expandVarInternal(evaluatedValue, newSeenVars)
+				if err != nil {
+					return ret, err
+				}
+				ret += expandedVar
+			} else {
+				return "", fmt.Errorf("Unbound config variable %s", variable)
+			}
+		}
+		return ret, nil
+	}
+	var ret []string
+	stringFields := splitStringKeepingQuotedSubstring(toExpand, ' ')
+	for _, v := range stringFields {
+		val, err := expandVarInternal(v, map[string]bool{})
+		if err != nil {
+			return ret, err
+		}
+		ret = append(ret, val)
+	}
+
+	return ret, nil
+}
+
+// splitStringKeepingQuotedSubstring splits a string on the provided delimiter,
+// but does not split inside unescaped double quotes. If a double quote is
+// escaped, the returned substring includes the quote but not the escape
+// character.
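+// For example (illustrative), splitting `a "b c" d` on ' ' yields
+// ["a", `"b c"`, "d"], while the escaped form `a \"b c\" d` yields
+// ["a", `"b`, `c"`, "d"].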
+func splitStringKeepingQuotedSubstring(s string, delimiter byte) []string {
+	var ret []string
+	quote := byte('"')
+
+	var substring []byte
+	quoted := false
+	escaped := false
+
+	for i := range s {
+		if !quoted && s[i] == delimiter {
+			ret = append(ret, string(substring))
+			substring = []byte{}
+			continue
+		}
+
+		characterIsEscape := i < len(s)-1 && s[i] == '\\' && s[i+1] == quote
+		if characterIsEscape {
+			escaped = true
+			continue
+		}
+
+		if s[i] == quote {
+			if !escaped {
+				quoted = !quoted
+			}
+			escaped = false
+		}
+
+		substring = append(substring, s[i])
+	}
+
+	ret = append(ret, string(substring))
+
+	return ret
+}
+
+func validateVariableMethod(name string, methodValue reflect.Value) {
+	methodType := methodValue.Type()
+	if methodType.Kind() != reflect.Func {
+		panic(fmt.Errorf("method given for variable %s is not a function",
+			name))
+	}
+	if n := methodType.NumIn(); n != 1 {
+		panic(fmt.Errorf("method for variable %s has %d inputs (should be 1)",
+			name, n))
+	}
+	if n := methodType.NumOut(); n != 1 {
+		panic(fmt.Errorf("method for variable %s has %d outputs (should be 1)",
+			name, n))
+	}
+	if kind := methodType.Out(0).Kind(); kind != reflect.String {
+		panic(fmt.Errorf("method for variable %s does not return a string",
+			name))
+	}
+}
diff --git a/android/config_bp2build_test.go b/android/config_bp2build_test.go
new file mode 100644
index 0000000..1a0ba7b
--- /dev/null
+++ b/android/config_bp2build_test.go
@@ -0,0 +1,454 @@
+// Copyright 2021 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package android
+
+import (
+	"android/soong/bazel"
+	"testing"
+)
+
+func TestExpandVars(t *testing.T) {
+	android_arm64_config := TestConfig("out", nil, "", nil)
+	android_arm64_config.BuildOS = Android
+	android_arm64_config.BuildArch = Arm64
+
+	testCases := []struct {
+		description     string
+		config          Config
+		stringScope     ExportedStringVariables
+		stringListScope ExportedStringListVariables
+		configVars      ExportedConfigDependingVariables
+		toExpand        string
+		expectedValues  []string
+	}{
+		{
+			description:    "no expansion for non-interpolated value",
+			toExpand:       "foo",
+			expectedValues: []string{"foo"},
+		},
+		{
+			description: "single level expansion for string var",
+			stringScope: ExportedStringVariables{
+				"foo": "bar",
+			},
+			toExpand:       "${foo}",
+			expectedValues: []string{"bar"},
+		},
+		{
+			description: "single level expansion with short-name for string var",
+			stringScope: ExportedStringVariables{
+				"foo": "bar",
+			},
+			toExpand:       "${config.foo}",
+			expectedValues: []string{"bar"},
+		},
+		{
+			description: "single level expansion string list var",
+			stringListScope: ExportedStringListVariables{
+				"foo": []string{"bar"},
+			},
+			toExpand:       "${foo}",
+			expectedValues: []string{"bar"},
+		},
+		{
+			description: "mixed level expansion for string list var",
+			stringScope: ExportedStringVariables{
+				"foo": "${bar}",
+				"qux": "hello",
+			},
+			stringListScope: ExportedStringListVariables{
+				"bar": []string{"baz", "${qux}"},
+			},
+			toExpand:       "${foo}",
+			expectedValues: []string{"baz hello"},
+		},
+		{
+			description: "double level expansion",
+			stringListScope: ExportedStringListVariables{
+				"foo": []string{"${bar}"},
+				"bar": []string{"baz"},
+			},
+			toExpand:       "${foo}",
+			expectedValues: []string{"baz"},
+		},
+		{
+			description: "double level expansion with a literal",
+			stringListScope: ExportedStringListVariables{
+				"a": []string{"${b}", "c"},
+				"b": []string{"d"},
+			},
+			toExpand:       "${a}",
+			expectedValues: []string{"d c"},
+		},
+		{
+			description: "double level expansion, with two variables in a string",
+			stringListScope: ExportedStringListVariables{
+				"a": []string{"${b} ${c}"},
+				"b": []string{"d"},
+				"c": []string{"e"},
+			},
+			toExpand:       "${a}",
+			expectedValues: []string{"d e"},
+		},
+		{
+			description: "triple level expansion with two variables in a string",
+			stringListScope: ExportedStringListVariables{
+				"a": []string{"${b} ${c}"},
+				"b": []string{"${c}", "${d}"},
+				"c": []string{"${d}"},
+				"d": []string{"foo"},
+			},
+			toExpand:       "${a}",
+			expectedValues: []string{"foo foo foo"},
+		},
+		{
+			description: "expansion with config depending vars",
+			configVars: ExportedConfigDependingVariables{
+				"a": func(c Config) string { return c.BuildOS.String() },
+				"b": func(c Config) string { return c.BuildArch.String() },
+			},
+			config:         android_arm64_config,
+			toExpand:       "${a}-${b}",
+			expectedValues: []string{"android-arm64"},
+		},
+		{
+			description: "double level multi type expansion",
+			stringListScope: ExportedStringListVariables{
+				"platform": []string{"${os}-${arch}"},
+				"const":    []string{"const"},
+			},
+			configVars: ExportedConfigDependingVariables{
+				"os":   func(c Config) string { return c.BuildOS.String() },
+				"arch": func(c Config) string { return c.BuildArch.String() },
+				"foo":  func(c Config) string { return "foo" },
+			},
+			config:         android_arm64_config,
+			toExpand:       "${const}/${platform}/${foo}",
+			expectedValues: []string{"const/android-arm64/foo"},
+		},
+	}
+
+	for _, testCase := range testCases {
+		t.Run(testCase.description, func(t *testing.T) {
+			output, _ := expandVar(testCase.config, testCase.toExpand, testCase.stringScope, testCase.stringListScope, testCase.configVars)
+			if len(output) != len(testCase.expectedValues) {
+				t.Errorf("Expected %d values, got %d", len(testCase.expectedValues), len(output))
+			}
+			for i, actual := range output {
+				expectedValue := testCase.expectedValues[i]
+				if actual != expectedValue {
+					t.Errorf("Actual value '%s' doesn't match expected value '%s'", actual, expectedValue)
+				}
+			}
+		})
+	}
+}
+
+func TestBazelToolchainVars(t *testing.T) {
+	testCases := []struct {
+		name        string
+		config      Config
+		vars        ExportedVariables
+		expectedOut string
+	}{
+		{
+			name: "exports strings",
+			vars: ExportedVariables{
+				exportedStringVars: ExportedStringVariables{
+					"a": "b",
+					"c": "d",
+				},
+			},
+			expectedOut: bazel.GeneratedBazelFileWarning + `
+
+_a = "b"
+
+_c = "d"
+
+constants = struct(
+    a = _a,
+    c = _c,
+)`,
+		},
+		{
+			name: "exports string lists",
+			vars: ExportedVariables{
+				exportedStringListVars: ExportedStringListVariables{
+					"a": []string{"b1", "b2"},
+					"c": []string{"d1", "d2"},
+				},
+			},
+			expectedOut: bazel.GeneratedBazelFileWarning + `
+
+_a = [
+    "b1",
+    "b2",
+]
+
+_c = [
+    "d1",
+    "d2",
+]
+
+constants = struct(
+    a = _a,
+    c = _c,
+)`,
+		},
+		{
+			name: "exports string lists dicts",
+			vars: ExportedVariables{
+				exportedStringListDictVars: ExportedStringListDictVariables{
+					"a": map[string][]string{"b1": {"b2"}},
+					"c": map[string][]string{"d1": {"d2"}},
+				},
+			},
+			expectedOut: bazel.GeneratedBazelFileWarning + `
+
+_a = {
+    "b1": ["b2"],
+}
+
+_c = {
+    "d1": ["d2"],
+}
+
+constants = struct(
+    a = _a,
+    c = _c,
+)`,
+		},
+		{
+			name: "exports dict with var refs",
+			vars: ExportedVariables{
+				exportedVariableReferenceDictVars: ExportedVariableReferenceDictVariables{
+					"a": map[string]string{"b1": "${b2}"},
+					"c": map[string]string{"d1": "${config.d2}"},
+				},
+			},
+			expectedOut: bazel.GeneratedBazelFileWarning + `
+
+_a = {
+    "b1": _b2,
+}
+
+_c = {
+    "d1": _d2,
+}
+
+constants = struct(
+    a = _a,
+    c = _c,
+)`,
+		},
+		{
+			name: "sorts across types with variable references last",
+			vars: ExportedVariables{
+				exportedStringVars: ExportedStringVariables{
+					"b": "b-val",
+					"d": "d-val",
+				},
+				exportedStringListVars: ExportedStringListVariables{
+					"c": []string{"c-val"},
+					"e": []string{"e-val"},
+				},
+				exportedStringListDictVars: ExportedStringListDictVariables{
+					"a": map[string][]string{"a1": {"a2"}},
+					"f": map[string][]string{"f1": {"f2"}},
+				},
+				exportedVariableReferenceDictVars: ExportedVariableReferenceDictVariables{
+					"aa": map[string]string{"b1": "${b}"},
+					"cc": map[string]string{"d1": "${config.d}"},
+				},
+			},
+			expectedOut: bazel.GeneratedBazelFileWarning + `
+
+_a = {
+    "a1": ["a2"],
+}
+
+_b = "b-val"
+
+_c = ["c-val"]
+
+_d = "d-val"
+
+_e = ["e-val"]
+
+_f = {
+    "f1": ["f2"],
+}
+
+_aa = {
+    "b1": _b,
+}
+
+_cc = {
+    "d1": _d,
+}
+
+constants = struct(
+    a = _a,
+    b = _b,
+    c = _c,
+    d = _d,
+    e = _e,
+    f = _f,
+    aa = _aa,
+    cc = _cc,
+)`,
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			out := BazelToolchainVars(tc.config, tc.vars)
+			if out != tc.expectedOut {
+				t.Errorf("Expected \n%s, got \n%s", tc.expectedOut, out)
+			}
+		})
+	}
+}
+
+func TestSplitStringKeepingQuotedSubstring(t *testing.T) {
+	testCases := []struct {
+		description string
+		s           string
+		delimiter   byte
+		split       []string
+	}{
+		{
+			description: "empty string returns single empty string",
+			s:           "",
+			delimiter:   ' ',
+			split: []string{
+				"",
+			},
+		},
+		{
+			description: "string with single space returns two empty strings",
+			s:           " ",
+			delimiter:   ' ',
+			split: []string{
+				"",
+				"",
+			},
+		},
+		{
+			description: "string with two spaces returns three empty strings",
+			s:           "  ",
+			delimiter:   ' ',
+			split: []string{
+				"",
+				"",
+				"",
+			},
+		},
+		{
+			description: "string with four words returns four word string",
+			s:           "hello world with words",
+			delimiter:   ' ',
+			split: []string{
+				"hello",
+				"world",
+				"with",
+				"words",
+			},
+		},
+		{
+			description: "string with words and nested quote returns word strings and quote string",
+			s:           `hello "world with" words`,
+			delimiter:   ' ',
+			split: []string{
+				"hello",
+				`"world with"`,
+				"words",
+			},
+		},
+		{
+			description: "string with escaped quote inside real quotes",
+			s:           `hello \"world "with\" words"`,
+			delimiter:   ' ',
+			split: []string{
+				"hello",
+				`"world`,
+				`"with" words"`,
+			},
+		},
+		{
+			description: "string with words and escaped quotes returns word strings",
+			s:           `hello \"world with\" words`,
+			delimiter:   ' ',
+			split: []string{
+				"hello",
+				`"world`,
+				`with"`,
+				"words",
+			},
+		},
+		{
+			description: "string which is single quoted substring returns only substring",
+			s:           `"hello world with words"`,
+			delimiter:   ' ',
+			split: []string{
+				`"hello world with words"`,
+			},
+		},
+		{
+			description: "string starting with quote returns quoted string",
+			s:           `"hello world with" words`,
+			delimiter:   ' ',
+			split: []string{
+				`"hello world with"`,
+				"words",
+			},
+		},
+		{
+			description: "string with starting quote and no ending quote returns quote to end of string",
+			s:           `hello "world with words`,
+			delimiter:   ' ',
+			split: []string{
+				"hello",
+				`"world with words`,
+			},
+		},
+		{
+			description: "quoted string is treated as a single \"word\" unless separated by delimiter",
+			s:           `hello "world"with words`,
+			delimiter:   ' ',
+			split: []string{
+				"hello",
+				`"world"with`,
+				"words",
+			},
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.description, func(t *testing.T) {
+			split := splitStringKeepingQuotedSubstring(tc.s, tc.delimiter)
+			if len(split) != len(tc.split) {
+				t.Fatalf("number of split string elements (%d) differs from expected (%d): split string (%v), expected (%v)",
+					len(split), len(tc.split), split, tc.split,
+				)
+			}
+			for i := range split {
+				if split[i] != tc.split[i] {
+					t.Errorf("split string element (%d), %v, differs from expected, %v", i, split[i], tc.split[i])
+				}
+			}
+		})
+	}
+}
diff --git a/android/filegroup.go b/android/filegroup.go
index c932ffa..50356d1 100644
--- a/android/filegroup.go
+++ b/android/filegroup.go
@@ -18,6 +18,8 @@
 	"strings"
 
 	"android/soong/bazel"
+
+	"github.com/google/blueprint"
 )
 
 func init() {
@@ -28,6 +30,11 @@
 	ctx.RegisterModuleType("filegroup", FileGroupFactory)
 })
 
+// IsFilegroup checks whether a module is a filegroup type
+func IsFilegroup(ctx bazel.OtherModuleContext, m blueprint.Module) bool {
+	return ctx.OtherModuleType(m) == "filegroup"
+}
+
 // https://docs.bazel.build/versions/master/be/general.html#filegroup
 type bazelFilegroupAttributes struct {
 	Srcs bazel.LabelListAttribute
@@ -112,12 +119,12 @@
 		return
 	}
 
-	archVariant := ctx.Arch().ArchType
+	archVariant := ctx.Arch().String()
 	osVariant := ctx.Os()
 	if len(fg.Srcs()) == 1 && fg.Srcs()[0].Base() == fg.Name() {
 		// This will be a regular file target, not filegroup, in Bazel.
 		// See FilegroupBp2Build for more information.
-		archVariant = Common
+		archVariant = Common.String()
 		osVariant = CommonOS
 	}
 
diff --git a/android/hooks.go b/android/hooks.go
index bded764..5e3a4a7 100644
--- a/android/hooks.go
+++ b/android/hooks.go
@@ -15,7 +15,10 @@
 package android
 
 import (
+	"fmt"
+	"path"
 	"reflect"
+	"runtime"
 
 	"github.com/google/blueprint"
 	"github.com/google/blueprint/proptools"
@@ -88,7 +91,19 @@
 
 func (l *loadHookContext) CreateModule(factory ModuleFactory, props ...interface{}) Module {
 	inherited := []interface{}{&l.Module().base().commonProperties}
-	module := l.bp.CreateModule(ModuleFactoryAdaptor(factory), append(inherited, props...)...).(Module)
+
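+	// Derive a module type name for the module created by this load hook: prefer
+	// the registered name of the factory, falling back to the factory function's
+	// source file and symbol name.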
+	var typeName string
+	if typeNameLookup, ok := ModuleTypeByFactory()[reflect.ValueOf(factory)]; ok {
+		typeName = typeNameLookup
+	} else {
+		factoryPtr := reflect.ValueOf(factory).Pointer()
+		factoryFunc := runtime.FuncForPC(factoryPtr)
+		filePath, _ := factoryFunc.FileLine(factoryPtr)
+		typeName = fmt.Sprintf("%s_%s", path.Base(filePath), factoryFunc.Name())
+	}
+	typeName = typeName + "_loadHookModule"
+
+	module := l.bp.CreateModule(ModuleFactoryAdaptor(factory), typeName, append(inherited, props...)...).(Module)
 
 	if l.Module().base().variableProperties != nil && module.base().variableProperties != nil {
 		src := l.Module().base().variableProperties
diff --git a/android/licenses.go b/android/licenses.go
index b51a06b..bd14b26 100644
--- a/android/licenses.go
+++ b/android/licenses.go
@@ -333,4 +333,8 @@
 	ctx.Strict("HTMLNOTICE", ctx.Config().HostToolPath(ctx, "htmlnotice").String())
 	ctx.Strict("XMLNOTICE", ctx.Config().HostToolPath(ctx, "xmlnotice").String())
 	ctx.Strict("TEXTNOTICE", ctx.Config().HostToolPath(ctx, "textnotice").String())
+	ctx.Strict("COMPLIANCENOTICE_BOM", ctx.Config().HostToolPath(ctx, "compliancenotice_bom").String())
+	ctx.Strict("COMPLIANCENOTICE_SHIPPEDLIBS", ctx.Config().HostToolPath(ctx, "compliancenotice_shippedlibs").String())
+	ctx.Strict("COMPLIANCE_LISTSHARE", ctx.Config().HostToolPath(ctx, "compliance_listshare").String())
+	ctx.Strict("COMPLIANCE_CHECKSHARE", ctx.Config().HostToolPath(ctx, "compliance_checkshare").String())
 }
diff --git a/android/metrics.go b/android/metrics.go
index 2cd5efa..9038bde 100644
--- a/android/metrics.go
+++ b/android/metrics.go
@@ -18,6 +18,7 @@
 	"io/ioutil"
 	"runtime"
 
+	"github.com/google/blueprint/metrics"
 	"google.golang.org/protobuf/proto"
 
 	soong_metrics_proto "android/soong/ui/metrics/metrics_proto"
@@ -55,7 +56,7 @@
 	})
 }
 
-func collectMetrics(config Config) *soong_metrics_proto.SoongBuildMetrics {
+func collectMetrics(config Config, eventHandler metrics.EventHandler) *soong_metrics_proto.SoongBuildMetrics {
 	metrics := &soong_metrics_proto.SoongBuildMetrics{}
 
 	soongMetrics := ReadSoongMetrics(config)
@@ -68,11 +69,21 @@
 	metrics.TotalAllocCount = proto.Uint64(memStats.Mallocs)
 	metrics.TotalAllocSize = proto.Uint64(memStats.TotalAlloc)
 
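+	// Record each completed build event as a PerfInfo entry in the metrics proto.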
+	for _, event := range eventHandler.CompletedEvents() {
+		perfInfo := soong_metrics_proto.PerfInfo{
+			Description: proto.String(event.Id),
+			Name:        proto.String("soong_build"),
+			StartTime:   proto.Uint64(uint64(event.Start.UnixNano())),
+			RealTime:    proto.Uint64(event.RuntimeNanoseconds()),
+		}
+		metrics.Events = append(metrics.Events, &perfInfo)
+	}
+
 	return metrics
 }
 
-func WriteMetrics(config Config, metricsFile string) error {
-	metrics := collectMetrics(config)
+func WriteMetrics(config Config, eventHandler metrics.EventHandler, metricsFile string) error {
+	metrics := collectMetrics(config, eventHandler)
 
 	buf, err := proto.Marshal(metrics)
 	if err != nil {
diff --git a/android/module.go b/android/module.go
index 03d3f80..ab68e24 100644
--- a/android/module.go
+++ b/android/module.go
@@ -456,6 +456,10 @@
 	// GetMissingDependencies returns the list of dependencies that were passed to AddDependencies or related methods,
 	// but do not exist.
 	GetMissingDependencies() []string
+
+	// LicenseMetadataFile returns the path where the license metadata for this module will be
+	// generated.
+	LicenseMetadataFile() Path
 }
 
 type Module interface {
@@ -609,6 +613,12 @@
 	// A suffix to add to the artifact file name (before any extension).
 	Suffix *string `android:"arch_variant"`
 
+	// If true, then the artifact file name has _<product name> appended. For
+	// example, if the product is coral and the module is an android_app module
+	// named foo, then the artifact would be foo_coral.apk. If false, the
+	// artifact file name is unchanged.
+	Append_artifact_with_product *bool `android:"arch_variant"`
+
 	// A string tag to select the OutputFiles associated with the tag.
 	//
 	// If no tag is specified then it will select the default dist paths provided
@@ -1460,8 +1470,10 @@
 }
 
 type propInfo struct {
-	Name string
-	Type string
+	Name   string
+	Type   string
+	Value  string
+	Values []string
 }
 
 func (m *ModuleBase) propertiesWithValues() []propInfo {
@@ -1501,18 +1513,60 @@
 				return
 			}
 			elKind := v.Type().Elem().Kind()
-			info = append(info, propInfo{name, elKind.String() + " " + kind.String()})
+			info = append(info, propInfo{Name: name, Type: elKind.String() + " " + kind.String(), Values: sliceReflectionValue(v)})
 		default:
-			info = append(info, propInfo{name, kind.String()})
+			info = append(info, propInfo{Name: name, Type: kind.String(), Value: reflectionValue(v)})
 		}
 	}
 
 	for _, p := range props {
 		propsWithValues("", reflect.ValueOf(p).Elem())
 	}
+	sort.Slice(info, func(i, j int) bool {
+		return info[i].Name < info[j].Name
+	})
 	return info
 }
 
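+// reflectionValue returns a human-readable string form of a reflected property
+// value, e.g. "true" for a bool, "[a, b]" for a slice of strings, and a
+// "pkg.Type{Field: value}" form for structs.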
+func reflectionValue(value reflect.Value) string {
+	switch value.Kind() {
+	case reflect.Bool:
+		return fmt.Sprintf("%t", value.Bool())
+	case reflect.Int64:
+		return fmt.Sprintf("%d", value.Int())
+	case reflect.String:
+		return fmt.Sprintf("%s", value.String())
+	case reflect.Struct:
+		if value.IsZero() {
+			return "{}"
+		}
+		length := value.NumField()
+		vals := make([]string, length, length)
+		for i := 0; i < length; i++ {
+			sTyp := value.Type().Field(i)
+			if proptools.ShouldSkipProperty(sTyp) {
+				continue
+			}
+			name := sTyp.Name
+			vals[i] = fmt.Sprintf("%s: %s", name, reflectionValue(value.Field(i)))
+		}
+		return fmt.Sprintf("%s{%s}", value.Type(), strings.Join(vals, ", "))
+	case reflect.Array, reflect.Slice:
+		vals := sliceReflectionValue(value)
+		return fmt.Sprintf("[%s]", strings.Join(vals, ", "))
+	}
+	return ""
+}
+
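+// sliceReflectionValue returns the string form of each element of a reflected
+// slice or array, one entry per element.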
+func sliceReflectionValue(value reflect.Value) []string {
+	length := value.Len()
+	vals := make([]string, length, length)
+	for i := 0; i < length; i++ {
+		vals[i] = reflectionValue(value.Index(i))
+	}
+	return vals
+}
+
 func (m *ModuleBase) ComponentDepsMutator(BottomUpMutatorContext) {}
 
 func (m *ModuleBase) DepsMutator(BottomUpMutatorContext) {}
@@ -3109,6 +3163,7 @@
 		symlinkTarget:         "",
 		executable:            executable,
 		effectiveLicenseFiles: &licenseFiles,
+		partition:             fullInstallPath.partition,
 	}
 	m.packagingSpecs = append(m.packagingSpecs, spec)
 	return spec
@@ -3226,6 +3281,7 @@
 		srcPath:          nil,
 		symlinkTarget:    relPath,
 		executable:       false,
+		partition:        fullInstallPath.partition,
 	})
 
 	return fullInstallPath
@@ -3266,6 +3322,7 @@
 		srcPath:          nil,
 		symlinkTarget:    absPath,
 		executable:       false,
+		partition:        fullInstallPath.partition,
 	})
 
 	return fullInstallPath
@@ -3279,6 +3336,10 @@
 	return m.bp
 }
 
+func (m *moduleContext) LicenseMetadataFile() Path {
+	return m.module.base().licenseMetadataFile
+}
+
 // SrcIsModule decodes module references in the format ":unqualified-name" or "//namespace:name"
 // into the module name, or empty string if the input was not a module reference.
 func SrcIsModule(s string) (module string) {
@@ -3673,6 +3734,8 @@
 	Installed_paths   []string `json:"installed,omitempty"`
 	SrcJars           []string `json:"srcjars,omitempty"`
 	Paths             []string `json:"path,omitempty"`
+	Static_libs       []string `json:"static_libs,omitempty"`
+	Libs              []string `json:"libs,omitempty"`
 }
 
 func CheckBlueprintSyntax(ctx BaseModuleContext, filename string, contents string) []error {
diff --git a/android/module_test.go b/android/module_test.go
index a1bab6d..77ef146 100644
--- a/android/module_test.go
+++ b/android/module_test.go
@@ -15,12 +15,9 @@
 package android
 
 import (
-	"bytes"
 	"path/filepath"
 	"runtime"
 	"testing"
-
-	mkparser "android/soong/androidmk/parser"
 )
 
 func TestSrcIsModule(t *testing.T) {
@@ -475,21 +472,10 @@
 		prepareForModuleTests,
 		PrepareForTestWithArchMutator,
 		FixtureModifyConfig(SetKatiEnabledForTests),
-		FixtureRegisterWithContext(func(ctx RegistrationContext) {
-			ctx.RegisterSingletonType("makevars", makeVarsSingletonFunc)
-		}),
+		PrepareForTestWithMakevars,
 	).RunTestWithBp(t, bp)
 
-	installs := result.SingletonForTests("makevars").Singleton().(*makeVarsSingleton).installsForTesting
-	buf := bytes.NewBuffer(append([]byte(nil), installs...))
-	parser := mkparser.NewParser("makevars", buf)
-
-	nodes, errs := parser.Parse()
-	if len(errs) > 0 {
-		t.Fatalf("error parsing install rules: %s", errs[0])
-	}
-
-	rules := parseMkRules(t, result.Config, nodes)
+	rules := result.InstallMakeRulesForTesting(t)
 
 	module := func(name string, host bool) TestingModule {
 		variant := "android_common"
@@ -501,125 +487,88 @@
 
 	outputRule := func(name string) TestingBuildParams { return module(name, false).Output(name) }
 
-	ruleForOutput := func(output string) installMakeRule {
+	ruleForOutput := func(output string) InstallMakeRule {
 		for _, rule := range rules {
-			if rule.target == output {
+			if rule.Target == output {
 				return rule
 			}
 		}
 		t.Fatalf("no make install rule for %s", output)
-		return installMakeRule{}
+		return InstallMakeRule{}
 	}
 
-	installRule := func(name string) installMakeRule {
+	installRule := func(name string) InstallMakeRule {
 		return ruleForOutput(filepath.Join("out/target/product/test_device/system", name))
 	}
 
-	symlinkRule := func(name string) installMakeRule {
+	symlinkRule := func(name string) InstallMakeRule {
 		return ruleForOutput(filepath.Join("out/target/product/test_device/system/symlinks", name))
 	}
 
 	hostOutputRule := func(name string) TestingBuildParams { return module(name, true).Output(name) }
 
-	hostInstallRule := func(name string) installMakeRule {
+	hostInstallRule := func(name string) InstallMakeRule {
 		return ruleForOutput(filepath.Join("out/host/linux-x86", name))
 	}
 
-	hostSymlinkRule := func(name string) installMakeRule {
+	hostSymlinkRule := func(name string) InstallMakeRule {
 		return ruleForOutput(filepath.Join("out/host/linux-x86/symlinks", name))
 	}
 
-	assertDeps := func(rule installMakeRule, deps ...string) {
+	assertDeps := func(rule InstallMakeRule, deps ...string) {
 		t.Helper()
-		AssertArrayString(t, "expected inputs", deps, rule.deps)
+		AssertArrayString(t, "expected inputs", deps, rule.Deps)
 	}
 
-	assertOrderOnlys := func(rule installMakeRule, orderonlys ...string) {
+	assertOrderOnlys := func(rule InstallMakeRule, orderonlys ...string) {
 		t.Helper()
-		AssertArrayString(t, "expected orderonly dependencies", orderonlys, rule.orderOnlyDeps)
+		AssertArrayString(t, "expected orderonly dependencies", orderonlys, rule.OrderOnlyDeps)
 	}
 
 	// Check host install rule dependencies
 	assertDeps(hostInstallRule("foo"),
 		hostOutputRule("foo").Output.String(),
-		hostInstallRule("bar").target,
-		hostSymlinkRule("bar").target,
-		hostInstallRule("baz").target,
-		hostSymlinkRule("baz").target,
-		hostInstallRule("qux").target,
-		hostSymlinkRule("qux").target,
+		hostInstallRule("bar").Target,
+		hostSymlinkRule("bar").Target,
+		hostInstallRule("baz").Target,
+		hostSymlinkRule("baz").Target,
+		hostInstallRule("qux").Target,
+		hostSymlinkRule("qux").Target,
 	)
 	assertOrderOnlys(hostInstallRule("foo"))
 
 	// Check host symlink rule dependencies.  Host symlinks must use a normal dependency, not an
 	// order-only dependency, so that the tool gets updated when the symlink is depended on.
-	assertDeps(hostSymlinkRule("foo"), hostInstallRule("foo").target)
+	assertDeps(hostSymlinkRule("foo"), hostInstallRule("foo").Target)
 	assertOrderOnlys(hostSymlinkRule("foo"))
 
 	// Check device install rule dependencies
 	assertDeps(installRule("foo"), outputRule("foo").Output.String())
 	assertOrderOnlys(installRule("foo"),
-		installRule("bar").target,
-		symlinkRule("bar").target,
-		installRule("baz").target,
-		symlinkRule("baz").target,
-		installRule("qux").target,
-		symlinkRule("qux").target,
+		installRule("bar").Target,
+		symlinkRule("bar").Target,
+		installRule("baz").Target,
+		symlinkRule("baz").Target,
+		installRule("qux").Target,
+		symlinkRule("qux").Target,
 	)
 
 	// Check device symlink rule dependencies.  Device symlinks could use an order-only dependency,
 	// but the current implementation uses a normal dependency.
-	assertDeps(symlinkRule("foo"), installRule("foo").target)
+	assertDeps(symlinkRule("foo"), installRule("foo").Target)
 	assertOrderOnlys(symlinkRule("foo"))
 }
 
-type installMakeRule struct {
-	target        string
-	deps          []string
-	orderOnlyDeps []string
-}
-
-func parseMkRules(t *testing.T, config Config, nodes []mkparser.Node) []installMakeRule {
-	var rules []installMakeRule
-	for _, node := range nodes {
-		if mkParserRule, ok := node.(*mkparser.Rule); ok {
-			var rule installMakeRule
-
-			if targets := mkParserRule.Target.Words(); len(targets) == 0 {
-				t.Fatalf("no targets for rule %s", mkParserRule.Dump())
-			} else if len(targets) > 1 {
-				t.Fatalf("unsupported multiple targets for rule %s", mkParserRule.Dump())
-			} else if !targets[0].Const() {
-				t.Fatalf("unsupported non-const target for rule %s", mkParserRule.Dump())
-			} else {
-				rule.target = normalizeStringRelativeToTop(config, targets[0].Value(nil))
-			}
-
-			prereqList := &rule.deps
-			for _, prereq := range mkParserRule.Prerequisites.Words() {
-				if !prereq.Const() {
-					t.Fatalf("unsupported non-const prerequisite for rule %s", mkParserRule.Dump())
-				}
-
-				if prereq.Value(nil) == "|" {
-					prereqList = &rule.orderOnlyDeps
-					continue
-				}
-
-				*prereqList = append(*prereqList, normalizeStringRelativeToTop(config, prereq.Value(nil)))
-			}
-
-			rules = append(rules, rule)
-		}
-	}
-
-	return rules
-}
-
 type PropsTestModuleEmbedded struct {
 	Embedded_prop *string
 }
 
+type StructInSlice struct {
+	G string
+	H bool
+	I []string
+}
+
 type propsTestModule struct {
 	ModuleBase
 	DefaultableModuleBase
@@ -636,6 +585,8 @@
 			E *string
 		}
 		F *string `blueprint:"mutated"`
+
+		Slice_of_struct []StructInSlice
 	}
 }
 
@@ -678,7 +629,7 @@
 		}
 	`,
 			expectedProps: []propInfo{
-				propInfo{"Name", "string"},
+				propInfo{Name: "Name", Type: "string", Value: "foo"},
 			},
 		},
 		{
@@ -691,10 +642,10 @@
 		}
 	`,
 			expectedProps: []propInfo{
-				propInfo{"A", "string"},
-				propInfo{"B", "bool"},
-				propInfo{"D", "int64"},
-				propInfo{"Name", "string"},
+				propInfo{Name: "A", Type: "string", Value: "abc"},
+				propInfo{Name: "B", Type: "bool", Value: "true"},
+				propInfo{Name: "D", Type: "int64", Value: "123"},
+				propInfo{Name: "Name", Type: "string", Value: "foo"},
 			},
 		},
 		{
@@ -707,10 +658,10 @@
 	`,
 			expectedProps: []propInfo{
 				// for non-pointer cannot distinguish between unused and intentionally set to empty
-				propInfo{"A", "string"},
-				propInfo{"B", "bool"},
-				propInfo{"D", "int64"},
-				propInfo{"Name", "string"},
+				propInfo{Name: "A", Type: "string", Value: ""},
+				propInfo{Name: "B", Type: "bool", Value: "true"},
+				propInfo{Name: "D", Type: "int64", Value: "123"},
+				propInfo{Name: "Name", Type: "string", Value: "foo"},
 			},
 		},
 		{
@@ -723,8 +674,8 @@
 		}
 	`,
 			expectedProps: []propInfo{
-				propInfo{"Nested.E", "string"},
-				propInfo{"Name", "string"},
+				propInfo{Name: "Name", Type: "string", Value: "foo"},
+				propInfo{Name: "Nested.E", Type: "string", Value: "abc"},
 			},
 		},
 		{
@@ -739,8 +690,8 @@
 		}
 	`,
 			expectedProps: []propInfo{
-				propInfo{"Name", "string"},
-				propInfo{"Arch.X86_64.A", "string"},
+				propInfo{Name: "Arch.X86_64.A", Type: "string", Value: "abc"},
+				propInfo{Name: "Name", Type: "string", Value: "foo"},
 			},
 		},
 		{
@@ -751,8 +702,34 @@
 		}
 	`,
 			expectedProps: []propInfo{
-				propInfo{"Embedded_prop", "string"},
-				propInfo{"Name", "string"},
+				propInfo{Name: "Embedded_prop", Type: "string", Value: "a"},
+				propInfo{Name: "Name", Type: "string", Value: "foo"},
+			},
+		},
+		{
+			desc: "struct slice",
+			bp: `test {
+			name: "foo",
+			slice_of_struct: [
+				{
+					g: "abc",
+					h: false,
+					i: ["baz"],
+				},
+				{
+					g: "def",
+					h: true,
+					i: [],
+				},
+			]
+		}
+	`,
+			expectedProps: []propInfo{
+				propInfo{Name: "Name", Type: "string", Value: "foo"},
+				propInfo{Name: "Slice_of_struct", Type: "struct slice", Values: []string{
+					`android.StructInSlice{G: abc, H: false, I: [baz]}`,
+					`android.StructInSlice{G: def, H: true, I: []}`,
+				}},
 			},
 		},
 		{
@@ -762,19 +739,20 @@
 	name: "foo_defaults",
 	a: "a",
 	b: true,
+	c: ["default_c"],
 	embedded_prop:"a",
 	arch: {
 		x86_64: {
-			a: "a",
+			a: "x86_64 a",
 		},
 	},
 }
 test {
 	name: "foo",
 	defaults: ["foo_defaults"],
-	c: ["a"],
+	c: ["c"],
 	nested: {
-		e: "d",
+		e: "nested e",
 	},
 	target: {
 		linux: {
@@ -784,15 +762,15 @@
 }
 	`,
 			expectedProps: []propInfo{
-				propInfo{"A", "string"},
-				propInfo{"B", "bool"},
-				propInfo{"C", "string slice"},
-				propInfo{"Embedded_prop", "string"},
-				propInfo{"Nested.E", "string"},
-				propInfo{"Name", "string"},
-				propInfo{"Arch.X86_64.A", "string"},
-				propInfo{"Target.Linux.A", "string"},
-				propInfo{"Defaults", "string slice"},
+				propInfo{Name: "A", Type: "string", Value: "a"},
+				propInfo{Name: "Arch.X86_64.A", Type: "string", Value: "x86_64 a"},
+				propInfo{Name: "B", Type: "bool", Value: "true"},
+				propInfo{Name: "C", Type: "string slice", Values: []string{"default_c", "c"}},
+				propInfo{Name: "Defaults", Type: "string slice", Values: []string{"foo_defaults"}},
+				propInfo{Name: "Embedded_prop", Type: "string", Value: "a"},
+				propInfo{Name: "Name", Type: "string", Value: "foo"},
+				propInfo{Name: "Nested.E", Type: "string", Value: "nested e"},
+				propInfo{Name: "Target.Linux.A", Type: "string", Value: "a"},
 			},
 		},
 	}
diff --git a/android/neverallow.go b/android/neverallow.go
index 6f9ae58..aa47bca 100644
--- a/android/neverallow.go
+++ b/android/neverallow.go
@@ -57,6 +57,7 @@
 	AddNeverAllowRules(createUncompressDexRules()...)
 	AddNeverAllowRules(createMakefileGoalRules()...)
 	AddNeverAllowRules(createInitFirstStageRules()...)
+	AddNeverAllowRules(createProhibitFrameworkAccessRules()...)
 }
 
 // Add a NeverAllow rule to the set of rules to apply.
@@ -228,6 +229,15 @@
 	}
 }
 
+func createProhibitFrameworkAccessRules() []Rule {
+	return []Rule{
+		NeverAllow().
+			With("libs", "framework").
+			WithoutMatcher("sdk_version", Regexp("(core_.*|^$)")).
+			Because("framework can't be used when building against SDK"),
+	}
+}
+
 func neverallowMutator(ctx BottomUpMutatorContext) {
 	m, ok := ctx.Module().(Module)
 	if !ok {
@@ -249,7 +259,7 @@
 			continue
 		}
 
-		if !n.appliesToProperties(ctx, properties) {
+		if !n.appliesToProperties(properties) {
 			continue
 		}
 
@@ -261,20 +271,12 @@
 			continue
 		}
 
-		if !n.appliesToBootclasspathJar(ctx) {
-			continue
-		}
-
 		ctx.ModuleErrorf("violates " + n.String())
 	}
 }
 
-type ValueMatcherContext interface {
-	Config() Config
-}
-
 type ValueMatcher interface {
-	Test(ValueMatcherContext, string) bool
+	Test(string) bool
 	String() string
 }
 
@@ -282,7 +284,7 @@
 	expected string
 }
 
-func (m *equalMatcher) Test(ctx ValueMatcherContext, value string) bool {
+func (m *equalMatcher) Test(value string) bool {
 	return m.expected == value
 }
 
@@ -293,7 +295,7 @@
 type anyMatcher struct {
 }
 
-func (m *anyMatcher) Test(ctx ValueMatcherContext, value string) bool {
+func (m *anyMatcher) Test(value string) bool {
 	return true
 }
 
@@ -307,7 +309,7 @@
 	prefix string
 }
 
-func (m *startsWithMatcher) Test(ctx ValueMatcherContext, value string) bool {
+func (m *startsWithMatcher) Test(value string) bool {
 	return strings.HasPrefix(value, m.prefix)
 }
 
@@ -319,7 +321,7 @@
 	re *regexp.Regexp
 }
 
-func (m *regexMatcher) Test(ctx ValueMatcherContext, value string) bool {
+func (m *regexMatcher) Test(value string) bool {
 	return m.re.MatchString(value)
 }
 
@@ -331,7 +333,7 @@
 	allowed []string
 }
 
-func (m *notInListMatcher) Test(ctx ValueMatcherContext, value string) bool {
+func (m *notInListMatcher) Test(value string) bool {
 	return !InList(value, m.allowed)
 }
 
@@ -341,7 +343,7 @@
 
 type isSetMatcher struct{}
 
-func (m *isSetMatcher) Test(ctx ValueMatcherContext, value string) bool {
+func (m *isSetMatcher) Test(value string) bool {
 	return value != ""
 }
 
@@ -351,19 +353,6 @@
 
 var isSetMatcherInstance = &isSetMatcher{}
 
-type sdkVersionMatcher struct {
-	condition   func(ctx ValueMatcherContext, spec SdkSpec) bool
-	description string
-}
-
-func (m *sdkVersionMatcher) Test(ctx ValueMatcherContext, value string) bool {
-	return m.condition(ctx, SdkSpecFromWithConfig(ctx.Config(), value))
-}
-
-func (m *sdkVersionMatcher) String() string {
-	return ".sdk-version(" + m.description + ")"
-}
-
 type ruleProperty struct {
 	fields  []string // e.x.: Vndk.Enabled
 	matcher ValueMatcher
@@ -397,8 +386,6 @@
 
 	NotModuleType(types ...string) Rule
 
-	BootclasspathJar() Rule
-
 	With(properties, value string) Rule
 
 	WithMatcher(properties string, matcher ValueMatcher) Rule
@@ -514,12 +501,6 @@
 	return r
 }
 
-// BootclasspathJar whether this rule only applies to Jars in the Bootclasspath
-func (r *rule) BootclasspathJar() Rule {
-	r.onlyBootclasspathJar = true
-	return r
-}
-
 func (r *rule) String() string {
 	s := []string{"neverallow requirements. Not allowed:"}
 	if len(r.paths) > 0 {
@@ -537,9 +518,6 @@
 	if len(r.osClasses) > 0 {
 		s = append(s, fmt.Sprintf("os class(es): %q", r.osClasses))
 	}
-	if r.onlyBootclasspathJar {
-		s = append(s, "in bootclasspath jar")
-	}
 	if len(r.unlessPaths) > 0 {
 		s = append(s, fmt.Sprintf("EXCEPT in dirs: %q", r.unlessPaths))
 	}
@@ -580,14 +558,6 @@
 	return matches
 }
 
-func (r *rule) appliesToBootclasspathJar(ctx BottomUpMutatorContext) bool {
-	if !r.onlyBootclasspathJar {
-		return true
-	}
-
-	return InList(ctx.ModuleName(), ctx.Config().BootJars())
-}
-
 func (r *rule) appliesToOsClass(osClass OsClass) bool {
 	if len(r.osClasses) == 0 {
 		return true
@@ -606,10 +576,9 @@
 	return (len(r.moduleTypes) == 0 || InList(moduleType, r.moduleTypes)) && !InList(moduleType, r.unlessModuleTypes)
 }
 
-func (r *rule) appliesToProperties(ctx ValueMatcherContext,
-	properties []interface{}) bool {
-	includeProps := hasAllProperties(ctx, properties, r.props)
-	excludeProps := hasAnyProperty(ctx, properties, r.unlessProps)
+func (r *rule) appliesToProperties(properties []interface{}) bool {
+	includeProps := hasAllProperties(properties, r.props)
+	excludeProps := hasAnyProperty(properties, r.unlessProps)
 	return includeProps && !excludeProps
 }
 
@@ -629,16 +598,6 @@
 	return &notInListMatcher{allowed}
 }
 
-func LessThanSdkVersion(sdk string) ValueMatcher {
-	return &sdkVersionMatcher{
-		condition: func(ctx ValueMatcherContext, spec SdkSpec) bool {
-			return spec.ApiLevel.LessThan(
-				SdkSpecFromWithConfig(ctx.Config(), sdk).ApiLevel)
-		},
-		description: "lessThan=" + sdk,
-	}
-}
-
 // assorted utils
 
 func cleanPaths(paths []string) []string {
@@ -657,28 +616,25 @@
 	return names
 }
 
-func hasAnyProperty(ctx ValueMatcherContext, properties []interface{},
-	props []ruleProperty) bool {
+func hasAnyProperty(properties []interface{}, props []ruleProperty) bool {
 	for _, v := range props {
-		if hasProperty(ctx, properties, v) {
+		if hasProperty(properties, v) {
 			return true
 		}
 	}
 	return false
 }
 
-func hasAllProperties(ctx ValueMatcherContext, properties []interface{},
-	props []ruleProperty) bool {
+func hasAllProperties(properties []interface{}, props []ruleProperty) bool {
 	for _, v := range props {
-		if !hasProperty(ctx, properties, v) {
+		if !hasProperty(properties, v) {
 			return false
 		}
 	}
 	return true
 }
 
-func hasProperty(ctx ValueMatcherContext, properties []interface{},
-	prop ruleProperty) bool {
+func hasProperty(properties []interface{}, prop ruleProperty) bool {
 	for _, propertyStruct := range properties {
 		propertiesValue := reflect.ValueOf(propertyStruct).Elem()
 		for _, v := range prop.fields {
@@ -692,7 +648,7 @@
 		}
 
 		check := func(value string) bool {
-			return prop.matcher.Test(ctx, value)
+			return prop.matcher.Test(value)
 		}
 
 		if matchValue(propertiesValue, check) {
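With ValueMatcherContext removed above, a custom matcher only has to implement Test(string) and String(). A minimal sketch of what such a matcher could look like, using a hypothetical substring check (containsMatcher and Contains are illustrative names, not part of this change):

type containsMatcher struct {
	substr string
}

// Test reports whether the property value contains the configured substring.
func (m *containsMatcher) Test(value string) bool {
	return strings.Contains(value, m.substr)
}

func (m *containsMatcher) String() string {
	return ".contains(" + m.substr + ")"
}

// Contains returns a ValueMatcher for use with NeverAllow().WithMatcher(...).
func Contains(substr string) ValueMatcher {
	return &containsMatcher{substr}
}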
diff --git a/android/neverallow_test.go b/android/neverallow_test.go
index 59016d4..86f1a37 100644
--- a/android/neverallow_test.go
+++ b/android/neverallow_test.go
@@ -327,46 +327,19 @@
 			"Only boot images may be imported as a makefile goal.",
 		},
 	},
+	// Tests for the rule prohibiting the use of framework
 	{
-		name: "min_sdk too low",
+		name: "prohibit framework",
 		fs: map[string][]byte{
 			"Android.bp": []byte(`
 				java_library {
-					name: "min_sdk_too_low",
-					min_sdk_version: "30",
+					name: "foo",
+					libs: ["framework"],
+					sdk_version: "current",
 				}`),
 		},
-		rules: []Rule{
-			NeverAllow().WithMatcher("min_sdk_version", LessThanSdkVersion("31")),
-		},
 		expectedErrors: []string{
-			"module \"min_sdk_too_low\": violates neverallow",
-		},
-	},
-	{
-		name: "min_sdk high enough",
-		fs: map[string][]byte{
-			"Android.bp": []byte(`
-				java_library {
-					name: "min_sdk_high_enough",
-					min_sdk_version: "31",
-				}`),
-		},
-		rules: []Rule{
-			NeverAllow().WithMatcher("min_sdk_version", LessThanSdkVersion("31")),
-		},
-	},
-	{
-		name: "current min_sdk high enough",
-		fs: map[string][]byte{
-			"Android.bp": []byte(`
-				java_library {
-					name: "current_min_sdk_high_enough",
-					min_sdk_version: "current",
-				}`),
-		},
-		rules: []Rule{
-			NeverAllow().WithMatcher("min_sdk_version", LessThanSdkVersion("31")),
+			"framework can't be used when building against SDK",
 		},
 	},
 }
@@ -452,10 +425,9 @@
 }
 
 type mockJavaLibraryProperties struct {
-	Libs            []string
-	Min_sdk_version *string
-	Sdk_version     *string
-	Uncompress_dex  *bool
+	Libs           []string
+	Sdk_version    *string
+	Uncompress_dex *bool
 }
 
 type mockJavaLibraryModule struct {
diff --git a/android/notices.go b/android/notices.go
index 194a734..2a4c17c 100644
--- a/android/notices.go
+++ b/android/notices.go
@@ -15,93 +15,9 @@
 package android
 
 import (
-	"path/filepath"
 	"strings"
-
-	"github.com/google/blueprint"
 )
 
-func init() {
-	pctx.SourcePathVariable("merge_notices", "build/soong/scripts/mergenotice.py")
-	pctx.SourcePathVariable("generate_notice", "build/soong/scripts/generate-notice-files.py")
-
-	pctx.HostBinToolVariable("minigzip", "minigzip")
-}
-
-type NoticeOutputs struct {
-	Merged       OptionalPath
-	TxtOutput    OptionalPath
-	HtmlOutput   OptionalPath
-	HtmlGzOutput OptionalPath
-}
-
-var (
-	mergeNoticesRule = pctx.AndroidStaticRule("mergeNoticesRule", blueprint.RuleParams{
-		Command:     `${merge_notices} --output $out $in`,
-		CommandDeps: []string{"${merge_notices}"},
-		Description: "merge notice files into $out",
-	})
-
-	generateNoticeRule = pctx.AndroidStaticRule("generateNoticeRule", blueprint.RuleParams{
-		Command: `rm -rf $$(dirname $txtOut) $$(dirname $htmlOut) $$(dirname $out) && ` +
-			`mkdir -p $$(dirname $txtOut) $$(dirname $htmlOut)  $$(dirname $out) && ` +
-			`${generate_notice} --text-output $txtOut --html-output $htmlOut -t "$title" -s $inputDir && ` +
-			`${minigzip} -c $htmlOut > $out`,
-		CommandDeps: []string{"${generate_notice}", "${minigzip}"},
-		Description: "produce notice file $out",
-	}, "txtOut", "htmlOut", "title", "inputDir")
-)
-
-func MergeNotices(ctx ModuleContext, mergedNotice WritablePath, noticePaths []Path) {
-	ctx.Build(pctx, BuildParams{
-		Rule:        mergeNoticesRule,
-		Description: "merge notices",
-		Inputs:      noticePaths,
-		Output:      mergedNotice,
-	})
-}
-
-func BuildNoticeOutput(ctx ModuleContext, installPath InstallPath, installFilename string,
-	noticePaths []Path) NoticeOutputs {
-	// Merge all NOTICE files into one.
-	// TODO(jungjw): We should just produce a well-formatted NOTICE.html file in a single pass.
-	//
-	// generate-notice-files.py, which processes the merged NOTICE file, has somewhat strict rules
-	// about input NOTICE file paths.
-	// 1. Their relative paths to the src root become their NOTICE index titles. We want to use
-	// on-device paths as titles, and so output the merged NOTICE file the corresponding location.
-	// 2. They must end with .txt extension. Otherwise, they're ignored.
-	noticeRelPath := InstallPathToOnDevicePath(ctx, installPath.Join(ctx, installFilename+".txt"))
-	mergedNotice := PathForModuleOut(ctx, filepath.Join("NOTICE_FILES/src", noticeRelPath))
-	MergeNotices(ctx, mergedNotice, noticePaths)
-
-	// Transform the merged NOTICE file into a gzipped HTML file.
-	txtOuptut := PathForModuleOut(ctx, "NOTICE_txt", "NOTICE.txt")
-	htmlOutput := PathForModuleOut(ctx, "NOTICE_html", "NOTICE.html")
-	htmlGzOutput := PathForModuleOut(ctx, "NOTICE", "NOTICE.html.gz")
-	title := "Notices for " + ctx.ModuleName()
-	ctx.Build(pctx, BuildParams{
-		Rule:            generateNoticeRule,
-		Description:     "generate notice output",
-		Input:           mergedNotice,
-		Output:          htmlGzOutput,
-		ImplicitOutputs: WritablePaths{txtOuptut, htmlOutput},
-		Args: map[string]string{
-			"txtOut":   txtOuptut.String(),
-			"htmlOut":  htmlOutput.String(),
-			"title":    title,
-			"inputDir": PathForModuleOut(ctx, "NOTICE_FILES/src").String(),
-		},
-	})
-
-	return NoticeOutputs{
-		Merged:       OptionalPathForPath(mergedNotice),
-		TxtOutput:    OptionalPathForPath(txtOuptut),
-		HtmlOutput:   OptionalPathForPath(htmlOutput),
-		HtmlGzOutput: OptionalPathForPath(htmlGzOutput),
-	}
-}
-
 // BuildNoticeTextOutputFromLicenseMetadata writes out a notice text file based on the module's
 // generated license metadata file.
 func BuildNoticeTextOutputFromLicenseMetadata(ctx ModuleContext, outputFile WritablePath) {
@@ -112,5 +28,18 @@
 		FlagWithOutput("-o ", outputFile).
 		FlagWithDepFile("-d ", depsFile).
 		Input(ctx.Module().base().licenseMetadataFile)
-	rule.Build("container_notice", "container notice file")
+	rule.Build("text_notice", "container notice file")
+}
+
+// BuildNoticeHtmlOutputFromLicenseMetadata writes out a notice HTML file based on the module's
+// generated license metadata file.
+func BuildNoticeHtmlOutputFromLicenseMetadata(ctx ModuleContext, outputFile WritablePath) {
+	depsFile := outputFile.ReplaceExtension(ctx, strings.TrimPrefix(outputFile.Ext()+".d", "."))
+	rule := NewRuleBuilder(pctx, ctx)
+	rule.Command().
+		BuiltTool("htmlnotice").
+		FlagWithOutput("-o ", outputFile).
+		FlagWithDepFile("-d ", depsFile).
+		Input(ctx.Module().base().licenseMetadataFile)
+	rule.Build("html_notice", "container notice file")
 }
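The new HTML variant mirrors the existing text one; a hedged sketch of how a module could request both outputs from its license metadata (myModule and the file names are illustrative):

func (m *myModule) GenerateAndroidBuildActions(ctx android.ModuleContext) {
	// Derive both notice formats from the module's license metadata file.
	txtNotice := android.PathForModuleOut(ctx, "NOTICE.txt")
	htmlNotice := android.PathForModuleOut(ctx, "NOTICE.html")
	android.BuildNoticeTextOutputFromLicenseMetadata(ctx, txtNotice)
	android.BuildNoticeHtmlOutputFromLicenseMetadata(ctx, htmlNotice)
}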
diff --git a/android/packaging.go b/android/packaging.go
index e3a0b54..ecd84a2 100644
--- a/android/packaging.go
+++ b/android/packaging.go
@@ -40,6 +40,8 @@
 	executable bool
 
 	effectiveLicenseFiles *Paths
+
+	partition string
 }
 
 // Get file name of installed package
@@ -67,6 +69,10 @@
 	return *p.effectiveLicenseFiles
 }
 
+func (p *PackagingSpec) Partition() string {
+	return p.partition
+}
+
 type PackageModule interface {
 	Module
 	packagingBase() *PackagingBase
@@ -76,11 +82,14 @@
 	// be copied to a zip in CopyDepsToZip, `depTag` should implement PackagingItem marker interface.
 	AddDeps(ctx BottomUpMutatorContext, depTag blueprint.DependencyTag)
 
+	// GatherPackagingSpecs gathers PackagingSpecs of transitive dependencies.
+	GatherPackagingSpecs(ctx ModuleContext) map[string]PackagingSpec
+
 	// CopyDepsToZip zips the built artifacts of the dependencies into the given zip file and
 	// returns zip entries in it. This is expected to be called in GenerateAndroidBuildActions,
 	// followed by a build rule that unzips it and creates the final output (img, zip, tar.gz,
 	// etc.) from the extracted files
-	CopyDepsToZip(ctx ModuleContext, zipOut WritablePath) []string
+	CopyDepsToZip(ctx ModuleContext, specs map[string]PackagingSpec, zipOut WritablePath) []string
 }
 
 // PackagingBase provides basic functionality for packaging dependencies. A module is expected to
@@ -211,7 +220,7 @@
 	}
 }
 
-// Returns transitive PackagingSpecs from deps
+// See PackageModule.GatherPackagingSpecs
 func (p *PackagingBase) GatherPackagingSpecs(ctx ModuleContext) map[string]PackagingSpec {
 	m := make(map[string]PackagingSpec)
 	ctx.VisitDirectDeps(func(child Module) {
@@ -229,10 +238,10 @@
 
 // CopySpecsToDir is a helper that will add commands to the rule builder to copy the PackagingSpec
 // entries into the specified directory.
-func (p *PackagingBase) CopySpecsToDir(ctx ModuleContext, builder *RuleBuilder, m map[string]PackagingSpec, dir ModuleOutPath) (entries []string) {
+func (p *PackagingBase) CopySpecsToDir(ctx ModuleContext, builder *RuleBuilder, specs map[string]PackagingSpec, dir ModuleOutPath) (entries []string) {
 	seenDir := make(map[string]bool)
-	for _, k := range SortedStringKeys(m) {
-		ps := m[k]
+	for _, k := range SortedStringKeys(specs) {
+		ps := specs[k]
 		destPath := dir.Join(ctx, ps.relPathInPackage).String()
 		destDir := filepath.Dir(destPath)
 		entries = append(entries, ps.relPathInPackage)
@@ -254,14 +263,13 @@
 }
 
 // See PackageModule.CopyDepsToZip
-func (p *PackagingBase) CopyDepsToZip(ctx ModuleContext, zipOut WritablePath) (entries []string) {
-	m := p.GatherPackagingSpecs(ctx)
+func (p *PackagingBase) CopyDepsToZip(ctx ModuleContext, specs map[string]PackagingSpec, zipOut WritablePath) (entries []string) {
 	builder := NewRuleBuilder(pctx, ctx)
 
 	dir := PathForModuleOut(ctx, ".zip")
 	builder.Command().Text("rm").Flag("-rf").Text(dir.String())
 	builder.Command().Text("mkdir").Flag("-p").Text(dir.String())
-	entries = p.CopySpecsToDir(ctx, builder, m, dir)
+	entries = p.CopySpecsToDir(ctx, builder, specs, dir)
 
 	builder.Command().
 		BuiltTool("soong_zip").
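Because callers now pass the specs explicitly and PackagingSpec exposes Partition(), a packaging module can narrow what it copies before zipping. A hedged sketch under that assumption (filterByPartition is an illustrative helper, not part of this change):

// filterByPartition keeps only the PackagingSpecs destined for the given partition.
func filterByPartition(specs map[string]android.PackagingSpec, partition string) map[string]android.PackagingSpec {
	filtered := make(map[string]android.PackagingSpec)
	for path, spec := range specs {
		if spec.Partition() == partition {
			filtered[path] = spec
		}
	}
	return filtered
}

// Inside GenerateAndroidBuildActions of a PackageModule:
//   specs := m.GatherPackagingSpecs(ctx)
//   entries := m.CopyDepsToZip(ctx, filterByPartition(specs, "system"), zipOut)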
diff --git a/android/packaging_test.go b/android/packaging_test.go
index ff7446c..91ac1f3 100644
--- a/android/packaging_test.go
+++ b/android/packaging_test.go
@@ -95,7 +95,7 @@
 
 func (m *packageTestModule) GenerateAndroidBuildActions(ctx ModuleContext) {
 	zipFile := PathForModuleOut(ctx, "myzip.zip")
-	m.entries = m.CopyDepsToZip(ctx, zipFile)
+	m.entries = m.CopyDepsToZip(ctx, m.GatherPackagingSpecs(ctx), zipFile)
 }
 
 func runPackagingTest(t *testing.T, multitarget bool, bp string, expected []string) {
diff --git a/android/paths.go b/android/paths.go
index 4c69de7..e7829b9 100644
--- a/android/paths.go
+++ b/android/paths.go
@@ -405,6 +405,13 @@
 	return PathsForModuleSrcExcludes(ctx, paths, nil)
 }
 
+type SourceInput struct {
+	Context      ModuleMissingDepsPathContext
+	Paths        []string
+	ExcludePaths []string
+	IncludeDirs  bool
+}
+
 // PathsForModuleSrcExcludes returns a Paths{} containing the resolved references in paths, minus
 // those listed in excludes. Elements of paths and excludes are resolved as:
 // * filepath, relative to local module directory, resolves as a filepath relative to the local
@@ -423,12 +430,21 @@
 //     missing dependencies
 //   * otherwise, a ModuleError is thrown.
 func PathsForModuleSrcExcludes(ctx ModuleMissingDepsPathContext, paths, excludes []string) Paths {
-	ret, missingDeps := PathsAndMissingDepsForModuleSrcExcludes(ctx, paths, excludes)
-	if ctx.Config().AllowMissingDependencies() {
-		ctx.AddMissingDependencies(missingDeps)
+	return PathsRelativeToModuleSourceDir(SourceInput{
+		Context:      ctx,
+		Paths:        paths,
+		ExcludePaths: excludes,
+		IncludeDirs:  true,
+	})
+}
+
+func PathsRelativeToModuleSourceDir(input SourceInput) Paths {
+	ret, missingDeps := PathsAndMissingDepsRelativeToModuleSourceDir(input)
+	if input.Context.Config().AllowMissingDependencies() {
+		input.Context.AddMissingDependencies(missingDeps)
 	} else {
 		for _, m := range missingDeps {
-			ctx.ModuleErrorf(`missing dependency on %q, is the property annotated with android:"path"?`, m)
+			input.Context.ModuleErrorf(`missing dependency on %q, is the property annotated with android:"path"?`, m)
 		}
 	}
 	return ret
@@ -543,23 +559,31 @@
 // Properties passed as the paths argument must have been annotated with struct tag
 // `android:"path"` so that dependencies on SourceFileProducer modules will have already been handled by the
 // path_deps mutator.
-func PathsAndMissingDepsForModuleSrcExcludes(ctx ModuleWithDepsPathContext, paths, excludes []string) (Paths, []string) {
-	prefix := pathForModuleSrc(ctx).String()
+func PathsAndMissingDepsForModuleSrcExcludes(ctx ModuleMissingDepsPathContext, paths, excludes []string) (Paths, []string) {
+	return PathsAndMissingDepsRelativeToModuleSourceDir(SourceInput{
+		Context:      ctx,
+		Paths:        paths,
+		ExcludePaths: excludes,
+		IncludeDirs:  true,
+	})
+}
+
+func PathsAndMissingDepsRelativeToModuleSourceDir(input SourceInput) (Paths, []string) {
+	prefix := pathForModuleSrc(input.Context).String()
 
 	var expandedExcludes []string
-	if excludes != nil {
-		expandedExcludes = make([]string, 0, len(excludes))
+	if input.ExcludePaths != nil {
+		expandedExcludes = make([]string, 0, len(input.ExcludePaths))
 	}
 
 	var missingExcludeDeps []string
-
-	for _, e := range excludes {
+	for _, e := range input.ExcludePaths {
 		if m, t := SrcIsModuleWithTag(e); m != "" {
-			modulePaths, err := getPathsFromModuleDep(ctx, e, m, t)
+			modulePaths, err := getPathsFromModuleDep(input.Context, e, m, t)
 			if m, ok := err.(missingDependencyError); ok {
 				missingExcludeDeps = append(missingExcludeDeps, m.missingDeps...)
 			} else if err != nil {
-				reportPathError(ctx, err)
+				reportPathError(input.Context, err)
 			} else {
 				expandedExcludes = append(expandedExcludes, modulePaths.Strings()...)
 			}
@@ -568,19 +592,24 @@
 		}
 	}
 
-	if paths == nil {
+	if input.Paths == nil {
 		return nil, missingExcludeDeps
 	}
 
 	var missingDeps []string
 
-	expandedSrcFiles := make(Paths, 0, len(paths))
-	for _, s := range paths {
-		srcFiles, err := expandOneSrcPath(ctx, s, expandedExcludes)
+	expandedSrcFiles := make(Paths, 0, len(input.Paths))
+	for _, s := range input.Paths {
+		srcFiles, err := expandOneSrcPath(sourcePathInput{
+			context:          input.Context,
+			path:             s,
+			expandedExcludes: expandedExcludes,
+			includeDirs:      input.IncludeDirs,
+		})
 		if depErr, ok := err.(missingDependencyError); ok {
 			missingDeps = append(missingDeps, depErr.missingDeps...)
 		} else if err != nil {
-			reportPathError(ctx, err)
+			reportPathError(input.Context, err)
 		}
 		expandedSrcFiles = append(expandedSrcFiles, srcFiles...)
 	}
@@ -596,44 +625,59 @@
 	return "missing dependencies: " + strings.Join(e.missingDeps, ", ")
 }
 
+type sourcePathInput struct {
+	context          ModuleWithDepsPathContext
+	path             string
+	expandedExcludes []string
+	includeDirs      bool
+}
+
 // Expands one path string to Paths rooted from the module's local source
 // directory, excluding those listed in the expandedExcludes.
 // Expands globs, references to SourceFileProducer or OutputFileProducer modules using the ":name" and ":name{.tag}" syntax.
-func expandOneSrcPath(ctx ModuleWithDepsPathContext, sPath string, expandedExcludes []string) (Paths, error) {
+func expandOneSrcPath(input sourcePathInput) (Paths, error) {
 	excludePaths := func(paths Paths) Paths {
-		if len(expandedExcludes) == 0 {
+		if len(input.expandedExcludes) == 0 {
 			return paths
 		}
 		remainder := make(Paths, 0, len(paths))
 		for _, p := range paths {
-			if !InList(p.String(), expandedExcludes) {
+			if !InList(p.String(), input.expandedExcludes) {
 				remainder = append(remainder, p)
 			}
 		}
 		return remainder
 	}
-	if m, t := SrcIsModuleWithTag(sPath); m != "" {
-		modulePaths, err := getPathsFromModuleDep(ctx, sPath, m, t)
+	if m, t := SrcIsModuleWithTag(input.path); m != "" {
+		modulePaths, err := getPathsFromModuleDep(input.context, input.path, m, t)
 		if err != nil {
 			return nil, err
 		} else {
 			return excludePaths(modulePaths), nil
 		}
-	} else if pathtools.IsGlob(sPath) {
-		paths := GlobFiles(ctx, pathForModuleSrc(ctx, sPath).String(), expandedExcludes)
-		return PathsWithModuleSrcSubDir(ctx, paths, ""), nil
 	} else {
-		p := pathForModuleSrc(ctx, sPath)
-		if exists, _, err := ctx.Config().fs.Exists(p.String()); err != nil {
-			ReportPathErrorf(ctx, "%s: %s", p, err.Error())
-		} else if !exists && !ctx.Config().TestAllowNonExistentPaths {
-			ReportPathErrorf(ctx, "module source path %q does not exist", p)
-		}
+		p := pathForModuleSrc(input.context, input.path)
+		if pathtools.IsGlob(input.path) {
+			paths := GlobFiles(input.context, p.String(), input.expandedExcludes)
+			return PathsWithModuleSrcSubDir(input.context, paths, ""), nil
+		} else {
+			if exists, _, err := input.context.Config().fs.Exists(p.String()); err != nil {
+				ReportPathErrorf(input.context, "%s: %s", p, err.Error())
+			} else if !exists && !input.context.Config().TestAllowNonExistentPaths {
+				ReportPathErrorf(input.context, "module source path %q does not exist", p)
+			} else if !input.includeDirs {
+				if isDir, err := input.context.Config().fs.IsDir(p.String()); exists && err != nil {
+					ReportPathErrorf(input.context, "%s: %s", p, err.Error())
+				} else if isDir {
+					ReportPathErrorf(input.context, "module source path %q is a directory", p)
+				}
+			}
 
-		if InList(p.String(), expandedExcludes) {
-			return nil, nil
+			if InList(p.String(), input.expandedExcludes) {
+				return nil, nil
+			}
+			return Paths{p}, nil
 		}
-		return Paths{p}, nil
 	}
 }
 
@@ -1315,7 +1359,7 @@
 	// validatePath() will corrupt it, e.g. replace "//" with "/". If the path is not a module
 	// reference then it will be validated by expandOneSrcPath anyway when it calls expandOneSrcPath.
 	p := strings.Join(pathComponents, string(filepath.Separator))
-	paths, err := expandOneSrcPath(ctx, p, nil)
+	paths, err := expandOneSrcPath(sourcePathInput{context: ctx, path: p, includeDirs: true})
 	if err != nil {
 		if depErr, ok := err.(missingDependencyError); ok {
 			if ctx.Config().AllowMissingDependencies() {
@@ -1430,14 +1474,11 @@
 func PathForVndkRefAbiDump(ctx ModuleInstallPathContext, version, fileName string,
 	isNdk, isLlndkOrVndk, isGzip bool) OptionalPath {
 
-	arches := ctx.DeviceConfig().Arches()
-	if len(arches) == 0 {
-		panic("device build with no primary arch")
-	}
-	currentArch := ctx.Arch()
-	archNameAndVariant := currentArch.ArchType.String()
-	if currentArch.ArchVariant != "" {
-		archNameAndVariant += "_" + currentArch.ArchVariant
+	currentArchType := ctx.Arch().ArchType
+	primaryArchType := ctx.Config().DevicePrimaryArchType()
+	archName := currentArchType.String()
+	if currentArchType != primaryArchType {
+		archName += "_" + primaryArchType.String()
 	}
 
 	var dirName string
@@ -1459,7 +1500,7 @@
 	}
 
 	return ExistentPathForSource(ctx, "prebuilts", "abi-dumps", dirName,
-		version, binderBitness, archNameAndVariant, "source-based",
+		version, binderBitness, archName, "source-based",
 		fileName+ext)
 }
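For srcs-style properties, the SourceInput form introduced above makes the directory check explicit; a minimal sketch of resolving sources while rejecting directory entries (myProps is an illustrative property struct, and ctx is assumed to satisfy ModuleMissingDepsPathContext):

// Resolve srcs/exclude_srcs, reporting an error if any entry expands to a directory.
srcFiles := android.PathsRelativeToModuleSourceDir(android.SourceInput{
	Context:      ctx,
	Paths:        myProps.Srcs,
	ExcludePaths: myProps.Exclude_srcs,
	IncludeDirs:  false,
})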
 
diff --git a/android/proto.go b/android/proto.go
index f466261..c3759f8 100644
--- a/android/proto.go
+++ b/android/proto.go
@@ -16,6 +16,7 @@
 
 import (
 	"android/soong/bazel"
+	"regexp"
 	"strings"
 
 	"github.com/google/blueprint"
@@ -26,6 +27,14 @@
 	canonicalPathFromRootDefault = true
 )
 
+var (
+	// ignoring case, checks for proto or protos as an independent word in the name, whether at the
+	// beginning, end, or middle. e.g. "proto.foo", "bar-protos", "baz_proto_srcs" would all match
+	filegroupLikelyProtoPattern = regexp.MustCompile("(?i)(^|[^a-z])proto(s)?([^a-z]|$)")
+
+	ProtoSrcLabelPartition = bazel.LabelPartition{Extensions: []string{".proto"}, LabelMapper: isProtoFilegroup}
+)
+
 // TODO(ccross): protos are often used to communicate between multiple modules.  If the only
 // way to convert a proto to source is to reference it as a source file, and external modules cannot
 // reference source files in other modules, then every module that owns a proto file will need to
@@ -165,12 +174,11 @@
 
 // Bp2buildProtoProperties converts proto properties, creating a proto_library and returning the
 // information necessary for language-specific handling.
-func Bp2buildProtoProperties(ctx Bp2buildMutatorContext, module Module, srcs bazel.LabelListAttribute) (Bp2buildProtoInfo, bool) {
+func Bp2buildProtoProperties(ctx Bp2buildMutatorContext, m *ModuleBase, srcs bazel.LabelListAttribute) (Bp2buildProtoInfo, bool) {
 	var info Bp2buildProtoInfo
 	if srcs.IsEmpty() {
 		return info, false
 	}
-	m := module.base()
 
 	info.Name = m.Name() + "_proto"
 	attrs := protoAttrs{
@@ -205,3 +213,13 @@
 
 	return info, true
 }
+
+func isProtoFilegroup(ctx bazel.OtherModuleContext, label bazel.Label) (string, bool) {
+	m, exists := ctx.ModuleFromName(label.OriginalModuleName)
+	labelStr := label.Label
+	if !exists || !IsFilegroup(ctx, m) {
+		return labelStr, false
+	}
+	likelyProtos := filegroupLikelyProtoPattern.MatchString(label.OriginalModuleName)
+	return labelStr, likelyProtos
+}
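The heuristic above is a plain regexp, so it is easy to sanity-check in isolation. A small self-contained sketch of the matching behaviour (the sample names are illustrative):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Mirrors filegroupLikelyProtoPattern: "proto"/"protos" as a standalone word.
	pattern := regexp.MustCompile(`(?i)(^|[^a-z])proto(s)?([^a-z]|$)`)
	for _, name := range []string{"proto.foo", "bar-protos", "baz_proto_srcs", "protobuf-gen"} {
		fmt.Printf("%-14s likelyProto=%v\n", name, pattern.MatchString(name))
	}
	// Prints true for the first three names and false for "protobuf-gen".
}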
diff --git a/android/register.go b/android/register.go
index 10e14e0..c505833 100644
--- a/android/register.go
+++ b/android/register.go
@@ -59,6 +59,7 @@
 
 var moduleTypes []moduleType
 var moduleTypesForDocs = map[string]reflect.Value{}
+var moduleTypeByFactory = map[reflect.Value]string{}
 
 type singleton struct {
 	// True if this should be registered as a pre-singleton, false otherwise.
@@ -140,6 +141,7 @@
 // RegisterModuleType was a lambda.
 func RegisterModuleTypeForDocs(name string, factory reflect.Value) {
 	moduleTypesForDocs[name] = factory
+	moduleTypeByFactory[factory] = name
 }
 
 func RegisterSingletonType(name string, factory SingletonFactory) {
@@ -228,6 +230,10 @@
 	return moduleTypesForDocs
 }
 
+func ModuleTypeByFactory() map[reflect.Value]string {
+	return moduleTypeByFactory
+}
+
 // Interface for registering build components.
 //
 // Provided to allow registration of build components to be shared between the runtime
diff --git a/android/rule_builder.go b/android/rule_builder.go
index 098c1fc..11da36c 100644
--- a/android/rule_builder.go
+++ b/android/rule_builder.go
@@ -101,12 +101,7 @@
 }
 
 // Restat marks the rule as a restat rule, which will be passed to ModuleContext.Rule in BuildParams.Restat.
-//
-// Restat is not compatible with Sbox()
 func (r *RuleBuilder) Restat() *RuleBuilder {
-	if r.sbox {
-		panic("Restat() is not compatible with Sbox()")
-	}
 	r.restat = true
 	return r
 }
@@ -141,8 +136,6 @@
 // point to a location where sbox's manifest will be written and must be outside outputDir. sbox
 // will ensure that all outputs have been written, and will discard any output files that were not
 // specified.
-//
-// Sbox is not compatible with Restat()
 func (r *RuleBuilder) Sbox(outputDir WritablePath, manifestPath WritablePath) *RuleBuilder {
 	if r.sbox {
 		panic("Sbox() may not be called more than once")
@@ -150,9 +143,6 @@
 	if len(r.commands) > 0 {
 		panic("Sbox() may not be called after Command()")
 	}
-	if r.restat {
-		panic("Sbox() is not compatible with Restat()")
-	}
 	r.sbox = true
 	r.outDir = outputDir
 	r.sboxManifestPath = manifestPath
@@ -636,11 +626,14 @@
 				ctx: r.ctx,
 			},
 		}
-		sboxCmd.Text("rm -rf").Output(r.outDir)
-		sboxCmd.Text("&&")
 		sboxCmd.builtToolWithoutDeps("sbox").
-			Flag("--sandbox-path").Text(shared.TempDirForOutDir(PathForOutput(r.ctx).String())).
-			Flag("--manifest").Input(r.sboxManifestPath)
+			FlagWithArg("--sandbox-path ", shared.TempDirForOutDir(PathForOutput(r.ctx).String())).
+			FlagWithArg("--output-dir ", r.outDir.String()).
+			FlagWithInput("--manifest ", r.sboxManifestPath)
+
+		if r.restat {
+			sboxCmd.Flag("--write-if-changed")
+		}
 
 		// Replace the command string, and add the sbox tool and manifest textproto to the
 		// dependencies of the final sbox rule.
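With the incompatibility lifted, a rule can be both sandboxed and restat; sbox then leaves unchanged outputs untouched via --write-if-changed. A hedged sketch, assuming pctx, ctx, and the output/manifest paths are already in scope and my_tool is an illustrative host tool:

rule := android.NewRuleBuilder(pctx, ctx)
rule.Sbox(genDir, manifestPath).Restat()
rule.Command().
	BuiltTool("my_tool").
	FlagWithOutput("-o ", outputFile)
rule.Build("my_tool_gen", "generate output with my_tool")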
diff --git a/android/rule_builder_test.go b/android/rule_builder_test.go
index 3766bb0..86647eb 100644
--- a/android/rule_builder_test.go
+++ b/android/rule_builder_test.go
@@ -678,32 +678,32 @@
 	})
 	t.Run("sbox", func(t *testing.T) {
 		outDir := "out/soong/.intermediates/foo_sbox"
-		outFile := filepath.Join(outDir, "gen/foo_sbox")
-		depFile := filepath.Join(outDir, "gen/foo_sbox.d")
+		sboxOutDir := filepath.Join(outDir, "gen")
+		outFile := filepath.Join(sboxOutDir, "foo_sbox")
+		depFile := filepath.Join(sboxOutDir, "foo_sbox.d")
 		rspFile := filepath.Join(outDir, "rsp")
 		rspFile2 := filepath.Join(outDir, "rsp2")
 		manifest := filepath.Join(outDir, "sbox.textproto")
 		sbox := filepath.Join("out", "soong", "host", result.Config.PrebuiltOS(), "bin/sbox")
 		sandboxPath := shared.TempDirForOutDir("out/soong")
 
-		cmd := `rm -rf ` + outDir + `/gen && ` +
-			sbox + ` --sandbox-path ` + sandboxPath + ` --manifest ` + manifest
+		cmd := sbox + ` --sandbox-path ` + sandboxPath + ` --output-dir ` + sboxOutDir + ` --manifest ` + manifest
 		module := result.ModuleForTests("foo_sbox", "")
 		check(t, module.Output("gen/foo_sbox"), module.Output(rspFile2),
 			cmd, outFile, depFile, rspFile, rspFile2, false, []string{manifest}, []string{sbox})
 	})
 	t.Run("sbox_inputs", func(t *testing.T) {
 		outDir := "out/soong/.intermediates/foo_sbox_inputs"
-		outFile := filepath.Join(outDir, "gen/foo_sbox_inputs")
-		depFile := filepath.Join(outDir, "gen/foo_sbox_inputs.d")
+		sboxOutDir := filepath.Join(outDir, "gen")
+		outFile := filepath.Join(sboxOutDir, "foo_sbox_inputs")
+		depFile := filepath.Join(sboxOutDir, "foo_sbox_inputs.d")
 		rspFile := filepath.Join(outDir, "rsp")
 		rspFile2 := filepath.Join(outDir, "rsp2")
 		manifest := filepath.Join(outDir, "sbox.textproto")
 		sbox := filepath.Join("out", "soong", "host", result.Config.PrebuiltOS(), "bin/sbox")
 		sandboxPath := shared.TempDirForOutDir("out/soong")
 
-		cmd := `rm -rf ` + outDir + `/gen && ` +
-			sbox + ` --sandbox-path ` + sandboxPath + ` --manifest ` + manifest
+		cmd := sbox + ` --sandbox-path ` + sandboxPath + ` --output-dir ` + sboxOutDir + ` --manifest ` + manifest
 
 		module := result.ModuleForTests("foo_sbox_inputs", "")
 		check(t, module.Output("gen/foo_sbox_inputs"), module.Output(rspFile2),
diff --git a/android/singleton_module_test.go b/android/singleton_module_test.go
index eb5554c..9d98478 100644
--- a/android/singleton_module_test.go
+++ b/android/singleton_module_test.go
@@ -46,8 +46,8 @@
 	PrepareForTestWithAndroidMk,
 	FixtureRegisterWithContext(func(ctx RegistrationContext) {
 		ctx.RegisterSingletonModuleType("test_singleton_module", testSingletonModuleFactory)
-		ctx.RegisterSingletonType("makevars", makeVarsSingletonFunc)
 	}),
+	PrepareForTestWithMakevars,
 )
 
 func TestSingletonModule(t *testing.T) {
diff --git a/android/testing.go b/android/testing.go
index 39864e1..ac02db9 100644
--- a/android/testing.go
+++ b/android/testing.go
@@ -15,6 +15,7 @@
 package android
 
 import (
+	"bytes"
 	"fmt"
 	"path/filepath"
 	"regexp"
@@ -23,6 +24,8 @@
 	"sync"
 	"testing"
 
+	mkparser "android/soong/androidmk/parser"
+
 	"github.com/google/blueprint"
 	"github.com/google/blueprint/proptools"
 )
@@ -115,6 +118,10 @@
 	ctx.PreArchMutators(RegisterNamespaceMutator)
 })
 
+var PrepareForTestWithMakevars = FixtureRegisterWithContext(func(ctx RegistrationContext) {
+	ctx.RegisterSingletonType("makevars", makeVarsSingletonFunc)
+})
+
 // Test fixture preparer that will register most java build components.
 //
 // Singletons and mutators should only be added here if they are needed for a majority of java
@@ -204,7 +211,7 @@
 	ctx.finalDeps = append(ctx.finalDeps, f)
 }
 
-func (ctx *TestContext) RegisterBp2BuildConfig(config Bp2BuildConfig) {
+func (ctx *TestContext) RegisterBp2BuildConfig(config bp2BuildConversionAllowlist) {
 	ctx.config.bp2buildPackageConfig = config
 }
 
@@ -602,6 +609,62 @@
 		"\nall singletons: %v", name, allSingletonNames))
 }
 
+type InstallMakeRule struct {
+	Target        string
+	Deps          []string
+	OrderOnlyDeps []string
+}
+
+func parseMkRules(t *testing.T, config Config, nodes []mkparser.Node) []InstallMakeRule {
+	var rules []InstallMakeRule
+	for _, node := range nodes {
+		if mkParserRule, ok := node.(*mkparser.Rule); ok {
+			var rule InstallMakeRule
+
+			if targets := mkParserRule.Target.Words(); len(targets) == 0 {
+				t.Fatalf("no targets for rule %s", mkParserRule.Dump())
+			} else if len(targets) > 1 {
+				t.Fatalf("unsupported multiple targets for rule %s", mkParserRule.Dump())
+			} else if !targets[0].Const() {
+				t.Fatalf("unsupported non-const target for rule %s", mkParserRule.Dump())
+			} else {
+				rule.Target = normalizeStringRelativeToTop(config, targets[0].Value(nil))
+			}
+
+			prereqList := &rule.Deps
+			for _, prereq := range mkParserRule.Prerequisites.Words() {
+				if !prereq.Const() {
+					t.Fatalf("unsupported non-const prerequisite for rule %s", mkParserRule.Dump())
+				}
+
+				if prereq.Value(nil) == "|" {
+					prereqList = &rule.OrderOnlyDeps
+					continue
+				}
+
+				*prereqList = append(*prereqList, normalizeStringRelativeToTop(config, prereq.Value(nil)))
+			}
+
+			rules = append(rules, rule)
+		}
+	}
+
+	return rules
+}
+
+func (ctx *TestContext) InstallMakeRulesForTesting(t *testing.T) []InstallMakeRule {
+	installs := ctx.SingletonForTests("makevars").Singleton().(*makeVarsSingleton).installsForTesting
+	buf := bytes.NewBuffer(append([]byte(nil), installs...))
+	parser := mkparser.NewParser("makevars", buf)
+
+	nodes, errs := parser.Parse()
+	if len(errs) > 0 {
+		t.Fatalf("error parsing install rules: %s", errs[0])
+	}
+
+	return parseMkRules(t, ctx.config, nodes)
+}
+
 func (ctx *TestContext) Config() Config {
 	return ctx.config
 }
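A hedged sketch of consuming the parsed install rules from a test whose fixture includes PrepareForTestWithMakevars (the target path and assertion are illustrative):

// ctx is the *TestContext produced by the fixture.
rules := ctx.InstallMakeRulesForTesting(t)
for _, rule := range rules {
	t.Logf("install %s <- %v (order-only: %v)", rule.Target, rule.Deps, rule.OrderOnlyDeps)
	if rule.Target == "out/target/product/test_device/system/etc/foo.conf" && len(rule.Deps) == 0 {
		t.Errorf("expected %s to have at least one dependency", rule.Target)
	}
}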
diff --git a/android/util.go b/android/util.go
index 0ee253e..47c4583 100644
--- a/android/util.go
+++ b/android/util.go
@@ -65,21 +65,55 @@
 	return buf.String()
 }
 
-// SorterStringKeys returns the keys of the given string-keyed map in the ascending order
+// SortedStringKeys returns the keys of the given string-keyed map in the ascending order.
 func SortedStringKeys(m interface{}) []string {
 	v := reflect.ValueOf(m)
 	if v.Kind() != reflect.Map {
 		panic(fmt.Sprintf("%#v is not a map", m))
 	}
-	keys := v.MapKeys()
-	s := make([]string, 0, len(keys))
-	for _, key := range keys {
-		s = append(s, key.String())
+	if v.Len() == 0 {
+		return nil
+	}
+	iter := v.MapRange()
+	s := make([]string, 0, v.Len())
+	for iter.Next() {
+		s = append(s, iter.Key().String())
 	}
 	sort.Strings(s)
 	return s
 }
 
+// stringValues returns the values of the given string-valued map in randomized map order.
+func stringValues(m interface{}) []string {
+	v := reflect.ValueOf(m)
+	if v.Kind() != reflect.Map {
+		panic(fmt.Sprintf("%#v is not a map", m))
+	}
+	if v.Len() == 0 {
+		return nil
+	}
+	iter := v.MapRange()
+	s := make([]string, 0, v.Len())
+	for iter.Next() {
+		s = append(s, iter.Value().String())
+	}
+	return s
+}
+
+// SortedStringValues returns the values of the given string-valued map in the ascending order.
+func SortedStringValues(m interface{}) []string {
+	s := stringValues(m)
+	sort.Strings(s)
+	return s
+}
+
+// SortedUniqueStringValues returns the values of the given string-valued map in the ascending order
+// with duplicates removed.
+func SortedUniqueStringValues(m interface{}) []string {
+	s := stringValues(m)
+	return SortedUniqueStrings(s)
+}
+
 // IndexList returns the index of the first occurrence of the given string in the list or -1
 func IndexList(s string, list []string) int {
 	for i, l := range list {
diff --git a/android/util_test.go b/android/util_test.go
index 09bec01..9b9253b 100644
--- a/android/util_test.go
+++ b/android/util_test.go
@@ -640,3 +640,117 @@
 		})
 	}
 }
+
+func TestSortedStringKeys(t *testing.T) {
+	testCases := []struct {
+		name     string
+		in       interface{}
+		expected []string
+	}{
+		{
+			name:     "nil",
+			in:       map[string]string(nil),
+			expected: nil,
+		},
+		{
+			name:     "empty",
+			in:       map[string]string{},
+			expected: nil,
+		},
+		{
+			name:     "simple",
+			in:       map[string]string{"a": "foo", "b": "bar"},
+			expected: []string{"a", "b"},
+		},
+		{
+			name:     "interface values",
+			in:       map[string]interface{}{"a": nil, "b": nil},
+			expected: []string{"a", "b"},
+		},
+	}
+
+	for _, tt := range testCases {
+		t.Run(tt.name, func(t *testing.T) {
+			got := SortedStringKeys(tt.in)
+			if g, w := got, tt.expected; !reflect.DeepEqual(g, w) {
+				t.Errorf("wanted %q, got %q", w, g)
+			}
+		})
+	}
+}
+
+func TestSortedStringValues(t *testing.T) {
+	testCases := []struct {
+		name     string
+		in       interface{}
+		expected []string
+	}{
+		{
+			name:     "nil",
+			in:       map[string]string(nil),
+			expected: nil,
+		},
+		{
+			name:     "empty",
+			in:       map[string]string{},
+			expected: nil,
+		},
+		{
+			name:     "simple",
+			in:       map[string]string{"foo": "a", "bar": "b"},
+			expected: []string{"a", "b"},
+		},
+		{
+			name:     "duplicates",
+			in:       map[string]string{"foo": "a", "bar": "b", "baz": "b"},
+			expected: []string{"a", "b", "b"},
+		},
+	}
+
+	for _, tt := range testCases {
+		t.Run(tt.name, func(t *testing.T) {
+			got := SortedStringValues(tt.in)
+			if g, w := got, tt.expected; !reflect.DeepEqual(g, w) {
+				t.Errorf("wanted %q, got %q", w, g)
+			}
+		})
+	}
+}
+
+func TestSortedUniqueStringValues(t *testing.T) {
+	testCases := []struct {
+		name     string
+		in       interface{}
+		expected []string
+	}{
+		{
+			name:     "nil",
+			in:       map[string]string(nil),
+			expected: nil,
+		},
+		{
+			name:     "empty",
+			in:       map[string]string{},
+			expected: nil,
+		},
+		{
+			name:     "simple",
+			in:       map[string]string{"foo": "a", "bar": "b"},
+			expected: []string{"a", "b"},
+		},
+		{
+			name:     "duplicates",
+			in:       map[string]string{"foo": "a", "bar": "b", "baz": "b"},
+			expected: []string{"a", "b"},
+		},
+	}
+
+	for _, tt := range testCases {
+		t.Run(tt.name, func(t *testing.T) {
+			got := SortedUniqueStringValues(tt.in)
+			if g, w := got, tt.expected; !reflect.DeepEqual(g, w) {
+				t.Errorf("wanted %q, got %q", w, g)
+			}
+		})
+	}
+}
diff --git a/android/variable.go b/android/variable.go
index 59ae919..077b810 100644
--- a/android/variable.go
+++ b/android/variable.go
@@ -191,6 +191,7 @@
 	Platform_sdk_version_or_codename          *string  `json:",omitempty"`
 	Platform_sdk_final                        *bool    `json:",omitempty"`
 	Platform_sdk_extension_version            *int     `json:",omitempty"`
+	Platform_base_sdk_extension_version       *int     `json:",omitempty"`
 	Platform_version_active_codenames         []string `json:",omitempty"`
 	Platform_vndk_version                     *string  `json:",omitempty"`
 	Platform_systemsdk_versions               []string `json:",omitempty"`
@@ -200,6 +201,7 @@
 	Platform_base_os                          *string  `json:",omitempty"`
 
 	DeviceName                            *string  `json:",omitempty"`
+	DeviceProduct                         *string  `json:",omitempty"`
 	DeviceArch                            *string  `json:",omitempty"`
 	DeviceArchVariant                     *string  `json:",omitempty"`
 	DeviceCpuVariant                      *string  `json:",omitempty"`
@@ -305,10 +307,11 @@
 	JavaCoveragePaths        []string `json:",omitempty"`
 	JavaCoverageExcludePaths []string `json:",omitempty"`
 
-	GcovCoverage               *bool    `json:",omitempty"`
-	ClangCoverage              *bool    `json:",omitempty"`
-	NativeCoveragePaths        []string `json:",omitempty"`
-	NativeCoverageExcludePaths []string `json:",omitempty"`
+	GcovCoverage                *bool    `json:",omitempty"`
+	ClangCoverage               *bool    `json:",omitempty"`
+	NativeCoveragePaths         []string `json:",omitempty"`
+	NativeCoverageExcludePaths  []string `json:",omitempty"`
+	ClangCoverageContinuousMode *bool    `json:",omitempty"`
 
 	// Set by NewConfig
 	Native_coverage *bool `json:",omitempty"`
@@ -386,6 +389,8 @@
 	CertificateOverrides         []string `json:",omitempty"`
 	PackageNameOverrides         []string `json:",omitempty"`
 
+	ApexGlobalMinSdkVersionOverride *string `json:",omitempty"`
+
 	EnforceSystemCertificate          *bool    `json:",omitempty"`
 	EnforceSystemCertificateAllowList []string `json:",omitempty"`
 
@@ -420,9 +425,10 @@
 
 	ShippingApiLevel *string `json:",omitempty"`
 
-	BuildBrokenEnforceSyspropOwner     bool `json:",omitempty"`
-	BuildBrokenTrebleSyspropNeverallow bool `json:",omitempty"`
-	BuildBrokenVendorPropertyNamespace bool `json:",omitempty"`
+	BuildBrokenEnforceSyspropOwner     bool     `json:",omitempty"`
+	BuildBrokenTrebleSyspropNeverallow bool     `json:",omitempty"`
+	BuildBrokenVendorPropertyNamespace bool     `json:",omitempty"`
+	BuildBrokenInputDirModules         []string `json:",omitempty"`
 
 	BuildDebugfsRestrictionsEnabled bool `json:",omitempty"`
 
@@ -464,6 +470,7 @@
 		HostArch:                   stringPtr("x86_64"),
 		HostSecondaryArch:          stringPtr("x86"),
 		DeviceName:                 stringPtr("generic_arm64"),
+		DeviceProduct:              stringPtr("aosp_arm-eng"),
 		DeviceArch:                 stringPtr("arm64"),
 		DeviceArchVariant:          stringPtr("armv8-a"),
 		DeviceCpuVariant:           stringPtr("generic"),
diff --git a/androidmk/androidmk/android.go b/androidmk/androidmk/android.go
index 295b0e5..954f8d0 100644
--- a/androidmk/androidmk/android.go
+++ b/androidmk/androidmk/android.go
@@ -17,6 +17,7 @@
 import (
 	"fmt"
 	"sort"
+	"strconv"
 	"strings"
 
 	mkparser "android/soong/androidmk/parser"
@@ -623,6 +624,16 @@
 	return err
 }
 
+// Assigns a given boolean value to a given variable in the result bp file. See
+// setVariable documentation for more information about prefix and name.
+func makeBlueprintBoolAssignment(ctx variableAssignmentContext, prefix, name string, value bool) error {
+	expressionValue, err := stringToBoolValue(strconv.FormatBool(value))
+	if err == nil {
+		err = setVariable(ctx.file, false, prefix, name, expressionValue, true)
+	}
+	return err
+}
+
 // If variable is a literal variable name, return the name, otherwise return ""
 func varLiteralName(variable mkparser.Variable) string {
 	if len(variable.Name.Variables) == 0 {
@@ -647,7 +658,11 @@
 	varname := ""
 	fixed := ""
 	val := ctx.mkvalue
+
 	if len(val.Variables) == 1 && varLiteralName(val.Variables[0]) != "" && len(val.Strings) == 2 && val.Strings[0] == "" {
+		if varLiteralName(val.Variables[0]) == "PRODUCT_OUT" && val.Strings[1] == "/system/priv-app" {
+			return makeBlueprintBoolAssignment(ctx, "", "privileged", true)
+		}
 		fixed = val.Strings[1]
 		varname = val.Variables[0].Name.Strings[0]
 		// TARGET_OUT_OPTIONAL_EXECUTABLES puts the artifact in xbin, which is
diff --git a/androidmk/androidmk/androidmk.go b/androidmk/androidmk/androidmk.go
index b8316a3..aaafdc7 100644
--- a/androidmk/androidmk/androidmk.go
+++ b/androidmk/androidmk/androidmk.go
@@ -411,6 +411,24 @@
 	return exp, nil
 }
 
+// If local is set to true, the variable will be added as part of the
+// variable at file.bpPos. For example, if file.bpPos references a module,
+// then calling this method will set a property on that module. Otherwise,
+// the Variable will be created at the root of the file.
+//
+// prefix should be populated with the top level value to be assigned, and
+// name with a sub-value. If prefix is empty, then name is the top level value.
+// For example, if prefix is "foo" and name is "bar" with a value of "baz", then
+// the following variable will be generated:
+//
+// foo {
+//   bar: "baz"
+// }
+//
+// If prefix is the empty string and name is "foo" with a value of "bar", the
+// following variable will be generated (if it is a property):
+//
+// foo: "bar"
 func setVariable(file *bpFile, plusequals bool, prefix, name string, value bpparser.Expression, local bool) error {
 	if prefix != "" {
 		name = prefix + "." + name
diff --git a/androidmk/androidmk/androidmk_test.go b/androidmk/androidmk/androidmk_test.go
index e8b6f78..afde68b 100644
--- a/androidmk/androidmk/androidmk_test.go
+++ b/androidmk/androidmk/androidmk_test.go
@@ -1675,6 +1675,36 @@
 }
 `,
 	},
+	{
+		desc: "privileged app",
+		in: `
+include $(CLEAR_VARS)
+LOCAL_MODULE := foo
+LOCAL_MODULE_PATH := $(PRODUCT_OUT)/system/priv-app
+include $(BUILD_PACKAGE)
+		`,
+		expected: `
+android_app {
+	name: "foo",
+	privileged: true
+}
+`,
+	},
+	{
+		desc: "convert android_app to android_test when having test_suites",
+		in: `
+include $(CLEAR_VARS)
+LOCAL_MODULE := foo
+LOCAL_COMPATIBILITY_SUITE := bar
+include $(BUILD_PACKAGE)
+		`,
+		expected: `
+android_test {
+	name: "foo",
+	test_suites: ["bar"],
+}
+`,
+	},
 }
 
 func TestEndToEnd(t *testing.T) {
diff --git a/androidmk/parser/make_strings.go b/androidmk/parser/make_strings.go
index aac4c4e..8030326 100644
--- a/androidmk/parser/make_strings.go
+++ b/androidmk/parser/make_strings.go
@@ -24,14 +24,24 @@
 // A MakeString is a string that may contain variable substitutions in it.
 // It can be considered as an alternating list of raw Strings and variable
 // substitutions, where the first and last entries in the list must be raw
-// Strings (possibly empty).  A MakeString that starts with a variable
-// will have an empty first raw string, and a MakeString that ends with a
-// variable will have an empty last raw string.  Two sequential Variables
-// will have an empty raw string between them.
+// Strings (possibly empty). The entirety of the text before the first variable,
+// between two variables, and after the last variable will be considered a
+// single String value. A MakeString that starts with a variable will have an
+// empty first raw string, and a MakeString that ends with a variable will have
+// an empty last raw string.  Two sequential Variables will have an empty raw
+// string between them.
 //
 // The MakeString is stored as two lists, a list of raw Strings and a list
 // of Variables.  The raw string list is always one longer than the variable
 // list.
+//
+// For example, "$(FOO)/bar/baz" will be represented as the
+// following lists:
+//
+// {
+//   Strings: ["", "/bar/baz"],
+//   Variables: ["FOO"]
+// }
 type MakeString struct {
 	StringPos Pos
 	Strings   []string
diff --git a/androidmk/parser/parser.go b/androidmk/parser/parser.go
index d24efc1..fb6be38 100644
--- a/androidmk/parser/parser.go
+++ b/androidmk/parser/parser.go
@@ -222,7 +222,7 @@
 			if d == "ifdef" || d == "ifndef" || d == "ifeq" || d == "ifneq" {
 				d = "el" + d
 				p.ignoreSpaces()
-				expression = p.parseExpression()
+				expression = p.parseExpression('#')
 				expression.TrimRightSpaces()
 			} else {
 				p.errorf("expected ifdef/ifndef/ifeq/ifneq, found %s", d)
@@ -232,7 +232,7 @@
 		expression, endPos = p.parseDefine()
 	default:
 		p.ignoreSpaces()
-		expression = p.parseExpression()
+		expression = p.parseExpression('#')
 	}
 
 	p.nodes = append(p.nodes, &Directive{
@@ -338,9 +338,6 @@
 				value.appendString(`\` + string(p.tok))
 			}
 			p.accept(p.tok)
-		case '#':
-			p.parseComment()
-			break loop
 		case '$':
 			var variable Variable
 			variable = p.parseVariable()
@@ -522,7 +519,7 @@
 	// non-whitespace character after the = until the end of the logical line,
 	// which may included escaped newlines
 	p.accept('=')
-	value := p.parseExpression()
+	value := p.parseExpression('#')
 	value.TrimLeftSpaces()
 	if ident.EndsWith('+') && t == "=" {
 		ident.TrimRightOne()
diff --git a/androidmk/parser/parser_test.go b/androidmk/parser/parser_test.go
index f562c29..9efebf8 100644
--- a/androidmk/parser/parser_test.go
+++ b/androidmk/parser/parser_test.go
@@ -34,6 +34,56 @@
 			},
 		},
 	},
+	{
+		name: "Simple warning",
+		in:   `$(warning A warning)`,
+		out: []Node{
+			&Variable{
+				Name: SimpleMakeString("warning A warning", NoPos),
+			},
+		},
+	},
+	{
+		name: "Warning with #",
+		in:   `$(warning # A warning)`,
+		out: []Node{
+			&Variable{
+				Name: SimpleMakeString("warning # A warning", NoPos),
+			},
+		},
+	},
+	{
+		name: "Findstring with #",
+		in:   `$(findstring x,x a #)`,
+		out: []Node{
+			&Variable{
+				Name: SimpleMakeString("findstring x,x a #", NoPos),
+			},
+		},
+	},
+	{
+		name: "If statement",
+		in: `ifeq (a,b) # comment
+endif`,
+		out: []Node{
+			&Directive{
+				NamePos: NoPos,
+				Name:    "ifeq",
+				Args:    SimpleMakeString("(a,b) ", NoPos),
+				EndPos:  NoPos,
+			},
+			&Comment{
+				CommentPos: NoPos,
+				Comment:    " comment",
+			},
+			&Directive{
+				NamePos: NoPos,
+				Name:    "endif",
+				Args:    SimpleMakeString("", NoPos),
+				EndPos:  NoPos,
+			},
+		},
+	},
 }
 
 func TestParse(t *testing.T) {
diff --git a/apex/Android.bp b/apex/Android.bp
index b9b5428..41224ec 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -14,6 +14,7 @@
         "soong-cc",
         "soong-filesystem",
         "soong-java",
+        "soong-provenance",
         "soong-python",
         "soong-rust",
         "soong-sh",
diff --git a/apex/androidmk.go b/apex/androidmk.go
index 8cca137..e094a12 100644
--- a/apex/androidmk.go
+++ b/apex/androidmk.go
@@ -309,7 +309,14 @@
 	return moduleNames
 }
 
-func (a *apexBundle) writeRequiredModules(w io.Writer) {
+func (a *apexBundle) writeRequiredModules(w io.Writer, moduleNames []string) {
+	if len(moduleNames) > 0 {
+		fmt.Fprintln(w, "LOCAL_REQUIRED_MODULES +=", strings.Join(moduleNames, " "))
+	}
+	if len(a.requiredDeps) > 0 {
+		fmt.Fprintln(w, "LOCAL_REQUIRED_MODULES +=", strings.Join(a.requiredDeps, " "))
+	}
+
 	var required []string
 	var targetRequired []string
 	var hostRequired []string
@@ -349,10 +356,7 @@
 				fmt.Fprintln(w, "LOCAL_PATH :=", moduleDir)
 				fmt.Fprintln(w, "LOCAL_MODULE :=", name+a.suffix)
 				data.Entries.WriteLicenseVariables(w)
-				if len(moduleNames) > 0 {
-					fmt.Fprintln(w, "LOCAL_REQUIRED_MODULES :=", strings.Join(moduleNames, " "))
-				}
-				a.writeRequiredModules(w)
+				a.writeRequiredModules(w, moduleNames)
 				fmt.Fprintln(w, "include $(BUILD_PHONY_PACKAGE)")
 
 			} else {
@@ -369,8 +373,10 @@
 				}
 				fmt.Fprintln(w, "LOCAL_MODULE_STEM :=", name+stemSuffix)
 				fmt.Fprintln(w, "LOCAL_UNINSTALLABLE_MODULE :=", !a.installable())
-				fmt.Fprintln(w, "LOCAL_SOONG_INSTALLED_MODULE :=", a.installedFile.String())
-				fmt.Fprintln(w, "LOCAL_SOONG_INSTALL_PAIRS :=", a.outputFile.String()+":"+a.installedFile.String())
+				if a.installable() {
+					fmt.Fprintln(w, "LOCAL_SOONG_INSTALLED_MODULE :=", a.installedFile.String())
+					fmt.Fprintln(w, "LOCAL_SOONG_INSTALL_PAIRS :=", a.outputFile.String()+":"+a.installedFile.String())
+				}
 
 				// Because apex writes .mk with Custom(), we need to write manually some common properties
 				// which are available via data.Entries
@@ -388,17 +394,7 @@
 				if len(a.overridableProperties.Overrides) > 0 {
 					fmt.Fprintln(w, "LOCAL_OVERRIDES_MODULES :=", strings.Join(a.overridableProperties.Overrides, " "))
 				}
-				if len(moduleNames) > 0 {
-					fmt.Fprintln(w, "LOCAL_REQUIRED_MODULES +=", strings.Join(moduleNames, " "))
-				}
-				if len(a.requiredDeps) > 0 {
-					fmt.Fprintln(w, "LOCAL_REQUIRED_MODULES +=", strings.Join(a.requiredDeps, " "))
-				}
-				a.writeRequiredModules(w)
-
-				if a.mergedNotices.Merged.Valid() {
-					fmt.Fprintln(w, "LOCAL_NOTICE_FILE :=", a.mergedNotices.Merged.Path().String())
-				}
+				a.writeRequiredModules(w, moduleNames)
 
 				fmt.Fprintln(w, "include $(BUILD_PREBUILT)")
 
diff --git a/apex/apex.go b/apex/apex.go
index d12a786..76af1b8 100644
--- a/apex/apex.go
+++ b/apex/apex.go
@@ -76,6 +76,8 @@
 	ctx.BottomUp("apex", apexMutator).Parallel()
 	ctx.BottomUp("apex_directly_in_any", apexDirectlyInAnyMutator).Parallel()
 	ctx.BottomUp("apex_flattened", apexFlattenedMutator).Parallel()
+	// Register after apex_info mutator so that it can use ApexVariationName
+	ctx.TopDown("apex_strict_updatability_lint", apexStrictUpdatibilityLintMutator).Parallel()
 }
 
 type apexBundleProperties struct {
@@ -108,15 +110,6 @@
 
 	Multilib apexMultilibProperties
 
-	// List of bootclasspath fragments that are embedded inside this APEX bundle.
-	Bootclasspath_fragments []string
-
-	// List of systemserverclasspath fragments that are embedded inside this APEX bundle.
-	Systemserverclasspath_fragments []string
-
-	// List of java libraries that are embedded inside this APEX bundle.
-	Java_libs []string
-
 	// List of sh binaries that are embedded inside this APEX bundle.
 	Sh_binaries []string
 
@@ -316,6 +309,15 @@
 	// List of BPF programs inside this APEX bundle.
 	Bpfs []string
 
+	// List of bootclasspath fragments that are embedded inside this APEX bundle.
+	Bootclasspath_fragments []string
+
+	// List of systemserverclasspath fragments that are embedded inside this APEX bundle.
+	Systemserverclasspath_fragments []string
+
+	// List of java libraries that are embedded inside this APEX bundle.
+	Java_libs []string
+
 	// Names of modules to be overridden. Listed modules can only be other binaries (in Make or
 	// Soong). This does not completely prevent installation of the overridden binaries, but if
 	// both binaries would be installed by default (in PRODUCT_PACKAGES) the other binary will
@@ -412,12 +414,16 @@
 	// Processed file_contexts files
 	fileContexts android.WritablePath
 
-	// Struct holding the merged notice file paths in different formats
-	mergedNotices android.NoticeOutputs
+	// Path to notice file in html.gz format.
+	htmlGzNotice android.WritablePath
 
 	// The built APEX file. This is the main product.
+	// Could be .apex or .capex
 	outputFile android.WritablePath
 
+	// The built uncompressed .apex file.
+	outputApexFile android.WritablePath
+
 	// The built APEX file in app bundle format. This file is not directly installed to the
 	// device. For an APEX, multiple app bundles are created each of which is for a specific ABI
 	// like arm, arm64, x86, etc. Then they are processed again (outside of the Android build
@@ -481,11 +487,10 @@
 // for each of the files in case when the APEX is flattened.
 type apexFile struct {
 	// buildFile is put in the installDir inside the APEX.
-	builtFile   android.Path
-	noticeFiles android.Paths
-	installDir  string
-	customStem  string
-	symlinks    []string // additional symlinks
+	builtFile  android.Path
+	installDir string
+	customStem string
+	symlinks   []string // additional symlinks
 
 	// Info for Android.mk Module name of `module` in AndroidMk. Note the generated AndroidMk
 	// module for apexFile is named something like <AndroidMk module name>.<apex name>[<apex
@@ -522,7 +527,6 @@
 		module:              module,
 	}
 	if module != nil {
-		ret.noticeFiles = module.NoticeFiles()
 		ret.moduleDir = ctx.OtherModuleDir(module)
 		ret.requiredModuleNames = module.RequiredModuleNames()
 		ret.targetRequiredModuleNames = module.TargetRequiredModuleNames()
@@ -783,9 +787,6 @@
 
 	// Common-arch dependencies come next
 	commonVariation := ctx.Config().AndroidCommonTarget.Variations()
-	ctx.AddFarVariationDependencies(commonVariation, bcpfTag, a.properties.Bootclasspath_fragments...)
-	ctx.AddFarVariationDependencies(commonVariation, sscpfTag, a.properties.Systemserverclasspath_fragments...)
-	ctx.AddFarVariationDependencies(commonVariation, javaLibTag, a.properties.Java_libs...)
 	ctx.AddFarVariationDependencies(commonVariation, fsTag, a.properties.Filesystems...)
 	ctx.AddFarVariationDependencies(commonVariation, compatConfigTag, a.properties.Compat_configs...)
 
@@ -813,6 +814,9 @@
 	ctx.AddFarVariationDependencies(commonVariation, androidAppTag, a.overridableProperties.Apps...)
 	ctx.AddFarVariationDependencies(commonVariation, bpfTag, a.overridableProperties.Bpfs...)
 	ctx.AddFarVariationDependencies(commonVariation, rroTag, a.overridableProperties.Rros...)
+	ctx.AddFarVariationDependencies(commonVariation, bcpfTag, a.overridableProperties.Bootclasspath_fragments...)
+	ctx.AddFarVariationDependencies(commonVariation, sscpfTag, a.overridableProperties.Systemserverclasspath_fragments...)
+	ctx.AddFarVariationDependencies(commonVariation, javaLibTag, a.overridableProperties.Java_libs...)
 	if prebuilts := a.overridableProperties.Prebuilts; len(prebuilts) > 0 {
 		// For prebuilt_etc, use the first variant (64 on 64/32bit device, 32 on 32bit device)
 		// regardless of the TARGET_PREFER_* setting. See b/144532908
@@ -1001,6 +1005,66 @@
 	}
 }
 
+// apexStrictUpdatibilityLintMutator propagates strict_updatability_linting to transitive deps of a mainline module
+// This check is enforced for updatable modules
+func apexStrictUpdatibilityLintMutator(mctx android.TopDownMutatorContext) {
+	if !mctx.Module().Enabled() {
+		return
+	}
+	if apex, ok := mctx.Module().(*apexBundle); ok && apex.checkStrictUpdatabilityLinting() {
+		mctx.WalkDeps(func(child, parent android.Module) bool {
+			// b/208656169 Do not propagate strict updatability linting to libcore/
+			// These libs are available on the classpath during compilation
+			// These libs are transitive deps of the sdk. See java/sdk.go:decodeSdkDep
+			// Only skip libraries defined in libcore root, not subdirectories
+			if mctx.OtherModuleDir(child) == "libcore" {
+				// Do not traverse transitive deps of libcore/ libs
+				return false
+			}
+			if android.InList(child.Name(), skipLintJavalibAllowlist) {
+				return false
+			}
+			if lintable, ok := child.(java.LintDepSetsIntf); ok {
+				lintable.SetStrictUpdatabilityLinting(true)
+			}
+			// visit transitive deps
+			return true
+		})
+	}
+}
+
+// TODO: b/215736885 Whittle the denylist
+// Transitive deps of certain mainline modules have baselined NewApi errors
+// Skip these mainline modules for now
+var (
+	skipStrictUpdatabilityLintAllowlist = []string{
+		"com.android.art",
+		"com.android.art.debug",
+		"com.android.conscrypt",
+		"com.android.media",
+		// test apexes
+		"test_com.android.art",
+		"test_com.android.conscrypt",
+		"test_com.android.media",
+		"test_jitzygote_com.android.art",
+	}
+
+	// TODO: b/215736885 Remove this list
+	skipLintJavalibAllowlist = []string{
+		"conscrypt.module.platform.api.stubs",
+		"conscrypt.module.public.api.stubs",
+		"conscrypt.module.public.api.stubs.system",
+		"conscrypt.module.public.api.stubs.module_lib",
+		"framework-media.stubs",
+		"framework-media.stubs.system",
+		"framework-media.stubs.module_lib",
+	}
+)
+
+func (a *apexBundle) checkStrictUpdatabilityLinting() bool {
+	return a.Updatable() && !android.InList(a.ApexVariationName(), skipStrictUpdatabilityLintAllowlist)
+}
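
The mutator above walks the payload dependency graph of an updatable APEX and switches on strict updatability linting for every lintable Java dependency, pruning the walk at libcore's root directory and at the allowlisted stub libraries. The following is a minimal, standalone Go sketch of just the prune-vs-descend decision; the helper names and the trimmed-down allowlist are illustrative only and are not the real Soong mutator API.

package main

import "fmt"

// Hypothetical subset of skipLintJavalibAllowlist, for illustration only.
var skipJavalibs = []string{"conscrypt.module.public.api.stubs"}

func inList(s string, list []string) bool {
	for _, v := range list {
		if v == s {
			return true
		}
	}
	return false
}

// shouldDescend mirrors the return value of the WalkDeps visitor above:
// false prunes the child's subtree, true keeps walking (and, in the real
// mutator, marks a lintable child for strict updatability linting).
func shouldDescend(childDir, childName string) bool {
	if childDir == "libcore" { // only the libcore root, not its subdirectories
		return false
	}
	if inList(childName, skipJavalibs) {
		return false
	}
	return true
}

func main() {
	fmt.Println(shouldDescend("libcore", "core-oj"))           // false: subtree pruned
	fmt.Println(shouldDescend("libcore/ojluni", "core-oj"))    // true: subdirectories are still walked
	fmt.Println(shouldDescend("frameworks/base", "framework")) // true
}
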
+
 // apexUniqueVariationsMutator checks if any dependencies use unique apex variations. If so, use
 // unique apex variations for this module. See android/apex.go for more about unique apex variant.
 // TODO(jiyong): move this to android/apex.go?
@@ -1284,6 +1348,12 @@
 	case "", android.DefaultDistTag:
 		// This is the default dist path.
 		return android.Paths{a.outputFile}, nil
+	case imageApexSuffix:
+		// The uncompressed .apex file
+		if a.outputApexFile != nil {
+			return android.Paths{a.outputApexFile}, nil
+		}
+		fallthrough
 	default:
 		return nil, fmt.Errorf("unsupported module reference tag %q", tag)
 	}
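
With the new case, a module reference such as ":myapex{.apex}" resolves to the uncompressed .apex output even when compression makes a .capex the default dist artifact (TestApexOutputFileProducer later in this change exercises exactly that). Below is a minimal standalone sketch of the tag dispatch with plain strings instead of android.Paths, assuming imageApexSuffix is ".apex"; the function and parameter names are illustrative only.

package main

import (
	"errors"
	"fmt"
)

// outputFor mimics the switch above: the empty tag returns the default
// output, ".apex" prefers the uncompressed file, and anything else (or a
// missing uncompressed file) is an error via fallthrough.
func outputFor(tag, outputFile, outputApexFile string) (string, error) {
	switch tag {
	case "":
		return outputFile, nil
	case ".apex":
		if outputApexFile != "" {
			return outputApexFile, nil
		}
		fallthrough
	default:
		return "", errors.New("unsupported module reference tag " + tag)
	}
}

func main() {
	fmt.Println(outputFor("", "myapex.capex", "myapex.apex"))      // myapex.capex <nil>
	fmt.Println(outputFor(".apex", "myapex.capex", "myapex.apex")) // myapex.apex <nil>
	fmt.Println(outputFor(".img", "myapex.capex", "myapex.apex"))  // error: unsupported tag
}
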
@@ -1405,7 +1475,7 @@
 		for _, target := range ctx.MultiTargets() {
 			if target.Arch.ArchType.Multilib == "lib64" {
 				addDependenciesForNativeModules(ctx, ApexNativeDependencies{
-					Native_shared_libs: []string{"libclang_rt.hwasan-aarch64-android"},
+					Native_shared_libs: []string{"libclang_rt.hwasan"},
 					Tests:              nil,
 					Jni_libs:           nil,
 					Binaries:           nil,
@@ -1586,13 +1656,20 @@
 var _ androidApp = (*java.AndroidApp)(nil)
 var _ androidApp = (*java.AndroidAppImport)(nil)
 
+const APEX_VERSION_PLACEHOLDER = "__APEX_VERSION_PLACEHOLDER__"
+
 func apexFileForAndroidApp(ctx android.BaseModuleContext, aapp androidApp) apexFile {
 	appDir := "app"
 	if aapp.Privileged() {
 		appDir = "priv-app"
 	}
-	dirInApex := filepath.Join(appDir, aapp.InstallApkName())
+
+	// TODO(b/224589412, b/226559955): Ensure that the subdirname is suffixed
+	// so that PackageManager correctly invalidates the existing installed apk
+	// in favour of the new APK-in-APEX.  See bugs for more information.
+	dirInApex := filepath.Join(appDir, aapp.InstallApkName()+"@"+APEX_VERSION_PLACEHOLDER)
 	fileToCopy := aapp.OutputFile()
+
 	af := newApexFile(ctx, fileToCopy, aapp.BaseModuleName(), dirInApex, app, aapp)
 	af.jacocoReportClassesFile = aapp.JacocoReportClassesFile()
 	af.lintDepSets = aapp.LintDepSets()
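
The app is therefore staged under a versioned subdirectory inside the payload, e.g. app/AppFoo@__APEX_VERSION_PLACEHOLDER__/AppFoo.apk, which matches the paths asserted in apex_test.go later in this change; presumably apexer substitutes the placeholder with the real APEX version at packaging time (see the --apex_version_placeholder flag added to the apexer rule in apex/builder.go). A small standalone sketch of the directory naming, with a hypothetical helper name:

package main

import (
	"fmt"
	"path/filepath"
)

const apexVersionPlaceholder = "__APEX_VERSION_PLACEHOLDER__"

// dirInApexFor builds the same payload-relative app directory as
// apexFileForAndroidApp above.
func dirInApexFor(privileged bool, apkName string) string {
	appDir := "app"
	if privileged {
		appDir = "priv-app"
	}
	return filepath.Join(appDir, apkName+"@"+apexVersionPlaceholder)
}

func main() {
	fmt.Println(dirInApexFor(false, "AppFoo"))    // app/AppFoo@__APEX_VERSION_PLACEHOLDER__
	fmt.Println(dirInApexFor(true, "AppFooPriv")) // priv-app/AppFooPriv@__APEX_VERSION_PLACEHOLDER__
}
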
@@ -1745,6 +1822,7 @@
 					fi := apexFileForRustLibrary(ctx, r)
 					fi.isJniLib = isJniLib
 					filesInfo = append(filesInfo, fi)
+					return true // track transitive dependencies
 				} else {
 					propertyName := "native_shared_libs"
 					if isJniLib {
@@ -1824,8 +1902,12 @@
 					if ap.Privileged() {
 						appDir = "priv-app"
 					}
-					af := newApexFile(ctx, ap.OutputFile(), ap.BaseModuleName(),
-						filepath.Join(appDir, ap.BaseModuleName()), appSet, ap)
+					// TODO(b/224589412, b/226559955): Ensure that the dirname is
+					// suffixed so that PackageManager correctly invalidates the
+					// existing installed apk in favour of the new APK-in-APEX.
+					// See bugs for more information.
+					appDirName := filepath.Join(appDir, ap.BaseModuleName()+"@"+APEX_VERSION_PLACEHOLDER)
+					af := newApexFile(ctx, ap.OutputFile(), ap.BaseModuleName(), appDirName, appSet, ap)
 					af.certificate = java.PresignedCertificate
 					filesInfo = append(filesInfo, af)
 				} else {
@@ -2379,20 +2461,43 @@
 	android.CheckMinSdkVersion(ctx, minSdkVersion, a.WalkPayloadDeps)
 }
 
+// Returns apex's min_sdk_version string value, honoring overrides
+func (a *apexBundle) minSdkVersionValue(ctx android.EarlyModuleContext) string {
+	// Only override the minSdkVersion value on APEXes that already specify
+	// a min_sdk_version (it's optional for non-updatable apexes) and whose
+	// min_sdk_version value is lower than the override.
+	overrideMinSdkValue := ctx.DeviceConfig().ApexGlobalMinSdkVersionOverride()
+	overrideApiLevel := minSdkVersionFromValue(ctx, overrideMinSdkValue)
+	originalMinApiLevel := minSdkVersionFromValue(ctx, proptools.String(a.properties.Min_sdk_version))
+	isMinSdkSet := a.properties.Min_sdk_version != nil
+	isOverrideValueHigher := overrideApiLevel.CompareTo(originalMinApiLevel) > 0
+	if overrideMinSdkValue != "" && isMinSdkSet && isOverrideValueHigher {
+		return overrideMinSdkValue
+	}
+
+	return proptools.String(a.properties.Min_sdk_version)
+}
+
+// Returns apex's min_sdk_version SdkSpec, honoring overrides
 func (a *apexBundle) MinSdkVersion(ctx android.EarlyModuleContext) android.SdkSpec {
 	return android.SdkSpec{
 		Kind:     android.SdkNone,
 		ApiLevel: a.minSdkVersion(ctx),
-		Raw:      String(a.properties.Min_sdk_version),
+		Raw:      a.minSdkVersionValue(ctx),
 	}
 }
 
+// Returns apex's min_sdk_version ApiLevel, honoring overrides
 func (a *apexBundle) minSdkVersion(ctx android.EarlyModuleContext) android.ApiLevel {
-	ver := proptools.String(a.properties.Min_sdk_version)
-	if ver == "" {
+	return minSdkVersionFromValue(ctx, a.minSdkVersionValue(ctx))
+}
+
+// Construct ApiLevel object from min_sdk_version string value
+func minSdkVersionFromValue(ctx android.EarlyModuleContext, value string) android.ApiLevel {
+	if value == "" {
 		return android.NoneApiLevel
 	}
-	apiLevel, err := android.ApiLevelFromUser(ctx, ver)
+	apiLevel, err := android.ApiLevelFromUser(ctx, value)
 	if err != nil {
 		ctx.PropertyErrorf("min_sdk_version", "%s", err.Error())
 		return android.NoneApiLevel
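
Taken together, minSdkVersionValue and minSdkVersionFromValue mean the product-wide ApexGlobalMinSdkVersionOverride can only raise an APEX's effective min_sdk_version: it never lowers it and never applies when the APEX leaves min_sdk_version unset. Below is a minimal standalone sketch of that decision using plain ints in place of android.ApiLevel; the function name is hypothetical.

package main

import "fmt"

// effectiveMinSdk mirrors the override rule above: honor the override only
// when the module sets min_sdk_version and the override is strictly higher.
func effectiveMinSdk(moduleMinSdk, override *int) *int {
	if moduleMinSdk == nil {
		return nil // min_sdk_version not set; the override does not apply
	}
	if override != nil && *override > *moduleMinSdk {
		return override
	}
	return moduleMinSdk
}

func main() {
	v29, v31 := 29, 31
	fmt.Println(*effectiveMinSdk(&v29, &v31)) // 31: raised by the override
	fmt.Println(*effectiveMinSdk(&v31, &v29)) // 31: lowering is a no-op
	fmt.Println(effectiveMinSdk(nil, &v31))   // <nil>: unset stays unset
}
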
@@ -2447,7 +2552,7 @@
 // checkUpdatable enforces APEX and its transitive dep properties to have desired values for updatable APEXes.
 func (a *apexBundle) checkUpdatable(ctx android.ModuleContext) {
 	if a.Updatable() {
-		if String(a.properties.Min_sdk_version) == "" {
+		if a.minSdkVersionValue(ctx) == "" {
 			ctx.PropertyErrorf("updatable", "updatable APEXes should set min_sdk_version as well")
 		}
 		if a.UsePlatformApis() {
@@ -2584,9 +2689,9 @@
 
 // Collect information for opening IDE project files in java/jdeps.go.
 func (a *apexBundle) IDEInfo(dpInfo *android.IdeInfo) {
-	dpInfo.Deps = append(dpInfo.Deps, a.properties.Java_libs...)
-	dpInfo.Deps = append(dpInfo.Deps, a.properties.Bootclasspath_fragments...)
-	dpInfo.Deps = append(dpInfo.Deps, a.properties.Systemserverclasspath_fragments...)
+	dpInfo.Deps = append(dpInfo.Deps, a.overridableProperties.Java_libs...)
+	dpInfo.Deps = append(dpInfo.Deps, a.overridableProperties.Bootclasspath_fragments...)
+	dpInfo.Deps = append(dpInfo.Deps, a.overridableProperties.Systemserverclasspath_fragments...)
 	dpInfo.Paths = append(dpInfo.Paths, a.modulePaths...)
 }
 
@@ -3207,21 +3312,19 @@
 }
 
 func init() {
-	android.AddNeverAllowRules(createApexPermittedPackagesRules(qModulesPackages())...)
-	android.AddNeverAllowRules(createApexPermittedPackagesRules(rModulesPackages())...)
+	android.AddNeverAllowRules(createBcpPermittedPackagesRules(qBcpPackages())...)
+	android.AddNeverAllowRules(createBcpPermittedPackagesRules(rBcpPackages())...)
 }
 
-func createApexPermittedPackagesRules(modules_packages map[string][]string) []android.Rule {
-	rules := make([]android.Rule, 0, len(modules_packages))
-	for module_name, module_packages := range modules_packages {
+func createBcpPermittedPackagesRules(bcpPermittedPackages map[string][]string) []android.Rule {
+	rules := make([]android.Rule, 0, len(bcpPermittedPackages))
+	for jar, permittedPackages := range bcpPermittedPackages {
 		permittedPackagesRule := android.NeverAllow().
-			BootclasspathJar().
-			With("apex_available", module_name).
-			WithMatcher("permitted_packages", android.NotInList(module_packages)).
-			WithMatcher("min_sdk_version", android.LessThanSdkVersion("Tiramisu")).
-			Because("jars that are part of the " + module_name +
-				" module may only use these package prefixes: " + strings.Join(module_packages, ",") +
-				" with min_sdk < T. Please consider the following alternatives:\n" +
+			With("name", jar).
+			WithMatcher("permitted_packages", android.NotInList(permittedPackages)).
+			Because(jar +
+				" bootjar may only use these package prefixes: " + strings.Join(permittedPackages, ",") +
+				". Please consider the following alternatives:\n" +
 				"    1. If the offending code is from a statically linked library, consider " +
 				"removing that dependency and using an alternative already in the " +
 				"bootclasspath, or perhaps a shared library." +
@@ -3229,55 +3332,56 @@
 				"    3. Jarjar the offending code. Please be mindful of the potential system " +
 				"health implications of bundling that code, particularly if the offending jar " +
 				"is part of the bootclasspath.")
+
 		rules = append(rules, permittedPackagesRule)
 	}
 	return rules
 }
 
-// DO NOT EDIT! These are the package prefixes that are exempted from being AOT'ed by ART on Q/R/S.
+// DO NOT EDIT! These are the package prefixes that are exempted from being AOT'ed by ART.
 // Adding code to the bootclasspath in new packages will cause issues on module update.
-func qModulesPackages() map[string][]string {
+func qBcpPackages() map[string][]string {
 	return map[string][]string{
-		"com.android.conscrypt": []string{
+		"conscrypt": []string{
 			"android.net.ssl",
 			"com.android.org.conscrypt",
 		},
-		"com.android.media": []string{
+		"updatable-media": []string{
 			"android.media",
 		},
 	}
 }
 
-// DO NOT EDIT! These are the package prefixes that are exempted from being AOT'ed by ART on R/S.
+// DO NOT EDIT! These are the package prefixes that are exempted from being AOT'ed by ART.
 // Adding code to the bootclasspath in new packages will cause issues on module update.
-func rModulesPackages() map[string][]string {
+func rBcpPackages() map[string][]string {
 	return map[string][]string{
-		"com.android.mediaprovider": []string{
+		"framework-mediaprovider": []string{
 			"android.provider",
 		},
-		"com.android.permission": []string{
+		"framework-permission": []string{
 			"android.permission",
 			"android.app.role",
 			"com.android.permission",
 			"com.android.role",
 		},
-		"com.android.sdkext": []string{
+		"framework-sdkextensions": []string{
 			"android.os.ext",
 		},
-		"com.android.os.statsd": []string{
+		"framework-statsd": []string{
 			"android.app",
 			"android.os",
 			"android.util",
 			"com.android.internal.statsd",
 			"com.android.server.stats",
 		},
-		"com.android.wifi": []string{
+		"framework-wifi": []string{
 			"com.android.server.wifi",
 			"com.android.wifi.x",
 			"android.hardware.wifi",
 			"android.net.wifi",
 		},
-		"com.android.tethering": []string{
+		"framework-tethering": []string{
 			"android.net",
 		},
 	}
@@ -3328,6 +3432,8 @@
 		fileContextsLabelAttribute.SetValue(android.BazelLabelForModuleDepSingle(ctx, *a.properties.File_contexts))
 	}
 
+	// TODO(b/219503907): this would need to be set to a.minSdkVersionValue(ctx), but
+	// given it's coming via config, we probably don't want to put it in here.
 	var minSdkVersion *string
 	if a.properties.Min_sdk_version != nil {
 		minSdkVersion = a.properties.Min_sdk_version
diff --git a/apex/apex_test.go b/apex/apex_test.go
index 6d77b06..4a52115 100644
--- a/apex/apex_test.go
+++ b/apex/apex_test.go
@@ -113,6 +113,12 @@
 	})
 }
 
+func withApexGlobalMinSdkVersionOverride(minSdkOverride *string) android.FixturePreparer {
+	return android.FixtureModifyProductVariables(func(variables android.FixtureProductVariables) {
+		variables.ApexGlobalMinSdkVersionOverride = minSdkOverride
+	})
+}
+
 var withBinder32bit = android.FixtureModifyProductVariables(
 	func(variables android.FixtureProductVariables) {
 		variables.Binder32bit = proptools.BoolPtr(true)
@@ -168,44 +174,42 @@
 		"system/sepolicy/apex/otherapex-file_contexts":                nil,
 		"system/sepolicy/apex/com.android.vndk-file_contexts":         nil,
 		"system/sepolicy/apex/com.android.vndk.current-file_contexts": nil,
-		"mylib.cpp":                                  nil,
-		"mytest.cpp":                                 nil,
-		"mytest1.cpp":                                nil,
-		"mytest2.cpp":                                nil,
-		"mytest3.cpp":                                nil,
-		"myprebuilt":                                 nil,
-		"my_include":                                 nil,
-		"foo/bar/MyClass.java":                       nil,
-		"prebuilt.jar":                               nil,
-		"prebuilt.so":                                nil,
-		"vendor/foo/devkeys/test.x509.pem":           nil,
-		"vendor/foo/devkeys/test.pk8":                nil,
-		"testkey.x509.pem":                           nil,
-		"testkey.pk8":                                nil,
-		"testkey.override.x509.pem":                  nil,
-		"testkey.override.pk8":                       nil,
-		"vendor/foo/devkeys/testkey.avbpubkey":       nil,
-		"vendor/foo/devkeys/testkey.pem":             nil,
-		"NOTICE":                                     nil,
-		"custom_notice":                              nil,
-		"custom_notice_for_static_lib":               nil,
-		"testkey2.avbpubkey":                         nil,
-		"testkey2.pem":                               nil,
-		"myapex-arm64.apex":                          nil,
-		"myapex-arm.apex":                            nil,
-		"myapex.apks":                                nil,
-		"frameworks/base/api/current.txt":            nil,
-		"framework/aidl/a.aidl":                      nil,
-		"build/make/core/proguard.flags":             nil,
-		"build/make/core/proguard_basic_keeps.flags": nil,
-		"dummy.txt":                                  nil,
-		"baz":                                        nil,
-		"bar/baz":                                    nil,
-		"testdata/baz":                               nil,
-		"AppSet.apks":                                nil,
-		"foo.rs":                                     nil,
-		"libfoo.jar":                                 nil,
-		"libbar.jar":                                 nil,
+		"mylib.cpp":                            nil,
+		"mytest.cpp":                           nil,
+		"mytest1.cpp":                          nil,
+		"mytest2.cpp":                          nil,
+		"mytest3.cpp":                          nil,
+		"myprebuilt":                           nil,
+		"my_include":                           nil,
+		"foo/bar/MyClass.java":                 nil,
+		"prebuilt.jar":                         nil,
+		"prebuilt.so":                          nil,
+		"vendor/foo/devkeys/test.x509.pem":     nil,
+		"vendor/foo/devkeys/test.pk8":          nil,
+		"testkey.x509.pem":                     nil,
+		"testkey.pk8":                          nil,
+		"testkey.override.x509.pem":            nil,
+		"testkey.override.pk8":                 nil,
+		"vendor/foo/devkeys/testkey.avbpubkey": nil,
+		"vendor/foo/devkeys/testkey.pem":       nil,
+		"NOTICE":                               nil,
+		"custom_notice":                        nil,
+		"custom_notice_for_static_lib":         nil,
+		"testkey2.avbpubkey":                   nil,
+		"testkey2.pem":                         nil,
+		"myapex-arm64.apex":                    nil,
+		"myapex-arm.apex":                      nil,
+		"myapex.apks":                          nil,
+		"frameworks/base/api/current.txt":      nil,
+		"framework/aidl/a.aidl":                nil,
+		"dummy.txt":                            nil,
+		"baz":                                  nil,
+		"bar/baz":                              nil,
+		"testdata/baz":                         nil,
+		"AppSet.apks":                          nil,
+		"foo.rs":                               nil,
+		"libfoo.jar":                           nil,
+		"libbar.jar":                           nil,
 	},
 	),
 
@@ -591,15 +595,6 @@
 		t.Errorf("Could not find all expected symlinks! foo: %t, foo_link_64: %t. Command was %s", found_foo, found_foo_link_64, copyCmds)
 	}
 
-	mergeNoticesRule := ctx.ModuleForTests("myapex", "android_common_myapex_image").Rule("mergeNoticesRule")
-	noticeInputs := mergeNoticesRule.Inputs.Strings()
-	if len(noticeInputs) != 3 {
-		t.Errorf("number of input notice files: expected = 3, actual = %q", len(noticeInputs))
-	}
-	ensureListContains(t, noticeInputs, "NOTICE")
-	ensureListContains(t, noticeInputs, "custom_notice")
-	ensureListContains(t, noticeInputs, "custom_notice_for_static_lib")
-
 	fullDepsInfo := strings.Split(ctx.ModuleForTests("myapex", "android_common_myapex_image").Output("depsinfo/fulllist.txt").Args["content"], "\\n")
 	ensureListContains(t, fullDepsInfo, "  myjar(minSdkVersion:(no version)) <- myapex")
 	ensureListContains(t, fullDepsInfo, "  mylib2(minSdkVersion:(no version)) <- mylib")
@@ -687,7 +682,7 @@
 		"etc/myetc",
 		"javalib/myjar.jar",
 		"lib64/mylib.so",
-		"app/AppFoo/AppFoo.apk",
+		"app/AppFoo@__APEX_VERSION_PLACEHOLDER__/AppFoo.apk",
 		"overlay/blue/rro.apk",
 		"etc/bpf/bpf.o",
 		"etc/bpf/bpf2.o",
@@ -971,6 +966,9 @@
 	rustDeps := ctx.ModuleForTests("foo.rust", "android_arm64_armv8-a_apex10000").Rule("rustc").Args["linkFlags"]
 	ensureContains(t, rustDeps, "libfoo.shared_from_rust/android_arm64_armv8-a_shared_current/libfoo.shared_from_rust.so")
 	ensureNotContains(t, rustDeps, "libfoo.shared_from_rust/android_arm64_armv8-a_shared/libfoo.shared_from_rust.so")
+
+	apexManifestRule := ctx.ModuleForTests("myapex", "android_common_myapex_image").Rule("apexManifestRule")
+	ensureListContains(t, names(apexManifestRule.Args["requireNativeLibs"]), "libfoo.shared_from_rust.so")
 }
 
 func TestApexCanUsePrivateApis(t *testing.T) {
@@ -1038,10 +1036,10 @@
 
 	// Ensure that we are using non-stub variants of mylib2 and libfoo.shared_from_rust (because
 	// of the platform_apis: true)
-	mylibLdFlags := ctx.ModuleForTests("mylib", "android_arm64_armv8-a_shared_apex10000_private").Rule("ld").Args["libFlags"]
+	mylibLdFlags := ctx.ModuleForTests("mylib", "android_arm64_armv8-a_shared_apex10000").Rule("ld").Args["libFlags"]
 	ensureNotContains(t, mylibLdFlags, "mylib2/android_arm64_armv8-a_shared_current/mylib2.so")
 	ensureContains(t, mylibLdFlags, "mylib2/android_arm64_armv8-a_shared/mylib2.so")
-	rustDeps := ctx.ModuleForTests("foo.rust", "android_arm64_armv8-a_apex10000_private").Rule("rustc").Args["linkFlags"]
+	rustDeps := ctx.ModuleForTests("foo.rust", "android_arm64_armv8-a_apex10000").Rule("rustc").Args["linkFlags"]
 	ensureNotContains(t, rustDeps, "libfoo.shared_from_rust/android_arm64_armv8-a_shared_current/libfoo.shared_from_rust.so")
 	ensureContains(t, rustDeps, "libfoo.shared_from_rust/android_arm64_armv8-a_shared/libfoo.shared_from_rust.so")
 }
@@ -1412,13 +1410,14 @@
 		}
 
 		cc_prebuilt_library_shared {
-			name: "libclang_rt.hwasan-aarch64-android",
+			name: "libclang_rt.hwasan",
 			no_libcrt: true,
 			nocrt: true,
 			stl: "none",
 			system_shared_libs: [],
 			srcs: [""],
 			stubs: { versions: ["1"] },
+			stem: "libclang_rt.hwasan-aarch64-android",
 
 			sanitize: {
 				never: true,
@@ -1431,7 +1430,7 @@
 		"lib64/bionic/libclang_rt.hwasan-aarch64-android.so",
 	})
 
-	hwasan := ctx.ModuleForTests("libclang_rt.hwasan-aarch64-android", "android_arm64_armv8-a_shared")
+	hwasan := ctx.ModuleForTests("libclang_rt.hwasan", "android_arm64_armv8-a_shared")
 
 	installed := hwasan.Description("install libclang_rt.hwasan")
 	ensureContains(t, installed.Output.String(), "/system/lib64/bootstrap/libclang_rt.hwasan-aarch64-android.so")
@@ -1459,13 +1458,14 @@
 		}
 
 		cc_prebuilt_library_shared {
-			name: "libclang_rt.hwasan-aarch64-android",
+			name: "libclang_rt.hwasan",
 			no_libcrt: true,
 			nocrt: true,
 			stl: "none",
 			system_shared_libs: [],
 			srcs: [""],
 			stubs: { versions: ["1"] },
+			stem: "libclang_rt.hwasan-aarch64-android",
 
 			sanitize: {
 				never: true,
@@ -1479,7 +1479,7 @@
 		"lib64/bionic/libclang_rt.hwasan-aarch64-android.so",
 	})
 
-	hwasan := ctx.ModuleForTests("libclang_rt.hwasan-aarch64-android", "android_arm64_armv8-a_shared")
+	hwasan := ctx.ModuleForTests("libclang_rt.hwasan", "android_arm64_armv8-a_shared")
 
 	installed := hwasan.Description("install libclang_rt.hwasan")
 	ensureContains(t, installed.Output.String(), "/system/lib64/bootstrap/libclang_rt.hwasan-aarch64-android.so")
@@ -3893,7 +3893,7 @@
 		}),
 		withBinder32bit,
 		withTargets(map[android.OsType][]android.Target{
-			android.Android: []android.Target{
+			android.Android: {
 				{Os: android.Android, Arch: android.Arch{ArchType: android.Arm, ArchVariant: "armv7-a-neon", Abi: []string{"armeabi-v7a"}},
 					NativeBridge: android.NativeBridgeDisabled, NativeBridgeHostArchName: "", NativeBridgeRelativePath: ""},
 			},
@@ -4574,12 +4574,20 @@
 		}
 	`)
 
-	prebuilt := ctx.ModuleForTests("myapex", "android_common_myapex").Module().(*Prebuilt)
+	testingModule := ctx.ModuleForTests("myapex", "android_common_myapex")
+	prebuilt := testingModule.Module().(*Prebuilt)
 
 	expectedInput := "myapex-arm64.apex"
 	if prebuilt.inputApex.String() != expectedInput {
 		t.Errorf("inputApex invalid. expected: %q, actual: %q", expectedInput, prebuilt.inputApex.String())
 	}
+	android.AssertStringDoesContain(t, "Invalid provenance metadata file",
+		prebuilt.ProvenanceMetaDataFile().String(), "soong/.intermediates/provenance_metadata/myapex/provenance_metadata.textproto")
+	rule := testingModule.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "myapex-arm64.apex", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/myapex/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "myapex", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/apex/myapex.apex", rule.Args["install_path"])
 }
 
 func TestPrebuiltMissingSrc(t *testing.T) {
@@ -4599,12 +4607,18 @@
 		}
 	`)
 
-	p := ctx.ModuleForTests("myapex", "android_common_myapex").Module().(*Prebuilt)
+	testingModule := ctx.ModuleForTests("myapex", "android_common_myapex")
+	p := testingModule.Module().(*Prebuilt)
 
 	expected := "notmyapex.apex"
 	if p.installFilename != expected {
 		t.Errorf("installFilename invalid. expected: %q, actual: %q", expected, p.installFilename)
 	}
+	rule := testingModule.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "myapex-arm.apex", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/myapex/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "myapex", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/apex/notmyapex.apex", rule.Args["install_path"])
 }
 
 func TestApexSetFilenameOverride(t *testing.T) {
@@ -4647,13 +4661,19 @@
 		}
 	`)
 
-	p := ctx.ModuleForTests("myapex.prebuilt", "android_common_myapex.prebuilt").Module().(*Prebuilt)
+	testingModule := ctx.ModuleForTests("myapex.prebuilt", "android_common_myapex.prebuilt")
+	p := testingModule.Module().(*Prebuilt)
 
 	expected := []string{"myapex"}
 	actual := android.AndroidMkEntriesForTest(t, ctx, p)[0].EntryMap["LOCAL_OVERRIDES_MODULES"]
 	if !reflect.DeepEqual(actual, expected) {
 		t.Errorf("Incorrect LOCAL_OVERRIDES_MODULES value '%s', expected '%s'", actual, expected)
 	}
+	rule := testingModule.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "myapex-arm.apex", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/myapex.prebuilt/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "myapex.prebuilt", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/apex/myapex.prebuilt.apex", rule.Args["install_path"])
 }
 
 func TestPrebuiltApexName(t *testing.T) {
@@ -5662,8 +5682,8 @@
 	apexRule := module.Rule("apexRule")
 	copyCmds := apexRule.Args["copy_commands"]
 
-	ensureContains(t, copyCmds, "image.apex/app/AppFoo/AppFoo.apk")
-	ensureContains(t, copyCmds, "image.apex/priv-app/AppFooPriv/AppFooPriv.apk")
+	ensureContains(t, copyCmds, "image.apex/app/AppFoo@__APEX_VERSION_PLACEHOLDER__/AppFoo.apk")
+	ensureContains(t, copyCmds, "image.apex/priv-app/AppFooPriv@__APEX_VERSION_PLACEHOLDER__/AppFooPriv.apk")
 
 	appZipRule := ctx.ModuleForTests("AppFoo", "android_common_apex10000").Description("zip jni libs")
 	// JNI libraries are uncompressed
@@ -5725,8 +5745,8 @@
 	apexRule := module.Rule("apexRule")
 	copyCmds := apexRule.Args["copy_commands"]
 
-	ensureContains(t, copyCmds, "image.apex/app/AppFooPrebuilt/AppFooPrebuilt.apk")
-	ensureContains(t, copyCmds, "image.apex/priv-app/AppFooPrivPrebuilt/AwesomePrebuiltAppFooPriv.apk")
+	ensureContains(t, copyCmds, "image.apex/app/AppFooPrebuilt@__APEX_VERSION_PLACEHOLDER__/AppFooPrebuilt.apk")
+	ensureContains(t, copyCmds, "image.apex/priv-app/AppFooPrivPrebuilt@__APEX_VERSION_PLACEHOLDER__/AwesomePrebuiltAppFooPriv.apk")
 }
 
 func TestApexWithAppImportsPrefer(t *testing.T) {
@@ -5767,7 +5787,7 @@
 	}))
 
 	ensureExactContents(t, ctx, "myapex", "android_common_myapex_image", []string{
-		"app/AppFoo/AppFooPrebuilt.apk",
+		"app/AppFoo@__APEX_VERSION_PLACEHOLDER__/AppFooPrebuilt.apk",
 	})
 }
 
@@ -5800,7 +5820,7 @@
 	apexRule := module.Rule("apexRule")
 	copyCmds := apexRule.Args["copy_commands"]
 
-	ensureContains(t, copyCmds, "image.apex/app/TesterHelpAppFoo/TesterHelpAppFoo.apk")
+	ensureContains(t, copyCmds, "image.apex/app/TesterHelpAppFoo@__APEX_VERSION_PLACEHOLDER__/TesterHelpAppFoo.apk")
 }
 
 func TestApexPropertiesShouldBeDefaultable(t *testing.T) {
@@ -6088,6 +6108,9 @@
 			apps: ["app"],
 			bpfs: ["bpf"],
 			prebuilts: ["myetc"],
+			bootclasspath_fragments: ["mybootclasspath_fragment"],
+			systemserverclasspath_fragments: ["mysystemserverclasspath_fragment"],
+			java_libs: ["myjava_library"],
 			overrides: ["oldapex"],
 			updatable: false,
 		}
@@ -6098,6 +6121,9 @@
 			apps: ["override_app"],
 			bpfs: ["override_bpf"],
 			prebuilts: ["override_myetc"],
+			bootclasspath_fragments: ["override_bootclasspath_fragment"],
+			systemserverclasspath_fragments: ["override_systemserverclasspath_fragment"],
+			java_libs: ["override_java_library"],
 			overrides: ["unknownapex"],
 			logging_parent: "com.foo.bar",
 			package_name: "test.overridden.package",
@@ -6156,6 +6182,72 @@
 			name: "override_myetc",
 			src: "override_myprebuilt",
 		}
+
+		java_library {
+			name: "bcplib",
+			srcs: ["a.java"],
+			compile_dex: true,
+			apex_available: ["myapex"],
+			permitted_packages: ["bcp.lib"],
+		}
+
+		bootclasspath_fragment {
+			name: "mybootclasspath_fragment",
+			contents: ["bcplib"],
+			apex_available: ["myapex"],
+		}
+
+		java_library {
+			name: "override_bcplib",
+			srcs: ["a.java"],
+			compile_dex: true,
+			apex_available: ["myapex"],
+			permitted_packages: ["override.bcp.lib"],
+		}
+
+		bootclasspath_fragment {
+			name: "override_bootclasspath_fragment",
+			contents: ["override_bcplib"],
+			apex_available: ["myapex"],
+		}
+
+		java_library {
+			name: "systemserverlib",
+			srcs: ["a.java"],
+			apex_available: ["myapex"],
+		}
+
+		systemserverclasspath_fragment {
+			name: "mysystemserverclasspath_fragment",
+			standalone_contents: ["systemserverlib"],
+			apex_available: ["myapex"],
+		}
+
+		java_library {
+			name: "override_systemserverlib",
+			srcs: ["a.java"],
+			apex_available: ["myapex"],
+		}
+
+		systemserverclasspath_fragment {
+			name: "override_systemserverclasspath_fragment",
+			standalone_contents: ["override_systemserverlib"],
+			apex_available: ["myapex"],
+		}
+
+		java_library {
+			name: "myjava_library",
+			srcs: ["a.java"],
+			compile_dex: true,
+			apex_available: ["myapex"],
+		}
+
+		java_library {
+			name: "override_java_library",
+			srcs: ["a.java"],
+			compile_dex: true,
+			apex_available: ["myapex"],
+		}
 	`, withManifestPackageNameOverrides([]string{"myapex:com.android.myapex"}))
 
 	originalVariant := ctx.ModuleForTests("myapex", "android_common_myapex_image").Module().(android.OverridableModule)
@@ -6171,8 +6263,8 @@
 	apexRule := module.Rule("apexRule")
 	copyCmds := apexRule.Args["copy_commands"]
 
-	ensureNotContains(t, copyCmds, "image.apex/app/app/app.apk")
-	ensureContains(t, copyCmds, "image.apex/app/override_app/override_app.apk")
+	ensureNotContains(t, copyCmds, "image.apex/app/app@__APEX_VERSION_PLACEHOLDER__/app.apk")
+	ensureContains(t, copyCmds, "image.apex/app/override_app@__APEX_VERSION_PLACEHOLDER__/override_app.apk")
 
 	ensureNotContains(t, copyCmds, "image.apex/etc/bpf/bpf.o")
 	ensureContains(t, copyCmds, "image.apex/etc/bpf/override_bpf.o")
@@ -6190,6 +6282,13 @@
 		t.Errorf("override_myapex should have logging parent (com.foo.bar), but was %q.", apexBundle.overridableProperties.Logging_parent)
 	}
 
+	android.AssertArrayString(t, "Bootclasspath_fragments does not match",
+		[]string{"override_bootclasspath_fragment"}, apexBundle.overridableProperties.Bootclasspath_fragments)
+	android.AssertArrayString(t, "Systemserverclasspath_fragments does not match",
+		[]string{"override_systemserverclasspath_fragment"}, apexBundle.overridableProperties.Systemserverclasspath_fragments)
+	android.AssertArrayString(t, "Java_libs does not match",
+		[]string{"override_java_library"}, apexBundle.overridableProperties.Java_libs)
+
 	optFlags := apexRule.Args["opt_flags"]
 	ensureContains(t, optFlags, "--override_apk_package_name test.overridden.package")
 	ensureContains(t, optFlags, "--pubkey testkey2.avbpubkey")
@@ -6204,15 +6303,139 @@
 	ensureContains(t, androidMk, "LOCAL_MODULE := override_app.override_myapex")
 	ensureContains(t, androidMk, "LOCAL_MODULE := override_bpf.o.override_myapex")
 	ensureContains(t, androidMk, "LOCAL_MODULE := apex_manifest.pb.override_myapex")
+	ensureContains(t, androidMk, "LOCAL_MODULE := override_bcplib.override_myapex")
+	ensureContains(t, androidMk, "LOCAL_MODULE := override_systemserverlib.override_myapex")
+	ensureContains(t, androidMk, "LOCAL_MODULE := override_java_library.override_myapex")
 	ensureContains(t, androidMk, "LOCAL_MODULE_STEM := override_myapex.apex")
 	ensureContains(t, androidMk, "LOCAL_OVERRIDES_MODULES := unknownapex myapex")
 	ensureNotContains(t, androidMk, "LOCAL_MODULE := app.myapex")
 	ensureNotContains(t, androidMk, "LOCAL_MODULE := bpf.myapex")
 	ensureNotContains(t, androidMk, "LOCAL_MODULE := override_app.myapex")
 	ensureNotContains(t, androidMk, "LOCAL_MODULE := apex_manifest.pb.myapex")
+	ensureNotContains(t, androidMk, "LOCAL_MODULE := override_bcplib.myapex")
+	ensureNotContains(t, androidMk, "LOCAL_MODULE := override_systemserverlib.myapex")
+	ensureNotContains(t, androidMk, "LOCAL_MODULE := override_java_library.myapex")
 	ensureNotContains(t, androidMk, "LOCAL_MODULE_STEM := myapex.apex")
 }
 
+func TestMinSdkVersionOverride(t *testing.T) {
+	// Override from 29 to 31
+	minSdkOverride31 := "31"
+	ctx := testApex(t, `
+			apex {
+					name: "myapex",
+					key: "myapex.key",
+					native_shared_libs: ["mylib"],
+					updatable: true,
+					min_sdk_version: "29"
+			}
+
+			override_apex {
+					name: "override_myapex",
+					base: "myapex",
+					logging_parent: "com.foo.bar",
+					package_name: "test.overridden.package"
+			}
+
+			apex_key {
+					name: "myapex.key",
+					public_key: "testkey.avbpubkey",
+					private_key: "testkey.pem",
+			}
+
+			cc_library {
+					name: "mylib",
+					srcs: ["mylib.cpp"],
+					runtime_libs: ["libbar"],
+					system_shared_libs: [],
+					stl: "none",
+					apex_available: [ "myapex" ],
+					min_sdk_version: "apex_inherit"
+			}
+
+			cc_library {
+					name: "libbar",
+					srcs: ["mylib.cpp"],
+					system_shared_libs: [],
+					stl: "none",
+					apex_available: [ "myapex" ],
+					min_sdk_version: "apex_inherit"
+			}
+
+	`, withApexGlobalMinSdkVersionOverride(&minSdkOverride31))
+
+	apexRule := ctx.ModuleForTests("myapex", "android_common_myapex_image").Rule("apexRule")
+	copyCmds := apexRule.Args["copy_commands"]
+
+	// Ensure that direct non-stubs dep is always included
+	ensureContains(t, copyCmds, "image.apex/lib64/mylib.so")
+
+	// Ensure that runtime_libs dep in included
+	// Ensure that runtime_libs dep is included
+
+	// Ensure libraries target overridden min_sdk_version value
+	ensureListContains(t, ctx.ModuleVariantsForTests("libbar"), "android_arm64_armv8-a_shared_apex31")
+}
+
+func TestMinSdkVersionOverrideToLowerVersionNoOp(t *testing.T) {
+	// Attempt to override from 31 to 29; this should be a no-op
+	minSdkOverride29 := "29"
+	ctx := testApex(t, `
+			apex {
+					name: "myapex",
+					key: "myapex.key",
+					native_shared_libs: ["mylib"],
+					updatable: true,
+					min_sdk_version: "31"
+			}
+
+			override_apex {
+					name: "override_myapex",
+					base: "myapex",
+					logging_parent: "com.foo.bar",
+					package_name: "test.overridden.package"
+			}
+
+			apex_key {
+					name: "myapex.key",
+					public_key: "testkey.avbpubkey",
+					private_key: "testkey.pem",
+			}
+
+			cc_library {
+					name: "mylib",
+					srcs: ["mylib.cpp"],
+					runtime_libs: ["libbar"],
+					system_shared_libs: [],
+					stl: "none",
+					apex_available: [ "myapex" ],
+					min_sdk_version: "apex_inherit"
+			}
+
+			cc_library {
+					name: "libbar",
+					srcs: ["mylib.cpp"],
+					system_shared_libs: [],
+					stl: "none",
+					apex_available: [ "myapex" ],
+					min_sdk_version: "apex_inherit"
+			}
+
+	`, withApexGlobalMinSdkVersionOverride(&minSdkOverride29))
+
+	apexRule := ctx.ModuleForTests("myapex", "android_common_myapex_image").Rule("apexRule")
+	copyCmds := apexRule.Args["copy_commands"]
+
+	// Ensure that direct non-stubs dep is always included
+	ensureContains(t, copyCmds, "image.apex/lib64/mylib.so")
+
+	// Ensure that runtime_libs dep is included
+	ensureContains(t, copyCmds, "image.apex/lib64/libbar.so")
+
+	// Ensure libraries target the original min_sdk_version value rather than the lower override value
+	ensureListContains(t, ctx.ModuleVariantsForTests("libbar"), "android_arm64_armv8-a_shared_apex31")
+}
+
 func TestLegacyAndroid10Support(t *testing.T) {
 	ctx := testApex(t, `
 		apex {
@@ -6830,7 +7053,7 @@
 		apex {
 			name: "myapex",
 			key: "myapex.key",
-			jni_libs: ["mylib"],
+			jni_libs: ["mylib", "libfoo.rust"],
 			updatable: false,
 		}
 
@@ -6856,15 +7079,41 @@
 			stl: "none",
 			apex_available: [ "myapex" ],
 		}
+
+		rust_ffi_shared {
+			name: "libfoo.rust",
+			crate_name: "foo",
+			srcs: ["foo.rs"],
+			shared_libs: ["libfoo.shared_from_rust"],
+			prefer_rlib: true,
+			apex_available: ["myapex"],
+		}
+
+		cc_library_shared {
+			name: "libfoo.shared_from_rust",
+			srcs: ["mylib.cpp"],
+			system_shared_libs: [],
+			stl: "none",
+			stubs: {
+				versions: ["10", "11", "12"],
+			},
+		}
+
 	`)
 
 	rule := ctx.ModuleForTests("myapex", "android_common_myapex_image").Rule("apexManifestRule")
 	// Notice mylib2.so (transitive dep) is not added as a jni_lib
-	ensureEquals(t, rule.Args["opt"], "-a jniLibs mylib.so")
+	ensureEquals(t, rule.Args["opt"], "-a jniLibs libfoo.rust.so mylib.so")
 	ensureExactContents(t, ctx, "myapex", "android_common_myapex_image", []string{
 		"lib64/mylib.so",
 		"lib64/mylib2.so",
+		"lib64/libfoo.rust.so",
+		"lib64/libc++.so", // auto-added to libfoo.rust by Soong
+		"lib64/liblog.so", // auto-added to libfoo.rust by Soong
 	})
+
+	// b/220397949
+	ensureListContains(t, names(rule.Args["requireNativeLibs"]), "libfoo.shared_from_rust.so")
 }
 
 func TestApexMutatorsDontRunIfDisabled(t *testing.T) {
@@ -6919,7 +7168,7 @@
 	content := bundleConfigRule.Args["content"]
 
 	ensureContains(t, content, `"compression":{"uncompressed_glob":["apex_payload.img","apex_manifest.*"]}`)
-	ensureContains(t, content, `"apex_config":{"apex_embedded_apk_config":[{"package_name":"com.android.foo","path":"app/AppFoo/AppFoo.apk"}]}`)
+	ensureContains(t, content, `"apex_config":{"apex_embedded_apk_config":[{"package_name":"com.android.foo","path":"app/AppFoo@__APEX_VERSION_PLACEHOLDER__/AppFoo.apk"}]}`)
 }
 
 func TestAppSetBundle(t *testing.T) {
@@ -6950,9 +7199,9 @@
 	if len(copyCmds) != 3 {
 		t.Fatalf("Expected 3 commands, got %d in:\n%s", len(copyCmds), s)
 	}
-	ensureMatches(t, copyCmds[0], "^rm -rf .*/app/AppSet$")
-	ensureMatches(t, copyCmds[1], "^mkdir -p .*/app/AppSet$")
-	ensureMatches(t, copyCmds[2], "^unzip .*-d .*/app/AppSet .*/AppSet.zip$")
+	ensureMatches(t, copyCmds[0], "^rm -rf .*/app/AppSet@__APEX_VERSION_PLACEHOLDER__$")
+	ensureMatches(t, copyCmds[1], "^mkdir -p .*/app/AppSet@__APEX_VERSION_PLACEHOLDER__$")
+	ensureMatches(t, copyCmds[2], "^unzip .*-d .*/app/AppSet@__APEX_VERSION_PLACEHOLDER__ .*/AppSet.zip$")
 }
 
 func TestAppSetBundlePrebuilt(t *testing.T) {
@@ -7473,7 +7722,7 @@
 	})
 }
 
-func testApexPermittedPackagesRules(t *testing.T, errmsg, bp string, bootJars []string, rules []android.Rule) {
+func testBootJarPermittedPackagesRules(t *testing.T, errmsg, bp string, bootJars []string, rules []android.Rule) {
 	t.Helper()
 	bp += `
 	apex_key {
@@ -7512,11 +7761,11 @@
 
 func TestApexPermittedPackagesRules(t *testing.T) {
 	testcases := []struct {
-		name            string
-		expectedError   string
-		bp              string
-		bootJars        []string
-		modulesPackages map[string][]string
+		name                 string
+		expectedError        string
+		bp                   string
+		bootJars             []string
+		bcpPermittedPackages map[string][]string
 	}{
 
 		{
@@ -7530,7 +7779,6 @@
 					apex_available: ["myapex"],
 					sdk_version: "none",
 					system_modules: "none",
-					min_sdk_version: "30",
 				}
 				java_library {
 					name: "nonbcp_lib2",
@@ -7539,25 +7787,23 @@
 					permitted_packages: ["a.b"],
 					sdk_version: "none",
 					system_modules: "none",
-					min_sdk_version: "30",
 				}
 				apex {
 					name: "myapex",
-					min_sdk_version: "30",
 					key: "myapex.key",
 					java_libs: ["bcp_lib1", "nonbcp_lib2"],
 					updatable: false,
 				}`,
 			bootJars: []string{"bcp_lib1"},
-			modulesPackages: map[string][]string{
-				"myapex": []string{
+			bcpPermittedPackages: map[string][]string{
+				"bcp_lib1": []string{
 					"foo.bar",
 				},
 			},
 		},
 		{
-			name:          "Bootclasspath apex jar not satisfying allowed module packages on Q.",
-			expectedError: `(?s)module "bcp_lib2" .* which is restricted because jars that are part of the myapex module may only use these package prefixes: foo.bar with min_sdk < T. Please consider the following alternatives:\n    1. If the offending code is from a statically linked library, consider removing that dependency and using an alternative already in the bootclasspath, or perhaps a shared library.    2. Move the offending code into an allowed package.\n    3. Jarjar the offending code. Please be mindful of the potential system health implications of bundling that code, particularly if the offending jar is part of the bootclasspath.`,
+			name:          "Bootclasspath apex jar not satisfying allowed module packages.",
+			expectedError: `(?s)module "bcp_lib2" .* which is restricted because bcp_lib2 bootjar may only use these package prefixes: foo.bar. Please consider the following alternatives:\n    1. If the offending code is from a statically linked library, consider removing that dependency and using an alternative already in the bootclasspath, or perhaps a shared library.    2. Move the offending code into an allowed package.\n    3. Jarjar the offending code. Please be mindful of the potential system health implications of bundling that code, particularly if the offending jar is part of the bootclasspath.`,
 			bp: `
 				java_library {
 					name: "bcp_lib1",
@@ -7566,7 +7812,6 @@
 					permitted_packages: ["foo.bar"],
 					sdk_version: "none",
 					system_modules: "none",
-					min_sdk_version: "29",
 				}
 				java_library {
 					name: "bcp_lib2",
@@ -7575,102 +7820,67 @@
 					permitted_packages: ["foo.bar", "bar.baz"],
 					sdk_version: "none",
 					system_modules: "none",
-					min_sdk_version: "29",
 				}
 				apex {
 					name: "myapex",
-					min_sdk_version: "29",
 					key: "myapex.key",
 					java_libs: ["bcp_lib1", "bcp_lib2"],
 					updatable: false,
 				}
 			`,
 			bootJars: []string{"bcp_lib1", "bcp_lib2"},
-			modulesPackages: map[string][]string{
-				"myapex": []string{
+			bcpPermittedPackages: map[string][]string{
+				"bcp_lib1": []string{
+					"foo.bar",
+				},
+				"bcp_lib2": []string{
 					"foo.bar",
 				},
 			},
 		},
 		{
-			name:          "Bootclasspath apex jar not satisfying allowed module packages on R.",
-			expectedError: `(?s)module "bcp_lib2" .* which is restricted because jars that are part of the myapex module may only use these package prefixes: foo.bar with min_sdk < T. Please consider the following alternatives:\n    1. If the offending code is from a statically linked library, consider removing that dependency and using an alternative already in the bootclasspath, or perhaps a shared library.    2. Move the offending code into an allowed package.\n    3. Jarjar the offending code. Please be mindful of the potential system health implications of bundling that code, particularly if the offending jar is part of the bootclasspath.`,
-			bp: `
-				java_library {
-					name: "bcp_lib1",
-					srcs: ["lib1/src/*.java"],
-					apex_available: ["myapex"],
-					permitted_packages: ["foo.bar"],
-					sdk_version: "none",
-					system_modules: "none",
-					min_sdk_version: "30",
-				}
-				java_library {
-					name: "bcp_lib2",
-					srcs: ["lib2/src/*.java"],
-					apex_available: ["myapex"],
-					permitted_packages: ["foo.bar", "bar.baz"],
-					sdk_version: "none",
-					system_modules: "none",
-					min_sdk_version: "30",
-				}
-				apex {
-					name: "myapex",
-					min_sdk_version: "30",
-					key: "myapex.key",
-					java_libs: ["bcp_lib1", "bcp_lib2"],
-					updatable: false,
-				}
-			`,
-			bootJars: []string{"bcp_lib1", "bcp_lib2"},
-			modulesPackages: map[string][]string{
-				"myapex": []string{
-					"foo.bar",
-				},
-			},
-		},
-		{
-			name:          "Bootclasspath apex jar >= T not satisfying Q/R/S allowed module packages.",
+			name:          "Updateable Bootclasspath apex jar not satisfying allowed module packages.",
 			expectedError: "",
 			bp: `
 				java_library {
-					name: "bcp_lib1",
+					name: "bcp_lib_restricted",
 					srcs: ["lib1/src/*.java"],
 					apex_available: ["myapex"],
 					permitted_packages: ["foo.bar"],
 					sdk_version: "none",
+					min_sdk_version: "29",
 					system_modules: "none",
-					min_sdk_version: "current",
 				}
 				java_library {
-					name: "bcp_lib2",
+					name: "bcp_lib_unrestricted",
 					srcs: ["lib2/src/*.java"],
 					apex_available: ["myapex"],
 					permitted_packages: ["foo.bar", "bar.baz"],
 					sdk_version: "none",
+					min_sdk_version: "29",
 					system_modules: "none",
-					min_sdk_version: "current",
 				}
 				apex {
 					name: "myapex",
-					min_sdk_version: "current",
 					key: "myapex.key",
-					java_libs: ["bcp_lib1", "bcp_lib2"],
-					updatable: false,
+					java_libs: ["bcp_lib_restricted", "bcp_lib_unrestricted"],
+					updatable: true,
+					min_sdk_version: "29",
 				}
 			`,
 			bootJars: []string{"bcp_lib1", "bcp_lib2"},
-			modulesPackages: map[string][]string{
-				"myapex": []string{
+			bcpPermittedPackages: map[string][]string{
+				"bcp_lib_restricted": []string{
 					"foo.bar",
 				},
+				// bcp_lib_unrestricted has no entry here since updatable bcp jars can contain new packages - tracking via an allowlist is not necessary
 			},
 		},
 	}
 	for _, tc := range testcases {
 		t.Run(tc.name, func(t *testing.T) {
-			rules := createApexPermittedPackagesRules(tc.modulesPackages)
-			testApexPermittedPackagesRules(t, tc.expectedError, tc.bp, tc.bootJars, rules)
+			rules := createBcpPermittedPackagesRules(tc.bcpPermittedPackages)
+			testBootJarPermittedPackagesRules(t, tc.expectedError, tc.bp, tc.bootJars, rules)
 		})
 	}
 }
@@ -8661,6 +8871,86 @@
 	ensureContains(t, androidMk, "LOCAL_REQUIRED_MODULES += otherapex")
 }
 
+func TestAndroidMk_RequiredDeps(t *testing.T) {
+	ctx := testApex(t, `
+		apex {
+			name: "myapex",
+			key: "myapex.key",
+			updatable: false,
+		}
+
+		apex_key {
+			name: "myapex.key",
+			public_key: "testkey.avbpubkey",
+			private_key: "testkey.pem",
+		}
+	`)
+
+	bundle := ctx.ModuleForTests("myapex", "android_common_myapex_image").Module().(*apexBundle)
+	bundle.requiredDeps = append(bundle.requiredDeps, "foo")
+	data := android.AndroidMkDataForTest(t, ctx, bundle)
+	var builder strings.Builder
+	data.Custom(&builder, bundle.BaseModuleName(), "TARGET_", "", data)
+	androidMk := builder.String()
+	ensureContains(t, androidMk, "LOCAL_REQUIRED_MODULES += foo")
+
+	flattenedBundle := ctx.ModuleForTests("myapex", "android_common_myapex_flattened").Module().(*apexBundle)
+	flattenedBundle.requiredDeps = append(flattenedBundle.requiredDeps, "foo")
+	flattenedData := android.AndroidMkDataForTest(t, ctx, flattenedBundle)
+	var flattenedBuilder strings.Builder
+	flattenedData.Custom(&flattenedBuilder, flattenedBundle.BaseModuleName(), "TARGET_", "", flattenedData)
+	flattenedAndroidMk := flattenedBuilder.String()
+	ensureContains(t, flattenedAndroidMk, "LOCAL_REQUIRED_MODULES += foo")
+}
+
+func TestApexOutputFileProducer(t *testing.T) {
+	for _, tc := range []struct {
+		name          string
+		ref           string
+		expected_data []string
+	}{
+		{
+			name:          "test_using_output",
+			ref:           ":myapex",
+			expected_data: []string{"out/soong/.intermediates/myapex/android_common_myapex_image/myapex.capex:myapex.capex"},
+		},
+		{
+			name:          "test_using_apex",
+			ref:           ":myapex{.apex}",
+			expected_data: []string{"out/soong/.intermediates/myapex/android_common_myapex_image/myapex.apex:myapex.apex"},
+		},
+	} {
+		t.Run(tc.name, func(t *testing.T) {
+			ctx := testApex(t, `
+					apex {
+						name: "myapex",
+						key: "myapex.key",
+						compressible: true,
+						updatable: false,
+					}
+
+					apex_key {
+						name: "myapex.key",
+						public_key: "testkey.avbpubkey",
+						private_key: "testkey.pem",
+					}
+
+					java_test {
+						name: "`+tc.name+`",
+						srcs: ["a.java"],
+						data: ["`+tc.ref+`"],
+					}
+				`,
+				android.FixtureModifyProductVariables(func(variables android.FixtureProductVariables) {
+					variables.CompressedApex = proptools.BoolPtr(true)
+				}))
+			javaTest := ctx.ModuleForTests(tc.name, "android_common").Module().(*java.Test)
+			data := android.AndroidMkEntriesForTest(t, ctx, javaTest)[0].EntryMap["LOCAL_COMPATIBILITY_SUPPORT_FILES"]
+			android.AssertStringPathsRelativeToTopEquals(t, "data", ctx.Config(), tc.expected_data, data)
+		})
+	}
+}
+
 func TestSdkLibraryCanHaveHigherMinSdkVersion(t *testing.T) {
 	preparer := android.GroupFixturePreparers(
 		PrepareForTestWithApexBuildComponents,
@@ -8855,6 +9145,185 @@
 	}
 }
 
+func TestApexStrictUpdtabilityLint(t *testing.T) {
+	bpTemplate := `
+		apex {
+			name: "myapex",
+			key: "myapex.key",
+			java_libs: ["myjavalib"],
+			updatable: %v,
+			min_sdk_version: "29",
+		}
+		apex_key {
+			name: "myapex.key",
+		}
+		java_library {
+			name: "myjavalib",
+			srcs: ["MyClass.java"],
+			apex_available: [ "myapex" ],
+			lint: {
+				strict_updatability_linting: %v,
+			},
+			sdk_version: "current",
+			min_sdk_version: "29",
+		}
+		`
+	fs := android.MockFS{
+		"lint-baseline.xml": nil,
+	}
+
+	testCases := []struct {
+		testCaseName              string
+		apexUpdatable             bool
+		javaStrictUpdtabilityLint bool
+		lintFileExists            bool
+		disallowedFlagExpected    bool
+	}{
+		{
+			testCaseName:              "lint-baseline.xml does not exist, no disallowed flag necessary in lint cmd",
+			apexUpdatable:             true,
+			javaStrictUpdtabilityLint: true,
+			lintFileExists:            false,
+			disallowedFlagExpected:    false,
+		},
+		{
+			testCaseName:              "non-updatable apex respects javalib's strict_updatability_linting set to false",
+			apexUpdatable:             false,
+			javaStrictUpdtabilityLint: false,
+			lintFileExists:            true,
+			disallowedFlagExpected:    false,
+		},
+		{
+			testCaseName:              "non-updatable apex respects javalib's strict_updatability_linting set to true",
+			apexUpdatable:             false,
+			javaStrictUpdtabilityLint: true,
+			lintFileExists:            true,
+			disallowedFlagExpected:    true,
+		},
+		{
+			testCaseName:              "updatable apex sets strict updatability of javalib to true",
+			apexUpdatable:             true,
+			javaStrictUpdtabilityLint: false, // will be set to true by mutator
+			lintFileExists:            true,
+			disallowedFlagExpected:    true,
+		},
+	}
+
+	for _, testCase := range testCases {
+		bp := fmt.Sprintf(bpTemplate, testCase.apexUpdatable, testCase.javaStrictUpdtabilityLint)
+		fixtures := []android.FixturePreparer{}
+		if testCase.lintFileExists {
+			fixtures = append(fixtures, fs.AddToFixture())
+		}
+
+		result := testApex(t, bp, fixtures...)
+		myjavalib := result.ModuleForTests("myjavalib", "android_common_apex29")
+		sboxProto := android.RuleBuilderSboxProtoForTests(t, myjavalib.Output("lint.sbox.textproto"))
+		disallowedFlagActual := strings.Contains(*sboxProto.Commands[0].Command, "--baseline lint-baseline.xml --disallowed_issues NewApi")
+
+		if disallowedFlagActual != testCase.disallowedFlagExpected {
+			t.Errorf("Failed testcase: %v \nActual lint cmd: %v", testCase.testCaseName, *sboxProto.Commands[0].Command)
+		}
+	}
+}
+
+func TestUpdatabilityLintSkipLibcore(t *testing.T) {
+	bp := `
+		apex {
+			name: "myapex",
+			key: "myapex.key",
+			java_libs: ["myjavalib"],
+			updatable: true,
+			min_sdk_version: "29",
+		}
+		apex_key {
+			name: "myapex.key",
+		}
+		java_library {
+			name: "myjavalib",
+			srcs: ["MyClass.java"],
+			apex_available: [ "myapex" ],
+			sdk_version: "current",
+			min_sdk_version: "29",
+		}
+		`
+
+	testCases := []struct {
+		testCaseName           string
+		moduleDirectory        string
+		disallowedFlagExpected bool
+	}{
+		{
+			testCaseName:           "lintable module defined outside libcore",
+			moduleDirectory:        "",
+			disallowedFlagExpected: true,
+		},
+		{
+			testCaseName:           "lintable module defined in libcore root directory",
+			moduleDirectory:        "libcore/",
+			disallowedFlagExpected: false,
+		},
+		{
+			testCaseName:           "lintable module defined in libcore child directory",
+			moduleDirectory:        "libcore/childdir/",
+			disallowedFlagExpected: true,
+		},
+	}
+
+	for _, testCase := range testCases {
+		lintFileCreator := android.FixtureAddTextFile(testCase.moduleDirectory+"lint-baseline.xml", "")
+		bpFileCreator := android.FixtureAddTextFile(testCase.moduleDirectory+"Android.bp", bp)
+		result := testApex(t, "", lintFileCreator, bpFileCreator)
+		myjavalib := result.ModuleForTests("myjavalib", "android_common_apex29")
+		sboxProto := android.RuleBuilderSboxProtoForTests(t, myjavalib.Output("lint.sbox.textproto"))
+		cmdFlags := fmt.Sprintf("--baseline %vlint-baseline.xml --disallowed_issues NewApi", testCase.moduleDirectory)
+		disallowedFlagActual := strings.Contains(*sboxProto.Commands[0].Command, cmdFlags)
+
+		if disallowedFlagActual != testCase.disallowedFlagExpected {
+			t.Errorf("Failed testcase: %v \nActual lint cmd: %v", testCase.testCaseName, *sboxProto.Commands[0].Command)
+		}
+	}
+}
+
+// checks transitive deps of an apex coming from a bootclasspath_fragment
+func TestApexStrictUpdtabilityLintBcpFragmentDeps(t *testing.T) {
+	bp := `
+		apex {
+			name: "myapex",
+			key: "myapex.key",
+			bootclasspath_fragments: ["mybootclasspathfragment"],
+			updatable: true,
+			min_sdk_version: "29",
+		}
+		apex_key {
+			name: "myapex.key",
+		}
+		bootclasspath_fragment {
+			name: "mybootclasspathfragment",
+			contents: ["myjavalib"],
+			apex_available: ["myapex"],
+		}
+		java_library {
+			name: "myjavalib",
+			srcs: ["MyClass.java"],
+			apex_available: [ "myapex" ],
+			sdk_version: "current",
+			min_sdk_version: "29",
+			compile_dex: true,
+		}
+		`
+	fs := android.MockFS{
+		"lint-baseline.xml": nil,
+	}
+
+	result := testApex(t, bp, dexpreopt.FixtureSetApexBootJars("myapex:myjavalib"), fs.AddToFixture())
+	myjavalib := result.ModuleForTests("myjavalib", "android_common_apex29")
+	sboxProto := android.RuleBuilderSboxProtoForTests(t, myjavalib.Output("lint.sbox.textproto"))
+	if !strings.Contains(*sboxProto.Commands[0].Command, "--baseline lint-baseline.xml --disallowed_issues NewApi") {
+		t.Errorf("Strict updatability lint missing in myjavalib coming from bootclasspath_fragment mybootclasspathfragment\nActual lint cmd: %v", *sboxProto.Commands[0].Command)
+	}
+}
+
 func TestMain(m *testing.M) {
 	os.Exit(m.Run())
 }
diff --git a/apex/builder.go b/apex/builder.go
index fc4bf8a..d4765d0 100644
--- a/apex/builder.go
+++ b/apex/builder.go
@@ -107,14 +107,16 @@
 			`--canned_fs_config ${canned_fs_config} ` +
 			`--include_build_info ` +
 			`--payload_type image ` +
-			`--key ${key} ${opt_flags} ${image_dir} ${out} `,
+			`--key ${key} ` +
+			`--apex_version_placeholder ${apex_version_placeholder} ` +
+			`${opt_flags} ${image_dir} ${out} `,
 		CommandDeps: []string{"${apexer}", "${avbtool}", "${e2fsdroid}", "${merge_zips}",
 			"${mke2fs}", "${resize2fs}", "${sefcontext_compile}", "${make_f2fs}", "${sload_f2fs}", "${make_erofs}",
 			"${soong_zip}", "${zipalign}", "${aapt2}", "prebuilts/sdk/current/public/android.jar"},
 		Rspfile:        "${out}.copy_commands",
 		RspfileContent: "${copy_commands}",
 		Description:    "APEX ${image_dir} => ${out}",
-	}, "tool_path", "image_dir", "copy_commands", "file_contexts", "canned_fs_config", "key", "opt_flags", "manifest", "payload_fs_type")
+	}, "tool_path", "image_dir", "copy_commands", "file_contexts", "canned_fs_config", "key", "opt_flags", "manifest", "payload_fs_type", "apex_version_placeholder")
 
 	zipApexRule = pctx.StaticRule("zipApexRule", blueprint.RuleParams{
 		Command: `rm -rf ${image_dir} && mkdir -p ${image_dir} && ` +
@@ -122,12 +124,13 @@
 			`APEXER_TOOL_PATH=${tool_path} ` +
 			`${apexer} --force --manifest ${manifest} ` +
 			`--payload_type zip ` +
+			`--apex_version_placeholder ${apex_version_placeholder} ` +
 			`${image_dir} ${out} `,
 		CommandDeps:    []string{"${apexer}", "${merge_zips}", "${soong_zip}", "${zipalign}", "${aapt2}"},
 		Rspfile:        "${out}.copy_commands",
 		RspfileContent: "${copy_commands}",
 		Description:    "ZipAPEX ${image_dir} => ${out}",
-	}, "tool_path", "image_dir", "copy_commands", "manifest")
+	}, "tool_path", "image_dir", "copy_commands", "manifest", "apex_version_placeholder")
 
 	apexProtoConvertRule = pctx.AndroidStaticRule("apexProtoConvertRule",
 		blueprint.RuleParams{
@@ -305,32 +308,6 @@
 	return output.OutputPath
 }
 
-// buildNoticeFiles creates a buile rule for aggregating notice files from the modules that
-// contributes to this APEX. The notice files are merged into a big notice file.
-func (a *apexBundle) buildNoticeFiles(ctx android.ModuleContext, apexFileName string) android.NoticeOutputs {
-	var noticeFiles android.Paths
-
-	a.WalkPayloadDeps(ctx, func(ctx android.ModuleContext, from blueprint.Module, to android.ApexModule, externalDep bool) bool {
-		if externalDep {
-			// As soon as the dependency graph crosses the APEX boundary, don't go further.
-			return false
-		}
-		noticeFiles = append(noticeFiles, to.NoticeFiles()...)
-		return true
-	})
-
-	// TODO(jiyong): why do we need this? WalkPayloadDeps should have already covered this.
-	for _, fi := range a.filesInfo {
-		noticeFiles = append(noticeFiles, fi.noticeFiles...)
-	}
-
-	if len(noticeFiles) == 0 {
-		return android.NoticeOutputs{}
-	}
-
-	return android.BuildNoticeOutput(ctx, a.installDir, apexFileName, android.SortedUniquePaths(noticeFiles))
-}
-
 // buildInstalledFilesFile creates a build rule for the installed-files.txt file where the list of
 // files included in this APEX is shown. The text file is dist'ed so that people can see what's
 // included in the APEX without actually downloading and extracting it.
@@ -414,7 +391,7 @@
 
 	imageDir := android.PathForModuleOut(ctx, "image"+suffix)
 
-	installSymbolFiles := !ctx.Config().KatiEnabled() || a.ExportedToMake()
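+	// Symbol files are only installed for installable modules, and only when Kati is disabled or the APEX is exported to Make.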
+	installSymbolFiles := (!ctx.Config().KatiEnabled() || a.ExportedToMake()) && a.installable()
 
 	// b/140136207. When there are overriding APEXes for a VNDK APEX, the symbols file for the overridden
 	// APEX and the overriding APEX will have the same installation paths at /apex/com.android.vndk.v<ver>
@@ -642,12 +619,17 @@
 			optFlags = append(optFlags, "--logging_parent ", a.overridableProperties.Logging_parent)
 		}
 
-		a.mergedNotices = a.buildNoticeFiles(ctx, a.Name()+suffix)
-		if a.mergedNotices.HtmlGzOutput.Valid() {
-			// If there's a NOTICE file, embed it as an asset file in the APEX.
-			implicitInputs = append(implicitInputs, a.mergedNotices.HtmlGzOutput.Path())
-			optFlags = append(optFlags, "--assets_dir "+filepath.Dir(a.mergedNotices.HtmlGzOutput.String()))
-		}
+		// Create a NOTICE file, and embed it as an asset file in the APEX.
+		a.htmlGzNotice = android.PathForModuleOut(ctx, "NOTICE.html.gz")
+		android.BuildNoticeHtmlOutputFromLicenseMetadata(ctx, a.htmlGzNotice)
+		noticeAssetPath := android.PathForModuleOut(ctx, "NOTICE", "NOTICE.html.gz")
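+		// Copy the notice file into its own directory so the whole directory can be passed to apexer via --assets_dir.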
+		builder := android.NewRuleBuilder(pctx, ctx)
+		builder.Command().Text("cp").
+			Input(a.htmlGzNotice).
+			Output(noticeAssetPath)
+		builder.Build("notice_dir", "Building notice dir")
+		implicitInputs = append(implicitInputs, noticeAssetPath)
+		optFlags = append(optFlags, "--assets_dir "+filepath.Dir(noticeAssetPath.String()))
 
 		if (moduleMinSdkVersion.GreaterThan(android.SdkVersion_Android10) && !a.shouldGenerateHashtree()) && !compressionEnabled {
 			// Apexes which are supposed to be installed in builtin dirs(/system, etc)
@@ -679,14 +661,15 @@
 			Output:      unsignedOutputFile,
 			Description: "apex (" + apexType.name() + ")",
 			Args: map[string]string{
-				"tool_path":        outHostBinDir + ":" + prebuiltSdkToolsBinDir,
-				"image_dir":        imageDir.String(),
-				"copy_commands":    strings.Join(copyCommands, " && "),
-				"manifest":         a.manifestPbOut.String(),
-				"file_contexts":    fileContexts.String(),
-				"canned_fs_config": cannedFsConfig.String(),
-				"key":              a.privateKeyFile.String(),
-				"opt_flags":        strings.Join(optFlags, " "),
+				"tool_path":                outHostBinDir + ":" + prebuiltSdkToolsBinDir,
+				"image_dir":                imageDir.String(),
+				"copy_commands":            strings.Join(copyCommands, " && "),
+				"manifest":                 a.manifestPbOut.String(),
+				"file_contexts":            fileContexts.String(),
+				"canned_fs_config":         cannedFsConfig.String(),
+				"key":                      a.privateKeyFile.String(),
+				"opt_flags":                strings.Join(optFlags, " "),
+				"apex_version_placeholder": APEX_VERSION_PLACEHOLDER,
 			},
 		})
 
@@ -778,10 +761,11 @@
 			Output:      unsignedOutputFile,
 			Description: "apex (" + apexType.name() + ")",
 			Args: map[string]string{
-				"tool_path":     outHostBinDir + ":" + prebuiltSdkToolsBinDir,
-				"image_dir":     imageDir.String(),
-				"copy_commands": strings.Join(copyCommands, " && "),
-				"manifest":      a.manifestPbOut.String(),
+				"tool_path":                outHostBinDir + ":" + prebuiltSdkToolsBinDir,
+				"image_dir":                imageDir.String(),
+				"copy_commands":            strings.Join(copyCommands, " && "),
+				"manifest":                 a.manifestPbOut.String(),
+				"apex_version_placeholder": APEX_VERSION_PLACEHOLDER,
 			},
 		})
 	}
@@ -810,6 +794,9 @@
 		Implicits:   implicits,
 		Args:        args,
 	})
+	if suffix == imageApexSuffix {
+		a.outputApexFile = signedOutputFile
+	}
 	a.outputFile = signedOutputFile
 
 	if ctx.ModuleDir() != "system/apex/apexd/apexd_testdata" && a.testOnlyShouldForceCompression() {
@@ -853,6 +840,10 @@
 		installSuffix = imageCapexSuffix
 	}
 
+	if !a.installable() {
+		a.SkipInstall()
+	}
+
 	// Install to $OUT/soong/{target,host}/.../apex.
 	a.installedFile = ctx.InstallFile(a.installDir, a.Name()+installSuffix, a.outputFile,
 		a.compatSymlinks.Paths()...)
@@ -1008,7 +999,7 @@
 		return !externalDep
 	})
 
-	a.ApexBundleDepsInfo.BuildDepsInfoLists(ctx, proptools.String(a.properties.Min_sdk_version), depInfos)
+	a.ApexBundleDepsInfo.BuildDepsInfoLists(ctx, a.MinSdkVersion(ctx).Raw, depInfos)
 
 	ctx.Build(pctx, android.BuildParams{
 		Rule:   android.Phony,
diff --git a/apex/prebuilt.go b/apex/prebuilt.go
index 158c804..187e0df 100644
--- a/apex/prebuilt.go
+++ b/apex/prebuilt.go
@@ -23,7 +23,7 @@
 
 	"android/soong/android"
 	"android/soong/java"
-
+	"android/soong/provenance"
 	"github.com/google/blueprint"
 	"github.com/google/blueprint/proptools"
 )
@@ -482,6 +482,8 @@
 	properties PrebuiltProperties
 
 	inputApex android.Path
+
+	provenanceMetaDataFile android.OutputPath
 }
 
 type ApexFileProperties struct {
@@ -778,9 +780,14 @@
 
 	if p.installable() {
 		p.installedFile = ctx.InstallFile(p.installDir, p.installFilename, p.inputApex, p.compatSymlinks.Paths()...)
+		p.provenanceMetaDataFile = provenance.GenerateArtifactProvenanceMetaData(ctx, p.inputApex, p.installedFile)
 	}
 }
 
+func (p *Prebuilt) ProvenanceMetaDataFile() android.OutputPath {
+	return p.provenanceMetaDataFile
+}
+
 // prebuiltApexExtractorModule is a private module type that is only created by the prebuilt_apex
 // module. It extracts the correct apex to use and makes it available for use by apex_set.
 type prebuiltApexExtractorModule struct {
diff --git a/bazel/Android.bp b/bazel/Android.bp
index 80af2bd..9e7edc7 100644
--- a/bazel/Android.bp
+++ b/bazel/Android.bp
@@ -10,11 +10,11 @@
         "configurability.go",
         "constants.go",
         "properties.go",
+        "testing.go",
     ],
     testSrcs: [
         "aquery_test.go",
         "properties_test.go",
-        "testing.go",
     ],
     pluginFor: [
         "soong_build",
diff --git a/bazel/constants.go b/bazel/constants.go
index 6beb496..b10f256 100644
--- a/bazel/constants.go
+++ b/bazel/constants.go
@@ -21,7 +21,7 @@
 
 	SoongInjectionDirName = "soong_injection"
 
-	GeneratedBazelFileWarning = "# GENERATED FOR BAZEL FROM SOONG. DO NOT EDIT"
+	GeneratedBazelFileWarning = "# GENERATED FOR BAZEL FROM SOONG. DO NOT EDIT."
 )
 
 // String returns the name of the run.
diff --git a/bazel/properties.go b/bazel/properties.go
index 1300a53..f956031 100644
--- a/bazel/properties.go
+++ b/bazel/properties.go
@@ -65,6 +65,14 @@
 	Excludes []Label
 }
 
+// MakeLabelList creates a LabelList from a list of Labels
+func MakeLabelList(labels []Label) LabelList {
+	return LabelList{
+		Includes: labels,
+		Excludes: nil,
+	}
+}
+
 func (ll *LabelList) Equals(other LabelList) bool {
 	if len(ll.Includes) != len(other.Includes) || len(ll.Excludes) != len(other.Excludes) {
 		return false
@@ -354,6 +362,15 @@
 	return keys
 }
 
+// MakeLabelAttribute turns a string into a LabelAttribute
+func MakeLabelAttribute(label string) *LabelAttribute {
+	return &LabelAttribute{
+		Value: &Label{
+			Label: label,
+		},
+	}
+}
+
 type configToBools map[string]bool
 
 func (ctb configToBools) setValue(config string, value *bool) {
diff --git a/bazel/properties_test.go b/bazel/properties_test.go
index c7f9776..7b76b74 100644
--- a/bazel/properties_test.go
+++ b/bazel/properties_test.go
@@ -329,7 +329,7 @@
 func TestPartitionLabelListAttribute(t *testing.T) {
 	testCases := []struct {
 		name           string
-		ctx            *otherModuleTestContext
+		ctx            *OtherModuleTestContext
 		labelList      LabelListAttribute
 		filters        LabelPartitions
 		expected       PartitionToLabelListAttribute
@@ -337,7 +337,7 @@
 	}{
 		{
 			name: "no configurable values",
-			ctx:  &otherModuleTestContext{},
+			ctx:  &OtherModuleTestContext{},
 			labelList: LabelListAttribute{
 				Value: makeLabelList([]string{"a.a", "b.b", "c.c", "d.d", "e.e"}, []string{}),
 			},
@@ -354,7 +354,7 @@
 		},
 		{
 			name: "no configurable values, remainder partition",
-			ctx:  &otherModuleTestContext{},
+			ctx:  &OtherModuleTestContext{},
 			labelList: LabelListAttribute{
 				Value: makeLabelList([]string{"a.a", "b.b", "c.c", "d.d", "e.e"}, []string{}),
 			},
@@ -371,7 +371,7 @@
 		},
 		{
 			name: "no configurable values, empty partition",
-			ctx:  &otherModuleTestContext{},
+			ctx:  &OtherModuleTestContext{},
 			labelList: LabelListAttribute{
 				Value: makeLabelList([]string{"a.a", "c.c"}, []string{}),
 			},
@@ -387,8 +387,8 @@
 		},
 		{
 			name: "no configurable values, has map",
-			ctx: &otherModuleTestContext{
-				modules: []testModuleInfo{testModuleInfo{name: "srcs", typ: "fg", dir: "dir"}},
+			ctx: &OtherModuleTestContext{
+				Modules: []TestModuleInfo{{ModuleName: "srcs", Typ: "fg", Dir: "dir"}},
 			},
 			labelList: LabelListAttribute{
 				Value: makeLabelList([]string{"a.a", "srcs", "b.b", "c.c"}, []string{}),
@@ -406,7 +406,7 @@
 		},
 		{
 			name: "configurable values, keeps empty if excludes",
-			ctx:  &otherModuleTestContext{},
+			ctx:  &OtherModuleTestContext{},
 			labelList: LabelListAttribute{
 				ConfigurableValues: configurableLabelLists{
 					ArchConfigurationAxis: labelListSelectValues{
@@ -450,7 +450,7 @@
 		},
 		{
 			name: "error for multiple partitions same value",
-			ctx:  &otherModuleTestContext{},
+			ctx:  &OtherModuleTestContext{},
 			labelList: LabelListAttribute{
 				Value: makeLabelList([]string{"a.a", "b.b", "c.c", "d.d", "e.e"}, []string{}),
 			},
diff --git a/bazel/testing.go b/bazel/testing.go
index 23c8350..9a43b61 100644
--- a/bazel/testing.go
+++ b/bazel/testing.go
@@ -20,86 +20,86 @@
 	"github.com/google/blueprint"
 )
 
-// testModuleInfo implements blueprint.Module interface with sufficient information to mock a subset of
+// TestModuleInfo implements blueprint.Module interface with sufficient information to mock a subset of
 // a blueprint ModuleContext
-type testModuleInfo struct {
-	name string
-	typ  string
-	dir  string
+type TestModuleInfo struct {
+	ModuleName string
+	Typ        string
+	Dir        string
 }
 
 // Name returns name for testModuleInfo -- required to implement blueprint.Module
-func (mi testModuleInfo) Name() string {
-	return mi.name
+func (mi TestModuleInfo) Name() string {
+	return mi.ModuleName
 }
 
 // GenerateBuildActions unused, but required to implement blueprint.Module
-func (mi testModuleInfo) GenerateBuildActions(blueprint.ModuleContext) {}
+func (mi TestModuleInfo) GenerateBuildActions(blueprint.ModuleContext) {}
 
-func (mi testModuleInfo) equals(other testModuleInfo) bool {
-	return mi.name == other.name && mi.typ == other.typ && mi.dir == other.dir
+func (mi TestModuleInfo) equals(other TestModuleInfo) bool {
+	return mi.ModuleName == other.ModuleName && mi.Typ == other.Typ && mi.Dir == other.Dir
 }
 
 // ensure testModuleInfo implements blueprint.Module
-var _ blueprint.Module = testModuleInfo{}
+var _ blueprint.Module = TestModuleInfo{}
 
-// otherModuleTestContext is a mock context that implements OtherModuleContext
-type otherModuleTestContext struct {
-	modules []testModuleInfo
+// OtherModuleTestContext is a mock context that implements OtherModuleContext
+type OtherModuleTestContext struct {
+	Modules []TestModuleInfo
 	errors  []string
 }
 
 // ModuleFromName retrieves the testModuleInfo corresponding to name, if it exists
-func (omc *otherModuleTestContext) ModuleFromName(name string) (blueprint.Module, bool) {
-	for _, m := range omc.modules {
-		if m.name == name {
+func (omc *OtherModuleTestContext) ModuleFromName(name string) (blueprint.Module, bool) {
+	for _, m := range omc.Modules {
+		if m.ModuleName == name {
 			return m, true
 		}
 	}
-	return testModuleInfo{}, false
+	return TestModuleInfo{}, false
 }
 
 // testModuleInfo returns the testModuleInfo corresponding to a blueprint.Module if it exists in omc
-func (omc *otherModuleTestContext) testModuleInfo(m blueprint.Module) (testModuleInfo, bool) {
-	mi, ok := m.(testModuleInfo)
+func (omc *OtherModuleTestContext) testModuleInfo(m blueprint.Module) (TestModuleInfo, bool) {
+	mi, ok := m.(TestModuleInfo)
 	if !ok {
-		return testModuleInfo{}, false
+		return TestModuleInfo{}, false
 	}
-	for _, other := range omc.modules {
+	for _, other := range omc.Modules {
 		if other.equals(mi) {
 			return mi, true
 		}
 	}
-	return testModuleInfo{}, false
+	return TestModuleInfo{}, false
 }
 
 // OtherModuleType returns type of m if it exists in omc
-func (omc *otherModuleTestContext) OtherModuleType(m blueprint.Module) string {
+func (omc *OtherModuleTestContext) OtherModuleType(m blueprint.Module) string {
 	if mi, ok := omc.testModuleInfo(m); ok {
-		return mi.typ
+		return mi.Typ
 	}
 	return ""
 }
 
 // OtherModuleName returns name of m if it exists in omc
-func (omc *otherModuleTestContext) OtherModuleName(m blueprint.Module) string {
+func (omc *OtherModuleTestContext) OtherModuleName(m blueprint.Module) string {
 	if mi, ok := omc.testModuleInfo(m); ok {
-		return mi.name
+		return mi.ModuleName
 	}
 	return ""
 }
 
 // OtherModuleDir returns dir of m if it exists in omc
-func (omc *otherModuleTestContext) OtherModuleDir(m blueprint.Module) string {
+func (omc *OtherModuleTestContext) OtherModuleDir(m blueprint.Module) string {
 	if mi, ok := omc.testModuleInfo(m); ok {
-		return mi.dir
+		return mi.Dir
 	}
 	return ""
 }
 
-func (omc *otherModuleTestContext) ModuleErrorf(format string, args ...interface{}) {
+func (omc *OtherModuleTestContext) ModuleErrorf(format string, args ...interface{}) {
 	omc.errors = append(omc.errors, fmt.Sprintf(format, args...))
 }
 
 // Ensure otherModuleTestContext implements OtherModuleContext
-var _ OtherModuleContext = &otherModuleTestContext{}
+var _ OtherModuleContext = &OtherModuleTestContext{}
diff --git a/bp2build/Android.bp b/bp2build/Android.bp
index b904c35..e0ce194 100644
--- a/bp2build/Android.bp
+++ b/bp2build/Android.bp
@@ -18,6 +18,7 @@
     ],
     deps: [
         "soong-android",
+        "soong-android-allowlists",
         "soong-android-soongconfig",
         "soong-shared",
         "soong-apex",
@@ -44,9 +45,18 @@
         "cc_library_shared_conversion_test.go",
         "cc_library_static_conversion_test.go",
         "cc_object_conversion_test.go",
+        "cc_prebuilt_library_conversion_test.go",
+        "cc_prebuilt_library_shared_test.go",
+        "cc_prebuilt_library_static_test.go",
         "conversion_test.go",
         "filegroup_conversion_test.go",
         "genrule_conversion_test.go",
+        "java_binary_host_conversion_test.go",
+        "java_import_conversion_test.go",
+        "java_library_conversion_test.go",
+        "java_library_host_conversion_test.go",
+        "java_plugin_conversion_test.go",
+        "java_proto_conversion_test.go",
         "performance_test.go",
         "prebuilt_etc_conversion_test.go",
         "python_binary_conversion_test.go",
diff --git a/bp2build/android_app_conversion_test.go b/bp2build/android_app_conversion_test.go
index 28de06c..3824586 100644
--- a/bp2build/android_app_conversion_test.go
+++ b/bp2build/android_app_conversion_test.go
@@ -90,3 +90,41 @@
 			}),
 		}})
 }
+
+func TestAndroidAppArchVariantSrcs(t *testing.T) {
+	runAndroidAppTestCase(t, bp2buildTestCase{
+		description:                "Android app - arch variant srcs",
+		moduleTypeUnderTest:        "android_app",
+		moduleTypeUnderTestFactory: java.AndroidAppFactory,
+		filesystem: map[string]string{
+			"arm.java":            "",
+			"x86.java":            "",
+			"res/res.png":         "",
+			"AndroidManifest.xml": "",
+		},
+		blueprint: `
+android_app {
+    name: "TestApp",
+    sdk_version: "current",
+    arch: {
+        arm: {
+            srcs: ["arm.java"],
+        },
+        x86: {
+            srcs: ["x86.java"],
+        }
+    }
+}
+`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("android_binary", "TestApp", attrNameToString{
+				"srcs": `select({
+        "//build/bazel/platforms/arch:arm": ["arm.java"],
+        "//build/bazel/platforms/arch:x86": ["x86.java"],
+        "//conditions:default": [],
+    })`,
+				"manifest":       `"AndroidManifest.xml"`,
+				"resource_files": `["res/res.png"]`,
+			}),
+		}})
+}
diff --git a/bp2build/build_conversion.go b/bp2build/build_conversion.go
index 1d3b105..a96a3fc 100644
--- a/bp2build/build_conversion.go
+++ b/bp2build/build_conversion.go
@@ -580,7 +580,9 @@
 				elements = append(elements, val)
 			}
 		}
-		return starlark_fmt.PrintList(elements, indent, "%s"), nil
+		return starlark_fmt.PrintList(elements, indent, func(s string) string {
+			return "%s"
+		}), nil
 
 	case reflect.Struct:
 		// Special cases where the bp2build sends additional information to the codegenerator
diff --git a/bp2build/build_conversion_test.go b/bp2build/build_conversion_test.go
index b21a477..0f3ca79 100644
--- a/bp2build/build_conversion_test.go
+++ b/bp2build/build_conversion_test.go
@@ -20,6 +20,7 @@
 	"testing"
 
 	"android/soong/android"
+	"android/soong/android/allowlists"
 	"android/soong/python"
 )
 
@@ -922,7 +923,7 @@
 		moduleTypeUnderTestFactory android.ModuleFactory
 		expectedCount              map[string]int
 		description                string
-		bp2buildConfig             android.Bp2BuildConfig
+		bp2buildConfig             allowlists.Bp2BuildConfig
 		checkDir                   string
 		fs                         map[string]string
 	}{
@@ -937,10 +938,10 @@
 				"not_migrated":                       0,
 				"also_not_migrated":                  0,
 			},
-			bp2buildConfig: android.Bp2BuildConfig{
-				"migrated":                android.Bp2BuildDefaultTrueRecursively,
-				"migrated/but_not_really": android.Bp2BuildDefaultFalse,
-				"not_migrated":            android.Bp2BuildDefaultFalse,
+			bp2buildConfig: allowlists.Bp2BuildConfig{
+				"migrated":                allowlists.Bp2BuildDefaultTrueRecursively,
+				"migrated/but_not_really": allowlists.Bp2BuildDefaultFalse,
+				"not_migrated":            allowlists.Bp2BuildDefaultFalse,
 			},
 			fs: map[string]string{
 				"migrated/Android.bp":                           `filegroup { name: "a" }`,
@@ -960,9 +961,9 @@
 				"package-opt-out":            1,
 				"package-opt-out/subpackage": 0,
 			},
-			bp2buildConfig: android.Bp2BuildConfig{
-				"package-opt-in":  android.Bp2BuildDefaultFalse,
-				"package-opt-out": android.Bp2BuildDefaultTrueRecursively,
+			bp2buildConfig: allowlists.Bp2BuildConfig{
+				"package-opt-in":  allowlists.Bp2BuildDefaultFalse,
+				"package-opt-out": allowlists.Bp2BuildDefaultTrueRecursively,
 			},
 			fs: map[string]string{
 				"package-opt-in/Android.bp": `
@@ -1004,7 +1005,8 @@
 		config := android.TestConfig(buildDir, nil, "", fs)
 		ctx := android.NewTestContext(config)
 		ctx.RegisterModuleType(testCase.moduleTypeUnderTest, testCase.moduleTypeUnderTestFactory)
-		ctx.RegisterBp2BuildConfig(testCase.bp2buildConfig)
+		allowlist := android.NewBp2BuildAllowlist().SetDefaultConfig(testCase.bp2buildConfig)
+		ctx.RegisterBp2BuildConfig(allowlist)
 		ctx.RegisterForBazelConversion()
 
 		_, errs := ctx.ParseFileList(dir, toParse)
diff --git a/bp2build/cc_binary_conversion_test.go b/bp2build/cc_binary_conversion_test.go
index 8d94079..037564b 100644
--- a/bp2build/cc_binary_conversion_test.go
+++ b/bp2build/cc_binary_conversion_test.go
@@ -15,12 +15,13 @@
 package bp2build
 
 import (
-	"android/soong/android"
-	"android/soong/cc"
-	"android/soong/genrule"
 	"fmt"
 	"strings"
 	"testing"
+
+	"android/soong/android"
+	"android/soong/cc"
+	"android/soong/genrule"
 )
 
 const (
@@ -127,6 +128,9 @@
         keep_symbols_list: ["symbol"],
         none: true,
     },
+    sdk_version: "current",
+    min_sdk_version: "29",
+    use_version_lib: true,
 }
 `,
 		targets: []testBazelTarget{
@@ -150,6 +154,9 @@
         "keep_symbols_list": ["symbol"],
         "none": True,
     }`,
+				"sdk_version":     `"current"`,
+				"min_sdk_version": `"29"`,
+				"use_version_lib": `True`,
 			},
 			},
 		},
diff --git a/bp2build/cc_library_conversion_test.go b/bp2build/cc_library_conversion_test.go
index 8fde655..2775a10 100644
--- a/bp2build/cc_library_conversion_test.go
+++ b/bp2build/cc_library_conversion_test.go
@@ -115,6 +115,9 @@
         },
     },
     include_build_directory: false,
+    sdk_version: "current",
+    min_sdk_version: "29",
+    use_version_lib: true,
 }
 `,
 		expectedBazelTargets: makeCcLibraryTargets("foo-lib", attrNameToString{
@@ -140,6 +143,9 @@
         "//build/bazel/platforms/os:linux_bionic": ["bionic.cpp"],
         "//conditions:default": [],
     })`,
+			"sdk_version":     `"current"`,
+			"min_sdk_version": `"29"`,
+			"use_version_lib": `True`,
 		}),
 	})
 }
@@ -279,8 +285,8 @@
     srcs: ["both.cpp"],
     cflags: ["bothflag"],
     shared_libs: ["shared_dep_for_both"],
-    static_libs: ["static_dep_for_both"],
-    whole_static_libs: ["whole_static_lib_for_both"],
+    static_libs: ["static_dep_for_both", "whole_and_static_lib_for_both"],
+    whole_static_libs: ["whole_static_lib_for_both", "whole_and_static_lib_for_both"],
     static: {
         srcs: ["staticonly.cpp"],
         cflags: ["staticflag"],
@@ -328,6 +334,11 @@
     bazel_module: { bp2build_available: false },
 }
 
+cc_library_static {
+    name: "whole_and_static_lib_for_both",
+    bazel_module: { bp2build_available: false },
+}
+
 cc_library {
     name: "shared_dep_for_shared",
     bazel_module: { bp2build_available: false },
@@ -363,6 +374,7 @@
     ]`,
 				"whole_archive_deps": `[
         ":whole_static_lib_for_both",
+        ":whole_and_static_lib_for_both",
         ":whole_static_lib_for_static",
     ]`}),
 			makeBazelTarget("cc_library_shared", "a", attrNameToString{
@@ -384,6 +396,7 @@
     ]`,
 				"whole_archive_deps": `[
         ":whole_static_lib_for_both",
+        ":whole_and_static_lib_for_both",
         ":whole_static_lib_for_shared",
     ]`,
 			}),
diff --git a/bp2build/cc_library_headers_conversion_test.go b/bp2build/cc_library_headers_conversion_test.go
index e4cfa35..e5bb120 100644
--- a/bp2build/cc_library_headers_conversion_test.go
+++ b/bp2build/cc_library_headers_conversion_test.go
@@ -112,6 +112,8 @@
             export_include_dirs: ["arch_x86_64_exported_include_dir"],
         },
     },
+    sdk_version: "current",
+    min_sdk_version: "29",
 
     // TODO: Also support export_header_lib_headers
 }`,
@@ -130,6 +132,8 @@
         ":lib-1",
         ":lib-2",
     ]`,
+        "sdk_version": `"current"`,
+        "min_sdk_version": `"29"`,
 			}),
 		},
 	})
diff --git a/bp2build/cc_library_shared_conversion_test.go b/bp2build/cc_library_shared_conversion_test.go
index 78192fe..7c2c100 100644
--- a/bp2build/cc_library_shared_conversion_test.go
+++ b/bp2build/cc_library_shared_conversion_test.go
@@ -136,6 +136,8 @@
         "header_lib_1",
         "header_lib_2"
     ],
+    sdk_version: "current",
+    min_sdk_version: "29",
 
     // TODO: Also support export_header_lib_headers
 }`,
@@ -174,6 +176,8 @@
         ":whole_static_lib_1",
         ":whole_static_lib_2",
     ]`,
+				"sdk_version":     `"current"`,
+				"min_sdk_version": `"29"`,
 			}),
 		},
 	})
@@ -492,3 +496,27 @@
 	},
 	)
 }
+
+func TestCcLibrarySharedSystemSharedLibsSharedEmpty(t *testing.T) {
+	runCcLibrarySharedTestCase(t, bp2buildTestCase{
+		description:                "cc_library_shared system_shared_libs empty shared default",
+		moduleTypeUnderTest:        "cc_library_shared",
+		moduleTypeUnderTestFactory: cc.LibrarySharedFactory,
+		blueprint: soongCcLibrarySharedPreamble + `
+cc_defaults {
+    name: "empty_defaults",
+    shared: {
+        system_shared_libs: [],
+    },
+    include_build_directory: false,
+}
+cc_library_shared {
+    name: "empty",
+    defaults: ["empty_defaults"],
+}
+`,
+		expectedBazelTargets: []string{makeBazelTarget("cc_library_shared", "empty", attrNameToString{
+			"system_dynamic_deps": "[]",
+		})},
+	})
+}
diff --git a/bp2build/cc_library_static_conversion_test.go b/bp2build/cc_library_static_conversion_test.go
index 205bf4d..be10e86 100644
--- a/bp2build/cc_library_static_conversion_test.go
+++ b/bp2build/cc_library_static_conversion_test.go
@@ -166,6 +166,8 @@
         "header_lib_1",
         "header_lib_2"
     ],
+    sdk_version: "current",
+    min_sdk_version: "29",
 
     // TODO: Also support export_header_lib_headers
 }`,
@@ -202,6 +204,8 @@
         ":whole_static_lib_1",
         ":whole_static_lib_2",
     ]`,
+        "sdk_version": `"current"`,
+        "min_sdk_version": `"29"`,
 			}),
 		},
 	})
diff --git a/bp2build/cc_object_conversion_test.go b/bp2build/cc_object_conversion_test.go
index 0a6c317..ea58086 100644
--- a/bp2build/cc_object_conversion_test.go
+++ b/bp2build/cc_object_conversion_test.go
@@ -55,6 +55,8 @@
         "a/b/*.c"
     ],
     exclude_srcs: ["a/b/exclude.c"],
+    sdk_version: "current",
+    min_sdk_version: "29",
 }
 `,
 		expectedBazelTargets: []string{
@@ -71,6 +73,8 @@
     ]`,
 				"srcs":                `["a/b/c.c"]`,
 				"system_dynamic_deps": `[]`,
+        "sdk_version": `"current"`,
+        "min_sdk_version": `"29"`,
 			}),
 		},
 	})
diff --git a/bp2build/cc_prebuilt_library_conversion_test.go b/bp2build/cc_prebuilt_library_conversion_test.go
new file mode 100644
index 0000000..3cf8969
--- /dev/null
+++ b/bp2build/cc_prebuilt_library_conversion_test.go
@@ -0,0 +1,250 @@
+// Copyright 2022 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package bp2build
+
+import (
+	"fmt"
+	"testing"
+
+	"android/soong/cc"
+)
+
+func TestPrebuiltLibraryStaticAndSharedSimple(t *testing.T) {
+	runBp2BuildTestCaseSimple(t,
+		bp2buildTestCase{
+			description:                "prebuilt library static and shared simple",
+			moduleTypeUnderTest:        "cc_prebuilt_library",
+			moduleTypeUnderTestFactory: cc.PrebuiltLibraryFactory,
+			filesystem: map[string]string{
+				"libf.so": "",
+			},
+			blueprint: `
+cc_prebuilt_library {
+	name: "libtest",
+	srcs: ["libf.so"],
+	bazel_module: { bp2build_available: true },
+}`,
+			expectedBazelTargets: []string{
+				makeBazelTarget("prebuilt_library_static", "libtest_bp2build_cc_library_static", attrNameToString{
+					"static_library": `"libf.so"`,
+				}),
+				makeBazelTarget("prebuilt_library_shared", "libtest", attrNameToString{
+					"shared_library": `"libf.so"`,
+				}),
+			},
+		})
+}
+
+func TestPrebuiltLibraryWithArchVariance(t *testing.T) {
+	runBp2BuildTestCaseSimple(t,
+		bp2buildTestCase{
+			description:                "prebuilt library with arch variance",
+			moduleTypeUnderTest:        "cc_prebuilt_library",
+			moduleTypeUnderTestFactory: cc.PrebuiltLibraryFactory,
+			filesystem: map[string]string{
+				"libf.so": "",
+				"libg.so": "",
+			},
+			blueprint: `
+cc_prebuilt_library {
+	name: "libtest",
+	arch: {
+		arm64: { srcs: ["libf.so"], },
+		arm: { srcs: ["libg.so"], },
+	},
+	bazel_module: { bp2build_available: true },
+}`,
+			expectedBazelTargets: []string{
+				makeBazelTarget("prebuilt_library_static", "libtest_bp2build_cc_library_static", attrNameToString{
+					"static_library": `select({
+        "//build/bazel/platforms/arch:arm": "libg.so",
+        "//build/bazel/platforms/arch:arm64": "libf.so",
+        "//conditions:default": None,
+    })`,
+				}),
+				makeBazelTarget("prebuilt_library_shared", "libtest", attrNameToString{
+					"shared_library": `select({
+        "//build/bazel/platforms/arch:arm": "libg.so",
+        "//build/bazel/platforms/arch:arm64": "libf.so",
+        "//conditions:default": None,
+    })`,
+				}),
+			},
+		})
+}
+
+func TestPrebuiltLibraryAdditionalAttrs(t *testing.T) {
+	runBp2BuildTestCaseSimple(t,
+		bp2buildTestCase{
+			description:                "prebuilt library additional attributes",
+			moduleTypeUnderTest:        "cc_prebuilt_library",
+			moduleTypeUnderTestFactory: cc.PrebuiltLibraryFactory,
+			filesystem: map[string]string{
+				"libf.so":    "",
+				"testdir/1/": "",
+				"testdir/2/": "",
+			},
+			blueprint: `
+cc_prebuilt_library {
+	name: "libtest",
+	srcs: ["libf.so"],
+	export_include_dirs: ["testdir/1/"],
+	export_system_include_dirs: ["testdir/2/"],
+	bazel_module: { bp2build_available: true },
+}`,
+			expectedBazelTargets: []string{
+				makeBazelTarget("prebuilt_library_static", "libtest_bp2build_cc_library_static", attrNameToString{
+					"static_library":         `"libf.so"`,
+					"export_includes":        `["testdir/1/"]`,
+					"export_system_includes": `["testdir/2/"]`,
+				}),
+				// TODO(b/229374533): When fixed, update this test
+				makeBazelTarget("prebuilt_library_shared", "libtest", attrNameToString{
+					"shared_library": `"libf.so"`,
+				}),
+			},
+		})
+}
+
+func TestPrebuiltLibrarySharedStanzaFails(t *testing.T) {
+	runBp2BuildTestCaseSimple(t,
+		bp2buildTestCase{
+			description:                "prebuilt library with shared stanza fails because multiple sources",
+			moduleTypeUnderTest:        "cc_prebuilt_library",
+			moduleTypeUnderTestFactory: cc.PrebuiltLibraryFactory,
+			filesystem: map[string]string{
+				"libf.so": "",
+				"libg.so": "",
+			},
+			blueprint: `
+cc_prebuilt_library {
+	name: "libtest",
+	srcs: ["libf.so"],
+	shared: {
+		srcs: ["libg.so"],
+	},
+	bazel_module: { bp2build_available: true },
+}`,
+			expectedErr: fmt.Errorf("Expected at most one source file"),
+		})
+}
+
+func TestPrebuiltLibraryStaticStanzaFails(t *testing.T) {
+	runBp2BuildTestCaseSimple(t,
+		bp2buildTestCase{
+			description:                "prebuilt library with static stanza fails because multiple sources",
+			moduleTypeUnderTest:        "cc_prebuilt_library",
+			moduleTypeUnderTestFactory: cc.PrebuiltLibraryFactory,
+			filesystem: map[string]string{
+				"libf.so": "",
+				"libg.so": "",
+			},
+			blueprint: `
+cc_prebuilt_library {
+	name: "libtest",
+	srcs: ["libf.so"],
+	static: {
+		srcs: ["libg.so"],
+	},
+	bazel_module: { bp2build_available: true },
+}`,
+			expectedErr: fmt.Errorf("Expected at most one source file"),
+		})
+}
+
+func TestPrebuiltLibrarySharedAndStaticStanzas(t *testing.T) {
+	runBp2BuildTestCaseSimple(t,
+		bp2buildTestCase{
+			description:                "prebuilt library with both shared and static stanzas",
+			moduleTypeUnderTest:        "cc_prebuilt_library",
+			moduleTypeUnderTestFactory: cc.PrebuiltLibraryFactory,
+			filesystem: map[string]string{
+				"libf.so": "",
+				"libg.so": "",
+			},
+			blueprint: `
+cc_prebuilt_library {
+	name: "libtest",
+	static: {
+		srcs: ["libf.so"],
+	},
+	shared: {
+		srcs: ["libg.so"],
+	},
+	bazel_module: { bp2build_available: true },
+}`,
+			expectedBazelTargets: []string{
+				makeBazelTarget("prebuilt_library_static", "libtest_bp2build_cc_library_static", attrNameToString{
+					"static_library": `"libf.so"`,
+				}),
+				makeBazelTarget("prebuilt_library_shared", "libtest", attrNameToString{
+					"shared_library": `"libg.so"`,
+				}),
+			},
+		})
+}
+
+// TODO(b/228623543): When this bug is fixed, enable this test
+//func TestPrebuiltLibraryOnlyShared(t *testing.T) {
+//	runBp2BuildTestCaseSimple(t,
+//		bp2buildTestCase{
+//			description:                "prebuilt library shared only",
+//			moduleTypeUnderTest:        "cc_prebuilt_library",
+//			moduleTypeUnderTestFactory: cc.PrebuiltLibraryFactory,
+//			filesystem: map[string]string{
+//				"libf.so": "",
+//			},
+//			blueprint: `
+//cc_prebuilt_library {
+//	name: "libtest",
+//	srcs: ["libf.so"],
+//	static: {
+//		enabled: false,
+//	},
+//	bazel_module: { bp2build_available: true },
+//}`,
+//			expectedBazelTargets: []string{
+//				makeBazelTarget("prebuilt_library_shared", "libtest", attrNameToString{
+//					"shared_library": `"libf.so"`,
+//				}),
+//			},
+//		})
+//}
+
+// TODO(b/228623543): When this bug is fixed, enable this test
+//func TestPrebuiltLibraryOnlyStatic(t *testing.T) {
+//	runBp2BuildTestCaseSimple(t,
+//		bp2buildTestCase{
+//			description:                "prebuilt library static only",
+//			moduleTypeUnderTest:        "cc_prebuilt_library",
+//			moduleTypeUnderTestFactory: cc.PrebuiltLibraryFactory,
+//			filesystem: map[string]string{
+//				"libf.so": "",
+//			},
+//			blueprint: `
+//cc_prebuilt_library {
+//	name: "libtest",
+//	srcs: ["libf.so"],
+//	shared: {
+//		enabled: false,
+//	},
+//	bazel_module: { bp2build_available: true },
+//}`,
+//			expectedBazelTargets: []string{
+//				makeBazelTarget("prebuilt_library_static", "libtest_bp2build_cc_library_static", attrNameToString{
+//					"static_library": `"libf.so"`,
+//				}),
+//			},
+//		})
+//}
diff --git a/bp2build/cc_prebuilt_library_shared_test.go b/bp2build/cc_prebuilt_library_shared_test.go
index ef2fddc..57905e5 100644
--- a/bp2build/cc_prebuilt_library_shared_test.go
+++ b/bp2build/cc_prebuilt_library_shared_test.go
@@ -1,6 +1,7 @@
 package bp2build
 
 import (
+	"fmt"
 	"testing"
 
 	"android/soong/cc"
@@ -59,3 +60,26 @@
 			},
 		})
 }
+
+func TestSharedPrebuiltLibrarySharedStanzaFails(t *testing.T) {
+	runBp2BuildTestCaseSimple(t,
+		bp2buildTestCase{
+			description:                "prebuilt library shared with shared stanza fails because multiple sources",
+			moduleTypeUnderTest:        "cc_prebuilt_library_shared",
+			moduleTypeUnderTestFactory: cc.PrebuiltSharedLibraryFactory,
+			filesystem: map[string]string{
+				"libf.so": "",
+				"libg.so": "",
+			},
+			blueprint: `
+cc_prebuilt_library_shared {
+	name: "libtest",
+	srcs: ["libf.so"],
+	shared: {
+		srcs: ["libg.so"],
+	},
+	bazel_module: { bp2build_available: true},
+}`,
+			expectedErr: fmt.Errorf("Expected at most one source file"),
+		})
+}
diff --git a/bp2build/cc_prebuilt_library_static_test.go b/bp2build/cc_prebuilt_library_static_test.go
new file mode 100644
index 0000000..3feb1f1
--- /dev/null
+++ b/bp2build/cc_prebuilt_library_static_test.go
@@ -0,0 +1,98 @@
+// Copyright 2022 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package bp2build
+
+import (
+	"fmt"
+	"testing"
+
+	"android/soong/cc"
+)
+
+func TestStaticPrebuiltLibrary(t *testing.T) {
+	runBp2BuildTestCaseSimple(t,
+		bp2buildTestCase{
+			description:                "prebuilt library static simple",
+			moduleTypeUnderTest:        "cc_prebuilt_library_static",
+			moduleTypeUnderTestFactory: cc.PrebuiltStaticLibraryFactory,
+			filesystem: map[string]string{
+				"libf.so": "",
+			},
+			blueprint: `
+cc_prebuilt_library_static {
+	name: "libtest",
+	srcs: ["libf.so"],
+	bazel_module: { bp2build_available: true },
+}`,
+			expectedBazelTargets: []string{
+				makeBazelTarget("prebuilt_library_static", "libtest", attrNameToString{
+					"static_library": `"libf.so"`,
+				}),
+			},
+		})
+}
+
+func TestStaticPrebuiltLibraryWithArchVariance(t *testing.T) {
+	runBp2BuildTestCaseSimple(t,
+		bp2buildTestCase{
+			description:                "prebuilt library static with arch variance",
+			moduleTypeUnderTest:        "cc_prebuilt_library_static",
+			moduleTypeUnderTestFactory: cc.PrebuiltStaticLibraryFactory,
+			filesystem: map[string]string{
+				"libf.so": "",
+				"libg.so": "",
+			},
+			blueprint: `
+cc_prebuilt_library_static {
+	name: "libtest",
+	arch: {
+		arm64: { srcs: ["libf.so"], },
+		arm: { srcs: ["libg.so"], },
+	},
+	bazel_module: { bp2build_available: true },
+}`,
+			expectedBazelTargets: []string{
+				makeBazelTarget("prebuilt_library_static", "libtest", attrNameToString{
+					"static_library": `select({
+        "//build/bazel/platforms/arch:arm": "libg.so",
+        "//build/bazel/platforms/arch:arm64": "libf.so",
+        "//conditions:default": None,
+    })`,
+				}),
+			},
+		})
+}
+
+func TestStaticPrebuiltLibraryStaticStanzaFails(t *testing.T) {
+	runBp2BuildTestCaseSimple(t,
+		bp2buildTestCase{
+			description:                "prebuilt library static with static stanza fails because multiple sources",
+			moduleTypeUnderTest:        "cc_prebuilt_library_static",
+			moduleTypeUnderTestFactory: cc.PrebuiltStaticLibraryFactory,
+			filesystem: map[string]string{
+				"libf.so": "",
+				"libg.so": "",
+			},
+			blueprint: `
+cc_prebuilt_library_static {
+	name: "libtest",
+	srcs: ["libf.so"],
+	static: {
+		srcs: ["libg.so"],
+	},
+	bazel_module: { bp2build_available: true },
+}`,
+			expectedErr: fmt.Errorf("Expected at most one source file"),
+		})
+}
diff --git a/bp2build/conversion.go b/bp2build/conversion.go
index 96c12d3..1790dd7 100644
--- a/bp2build/conversion.go
+++ b/bp2build/conversion.go
@@ -7,7 +7,8 @@
 	"strings"
 
 	"android/soong/android"
-	"android/soong/cc/config"
+	cc_config "android/soong/cc/config"
+	java_config "android/soong/java/config"
 
 	"github.com/google/blueprint/proptools"
 )
@@ -22,18 +23,24 @@
 	var files []BazelFile
 
 	files = append(files, newFile("cc_toolchain", GeneratedBuildFileName, "")) // Creates a //cc_toolchain package.
-	files = append(files, newFile("cc_toolchain", "constants.bzl", config.BazelCcToolchainVars(cfg)))
+	files = append(files, newFile("cc_toolchain", "constants.bzl", cc_config.BazelCcToolchainVars(cfg)))
+
+	files = append(files, newFile("java_toolchain", GeneratedBuildFileName, "")) // Creates a //java_toolchain package.
+	files = append(files, newFile("java_toolchain", "constants.bzl", java_config.BazelJavaToolchainVars(cfg)))
 
 	files = append(files, newFile("metrics", "converted_modules.txt", strings.Join(metrics.convertedModules, "\n")))
 
 	files = append(files, newFile("product_config", "soong_config_variables.bzl", cfg.Bp2buildSoongConfigDefinitions.String()))
 
+	files = append(files, newFile("product_config", "arch_configuration.bzl", android.StarlarkArchConfigurations()))
+
 	apiLevelsContent, err := json.Marshal(android.GetApiLevelsMap(cfg))
 	if err != nil {
 		panic(err)
 	}
 	files = append(files, newFile("api_levels", GeneratedBuildFileName, `exports_files(["api_levels.json"])`))
 	files = append(files, newFile("api_levels", "api_levels.json", string(apiLevelsContent)))
+	files = append(files, newFile("api_levels", "api_levels.bzl", android.StarlarkApiLevelConfigs(cfg)))
 
 	return files
 }
diff --git a/bp2build/conversion_test.go b/bp2build/conversion_test.go
index 629ca9b..e49d855 100644
--- a/bp2build/conversion_test.go
+++ b/bp2build/conversion_test.go
@@ -95,6 +95,14 @@
 			basename: "constants.bzl",
 		},
 		{
+			dir:      "java_toolchain",
+			basename: GeneratedBuildFileName,
+		},
+		{
+			dir:      "java_toolchain",
+			basename: "constants.bzl",
+		},
+		{
 			dir:      "metrics",
 			basename: "converted_modules.txt",
 		},
@@ -103,6 +111,10 @@
 			basename: "soong_config_variables.bzl",
 		},
 		{
+			dir:      "product_config",
+			basename: "arch_configuration.bzl",
+		},
+		{
 			dir:      "api_levels",
 			basename: GeneratedBuildFileName,
 		},
@@ -110,6 +122,10 @@
 			dir:      "api_levels",
 			basename: "api_levels.json",
 		},
+		{
+			dir:      "api_levels",
+			basename: "api_levels.bzl",
+		},
 	}
 
 	if len(files) != len(expectedFilePaths) {
diff --git a/bp2build/java_binary_host_conversion_test.go b/bp2build/java_binary_host_conversion_test.go
index 65136d9..4fc07e0 100644
--- a/bp2build/java_binary_host_conversion_test.go
+++ b/bp2build/java_binary_host_conversion_test.go
@@ -28,6 +28,7 @@
 	(&tc).moduleTypeUnderTestFactory = java.BinaryHostFactory
 	runBp2BuildTestCase(t, func(ctx android.RegistrationContext) {
 		ctx.RegisterModuleType("cc_library_host_shared", cc.LibraryHostSharedFactory)
+		ctx.RegisterModuleType("java_library", java.LibraryFactory)
 	}, tc)
 }
 
@@ -67,3 +68,33 @@
 		},
 	})
 }
+
+func TestJavaBinaryHostRuntimeDeps(t *testing.T) {
+	runJavaBinaryHostTestCase(t, bp2buildTestCase{
+		description: "java_binary_host with srcs, exclude_srcs, jni_libs, javacflags, and manifest.",
+		filesystem:  fs,
+		blueprint: `java_binary_host {
+    name: "java-binary-host-1",
+    static_libs: ["java-dep-1"],
+    manifest: "test.mf",
+    bazel_module: { bp2build_available: true },
+}
+
+java_library {
+    name: "java-dep-1",
+    srcs: ["a.java"],
+    bazel_module: { bp2build_available: false },
+}
+`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("java_binary", "java-binary-host-1", attrNameToString{
+				"main_class":   `"com.android.test.MainClass"`,
+				"runtime_deps": `[":java-dep-1"]`,
+				"target_compatible_with": `select({
+        "//build/bazel/platforms/os:android": ["@platforms//:incompatible"],
+        "//conditions:default": [],
+    })`,
+			}),
+		},
+	})
+}
diff --git a/bp2build/java_import_conversion_test.go b/bp2build/java_import_conversion_test.go
index 2f7211c..0b3191c 100644
--- a/bp2build/java_import_conversion_test.go
+++ b/bp2build/java_import_conversion_test.go
@@ -29,7 +29,7 @@
 func registerJavaImportModuleTypes(ctx android.RegistrationContext) {
 }
 
-func TestMinimalJavaImport(t *testing.T) {
+func TestJavaImportMinimal(t *testing.T) {
 	runJavaImportTestCase(t, bp2buildTestCase{
 		description:                "Java import - simple example",
 		moduleTypeUnderTest:        "java_import",
@@ -50,3 +50,36 @@
 			}),
 		}})
 }
+
+func TestJavaImportArchVariant(t *testing.T) {
+	runJavaImportTestCase(t, bp2buildTestCase{
+		description:                "Java import - arch variant example",
+		moduleTypeUnderTest:        "java_import",
+		moduleTypeUnderTestFactory: java.ImportFactory,
+		filesystem: map[string]string{
+			"import.jar": "",
+		},
+		blueprint: `
+java_import {
+    name: "example_import",
+    target: {
+        android: {
+            jars: ["android.jar"],
+        },
+        linux_glibc: {
+            jars: ["linux.jar"],
+        },
+    },
+    bazel_module: { bp2build_available: true },
+}
+`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("java_import", "example_import", attrNameToString{
+				"jars": `select({
+        "//build/bazel/platforms/os:android": ["android.jar"],
+        "//build/bazel/platforms/os:linux": ["linux.jar"],
+        "//conditions:default": [],
+    })`,
+			}),
+		}})
+}
diff --git a/bp2build/java_library_conversion_test.go b/bp2build/java_library_conversion_test.go
index 5c65ec2..ccc52ef 100644
--- a/bp2build/java_library_conversion_test.go
+++ b/bp2build/java_library_conversion_test.go
@@ -15,17 +15,23 @@
 package bp2build
 
 import (
+	"fmt"
 	"testing"
 
 	"android/soong/android"
 	"android/soong/java"
 )
 
-func runJavaLibraryTestCase(t *testing.T, tc bp2buildTestCase) {
+func runJavaLibraryTestCaseWithRegistrationCtxFunc(t *testing.T, tc bp2buildTestCase, registrationCtxFunc func(ctx android.RegistrationContext)) {
 	t.Helper()
 	(&tc).moduleTypeUnderTest = "java_library"
 	(&tc).moduleTypeUnderTestFactory = java.LibraryFactory
-	runBp2BuildTestCase(t, func(ctx android.RegistrationContext) {}, tc)
+	runBp2BuildTestCase(t, registrationCtxFunc, tc)
+}
+
+func runJavaLibraryTestCase(t *testing.T, tc bp2buildTestCase) {
+	t.Helper()
+	runJavaLibraryTestCaseWithRegistrationCtxFunc(t, tc, func(ctx android.RegistrationContext) {})
 }
 
 func TestJavaLibrary(t *testing.T) {
@@ -55,3 +61,193 @@
 		},
 	})
 }
+
+func TestJavaLibraryConvertsStaticLibsToDepsAndExports(t *testing.T) {
+	runJavaLibraryTestCase(t, bp2buildTestCase{
+		blueprint: `java_library {
+    name: "java-lib-1",
+    srcs: ["a.java"],
+    libs: ["java-lib-2"],
+    static_libs: ["java-lib-3"],
+    bazel_module: { bp2build_available: true },
+}
+
+java_library {
+    name: "java-lib-2",
+    srcs: ["b.java"],
+    bazel_module: { bp2build_available: false },
+}
+
+java_library {
+    name: "java-lib-3",
+    srcs: ["c.java"],
+    bazel_module: { bp2build_available: false },
+}`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("java_library", "java-lib-1", attrNameToString{
+				"srcs": `["a.java"]`,
+				"deps": `[
+        ":java-lib-2",
+        ":java-lib-3",
+    ]`,
+				"exports": `[":java-lib-3"]`,
+			}),
+		},
+	})
+}
+
+func TestJavaLibraryConvertsStaticLibsToExportsIfNoSrcs(t *testing.T) {
+	runJavaLibraryTestCase(t, bp2buildTestCase{
+		blueprint: `java_library {
+    name: "java-lib-1",
+    static_libs: ["java-lib-2"],
+    bazel_module: { bp2build_available: true },
+}
+
+java_library {
+    name: "java-lib-2",
+    srcs: ["a.java"],
+    bazel_module: { bp2build_available: false },
+}`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("java_library", "java-lib-1", attrNameToString{
+				"exports": `[":java-lib-2"]`,
+			}),
+		},
+	})
+}
+
+func TestJavaLibraryFailsToConvertLibsWithNoSrcs(t *testing.T) {
+	runJavaLibraryTestCase(t, bp2buildTestCase{
+		expectedErr: fmt.Errorf("Module has direct dependencies but no sources. Bazel will not allow this."),
+		blueprint: `java_library {
+    name: "java-lib-1",
+    libs: ["java-lib-2"],
+    bazel_module: { bp2build_available: true },
+}
+
+java_library {
+    name: "java-lib-2",
+    srcs: ["a.java"],
+    bazel_module: { bp2build_available: false },
+}`,
+		expectedBazelTargets: []string{},
+	})
+}
+
+func TestJavaLibraryPlugins(t *testing.T) {
+	runJavaLibraryTestCaseWithRegistrationCtxFunc(t, bp2buildTestCase{
+		blueprint: `java_library {
+    name: "java-lib-1",
+    plugins: ["java-plugin-1"],
+    bazel_module: { bp2build_available: true },
+}
+
+java_plugin {
+    name: "java-plugin-1",
+    srcs: ["a.java"],
+    bazel_module: { bp2build_available: false },
+}`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("java_library", "java-lib-1", attrNameToString{
+				"plugins": `[":java-plugin-1"]`,
+			}),
+		},
+	}, func(ctx android.RegistrationContext) {
+		ctx.RegisterModuleType("java_plugin", java.PluginFactory)
+	})
+}
+
+func TestJavaLibraryErrorproneJavacflagsEnabledManually(t *testing.T) {
+	runJavaLibraryTestCase(t, bp2buildTestCase{
+		blueprint: `java_library {
+    name: "java-lib-1",
+    srcs: ["a.java"],
+    javacflags: ["-Xsuper-fast"],
+    errorprone: {
+        enabled: true,
+        javacflags: ["-Xep:SpeedLimit:OFF"],
+    },
+}`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("java_library", "java-lib-1", attrNameToString{
+				"javacopts": `[
+        "-Xsuper-fast",
+        "-Xep:SpeedLimit:OFF",
+    ]`,
+				"srcs": `["a.java"]`,
+			}),
+		},
+	})
+}
+
+func TestJavaLibraryErrorproneJavacflagsErrorproneDisabledByDefault(t *testing.T) {
+	runJavaLibraryTestCase(t, bp2buildTestCase{
+		blueprint: `java_library {
+    name: "java-lib-1",
+    srcs: ["a.java"],
+    javacflags: ["-Xsuper-fast"],
+    errorprone: {
+        javacflags: ["-Xep:SpeedLimit:OFF"],
+    },
+}`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("java_library", "java-lib-1", attrNameToString{
+				"javacopts": `["-Xsuper-fast"]`,
+				"srcs":      `["a.java"]`,
+			}),
+		},
+	})
+}
+
+func TestJavaLibraryErrorproneJavacflagsErrorproneDisabledManually(t *testing.T) {
+	runJavaLibraryTestCase(t, bp2buildTestCase{
+		blueprint: `java_library {
+    name: "java-lib-1",
+    srcs: ["a.java"],
+    javacflags: ["-Xsuper-fast"],
+    errorprone: {
+        enabled: false,
+        javacflags: ["-Xep:SpeedLimit:OFF"],
+    },
+}`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("java_library", "java-lib-1", attrNameToString{
+				"javacopts": `["-Xsuper-fast"]`,
+				"srcs":      `["a.java"]`,
+			}),
+		},
+	})
+}
+
+func TestJavaLibraryLogTags(t *testing.T) {
+	runJavaLibraryTestCase(t, bp2buildTestCase{
+		description:                "Java library - logtags creates separate dependency",
+		moduleTypeUnderTest:        "java_library",
+		moduleTypeUnderTestFactory: java.LibraryFactory,
+		blueprint: `java_library {
+        name: "example_lib",
+        srcs: [
+			"a.java",
+			"b.java",
+			"a.logtag",
+			"b.logtag",
+		],
+        bazel_module: { bp2build_available: true },
+}`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("event_log_tags", "example_lib_logtags", attrNameToString{
+				"srcs": `[
+        "a.logtag",
+        "b.logtag",
+    ]`,
+			}),
+			makeBazelTarget("java_library", "example_lib", attrNameToString{
+				"srcs": `[
+        "a.java",
+        "b.java",
+        ":example_lib_logtags",
+    ]`,
+			}),
+		}})
+}
diff --git a/bp2build/java_plugin_conversion_test.go b/bp2build/java_plugin_conversion_test.go
index ff13bb0..c2a2182 100644
--- a/bp2build/java_plugin_conversion_test.go
+++ b/bp2build/java_plugin_conversion_test.go
@@ -70,3 +70,39 @@
 		},
 	})
 }
+
+func TestJavaPluginNoSrcs(t *testing.T) {
+	runJavaPluginTestCase(t, bp2buildTestCase{
+		description: "java_plugin without srcs converts (static) libs to deps",
+		blueprint: `java_plugin {
+    name: "java-plug-1",
+    libs: ["java-lib-1"],
+    static_libs: ["java-lib-2"],
+    bazel_module: { bp2build_available: true },
+}
+
+java_library {
+    name: "java-lib-1",
+    srcs: ["b.java"],
+    bazel_module: { bp2build_available: false },
+}
+
+java_library {
+    name: "java-lib-2",
+    srcs: ["c.java"],
+    bazel_module: { bp2build_available: false },
+}`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("java_plugin", "java-plug-1", attrNameToString{
+				"target_compatible_with": `select({
+        "//build/bazel/platforms/os:android": ["@platforms//:incompatible"],
+        "//conditions:default": [],
+    })`,
+				"deps": `[
+        ":java-lib-1",
+        ":java-lib-2",
+    ]`,
+			}),
+		},
+	})
+}
diff --git a/bp2build/java_proto_conversion_test.go b/bp2build/java_proto_conversion_test.go
new file mode 100644
index 0000000..67f8044
--- /dev/null
+++ b/bp2build/java_proto_conversion_test.go
@@ -0,0 +1,122 @@
+// Copyright 2021 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bp2build
+
+import (
+	"fmt"
+	"testing"
+
+	"android/soong/android"
+	"android/soong/java"
+)
+
+func runJavaProtoTestCase(t *testing.T, tc bp2buildTestCase) {
+	t.Helper()
+	(&tc).moduleTypeUnderTest = "java_library_static"
+	(&tc).moduleTypeUnderTestFactory = java.LibraryFactory
+	runBp2BuildTestCase(t, func(ctx android.RegistrationContext) {}, tc)
+}
+
+func TestJavaProto(t *testing.T) {
+	testCases := []struct {
+		protoType                string
+		javaLibraryType          string
+		javaLibraryNameExtension string
+	}{
+		{
+			protoType:                "nano",
+			javaLibraryType:          "java_nano_proto_library",
+			javaLibraryNameExtension: "java_proto_nano",
+		},
+		{
+			protoType:                "micro",
+			javaLibraryType:          "java_micro_proto_library",
+			javaLibraryNameExtension: "java_proto_micro",
+		},
+		{
+			protoType:                "lite",
+			javaLibraryType:          "java_lite_proto_library",
+			javaLibraryNameExtension: "java_proto_lite",
+		},
+		{
+			protoType:                "stream",
+			javaLibraryType:          "java_stream_proto_library",
+			javaLibraryNameExtension: "java_proto_stream",
+		},
+		{
+			protoType:                "full",
+			javaLibraryType:          "java_proto_library",
+			javaLibraryNameExtension: "java_proto",
+		},
+	}
+
+	bp := `java_library_static {
+    name: "java-protos",
+    proto: {
+        type: "%s",
+    },
+    srcs: ["a.proto"],
+}`
+
+	protoLibrary := makeBazelTarget("proto_library", "java-protos_proto", attrNameToString{
+		"srcs": `["a.proto"]`,
+	})
+
+	for _, tc := range testCases {
+		javaLibraryName := fmt.Sprintf("java-protos_%s", tc.javaLibraryNameExtension)
+
+		runJavaProtoTestCase(t, bp2buildTestCase{
+			description: fmt.Sprintf("java_proto %s", tc.protoType),
+			blueprint:   fmt.Sprintf(bp, tc.protoType),
+			expectedBazelTargets: []string{
+				protoLibrary,
+				makeBazelTarget(
+					tc.javaLibraryType,
+					javaLibraryName,
+					attrNameToString{
+						"deps": `[":java-protos_proto"]`,
+					}),
+				makeBazelTarget("java_library", "java-protos", attrNameToString{
+					"exports": fmt.Sprintf(`[":%s"]`, javaLibraryName),
+				}),
+			},
+		})
+	}
+}
+
+func TestJavaProtoDefault(t *testing.T) {
+	runJavaProtoTestCase(t, bp2buildTestCase{
+		description: "java_library proto default",
+		blueprint: `java_library_static {
+    name: "java-protos",
+    srcs: ["a.proto"],
+}
+`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("proto_library", "java-protos_proto", attrNameToString{
+				"srcs": `["a.proto"]`,
+			}),
+			makeBazelTarget(
+				"java_lite_proto_library",
+				"java-protos_java_proto_lite",
+				attrNameToString{
+					"deps": `[":java-protos_proto"]`,
+				}),
+			makeBazelTarget("java_library", "java-protos", attrNameToString{
+				"exports": `[":java-protos_java_proto_lite"]`,
+			}),
+		},
+	})
+}
diff --git a/bp2build/metrics.go b/bp2build/metrics.go
index 8a0b1c9..04fac44 100644
--- a/bp2build/metrics.go
+++ b/bp2build/metrics.go
@@ -43,6 +43,8 @@
 
 	// Counts of total modules by module type.
 	totalModuleTypeCount map[string]uint64
+
+	Events []*bp2build_metrics_proto.Event
 }
 
 // Serialize returns the protoized version of CodegenMetrics: bp2build_metrics_proto.Bp2BuildMetrics
@@ -55,6 +57,7 @@
 		ConvertedModules:         metrics.convertedModules,
 		ConvertedModuleTypeCount: metrics.convertedModuleTypeCount,
 		TotalModuleTypeCount:     metrics.totalModuleTypeCount,
+		Events:                   metrics.Events,
 	}
 }
 
diff --git a/bp2build/prebuilt_etc_conversion_test.go b/bp2build/prebuilt_etc_conversion_test.go
index 3a5d5bb..2e4b221 100644
--- a/bp2build/prebuilt_etc_conversion_test.go
+++ b/bp2build/prebuilt_etc_conversion_test.go
@@ -45,11 +45,11 @@
 }
 `,
 		expectedBazelTargets: []string{
-			makeBazelTarget("prebuilt_etc", "apex_tz_version", attrNameToString{
+			makeBazelTarget("prebuilt_file", "apex_tz_version", attrNameToString{
 				"filename":    `"tz_version"`,
 				"installable": `False`,
 				"src":         `"version/tz_version"`,
-				"sub_dir":     `"tz"`,
+				"dir":         `"etc/tz"`,
 			})}})
 }
 
@@ -75,7 +75,7 @@
 }
 `,
 		expectedBazelTargets: []string{
-			makeBazelTarget("prebuilt_etc", "apex_tz_version", attrNameToString{
+			makeBazelTarget("prebuilt_file", "apex_tz_version", attrNameToString{
 				"filename":    `"tz_version"`,
 				"installable": `False`,
 				"src": `select({
@@ -83,7 +83,7 @@
         "//build/bazel/platforms/arch:arm64": "arm64",
         "//conditions:default": "version/tz_version",
     })`,
-				"sub_dir": `"tz"`,
+				"dir": `"etc/tz"`,
 			})}})
 }
 
@@ -114,7 +114,7 @@
 }
 `,
 		expectedBazelTargets: []string{
-			makeBazelTarget("prebuilt_etc", "apex_tz_version", attrNameToString{
+			makeBazelTarget("prebuilt_file", "apex_tz_version", attrNameToString{
 				"filename":    `"tz_version"`,
 				"installable": `False`,
 				"src": `select({
@@ -125,6 +125,59 @@
         "//build/bazel/platforms/os_arch:linux_bionic_arm64": "darwin_or_arm64",
         "//conditions:default": "version/tz_version",
     })`,
-				"sub_dir": `"tz"`,
+				"dir": `"etc/tz"`,
+			})}})
+}
+
+func runPrebuiltUsrShareTestCase(t *testing.T, tc bp2buildTestCase) {
+	t.Helper()
+	(&tc).moduleTypeUnderTest = "prebuilt_usr_share"
+	(&tc).moduleTypeUnderTestFactory = etc.PrebuiltUserShareFactory
+	runBp2BuildTestCase(t, registerPrebuiltEtcModuleTypes, tc)
+}
+
+func registerPrebuiltUsrShareModuleTypes(ctx android.RegistrationContext) {
+}
+
+func TestPrebuiltUsrShareSimple(t *testing.T) {
+	runPrebuiltUsrShareTestCase(t, bp2buildTestCase{
+		description: "prebuilt_usr_share - simple example",
+		filesystem:  map[string]string{},
+		blueprint: `
+prebuilt_usr_share {
+    name: "apex_tz_version",
+    src: "version/tz_version",
+    filename: "tz_version",
+    sub_dir: "tz",
+    installable: false,
+}
+`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("prebuilt_file", "apex_tz_version", attrNameToString{
+				"filename":    `"tz_version"`,
+				"installable": `False`,
+				"src":         `"version/tz_version"`,
+				"dir":         `"usr/share/tz"`,
+			})}})
+}
+
+func TestPrebuiltEtcNoSubdir(t *testing.T) {
+	runPrebuiltEtcTestCase(t, bp2buildTestCase{
+		description: "prebuilt_etc - no subdir",
+		filesystem:  map[string]string{},
+		blueprint: `
+prebuilt_etc {
+    name: "apex_tz_version",
+    src: "version/tz_version",
+    filename: "tz_version",
+    installable: false,
+}
+`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("prebuilt_file", "apex_tz_version", attrNameToString{
+				"filename":    `"tz_version"`,
+				"installable": `False`,
+				"src":         `"version/tz_version"`,
+				"dir":         `"etc"`,
 			})}})
 }
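The expected "dir" values in the prebuilt tests above all follow one pattern: the module type's base install directory, joined with sub_dir when it is set. A minimal sketch, with a hypothetical helper name:

    // Hypothetical helper mirroring the expectations above: prebuilt_etc
    // installs under "etc" and prebuilt_usr_share under "usr/share"; the
    // optional sub_dir is appended to form the prebuilt_file "dir" attribute.
    func expectedPrebuiltFileDir(baseDir, subDir string) string {
        if subDir == "" {
            return baseDir // "etc" in TestPrebuiltEtcNoSubdir
        }
        return baseDir + "/" + subDir // "etc/tz", "usr/share/tz"
    }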
diff --git a/bp2build/testing.go b/bp2build/testing.go
index 53b60fa..029ba49 100644
--- a/bp2build/testing.go
+++ b/bp2build/testing.go
@@ -25,14 +25,17 @@
 	"testing"
 
 	"android/soong/android"
+	"android/soong/android/allowlists"
 	"android/soong/bazel"
 )
 
 var (
 	// A default configuration for tests to not have to specify bp2build_available on top level targets.
-	bp2buildConfig = android.Bp2BuildConfig{
-		android.BP2BUILD_TOPLEVEL: android.Bp2BuildDefaultTrueRecursively,
-	}
+	bp2buildConfig = android.NewBp2BuildAllowlist().SetDefaultConfig(
+		allowlists.Bp2BuildConfig{
+			android.Bp2BuildTopLevel: allowlists.Bp2BuildDefaultTrueRecursively,
+		},
+	)
 
 	buildDir string
 )
@@ -81,8 +84,9 @@
 	expectedBazelTargets       []string
 	filesystem                 map[string]string
 	dir                        string
-	expectedErr                error
-	unconvertedDepsMode        unconvertedDepsMode
+	// expectedErr, if set, is matched as a substring of the actual error message.
+	expectedErr         error
+	unconvertedDepsMode unconvertedDepsMode
 }
 
 func runBp2BuildTestCase(t *testing.T, registerModuleTypes func(ctx android.RegistrationContext), tc bp2buildTestCase) {
diff --git a/bpfix/bpfix/bpfix.go b/bpfix/bpfix/bpfix.go
index 4f7d88c..94b28dc 100644
--- a/bpfix/bpfix/bpfix.go
+++ b/bpfix/bpfix/bpfix.go
@@ -449,6 +449,7 @@
 		}
 
 		hasInstrumentationFor := hasNonEmptyLiteralStringProperty(mod, "instrumentation_for")
+		hasTestSuites := hasNonEmptyLiteralListProperty(mod, "test_suites")
 		tags, _ := getLiteralListPropertyValue(mod, "tags")
 
 		var hasTestsTag bool
@@ -458,7 +459,7 @@
 			}
 		}
 
-		isTest := hasInstrumentationFor || hasTestsTag
+		isTest := hasInstrumentationFor || hasTestsTag || hasTestSuites
 
 		if isTest {
 			switch mod.Type {
@@ -470,13 +471,7 @@
 				mod.Type = "java_test"
 			case "java_library_host":
 				mod.Type = "java_test_host"
-			}
-		}
-
-		// when a cc_binary module has a nonempty test_suites field, modify the type to cc_test
-		if mod.Type == "cc_binary" {
-			hasTestSuites := hasNonEmptyLiteralListProperty(mod, "test_suites")
-			if hasTestSuites {
+			case "cc_binary":
 				mod.Type = "cc_test"
 			}
 		}
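With the consolidated switch above, any module that looks like a test (instrumentation_for, a "tests" tag, or now a non-empty test_suites) is renamed to its test counterpart. A short sketch of the rewrites confirmed by this change and its tests:

    // Illustrative subset of the isTest rewrites; the android_app and
    // cc_binary entries are the ones exercised by the new bpfix tests below.
    var testTypeFor = map[string]string{
        "android_app":       "android_test",
        "java_library_host": "java_test_host",
        "cc_binary":         "cc_test",
    }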
diff --git a/bpfix/bpfix/bpfix_test.go b/bpfix/bpfix/bpfix_test.go
index 17b3c24..672e852 100644
--- a/bpfix/bpfix/bpfix_test.go
+++ b/bpfix/bpfix/bpfix_test.go
@@ -1436,6 +1436,38 @@
 				}
 			`,
 		},
+		{
+			name: "android_app with android_test",
+			in: `
+				android_app {
+					name: "foo",
+					srcs: ["srcs"],
+					test_suites: ["test_suite1"],
+				}
+			`,
+			out: `
+				android_test {
+					name: "foo",
+					srcs: ["srcs"],
+					test_suites: ["test_suite1"],
+				}
+			`,
+		},
+		{
+			name: "android_app without test_suites",
+			in: `
+				android_app {
+					name: "foo",
+					srcs: ["srcs"],
+				}
+			`,
+			out: `
+				android_app {
+					name: "foo",
+					srcs: ["srcs"],
+				}
+			`,
+		},
 	}
 	for _, test := range tests {
 		t.Run(test.name, func(t *testing.T) {
diff --git a/build_test.bash b/build_test.bash
index b6d00e2..8b91e2c 100755
--- a/build_test.bash
+++ b/build_test.bash
@@ -25,7 +25,10 @@
 
 # Products that are broken or otherwise don't work with multiproduct_kati
 SKIPPED_PRODUCTS=(
+    # These products are for soong-only builds, and will fail the kati stage.
+    linux_bionic
     mainline_sdk
+    ndk
 )
 
 # To track how long we took to startup. %N isn't supported on Darwin, but
diff --git a/cc/afdo.go b/cc/afdo.go
index d7cce77..66e9732 100644
--- a/cc/afdo.go
+++ b/cc/afdo.go
@@ -32,7 +32,7 @@
 
 var afdoProfileProjectsConfigKey = android.NewOnceKey("AfdoProfileProjects")
 
-const afdoCFlagsFormat = "-fprofile-sample-accurate -fprofile-sample-use=%s"
+const afdoCFlagsFormat = "-funique-internal-linkage-names -fprofile-sample-accurate -fprofile-sample-use=%s"
 
 func getAfdoProfileProjects(config android.DeviceConfig) []string {
 	return config.OnceStringSlice(afdoProfileProjectsConfigKey, func() []string {
@@ -45,6 +45,8 @@
 }
 
 type AfdoProperties struct {
+	// Afdo lets developers self-service enroll a module in
+	// automatic feedback-directed optimization using profile data.
 	Afdo bool
 
 	AfdoTarget *string  `blueprint:"mutated"`
diff --git a/cc/androidmk.go b/cc/androidmk.go
index 9290272..ff5ba45 100644
--- a/cc/androidmk.go
+++ b/cc/androidmk.go
@@ -331,6 +331,14 @@
 		})
 }
 
+func (test *testDecorator) AndroidMkEntries(ctx AndroidMkContext, entries *android.AndroidMkEntries) {
+	entries.ExtraEntries = append(entries.ExtraEntries, func(ctx android.AndroidMkExtraEntriesContext, entries *android.AndroidMkEntries) {
+		if len(test.InstallerProperties.Test_suites) > 0 {
+			entries.AddCompatibilityTestSuites(test.InstallerProperties.Test_suites...)
+		}
+	})
+}
+
 func (binary *binaryDecorator) AndroidMkEntries(ctx AndroidMkContext, entries *android.AndroidMkEntries) {
 	ctx.subAndroidMk(entries, binary.baseInstaller)
 
@@ -379,14 +387,13 @@
 
 func (test *testBinary) AndroidMkEntries(ctx AndroidMkContext, entries *android.AndroidMkEntries) {
 	ctx.subAndroidMk(entries, test.binaryDecorator)
+	ctx.subAndroidMk(entries, test.testDecorator)
+
 	entries.Class = "NATIVE_TESTS"
 	if Bool(test.Properties.Test_per_src) {
 		entries.SubName = "_" + String(test.binaryDecorator.Properties.Stem)
 	}
 	entries.ExtraEntries = append(entries.ExtraEntries, func(ctx android.AndroidMkExtraEntriesContext, entries *android.AndroidMkEntries) {
-		if len(test.Properties.Test_suites) > 0 {
-			entries.AddCompatibilityTestSuites(test.Properties.Test_suites...)
-		}
 		if test.testConfig != nil {
 			entries.SetString("LOCAL_FULL_TEST_CONFIG", test.testConfig.String())
 		}
@@ -445,6 +452,7 @@
 
 func (test *testLibrary) AndroidMkEntries(ctx AndroidMkContext, entries *android.AndroidMkEntries) {
 	ctx.subAndroidMk(entries, test.libraryDecorator)
+	ctx.subAndroidMk(entries, test.testDecorator)
 }
 
 func (installer *baseInstaller) AndroidMkEntries(ctx AndroidMkContext, entries *android.AndroidMkEntries) {
@@ -560,10 +568,6 @@
 func (c *snapshotBinaryDecorator) AndroidMkEntries(ctx AndroidMkContext, entries *android.AndroidMkEntries) {
 	entries.Class = "EXECUTABLES"
 	entries.SubName = c.baseProperties.Androidmk_suffix
-
-	entries.ExtraEntries = append(entries.ExtraEntries, func(ctx android.AndroidMkExtraEntriesContext, entries *android.AndroidMkEntries) {
-		entries.AddStrings("LOCAL_MODULE_SYMLINKS", c.Properties.Symlinks...)
-	})
 }
 
 func (c *snapshotObjectLinker) AndroidMkEntries(ctx AndroidMkContext, entries *android.AndroidMkEntries) {
diff --git a/cc/binary.go b/cc/binary.go
index 6c7d581..c5017c1 100644
--- a/cc/binary.go
+++ b/cc/binary.go
@@ -220,18 +220,18 @@
 func (binary *binaryDecorator) linkerInit(ctx BaseModuleContext) {
 	binary.baseLinker.linkerInit(ctx)
 
-	if !ctx.toolchain().Bionic() && !ctx.toolchain().Musl() {
-		if ctx.Os() == android.Linux {
-			// Unless explicitly specified otherwise, host static binaries are built with -static
-			// if HostStaticBinaries is true for the product configuration.
-			if binary.Properties.Static_executable == nil && ctx.Config().HostStaticBinaries() {
-				binary.Properties.Static_executable = BoolPtr(true)
-			}
-		} else {
-			// Static executables are not supported on Darwin or Windows
-			binary.Properties.Static_executable = nil
+	if ctx.Os().Linux() && ctx.Host() {
+		// Unless explicitly specified otherwise, host static binaries are built with -static
+		// if HostStaticBinaries is true for the product configuration.
+		if binary.Properties.Static_executable == nil && ctx.Config().HostStaticBinaries() {
+			binary.Properties.Static_executable = BoolPtr(true)
 		}
 	}
+
+	if ctx.Darwin() || ctx.Windows() {
+		// Static executables are not supported on Darwin or Windows
+		binary.Properties.Static_executable = nil
+	}
 }
 
 func (binary *binaryDecorator) static() bool {
@@ -441,6 +441,16 @@
 
 	// Need to determine symlinks early since some targets (ie APEX) need this
 	// information but will not call 'install'
+	binary.setSymlinkList(ctx)
+
+	return ret
+}
+
+func (binary *binaryDecorator) unstrippedOutputFilePath() android.Path {
+	return binary.unstrippedOutputFile
+}
+
+func (binary *binaryDecorator) setSymlinkList(ctx ModuleContext) {
 	for _, symlink := range binary.Properties.Symlinks {
 		binary.symlinks = append(binary.symlinks,
 			symlink+String(binary.Properties.Suffix)+ctx.toolchain().ExecutableSuffix())
@@ -457,12 +467,6 @@
 			binary.preferredArchSymlink = symlinkName
 		}
 	}
-
-	return ret
-}
-
-func (binary *binaryDecorator) unstrippedOutputFilePath() android.Path {
-	return binary.unstrippedOutputFile
 }
 
 func (binary *binaryDecorator) symlinkList() []string {
@@ -611,6 +615,7 @@
 		Linkopts:          baseAttrs.linkopts,
 		Link_crt:          baseAttrs.linkCrt,
 		Use_libcrt:        baseAttrs.useLibcrt,
+		Use_version_lib:   baseAttrs.useVersionLib,
 		Rtti:              baseAttrs.rtti,
 		Stl:               baseAttrs.stl,
 		Cpp_std:           baseAttrs.cppStd,
@@ -626,6 +631,8 @@
 		},
 
 		Features: baseAttrs.features,
+
+		sdkAttributes: bp2BuildParseSdkAttributes(m),
 	}
 
 	ctx.CreateBazelTargetModule(bazel.BazelTargetModuleProperties{
@@ -659,8 +666,9 @@
 	Linkopts                 bazel.StringListAttribute
 	Additional_linker_inputs bazel.LabelListAttribute
 
-	Link_crt   bazel.BoolAttribute
-	Use_libcrt bazel.BoolAttribute
+	Link_crt        bazel.BoolAttribute
+	Use_libcrt      bazel.BoolAttribute
+	Use_version_lib bazel.BoolAttribute
 
 	Rtti    bazel.BoolAttribute
 	Stl     *string
@@ -669,4 +677,6 @@
 	Strip stripAttributes
 
 	Features bazel.StringListAttribute
+
+	sdkAttributes
 }
diff --git a/cc/bp2build.go b/cc/bp2build.go
index 42fc0e4..cc378b3 100644
--- a/cc/bp2build.go
+++ b/cc/bp2build.go
@@ -16,7 +16,6 @@
 import (
 	"fmt"
 	"path/filepath"
-	"regexp"
 	"strings"
 
 	"android/soong/android"
@@ -34,12 +33,6 @@
 	protoSrcPartition = "proto"
 )
 
-var (
-	// ignoring case, checks for proto or protos as an independent word in the name, whether at the
-	// beginning, end, or middle. e.g. "proto.foo", "bar-protos", "baz_proto_srcs" would all match
-	filegroupLikelyProtoPattern = regexp.MustCompile("(?i)(^|[^a-z])proto(s)?([^a-z]|$)")
-)
-
 // staticOrSharedAttributes are the Bazel-ified versions of StaticOrSharedProperties --
 // properties which apply to either the shared or static version of a cc_library module.
 type staticOrSharedAttributes struct {
@@ -59,48 +52,36 @@
 	System_dynamic_deps bazel.LabelListAttribute
 
 	Enabled bazel.BoolAttribute
+
+	sdkAttributes
 }
 
+// groupSrcsByExtension partitions `srcs` into groups based on file extension.
 func groupSrcsByExtension(ctx android.BazelConversionPathContext, srcs bazel.LabelListAttribute) bazel.PartitionToLabelListAttribute {
-	// Check that a module is a filegroup type
-	isFilegroup := func(m blueprint.Module) bool {
-		return ctx.OtherModuleType(m) == "filegroup"
-	}
-
 	// Convert filegroup dependencies into extension-specific filegroups filtered in the filegroup.bzl
 	// macro.
 	addSuffixForFilegroup := func(suffix string) bazel.LabelMapper {
 		return func(ctx bazel.OtherModuleContext, label bazel.Label) (string, bool) {
 			m, exists := ctx.ModuleFromName(label.OriginalModuleName)
 			labelStr := label.Label
-			if !exists || !isFilegroup(m) {
+			if !exists || !android.IsFilegroup(ctx, m) {
 				return labelStr, false
 			}
 			return labelStr + suffix, true
 		}
 	}
 
-	isProtoFilegroup := func(ctx bazel.OtherModuleContext, label bazel.Label) (string, bool) {
-		m, exists := ctx.ModuleFromName(label.OriginalModuleName)
-		labelStr := label.Label
-		if !exists || !isFilegroup(m) {
-			return labelStr, false
-		}
-		likelyProtos := filegroupLikelyProtoPattern.MatchString(label.OriginalModuleName)
-		return labelStr, likelyProtos
-	}
-
 	// TODO(b/190006308): Handle language detection of sources in a Bazel rule.
-	partitioned := bazel.PartitionLabelListAttribute(ctx, &srcs, bazel.LabelPartitions{
-		protoSrcPartition: bazel.LabelPartition{Extensions: []string{".proto"}, LabelMapper: isProtoFilegroup},
+	labels := bazel.LabelPartitions{
+		protoSrcPartition: android.ProtoSrcLabelPartition,
 		cSrcPartition:     bazel.LabelPartition{Extensions: []string{".c"}, LabelMapper: addSuffixForFilegroup("_c_srcs")},
 		asSrcPartition:    bazel.LabelPartition{Extensions: []string{".s", ".S"}, LabelMapper: addSuffixForFilegroup("_as_srcs")},
 		// C++ is the "catch-all" group, and comprises generated sources because we don't
 		// know the language of these sources until the genrule is executed.
 		cppSrcPartition: bazel.LabelPartition{Extensions: []string{".cpp", ".cc", ".cxx", ".mm"}, LabelMapper: addSuffixForFilegroup("_cpp_srcs"), Keep_remainder: true},
-	})
+	}
 
-	return partitioned
+	return bazel.PartitionLabelListAttribute(ctx, &srcs, labels)
 }
 
 // bp2BuildParseLibProps returns the attributes for a variant of a cc_library.
@@ -160,6 +141,7 @@
 	}
 }
 
+// Parses properties common to static and shared libraries. Also used for prebuilt libraries.
 func bp2buildParseStaticOrSharedProps(ctx android.BazelConversionPathContext, module *Module, lib *libraryDecorator, isStatic bool) staticOrSharedAttributes {
 	attrs := staticOrSharedAttributes{}
 
@@ -217,30 +199,72 @@
 
 // Convenience struct to hold all attributes parsed from prebuilt properties.
 type prebuiltAttributes struct {
-	Src bazel.LabelAttribute
+	Src     bazel.LabelAttribute
+	Enabled bazel.BoolAttribute
 }
 
 // NOTE: Used outside of Soong repo project, in the clangprebuilts.go bootstrap_go_package
-func Bp2BuildParsePrebuiltLibraryProps(ctx android.BazelConversionPathContext, module *Module) prebuiltAttributes {
+func Bp2BuildParsePrebuiltLibraryProps(ctx android.BazelConversionPathContext, module *Module, isStatic bool) prebuiltAttributes {
+	manySourceFileError := func(axis bazel.ConfigurationAxis, config string) {
+		ctx.ModuleErrorf("Bp2BuildParsePrebuiltLibraryProps: Expected at most one source file for %s %s\n", axis, config)
+	}
 	var srcLabelAttribute bazel.LabelAttribute
 
-	for axis, configToProps := range module.GetArchVariantProperties(ctx, &prebuiltLinkerProperties{}) {
-		for config, props := range configToProps {
-			if prebuiltLinkerProperties, ok := props.(*prebuiltLinkerProperties); ok {
-				if len(prebuiltLinkerProperties.Srcs) > 1 {
-					ctx.ModuleErrorf("Bp2BuildParsePrebuiltLibraryProps: Expected at most once source file for %s %s\n", axis, config)
-					continue
-				} else if len(prebuiltLinkerProperties.Srcs) == 0 {
-					continue
-				}
-				src := android.BazelLabelForModuleSrcSingle(ctx, prebuiltLinkerProperties.Srcs[0])
-				srcLabelAttribute.SetSelectValue(axis, config, src)
-			}
+	parseSrcs := func(ctx android.BazelConversionPathContext, axis bazel.ConfigurationAxis, config string, srcs []string) {
+		if len(srcs) > 1 {
+			manySourceFileError(axis, config)
+			return
+		} else if len(srcs) == 0 {
+			return
 		}
+		if srcLabelAttribute.SelectValue(axis, config) != nil {
+			manySourceFileError(axis, config)
+			return
+		}
+
+		src := android.BazelLabelForModuleSrcSingle(ctx, srcs[0])
+		srcLabelAttribute.SetSelectValue(axis, config, src)
+	}
+
+	bp2BuildPropParseHelper(ctx, module, &prebuiltLinkerProperties{}, func(axis bazel.ConfigurationAxis, config string, props interface{}) {
+		if prebuiltLinkerProperties, ok := props.(*prebuiltLinkerProperties); ok {
+			parseSrcs(ctx, axis, config, prebuiltLinkerProperties.Srcs)
+		}
+	})
+
+	var enabledLabelAttribute bazel.BoolAttribute
+	parseAttrs := func(axis bazel.ConfigurationAxis, config string, props StaticOrSharedProperties) {
+		if props.Enabled != nil {
+			enabledLabelAttribute.SetSelectValue(axis, config, props.Enabled)
+		}
+		parseSrcs(ctx, axis, config, props.Srcs)
+	}
+
+	if isStatic {
+		bp2BuildPropParseHelper(ctx, module, &StaticProperties{}, func(axis bazel.ConfigurationAxis, config string, props interface{}) {
+			if staticProperties, ok := props.(*StaticProperties); ok {
+				parseAttrs(axis, config, staticProperties.Static)
+			}
+		})
+	} else {
+		bp2BuildPropParseHelper(ctx, module, &SharedProperties{}, func(axis bazel.ConfigurationAxis, config string, props interface{}) {
+			if sharedProperties, ok := props.(*SharedProperties); ok {
+				parseAttrs(axis, config, sharedProperties.Shared)
+			}
+		})
 	}
 
 	return prebuiltAttributes{
-		Src: srcLabelAttribute,
+		Src:     srcLabelAttribute,
+		Enabled: enabledLabelAttribute,
+	}
+}
+
+func bp2BuildPropParseHelper(ctx android.ArchVariantContext, module *Module, propsType interface{}, parseFunc func(axis bazel.ConfigurationAxis, config string, props interface{})) {
+	for axis, configToProps := range module.GetArchVariantProperties(ctx, propsType) {
+		for config, props := range configToProps {
+			parseFunc(axis, config, props)
+		}
 	}
 }
 
@@ -560,6 +584,18 @@
 	}
 }
 
+func bp2BuildParseSdkAttributes(module *Module) sdkAttributes {
+	return sdkAttributes{
+		Sdk_version:     module.Properties.Sdk_version,
+		Min_sdk_version: module.Properties.Min_sdk_version,
+	}
+}
+
+type sdkAttributes struct {
+	Sdk_version     *string
+	Min_sdk_version *string
+}
+
 // Convenience struct to hold all attributes parsed from linker properties.
 type linkerAttributes struct {
 	deps                             bazel.LabelListAttribute
@@ -592,9 +628,12 @@
 	// Use a single variable to capture usage of nocrt in arch variants, so there's only 1 error message for this module
 	var axisFeatures []string
 
+	wholeStaticLibs := android.FirstUniqueStrings(props.Whole_static_libs)
+	la.wholeArchiveDeps.SetSelectValue(axis, config, bazelLabelForWholeDepsExcludes(ctx, wholeStaticLibs, props.Exclude_static_libs))
 	// Excludes to parallel Soong:
 	// https://cs.android.com/android/platform/superproject/+/master:build/soong/cc/linker.go;l=247-249;drc=088b53577dde6e40085ffd737a1ae96ad82fc4b0
-	staticLibs := android.FirstUniqueStrings(props.Static_libs)
+	staticLibs := android.FirstUniqueStrings(android.RemoveListFromList(props.Static_libs, wholeStaticLibs))
+
 	staticDeps := maybePartitionExportedAndImplementationsDepsExcludes(ctx, !isBinary, staticLibs, props.Exclude_static_libs, props.Export_static_lib_headers, bazelLabelForStaticDepsExcludes)
 
 	headerLibs := android.FirstUniqueStrings(props.Header_libs)
@@ -606,9 +645,6 @@
 	(&hDeps.implementation).Append(staticDeps.implementation)
 	la.implementationDeps.SetSelectValue(axis, config, hDeps.implementation)
 
-	wholeStaticLibs := android.FirstUniqueStrings(props.Whole_static_libs)
-	la.wholeArchiveDeps.SetSelectValue(axis, config, bazelLabelForWholeDepsExcludes(ctx, wholeStaticLibs, props.Exclude_static_libs))
-
 	systemSharedLibs := props.System_shared_libs
 	// systemSharedLibs distinguishes between nil/empty list behavior:
 	//    nil -> use default values
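Reordering whole_static_libs ahead of static_libs above ensures that a library named in both properties is emitted only once, as a whole-archive dependency. A small sketch using the same helpers (assumed to run inside the cc package, as above):

    // Illustrative: with the reordering above, "libfoo" survives only in the
    // whole-archive list and "libbar" in the plain static list.
    func exampleWholeVsStatic() (whole, static []string) {
        whole = android.FirstUniqueStrings([]string{"libfoo"})
        static = android.FirstUniqueStrings(
            android.RemoveListFromList([]string{"libfoo", "libbar"}, whole))
        return whole, static // ["libfoo"], ["libbar"]
    }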
diff --git a/cc/builder.go b/cc/builder.go
index 8e6f7cd..525b1a1 100644
--- a/cc/builder.go
+++ b/cc/builder.go
@@ -459,7 +459,7 @@
 }
 
 // Generate rules for compiling multiple .c, .cpp, or .S files to individual .o files
-func transformSourceToObj(ctx ModuleContext, subdir string, srcFiles, noTidySrcs android.Paths,
+func transformSourceToObj(ctx ModuleContext, subdir string, srcFiles, noTidySrcs, timeoutTidySrcs android.Paths,
 	flags builderFlags, pathDeps android.Paths, cFlagsDeps android.Paths) Objects {
 	// Source files are one-to-one with tidy, coverage, or kythe files, if enabled.
 	objFiles := make(android.Paths, len(srcFiles))
@@ -474,6 +474,10 @@
 		tidyTimeout := ctx.Config().Getenv("TIDY_TIMEOUT")
 		if len(tidyTimeout) > 0 {
 			tidyVars += "TIDY_TIMEOUT=" + tidyTimeout + " "
+			// add timeoutTidySrcs into noTidySrcsMap if TIDY_TIMEOUT is set
+			for _, path := range timeoutTidySrcs {
+				noTidySrcsMap[path.String()] = true
+			}
 		}
 	}
 	var coverageFiles android.Paths
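Note that timeoutTidySrcs only takes effect when TIDY_TIMEOUT is set in the environment; otherwise those sources are still run through clang-tidy. A condensed sketch of the gating above (helper name hypothetical):

    // Hypothetical helper condensing the hunk above: tidy_timeout_srcs are
    // added to the no-tidy set only when TIDY_TIMEOUT is non-empty.
    func skippedUnderTidyTimeout(ctx ModuleContext, timeoutTidySrcs android.Paths) map[string]bool {
        noTidy := map[string]bool{}
        if ctx.Config().Getenv("TIDY_TIMEOUT") != "" {
            for _, path := range timeoutTidySrcs {
                noTidy[path.String()] = true
            }
        }
        return noTidy
    }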
diff --git a/cc/cc.go b/cc/cc.go
index a8adb0c..456b736 100644
--- a/cc/cc.go
+++ b/cc/cc.go
@@ -1218,6 +1218,17 @@
 	return c.VendorProperties.IsVendorPublicLibrary
 }
 
+func (c *Module) IsVndkPrebuiltLibrary() bool {
+	if _, ok := c.linker.(*vndkPrebuiltLibraryDecorator); ok {
+		return true
+	}
+	return false
+}
+
+func (c *Module) SdkAndPlatformVariantVisibleToMake() bool {
+	return c.Properties.SdkAndPlatformVariantVisibleToMake
+}
+
 func (c *Module) HasLlndkStubs() bool {
 	lib := moduleLibraryInterface(c)
 	return lib != nil && lib.hasLLNDKStubs()
@@ -1383,7 +1394,7 @@
 }
 
 func InstallToBootstrap(name string, config android.Config) bool {
-	if name == "libclang_rt.hwasan-aarch64-android" {
+	if name == "libclang_rt.hwasan" {
 		return true
 	}
 	return isBionic(name)
@@ -1699,7 +1710,7 @@
 	return nil
 }
 
-func (c *Module) getNameSuffixWithVndkVersion(ctx android.ModuleContext) string {
+func getNameSuffixWithVndkVersion(ctx android.ModuleContext, c LinkableInterface) string {
 	// Returns the name suffix for product and vendor variants. If the VNDK version is not
 	// "current", it will append the VNDK version to the name suffix.
 	var vndkVersion string
@@ -1719,19 +1730,19 @@
 	if vndkVersion == "current" {
 		vndkVersion = ctx.DeviceConfig().PlatformVndkVersion()
 	}
-	if c.Properties.VndkVersion != vndkVersion && c.Properties.VndkVersion != "" {
+	if c.VndkVersion() != vndkVersion && c.VndkVersion() != "" {
 		// add version suffix only if the module is using different vndk version than the
 		// version in product or vendor partition.
-		nameSuffix += "." + c.Properties.VndkVersion
+		nameSuffix += "." + c.VndkVersion()
 	}
 	return nameSuffix
 }
 
-func (c *Module) setSubnameProperty(actx android.ModuleContext) {
-	c.Properties.SubName = ""
+func GetSubnameProperty(actx android.ModuleContext, c LinkableInterface) string {
+	var subName = ""
 
 	if c.Target().NativeBridge == android.NativeBridgeEnabled {
-		c.Properties.SubName += NativeBridgeSuffix
+		subName += NativeBridgeSuffix
 	}
 
 	llndk := c.IsLlndk()
@@ -1739,25 +1750,27 @@
 		// .vendor.{version} suffix is added for vendor variant or .product.{version} suffix is
 		// added for product variant only when we have vendor and product variants with core
 		// variant. The suffix is not added for vendor-only or product-only module.
-		c.Properties.SubName += c.getNameSuffixWithVndkVersion(actx)
+		subName += getNameSuffixWithVndkVersion(actx, c)
 	} else if c.IsVendorPublicLibrary() {
-		c.Properties.SubName += vendorPublicLibrarySuffix
-	} else if _, ok := c.linker.(*vndkPrebuiltLibraryDecorator); ok {
+		subName += vendorPublicLibrarySuffix
+	} else if c.IsVndkPrebuiltLibrary() {
 		// .vendor suffix is added for backward compatibility with VNDK snapshot whose names with
 		// such suffixes are already hard-coded in prebuilts/vndk/.../Android.bp.
-		c.Properties.SubName += VendorSuffix
+		subName += VendorSuffix
 	} else if c.InRamdisk() && !c.OnlyInRamdisk() {
-		c.Properties.SubName += RamdiskSuffix
+		subName += RamdiskSuffix
 	} else if c.InVendorRamdisk() && !c.OnlyInVendorRamdisk() {
-		c.Properties.SubName += VendorRamdiskSuffix
+		subName += VendorRamdiskSuffix
 	} else if c.InRecovery() && !c.OnlyInRecovery() {
-		c.Properties.SubName += RecoverySuffix
-	} else if c.IsSdkVariant() && (c.Properties.SdkAndPlatformVariantVisibleToMake || c.SplitPerApiLevel()) {
-		c.Properties.SubName += sdkSuffix
+		subName += RecoverySuffix
+	} else if c.IsSdkVariant() && (c.SdkAndPlatformVariantVisibleToMake() || c.SplitPerApiLevel()) {
+		subName += sdkSuffix
 		if c.SplitPerApiLevel() {
-			c.Properties.SubName += "." + c.SdkVersion()
+			subName += "." + c.SdkVersion()
 		}
 	}
+
+	return subName
 }
 
 // Returns true if Bazel was successfully used for the analysis of this module.
@@ -1795,7 +1808,7 @@
 		return
 	}
 
-	c.setSubnameProperty(actx)
+	c.Properties.SubName = GetSubnameProperty(actx, c)
 	apexInfo := actx.Provider(android.ApexInfoProvider).(android.ApexInfo)
 	if !apexInfo.IsForPlatform() {
 		c.hideApexVariantFromMake = true
@@ -3543,13 +3556,14 @@
 	case fullLibrary:
 		if !prebuilt {
 			libraryBp2Build(ctx, c)
+		} else {
+			prebuiltLibraryBp2Build(ctx, c)
 		}
 	case headerLibrary:
 		libraryHeadersBp2Build(ctx, c)
 	case staticLibrary:
-
 		if prebuilt {
-			prebuiltLibraryStaticBp2Build(ctx, c)
+			prebuiltLibraryStaticBp2Build(ctx, c, false)
 		} else {
 			sharedOrStaticLibraryBp2Build(ctx, c, true)
 		}
@@ -3595,7 +3609,8 @@
 		&SharedProperties{},
 		&FlagExporterProperties{},
 		&BinaryLinkerProperties{},
-		&TestProperties{},
+		&TestLinkerProperties{},
+		&TestInstallerProperties{},
 		&TestBinaryProperties{},
 		&BenchmarkProperties{},
 		&fuzz.FuzzProperties{},
diff --git a/cc/cc_test.go b/cc/cc_test.go
index 51a6a27..09cc352 100644
--- a/cc/cc_test.go
+++ b/cc/cc_test.go
@@ -779,6 +779,68 @@
 	}
 }
 
+func TestTestBinaryTestSuites(t *testing.T) {
+	bp := `
+		cc_test {
+			name: "main_test",
+			srcs: ["main_test.cpp"],
+			test_suites: [
+				"suite_1",
+				"suite_2",
+			],
+			gtest: false,
+		}
+	`
+
+	ctx := prepareForCcTest.RunTestWithBp(t, bp).TestContext
+	module := ctx.ModuleForTests("main_test", "android_arm_armv7-a-neon").Module()
+
+	entries := android.AndroidMkEntriesForTest(t, ctx, module)[0]
+	compatEntries := entries.EntryMap["LOCAL_COMPATIBILITY_SUITE"]
+	if len(compatEntries) != 2 {
+		t.Errorf("expected two elements in LOCAL_COMPATIBILITY_SUITE. got %d", len(compatEntries))
+	}
+	if compatEntries[0] != "suite_1" {
+		t.Errorf("expected LOCAL_COMPATIBILITY_SUITE to be`suite_1`,"+
+			" but was '%s'", compatEntries[0])
+	}
+	if compatEntries[1] != "suite_2" {
+		t.Errorf("expected LOCAL_COMPATIBILITY_SUITE to be`suite_2`,"+
+			" but was '%s'", compatEntries[1])
+	}
+}
+
+func TestTestLibraryTestSuites(t *testing.T) {
+	bp := `
+		cc_test_library {
+			name: "main_test_lib",
+			srcs: ["main_test_lib.cpp"],
+			test_suites: [
+				"suite_1",
+				"suite_2",
+			],
+			gtest: false,
+		}
+	`
+
+	ctx := prepareForCcTest.RunTestWithBp(t, bp).TestContext
+	module := ctx.ModuleForTests("main_test_lib", "android_arm_armv7-a-neon_shared").Module()
+
+	entries := android.AndroidMkEntriesForTest(t, ctx, module)[0]
+	compatEntries := entries.EntryMap["LOCAL_COMPATIBILITY_SUITE"]
+	if len(compatEntries) != 2 {
+		t.Errorf("expected two elements in LOCAL_COMPATIBILITY_SUITE. got %d", len(compatEntries))
+	}
+	if compatEntries[0] != "suite_1" {
+		t.Errorf("expected LOCAL_COMPATIBILITY_SUITE to be`suite_1`,"+
+			" but was '%s'", compatEntries[0])
+	}
+	if compatEntries[1] != "suite_2" {
+		t.Errorf("expected LOCAL_COMPATIBILITY_SUITE to be`suite_2`,"+
+			" but was '%s'", compatEntries[1])
+	}
+}
+
 func TestVndkWhenVndkVersionIsNotSet(t *testing.T) {
 	ctx := testCcNoVndk(t, `
 		cc_library {
@@ -2944,13 +3006,13 @@
 	// Check the shared version of lib2.
 	variant := "android_arm64_armv8-a_shared"
 	module := ctx.ModuleForTests("lib2", variant).Module().(*Module)
-	checkStaticLibs(t, []string{"lib1", "libc++demangle", "libclang_rt.builtins-aarch64-android"}, module)
+	checkStaticLibs(t, []string{"lib1", "libc++demangle", "libclang_rt.builtins"}, module)
 
 	// Check the static version of lib2.
 	variant = "android_arm64_armv8-a_static"
 	module = ctx.ModuleForTests("lib2", variant).Module().(*Module)
 	// libc++_static is linked additionally.
-	checkStaticLibs(t, []string{"lib1", "libc++_static", "libc++demangle", "libclang_rt.builtins-aarch64-android"}, module)
+	checkStaticLibs(t, []string{"lib1", "libc++_static", "libc++demangle", "libclang_rt.builtins"}, module)
 }
 
 var compilerFlagsTestCases = []struct {
diff --git a/cc/compiler.go b/cc/compiler.go
index d9a6bcd..eb5458f 100644
--- a/cc/compiler.go
+++ b/cc/compiler.go
@@ -42,6 +42,9 @@
 	// list of source files that should not be compiled with clang-tidy.
 	Tidy_disabled_srcs []string `android:"path,arch_variant"`
 
+	// list of source files that should not be checked by clang-tidy when TIDY_TIMEOUT is set.
+	Tidy_timeout_srcs []string `android:"path,arch_variant"`
+
 	// list of source files that should not be used to build the C/C++ module.
 	// This is most useful in the arch/multilib variants to remove non-common files
 	Exclude_srcs []string `android:"path,arch_variant"`
@@ -403,6 +406,10 @@
 		flags.Global.CommonFlags = append(flags.Global.CommonFlags, "-D__ANDROID_RECOVERY__")
 	}
 
+	if ctx.inRecovery() || ctx.inRamdisk() || ctx.inVendorRamdisk() {
+		flags.Global.CommonFlags = append(flags.Global.CommonFlags, "-D__ANDROID_RAMDISK__")
+	}
+
 	if ctx.apexVariationName() != "" {
 		flags.Global.CommonFlags = append(flags.Global.CommonFlags, "-D__ANDROID_APEX__")
 		if ctx.Device() {
@@ -663,6 +670,7 @@
 	// Compile files listed in c.Properties.Srcs into objects
 	objs := compileObjs(ctx, buildFlags, "", srcs,
 		android.PathsForModuleSrc(ctx, compiler.Properties.Tidy_disabled_srcs),
+		android.PathsForModuleSrc(ctx, compiler.Properties.Tidy_timeout_srcs),
 		pathDeps, compiler.cFlagsDeps)
 
 	if ctx.Failed() {
@@ -674,9 +682,9 @@
 
 // Compile a list of source files into objects a specified subdirectory
 func compileObjs(ctx ModuleContext, flags builderFlags, subdir string,
-	srcFiles, noTidySrcs, pathDeps android.Paths, cFlagsDeps android.Paths) Objects {
+	srcFiles, noTidySrcs, timeoutTidySrcs, pathDeps android.Paths, cFlagsDeps android.Paths) Objects {
 
-	return transformSourceToObj(ctx, subdir, srcFiles, noTidySrcs, flags, pathDeps, cFlagsDeps)
+	return transformSourceToObj(ctx, subdir, srcFiles, noTidySrcs, timeoutTidySrcs, flags, pathDeps, cFlagsDeps)
 }
 
 // Properties for rust_bindgen related to generating rust bindings.
diff --git a/cc/config/Android.bp b/cc/config/Android.bp
index e1b0605..1a21c13 100644
--- a/cc/config/Android.bp
+++ b/cc/config/Android.bp
@@ -11,7 +11,6 @@
         "soong-starlark-format",
     ],
     srcs: [
-        "bp2build.go",
         "clang.go",
         "global.go",
         "tidy.go",
@@ -33,7 +32,6 @@
         "arm64_linux_host.go",
     ],
     testSrcs: [
-        "bp2build_test.go",
         "tidy_test.go",
     ],
 }
diff --git a/cc/config/arm64_device.go b/cc/config/arm64_device.go
index 979c825..dfe143f 100644
--- a/cc/config/arm64_device.go
+++ b/cc/config/arm64_device.go
@@ -98,25 +98,28 @@
 	pctx.SourcePathVariable("Arm64GccRoot",
 		"prebuilts/gcc/${HostPrebuiltTag}/aarch64/aarch64-linux-android-${arm64GccVersion}")
 
-	exportStringListStaticVariable("Arm64Ldflags", arm64Ldflags)
-	exportStringListStaticVariable("Arm64Lldflags", arm64Lldflags)
+	exportedVars.ExportStringListStaticVariable("Arm64Ldflags", arm64Ldflags)
+	exportedVars.ExportStringListStaticVariable("Arm64Lldflags", arm64Lldflags)
 
-	exportStringListStaticVariable("Arm64Cflags", arm64Cflags)
-	exportStringListStaticVariable("Arm64Cppflags", arm64Cppflags)
+	exportedVars.ExportStringListStaticVariable("Arm64Cflags", arm64Cflags)
+	exportedVars.ExportStringListStaticVariable("Arm64Cppflags", arm64Cppflags)
 
-	exportedStringListDictVars.Set("Arm64ArchVariantCflags", arm64ArchVariantCflags)
-	exportedStringListDictVars.Set("Arm64CpuVariantCflags", arm64CpuVariantCflags)
+	exportedVars.ExportVariableReferenceDict("Arm64ArchVariantCflags", arm64ArchVariantCflagsVar)
+	exportedVars.ExportVariableReferenceDict("Arm64CpuVariantCflags", arm64CpuVariantCflagsVar)
+	exportedVars.ExportVariableReferenceDict("Arm64CpuVariantLdflags", arm64CpuVariantLdflags)
 
-	pctx.StaticVariable("Arm64Armv8ACflags", strings.Join(arm64ArchVariantCflags["armv8-a"], " "))
-	pctx.StaticVariable("Arm64Armv8ABranchProtCflags", strings.Join(arm64ArchVariantCflags["armv8-a-branchprot"], " "))
-	pctx.StaticVariable("Arm64Armv82ACflags", strings.Join(arm64ArchVariantCflags["armv8-2a"], " "))
-	pctx.StaticVariable("Arm64Armv82ADotprodCflags", strings.Join(arm64ArchVariantCflags["armv8-2a-dotprod"], " "))
+	exportedVars.ExportStringListStaticVariable("Arm64Armv8ACflags", arm64ArchVariantCflags["armv8-a"])
+	exportedVars.ExportStringListStaticVariable("Arm64Armv8ABranchProtCflags", arm64ArchVariantCflags["armv8-a-branchprot"])
+	exportedVars.ExportStringListStaticVariable("Arm64Armv82ACflags", arm64ArchVariantCflags["armv8-2a"])
+	exportedVars.ExportStringListStaticVariable("Arm64Armv82ADotprodCflags", arm64ArchVariantCflags["armv8-2a-dotprod"])
 
-	pctx.StaticVariable("Arm64CortexA53Cflags", strings.Join(arm64CpuVariantCflags["cortex-a53"], " "))
-	pctx.StaticVariable("Arm64CortexA55Cflags", strings.Join(arm64CpuVariantCflags["cortex-a55"], " "))
-	pctx.StaticVariable("Arm64KryoCflags", strings.Join(arm64CpuVariantCflags["kryo"], " "))
-	pctx.StaticVariable("Arm64ExynosM1Cflags", strings.Join(arm64CpuVariantCflags["exynos-m1"], " "))
-	pctx.StaticVariable("Arm64ExynosM2Cflags", strings.Join(arm64CpuVariantCflags["exynos-m2"], " "))
+	exportedVars.ExportStringListStaticVariable("Arm64CortexA53Cflags", arm64CpuVariantCflags["cortex-a53"])
+	exportedVars.ExportStringListStaticVariable("Arm64CortexA55Cflags", arm64CpuVariantCflags["cortex-a55"])
+	exportedVars.ExportStringListStaticVariable("Arm64KryoCflags", arm64CpuVariantCflags["kryo"])
+	exportedVars.ExportStringListStaticVariable("Arm64ExynosM1Cflags", arm64CpuVariantCflags["exynos-m1"])
+	exportedVars.ExportStringListStaticVariable("Arm64ExynosM2Cflags", arm64CpuVariantCflags["exynos-m2"])
+
+	exportedVars.ExportStringListStaticVariable("Arm64FixCortexA53Ldflags", []string{"-Wl,--fix-cortex-a53-843419"})
 }
 
 var (
@@ -128,7 +131,6 @@
 	}
 
 	arm64CpuVariantCflagsVar = map[string]string{
-		"":           "",
 		"cortex-a53": "${config.Arm64CortexA53Cflags}",
 		"cortex-a55": "${config.Arm64CortexA55Cflags}",
 		"cortex-a72": "${config.Arm64CortexA53Cflags}",
@@ -140,6 +142,15 @@
 		"exynos-m1":  "${config.Arm64ExynosM1Cflags}",
 		"exynos-m2":  "${config.Arm64ExynosM2Cflags}",
 	}
+
+	arm64CpuVariantLdflags = map[string]string{
+		"cortex-a53": "${config.Arm64FixCortexA53Ldflags}",
+		"cortex-a72": "${config.Arm64FixCortexA53Ldflags}",
+		"cortex-a73": "${config.Arm64FixCortexA53Ldflags}",
+		"kryo":       "${config.Arm64FixCortexA53Ldflags}",
+		"exynos-m1":  "${config.Arm64FixCortexA53Ldflags}",
+		"exynos-m2":  "${config.Arm64FixCortexA53Ldflags}",
+	}
 )
 
 type toolchainArm64 struct {
@@ -214,12 +225,7 @@
 	toolchainCflags = append(toolchainCflags,
 		variantOrDefault(arm64CpuVariantCflagsVar, arch.CpuVariant))
 
-	var extraLdflags string
-	switch arch.CpuVariant {
-	case "cortex-a53", "cortex-a72", "cortex-a73", "kryo", "exynos-m1", "exynos-m2":
-		extraLdflags = "-Wl,--fix-cortex-a53-843419"
-	}
-
+	extraLdflags := variantOrDefault(arm64CpuVariantLdflags, arch.CpuVariant)
 	return &toolchainArm64{
 		ldflags: strings.Join([]string{
 			"${config.Arm64Ldflags}",
diff --git a/cc/config/arm_device.go b/cc/config/arm_device.go
index 0fe5e68..d702c61 100644
--- a/cc/config/arm_device.go
+++ b/cc/config/arm_device.go
@@ -39,6 +39,10 @@
 
 	armLldflags = armLdflags
 
+	armFixCortexA8LdFlags = []string{"-Wl,--fix-cortex-a8"}
+
+	armNoFixCortexA8LdFlags = []string{"-Wl,--no-fix-cortex-a8"}
+
 	armArmCflags = []string{
 		"-fstrict-aliasing",
 	}
@@ -174,38 +178,41 @@
 	pctx.SourcePathVariable("ArmGccRoot", "prebuilts/gcc/${HostPrebuiltTag}/arm/arm-linux-androideabi-${armGccVersion}")
 
 	// Just exported. Not created as a Ninja static variable.
-	exportedStringVars.Set("ArmClangTriple", clangTriple)
+	exportedVars.ExportString("ArmClangTriple", clangTriple)
 
-	exportStringListStaticVariable("ArmLdflags", armLdflags)
-	exportStringListStaticVariable("ArmLldflags", armLldflags)
+	exportedVars.ExportStringListStaticVariable("ArmLdflags", armLdflags)
+	exportedVars.ExportStringListStaticVariable("ArmLldflags", armLldflags)
+
+	exportedVars.ExportStringListStaticVariable("ArmFixCortexA8LdFlags", armFixCortexA8LdFlags)
+	exportedVars.ExportStringListStaticVariable("ArmNoFixCortexA8LdFlags", armNoFixCortexA8LdFlags)
 
 	// Clang cflags
-	exportStringListStaticVariable("ArmToolchainCflags", armToolchainCflags)
-	exportStringListStaticVariable("ArmCflags", armCflags)
-	exportStringListStaticVariable("ArmCppflags", armCppflags)
+	exportedVars.ExportStringListStaticVariable("ArmToolchainCflags", armToolchainCflags)
+	exportedVars.ExportStringListStaticVariable("ArmCflags", armCflags)
+	exportedVars.ExportStringListStaticVariable("ArmCppflags", armCppflags)
 
 	// Clang ARM vs. Thumb instruction set cflags
-	exportStringListStaticVariable("ArmArmCflags", armArmCflags)
-	exportStringListStaticVariable("ArmThumbCflags", armThumbCflags)
+	exportedVars.ExportStringListStaticVariable("ArmArmCflags", armArmCflags)
+	exportedVars.ExportStringListStaticVariable("ArmThumbCflags", armThumbCflags)
 
-	exportedStringListDictVars.Set("ArmArchVariantCflags", armArchVariantCflags)
-	exportedStringListDictVars.Set("ArmCpuVariantCflags", armCpuVariantCflags)
+	exportedVars.ExportVariableReferenceDict("ArmArchVariantCflags", armArchVariantCflagsVar)
+	exportedVars.ExportVariableReferenceDict("ArmCpuVariantCflags", armCpuVariantCflagsVar)
 
 	// Clang arch variant cflags
-	exportStringListStaticVariable("ArmArmv7ACflags", armArchVariantCflags["armv7-a"])
-	exportStringListStaticVariable("ArmArmv7ANeonCflags", armArchVariantCflags["armv7-a-neon"])
-	exportStringListStaticVariable("ArmArmv8ACflags", armArchVariantCflags["armv8-a"])
-	exportStringListStaticVariable("ArmArmv82ACflags", armArchVariantCflags["armv8-2a"])
+	exportedVars.ExportStringListStaticVariable("ArmArmv7ACflags", armArchVariantCflags["armv7-a"])
+	exportedVars.ExportStringListStaticVariable("ArmArmv7ANeonCflags", armArchVariantCflags["armv7-a-neon"])
+	exportedVars.ExportStringListStaticVariable("ArmArmv8ACflags", armArchVariantCflags["armv8-a"])
+	exportedVars.ExportStringListStaticVariable("ArmArmv82ACflags", armArchVariantCflags["armv8-2a"])
 
 	// Clang cpu variant cflags
-	exportStringListStaticVariable("ArmGenericCflags", armCpuVariantCflags[""])
-	exportStringListStaticVariable("ArmCortexA7Cflags", armCpuVariantCflags["cortex-a7"])
-	exportStringListStaticVariable("ArmCortexA8Cflags", armCpuVariantCflags["cortex-a8"])
-	exportStringListStaticVariable("ArmCortexA15Cflags", armCpuVariantCflags["cortex-a15"])
-	exportStringListStaticVariable("ArmCortexA53Cflags", armCpuVariantCflags["cortex-a53"])
-	exportStringListStaticVariable("ArmCortexA55Cflags", armCpuVariantCflags["cortex-a55"])
-	exportStringListStaticVariable("ArmKraitCflags", armCpuVariantCflags["krait"])
-	exportStringListStaticVariable("ArmKryoCflags", armCpuVariantCflags["kryo"])
+	exportedVars.ExportStringListStaticVariable("ArmGenericCflags", armCpuVariantCflags[""])
+	exportedVars.ExportStringListStaticVariable("ArmCortexA7Cflags", armCpuVariantCflags["cortex-a7"])
+	exportedVars.ExportStringListStaticVariable("ArmCortexA8Cflags", armCpuVariantCflags["cortex-a8"])
+	exportedVars.ExportStringListStaticVariable("ArmCortexA15Cflags", armCpuVariantCflags["cortex-a15"])
+	exportedVars.ExportStringListStaticVariable("ArmCortexA53Cflags", armCpuVariantCflags["cortex-a53"])
+	exportedVars.ExportStringListStaticVariable("ArmCortexA55Cflags", armCpuVariantCflags["cortex-a55"])
+	exportedVars.ExportStringListStaticVariable("ArmKraitCflags", armCpuVariantCflags["krait"])
+	exportedVars.ExportStringListStaticVariable("ArmKryoCflags", armCpuVariantCflags["kryo"])
 }
 
 var (
@@ -324,12 +331,12 @@
 		switch arch.CpuVariant {
 		case "cortex-a8", "":
 			// Generic ARM might be a Cortex A8 -- better safe than sorry
-			fixCortexA8 = "-Wl,--fix-cortex-a8"
+			fixCortexA8 = "${config.ArmFixCortexA8LdFlags}"
 		default:
-			fixCortexA8 = "-Wl,--no-fix-cortex-a8"
+			fixCortexA8 = "${config.ArmNoFixCortexA8LdFlags}"
 		}
 	case "armv7-a":
-		fixCortexA8 = "-Wl,--fix-cortex-a8"
+		fixCortexA8 = "${config.ArmFixCortexA8LdFlags}"
 	case "armv8-a", "armv8-2a":
 		// Nothing extra for armv8-a/armv8-2a
 	default:
diff --git a/cc/config/bp2build.go b/cc/config/bp2build.go
deleted file mode 100644
index eca5161..0000000
--- a/cc/config/bp2build.go
+++ /dev/null
@@ -1,317 +0,0 @@
-// Copyright 2021 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package config
-
-import (
-	"fmt"
-	"reflect"
-	"regexp"
-	"sort"
-	"strings"
-
-	"android/soong/android"
-	"android/soong/starlark_fmt"
-
-	"github.com/google/blueprint"
-)
-
-type bazelVarExporter interface {
-	asBazel(android.Config, exportedStringVariables, exportedStringListVariables, exportedConfigDependingVariables) []bazelConstant
-}
-
-// Helpers for exporting cc configuration information to Bazel.
-var (
-	// Maps containing toolchain variables that are independent of the
-	// environment variables of the build.
-	exportedStringListVars     = exportedStringListVariables{}
-	exportedStringVars         = exportedStringVariables{}
-	exportedStringListDictVars = exportedStringListDictVariables{}
-
-	/// Maps containing variables that are dependent on the build config.
-	exportedConfigDependingVars = exportedConfigDependingVariables{}
-)
-
-type exportedConfigDependingVariables map[string]interface{}
-
-func (m exportedConfigDependingVariables) Set(k string, v interface{}) {
-	m[k] = v
-}
-
-// Ensure that string s has no invalid characters to be generated into the bzl file.
-func validateCharacters(s string) string {
-	for _, c := range []string{`\n`, `"`, `\`} {
-		if strings.Contains(s, c) {
-			panic(fmt.Errorf("%s contains illegal character %s", s, c))
-		}
-	}
-	return s
-}
-
-type bazelConstant struct {
-	variableName       string
-	internalDefinition string
-}
-
-type exportedStringVariables map[string]string
-
-func (m exportedStringVariables) Set(k string, v string) {
-	m[k] = v
-}
-
-func (m exportedStringVariables) asBazel(config android.Config,
-	stringVars exportedStringVariables, stringListVars exportedStringListVariables, cfgDepVars exportedConfigDependingVariables) []bazelConstant {
-	ret := make([]bazelConstant, 0, len(m))
-	for k, variableValue := range m {
-		expandedVar, err := expandVar(config, variableValue, stringVars, stringListVars, cfgDepVars)
-		if err != nil {
-			panic(fmt.Errorf("error expanding config variable %s: %s", k, err))
-		}
-		if len(expandedVar) > 1 {
-			panic(fmt.Errorf("%s expands to more than one string value: %s", variableValue, expandedVar))
-		}
-		ret = append(ret, bazelConstant{
-			variableName:       k,
-			internalDefinition: fmt.Sprintf(`"%s"`, validateCharacters(expandedVar[0])),
-		})
-	}
-	return ret
-}
-
-// Convenience function to declare a static variable and export it to Bazel's cc_toolchain.
-func exportStringStaticVariable(name string, value string) {
-	pctx.StaticVariable(name, value)
-	exportedStringVars.Set(name, value)
-}
-
-type exportedStringListVariables map[string][]string
-
-func (m exportedStringListVariables) Set(k string, v []string) {
-	m[k] = v
-}
-
-func (m exportedStringListVariables) asBazel(config android.Config,
-	stringScope exportedStringVariables, stringListScope exportedStringListVariables,
-	exportedVars exportedConfigDependingVariables) []bazelConstant {
-	ret := make([]bazelConstant, 0, len(m))
-	// For each exported variable, recursively expand elements in the variableValue
-	// list to ensure that interpolated variables are expanded according to their values
-	// in the variable scope.
-	for k, variableValue := range m {
-		var expandedVars []string
-		for _, v := range variableValue {
-			expandedVar, err := expandVar(config, v, stringScope, stringListScope, exportedVars)
-			if err != nil {
-				panic(fmt.Errorf("Error expanding config variable %s=%s: %s", k, v, err))
-			}
-			expandedVars = append(expandedVars, expandedVar...)
-		}
-		// Assign the list as a bzl-private variable; this variable will be exported
-		// out through a constants struct later.
-		ret = append(ret, bazelConstant{
-			variableName:       k,
-			internalDefinition: starlark_fmt.PrintStringList(expandedVars, 0),
-		})
-	}
-	return ret
-}
-
-// Convenience function to declare a static "source path" variable and export it to Bazel's cc_toolchain.
-func exportVariableConfigMethod(name string, method interface{}) blueprint.Variable {
-	exportedConfigDependingVars.Set(name, method)
-	return pctx.VariableConfigMethod(name, method)
-}
-
-// Convenience function to declare a static "source path" variable and export it to Bazel's cc_toolchain.
-func exportSourcePathVariable(name string, value string) {
-	pctx.SourcePathVariable(name, value)
-	exportedStringVars.Set(name, value)
-}
-
-// Convenience function to declare a static variable and export it to Bazel's cc_toolchain.
-func exportStringListStaticVariable(name string, value []string) {
-	pctx.StaticVariable(name, strings.Join(value, " "))
-	exportedStringListVars.Set(name, value)
-}
-
-func ExportStringList(name string, value []string) {
-	exportedStringListVars.Set(name, value)
-}
-
-type exportedStringListDictVariables map[string]map[string][]string
-
-func (m exportedStringListDictVariables) Set(k string, v map[string][]string) {
-	m[k] = v
-}
-
-// Since dictionaries are not supported in Ninja, we do not expand variables for dictionaries
-func (m exportedStringListDictVariables) asBazel(_ android.Config, _ exportedStringVariables,
-	_ exportedStringListVariables, _ exportedConfigDependingVariables) []bazelConstant {
-	ret := make([]bazelConstant, 0, len(m))
-	for k, dict := range m {
-		ret = append(ret, bazelConstant{
-			variableName:       k,
-			internalDefinition: starlark_fmt.PrintStringListDict(dict, 0),
-		})
-	}
-	return ret
-}
-
-// BazelCcToolchainVars generates bzl file content containing variables for
-// Bazel's cc_toolchain configuration.
-func BazelCcToolchainVars(config android.Config) string {
-	return bazelToolchainVars(
-		config,
-		exportedStringListDictVars,
-		exportedStringListVars,
-		exportedStringVars)
-}
-
-func bazelToolchainVars(config android.Config, vars ...bazelVarExporter) string {
-	ret := "# GENERATED FOR BAZEL FROM SOONG. DO NOT EDIT.\n\n"
-
-	results := []bazelConstant{}
-	for _, v := range vars {
-		results = append(results, v.asBazel(config, exportedStringVars, exportedStringListVars, exportedConfigDependingVars)...)
-	}
-
-	sort.Slice(results, func(i, j int) bool { return results[i].variableName < results[j].variableName })
-
-	definitions := make([]string, 0, len(results))
-	constants := make([]string, 0, len(results))
-	for _, b := range results {
-		definitions = append(definitions,
-			fmt.Sprintf("_%s = %s", b.variableName, b.internalDefinition))
-		constants = append(constants,
-			fmt.Sprintf("%[1]s%[2]s = _%[2]s,", starlark_fmt.Indention(1), b.variableName))
-	}
-
-	// Build the exported constants struct.
-	ret += strings.Join(definitions, "\n\n")
-	ret += "\n\n"
-	ret += "constants = struct(\n"
-	ret += strings.Join(constants, "\n")
-	ret += "\n)"
-
-	return ret
-}
-
-// expandVar recursively expand interpolated variables in the exportedVars scope.
-//
-// We're using a string slice to track the seen variables to avoid
-// stackoverflow errors with infinite recursion. it's simpler to use a
-// string slice than to handle a pass-by-referenced map, which would make it
-// quite complex to track depth-first interpolations. It's also unlikely the
-// interpolation stacks are deep (n > 1).
-func expandVar(config android.Config, toExpand string, stringScope exportedStringVariables,
-	stringListScope exportedStringListVariables, exportedVars exportedConfigDependingVariables) ([]string, error) {
-	// e.g. "${ExternalCflags}"
-	r := regexp.MustCompile(`\${([a-zA-Z0-9_]+)}`)
-
-	// Internal recursive function.
-	var expandVarInternal func(string, map[string]bool) (string, error)
-	expandVarInternal = func(toExpand string, seenVars map[string]bool) (string, error) {
-		var ret string
-		remainingString := toExpand
-		for len(remainingString) > 0 {
-			matches := r.FindStringSubmatch(remainingString)
-			if len(matches) == 0 {
-				return ret + remainingString, nil
-			}
-			if len(matches) != 2 {
-				panic(fmt.Errorf("Expected to only match 1 subexpression in %s, got %d", remainingString, len(matches)-1))
-			}
-			matchIndex := strings.Index(remainingString, matches[0])
-			ret += remainingString[:matchIndex]
-			remainingString = remainingString[matchIndex+len(matches[0]):]
-
-			// Index 1 of FindStringSubmatch contains the subexpression match
-			// (variable name) of the capture group.
-			variable := matches[1]
-			// toExpand contains a variable.
-			if _, ok := seenVars[variable]; ok {
-				return ret, fmt.Errorf(
-					"Unbounded recursive interpolation of variable: %s", variable)
-			}
-			// A map is passed-by-reference. Create a new map for
-			// this scope to prevent variables seen in one depth-first expansion
-			// to be also treated as "seen" in other depth-first traversals.
-			newSeenVars := map[string]bool{}
-			for k := range seenVars {
-				newSeenVars[k] = true
-			}
-			newSeenVars[variable] = true
-			if unexpandedVars, ok := stringListScope[variable]; ok {
-				expandedVars := []string{}
-				for _, unexpandedVar := range unexpandedVars {
-					expandedVar, err := expandVarInternal(unexpandedVar, newSeenVars)
-					if err != nil {
-						return ret, err
-					}
-					expandedVars = append(expandedVars, expandedVar)
-				}
-				ret += strings.Join(expandedVars, " ")
-			} else if unexpandedVar, ok := stringScope[variable]; ok {
-				expandedVar, err := expandVarInternal(unexpandedVar, newSeenVars)
-				if err != nil {
-					return ret, err
-				}
-				ret += expandedVar
-			} else if unevaluatedVar, ok := exportedVars[variable]; ok {
-				evalFunc := reflect.ValueOf(unevaluatedVar)
-				validateVariableMethod(variable, evalFunc)
-				evaluatedResult := evalFunc.Call([]reflect.Value{reflect.ValueOf(config)})
-				evaluatedValue := evaluatedResult[0].Interface().(string)
-				expandedVar, err := expandVarInternal(evaluatedValue, newSeenVars)
-				if err != nil {
-					return ret, err
-				}
-				ret += expandedVar
-			} else {
-				return "", fmt.Errorf("Unbound config variable %s", variable)
-			}
-		}
-		return ret, nil
-	}
-	var ret []string
-	for _, v := range strings.Split(toExpand, " ") {
-		val, err := expandVarInternal(v, map[string]bool{})
-		if err != nil {
-			return ret, err
-		}
-		ret = append(ret, val)
-	}
-
-	return ret, nil
-}
-
-func validateVariableMethod(name string, methodValue reflect.Value) {
-	methodType := methodValue.Type()
-	if methodType.Kind() != reflect.Func {
-		panic(fmt.Errorf("method given for variable %s is not a function",
-			name))
-	}
-	if n := methodType.NumIn(); n != 1 {
-		panic(fmt.Errorf("method for variable %s has %d inputs (should be 1)",
-			name, n))
-	}
-	if n := methodType.NumOut(); n != 1 {
-		panic(fmt.Errorf("method for variable %s has %d outputs (should be 1)",
-			name, n))
-	}
-	if kind := methodType.Out(0).Kind(); kind != reflect.String {
-		panic(fmt.Errorf("method for variable %s does not return a string",
-			name))
-	}
-}
diff --git a/cc/config/bp2build_test.go b/cc/config/bp2build_test.go
deleted file mode 100644
index 4cbf0c6..0000000
--- a/cc/config/bp2build_test.go
+++ /dev/null
@@ -1,279 +0,0 @@
-// Copyright 2021 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package config
-
-import (
-	"testing"
-
-	"android/soong/android"
-)
-
-func TestExpandVars(t *testing.T) {
-	android_arm64_config := android.TestConfig("out", nil, "", nil)
-	android_arm64_config.BuildOS = android.Android
-	android_arm64_config.BuildArch = android.Arm64
-
-	testCases := []struct {
-		description     string
-		config          android.Config
-		stringScope     exportedStringVariables
-		stringListScope exportedStringListVariables
-		configVars      exportedConfigDependingVariables
-		toExpand        string
-		expectedValues  []string
-	}{
-		{
-			description:    "no expansion for non-interpolated value",
-			toExpand:       "foo",
-			expectedValues: []string{"foo"},
-		},
-		{
-			description: "single level expansion for string var",
-			stringScope: exportedStringVariables{
-				"foo": "bar",
-			},
-			toExpand:       "${foo}",
-			expectedValues: []string{"bar"},
-		},
-		{
-			description: "single level expansion string list var",
-			stringListScope: exportedStringListVariables{
-				"foo": []string{"bar"},
-			},
-			toExpand:       "${foo}",
-			expectedValues: []string{"bar"},
-		},
-		{
-			description: "mixed level expansion for string list var",
-			stringScope: exportedStringVariables{
-				"foo": "${bar}",
-				"qux": "hello",
-			},
-			stringListScope: exportedStringListVariables{
-				"bar": []string{"baz", "${qux}"},
-			},
-			toExpand:       "${foo}",
-			expectedValues: []string{"baz hello"},
-		},
-		{
-			description: "double level expansion",
-			stringListScope: exportedStringListVariables{
-				"foo": []string{"${bar}"},
-				"bar": []string{"baz"},
-			},
-			toExpand:       "${foo}",
-			expectedValues: []string{"baz"},
-		},
-		{
-			description: "double level expansion with a literal",
-			stringListScope: exportedStringListVariables{
-				"a": []string{"${b}", "c"},
-				"b": []string{"d"},
-			},
-			toExpand:       "${a}",
-			expectedValues: []string{"d c"},
-		},
-		{
-			description: "double level expansion, with two variables in a string",
-			stringListScope: exportedStringListVariables{
-				"a": []string{"${b} ${c}"},
-				"b": []string{"d"},
-				"c": []string{"e"},
-			},
-			toExpand:       "${a}",
-			expectedValues: []string{"d e"},
-		},
-		{
-			description: "triple level expansion with two variables in a string",
-			stringListScope: exportedStringListVariables{
-				"a": []string{"${b} ${c}"},
-				"b": []string{"${c}", "${d}"},
-				"c": []string{"${d}"},
-				"d": []string{"foo"},
-			},
-			toExpand:       "${a}",
-			expectedValues: []string{"foo foo foo"},
-		},
-		{
-			description: "expansion with config depending vars",
-			configVars: exportedConfigDependingVariables{
-				"a": func(c android.Config) string { return c.BuildOS.String() },
-				"b": func(c android.Config) string { return c.BuildArch.String() },
-			},
-			config:         android_arm64_config,
-			toExpand:       "${a}-${b}",
-			expectedValues: []string{"android-arm64"},
-		},
-		{
-			description: "double level multi type expansion",
-			stringListScope: exportedStringListVariables{
-				"platform": []string{"${os}-${arch}"},
-				"const":    []string{"const"},
-			},
-			configVars: exportedConfigDependingVariables{
-				"os":   func(c android.Config) string { return c.BuildOS.String() },
-				"arch": func(c android.Config) string { return c.BuildArch.String() },
-				"foo":  func(c android.Config) string { return "foo" },
-			},
-			config:         android_arm64_config,
-			toExpand:       "${const}/${platform}/${foo}",
-			expectedValues: []string{"const/android-arm64/foo"},
-		},
-	}
-
-	for _, testCase := range testCases {
-		t.Run(testCase.description, func(t *testing.T) {
-			output, _ := expandVar(testCase.config, testCase.toExpand, testCase.stringScope, testCase.stringListScope, testCase.configVars)
-			if len(output) != len(testCase.expectedValues) {
-				t.Errorf("Expected %d values, got %d", len(testCase.expectedValues), len(output))
-			}
-			for i, actual := range output {
-				expectedValue := testCase.expectedValues[i]
-				if actual != expectedValue {
-					t.Errorf("Actual value '%s' doesn't match expected value '%s'", actual, expectedValue)
-				}
-			}
-		})
-	}
-}
-
-func TestBazelToolchainVars(t *testing.T) {
-	testCases := []struct {
-		name        string
-		config      android.Config
-		vars        []bazelVarExporter
-		expectedOut string
-	}{
-		{
-			name: "exports strings",
-			vars: []bazelVarExporter{
-				exportedStringVariables{
-					"a": "b",
-					"c": "d",
-				},
-			},
-			expectedOut: `# GENERATED FOR BAZEL FROM SOONG. DO NOT EDIT.
-
-_a = "b"
-
-_c = "d"
-
-constants = struct(
-    a = _a,
-    c = _c,
-)`,
-		},
-		{
-			name: "exports string lists",
-			vars: []bazelVarExporter{
-				exportedStringListVariables{
-					"a": []string{"b1", "b2"},
-					"c": []string{"d1", "d2"},
-				},
-			},
-			expectedOut: `# GENERATED FOR BAZEL FROM SOONG. DO NOT EDIT.
-
-_a = [
-    "b1",
-    "b2",
-]
-
-_c = [
-    "d1",
-    "d2",
-]
-
-constants = struct(
-    a = _a,
-    c = _c,
-)`,
-		},
-		{
-			name: "exports string lists dicts",
-			vars: []bazelVarExporter{
-				exportedStringListDictVariables{
-					"a": map[string][]string{"b1": []string{"b2"}},
-					"c": map[string][]string{"d1": []string{"d2"}},
-				},
-			},
-			expectedOut: `# GENERATED FOR BAZEL FROM SOONG. DO NOT EDIT.
-
-_a = {
-    "b1": ["b2"],
-}
-
-_c = {
-    "d1": ["d2"],
-}
-
-constants = struct(
-    a = _a,
-    c = _c,
-)`,
-		},
-		{
-			name: "sorts across types",
-			vars: []bazelVarExporter{
-				exportedStringVariables{
-					"b": "b-val",
-					"d": "d-val",
-				},
-				exportedStringListVariables{
-					"c": []string{"c-val"},
-					"e": []string{"e-val"},
-				},
-				exportedStringListDictVariables{
-					"a": map[string][]string{"a1": []string{"a2"}},
-					"f": map[string][]string{"f1": []string{"f2"}},
-				},
-			},
-			expectedOut: `# GENERATED FOR BAZEL FROM SOONG. DO NOT EDIT.
-
-_a = {
-    "a1": ["a2"],
-}
-
-_b = "b-val"
-
-_c = ["c-val"]
-
-_d = "d-val"
-
-_e = ["e-val"]
-
-_f = {
-    "f1": ["f2"],
-}
-
-constants = struct(
-    a = _a,
-    b = _b,
-    c = _c,
-    d = _d,
-    e = _e,
-    f = _f,
-)`,
-		},
-	}
-
-	for _, tc := range testCases {
-		t.Run(tc.name, func(t *testing.T) {
-			out := bazelToolchainVars(tc.config, tc.vars...)
-			if out != tc.expectedOut {
-				t.Errorf("Expected \n%s, got \n%s", tc.expectedOut, out)
-			}
-		})
-	}
-}
diff --git a/cc/config/global.go b/cc/config/global.go
index 400be31..3caf327 100644
--- a/cc/config/global.go
+++ b/cc/config/global.go
@@ -23,6 +23,9 @@
 )
 
 var (
+	pctx         = android.NewPackageContext("android/soong/cc/config")
+	exportedVars = android.NewExportedVariables(pctx)
+
 	// Flags used by lots of devices.  Putting them in package static variables
 	// will save bytes in build.ninja so they aren't repeated for every file
 	commonGlobalCflags = []string{
@@ -214,7 +217,6 @@
 		// http://b/145211066
 		"-Wno-implicit-int-float-conversion",
 		// New warnings to be fixed after clang-r377782.
-		"-Wno-int-in-bool-context",          // http://b/148287349
 		"-Wno-sizeof-array-div",             // http://b/148815709
 		"-Wno-tautological-overlap-compare", // http://b/148815696
 		// New warnings to be fixed after clang-r383902.
@@ -223,10 +225,7 @@
 		"-Wno-misleading-indentation",               // http://b/153746954
 		"-Wno-zero-as-null-pointer-constant",        // http://b/68236239
 		"-Wno-deprecated-anon-enum-enum-conversion", // http://b/153746485
-		"-Wno-deprecated-enum-enum-conversion",      // http://b/153746563
 		"-Wno-string-compare",                       // http://b/153764102
-		"-Wno-enum-enum-conversion",                 // http://b/154138986
-		"-Wno-enum-float-conversion",                // http://b/154255917
 		"-Wno-pessimizing-move",                     // http://b/154270751
 		// New warnings to be fixed after clang-r399163
 		"-Wno-non-c-typedef-for-linkage", // http://b/161304145
@@ -287,8 +286,8 @@
 
 	// prebuilts/clang default settings.
 	ClangDefaultBase         = "prebuilts/clang/host"
-	ClangDefaultVersion      = "clang-r445002"
-	ClangDefaultShortVersion = "14.0.2"
+	ClangDefaultVersion      = "clang-r450784d"
+	ClangDefaultShortVersion = "14.0.6"
 
 	// Directories with warnings from Android.bp files.
 	WarningAllowedProjects = []string{
@@ -300,20 +299,28 @@
 	WarningAllowedOldProjects = []string{}
 )
 
-var pctx = android.NewPackageContext("android/soong/cc/config")
+// BazelCcToolchainVars generates bzl file content containing variables for
+// Bazel's cc_toolchain configuration.
+func BazelCcToolchainVars(config android.Config) string {
+	return android.BazelToolchainVars(config, exportedVars)
+}
+
+func ExportStringList(name string, value []string) {
+	exportedVars.ExportStringList(name, value)
+}
 
 func init() {
 	if runtime.GOOS == "linux" {
 		commonGlobalCflags = append(commonGlobalCflags, "-fdebug-prefix-map=/proc/self/cwd=")
 	}
 
-	exportStringListStaticVariable("CommonGlobalConlyflags", commonGlobalConlyflags)
-	exportStringListStaticVariable("DeviceGlobalCppflags", deviceGlobalCppflags)
-	exportStringListStaticVariable("DeviceGlobalLdflags", deviceGlobalLdflags)
-	exportStringListStaticVariable("DeviceGlobalLldflags", deviceGlobalLldflags)
-	exportStringListStaticVariable("HostGlobalCppflags", hostGlobalCppflags)
-	exportStringListStaticVariable("HostGlobalLdflags", hostGlobalLdflags)
-	exportStringListStaticVariable("HostGlobalLldflags", hostGlobalLldflags)
+	exportedVars.ExportStringListStaticVariable("CommonGlobalConlyflags", commonGlobalConlyflags)
+	exportedVars.ExportStringListStaticVariable("DeviceGlobalCppflags", deviceGlobalCppflags)
+	exportedVars.ExportStringListStaticVariable("DeviceGlobalLdflags", deviceGlobalLdflags)
+	exportedVars.ExportStringListStaticVariable("DeviceGlobalLldflags", deviceGlobalLldflags)
+	exportedVars.ExportStringListStaticVariable("HostGlobalCppflags", hostGlobalCppflags)
+	exportedVars.ExportStringListStaticVariable("HostGlobalLdflags", hostGlobalLdflags)
+	exportedVars.ExportStringListStaticVariable("HostGlobalLldflags", hostGlobalLldflags)
 
 	// Export the static default CommonGlobalCflags to Bazel.
 	// TODO(187086342): handle cflags that are set in VariableFuncs.
@@ -324,7 +331,7 @@
 			"-ftrivial-auto-var-init=zero",
 			"-enable-trivial-auto-var-init-zero-knowing-it-will-be-removed-from-clang",
 		}...)
-	exportedStringListVars.Set("CommonGlobalCflags", bazelCommonGlobalCflags)
+	exportedVars.ExportStringList("CommonGlobalCflags", bazelCommonGlobalCflags)
 
 	pctx.VariableFunc("CommonGlobalCflags", func(ctx android.PackageVarContext) string {
 		flags := commonGlobalCflags
@@ -353,17 +360,17 @@
 
 	// Export the static default DeviceGlobalCflags to Bazel.
 	// TODO(187086342): handle cflags that are set in VariableFuncs.
-	exportedStringListVars.Set("DeviceGlobalCflags", deviceGlobalCflags)
+	exportedVars.ExportStringList("DeviceGlobalCflags", deviceGlobalCflags)
 
 	pctx.VariableFunc("DeviceGlobalCflags", func(ctx android.PackageVarContext) string {
 		return strings.Join(deviceGlobalCflags, " ")
 	})
 
-	exportStringListStaticVariable("HostGlobalCflags", hostGlobalCflags)
-	exportStringListStaticVariable("NoOverrideGlobalCflags", noOverrideGlobalCflags)
-	exportStringListStaticVariable("NoOverrideExternalGlobalCflags", noOverrideExternalGlobalCflags)
-	exportStringListStaticVariable("CommonGlobalCppflags", commonGlobalCppflags)
-	exportStringListStaticVariable("ExternalCflags", extraExternalCflags)
+	exportedVars.ExportStringListStaticVariable("HostGlobalCflags", hostGlobalCflags)
+	exportedVars.ExportStringListStaticVariable("NoOverrideGlobalCflags", noOverrideGlobalCflags)
+	exportedVars.ExportStringListStaticVariable("NoOverrideExternalGlobalCflags", noOverrideExternalGlobalCflags)
+	exportedVars.ExportStringListStaticVariable("CommonGlobalCppflags", commonGlobalCppflags)
+	exportedVars.ExportStringListStaticVariable("ExternalCflags", extraExternalCflags)
 
 	// Everything in these lists is a crime against abstraction and dependency tracking.
 	// Do not add anything to this list.
@@ -378,11 +385,11 @@
 		"frameworks/native/opengl/include",
 		"frameworks/av/include",
 	}
-	exportedStringListVars.Set("CommonGlobalIncludes", commonGlobalIncludes)
+	exportedVars.ExportStringList("CommonGlobalIncludes", commonGlobalIncludes)
 	pctx.PrefixedExistentPathsForSourcesVariable("CommonGlobalIncludes", "-I", commonGlobalIncludes)
 
-	exportStringStaticVariable("CLANG_DEFAULT_VERSION", ClangDefaultVersion)
-	exportStringStaticVariable("CLANG_DEFAULT_SHORT_VERSION", ClangDefaultShortVersion)
+	exportedVars.ExportStringStaticVariable("CLANG_DEFAULT_VERSION", ClangDefaultVersion)
+	exportedVars.ExportStringStaticVariable("CLANG_DEFAULT_SHORT_VERSION", ClangDefaultShortVersion)
 
 	pctx.StaticVariableWithEnvOverride("ClangBase", "LLVM_PREBUILTS_BASE", ClangDefaultBase)
 	pctx.StaticVariableWithEnvOverride("ClangVersion", "LLVM_PREBUILTS_VERSION", ClangDefaultVersion)
@@ -422,7 +429,7 @@
 	pctx.StaticVariableWithEnvOverride("REAbiLinkerExecStrategy", "RBE_ABI_LINKER_EXEC_STRATEGY", remoteexec.LocalExecStrategy)
 }
 
-var HostPrebuiltTag = exportVariableConfigMethod("HostPrebuiltTag", android.Config.PrebuiltOS)
+var HostPrebuiltTag = exportedVars.ExportVariableConfigMethod("HostPrebuiltTag", android.Config.PrebuiltOS)
 
 func ClangPath(ctx android.PathContext, file string) android.SourcePath {
 	type clangToolKey string
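Note on the exportedVars pattern introduced in cc/config/global.go above: the free functions such as exportStringListStaticVariable are replaced by methods on an android.ExportedVariables value, so one call both defines the Ninja variable on the package context and records the value for the generated Bazel constants. A minimal sketch of the call pattern, using only methods that appear in this change; the package name, package path, and ExampleFlags value are invented for illustration:

    package exampleconfig // hypothetical package, for illustration only

    import "android/soong/android"

    var (
        examplePctx         = android.NewPackageContext("android/soong/example/config")
        exampleExportedVars = android.NewExportedVariables(examplePctx)

        // Hypothetical flag list used only to show the call shape.
        exampleFlags = []string{"-Wall", "-Werror"}
    )

    func init() {
        // Defines the $ExampleFlags Ninja variable on examplePctx and records the
        // value so it can be rendered into the Bazel constants file.
        exampleExportedVars.ExportStringListStaticVariable("ExampleFlags", exampleFlags)
    }

    // Mirrors BazelCcToolchainVars above: renders everything recorded on the
    // ExportedVariables value into Starlark constants for Bazel.
    func exampleToolchainVars(config android.Config) string {
        return android.BazelToolchainVars(config, exampleExportedVars)
    }
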
diff --git a/cc/config/toolchain.go b/cc/config/toolchain.go
index 6cede11..7175fdc 100644
--- a/cc/config/toolchain.go
+++ b/cc/config/toolchain.go
@@ -227,14 +227,7 @@
 }
 
 func LibclangRuntimeLibrary(t Toolchain, library string) string {
-	arch := t.LibclangRuntimeLibraryArch()
-	if arch == "" {
-		return ""
-	}
-	if !t.Bionic() {
-		return "libclang_rt." + library + "-" + arch
-	}
-	return "libclang_rt." + library + "-" + arch + "-android"
+	return "libclang_rt." + library
 }
 
 func BuiltinsRuntimeLibrary(t Toolchain) string {
diff --git a/cc/config/x86_64_device.go b/cc/config/x86_64_device.go
index d789cde..aebda0b 100644
--- a/cc/config/x86_64_device.go
+++ b/cc/config/x86_64_device.go
@@ -37,10 +37,10 @@
 		"": []string{
 			"-march=x86-64",
 		},
+
 		"broadwell": []string{
 			"-march=broadwell",
 		},
-
 		"haswell": []string{
 			"-march=core-avx2",
 		},
@@ -78,14 +78,6 @@
 		"popcnt": []string{"-mpopcnt"},
 		"aes_ni": []string{"-maes"},
 	}
-
-	x86_64DefaultArchVariantFeatures = []string{
-		"ssse3",
-		"sse4",
-		"sse4_1",
-		"sse4_2",
-		"popcnt",
-	}
 )
 
 const (
@@ -93,34 +85,32 @@
 )
 
 func init() {
-	android.RegisterDefaultArchVariantFeatures(android.Android, android.X86_64, x86_64DefaultArchVariantFeatures...)
-	exportedStringListVars.Set("X86_64DefaultArchVariantFeatures", x86_64DefaultArchVariantFeatures)
 
 	pctx.StaticVariable("x86_64GccVersion", x86_64GccVersion)
 
 	pctx.SourcePathVariable("X86_64GccRoot",
 		"prebuilts/gcc/${HostPrebuiltTag}/x86/x86_64-linux-android-${x86_64GccVersion}")
 
-	exportStringListStaticVariable("X86_64ToolchainCflags", []string{"-m64"})
-	exportStringListStaticVariable("X86_64ToolchainLdflags", []string{"-m64"})
+	exportedVars.ExportStringListStaticVariable("X86_64ToolchainCflags", []string{"-m64"})
+	exportedVars.ExportStringListStaticVariable("X86_64ToolchainLdflags", []string{"-m64"})
 
-	exportStringListStaticVariable("X86_64Ldflags", x86_64Ldflags)
-	exportStringListStaticVariable("X86_64Lldflags", x86_64Ldflags)
+	exportedVars.ExportStringListStaticVariable("X86_64Ldflags", x86_64Ldflags)
+	exportedVars.ExportStringListStaticVariable("X86_64Lldflags", x86_64Ldflags)
 
 	// Clang cflags
-	exportStringListStaticVariable("X86_64Cflags", x86_64Cflags)
-	exportStringListStaticVariable("X86_64Cppflags", x86_64Cppflags)
+	exportedVars.ExportStringListStaticVariable("X86_64Cflags", x86_64Cflags)
+	exportedVars.ExportStringListStaticVariable("X86_64Cppflags", x86_64Cppflags)
 
 	// Yasm flags
-	exportStringListStaticVariable("X86_64YasmFlags", []string{
+	exportedVars.ExportStringListStaticVariable("X86_64YasmFlags", []string{
 		"-f elf64",
 		"-m amd64",
 	})
 
 	// Extended cflags
 
-	exportedStringListDictVars.Set("X86_64ArchVariantCflags", x86_64ArchVariantCflags)
-	exportedStringListDictVars.Set("X86_64ArchFeatureCflags", x86_64ArchFeatureCflags)
+	exportedVars.ExportStringListDict("X86_64ArchVariantCflags", x86_64ArchVariantCflags)
+	exportedVars.ExportStringListDict("X86_64ArchFeatureCflags", x86_64ArchFeatureCflags)
 
 	// Architecture variant cflags
 	for variant, cflags := range x86_64ArchVariantCflags {
diff --git a/cc/config/x86_device.go b/cc/config/x86_device.go
index e32e1bd..421b083 100644
--- a/cc/config/x86_device.go
+++ b/cc/config/x86_device.go
@@ -98,25 +98,25 @@
 	pctx.SourcePathVariable("X86GccRoot",
 		"prebuilts/gcc/${HostPrebuiltTag}/x86/x86_64-linux-android-${x86GccVersion}")
 
-	exportStringListStaticVariable("X86ToolchainCflags", []string{"-m32"})
-	exportStringListStaticVariable("X86ToolchainLdflags", []string{"-m32"})
+	exportedVars.ExportStringListStaticVariable("X86ToolchainCflags", []string{"-m32"})
+	exportedVars.ExportStringListStaticVariable("X86ToolchainLdflags", []string{"-m32"})
 
-	exportStringListStaticVariable("X86Ldflags", x86Ldflags)
-	exportStringListStaticVariable("X86Lldflags", x86Ldflags)
+	exportedVars.ExportStringListStaticVariable("X86Ldflags", x86Ldflags)
+	exportedVars.ExportStringListStaticVariable("X86Lldflags", x86Ldflags)
 
 	// Clang cflags
-	exportStringListStaticVariable("X86Cflags", x86Cflags)
-	exportStringListStaticVariable("X86Cppflags", x86Cppflags)
+	exportedVars.ExportStringListStaticVariable("X86Cflags", x86Cflags)
+	exportedVars.ExportStringListStaticVariable("X86Cppflags", x86Cppflags)
 
 	// Yasm flags
-	exportStringListStaticVariable("X86YasmFlags", []string{
+	exportedVars.ExportStringListStaticVariable("X86YasmFlags", []string{
 		"-f elf32",
 		"-m x86",
 	})
 
 	// Extended cflags
-	exportedStringListDictVars.Set("X86ArchVariantCflags", x86ArchVariantCflags)
-	exportedStringListDictVars.Set("X86ArchFeatureCflags", x86ArchFeatureCflags)
+	exportedVars.ExportStringListDict("X86ArchVariantCflags", x86ArchVariantCflags)
+	exportedVars.ExportStringListDict("X86ArchFeatureCflags", x86ArchFeatureCflags)
 
 	// Architecture variant cflags
 	for variant, cflags := range x86ArchVariantCflags {
diff --git a/cc/config/x86_linux_host.go b/cc/config/x86_linux_host.go
index ce6836b..4e8fd77 100644
--- a/cc/config/x86_linux_host.go
+++ b/cc/config/x86_linux_host.go
@@ -65,7 +65,6 @@
 
 	linuxMuslLdflags = []string{
 		"-nostdlib",
-		"-lgcc", "-lgcc_eh",
 		"--sysroot /dev/null",
 	}
 
@@ -122,40 +121,40 @@
 )
 
 func init() {
-	exportStringStaticVariable("LinuxGccVersion", linuxGccVersion)
-	exportStringStaticVariable("LinuxGlibcVersion", linuxGlibcVersion)
+	exportedVars.ExportStringStaticVariable("LinuxGccVersion", linuxGccVersion)
+	exportedVars.ExportStringStaticVariable("LinuxGlibcVersion", linuxGlibcVersion)
 
 	// Most places use the full GCC version. A few only use up to the first two numbers.
 	if p := strings.Split(linuxGccVersion, "."); len(p) > 2 {
-		exportStringStaticVariable("ShortLinuxGccVersion", strings.Join(p[:2], "."))
+		exportedVars.ExportStringStaticVariable("ShortLinuxGccVersion", strings.Join(p[:2], "."))
 	} else {
-		exportStringStaticVariable("ShortLinuxGccVersion", linuxGccVersion)
+		exportedVars.ExportStringStaticVariable("ShortLinuxGccVersion", linuxGccVersion)
 	}
 
-	exportSourcePathVariable("LinuxGccRoot",
+	exportedVars.ExportSourcePathVariable("LinuxGccRoot",
 		"prebuilts/gcc/linux-x86/host/x86_64-linux-glibc${LinuxGlibcVersion}-${ShortLinuxGccVersion}")
 
-	exportStringListStaticVariable("LinuxGccTriple", []string{"x86_64-linux"})
+	exportedVars.ExportStringListStaticVariable("LinuxGccTriple", []string{"x86_64-linux"})
 
-	exportStringListStaticVariable("LinuxCflags", linuxCflags)
-	exportStringListStaticVariable("LinuxLdflags", linuxLdflags)
-	exportStringListStaticVariable("LinuxLldflags", linuxLdflags)
-	exportStringListStaticVariable("LinuxGlibcCflags", linuxGlibcCflags)
-	exportStringListStaticVariable("LinuxGlibcLdflags", linuxGlibcLdflags)
-	exportStringListStaticVariable("LinuxGlibcLldflags", linuxGlibcLdflags)
-	exportStringListStaticVariable("LinuxMuslCflags", linuxMuslCflags)
-	exportStringListStaticVariable("LinuxMuslLdflags", linuxMuslLdflags)
-	exportStringListStaticVariable("LinuxMuslLldflags", linuxMuslLdflags)
+	exportedVars.ExportStringListStaticVariable("LinuxCflags", linuxCflags)
+	exportedVars.ExportStringListStaticVariable("LinuxLdflags", linuxLdflags)
+	exportedVars.ExportStringListStaticVariable("LinuxLldflags", linuxLdflags)
+	exportedVars.ExportStringListStaticVariable("LinuxGlibcCflags", linuxGlibcCflags)
+	exportedVars.ExportStringListStaticVariable("LinuxGlibcLdflags", linuxGlibcLdflags)
+	exportedVars.ExportStringListStaticVariable("LinuxGlibcLldflags", linuxGlibcLdflags)
+	exportedVars.ExportStringListStaticVariable("LinuxMuslCflags", linuxMuslCflags)
+	exportedVars.ExportStringListStaticVariable("LinuxMuslLdflags", linuxMuslLdflags)
+	exportedVars.ExportStringListStaticVariable("LinuxMuslLldflags", linuxMuslLdflags)
 
-	exportStringListStaticVariable("LinuxX86Cflags", linuxX86Cflags)
-	exportStringListStaticVariable("LinuxX8664Cflags", linuxX8664Cflags)
-	exportStringListStaticVariable("LinuxX86Ldflags", linuxX86Ldflags)
-	exportStringListStaticVariable("LinuxX86Lldflags", linuxX86Ldflags)
-	exportStringListStaticVariable("LinuxX8664Ldflags", linuxX8664Ldflags)
-	exportStringListStaticVariable("LinuxX8664Lldflags", linuxX8664Ldflags)
+	exportedVars.ExportStringListStaticVariable("LinuxX86Cflags", linuxX86Cflags)
+	exportedVars.ExportStringListStaticVariable("LinuxX8664Cflags", linuxX8664Cflags)
+	exportedVars.ExportStringListStaticVariable("LinuxX86Ldflags", linuxX86Ldflags)
+	exportedVars.ExportStringListStaticVariable("LinuxX86Lldflags", linuxX86Ldflags)
+	exportedVars.ExportStringListStaticVariable("LinuxX8664Ldflags", linuxX8664Ldflags)
+	exportedVars.ExportStringListStaticVariable("LinuxX8664Lldflags", linuxX8664Ldflags)
 	// Yasm flags
-	exportStringListStaticVariable("LinuxX86YasmFlags", []string{"-f elf32 -m x86"})
-	exportStringListStaticVariable("LinuxX8664YasmFlags", []string{"-f elf64 -m amd64"})
+	exportedVars.ExportStringListStaticVariable("LinuxX86YasmFlags", []string{"-f elf32 -m x86"})
+	exportedVars.ExportStringListStaticVariable("LinuxX8664YasmFlags", []string{"-f elf64 -m amd64"})
 }
 
 type toolchainLinux struct {
diff --git a/cc/coverage.go b/cc/coverage.go
index f2b5425..d0902ea 100644
--- a/cc/coverage.go
+++ b/cc/coverage.go
@@ -77,6 +77,10 @@
 	return deps
 }
 
+func EnableContinuousCoverage(ctx android.BaseModuleContext) bool {
+	return ctx.DeviceConfig().ClangCoverageContinuousMode()
+}
+
 func (cov *coverage) flags(ctx ModuleContext, flags Flags, deps PathDeps) (Flags, PathDeps) {
 	clangCoverage := ctx.DeviceConfig().ClangCoverageEnabled()
 	gcovCoverage := ctx.DeviceConfig().GcovCoverageEnabled()
@@ -101,6 +105,9 @@
 			// Override -Wframe-larger-than.  We can expect frame size increase after
 			// coverage instrumentation.
 			flags.Local.CFlags = append(flags.Local.CFlags, "-Wno-frame-larger-than=")
+			if EnableContinuousCoverage(ctx) {
+				flags.Local.CommonFlags = append(flags.Local.CommonFlags, "-mllvm", "-runtime-counter-relocation")
+			}
 		}
 	}
 
@@ -152,6 +159,9 @@
 			flags.Local.LdFlags = append(flags.Local.LdFlags, "-Wl,--wrap,getenv")
 		} else if clangCoverage {
 			flags.Local.LdFlags = append(flags.Local.LdFlags, profileInstrFlag)
+			if EnableContinuousCoverage(ctx) {
+				flags.Local.LdFlags = append(flags.Local.LdFlags, "-Wl,-mllvm=-runtime-counter-relocation")
+			}
 
 			coverage := ctx.GetDirectDepWithTag(getClangProfileLibraryName(ctx), CoverageDepTag).(*Module)
 			deps.WholeStaticLibs = append(deps.WholeStaticLibs, coverage.OutputFile().Path())
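Background on the two flags added in cc/coverage.go above (general Clang behavior, not spelled out in this change): continuous coverage mode keeps the profile mapped and updates counters in place, which requires the counters to be addressed through a runtime-relocated bias, so the compile step gets -mllvm -runtime-counter-relocation and the link step forwards the same option through -Wl,-mllvm=. A condensed sketch of the plumbing, reusing the field and method names from this change inside a hypothetical helper:

    // appendContinuousCoverageFlags is a hypothetical helper; Flags is the cc
    // package's flags struct and ctx the usual module context.
    func appendContinuousCoverageFlags(ctx android.BaseModuleContext, flags Flags) Flags {
        if ctx.DeviceConfig().ClangCoverageContinuousMode() {
            // Compile time: make profile counters relocatable at runtime.
            flags.Local.CommonFlags = append(flags.Local.CommonFlags,
                "-mllvm", "-runtime-counter-relocation")
            // Link time: pass the same option to the linker's code generator.
            flags.Local.LdFlags = append(flags.Local.LdFlags,
                "-Wl,-mllvm=-runtime-counter-relocation")
        }
        return flags
    }
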
diff --git a/cc/library.go b/cc/library.go
index 6aac7ae..0abcb6f 100644
--- a/cc/library.go
+++ b/cc/library.go
@@ -145,6 +145,8 @@
 
 	Tidy_disabled_srcs []string `android:"path,arch_variant"`
 
+	Tidy_timeout_srcs []string `android:"path,arch_variant"`
+
 	Sanitized Sanitized `android:"arch_variant"`
 
 	Cflags []string `android:"arch_variant"`
@@ -314,6 +316,7 @@
 		Implementation_whole_archive_deps: linkerAttrs.implementationWholeArchiveDeps,
 		Whole_archive_deps:                *linkerAttrs.wholeArchiveDeps.Clone().Append(staticAttrs.Whole_archive_deps),
 		System_dynamic_deps:               *linkerAttrs.systemDynamicDeps.Clone().Append(staticAttrs.System_dynamic_deps),
+		sdkAttributes:                     bp2BuildParseSdkAttributes(m),
 	}
 
 	sharedCommonAttrs := staticOrSharedAttributes{
@@ -329,6 +332,7 @@
 		Implementation_dynamic_deps: *linkerAttrs.implementationDynamicDeps.Clone().Append(sharedAttrs.Implementation_dynamic_deps),
 		Whole_archive_deps:          *linkerAttrs.wholeArchiveDeps.Clone().Append(sharedAttrs.Whole_archive_deps),
 		System_dynamic_deps:         *linkerAttrs.systemDynamicDeps.Clone().Append(sharedAttrs.System_dynamic_deps),
+		sdkAttributes:               bp2BuildParseSdkAttributes(m),
 	}
 
 	staticTargetAttrs := &bazelCcLibraryStaticAttributes{
@@ -348,6 +352,7 @@
 		Stl:                      compilerAttrs.stl,
 		Cpp_std:                  compilerAttrs.cppStd,
 		C_std:                    compilerAttrs.cStd,
+		Use_version_lib:          linkerAttrs.useVersionLib,
 
 		Features: linkerAttrs.features,
 	}
@@ -370,6 +375,7 @@
 		Stl:                      compilerAttrs.stl,
 		Cpp_std:                  compilerAttrs.cppStd,
 		C_std:                    compilerAttrs.cStd,
+		Use_version_lib:          linkerAttrs.useVersionLib,
 
 		Additional_linker_inputs: linkerAttrs.additionalLinkerInputs,
 
@@ -1079,11 +1085,13 @@
 		srcs := android.PathsForModuleSrc(ctx, library.StaticProperties.Static.Srcs)
 		objs = objs.Append(compileObjs(ctx, buildFlags, android.DeviceStaticLibrary, srcs,
 			android.PathsForModuleSrc(ctx, library.StaticProperties.Static.Tidy_disabled_srcs),
+			android.PathsForModuleSrc(ctx, library.StaticProperties.Static.Tidy_timeout_srcs),
 			library.baseCompiler.pathDeps, library.baseCompiler.cFlagsDeps))
 	} else if library.shared() {
 		srcs := android.PathsForModuleSrc(ctx, library.SharedProperties.Shared.Srcs)
 		objs = objs.Append(compileObjs(ctx, buildFlags, android.DeviceSharedLibrary, srcs,
 			android.PathsForModuleSrc(ctx, library.SharedProperties.Shared.Tidy_disabled_srcs),
+			android.PathsForModuleSrc(ctx, library.SharedProperties.Shared.Tidy_timeout_srcs),
 			library.baseCompiler.pathDeps, library.baseCompiler.cFlagsDeps))
 	}
 
@@ -2477,6 +2485,7 @@
 		Whole_archive_deps:                linkerAttrs.wholeArchiveDeps,
 		Implementation_whole_archive_deps: linkerAttrs.implementationWholeArchiveDeps,
 		System_dynamic_deps:               linkerAttrs.systemDynamicDeps,
+		sdkAttributes:                     bp2BuildParseSdkAttributes(module),
 	}
 
 	var attrs interface{}
diff --git a/cc/library_headers.go b/cc/library_headers.go
index 5d38fba..41ebcc7 100644
--- a/cc/library_headers.go
+++ b/cc/library_headers.go
@@ -117,6 +117,7 @@
 	Deps                     bazel.LabelListAttribute
 	Implementation_deps      bazel.LabelListAttribute
 	System_dynamic_deps      bazel.LabelListAttribute
+	sdkAttributes
 }
 
 func libraryHeadersBp2Build(ctx android.TopDownMutatorContext, module *Module) {
@@ -132,6 +133,7 @@
 		Deps:                     linkerAttrs.deps,
 		System_dynamic_deps:      linkerAttrs.systemDynamicDeps,
 		Hdrs:                     baseAttributes.hdrs,
+		sdkAttributes:            bp2BuildParseSdkAttributes(module),
 	}
 
 	props := bazel.BazelTargetModuleProperties{
diff --git a/cc/linkable.go b/cc/linkable.go
index d4b4770..6bec30c 100644
--- a/cc/linkable.go
+++ b/cc/linkable.go
@@ -176,10 +176,14 @@
 	IsVndk() bool
 	IsVndkExt() bool
 	IsVndkPrivate() bool
+	IsVendorPublicLibrary() bool
+	IsVndkPrebuiltLibrary() bool
 	HasVendorVariant() bool
 	HasProductVariant() bool
 	HasNonSystemVariants() bool
+	ProductSpecific() bool
 	InProduct() bool
+	SdkAndPlatformVariantVisibleToMake() bool
 
 	// SubName returns the modules SubName, used for image and NDK/SDK variations.
 	SubName() string
diff --git a/cc/makevars.go b/cc/makevars.go
index 1ad71ec..6752f8c 100644
--- a/cc/makevars.go
+++ b/cc/makevars.go
@@ -255,14 +255,43 @@
 	}, " "))
 
 	if target.Os.Class == android.Device {
-		ctx.Strict(secondPrefix+"ADDRESS_SANITIZER_RUNTIME_LIBRARY", strings.TrimSuffix(config.AddressSanitizerRuntimeLibrary(toolchain), ".so"))
-		ctx.Strict(secondPrefix+"HWADDRESS_SANITIZER_RUNTIME_LIBRARY", strings.TrimSuffix(config.HWAddressSanitizerRuntimeLibrary(toolchain), ".so"))
-		ctx.Strict(secondPrefix+"HWADDRESS_SANITIZER_STATIC_LIBRARY", strings.TrimSuffix(config.HWAddressSanitizerStaticLibrary(toolchain), ".a"))
-		ctx.Strict(secondPrefix+"UBSAN_RUNTIME_LIBRARY", strings.TrimSuffix(config.UndefinedBehaviorSanitizerRuntimeLibrary(toolchain), ".so"))
-		ctx.Strict(secondPrefix+"UBSAN_MINIMAL_RUNTIME_LIBRARY", strings.TrimSuffix(config.UndefinedBehaviorSanitizerMinimalRuntimeLibrary(toolchain), ".a"))
-		ctx.Strict(secondPrefix+"TSAN_RUNTIME_LIBRARY", strings.TrimSuffix(config.ThreadSanitizerRuntimeLibrary(toolchain), ".so"))
-		ctx.Strict(secondPrefix+"SCUDO_RUNTIME_LIBRARY", strings.TrimSuffix(config.ScudoRuntimeLibrary(toolchain), ".so"))
-		ctx.Strict(secondPrefix+"SCUDO_MINIMAL_RUNTIME_LIBRARY", strings.TrimSuffix(config.ScudoMinimalRuntimeLibrary(toolchain), ".so"))
+		sanitizerVariables := map[string]string{
+			"ADDRESS_SANITIZER_RUNTIME_LIBRARY":   config.AddressSanitizerRuntimeLibrary(toolchain),
+			"HWADDRESS_SANITIZER_RUNTIME_LIBRARY": config.HWAddressSanitizerRuntimeLibrary(toolchain),
+			"HWADDRESS_SANITIZER_STATIC_LIBRARY":  config.HWAddressSanitizerStaticLibrary(toolchain),
+			"UBSAN_RUNTIME_LIBRARY":               config.UndefinedBehaviorSanitizerRuntimeLibrary(toolchain),
+			"UBSAN_MINIMAL_RUNTIME_LIBRARY":       config.UndefinedBehaviorSanitizerMinimalRuntimeLibrary(toolchain),
+			"TSAN_RUNTIME_LIBRARY":                config.ThreadSanitizerRuntimeLibrary(toolchain),
+			"SCUDO_RUNTIME_LIBRARY":               config.ScudoRuntimeLibrary(toolchain),
+			"SCUDO_MINIMAL_RUNTIME_LIBRARY":       config.ScudoMinimalRuntimeLibrary(toolchain),
+		}
+
+		for variable, value := range sanitizerVariables {
+			ctx.Strict(secondPrefix+variable, value)
+		}
+
+		sanitizerLibs := android.SortedStringValues(sanitizerVariables)
+		var sanitizerLibStems []string
+		ctx.VisitAllModules(func(m android.Module) {
+			if !m.Enabled() {
+				return
+			}
+
+			ccModule, _ := m.(*Module)
+			if ccModule == nil || ccModule.library == nil || !ccModule.library.shared() {
+				return
+			}
+
+			if android.InList(strings.TrimPrefix(ctx.ModuleName(m), "prebuilt_"), sanitizerLibs) &&
+				m.Target().Os == target.Os && m.Target().Arch.ArchType == target.Arch.ArchType {
+				outputFile := ccModule.outputFile
+				if outputFile.Valid() {
+					sanitizerLibStems = append(sanitizerLibStems, outputFile.Path().Base())
+				}
+			}
+		})
+		sanitizerLibStems = android.SortedUniqueStrings(sanitizerLibStems)
+		ctx.Strict(secondPrefix+"SANITIZER_STEMS", strings.Join(sanitizerLibStems, " "))
 	}
 
 	// This is used by external/gentoo/...
diff --git a/cc/ndk_abi.go b/cc/ndk_abi.go
index 927fa2e..3456c32 100644
--- a/cc/ndk_abi.go
+++ b/cc/ndk_abi.go
@@ -46,7 +46,7 @@
 
 		if m, ok := module.(*Module); ok {
 			if installer, ok := m.installer.(*stubDecorator); ok {
-				if canDumpAbi() {
+				if canDumpAbi(ctx.Config()) {
 					depPaths = append(depPaths, installer.abiDumpPath)
 				}
 			}
diff --git a/cc/ndk_library.go b/cc/ndk_library.go
index 7efe134..5ef41ea 100644
--- a/cc/ndk_library.go
+++ b/cc/ndk_library.go
@@ -53,9 +53,9 @@
 
 	abitidy = pctx.AndroidStaticRule("abitidy",
 		blueprint.RuleParams{
-			Command:     "$abitidy --all -i $in -o $out",
+			Command:     "$abitidy --all $flags -i $in -o $out",
 			CommandDeps: []string{"$abitidy"},
-		})
+		}, "flags")
 
 	abidiff = pctx.AndroidStaticRule("abidiff",
 		blueprint.RuleParams{
@@ -104,6 +104,12 @@
 	// used. This is only needed to work around platform bugs like
 	// https://github.com/android-ndk/ndk/issues/265.
 	Unversioned_until *string
+
+	// If true, does not emit errors when APIs lacking type information are
+	// found. This is false by default and should not be enabled outside bionic,
+	// where it is enabled pending a fix for http://b/190554910 (no debug info
+	// for asm implemented symbols).
+	Allow_untyped_symbols *bool
 }
 
 type stubDecorator struct {
@@ -279,7 +285,7 @@
 
 func compileStubLibrary(ctx ModuleContext, flags Flags, src android.Path) Objects {
 	return compileObjs(ctx, flagsToBuilderFlags(flags), "",
-		android.Paths{src}, nil, nil, nil)
+		android.Paths{src}, nil, nil, nil, nil)
 }
 
 func (this *stubDecorator) findImplementationLibrary(ctx ModuleContext) android.Path {
@@ -315,8 +321,18 @@
 }
 
 // Feature flag.
-func canDumpAbi() bool {
-	return runtime.GOOS != "darwin"
+func canDumpAbi(config android.Config) bool {
+	if runtime.GOOS == "darwin" {
+		return false
+	}
+	// abidw doesn't currently handle top-byte-ignore correctly. Disable ABI
+	// dumping for those configs while we wait for a fix. We'll still have ABI
+	// checking coverage from non-hwasan builds.
+	// http://b/190554910
+	if android.InList("hwaddress", config.SanitizeDevice()) {
+		return false
+	}
+	return true
 }
 
 // Feature flag to disable diffing against prebuilts.
@@ -339,14 +355,22 @@
 			"symbolList": symbolList.String(),
 		},
 	})
+
 	this.abiDumpPath = getNdkAbiDumpInstallBase(ctx).Join(ctx,
 		this.apiLevel.String(), ctx.Arch().ArchType.String(),
 		this.libraryName(ctx), "abi.xml")
+	untypedFlag := "--abort-on-untyped-symbols"
+	if proptools.BoolDefault(this.properties.Allow_untyped_symbols, false) {
+		untypedFlag = ""
+	}
 	ctx.Build(pctx, android.BuildParams{
 		Rule:        abitidy,
 		Description: fmt.Sprintf("abitidy %s", implementationLibrary),
 		Input:       abiRawPath,
 		Output:      this.abiDumpPath,
+		Args: map[string]string{
+			"flags": untypedFlag,
+		},
 	})
 }
 
@@ -444,7 +468,7 @@
 	nativeAbiResult := parseNativeAbiDefinition(ctx, symbolFile, c.apiLevel, "")
 	objs := compileStubLibrary(ctx, flags, nativeAbiResult.stubSrc)
 	c.versionScriptPath = nativeAbiResult.versionScript
-	if canDumpAbi() {
+	if canDumpAbi(ctx.Config()) {
 		c.dumpAbi(ctx, nativeAbiResult.symbolList)
 		if canDiffAbi() {
 			c.diffAbi(ctx)
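For reference, the path from the new allow_untyped_symbols property to the abitidy rule above, condensed into one place (the property, proptools.BoolDefault, and the rule's flags argument are the ones in this change; the wrapper function is hypothetical):

    // abitidyFlags sketches how the extra abitidy flag is chosen for a stub library.
    func abitidyFlags(allowUntypedSymbols *bool) map[string]string {
        // Default: abort when a symbol has no type information.
        untypedFlag := "--abort-on-untyped-symbols"
        if proptools.BoolDefault(allowUntypedSymbols, false) {
            // bionic can opt out while http://b/190554910 is unresolved.
            untypedFlag = ""
        }
        // Substituted into: $abitidy --all $flags -i $in -o $out
        return map[string]string{"flags": untypedFlag}
    }
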
diff --git a/cc/object.go b/cc/object.go
index fdd0b11..bd5bd45 100644
--- a/cc/object.go
+++ b/cc/object.go
@@ -133,6 +133,7 @@
 	Absolute_includes   bazel.StringListAttribute
 	Stl                 *string
 	Linker_script       bazel.LabelAttribute
+	sdkAttributes
 }
 
 // objectBp2Build is the bp2build converter from cc_object modules to the
@@ -191,6 +192,7 @@
 		Absolute_includes:   compilerAttrs.absoluteIncludes,
 		Stl:                 compilerAttrs.stl,
 		Linker_script:       linkerScript,
+		sdkAttributes:       bp2BuildParseSdkAttributes(m),
 	}
 
 	props := bazel.BazelTargetModuleProperties{
diff --git a/cc/pgo.go b/cc/pgo.go
index aa0feae..0632c15 100644
--- a/cc/pgo.go
+++ b/cc/pgo.go
@@ -32,8 +32,8 @@
 	}
 
 	globalPgoProfileProjects = []string{
-		"toolchain/pgo-profiles",
-		"vendor/google_data/pgo_profile",
+		"toolchain/pgo-profiles/pgo",
+		"vendor/google_data/pgo_profile/pgo",
 	}
 )
 
diff --git a/cc/prebuilt.go b/cc/prebuilt.go
index 339a16d..f54c6f8 100644
--- a/cc/prebuilt.go
+++ b/cc/prebuilt.go
@@ -16,6 +16,7 @@
 
 import (
 	"path/filepath"
+	"strings"
 
 	"android/soong/android"
 	"android/soong/bazel"
@@ -188,6 +189,16 @@
 				TableOfContents: p.tocFile,
 			})
 
+			// TODO(b/220898484): Mainline module sdk prebuilts of stub libraries use a stub
+			// library as their source and must not be installed, whereas libclang_rt.*
+			// libraries have stubs because they are LLNDK libraries yet use an implementation
+			// library as their source and need to be installed.  This discrepancy should be resolved
+			// without the prefix hack below.
+			if p.hasStubsVariants() && !p.buildStubs() && !ctx.Host() &&
+				!strings.HasPrefix(ctx.baseModuleName(), "libclang_rt.") {
+				ctx.Module().MakeUninstallable()
+			}
+
 			return outputFile
 		}
 	}
@@ -242,6 +253,7 @@
 func NewPrebuiltLibrary(hod android.HostOrDeviceSupported, srcsProperty string) (*Module, *libraryDecorator) {
 	module, library := NewLibrary(hod)
 	module.compiler = nil
+	module.bazelable = true
 
 	prebuilt := &prebuiltLibraryLinker{
 		libraryDecorator: library,
@@ -327,8 +339,21 @@
 	Export_system_includes bazel.StringListAttribute
 }
 
-func prebuiltLibraryStaticBp2Build(ctx android.TopDownMutatorContext, module *Module) {
-	prebuiltAttrs := Bp2BuildParsePrebuiltLibraryProps(ctx, module)
+// TODO(b/228623543): The below is not entirely true until the bug is fixed. For now, both targets are always generated.
+// Implements bp2build for cc_prebuilt_library modules. This will generate:
+// * Only a prebuilt_library_static if the shared.enabled property is set to false across all variants.
+// * Only a prebuilt_library_shared if the static.enabled property is set to false across all variants.
+// * Both a prebuilt_library_static and a prebuilt_library_shared if the aforementioned properties are not false across
+//   all variants.
+//
+// In all cases, prebuilt_library_static target names will be suffixed with "_bp2build_cc_library_static".
+func prebuiltLibraryBp2Build(ctx android.TopDownMutatorContext, module *Module) {
+	prebuiltLibraryStaticBp2Build(ctx, module, true)
+	prebuiltLibrarySharedBp2Build(ctx, module)
+}
+
+func prebuiltLibraryStaticBp2Build(ctx android.TopDownMutatorContext, module *Module, fullBuild bool) {
+	prebuiltAttrs := Bp2BuildParsePrebuiltLibraryProps(ctx, module, true)
 	exportedIncludes := Bp2BuildParseExportedIncludesForPrebuiltLibrary(ctx, module)
 
 	attrs := &bazelPrebuiltLibraryStaticAttributes{
@@ -343,7 +368,10 @@
 	}
 
 	name := android.RemoveOptionalPrebuiltPrefix(module.Name())
-	ctx.CreateBazelTargetModule(props, android.CommonAttributes{Name: name}, attrs)
+	if fullBuild {
+		name += "_bp2build_cc_library_static"
+	}
+	ctx.CreateBazelTargetModuleWithRestrictions(props, android.CommonAttributes{Name: name}, attrs, prebuiltAttrs.Enabled)
 }
 
 type bazelPrebuiltLibrarySharedAttributes struct {
@@ -351,7 +379,7 @@
 }
 
 func prebuiltLibrarySharedBp2Build(ctx android.TopDownMutatorContext, module *Module) {
-	prebuiltAttrs := Bp2BuildParsePrebuiltLibraryProps(ctx, module)
+	prebuiltAttrs := Bp2BuildParsePrebuiltLibraryProps(ctx, module, false)
 
 	attrs := &bazelPrebuiltLibrarySharedAttributes{
 		Shared_library: prebuiltAttrs.Src,
@@ -363,7 +391,7 @@
 	}
 
 	name := android.RemoveOptionalPrebuiltPrefix(module.Name())
-	ctx.CreateBazelTargetModule(props, android.CommonAttributes{Name: name}, attrs)
+	ctx.CreateBazelTargetModuleWithRestrictions(props, android.CommonAttributes{Name: name}, attrs, prebuiltAttrs.Enabled)
 }
 
 type prebuiltObjectProperties struct {
@@ -499,7 +527,16 @@
 func (p *prebuiltObjectLinker) link(ctx ModuleContext,
 	flags Flags, deps PathDeps, objs Objects) android.Path {
 	if len(p.properties.Srcs) > 0 {
-		return p.Prebuilt.SingleSourcePath(ctx)
+		// Copy objects to a name matching the final installed name
+		in := p.Prebuilt.SingleSourcePath(ctx)
+		outputFile := android.PathForModuleOut(ctx, ctx.ModuleName()+".o")
+		ctx.Build(pctx, android.BuildParams{
+			Rule:        android.CpExecutable,
+			Description: "prebuilt",
+			Output:      outputFile,
+			Input:       in,
+		})
+		return outputFile
 	}
 	return nil
 }
diff --git a/cc/prebuilt_test.go b/cc/prebuilt_test.go
index 94f75fe..901f458 100644
--- a/cc/prebuilt_test.go
+++ b/cc/prebuilt_test.go
@@ -20,6 +20,7 @@
 
 	"android/soong/android"
 	"android/soong/bazel/cquery"
+
 	"github.com/google/blueprint"
 )
 
@@ -29,6 +30,7 @@
 )
 
 func testPrebuilt(t *testing.T, bp string, fs android.MockFS, handlers ...android.FixturePreparer) *android.TestContext {
+	t.Helper()
 	result := android.GroupFixturePreparers(
 		prepareForPrebuiltTest,
 		fs.AddToFixture(),
@@ -449,3 +451,72 @@
 	expectedOutputFiles := []string{pathPrefix + "foo.so"}
 	android.AssertDeepEquals(t, "output files", expectedOutputFiles, outputFiles.Strings())
 }
+
+func TestPrebuiltStubNoinstall(t *testing.T) {
+	testFunc := func(t *testing.T, bp string) {
+		result := android.GroupFixturePreparers(
+			prepareForPrebuiltTest,
+			android.PrepareForTestWithMakevars,
+		).RunTestWithBp(t, bp)
+
+		installRules := result.InstallMakeRulesForTesting(t)
+		var installedlibRule *android.InstallMakeRule
+		for i, rule := range installRules {
+			if rule.Target == "out/target/product/test_device/system/lib/installedlib.so" {
+				if installedlibRule != nil {
+					t.Errorf("Duplicate install rules for %s", rule.Target)
+				}
+				installedlibRule = &installRules[i]
+			}
+		}
+		if installedlibRule == nil {
+			t.Errorf("No install rule found for installedlib")
+			return
+		}
+
+		android.AssertStringListDoesNotContain(t,
+			"installedlib has install dependency on stub",
+			installedlibRule.Deps,
+			"out/target/product/test_device/system/lib/stublib.so")
+		android.AssertStringListDoesNotContain(t,
+			"installedlib has order-only install dependency on stub",
+			installedlibRule.OrderOnlyDeps,
+			"out/target/product/test_device/system/lib/stublib.so")
+	}
+
+	const prebuiltStublibBp = `
+		cc_prebuilt_library {
+			name: "stublib",
+			prefer: true,
+			srcs: ["foo.so"],
+			stubs: {
+				versions: ["1"],
+			},
+		}
+	`
+
+	const installedlibBp = `
+		cc_library {
+			name: "installedlib",
+			shared_libs: ["stublib"],
+		}
+	`
+
+	t.Run("prebuilt without source", func(t *testing.T) {
+		testFunc(t, prebuiltStublibBp+installedlibBp)
+	})
+
+	const disabledSourceStublibBp = `
+		cc_library {
+			name: "stublib",
+			enabled: false,
+			stubs: {
+				versions: ["1"],
+			},
+		}
+	`
+
+	t.Run("prebuilt with disabled source", func(t *testing.T) {
+		testFunc(t, disabledSourceStublibBp+prebuiltStublibBp+installedlibBp)
+	})
+}
diff --git a/cc/proto.go b/cc/proto.go
index 3cf1453..8e6d5ed 100644
--- a/cc/proto.go
+++ b/cc/proto.go
@@ -177,7 +177,7 @@
 func bp2buildProto(ctx android.Bp2buildMutatorContext, m *Module, protoSrcs bazel.LabelListAttribute) bp2buildProtoDeps {
 	var ret bp2buildProtoDeps
 
-	protoInfo, ok := android.Bp2buildProtoProperties(ctx, m, protoSrcs)
+	protoInfo, ok := android.Bp2buildProtoProperties(ctx, &m.ModuleBase, protoSrcs)
 	if !ok {
 		return ret
 	}
diff --git a/cc/sanitize.go b/cc/sanitize.go
index f8661a6..814fef6 100644
--- a/cc/sanitize.go
+++ b/cc/sanitize.go
@@ -76,7 +76,7 @@
 	minimalRuntimeFlags = []string{"-fsanitize-minimal-runtime", "-fno-sanitize-trap=integer,undefined",
 		"-fno-sanitize-recover=integer,undefined"}
 	hwasanGlobalOptions = []string{"heap_history_size=1023", "stack_history_size=512",
-		"export_memory_stats=0", "max_malloc_fill_size=0"}
+		"export_memory_stats=0", "max_malloc_fill_size=4096", "malloc_fill_byte=0"}
 )
 
 type SanitizerType int
@@ -480,8 +480,8 @@
 		s.Diag.Cfi = nil
 	}
 
-	// Disable sanitizers that depend on the UBSan runtime for windows/darwin/musl builds.
-	if !ctx.Os().Linux() || ctx.Os() == android.LinuxMusl {
+	// Disable sanitizers that depend on the UBSan runtime for windows/darwin builds.
+	if !ctx.Os().Linux() {
 		s.Cfi = nil
 		s.Diag.Cfi = nil
 		s.Misc_undefined = nil
@@ -490,6 +490,12 @@
 		s.Integer_overflow = nil
 	}
 
+	// Disable CFI for musl
+	if ctx.toolchain().Musl() {
+		s.Cfi = nil
+		s.Diag.Cfi = nil
+	}
+
 	// Also disable CFI for VNDK variants of components
 	if ctx.isVndk() && ctx.useVndk() {
 		if ctx.static() {
@@ -702,10 +708,10 @@
 		flags.Local.AsFlags = append(flags.Local.AsFlags, sanitizeArg)
 		flags.Local.LdFlags = append(flags.Local.LdFlags, sanitizeArg)
 
-		if ctx.toolchain().Bionic() {
-			// Bionic sanitizer runtimes have already been added as dependencies so that
-			// the right variant of the runtime will be used (with the "-android"
-			// suffix), so don't let clang the runtime library.
+		if ctx.toolchain().Bionic() || ctx.toolchain().Musl() {
+			// Bionic and musl sanitizer runtimes have already been added as dependencies so that
+			// the right variant of the runtime will be used (with the "-android" or "-musl"
+			// suffixes), so don't let clang link the runtime library.
 			flags.Local.LdFlags = append(flags.Local.LdFlags, "-fno-sanitize-link-runtime")
 		} else {
 			// Host sanitizers only link symbols in the final executable, so
@@ -1217,7 +1223,7 @@
 			addStaticDeps(config.BuiltinsRuntimeLibrary(toolchain))
 		}
 
-		if runtimeLibrary != "" && (toolchain.Bionic() || c.sanitize.Properties.UbsanRuntimeDep) {
+		if runtimeLibrary != "" && (toolchain.Bionic() || toolchain.Musl() || c.sanitize.Properties.UbsanRuntimeDep) {
 			// UBSan is supported on non-bionic linux host builds as well
 
 			// Adding dependency to the runtime library. We are using *FarVariation*
diff --git a/cc/sanitize_test.go b/cc/sanitize_test.go
index 0070e40..c1ca034 100644
--- a/cc/sanitize_test.go
+++ b/cc/sanitize_test.go
@@ -24,11 +24,7 @@
 
 var prepareForAsanTest = android.FixtureAddFile("asan/Android.bp", []byte(`
 	cc_library_shared {
-		name: "libclang_rt.asan-aarch64-android",
-	}
-
-	cc_library_shared {
-		name: "libclang_rt.asan-arm-android",
+		name: "libclang_rt.asan",
 	}
 `))
 
diff --git a/cc/snapshot_prebuilt.go b/cc/snapshot_prebuilt.go
index 753d74c..9d40ad0 100644
--- a/cc/snapshot_prebuilt.go
+++ b/cc/snapshot_prebuilt.go
@@ -680,6 +680,9 @@
 		Input:       in,
 	})
 
+	// binary snapshots need symlinking
+	p.setSymlinkList(ctx)
+
 	return outputFile
 }
 
diff --git a/cc/test.go b/cc/test.go
index d8b7833..ead7877 100644
--- a/cc/test.go
+++ b/cc/test.go
@@ -25,7 +25,8 @@
 	"android/soong/tradefed"
 )
 
-type TestProperties struct {
+// TestLinkerProperties holds the properties to be registered via the linker.
+type TestLinkerProperties struct {
 	// if set, build against the gtest library. Defaults to true.
 	Gtest *bool
 
@@ -33,6 +34,12 @@
 	Isolated *bool
 }
 
+// TestInstallerProperties holds the properties to be registered via the installer.
+type TestInstallerProperties struct {
+	// list of compatibility suites (for example "cts", "vts") that the module should be installed into.
+	Test_suites []string `android:"arch_variant"`
+}
+
 // Test option struct.
 type TestOptions struct {
 	// The UID that you want to run the test as on a device.
@@ -83,10 +90,6 @@
 	// list of binary modules that should be installed alongside the test
 	Data_bins []string `android:"arch_variant"`
 
-	// list of compatibility suites (for example "cts", "vts") that the module should be
-	// installed into.
-	Test_suites []string `android:"arch_variant"`
-
 	// the name of the test configuration (for example "AndroidTest.xml") that should be
 	// installed with the module.
 	Test_config *string `android:"path,arch_variant"`
@@ -243,12 +246,14 @@
 }
 
 type testDecorator struct {
-	Properties TestProperties
-	linker     *baseLinker
+	LinkerProperties    TestLinkerProperties
+	InstallerProperties TestInstallerProperties
+	installer           *baseInstaller
+	linker              *baseLinker
 }
 
 func (test *testDecorator) gtest() bool {
-	return BoolDefault(test.Properties.Gtest, true)
+	return BoolDefault(test.LinkerProperties.Gtest, true)
 }
 
 func (test *testDecorator) testBinary() bool {
@@ -283,7 +288,7 @@
 	if test.gtest() {
 		if ctx.useSdk() && ctx.Device() {
 			deps.StaticLibs = append(deps.StaticLibs, "libgtest_main_ndk_c++", "libgtest_ndk_c++")
-		} else if BoolDefault(test.Properties.Isolated, false) {
+		} else if BoolDefault(test.LinkerProperties.Isolated, false) {
 			deps.StaticLibs = append(deps.StaticLibs, "libgtest_isolated_main")
 			// The isolated library requires liblog, but adding it
 			// as a static library means unit tests cannot override
@@ -316,7 +321,11 @@
 }
 
 func (test *testDecorator) linkerProps() []interface{} {
-	return []interface{}{&test.Properties}
+	return []interface{}{&test.LinkerProperties}
+}
+
+func (test *testDecorator) installerProps() []interface{} {
+	return []interface{}{&test.InstallerProperties}
 }
 
 func NewTestInstaller() *baseInstaller {
@@ -324,7 +333,7 @@
 }
 
 type testBinary struct {
-	testDecorator
+	*testDecorator
 	*binaryDecorator
 	*baseCompiler
 	Properties       TestBinaryProperties
@@ -358,6 +367,10 @@
 	return flags
 }
 
+func (test *testBinary) installerProps() []interface{} {
+	return append(test.baseInstaller.installerProps(), test.testDecorator.installerProps()...)
+}
+
 func (test *testBinary) install(ctx ModuleContext, file android.Path) {
 	// TODO: (b/167308193) Switch to /data/local/tests/unrestricted as the default install base.
 	testInstallBase := "/data/local/tmp"
@@ -411,7 +424,7 @@
 		var options []tradefed.Option
 		configs = append(configs, tradefed.Object{"target_preparer", "com.android.tradefed.targetprep.StopServicesSetup", options})
 	}
-	if Bool(test.testDecorator.Properties.Isolated) {
+	if Bool(test.testDecorator.LinkerProperties.Isolated) {
 		configs = append(configs, tradefed.Option{Name: "not-shardable", Value: "true"})
 	}
 	if test.Properties.Test_options.Run_test_as != nil {
@@ -441,7 +454,7 @@
 	}
 
 	test.testConfig = tradefed.AutoGenNativeTestConfig(ctx, test.Properties.Test_config,
-		test.Properties.Test_config_template, test.Properties.Test_suites, configs, test.Properties.Auto_gen_config, testInstallBase)
+		test.Properties.Test_config_template, test.testDecorator.InstallerProperties.Test_suites, configs, test.Properties.Auto_gen_config, testInstallBase)
 
 	test.extraTestConfigs = android.PathsForModuleSrc(ctx, test.Properties.Test_options.Extra_test_configs)
 
@@ -466,8 +479,9 @@
 	binary.baseInstaller = NewTestInstaller()
 
 	test := &testBinary{
-		testDecorator: testDecorator{
-			linker: binary.baseLinker,
+		testDecorator: &testDecorator{
+			linker:    binary.baseLinker,
+			installer: binary.baseInstaller,
 		},
 		binaryDecorator: binary,
 		baseCompiler:    NewBaseCompiler(),
@@ -479,12 +493,14 @@
 }
 
 type testLibrary struct {
-	testDecorator
+	*testDecorator
 	*libraryDecorator
 }
 
 func (test *testLibrary) linkerProps() []interface{} {
-	return append(test.testDecorator.linkerProps(), test.libraryDecorator.linkerProps()...)
+	var props []interface{}
+	props = append(props, test.testDecorator.linkerProps()...)
+	return append(props, test.libraryDecorator.linkerProps()...)
 }
 
 func (test *testLibrary) linkerInit(ctx BaseModuleContext) {
@@ -504,16 +520,22 @@
 	return flags
 }
 
+func (test *testLibrary) installerProps() []interface{} {
+	return append(test.baseInstaller.installerProps(), test.testDecorator.installerProps()...)
+}
+
 func NewTestLibrary(hod android.HostOrDeviceSupported) *Module {
 	module, library := NewLibrary(android.HostAndDeviceSupported)
 	library.baseInstaller = NewTestInstaller()
 	test := &testLibrary{
-		testDecorator: testDecorator{
-			linker: library.baseLinker,
+		testDecorator: &testDecorator{
+			linker:    library.baseLinker,
+			installer: library.baseInstaller,
 		},
 		libraryDecorator: library,
 	}
 	module.linker = test
+	module.installer = test
 	return module
 }
 
diff --git a/cc/testing.go b/cc/testing.go
index a03d147..32f7c60 100644
--- a/cc/testing.go
+++ b/cc/testing.go
@@ -86,54 +86,20 @@
 		}
 
 		cc_prebuilt_library_static {
-			name: "libclang_rt.builtins-arm-android",
-			defaults: ["toolchain_libs_defaults"],
-			native_bridge_supported: true,
-			vendor_ramdisk_available: true,
-		}
-
-		cc_prebuilt_library_static {
-			name: "libclang_rt.builtins-aarch64-android",
-			defaults: ["toolchain_libs_defaults"],
-			native_bridge_supported: true,
-			vendor_ramdisk_available: true,
-		}
-
-		cc_prebuilt_library_static {
-			name: "libclang_rt.builtins-x86_64",
+			name: "libclang_rt.builtins",
 			defaults: ["toolchain_libs_defaults"],
 			host_supported: true,
-		}
-
-		cc_prebuilt_library_static {
-			name: "libclang_rt.builtins-i386",
-			defaults: ["toolchain_libs_defaults"],
-			host_supported: true,
+			vendor_available: true,
+			vendor_ramdisk_available: true,
+			native_bridge_supported: true,
 		}
 
 		cc_prebuilt_library_shared {
-			name: "libclang_rt.hwasan-aarch64-android",
+			name: "libclang_rt.hwasan",
 			defaults: ["toolchain_libs_defaults"],
 		}
 
 		cc_prebuilt_library_static {
-			name: "libclang_rt.builtins-i686-android",
-			defaults: ["toolchain_libs_defaults"],
-			vendor_ramdisk_available: true,
-			native_bridge_supported: true,
-		}
-
-		cc_prebuilt_library_static {
-			name: "libclang_rt.builtins-x86_64-android",
-			defaults: [
-				"linux_bionic_supported",
-				"toolchain_libs_defaults",
-			],
-			native_bridge_supported: true,
-			vendor_ramdisk_available: true,
-		}
-
-		cc_prebuilt_library_static {
 			name: "libunwind",
 			defaults: [
 				"linux_bionic_supported",
@@ -144,30 +110,7 @@
 		}
 
 		cc_prebuilt_library_static {
-			name: "libclang_rt.fuzzer-arm-android",
-			defaults: ["toolchain_libs_defaults"],
-		}
-
-		cc_prebuilt_library_static {
-			name: "libclang_rt.fuzzer-aarch64-android",
-			defaults: ["toolchain_libs_defaults"],
-		}
-
-		cc_prebuilt_library_static {
-			name: "libclang_rt.fuzzer-i686-android",
-			defaults: ["toolchain_libs_defaults"],
-		}
-
-		cc_prebuilt_library_static {
-			name: "libclang_rt.fuzzer-x86_64-android",
-			defaults: [
-				"linux_bionic_supported",
-				"toolchain_libs_defaults",
-			],
-		}
-
-		cc_prebuilt_library_static {
-			name: "libclang_rt.fuzzer-x86_64",
+			name: "libclang_rt.fuzzer",
 			defaults: [
 				"linux_bionic_supported",
 				"toolchain_libs_defaults",
@@ -176,17 +119,12 @@
 
 		// Needed for sanitizer
 		cc_prebuilt_library_shared {
-			name: "libclang_rt.ubsan_standalone-aarch64-android",
+			name: "libclang_rt.ubsan_standalone",
 			defaults: ["toolchain_libs_defaults"],
 		}
 
 		cc_prebuilt_library_static {
-			name: "libclang_rt.ubsan_minimal-aarch64-android",
-			defaults: ["toolchain_libs_defaults"],
-		}
-
-		cc_prebuilt_library_static {
-			name: "libclang_rt.ubsan_minimal-arm-android",
+			name: "libclang_rt.ubsan_minimal",
 			defaults: ["toolchain_libs_defaults"],
 		}
 
diff --git a/cc/vendor_snapshot.go b/cc/vendor_snapshot.go
index 8a17e2e..e7c05ac 100644
--- a/cc/vendor_snapshot.go
+++ b/cc/vendor_snapshot.go
@@ -146,6 +146,7 @@
 	// binary flags
 	Symlinks         []string `json:",omitempty"`
 	StaticExecutable bool     `json:",omitempty"`
+	InstallInRoot    bool     `json:",omitempty"`
 
 	// dependencies
 	SharedLibs  []string `json:",omitempty"`
@@ -320,6 +321,7 @@
 			// binary flags
 			prop.Symlinks = m.Symlinks()
 			prop.StaticExecutable = m.StaticExecutable()
+			prop.InstallInRoot = m.InstallInRoot()
 			prop.SharedLibs = m.SnapshotSharedLibs()
 			// static libs dependencies are required to collect the NOTICE files.
 			prop.StaticLibs = m.SnapshotStaticLibs()
diff --git a/cc/vendor_snapshot_test.go b/cc/vendor_snapshot_test.go
index 645b2cc..2bb43ab 100644
--- a/cc/vendor_snapshot_test.go
+++ b/cc/vendor_snapshot_test.go
@@ -741,6 +741,7 @@
 				src: "bin",
 			},
 		},
+		symlinks: ["binfoo", "binbar"],
 	}
 
 	vendor_snapshot_binary {
@@ -920,7 +921,21 @@
 	ctx.ModuleForTests("libvendor_without_snapshot", sharedVariant).Output("libvendor_without_snapshot.so")
 
 	// bin is installed by bin.vendor_binary.31.arm64
-	ctx.ModuleForTests("bin.vendor_binary.31.arm64", binaryVariant).Output("bin")
+	bin64Module := ctx.ModuleForTests("bin.vendor_binary.31.arm64", binaryVariant)
+	bin64Module.Output("bin")
+
+	// also test symlinks
+	bin64MkEntries := android.AndroidMkEntriesForTest(t, ctx, bin64Module.Module())
+	bin64KatiSymlinks := bin64MkEntries[0].EntryMap["LOCAL_SOONG_INSTALL_SYMLINKS"]
+
+	// Either AndroidMk entries contain symlinks, or symlinks should be installed by Soong
+	for _, symlink := range []string{"binfoo", "binbar"} {
+		if inList(symlink, bin64KatiSymlinks) {
+			continue
+		}
+
+		bin64Module.Output(symlink)
+	}
 
 	// bin32 is installed by bin32.vendor_binary.31.arm64
 	ctx.ModuleForTests("bin32.vendor_binary.31.arm64", binary32Variant).Output("bin32")
diff --git a/cc/vndk.go b/cc/vndk.go
index c9c9f2c..bf6148b 100644
--- a/cc/vndk.go
+++ b/cc/vndk.go
@@ -450,7 +450,7 @@
 // Therefore, by removing the library here, we cause it to only be installed if libc
 // depends on it.
 func llndkLibrariesTxtFactory() android.SingletonModule {
-	return newVndkLibrariesWithMakeVarFilter(llndkLibraries, "LLNDK_LIBRARIES", "libclang_rt.hwasan-")
+	return newVndkLibrariesWithMakeVarFilter(llndkLibraries, "LLNDK_LIBRARIES", "libclang_rt.hwasan")
 }
 
 // vndksp_libraries_txt is a singleton module whose content is a list of VNDKSP libraries
diff --git a/cmd/extract_linker/main.go b/cmd/extract_linker/main.go
index 5603b41..aaca1dd 100644
--- a/cmd/extract_linker/main.go
+++ b/cmd/extract_linker/main.go
@@ -116,7 +116,7 @@
 
 	// Discard the PT_INTERP section so that the linker doesn't need to be passed the
 	// --no-dynamic-linker flag.
-	fmt.Println(script, "    /DISCARD/ : { *(.interp) }")
+	fmt.Fprintln(script, "  /DISCARD/ : { *(.interp) }")
 
 	fmt.Fprintln(script, "}")
 	fmt.Fprintln(script, "INSERT BEFORE .note.android.embedded_linker;")
diff --git a/cmd/go2bp/go2bp.go b/cmd/go2bp/go2bp.go
index 07cb5df..fb5a746 100644
--- a/cmd/go2bp/go2bp.go
+++ b/cmd/go2bp/go2bp.go
@@ -335,12 +335,15 @@
 	}
 
 	cmd := exec.Command("go", "list", "-json", "./...")
-	output, err := cmd.Output()
-	if err != nil {
-		fmt.Fprintf(os.Stderr, "Failed to dump the go packages: %v\n", err)
+	var stdoutb, stderrb bytes.Buffer
+	cmd.Stdout = &stdoutb
+	cmd.Stderr = &stderrb
+	if err := cmd.Run(); err != nil {
+		fmt.Fprintf(os.Stderr, "Running %q to dump the Go packages failed: %v, stderr:\n%s\n",
+			cmd.String(), err, stderrb.Bytes())
 		os.Exit(1)
 	}
-	decoder := json.NewDecoder(bytes.NewReader(output))
+	decoder := json.NewDecoder(bytes.NewReader(stdoutb.Bytes()))
 
 	pkgs := []*GoPackage{}
 	pkgMap := map[string]*GoPackage{}
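The go2bp change above keeps "go list" diagnostics out of the JSON stream and reports them only when the command fails. A standalone sketch of the same pattern, trimmed to the essentials:

    package main

    import (
        "bytes"
        "encoding/json"
        "log"
        "os/exec"
    )

    func main() {
        cmd := exec.Command("go", "list", "-json", "./...")
        var stdoutb, stderrb bytes.Buffer
        cmd.Stdout = &stdoutb // JSON package objects only
        cmd.Stderr = &stderrb // tool diagnostics kept separate
        if err := cmd.Run(); err != nil {
            log.Fatalf("%s failed: %v\nstderr:\n%s", cmd.String(), err, stderrb.String())
        }
        dec := json.NewDecoder(bytes.NewReader(stdoutb.Bytes()))
        for dec.More() {
            var pkg struct{ ImportPath string }
            if err := dec.Decode(&pkg); err != nil {
                log.Fatal(err)
            }
            log.Println(pkg.ImportPath)
        }
    }
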
diff --git a/cmd/sbox/sbox.go b/cmd/sbox/sbox.go
index 4fa7486..4f7451d 100644
--- a/cmd/sbox/sbox.go
+++ b/cmd/sbox/sbox.go
@@ -38,9 +38,11 @@
 )
 
 var (
-	sandboxesRoot string
-	manifestFile  string
-	keepOutDir    bool
+	sandboxesRoot  string
+	outputDir      string
+	manifestFile   string
+	keepOutDir     bool
+	writeIfChanged bool
 )
 
 const (
@@ -51,10 +53,14 @@
 func init() {
 	flag.StringVar(&sandboxesRoot, "sandbox-path", "",
 		"root of temp directory to put the sandbox into")
+	flag.StringVar(&outputDir, "output-dir", "",
+		"directory which will contain all output files and only output files")
 	flag.StringVar(&manifestFile, "manifest", "",
 		"textproto manifest describing the sandboxed command(s)")
 	flag.BoolVar(&keepOutDir, "keep-out-dir", false,
 		"whether to keep the sandbox directory when done")
+	flag.BoolVar(&writeIfChanged, "write-if-changed", false,
+		"only write the output files if they have changed")
 }
 
 func usageViolation(violation string) {
@@ -197,23 +203,19 @@
 // createCommandScript will create and return an exec.Cmd that runs rawCommand.
 //
 // rawCommand is executed via a script in the sandbox.
-// tempDir is the temporary where the script is created.
-// toDirInSandBox is the path containing the script in the sbox environment.
-// toDirInSandBox is the path containing the script in the sbox environment.
-// seed is a unique integer used to distinguish different scripts that might be at location.
+// scriptPath is the temporary path where the script is created.
+// scriptPathInSandbox is the path to the script in the sbox environment.
 //
 // returns an exec.Cmd that can be ran from within sbox context if no error, or nil if error.
 // caller must ensure script is cleaned up if function succeeds.
 //
-func createCommandScript(rawCommand string, tempDir, toDirInSandbox string, seed int) (*exec.Cmd, error) {
-	scriptName := fmt.Sprintf("sbox_command.%d.bash", seed)
-	scriptPathAndName := joinPath(tempDir, scriptName)
-	err := os.WriteFile(scriptPathAndName, []byte(rawCommand), 0644)
+func createCommandScript(rawCommand, scriptPath, scriptPathInSandbox string) (*exec.Cmd, error) {
+	err := os.WriteFile(scriptPath, []byte(rawCommand), 0644)
 	if err != nil {
 		return nil, fmt.Errorf("failed to write command %s... to %s",
-			rawCommand[0:40], scriptPathAndName)
+			rawCommand[0:40], scriptPath)
 	}
-	return exec.Command("bash", joinPath(toDirInSandbox, filepath.Base(scriptName))), nil
+	return exec.Command("bash", scriptPathInSandbox), nil
 }
 
 // readManifest reads an sbox manifest from a textproto file.
@@ -241,6 +243,12 @@
 		return "", fmt.Errorf("command is required")
 	}
 
+	// Remove old files from the output directory before running the command
+	err = clearOutputDirectory(command.CopyAfter, outputDir, writeType(writeIfChanged))
+	if err != nil {
+		return "", err
+	}
+
 	pathToTempDirInSbox := tempDir
 	if command.GetChdir() {
 		pathToTempDirInSbox = "."
@@ -252,7 +260,7 @@
 	}
 
 	// Copy in any files specified by the manifest.
-	err = copyFiles(command.CopyBefore, "", tempDir, false)
+	err = copyFiles(command.CopyBefore, "", tempDir, requireFromExists, alwaysWrite)
 	if err != nil {
 		return "", err
 	}
@@ -277,7 +285,10 @@
 		return "", err
 	}
 
-	cmd, err := createCommandScript(rawCommand, tempDir, pathToTempDirInSbox, commandIndex)
+	scriptName := fmt.Sprintf("sbox_command.%d.bash", commandIndex)
+	scriptPath := joinPath(tempDir, scriptName)
+	scriptPathInSandbox := joinPath(pathToTempDirInSbox, scriptName)
+	cmd, err := createCommandScript(rawCommand, scriptPath, scriptPathInSandbox)
 	if err != nil {
 		return "", err
 	}
@@ -306,7 +317,7 @@
 		// especially useful for linters with baselines that print an error message on failure
 		// with a command to copy the output lint errors to the new baseline.  Use a copy instead of
 		// a move to leave the sandbox intact for manual inspection
-		copyFiles(command.CopyAfter, tempDir, "", true)
+		copyFiles(command.CopyAfter, tempDir, "", allowFromNotExists, writeType(writeIfChanged))
 	}
 
 	// If the command  was executed but failed with an error, print a debugging message before
@@ -315,9 +326,9 @@
 		fmt.Fprintf(os.Stderr,
 			"The failing command was run inside an sbox sandbox in temporary directory\n"+
 				"%s\n"+
-				"The failing command line was:\n"+
+				"The failing command line can be found in\n"+
 				"%s\n",
-			tempDir, rawCommand)
+			tempDir, scriptPath)
 	}
 
 	// Write the command's combined stdout/stderr.
@@ -327,39 +338,16 @@
 		return "", err
 	}
 
-	missingOutputErrors := validateOutputFiles(command.CopyAfter, tempDir)
-
-	if len(missingOutputErrors) > 0 {
-		// find all created files for making a more informative error message
-		createdFiles := findAllFilesUnder(tempDir)
-
-		// build error message
-		errorMessage := "mismatch between declared and actual outputs\n"
-		errorMessage += "in sbox command(" + rawCommand + ")\n\n"
-		errorMessage += "in sandbox " + tempDir + ",\n"
-		errorMessage += fmt.Sprintf("failed to create %v files:\n", len(missingOutputErrors))
-		for _, missingOutputError := range missingOutputErrors {
-			errorMessage += "  " + missingOutputError.Error() + "\n"
-		}
-		if len(createdFiles) < 1 {
-			errorMessage += "created 0 files."
-		} else {
-			errorMessage += fmt.Sprintf("did create %v files:\n", len(createdFiles))
-			creationMessages := createdFiles
-			maxNumCreationLines := 10
-			if len(creationMessages) > maxNumCreationLines {
-				creationMessages = creationMessages[:maxNumCreationLines]
-				creationMessages = append(creationMessages, fmt.Sprintf("...%v more", len(createdFiles)-maxNumCreationLines))
-			}
-			for _, creationMessage := range creationMessages {
-				errorMessage += "  " + creationMessage + "\n"
-			}
-		}
-
-		return "", errors.New(errorMessage)
+	err = validateOutputFiles(command.CopyAfter, tempDir, outputDir, rawCommand)
+	if err != nil {
+		return "", err
 	}
+
 	// the created files match the declared files; now move them
-	err = moveFiles(command.CopyAfter, tempDir, "")
+	err = moveFiles(command.CopyAfter, tempDir, "", writeType(writeIfChanged))
+	if err != nil {
+		return "", err
+	}
 
 	return depFile, nil
 }
@@ -380,8 +368,9 @@
 
 // validateOutputFiles verifies that all files that have a rule to be copied out of the sandbox
 // were created by the command.
-func validateOutputFiles(copies []*sbox_proto.Copy, sandboxDir string) []error {
+func validateOutputFiles(copies []*sbox_proto.Copy, sandboxDir, outputDir, rawCommand string) error {
 	var missingOutputErrors []error
+	var incorrectOutputDirectoryErrors []error
 	for _, copyPair := range copies {
 		fromPath := joinPath(sandboxDir, copyPair.GetFrom())
 		fileInfo, err := os.Stat(fromPath)
@@ -392,17 +381,91 @@
 		if fileInfo.IsDir() {
 			missingOutputErrors = append(missingOutputErrors, fmt.Errorf("%s: not a file", fromPath))
 		}
+
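+		// Check that the declared destination path falls inside the output directory.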
+		toPath := copyPair.GetTo()
+		if rel, err := filepath.Rel(outputDir, toPath); err != nil {
+			return err
+		} else if strings.HasPrefix(rel, "../") {
+			incorrectOutputDirectoryErrors = append(incorrectOutputDirectoryErrors,
+				fmt.Errorf("%s is not under %s", toPath, outputDir))
+		}
 	}
-	return missingOutputErrors
+
+	const maxErrors = 10
+
+	if len(incorrectOutputDirectoryErrors) > 0 {
+		errorMessage := ""
+		more := 0
+		if len(incorrectOutputDirectoryErrors) > maxErrors {
+			more = len(incorrectOutputDirectoryErrors) - maxErrors
+			incorrectOutputDirectoryErrors = incorrectOutputDirectoryErrors[:maxErrors]
+		}
+
+		for _, err := range incorrectOutputDirectoryErrors {
+			errorMessage += err.Error() + "\n"
+		}
+		if more > 0 {
+			errorMessage += fmt.Sprintf("...%v more", more)
+		}
+
+		return errors.New(errorMessage)
+	}
+
+	if len(missingOutputErrors) > 0 {
+		// find all created files for making a more informative error message
+		createdFiles := findAllFilesUnder(sandboxDir)
+
+		// build error message
+		errorMessage := "mismatch between declared and actual outputs\n"
+		errorMessage += "in sbox command(" + rawCommand + ")\n\n"
+		errorMessage += "in sandbox " + sandboxDir + ",\n"
+		errorMessage += fmt.Sprintf("failed to create %v files:\n", len(missingOutputErrors))
+		for _, missingOutputError := range missingOutputErrors {
+			errorMessage += "  " + missingOutputError.Error() + "\n"
+		}
+		if len(createdFiles) < 1 {
+			errorMessage += "created 0 files."
+		} else {
+			errorMessage += fmt.Sprintf("did create %v files:\n", len(createdFiles))
+			creationMessages := createdFiles
+			if len(creationMessages) > maxErrors {
+				creationMessages = creationMessages[:maxErrors]
+				creationMessages = append(creationMessages, fmt.Sprintf("...%v more", len(createdFiles)-maxErrors))
+			}
+			for _, creationMessage := range creationMessages {
+				errorMessage += "  " + creationMessage + "\n"
+			}
+		}
+
+		return errors.New(errorMessage)
+	}
+
+	return nil
 }
 
-// copyFiles copies files in or out of the sandbox.  If allowFromNotExists is true then errors
-// caused by a from path not existing are ignored.
-func copyFiles(copies []*sbox_proto.Copy, fromDir, toDir string, allowFromNotExists bool) error {
+type existsType bool
+
+const (
+	requireFromExists  existsType = false
+	allowFromNotExists            = true
+)
+
+type writeType bool
+
+const (
+	alwaysWrite        writeType = false
+	onlyWriteIfChanged           = true
+)
+
+// copyFiles copies files in or out of the sandbox.  If exists is allowFromNotExists then errors
+// caused by a from path not existing are ignored.  If write is onlyWriteIfChanged then the output
+// file is compared to the input file and not written to if it is the same, avoiding updating
+// the timestamp.
+func copyFiles(copies []*sbox_proto.Copy, fromDir, toDir string, exists existsType, write writeType) error {
 	for _, copyPair := range copies {
 		fromPath := joinPath(fromDir, copyPair.GetFrom())
 		toPath := joinPath(toDir, copyPair.GetTo())
-		err := copyOneFile(fromPath, toPath, copyPair.GetExecutable(), allowFromNotExists)
+		err := copyOneFile(fromPath, toPath, copyPair.GetExecutable(), exists, write)
 		if err != nil {
 			return fmt.Errorf("error copying %q to %q: %w", fromPath, toPath, err)
 		}
@@ -411,8 +474,11 @@
 }
 
 // copyOneFile copies a file and its permissions.  If forceExecutable is true it adds u+x to the
-// permissions.  If allowFromNotExists is true it returns nil if the from path doesn't exist.
-func copyOneFile(from string, to string, forceExecutable, allowFromNotExists bool) error {
+// permissions.  If exists is allowFromNotExists it returns nil if the from path doesn't exist.
+// If write is onlyWriteIfChanged then the output file is compared to the input file and not written to
+// if it is the same, avoiding updating the timestamp.
+func copyOneFile(from string, to string, forceExecutable bool, exists existsType,
+	write writeType) error {
 	err := os.MkdirAll(filepath.Dir(to), 0777)
 	if err != nil {
 		return err
@@ -420,7 +486,7 @@
 
 	stat, err := os.Stat(from)
 	if err != nil {
-		if os.IsNotExist(err) && allowFromNotExists {
+		if os.IsNotExist(err) && exists == allowFromNotExists {
 			return nil
 		}
 		return err
@@ -431,6 +497,10 @@
 		perm = perm | 0100 // u+x
 	}
 
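+	// When only writing changed outputs, skip the copy if the destination already has identical contents, preserving its timestamp.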
+	if write == onlyWriteIfChanged && filesHaveSameContents(from, to) {
+		return nil
+	}
+
 	in, err := os.Open(from)
 	if err != nil {
 		return err
@@ -504,7 +574,7 @@
 		to := applyPathMappings(rspFile.PathMappings, from)
 
 		// Copy the file into the sandbox.
-		err := copyOneFile(from, joinPath(toDir, to), false, false)
+		err := copyOneFile(from, joinPath(toDir, to), false, requireFromExists, alwaysWrite)
 		if err != nil {
 			return err
 		}
@@ -551,9 +621,10 @@
 
 // moveFiles moves files specified by a set of copy rules.  It uses os.Rename, so it is restricted
 // to moving files where the source and destination are in the same filesystem.  This is OK for
-// sbox because the temporary directory is inside the out directory.  It updates the timestamp
-// of the new file.
-func moveFiles(copies []*sbox_proto.Copy, fromDir, toDir string) error {
+// sbox because the temporary directory is inside the out directory.  If write is onlyWriteIfChanged
+// then the output file is compared to the input file and not written to if it is the same, avoiding
+// updating the timestamp.  Otherwise it always updates the timestamp of the new file.
+func moveFiles(copies []*sbox_proto.Copy, fromDir, toDir string, write writeType) error {
 	for _, copyPair := range copies {
 		fromPath := joinPath(fromDir, copyPair.GetFrom())
 		toPath := joinPath(toDir, copyPair.GetTo())
@@ -562,6 +633,10 @@
 			return err
 		}
 
+		if write == onlyWriteIfChanged && filesHaveSameContents(fromPath, toPath) {
+			continue
+		}
+
 		err = os.Rename(fromPath, toPath)
 		if err != nil {
 			return err
@@ -578,6 +653,37 @@
 	return nil
 }
 
+// clearOutputDirectory removes all files in the output directory if write is alwaysWrite, or
+// any files not listed in copies if write is onlyWriteIfChanged
+func clearOutputDirectory(copies []*sbox_proto.Copy, outputDir string, write writeType) error {
+	if outputDir == "" {
+		return fmt.Errorf("output directory must be set")
+	}
+
+	if write == alwaysWrite {
+		// When writing all the output files remove the whole output directory
+		return os.RemoveAll(outputDir)
+	}
+
+	outputFiles := make(map[string]bool, len(copies))
+	for _, copyPair := range copies {
+		outputFiles[copyPair.GetTo()] = true
+	}
+
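+	// Delete anything in the output directory that is not a declared output of this command.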
+	existingFiles := findAllFilesUnder(outputDir)
+	for _, existingFile := range existingFiles {
+		fullExistingFile := filepath.Join(outputDir, existingFile)
+		if !outputFiles[fullExistingFile] {
+			err := os.Remove(fullExistingFile)
+			if err != nil {
+				return fmt.Errorf("failed to remove obsolete output file %s: %w", fullExistingFile, err)
+			}
+		}
+	}
+
+	return nil
+}
+
 // Rewrite one or more depfiles so that it doesn't include the (randomized) sandbox directory
 // to an output file.
 func rewriteDepFiles(ins []string, out string) error {
@@ -621,6 +727,66 @@
 	return filepath.Join(dir, file)
 }
 
+// filesHaveSameContents compares the contents of two files, returning true if they are the same
+// and false if they differ or any error occurs.
+func filesHaveSameContents(a, b string) bool {
+	// Compare the sizes of the two files
+	statA, err := os.Stat(a)
+	if err != nil {
+		return false
+	}
+	statB, err := os.Stat(b)
+	if err != nil {
+		return false
+	}
+
+	if statA.Size() != statB.Size() {
+		return false
+	}
+
+	// Open the two files
+	fileA, err := os.Open(a)
+	if err != nil {
+		return false
+	}
+	defer fileA.Close()
+	fileB, err := os.Open(b)
+	if err != nil {
+		return false
+	}
+	defer fileB.Close()
+
+	// Compare the files 1MB at a time
+	const bufSize = 1 * 1024 * 1024
+	bufA := make([]byte, bufSize)
+	bufB := make([]byte, bufSize)
+
+	remain := statA.Size()
+	for remain > 0 {
+		toRead := int64(bufSize)
+		if toRead > remain {
+			toRead = remain
+		}
+
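+		// Read the same-sized chunk from both files; any read error or short read counts as a mismatch.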
+		_, err = io.ReadFull(fileA, bufA[:toRead])
+		if err != nil {
+			return false
+		}
+		_, err = io.ReadFull(fileB, bufB[:toRead])
+		if err != nil {
+			return false
+		}
+
+		if bytes.Compare(bufA[:toRead], bufB[:toRead]) != 0 {
+			return false
+		}
+
+		remain -= toRead
+	}
+
+	return true
+}
+
 func makeAbsPathEnv(pathEnv string) (string, error) {
 	pathEnvElements := filepath.SplitList(pathEnv)
 	for i, p := range pathEnvElements {
diff --git a/cmd/sbox/sbox_test.go b/cmd/sbox/sbox_test.go
new file mode 100644
index 0000000..3f13d2f
--- /dev/null
+++ b/cmd/sbox/sbox_test.go
@@ -0,0 +1,127 @@
+// Copyright 2022 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"io/ioutil"
+	"os"
+	"path/filepath"
+	"strings"
+	"testing"
+)
+
+func Test_filesHaveSameContents(t *testing.T) {
+
+	tests := []struct {
+		name     string
+		a        string
+		b        string
+		missingA bool
+		missingB bool
+
+		equal bool
+	}{
+		{
+			name:  "empty",
+			a:     "",
+			b:     "",
+			equal: true,
+		},
+		{
+			name:  "equal",
+			a:     "foo",
+			b:     "foo",
+			equal: true,
+		},
+		{
+			name:  "unequal",
+			a:     "foo",
+			b:     "bar",
+			equal: false,
+		},
+		{
+			name:  "unequal different sizes",
+			a:     "foo",
+			b:     "foobar",
+			equal: false,
+		},
+		{
+			name:  "equal large",
+			a:     strings.Repeat("a", 2*1024*1024),
+			b:     strings.Repeat("a", 2*1024*1024),
+			equal: true,
+		},
+		{
+			name:  "equal large unaligned",
+			a:     strings.Repeat("a", 2*1024*1024+10),
+			b:     strings.Repeat("a", 2*1024*1024+10),
+			equal: true,
+		},
+		{
+			name:  "unequal large",
+			a:     strings.Repeat("a", 2*1024*1024),
+			b:     strings.Repeat("a", 2*1024*1024-1) + "b",
+			equal: false,
+		},
+		{
+			name:  "unequal large unaligned",
+			a:     strings.Repeat("a", 2*1024*1024+10),
+			b:     strings.Repeat("a", 2*1024*1024+9) + "b",
+			equal: false,
+		},
+		{
+			name:     "missing a",
+			missingA: true,
+			b:        "foo",
+			equal:    false,
+		},
+		{
+			name:     "missing b",
+			a:        "foo",
+			missingB: true,
+			equal:    false,
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			tempDir, err := os.MkdirTemp("", "testFilesHaveSameContents")
+			if err != nil {
+				t.Fatalf("failed to create temp dir: %s", err)
+			}
+			defer os.RemoveAll(tempDir)
+
+			fileA := filepath.Join(tempDir, "a")
+			fileB := filepath.Join(tempDir, "b")
+
+			if !tt.missingA {
+				err := ioutil.WriteFile(fileA, []byte(tt.a), 0666)
+				if err != nil {
+					t.Fatalf("failed to write %s: %s", fileA, err)
+				}
+			}
+
+			if !tt.missingB {
+				err := ioutil.WriteFile(fileB, []byte(tt.b), 0666)
+				if err != nil {
+					t.Fatalf("failed to write %s: %s", fileB, err)
+				}
+			}
+
+			if got := filesHaveSameContents(fileA, fileB); got != tt.equal {
+				t.Errorf("filesHaveSameContents() = %v, want %v", got, tt.equal)
+			}
+		})
+	}
+}
diff --git a/cmd/soong_build/Android.bp b/cmd/soong_build/Android.bp
index e85163e..72af3e0 100644
--- a/cmd/soong_build/Android.bp
+++ b/cmd/soong_build/Android.bp
@@ -25,6 +25,7 @@
         "golang-protobuf-android",
         "soong",
         "soong-android",
+        "soong-provenance",
         "soong-bp2build",
         "soong-ui-metrics_proto",
     ],
diff --git a/cmd/soong_build/main.go b/cmd/soong_build/main.go
index b3a6ee0..4b3161b 100644
--- a/cmd/soong_build/main.go
+++ b/cmd/soong_build/main.go
@@ -26,10 +26,11 @@
 	"android/soong/android"
 	"android/soong/bp2build"
 	"android/soong/shared"
+	"android/soong/ui/metrics/bp2build_metrics_proto"
 
 	"github.com/google/blueprint/bootstrap"
 	"github.com/google/blueprint/deptools"
-	"github.com/google/blueprint/pathtools"
+	"github.com/google/blueprint/metrics"
 	androidProtobuf "google.golang.org/protobuf/android"
 )
 
@@ -134,8 +135,14 @@
 // TODO(cparsons): Don't output any ninja file, as the second pass will overwrite
 // the incorrect results from the first pass, and file I/O is expensive.
 func runMixedModeBuild(configuration android.Config, firstCtx *android.Context, extraNinjaDeps []string) {
-	bootstrap.RunBlueprint(cmdlineArgs, bootstrap.StopBeforeWriteNinja, firstCtx.Context, configuration)
+	firstCtx.EventHandler.Begin("mixed_build")
+	defer firstCtx.EventHandler.End("mixed_build")
 
+	firstCtx.EventHandler.Begin("prepare")
+	bootstrap.RunBlueprint(cmdlineArgs, bootstrap.StopBeforeWriteNinja, firstCtx.Context, configuration)
+	firstCtx.EventHandler.End("prepare")
+
+	firstCtx.EventHandler.Begin("bazel")
 	// Invoke bazel commands and save results for second pass.
 	if err := configuration.BazelContext.InvokeBazel(); err != nil {
 		fmt.Fprintf(os.Stderr, "%s", err)
@@ -147,18 +154,25 @@
 		fmt.Fprintf(os.Stderr, "%s", err)
 		os.Exit(1)
 	}
+	firstCtx.EventHandler.End("bazel")
+
 	secondCtx := newContext(secondConfig)
+	secondCtx.EventHandler = firstCtx.EventHandler
+	secondCtx.EventHandler.Begin("analyze")
 	ninjaDeps := bootstrap.RunBlueprint(cmdlineArgs, bootstrap.DoEverything, secondCtx.Context, secondConfig)
 	ninjaDeps = append(ninjaDeps, extraNinjaDeps...)
+	secondCtx.EventHandler.End("analyze")
 
-	globListFiles := writeBuildGlobsNinjaFile(secondCtx.SrcDir(), configuration.SoongOutDir(), secondCtx.Globs, configuration)
+	globListFiles := writeBuildGlobsNinjaFile(secondCtx, configuration.SoongOutDir(), configuration)
 	ninjaDeps = append(ninjaDeps, globListFiles...)
 
-	writeDepFile(cmdlineArgs.OutFile, ninjaDeps)
+	writeDepFile(cmdlineArgs.OutFile, *secondCtx.EventHandler, ninjaDeps)
 }
 
 // Run the code-generation phase to convert BazelTargetModules to BUILD files.
 func runQueryView(queryviewDir, queryviewMarker string, configuration android.Config, ctx *android.Context) {
+	ctx.EventHandler.Begin("queryview")
+	defer ctx.EventHandler.End("queryview")
 	codegenContext := bp2build.NewCodegenContext(configuration, *ctx, bp2build.QueryView)
 	absoluteQueryViewDir := shared.JoinPath(topDir, queryviewDir)
 	if err := createBazelQueryView(codegenContext, absoluteQueryViewDir); err != nil {
@@ -169,9 +183,14 @@
 	touch(shared.JoinPath(topDir, queryviewMarker))
 }
 
-func writeMetrics(configuration android.Config) {
-	metricsFile := filepath.Join(configuration.SoongOutDir(), "soong_build_metrics.pb")
-	err := android.WriteMetrics(configuration, metricsFile)
+func writeMetrics(configuration android.Config, eventHandler metrics.EventHandler) {
+	metricsDir := configuration.Getenv("LOG_DIR")
+	if len(metricsDir) < 1 {
+		fmt.Fprintf(os.Stderr, "\nMissing required env var for generating soong metrics: LOG_DIR\n")
+		os.Exit(1)
+	}
+	metricsFile := filepath.Join(metricsDir, "soong_build_metrics.pb")
+	err := android.WriteMetrics(configuration, eventHandler, metricsFile)
 	if err != nil {
 		fmt.Fprintf(os.Stderr, "error writing soong_build metrics %s: %s", metricsFile, err)
 		os.Exit(1)
@@ -191,18 +210,23 @@
 	ctx.Context.PrintJSONGraphAndActions(graphFile, actionsFile)
 }
 
-func writeBuildGlobsNinjaFile(srcDir, buildDir string, globs func() pathtools.MultipleGlobResults, config interface{}) []string {
+func writeBuildGlobsNinjaFile(ctx *android.Context, buildDir string, config interface{}) []string {
+	ctx.EventHandler.Begin("globs_ninja_file")
+	defer ctx.EventHandler.End("globs_ninja_file")
+
 	globDir := bootstrap.GlobDirectory(buildDir, globListDir)
 	bootstrap.WriteBuildGlobsNinjaFile(&bootstrap.GlobSingleton{
-		GlobLister: globs,
+		GlobLister: ctx.Globs,
 		GlobFile:   globFile,
 		GlobDir:    globDir,
-		SrcDir:     srcDir,
+		SrcDir:     ctx.SrcDir(),
 	}, config)
 	return bootstrap.GlobFileListFiles(globDir)
 }
 
-func writeDepFile(outputFile string, ninjaDeps []string) {
+func writeDepFile(outputFile string, eventHandler metrics.EventHandler, ninjaDeps []string) {
+	eventHandler.Begin("ninja_deps")
+	defer eventHandler.End("ninja_deps")
 	depFile := shared.JoinPath(topDir, outputFile+".d")
 	err := deptools.WriteDepFile(depFile, outputFile, ninjaDeps)
 	if err != nil {
@@ -230,36 +254,36 @@
 
 	blueprintArgs := cmdlineArgs
 
-	var stopBefore bootstrap.StopBefore
-	if generateModuleGraphFile {
-		stopBefore = bootstrap.StopBeforeWriteNinja
-	} else if generateQueryView {
-		stopBefore = bootstrap.StopBeforePrepareBuildActions
-	} else if generateDocFile {
-		stopBefore = bootstrap.StopBeforePrepareBuildActions
-	} else {
-		stopBefore = bootstrap.DoEverything
-	}
-
 	ctx := newContext(configuration)
 	if mixedModeBuild {
 		runMixedModeBuild(configuration, ctx, extraNinjaDeps)
 	} else {
+		var stopBefore bootstrap.StopBefore
+		if generateModuleGraphFile {
+			stopBefore = bootstrap.StopBeforeWriteNinja
+		} else if generateQueryView {
+			stopBefore = bootstrap.StopBeforePrepareBuildActions
+		} else if generateDocFile {
+			stopBefore = bootstrap.StopBeforePrepareBuildActions
+		} else {
+			stopBefore = bootstrap.DoEverything
+		}
+
 		ninjaDeps := bootstrap.RunBlueprint(blueprintArgs, stopBefore, ctx.Context, configuration)
 		ninjaDeps = append(ninjaDeps, extraNinjaDeps...)
 
-		globListFiles := writeBuildGlobsNinjaFile(ctx.SrcDir(), configuration.SoongOutDir(), ctx.Globs, configuration)
+		globListFiles := writeBuildGlobsNinjaFile(ctx, configuration.SoongOutDir(), configuration)
 		ninjaDeps = append(ninjaDeps, globListFiles...)
 
 		// Convert the Soong module graph into Bazel BUILD files.
 		if generateQueryView {
 			queryviewMarkerFile := bazelQueryViewDir + ".marker"
 			runQueryView(bazelQueryViewDir, queryviewMarkerFile, configuration, ctx)
-			writeDepFile(queryviewMarkerFile, ninjaDeps)
+			writeDepFile(queryviewMarkerFile, *ctx.EventHandler, ninjaDeps)
 			return queryviewMarkerFile
 		} else if generateModuleGraphFile {
 			writeJsonModuleGraphAndActions(ctx, moduleGraphFile, moduleActionsFile)
-			writeDepFile(moduleGraphFile, ninjaDeps)
+			writeDepFile(moduleGraphFile, *ctx.EventHandler, ninjaDeps)
 			return moduleGraphFile
 		} else if generateDocFile {
 			// TODO: we could make writeDocs() return the list of documentation files
@@ -269,16 +293,16 @@
 				fmt.Fprintf(os.Stderr, "error building Soong documentation: %s\n", err)
 				os.Exit(1)
 			}
-			writeDepFile(docFile, ninjaDeps)
+			writeDepFile(docFile, *ctx.EventHandler, ninjaDeps)
 			return docFile
 		} else {
 			// The actual output (build.ninja) was written in the RunBlueprint() call
 			// above
-			writeDepFile(cmdlineArgs.OutFile, ninjaDeps)
+			writeDepFile(cmdlineArgs.OutFile, *ctx.EventHandler, ninjaDeps)
 		}
 	}
 
-	writeMetrics(configuration)
+	writeMetrics(configuration, *ctx.EventHandler)
 	return cmdlineArgs.OutFile
 }
 
@@ -335,6 +359,7 @@
 	}
 
 	finalOutputFile := doChosenActivity(configuration, extraNinjaDeps)
+
 	writeUsedEnvironmentFile(configuration, finalOutputFile)
 }
 
@@ -466,6 +491,9 @@
 // an alternate pipeline of mutators and singletons specifically for generating
 // Bazel BUILD files instead of Ninja files.
 func runBp2Build(configuration android.Config, extraNinjaDeps []string) {
+	eventHandler := metrics.EventHandler{}
+	eventHandler.Begin("bp2build")
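+	// Events recorded on this handler are attached to the bp2build metrics in writeBp2BuildMetrics below.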
+
 	// Register an alternate set of singletons and mutators for bazel
 	// conversion for Bazel conversion.
 	bp2buildCtx := android.NewContext(configuration)
@@ -500,7 +528,7 @@
 	ninjaDeps := bootstrap.RunBlueprint(blueprintArgs, bootstrap.StopBeforePrepareBuildActions, bp2buildCtx.Context, configuration)
 	ninjaDeps = append(ninjaDeps, extraNinjaDeps...)
 
-	globListFiles := writeBuildGlobsNinjaFile(bp2buildCtx.SrcDir(), configuration.SoongOutDir(), bp2buildCtx.Globs, configuration)
+	globListFiles := writeBuildGlobsNinjaFile(bp2buildCtx, configuration.SoongOutDir(), configuration)
 	ninjaDeps = append(ninjaDeps, globListFiles...)
 
 	// Run the code-generation phase to convert BazelTargetModules to BUILD files
@@ -537,27 +565,38 @@
 	symlinkForestDeps := bp2build.PlantSymlinkForest(
 		topDir, workspaceRoot, generatedRoot, ".", excludes)
 
+	ninjaDeps = append(ninjaDeps, codegenContext.AdditionalNinjaDeps()...)
+	ninjaDeps = append(ninjaDeps, symlinkForestDeps...)
+
+	writeDepFile(bp2buildMarker, eventHandler, ninjaDeps)
+
+	// Create an empty bp2build marker file.
+	touch(shared.JoinPath(topDir, bp2buildMarker))
+
+	eventHandler.End("bp2build")
+
 	// Only report metrics when in bp2build mode. The metrics aren't relevant
 	// for queryview, since that's a total repo-wide conversion and there's a
 	// 1:1 mapping for each module.
 	metrics.Print()
-	writeBp2BuildMetrics(&metrics, configuration)
-
-	ninjaDeps = append(ninjaDeps, codegenContext.AdditionalNinjaDeps()...)
-	ninjaDeps = append(ninjaDeps, symlinkForestDeps...)
-
-	writeDepFile(bp2buildMarker, ninjaDeps)
-
-	// Create an empty bp2build marker file.
-	touch(shared.JoinPath(topDir, bp2buildMarker))
+	writeBp2BuildMetrics(&metrics, configuration, eventHandler)
 }
 
 // Write Bp2Build metrics into $LOG_DIR
-func writeBp2BuildMetrics(metrics *bp2build.CodegenMetrics, configuration android.Config) {
+func writeBp2BuildMetrics(codegenMetrics *bp2build.CodegenMetrics,
+	configuration android.Config, eventHandler metrics.EventHandler) {
+	for _, event := range eventHandler.CompletedEvents() {
+		codegenMetrics.Events = append(codegenMetrics.Events,
+			&bp2build_metrics_proto.Event{
+				Name:      event.Id,
+				StartTime: uint64(event.Start.UnixNano()),
+				RealTime:  event.RuntimeNanoseconds(),
+			})
+	}
 	metricsDir := configuration.Getenv("LOG_DIR")
 	if len(metricsDir) < 1 {
 		fmt.Fprintf(os.Stderr, "\nMissing required env var for generating bp2build metrics: LOG_DIR\n")
 		os.Exit(1)
 	}
-	metrics.Write(metricsDir)
+	codegenMetrics.Write(metricsDir)
 }
diff --git a/cmd/soong_ui/main.go b/cmd/soong_ui/main.go
index a0cfbea..a03a86a 100644
--- a/cmd/soong_ui/main.go
+++ b/cmd/soong_ui/main.go
@@ -221,7 +221,6 @@
 		}
 		defer build.UploadMetrics(buildCtx, config, c.simpleOutput, buildStarted, files...)
 		defer met.Dump(soongMetricsFile)
-		defer build.DumpRBEMetrics(buildCtx, config, rbeMetricsFile)
 	}
 
 	// Read the time at the starting point.
diff --git a/cmd/symbols_map/Android.bp b/cmd/symbols_map/Android.bp
new file mode 100644
index 0000000..0ba3b07
--- /dev/null
+++ b/cmd/symbols_map/Android.bp
@@ -0,0 +1,34 @@
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+blueprint_go_binary {
+    name: "symbols_map",
+    srcs: [
+        "elf.go",
+        "r8.go",
+        "symbols_map.go",
+    ],
+    testSrcs: [
+        "elf_test.go",
+        "r8_test.go",
+    ],
+    deps: [
+        "blueprint-pathtools",
+        "golang-protobuf-encoding-prototext",
+        "soong-response",
+        "symbols_map_proto",
+    ],
+}
+
+bootstrap_go_package {
+    name: "symbols_map_proto",
+    pkgPath: "android/soong/cmd/symbols_map/symbols_map_proto",
+    deps: [
+        "golang-protobuf-reflect-protoreflect",
+        "golang-protobuf-runtime-protoimpl",
+    ],
+    srcs: [
+        "symbols_map_proto/symbols_map.pb.go",
+    ],
+}
diff --git a/cmd/symbols_map/elf.go b/cmd/symbols_map/elf.go
new file mode 100644
index 0000000..950e3b2
--- /dev/null
+++ b/cmd/symbols_map/elf.go
@@ -0,0 +1,118 @@
+// Copyright 2022 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"debug/elf"
+	"encoding/binary"
+	"encoding/hex"
+	"errors"
+	"fmt"
+	"io"
+	"os"
+)
+
+const gnuBuildID = "GNU\x00"
+
+// elfIdentifier extracts the elf build ID from an elf file.  If allowMissing is true it returns
+// an empty identifier if the file exists but the build ID note does not.
+func elfIdentifier(filename string, allowMissing bool) (string, error) {
+	f, err := os.Open(filename)
+	if err != nil {
+		return "", fmt.Errorf("failed to open %s: %w", filename, err)
+	}
+	defer f.Close()
+
+	return elfIdentifierFromReaderAt(f, filename, allowMissing)
+}
+
+// elfIdentifierFromReaderAt extracts the elf build ID from a ReaderAt.  If allowMissing is true it
+// returns an empty identifier if the file exists but the build ID note does not.
+func elfIdentifierFromReaderAt(r io.ReaderAt, filename string, allowMissing bool) (string, error) {
+	f, err := elf.NewFile(r)
+	if err != nil {
+		if allowMissing {
+			if errors.Is(err, io.EOF) || errors.Is(err, io.ErrUnexpectedEOF) {
+				return "", nil
+			}
+			if _, ok := err.(*elf.FormatError); ok {
+				// The file was not an elf file.
+				return "", nil
+			}
+		}
+		return "", fmt.Errorf("failed to parse elf file %s: %w", filename, err)
+	}
+	defer f.Close()
+
+	buildIDNote := f.Section(".note.gnu.build-id")
+	if buildIDNote == nil {
+		if allowMissing {
+			return "", nil
+		}
+		return "", fmt.Errorf("failed to find .note.gnu.build-id in %s", filename)
+	}
+
+	buildIDs, err := readNote(buildIDNote.Open(), f.ByteOrder)
+	if err != nil {
+		return "", fmt.Errorf("failed to read .note.gnu.build-id: %w", err)
+	}
+
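+	// A note section may contain several notes; return the hex-encoded descriptor of the GNU build ID note.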
+	for name, desc := range buildIDs {
+		if name == gnuBuildID {
+			return hex.EncodeToString(desc), nil
+		}
+	}
+
+	return "", nil
+}
+
+// readNote reads the contents of a note section, returning it as a map from name to descriptor.
+func readNote(note io.Reader, byteOrder binary.ByteOrder) (map[string][]byte, error) {
+	var noteHeader struct {
+		Namesz uint32
+		Descsz uint32
+		Type   uint32
+	}
+
+	notes := make(map[string][]byte)
+	for {
+		err := binary.Read(note, byteOrder, &noteHeader)
+		if err != nil {
+			if err == io.EOF {
+				return notes, nil
+			}
+			return nil, fmt.Errorf("failed to read note header: %w", err)
+		}
+
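+		// The note name and descriptor are each padded to a 4-byte boundary.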
+		nameBuf := make([]byte, align4(noteHeader.Namesz))
+		err = binary.Read(note, byteOrder, &nameBuf)
+		if err != nil {
+			return nil, fmt.Errorf("failed to read note name: %w", err)
+		}
+		name := string(nameBuf[:noteHeader.Namesz])
+
+		descBuf := make([]byte, align4(noteHeader.Descsz))
+		err = binary.Read(note, byteOrder, &descBuf)
+		if err != nil {
+			return nil, fmt.Errorf("failed to read note desc: %w", err)
+		}
+		notes[name] = descBuf[:noteHeader.Descsz]
+	}
+}
+
+// align4 rounds the input up to the next multiple of 4.
+func align4(i uint32) uint32 {
+	return (i + 3) &^ 3
+}
diff --git a/cmd/symbols_map/elf_test.go b/cmd/symbols_map/elf_test.go
new file mode 100644
index 0000000..a94c87f
--- /dev/null
+++ b/cmd/symbols_map/elf_test.go
@@ -0,0 +1,152 @@
+// Copyright 2022 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bytes"
+	"debug/elf"
+	"encoding/binary"
+	"reflect"
+	"testing"
+)
+
+func Test_elfIdentifierFromReaderAt_BadElfFile(t *testing.T) {
+	tests := []struct {
+		name     string
+		contents string
+	}{
+		{
+			name:     "empty",
+			contents: "",
+		},
+		{
+			name:     "text",
+			contents: "#!/bin/bash\necho foobar",
+		},
+		{
+			name:     "empty elf",
+			contents: emptyElfFile(),
+		},
+		{
+			name:     "short section header",
+			contents: shortSectionHeaderElfFile(),
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			buf := bytes.NewReader([]byte(tt.contents))
+			_, err := elfIdentifierFromReaderAt(buf, "<>", false)
+			if err == nil {
+				t.Errorf("expected error reading bad elf file without allowMissing")
+			}
+			_, err = elfIdentifierFromReaderAt(buf, "<>", true)
+			if err != nil {
+				t.Errorf("expected no error reading bad elf file with allowMissing, got %q", err.Error())
+			}
+		})
+	}
+}
+
+func Test_readNote(t *testing.T) {
+	note := []byte{
+		0x04, 0x00, 0x00, 0x00,
+		0x10, 0x00, 0x00, 0x00,
+		0x03, 0x00, 0x00, 0x00,
+		0x47, 0x4e, 0x55, 0x00,
+		0xca, 0xaf, 0x44, 0xd2, 0x82, 0x78, 0x68, 0xfe, 0xc0, 0x90, 0xa3, 0x43, 0x85, 0x36, 0x6c, 0xc7,
+	}
+
+	descs, err := readNote(bytes.NewBuffer(note), binary.LittleEndian)
+	if err != nil {
+		t.Fatalf("unexpected error in readNote: %s", err)
+	}
+
+	expectedDescs := map[string][]byte{
+		"GNU\x00": []byte{0xca, 0xaf, 0x44, 0xd2, 0x82, 0x78, 0x68, 0xfe, 0xc0, 0x90, 0xa3, 0x43, 0x85, 0x36, 0x6c, 0xc7},
+	}
+
+	if !reflect.DeepEqual(descs, expectedDescs) {
+		t.Errorf("incorrect return, want %#v got %#v", expectedDescs, descs)
+	}
+}
+
+// emptyElfFile returns an elf file header with no program headers or sections.
+func emptyElfFile() string {
+	ident := [elf.EI_NIDENT]byte{}
+	identBuf := bytes.NewBuffer(ident[0:0:elf.EI_NIDENT])
+	binary.Write(identBuf, binary.LittleEndian, []byte("\x7fELF"))
+	binary.Write(identBuf, binary.LittleEndian, elf.ELFCLASS64)
+	binary.Write(identBuf, binary.LittleEndian, elf.ELFDATA2LSB)
+	binary.Write(identBuf, binary.LittleEndian, elf.EV_CURRENT)
+	binary.Write(identBuf, binary.LittleEndian, elf.ELFOSABI_LINUX)
+	binary.Write(identBuf, binary.LittleEndian, make([]byte, 8))
+
+	header := elf.Header64{
+		Ident:     ident,
+		Type:      uint16(elf.ET_EXEC),
+		Machine:   uint16(elf.EM_X86_64),
+		Version:   uint32(elf.EV_CURRENT),
+		Entry:     0,
+		Phoff:     uint64(binary.Size(elf.Header64{})),
+		Shoff:     uint64(binary.Size(elf.Header64{})),
+		Flags:     0,
+		Ehsize:    uint16(binary.Size(elf.Header64{})),
+		Phentsize: 0x38,
+		Phnum:     0,
+		Shentsize: 0x40,
+		Shnum:     0,
+		Shstrndx:  0,
+	}
+
+	buf := &bytes.Buffer{}
+	binary.Write(buf, binary.LittleEndian, header)
+	return buf.String()
+}
+
+// shortSectionHeaderElfFile returns an elf file header with a section header that extends past the end of
+// the file.
+func shortSectionHeaderElfFile() string {
+	ident := [elf.EI_NIDENT]byte{}
+	identBuf := bytes.NewBuffer(ident[0:0:elf.EI_NIDENT])
+	binary.Write(identBuf, binary.LittleEndian, []byte("\x7fELF"))
+	binary.Write(identBuf, binary.LittleEndian, elf.ELFCLASS64)
+	binary.Write(identBuf, binary.LittleEndian, elf.ELFDATA2LSB)
+	binary.Write(identBuf, binary.LittleEndian, elf.EV_CURRENT)
+	binary.Write(identBuf, binary.LittleEndian, elf.ELFOSABI_LINUX)
+	binary.Write(identBuf, binary.LittleEndian, make([]byte, 8))
+
+	header := elf.Header64{
+		Ident:     ident,
+		Type:      uint16(elf.ET_EXEC),
+		Machine:   uint16(elf.EM_X86_64),
+		Version:   uint32(elf.EV_CURRENT),
+		Entry:     0,
+		Phoff:     uint64(binary.Size(elf.Header64{})),
+		Shoff:     uint64(binary.Size(elf.Header64{})),
+		Flags:     0,
+		Ehsize:    uint16(binary.Size(elf.Header64{})),
+		Phentsize: 0x38,
+		Phnum:     0,
+		Shentsize: 0x40,
+		Shnum:     1,
+		Shstrndx:  0,
+	}
+
+	buf := &bytes.Buffer{}
+	binary.Write(buf, binary.LittleEndian, header)
+	binary.Write(buf, binary.LittleEndian, []byte{0})
+	return buf.String()
+}
diff --git a/cmd/symbols_map/r8.go b/cmd/symbols_map/r8.go
new file mode 100644
index 0000000..6f73e09
--- /dev/null
+++ b/cmd/symbols_map/r8.go
@@ -0,0 +1,56 @@
+// Copyright 2022 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bufio"
+	"fmt"
+	"io"
+	"os"
+	"strings"
+)
+
+const hashPrefix = "# pg_map_hash: "
+const hashTypePrefix = "SHA-256 "
+const commentPrefix = "#"
+
+// r8Identifier extracts the hash from the comments of a dictionary produced by R8. It returns
+// an empty identifier if no matching comment was found before the first non-comment line.
+func r8Identifier(filename string) (string, error) {
+	f, err := os.Open(filename)
+	if err != nil {
+		return "", fmt.Errorf("failed to open %s: %w", filename, err)
+	}
+	defer f.Close()
+
+	return extractR8CompilerHash(f)
+}
+
+func extractR8CompilerHash(r io.Reader) (string, error) {
+	s := bufio.NewScanner(r)
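+	// Only the leading comment block is scanned; the hash line has the form "# pg_map_hash: SHA-256 <hex>".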
+	for s.Scan() {
+		line := s.Text()
+		if strings.HasPrefix(line, hashPrefix) {
+			hash := strings.TrimPrefix(line, hashPrefix)
+			if !strings.HasPrefix(hash, hashTypePrefix) {
+				return "", fmt.Errorf("invalid hash type found in %q", line)
+			}
+			return strings.TrimPrefix(hash, hashTypePrefix), nil
+		} else if !strings.HasPrefix(line, commentPrefix) {
+			break
+		}
+	}
+	return "", nil
+}
diff --git a/cmd/symbols_map/r8_test.go b/cmd/symbols_map/r8_test.go
new file mode 100644
index 0000000..5712da9
--- /dev/null
+++ b/cmd/symbols_map/r8_test.go
@@ -0,0 +1,91 @@
+// Copyright 2022 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bytes"
+	"strings"
+	"testing"
+)
+
+func Test_extractR8CompilerHash(t *testing.T) {
+	testCases := []struct {
+		name string
+		data string
+
+		hash string
+		err  string
+	}{
+		{
+			name: "simple",
+			data: `# compiler: R8
+# compiler_version: 3.3.18-dev
+# min_api: 10000
+# compiler_hash: bab44c1a04a2201b55fe10394f477994205c34e0
+# common_typos_disable
+# {"id":"com.android.tools.r8.mapping","version":"2.0"}
+# pg_map_id: 7fe8b95
+# pg_map_hash: SHA-256 7fe8b95ae71f179f63d2a585356fb9cf2c8fb94df9c9dd50621ffa6d9e9e88da
+android.car.userlib.UserHelper -> android.car.userlib.UserHelper:
+`,
+			hash: "7fe8b95ae71f179f63d2a585356fb9cf2c8fb94df9c9dd50621ffa6d9e9e88da",
+		},
+		{
+			name: "empty",
+			data: ``,
+			hash: "",
+		},
+		{
+			name: "non comment line",
+			data: `# compiler: R8
+# compiler_version: 3.3.18-dev
+# min_api: 10000
+# compiler_hash: bab44c1a04a2201b55fe10394f477994205c34e0
+# common_typos_disable
+# {"id":"com.android.tools.r8.mapping","version":"2.0"}
+# pg_map_id: 7fe8b95
+android.car.userlib.UserHelper -> android.car.userlib.UserHelper:
+# pg_map_hash: SHA-256 7fe8b95ae71f179f63d2a585356fb9cf2c8fb94df9c9dd50621ffa6d9e9e88da
+`,
+			hash: "",
+		},
+		{
+			name: "invalid hash",
+			data: `# pg_map_hash: foobar 7fe8b95ae71f179f63d2a585356fb9cf2c8fb94df9c9dd50621ffa6d9e9e88da`,
+			err:  "invalid hash type",
+		},
+	}
+
+	for _, tt := range testCases {
+		t.Run(tt.name, func(t *testing.T) {
+			hash, err := extractR8CompilerHash(bytes.NewBufferString(tt.data))
+			if err != nil {
+				if tt.err != "" {
+					if !strings.Contains(err.Error(), tt.err) {
+						t.Fatalf("incorrect error in extractR8CompilerHash, want %s got %s", tt.err, err)
+					}
+				} else {
+					t.Fatalf("unexpected error in extractR8CompilerHash: %s", err)
+				}
+			} else if tt.err != "" {
+				t.Fatalf("missing error in extractR8CompilerHash, want %s", tt.err)
+			}
+
+			if g, w := hash, tt.hash; g != w {
+				t.Errorf("incorrect hash, want %q got %q", w, g)
+			}
+		})
+	}
+}
diff --git a/cmd/symbols_map/symbols_map.go b/cmd/symbols_map/symbols_map.go
new file mode 100644
index 0000000..938446d
--- /dev/null
+++ b/cmd/symbols_map/symbols_map.go
@@ -0,0 +1,202 @@
+// Copyright 2022 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"flag"
+	"fmt"
+	"io/ioutil"
+	"os"
+	"strings"
+
+	"android/soong/cmd/symbols_map/symbols_map_proto"
+	"android/soong/response"
+
+	"github.com/google/blueprint/pathtools"
+	"google.golang.org/protobuf/encoding/prototext"
+	"google.golang.org/protobuf/proto"
+)
+
+// This tool is used to extract a hash from an elf file or an r8 dictionary and store it as a
+// textproto, or to merge multiple textprotos together.
+
+func main() {
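+	// Expand any @rspfile arguments into the argument list before parsing flags.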
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	// Hide the flag package to prevent accidental references to flag instead of flags.
+	flag := struct{}{}
+	_ = flag
+
+	flags.Usage = func() {
+		fmt.Fprintf(flags.Output(), "Usage of %s:\n", os.Args[0])
+		fmt.Fprintf(flags.Output(), "  %s -elf|-r8 <input file> [-write_if_changed] <output file>\n", os.Args[0])
+		fmt.Fprintf(flags.Output(), "  %s -merge <output file> [-write_if_changed] [-ignore_missing_files] [-strip_prefix <prefix>] [<input file>...]\n", os.Args[0])
+		fmt.Fprintln(flags.Output())
+
+		flags.PrintDefaults()
+	}
+
+	elfFile := flags.String("elf", "", "extract identifier from an elf file")
+	r8File := flags.String("r8", "", "extract identifier from an r8 dictionary")
+	merge := flags.String("merge", "", "merge multiple identifier protos")
+
+	writeIfChanged := flags.Bool("write_if_changed", false, "only write output file if it is modified")
+	ignoreMissingFiles := flags.Bool("ignore_missing_files", false, "ignore missing input files in merge mode")
+	stripPrefix := flags.String("strip_prefix", "", "prefix to strip off of the location field in merge mode")
+
+	flags.Parse(expandedArgs)
+
+	if *merge != "" {
+		// If merge mode was requested, perform the merge and exit early.
+		err := mergeProtos(*merge, flags.Args(), *stripPrefix, *writeIfChanged, *ignoreMissingFiles)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "failed to merge protos: %s", err)
+			os.Exit(1)
+		}
+		os.Exit(0)
+	}
+
+	if *elfFile == "" && *r8File == "" {
+		fmt.Fprintf(os.Stderr, "-elf or -r8 argument is required\n")
+		flags.Usage()
+		os.Exit(1)
+	}
+
+	if *elfFile != "" && *r8File != "" {
+		fmt.Fprintf(os.Stderr, "only one of -elf or -r8 argument is allowed\n")
+		flags.Usage()
+		os.Exit(1)
+	}
+
+	if flags.NArg() != 1 {
+		flags.Usage()
+		os.Exit(1)
+	}
+
+	output := flags.Arg(0)
+
+	var identifier string
+	var location string
+	var typ symbols_map_proto.Mapping_Type
+	var err error
+
+	if *elfFile != "" {
+		typ = symbols_map_proto.Mapping_ELF
+		location = *elfFile
+		identifier, err = elfIdentifier(*elfFile, true)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "error reading elf identifier: %s\n", err)
+			os.Exit(1)
+		}
+	} else if *r8File != "" {
+		typ = symbols_map_proto.Mapping_R8
+		identifier, err = r8Identifier(*r8File)
+		location = *r8File
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "error reading r8 identifier: %s\n", err)
+			os.Exit(1)
+		}
+	} else {
+		panic("shouldn't get here")
+	}
+
+	mapping := symbols_map_proto.Mapping{
+		Identifier: proto.String(identifier),
+		Location:   proto.String(location),
+		Type:       typ.Enum(),
+	}
+
+	err = writeTextProto(output, &mapping, *writeIfChanged)
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "error writing output: %s\n", err)
+		os.Exit(1)
+	}
+}
+
+// writeTextProto writes a proto to an output file as a textproto, optionally leaving the file
+// unmodified if it was already up to date.
+func writeTextProto(output string, message proto.Message, writeIfChanged bool) error {
+	marshaller := prototext.MarshalOptions{Multiline: true}
+	data, err := marshaller.Marshal(message)
+	if err != nil {
+		return fmt.Errorf("error marshalling textproto: %w", err)
+	}
+
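+	// pathtools.WriteFileIfChanged is expected to leave the file untouched when the contents are unchanged, preserving its timestamp.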
+	if writeIfChanged {
+		err = pathtools.WriteFileIfChanged(output, data, 0666)
+	} else {
+		err = ioutil.WriteFile(output, data, 0666)
+	}
+
+	if err != nil {
+		return fmt.Errorf("error writing to %s: %w\n", output, err)
+	}
+
+	return nil
+}
+
+// mergeProtos merges a list of textproto files containing Mapping messages into a single textproto
+// containing a Mappings message.
+func mergeProtos(output string, inputs []string, stripPrefix string, writeIfChanged bool, ignoreMissingFiles bool) error {
+	mappings := symbols_map_proto.Mappings{}
+	for _, input := range inputs {
+		mapping := symbols_map_proto.Mapping{}
+		data, err := ioutil.ReadFile(input)
+		if err != nil {
+			if ignoreMissingFiles && os.IsNotExist(err) {
+				// Merge mode is used on a list of files in the packaging directory.  If multiple
+				// goals are included on the build command line, for example `dist` and `tests`,
+				// then the symbols packaging rule for `dist` can run while a dependency of `tests`
+				// is modifying the symbols packaging directory.  That can result in a file that
+				// existed when the file list was generated being deleted as part of updating it,
+				// resulting in sporadic ENOENT errors.  Ignore them if -ignore_missing_files
+				// was passed on the command line.
+				continue
+			}
+			return fmt.Errorf("failed to read %s: %w", input, err)
+		}
+		err = prototext.Unmarshal(data, &mapping)
+		if err != nil {
+			return fmt.Errorf("failed to parse textproto %s: %w", input, err)
+		}
+		if stripPrefix != "" && mapping.Location != nil {
+			mapping.Location = proto.String(strings.TrimPrefix(*mapping.Location, stripPrefix))
+		}
+		mappings.Mappings = append(mappings.Mappings, &mapping)
+	}
+
+	return writeTextProto(output, &mappings, writeIfChanged)
+}
diff --git a/cmd/symbols_map/symbols_map_proto/symbols_map.pb.go b/cmd/symbols_map/symbols_map_proto/symbols_map.pb.go
new file mode 100644
index 0000000..f9c0ce5
--- /dev/null
+++ b/cmd/symbols_map/symbols_map_proto/symbols_map.pb.go
@@ -0,0 +1,315 @@
+// Copyright 2022 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// versions:
+// 	protoc-gen-go v1.27.1
+// 	protoc        v3.9.1
+// source: symbols_map.proto
+
+package symbols_map_proto
+
+import (
+	protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+	protoimpl "google.golang.org/protobuf/runtime/protoimpl"
+	reflect "reflect"
+	sync "sync"
+)
+
+const (
+	// Verify that this generated code is sufficiently up-to-date.
+	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
+	// Verify that runtime/protoimpl is sufficiently up-to-date.
+	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
+)
+
+// Type is the valid types of a mapping.
+type Mapping_Type int32
+
+const (
+	// ELF denotes a mapping from an elf build ID to an unstripped elf file.
+	Mapping_ELF Mapping_Type = 0
+	// R8 denotes a mapping from an R8 dictionary hash to an R8 dictionary.
+	Mapping_R8 Mapping_Type = 1
+)
+
+// Enum value maps for Mapping_Type.
+var (
+	Mapping_Type_name = map[int32]string{
+		0: "ELF",
+		1: "R8",
+	}
+	Mapping_Type_value = map[string]int32{
+		"ELF": 0,
+		"R8":  1,
+	}
+)
+
+func (x Mapping_Type) Enum() *Mapping_Type {
+	p := new(Mapping_Type)
+	*p = x
+	return p
+}
+
+func (x Mapping_Type) String() string {
+	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
+}
+
+func (Mapping_Type) Descriptor() protoreflect.EnumDescriptor {
+	return file_symbols_map_proto_enumTypes[0].Descriptor()
+}
+
+func (Mapping_Type) Type() protoreflect.EnumType {
+	return &file_symbols_map_proto_enumTypes[0]
+}
+
+func (x Mapping_Type) Number() protoreflect.EnumNumber {
+	return protoreflect.EnumNumber(x)
+}
+
+// Deprecated: Do not use.
+func (x *Mapping_Type) UnmarshalJSON(b []byte) error {
+	num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)
+	if err != nil {
+		return err
+	}
+	*x = Mapping_Type(num)
+	return nil
+}
+
+// Deprecated: Use Mapping_Type.Descriptor instead.
+func (Mapping_Type) EnumDescriptor() ([]byte, []int) {
+	return file_symbols_map_proto_rawDescGZIP(), []int{0, 0}
+}
+
+type Mapping struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// identifier is a unique identifier of a location, generally the hash of the file.  For an
+	// elf file it is the elf build ID, for an R8 dictionary it is the hash from the comments in the
+	// top of the file.  It may be empty if no hash could be extracted from the file.
+	Identifier *string `protobuf:"bytes,1,opt,name=identifier" json:"identifier,omitempty"`
+	// location is the path to the file with the given identifier.  The location should be valid
+	// both on the local disk and in the distributed symbols.zip or proguard_dict.zip files.
+	Location *string `protobuf:"bytes,2,opt,name=location" json:"location,omitempty"`
+	// type is the type of the mapping, either ELF or R8.
+	Type *Mapping_Type `protobuf:"varint,3,opt,name=type,enum=symbols_map.Mapping_Type" json:"type,omitempty"`
+}
+
+func (x *Mapping) Reset() {
+	*x = Mapping{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_symbols_map_proto_msgTypes[0]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *Mapping) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*Mapping) ProtoMessage() {}
+
+func (x *Mapping) ProtoReflect() protoreflect.Message {
+	mi := &file_symbols_map_proto_msgTypes[0]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use Mapping.ProtoReflect.Descriptor instead.
+func (*Mapping) Descriptor() ([]byte, []int) {
+	return file_symbols_map_proto_rawDescGZIP(), []int{0}
+}
+
+func (x *Mapping) GetIdentifier() string {
+	if x != nil && x.Identifier != nil {
+		return *x.Identifier
+	}
+	return ""
+}
+
+func (x *Mapping) GetLocation() string {
+	if x != nil && x.Location != nil {
+		return *x.Location
+	}
+	return ""
+}
+
+func (x *Mapping) GetType() Mapping_Type {
+	if x != nil && x.Type != nil {
+		return *x.Type
+	}
+	return Mapping_ELF
+}
+
+type Mappings struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	Mappings []*Mapping `protobuf:"bytes,4,rep,name=mappings" json:"mappings,omitempty"`
+}
+
+func (x *Mappings) Reset() {
+	*x = Mappings{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_symbols_map_proto_msgTypes[1]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *Mappings) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*Mappings) ProtoMessage() {}
+
+func (x *Mappings) ProtoReflect() protoreflect.Message {
+	mi := &file_symbols_map_proto_msgTypes[1]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use Mappings.ProtoReflect.Descriptor instead.
+func (*Mappings) Descriptor() ([]byte, []int) {
+	return file_symbols_map_proto_rawDescGZIP(), []int{1}
+}
+
+func (x *Mappings) GetMappings() []*Mapping {
+	if x != nil {
+		return x.Mappings
+	}
+	return nil
+}
+
+var File_symbols_map_proto protoreflect.FileDescriptor
+
+var file_symbols_map_proto_rawDesc = []byte{
+	0x0a, 0x11, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x73, 0x5f, 0x6d, 0x61, 0x70, 0x2e, 0x70, 0x72,
+	0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x73, 0x5f, 0x6d, 0x61, 0x70,
+	0x22, 0x8d, 0x01, 0x0a, 0x07, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x12, 0x1e, 0x0a, 0x0a,
+	0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
+	0x52, 0x0a, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x1a, 0x0a, 0x08,
+	0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08,
+	0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x2d, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65,
+	0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x19, 0x2e, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x73,
+	0x5f, 0x6d, 0x61, 0x70, 0x2e, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x2e, 0x54, 0x79, 0x70,
+	0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x17, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12,
+	0x07, 0x0a, 0x03, 0x45, 0x4c, 0x46, 0x10, 0x00, 0x12, 0x06, 0x0a, 0x02, 0x52, 0x38, 0x10, 0x01,
+	0x22, 0x3c, 0x0a, 0x08, 0x4d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x30, 0x0a, 0x08,
+	0x6d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14,
+	0x2e, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x73, 0x5f, 0x6d, 0x61, 0x70, 0x2e, 0x4d, 0x61, 0x70,
+	0x70, 0x69, 0x6e, 0x67, 0x52, 0x08, 0x6d, 0x61, 0x70, 0x70, 0x69, 0x6e, 0x67, 0x73, 0x42, 0x31,
+	0x5a, 0x2f, 0x61, 0x6e, 0x64, 0x72, 0x6f, 0x69, 0x64, 0x2f, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x2f,
+	0x63, 0x6d, 0x64, 0x2f, 0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x73, 0x5f, 0x6d, 0x61, 0x70, 0x2f,
+	0x73, 0x79, 0x6d, 0x62, 0x6f, 0x6c, 0x73, 0x5f, 0x6d, 0x61, 0x70, 0x5f, 0x70, 0x72, 0x6f, 0x74,
+	0x6f,
+}
+
+var (
+	file_symbols_map_proto_rawDescOnce sync.Once
+	file_symbols_map_proto_rawDescData = file_symbols_map_proto_rawDesc
+)
+
+func file_symbols_map_proto_rawDescGZIP() []byte {
+	file_symbols_map_proto_rawDescOnce.Do(func() {
+		file_symbols_map_proto_rawDescData = protoimpl.X.CompressGZIP(file_symbols_map_proto_rawDescData)
+	})
+	return file_symbols_map_proto_rawDescData
+}
+
+var file_symbols_map_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
+var file_symbols_map_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
+var file_symbols_map_proto_goTypes = []interface{}{
+	(Mapping_Type)(0), // 0: symbols_map.Mapping.Type
+	(*Mapping)(nil),   // 1: symbols_map.Mapping
+	(*Mappings)(nil),  // 2: symbols_map.Mappings
+}
+var file_symbols_map_proto_depIdxs = []int32{
+	0, // 0: symbols_map.Mapping.type:type_name -> symbols_map.Mapping.Type
+	1, // 1: symbols_map.Mappings.mappings:type_name -> symbols_map.Mapping
+	2, // [2:2] is the sub-list for method output_type
+	2, // [2:2] is the sub-list for method input_type
+	2, // [2:2] is the sub-list for extension type_name
+	2, // [2:2] is the sub-list for extension extendee
+	0, // [0:2] is the sub-list for field type_name
+}
+
+func init() { file_symbols_map_proto_init() }
+func file_symbols_map_proto_init() {
+	if File_symbols_map_proto != nil {
+		return
+	}
+	if !protoimpl.UnsafeEnabled {
+		file_symbols_map_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*Mapping); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+		file_symbols_map_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*Mappings); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+	}
+	type x struct{}
+	out := protoimpl.TypeBuilder{
+		File: protoimpl.DescBuilder{
+			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
+			RawDescriptor: file_symbols_map_proto_rawDesc,
+			NumEnums:      1,
+			NumMessages:   2,
+			NumExtensions: 0,
+			NumServices:   0,
+		},
+		GoTypes:           file_symbols_map_proto_goTypes,
+		DependencyIndexes: file_symbols_map_proto_depIdxs,
+		EnumInfos:         file_symbols_map_proto_enumTypes,
+		MessageInfos:      file_symbols_map_proto_msgTypes,
+	}.Build()
+	File_symbols_map_proto = out.File
+	file_symbols_map_proto_rawDesc = nil
+	file_symbols_map_proto_goTypes = nil
+	file_symbols_map_proto_depIdxs = nil
+}
diff --git a/cmd/symbols_map/symbols_map_proto/symbols_map.proto b/cmd/symbols_map/symbols_map_proto/symbols_map.proto
new file mode 100644
index 0000000..693fe3e
--- /dev/null
+++ b/cmd/symbols_map/symbols_map_proto/symbols_map.proto
@@ -0,0 +1,44 @@
+// Copyright 2022 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto2";
+
+package symbols_map;
+option go_package = "android/soong/cmd/symbols_map/symbols_map_proto";
+
+message Mapping {
+  // identifier is a unique identifier of a location, generally the hash of the file.  For an
+  // elf file it is the elf build ID, for an R8 dictionary it is the hash from the comments in the
+  // top of the file.  It may be empty if no hash could be extracted from the file.
+  optional string identifier = 1;
+
+  // location is the path to the file with the given identifier.  The location should be valid
+  // both on the local disk and in the distributed symbols.zip or proguard_dict.zip files.
+  optional string location = 2;
+
+  // Type is the valid types of a mapping.
+  enum Type {
+    // ELF denotes a mapping from an elf build ID to an unstripped elf file.
+    ELF = 0;
+    // R8 denotes a mapping from an R8 dictionary hash to an R8 dictionary.
+    R8 = 1;
+  }
+
+  // type is the type of the mapping, either ELF or R8.
+  optional Type type = 3;
+}
+
+message Mappings {
+  repeated Mapping mappings = 4;
+}
\ No newline at end of file
diff --git a/cmd/symbols_map/symbols_map_test.go b/cmd/symbols_map/symbols_map_test.go
new file mode 100644
index 0000000..754b7ef
--- /dev/null
+++ b/cmd/symbols_map/symbols_map_test.go
@@ -0,0 +1,217 @@
+// Copyright 2022 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"io/ioutil"
+	"os"
+	"path/filepath"
+	"strings"
+	"testing"
+
+	"android/soong/cmd/symbols_map/symbols_map_proto"
+
+	"google.golang.org/protobuf/encoding/prototext"
+	"google.golang.org/protobuf/proto"
+)
+
+func Test_mergeProtos(t *testing.T) {
+	type testFile struct {
+		filename string
+		contents *symbols_map_proto.Mapping
+		missing  bool
+	}
+
+	tests := []struct {
+		name               string
+		inputs             []testFile
+		stripPrefix        string
+		writeIfChanged     bool
+		ignoreMissingFiles bool
+
+		error  string
+		output *symbols_map_proto.Mappings
+	}{
+		{
+			name:   "empty",
+			output: &symbols_map_proto.Mappings{},
+		},
+		{
+			name: "merge",
+			inputs: []testFile{
+				{
+					filename: "foo",
+					contents: &symbols_map_proto.Mapping{
+						Identifier: proto.String("foo"),
+						Location:   proto.String("symbols/foo"),
+						Type:       symbols_map_proto.Mapping_ELF.Enum(),
+					},
+				},
+				{
+					filename: "bar",
+					contents: &symbols_map_proto.Mapping{
+						Identifier: proto.String("bar"),
+						Location:   proto.String("symbols/bar"),
+						Type:       symbols_map_proto.Mapping_R8.Enum(),
+					},
+				},
+			},
+			output: &symbols_map_proto.Mappings{
+				Mappings: []*symbols_map_proto.Mapping{
+					{
+						Identifier: proto.String("foo"),
+						Location:   proto.String("symbols/foo"),
+						Type:       symbols_map_proto.Mapping_ELF.Enum(),
+					},
+					{
+						Identifier: proto.String("bar"),
+						Location:   proto.String("symbols/bar"),
+						Type:       symbols_map_proto.Mapping_R8.Enum(),
+					},
+				},
+			},
+		},
+		{
+			name: "strip prefix",
+			inputs: []testFile{
+				{
+					filename: "foo",
+					contents: &symbols_map_proto.Mapping{
+						Identifier: proto.String("foo"),
+						Location:   proto.String("symbols/foo"),
+						Type:       symbols_map_proto.Mapping_ELF.Enum(),
+					},
+				},
+				{
+					filename: "bar",
+					contents: &symbols_map_proto.Mapping{
+						Identifier: proto.String("bar"),
+						Location:   proto.String("symbols/bar"),
+						Type:       symbols_map_proto.Mapping_R8.Enum(),
+					},
+				},
+			},
+			stripPrefix: "symbols/",
+			output: &symbols_map_proto.Mappings{
+				Mappings: []*symbols_map_proto.Mapping{
+					{
+						Identifier: proto.String("foo"),
+						Location:   proto.String("foo"),
+						Type:       symbols_map_proto.Mapping_ELF.Enum(),
+					},
+					{
+						Identifier: proto.String("bar"),
+						Location:   proto.String("bar"),
+						Type:       symbols_map_proto.Mapping_R8.Enum(),
+					},
+				},
+			},
+		},
+		{
+			name: "missing",
+			inputs: []testFile{
+				{
+					filename: "foo",
+					contents: &symbols_map_proto.Mapping{
+						Identifier: proto.String("foo"),
+						Location:   proto.String("symbols/foo"),
+						Type:       symbols_map_proto.Mapping_ELF.Enum(),
+					},
+				},
+				{
+					filename: "bar",
+					missing:  true,
+				},
+			},
+			error: "no such file or directory",
+		},
+		{
+			name: "ignore missing",
+			inputs: []testFile{
+				{
+					filename: "foo",
+					contents: &symbols_map_proto.Mapping{
+						Identifier: proto.String("foo"),
+						Location:   proto.String("symbols/foo"),
+						Type:       symbols_map_proto.Mapping_ELF.Enum(),
+					},
+				},
+				{
+					filename: "bar",
+					missing:  true,
+				},
+			},
+			ignoreMissingFiles: true,
+			output: &symbols_map_proto.Mappings{
+				Mappings: []*symbols_map_proto.Mapping{
+					{
+						Identifier: proto.String("foo"),
+						Location:   proto.String("symbols/foo"),
+						Type:       symbols_map_proto.Mapping_ELF.Enum(),
+					},
+				},
+			},
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			dir, err := os.MkdirTemp("", "test_mergeProtos")
+			if err != nil {
+				t.Fatalf("failed to create temporary directory: %s", err)
+			}
+			defer os.RemoveAll(dir)
+
+			var inputs []string
+			for _, in := range tt.inputs {
+				path := filepath.Join(dir, in.filename)
+				inputs = append(inputs, path)
+				if !in.missing {
+					err := writeTextProto(path, in.contents, false)
+					if err != nil {
+						t.Fatalf("failed to create input file %s: %s", path, err)
+					}
+				}
+			}
+			output := filepath.Join(dir, "out")
+
+			err = mergeProtos(output, inputs, tt.stripPrefix, tt.writeIfChanged, tt.ignoreMissingFiles)
+			if err != nil {
+				if tt.error != "" {
+					if !strings.Contains(err.Error(), tt.error) {
+						t.Fatalf("expected error %q, got %s", tt.error, err.Error())
+					}
+				} else {
+					t.Fatalf("unexpected error %q", err)
+				}
+			} else if tt.error != "" {
+				t.Fatalf("missing error %q", tt.error)
+			} else {
+				data, err := ioutil.ReadFile(output)
+				if err != nil {
+					t.Fatalf("failed to read output file %s: %s", output, err)
+				}
+				var got symbols_map_proto.Mappings
+				err = prototext.Unmarshal(data, &got)
+				if err != nil {
+					t.Fatalf("failed to unmarshal textproto %s: %s", output, err)
+				}
+
+				if !proto.Equal(tt.output, &got) {
+					t.Fatalf("expected output %q, got %q", tt.output.String(), got.String())
+				}
+			}
+		})
+	}
+}
diff --git a/dexpreopt/DEXPREOPT_IMPLEMENTATION.md b/dexpreopt/DEXPREOPT_IMPLEMENTATION.md
new file mode 100644
index 0000000..c3a1730
--- /dev/null
+++ b/dexpreopt/DEXPREOPT_IMPLEMENTATION.md
@@ -0,0 +1,258 @@
+## Dexpreopt implementation
+
+### Introduction
+
+All dexpreopted Java code falls into three categories:
+
+- bootclasspath
+- system server
+- apps and libraries
+
+Dexpreopt implementation for bootclasspath libraries (boot images) is located in
+[soong/java] (see e.g. [soong/java/dexpreopt_bootjars.go]), and install rules
+are in [make/core/dex_preopt.mk].
+
+Dexpreopt implementation for system server, libraries and apps is located in
+[soong/dexpreopt]. For the rest of this section we focus primarily on it (and
+not boot images).
+
+Dexpreopt implementation is split across the Soong part and the Make part. The
+core logic is in Soong, and Make only generates configs and scripts to pass
+information to Soong.
+
+### Global and module dexpreopt.config
+
+The build system generates a global JSON dexpreopt config that is populated from
+product variables. This is static configuration that is passed to both Soong and
+Make. The `$OUT/soong/dexpreopt.config` file is generated in
+[make/core/dex_preopt_config.mk]. Soong reads it in [soong/dexpreopt/config.go]
+and makes a device-specific copy (this is needed to ensure incremental build
+correctness). The global config contains lists of bootclasspath jars, system
+server jars, dex2oat options, global switches that enable and disable parts of
+dexpreopt and so on.
+
+The build system also generates a module config for each dexpreopted package. It
+contains package-specific configuration that is derived from the global
+configuration and Android.bp or Android.mk module for the package.
+
+Module configs for Make packages are generated in
+[make/core/dex_preopt_odex_install.mk]; they are materialized as per-package
+JSON dexpreopt.config files.
+
+Module configs in Soong are not materialized as dexpreopt.config files and exist
+as Go structures in memory, unless it is necessary to materialize them as a file
+for dependent Make packages or for post-dexpreopting. Module configs are defined
+in [soong/dexpreopt/config.go].
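+
+To make the shape of this configuration concrete, below is a minimal Go sketch
+of reading a global dexpreopt config from its JSON file. The struct and field
+names are illustrative assumptions covering only a tiny subset of the real
+`GlobalConfig` in [soong/dexpreopt/config.go]:
+
+```go
+// Illustrative sketch only: read a global dexpreopt config from JSON.
+// The field set is a small assumed subset of the real GlobalConfig.
+package main
+
+import (
+	"encoding/json"
+	"fmt"
+	"os"
+)
+
+type globalConfig struct {
+	DisablePreopt    bool     // global switch that turns dexpreopt off
+	BootJars         []string // bootclasspath jars compiled into boot images
+	SystemServerJars []string // jars dexpreopted for system server
+	Dex2oatXmx       string   // dex2oat heap size passed through from the product
+}
+
+func main() {
+	data, err := os.ReadFile("out/soong/dexpreopt.config")
+	if err != nil {
+		fmt.Fprintln(os.Stderr, err)
+		os.Exit(1)
+	}
+	var cfg globalConfig
+	if err := json.Unmarshal(data, &cfg); err != nil {
+		fmt.Fprintln(os.Stderr, err)
+		os.Exit(1)
+	}
+	fmt.Printf("%d boot jars, %d system server jars\n",
+		len(cfg.BootJars), len(cfg.SystemServerJars))
+}
+```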
+
+### Dexpreopt in Soong
+
+The Soong implementation of dexpreopt consists roughly of the following steps:
+
+- Read global dexpreopt config passed from Make ([soong/dexpreopt/config.go]).
+
+- Construct a static boot image config ([soong/java/dexpreopt_config.go]).
+
+- During dependency mutator pass, for each suitable module:
+    - add uses-library dependencies (e.g. for apps: [soong/java/app.go:deps])
+
+- During rule generation pass, for each suitable module:
+    - compute transitive uses-library dependency closure
+      ([soong/java/java.go:addCLCFromDep])
+
+    - construct CLC from the dependency closure
+      ([soong/dexpreopt/class_loader_context.go]); a simplified CLC sketch
+      follows this list
+
+    - construct module config with CLC, boot image locations, etc.
+      ([soong/java/dexpreopt.go])
+
+    - generate build rules to verify build-time CLC against the manifest (e.g.
+      for apps: [soong/java/app.go:verifyUsesLibraries])
+
+    - generate dexpreopt build rule ([soong/dexpreopt/dexpreopt.go])
+
+- At the end of rule generation pass:
+    - generate build rules for boot images ([soong/java/dexpreopt_bootjars.go],
+      [soong/java/bootclasspath_fragment.go] and
+      [soong/java/platform_bootclasspath.go])
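+
+The class loader context (CLC) mentioned above is essentially a tree of
+uses-library dependencies. The following self-contained Go sketch models a CLC
+and flattens it into a nested `PCL[...]` string of the kind passed to dex2oat;
+it is a deliberately simplified illustration, not the actual types from
+[soong/dexpreopt/class_loader_context.go]:
+
+```go
+// Simplified, illustrative model of a class loader context (CLC): each
+// uses-library has an on-device dex location and its own nested CLC.
+// The real implementation is in soong/dexpreopt/class_loader_context.go.
+package main
+
+import (
+	"fmt"
+	"strings"
+)
+
+type clcEntry struct {
+	Name     string     // uses-library name
+	Device   string     // on-device dex location
+	Children []clcEntry // nested CLC of this library
+}
+
+// format renders a CLC subtree as a nested "PCL[...]" string, with "#"
+// separating siblings, similar to what dex2oat accepts.
+func format(entries []clcEntry) string {
+	var parts []string
+	for _, e := range entries {
+		s := fmt.Sprintf("PCL[%s]", e.Device)
+		if len(e.Children) > 0 {
+			s += "{" + format(e.Children) + "}"
+		}
+		parts = append(parts, s)
+	}
+	return strings.Join(parts, "#")
+}
+
+func main() {
+	clc := []clcEntry{{
+		Name:   "libA",
+		Device: "/system/framework/libA.jar",
+		Children: []clcEntry{
+			{Name: "libB", Device: "/system/framework/libB.jar"},
+		},
+	}}
+	// Prints: PCL[/system/framework/libA.jar]{PCL[/system/framework/libB.jar]}
+	fmt.Println(format(clc))
+}
+```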
+
+### Dexpreopt in Make - dexpreopt_gen
+
+In order to reuse the same dexpreopt implementation for both Soong and Make
+packages, part of Soong is compiled into a standalone binary dexpreopt_gen. It
+runs during the Ninja stage of the build and generates shell scripts with
+dexpreopt build rules for Make packages, and then executes them.
+
+This setup causes many inconveniences. To name a few:
+
+- Errors in the build rules are only revealed at the late stage of the build.
+
+- These rules are not tested by the presubmit builds that run `m nothing` on
+  many build targets/products.
+
+- It is impossible to find dexpreopt build rules in the generated Ninja files.
+
+However, all these issues are a lesser evil compared to having a duplicate
+dexpreopt implementation in Make. Also note that it would be problematic to
+reimplement the logic in Make anyway, because Android.mk modules are not
+processed in the order of uses-library dependencies and propagating dependency
+information from one module to another would require a similar workaround with
+a script.
+
+Dexpreopt for Make packages involves a few steps:
+
+- At Soong phase (during `m nothing`), see dexpreopt_gen:
+    - generate build rules for dexpreopt_gen binary
+
+- At Make/Kati phase (during `m nothing`), see
+  [make/core/dex_preopt_odex_install.mk]:
+    - generate build rules for module dexpreopt.config
+
+    - generate build rules for merging dependency dexpreopt.config files (see
+      [make/core/dex_preopt_config_merger.py])
+
+    - generate build rules for dexpreopt_gen invocation
+
+    - generate build rules for executing dexpreopt.sh scripts
+
+- At Ninja phase (during `m`):
+    - generate dexpreopt.config files
+
+    - execute dexpreopt_gen rules (generate dexpreopt.sh scripts)
+
+    - execute dexpreopt.sh scripts (this runs the actual dexpreopt)
+
+The Make/Kati phase adds all the necessary dependencies that trigger
+dexpreopt_gen and dexpreopt.sh rules. The real dexpreopt command (dex2oat
+invocation that will be executed to AOT-compile a package) is in the
+dexpreopt.sh script, which is generated close to the end of the build.
+
+### Indirect build rules
+
+The process described above for Make packages involves "indirect build rules",
+i.e. build rules that are generated not at the time when the build system is
+created (which is a small step at the very beginning of the build triggered with
+`m nothing`), but at the time when the actual build is done (`m` phase).
+
+Some build systems, such as Make, allow modifications of the build graph during
+the build. Other build systems, such as Soong, have a clear separation into the
+first "generation phase" (this is when build rules are created) and the second
+"build phase" (this is when the build rules are executed), and they do not allow
+modifications of the dependency graph during the second phase. The Soong
+approach is better from a performance standpoint, because with the Make approach
+there are no guarantees regarding the duration of the build: recursive build
+graph modifications continue until a fixpoint is reached. However, the Soong
+approach is also more restrictive, as it can only generate build rules from the
+information that is passed to the build system via global configuration,
+Android.bp files or encoded in the Go code. Any other information (such as the
+contents of the Java manifest files) is not accessible and cannot be used to
+generate build rules.
+
+Hence the need for the "indirect build rules": during the generation phase only
+stubs of the build rules are generated, and the real rules are generated by the
+stub rules during the build phase (and executed immediately). Note that the
+build system still has to add all the necessary dependencies during the
+generation phase, because it will not be possible to change build order during
+the build phase.
+
+Indirect build rules are used in a couple of places in dexpreopt:
+
+- [soong/scripts/manifest_check.py]: first to extract targetSdkVersion from the
+  manifest, and later to extract `<uses-library/>` tags from the manifest and
+  compare them to the uses-library list known to the build system
+
+- [soong/scripts/construct_context.py]: to trim compatibility libraries in CLC
+
+- [make/core/dex_preopt_config_merger.py]: to merge information from
+  dexpreopt.config files for uses-library dependencies into the dependent's
+  dexpreopt.config file (mostly the CLC)
+
+- autogenerated dexpreopt.sh scripts: to call dexpreopt_gen
+
+### Consistency check - manifest_check.py
+
+Because the information from the manifests has to be duplicated in the
+Android.bp/Android.mk files, there is a danger that it may get out of sync. To
+guard against that, the build system generates a rule that verifies
+uses-libraries: it checks the metadata in the build files against the contents of a
+manifest. The manifest can be available as a source file, or as part of a
+prebuilt APK.
+
+The check is implemented in [soong/scripts/manifest_check.py].
+
+It is possible to turn off the check globally for a product by setting
+`PRODUCT_BROKEN_VERIFY_USES_LIBRARIES := true` in a product makefile, or for a
+particular build by setting `RELAX_USES_LIBRARY_CHECK=true`.
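+
+To illustrate what the check verifies, here is a hedged Go sketch that compares
+the `<uses-library>` entries found in a manifest against the list declared in
+the build files; the helper and its names are assumptions for illustration, not
+the actual implementation in [soong/scripts/manifest_check.py]:
+
+```go
+// Illustrative sketch of the uses-library consistency check: compare the
+// <uses-library> entries from the manifest with the list declared in the
+// build files. The real check is soong/scripts/manifest_check.py.
+package main
+
+import "fmt"
+
+// diff returns the libraries present on one side but not the other.
+func diff(manifest, buildFiles []string) (onlyInManifest, onlyInBuildFiles []string) {
+	inManifest := make(map[string]bool)
+	for _, l := range manifest {
+		inManifest[l] = true
+	}
+	inBuild := make(map[string]bool)
+	for _, l := range buildFiles {
+		inBuild[l] = true
+		if !inManifest[l] {
+			onlyInBuildFiles = append(onlyInBuildFiles, l)
+		}
+	}
+	for _, l := range manifest {
+		if !inBuild[l] {
+			onlyInManifest = append(onlyInManifest, l)
+		}
+	}
+	return
+}
+
+func main() {
+	manifest := []string{"org.apache.http.legacy", "androidx.window.extensions"}
+	buildFiles := []string{"org.apache.http.legacy"}
+	m, b := diff(manifest, buildFiles)
+	fmt.Println("in manifest but not declared in build files:", m)
+	fmt.Println("declared in build files but not in manifest:", b)
+}
+```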
+
+### Compatibility libraries - construct_context.py
+
+Compatibility libraries are libraries that did not exist prior to a certain SDK
+version (say, `N`): their classes used to be part of the bootclasspath jars, and
+in version `N` they were separated into a standalone uses-library.
+Compatibility libraries should only be in the CLC of an app if its
+`targetSdkVersion` in the manifest is less than `N`.
+
+Currently compatibility libraries only affect apps (but not other libraries).
+
+The build system cannot see the `targetSdkVersion` of an app at the time it
+generates dexpreopt build rules, so it doesn't know whether to add compatibility
+libraries to the CLC or not. As a workaround, the build system includes all
+compatibility libraries regardless of the app version, and appends some extra
+logic to the dexpreopt rule that extracts `targetSdkVersion` from the manifest
+and filters the CLC based on that version during the Ninja stage of the build,
+immediately before executing the dexpreopt command (see the
+[soong/scripts/construct_context.py] script).
+
+As of the time of writing (January 2022), there are the following compatibility
+libraries:
+
+- org.apache.http.legacy (SDK 28)
+- android.hidl.base-V1.0-java (SDK 29)
+- android.hidl.manager-V1.0-java (SDK 29)
+- android.test.base (SDK 30)
+- android.test.mock (SDK 30)
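+
+The version-based filtering described above boils down to a comparison per
+compatibility library. The Go sketch below only illustrates the idea; the real
+logic is the Python script [soong/scripts/construct_context.py], and the
+function name and data here are assumptions:
+
+```go
+// Illustrative sketch: a compatibility library introduced in SDK N stays in
+// the CLC only if the app's targetSdkVersion is less than N. The real logic
+// is implemented in soong/scripts/construct_context.py.
+package main
+
+import "fmt"
+
+var compatLibSdk = map[string]int{
+	"org.apache.http.legacy":         28,
+	"android.hidl.base-V1.0-java":    29,
+	"android.hidl.manager-V1.0-java": 29,
+	"android.test.base":              30,
+	"android.test.mock":              30,
+}
+
+func filterCLC(clc []string, targetSdkVersion int) []string {
+	var out []string
+	for _, lib := range clc {
+		if introducedIn, isCompat := compatLibSdk[lib]; isCompat && targetSdkVersion >= introducedIn {
+			// The app already targets SDK >= N, so the compatibility
+			// library must not be added to its CLC.
+			continue
+		}
+		out = append(out, lib)
+	}
+	return out
+}
+
+func main() {
+	clc := []string{"org.apache.http.legacy", "my.uses.library"}
+	fmt.Println(filterCLC(clc, 30)) // [my.uses.library]
+	fmt.Println(filterCLC(clc, 27)) // [org.apache.http.legacy my.uses.library]
+}
+```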
+
+### Manifest fixer
+
+Sometimes uses-library tags are missing from the source manifest of a
+library/app. This may happen for example if one of the transitive dependencies
+of the library/app starts using another uses-library, and the library/app's
+manifest isn't updated to include it.
+
+Soong can automatically compute some of the missing uses-library tags for a
+given library/app: the SDK libraries in the transitive dependency closure of the
+library/app. The closure is needed because a library/app may depend on a static
+library that may in turn depend on an SDK library (possibly transitively via
+another library).
+
+Not all uses-library tags can be computed in this way, because some of the
+uses-library dependencies are not SDK libraries, or they are not reachable via
+transitive dependency closure. But when possible, allowing Soong to calculate
+the manifest entries is less prone to errors and simplifies maintenance. For
+example, consider a situation when many apps use some static library that adds a
+new uses-library dependency -- all the apps will have to be updated. That is
+difficult to maintain.
+
+There is also a manifest merger, because sometimes the final manifest of an app
+is merged from a few dependency manifests, so the final manifest installed on
+devices contains a superset of uses-library tags of the source manifest of the
+app.
+
+
+[make/core/dex_preopt.mk]: https://cs.android.com/android/platform/superproject/+/master:build/make/core/dex_preopt.mk
+[make/core/dex_preopt_config.mk]: https://cs.android.com/android/platform/superproject/+/master:build/make/core/dex_preopt_config.mk
+[make/core/dex_preopt_config_merger.py]: https://cs.android.com/android/platform/superproject/+/master:build/make/core/dex_preopt_config_merger.py
+[make/core/dex_preopt_odex_install.mk]: https://cs.android.com/android/platform/superproject/+/master:build/make/core/dex_preopt_odex_install.mk
+[soong/dexpreopt]: https://cs.android.com/android/platform/superproject/+/master:build/soong/dexpreopt
+[soong/dexpreopt/class_loader_context.go]: https://cs.android.com/android/platform/superproject/+/master:build/soong/dexpreopt/class_loader_context.go
+[soong/dexpreopt/config.go]: https://cs.android.com/android/platform/superproject/+/master:build/soong/dexpreopt/config.go
+[soong/dexpreopt/dexpreopt.go]: https://cs.android.com/android/platform/superproject/+/master:build/soong/dexpreopt/dexpreopt.go
+[soong/java]: https://cs.android.com/android/platform/superproject/+/master:build/soong/java
+[soong/java/app.go:deps]: https://cs.android.com/android/platform/superproject/+/master:build/soong/java/app.go?q=%22func%20\(u%20*usesLibrary\)%20deps%22
+[soong/java/app.go:verifyUsesLibraries]: https://cs.android.com/android/platform/superproject/+/master:build/soong/java/app.go?q=%22func%20\(u%20*usesLibrary\)%20verifyUsesLibraries%22
+[soong/java/bootclasspath_fragment.go]: https://cs.android.com/android/platform/superproject/+/master:build/soong/java/bootclasspath_fragment.go
+[soong/java/dexpreopt.go]: https://cs.android.com/android/platform/superproject/+/master:build/soong/java/dexpreopt.go
+[soong/java/dexpreopt_bootjars.go]: https://cs.android.com/android/platform/superproject/+/master:build/soong/java/dexpreopt_bootjars.go
+[soong/java/dexpreopt_config.go]: https://cs.android.com/android/platform/superproject/+/master:build/soong/java/dexpreopt_config.go
+[soong/java/java.go:addCLCFromDep]: https://cs.android.com/android/platform/superproject/+/master:build/soong/java/java.go?q=%22func%20addCLCfromDep%22
+[soong/java/platform_bootclasspath.go]: https://cs.android.com/android/platform/superproject/+/master:build/soong/java/platform_bootclasspath.go
+[soong/scripts/construct_context.py]: https://cs.android.com/android/platform/superproject/+/master:build/soong/scripts/construct_context.py
+[soong/scripts/manifest_check.py]: https://cs.android.com/android/platform/superproject/+/master:build/soong/scripts/manifest_check.py
diff --git a/docs/rbe.json b/docs/rbe.json
new file mode 100644
index 0000000..f6ff107
--- /dev/null
+++ b/docs/rbe.json
@@ -0,0 +1,24 @@
+{
+    "env": {
+        "USE_RBE": "1",
+
+        "RBE_R8_EXEC_STRATEGY": "remote_local_fallback",
+        "RBE_CXX_EXEC_STRATEGY": "remote_local_fallback",
+        "RBE_D8_EXEC_STRATEGY": "remote_local_fallback",
+        "RBE_JAVAC_EXEC_STRATEGY": "remote_local_fallback",
+        "RBE_JAVAC": "1",
+        "RBE_R8": "1",
+        "RBE_D8": "1",
+
+        "RBE_instance": "[replace with your RBE instance]",
+        "RBE_service": "[replace with your RBE service endpoint]",
+
+        "RBE_DIR": "prebuilts/remoteexecution-client/live",
+
+        "RBE_use_application_default_credentials": "true",
+
+        "RBE_log_dir": "/tmp",
+        "RBE_output_dir": "/tmp",
+        "RBE_proxy_log_dir": "/tmp"
+    }
+}
diff --git a/docs/rbe.md b/docs/rbe.md
new file mode 100644
index 0000000..cfe86d7
--- /dev/null
+++ b/docs/rbe.md
@@ -0,0 +1,70 @@
+# Build Android Platform on Remote Build Execution
+
+Soong is integrated with Google's Remote Build Execution (RBE) service, which
+implements the
+[Remote Execution API](https://github.com/bazelbuild/remote-apis).
+
+With RBE enabled, Android Platform builds can be sped up by distributing build
+actions across a worker pool that shares a central cache of build results.
+
+## Configuration
+
+To enable RBE, you need to set several environment variables before triggering
+the build. You can set them through an
+[environment variables config file](https://android.googlesource.com/platform/build/soong/+/master/README.md#environment-variables-config-file).
+As an example, [build/soong/docs/rbe.json](rbe.json) is a config that enables
+RBE in the build. Once the config file is created, you need to let Soong load
+it by setting the `ANDROID_BUILD_ENVIRONMENT_CONFIG` and
+`ANDROID_BUILD_ENVIRONMENT_CONFIG_DIR` environment variables. The
+following command starts Soong with [build/soong/docs/rbe.json](rbe.json)
+loaded:
+
+```shell
+ANDROID_BUILD_ENVIRONMENT_CONFIG=rbe \
+ANDROID_BUILD_ENVIRONMENT_CONFIG_DIR=build/soong/docs \
+  build/soong/soong_ui.bash
+```
+
+### Configuration Explanation
+
+Below is a brief explanation of each field in
+[build/soong/docs/rbe.json](rbe.json):
+
+##### USE\_RBE:
+If set to 1, enables RBE for the build.
+
+##### RBE\_CXX\_EXEC\_STRATEGY / RBE\_JAVAC\_EXEC\_STRATEGY / RBE\_R8\_EXEC\_STRATEGY / RBE\_D8\_EXEC\_STRATEGY:
+
+Sets the execution strategy for C++/javac/r8/d8 action types. (**Note**: all
+options will update the remote cache if the user has the right permissions to
+update the cache.) The available options are:
+
+*   **local**: Only execute locally.
+*   **remote**: Only execute remotely.
+*   **remote_local_fallback**: Try executing remotely and fall back to local
+    execution if failed.
+*   **racing**: Race remote execution and local execution and use the earlier
+    result.
+
+##### RBE\_JAVAC / RBE\_R8 / RBE\_D8
+
+If set to 1, enables javac/r8/d8 support. C++ compilation is enabled by default.
+
+##### RBE\_service / RBE\_instance
+
+The remote execution service endpoint and instance ID to target when calling
+remote execution via gRPC to execute actions.
+
+##### RBE\_DIR
+
+Where to find the remote client binaries (rewrapper, reproxy).
+
+##### RBE\_use\_application\_default\_credentials
+
+reclient uses
+[application default credentials](https://cloud.google.com/sdk/gcloud/reference/auth/application-default/login)
+for authentication, as generated by `gcloud auth application-default login`.
+
+##### RBE\_log\_dir/RBE\_proxy\_log\_dir/RBE\_output\_dir
+
+Logs generated by rewrapper and reproxy will go here.
diff --git a/docs/tidy.md b/docs/tidy.md
index 3140198..890c3a0 100644
--- a/docs/tidy.md
+++ b/docs/tidy.md
@@ -225,6 +225,16 @@
 in several Android continuous builds where `WITH_TIDY=1` and
 `CLANG_ANALYZER_CHECKS=1` are set.
 
+Similar to `tidy_disabled_srcs`, a `tidy_timeout_srcs` list
+can be used to include source files that take too much time to compile
+with clang-tidy. Files listed in `tidy_timeout_srcs` will not
+be compiled by clang-tidy when `TIDY_TIMEOUT` is defined.
+This can save global build time when it is necessary to set a
+time limit globally so that the build finishes in an acceptable time.
+Developers who want to find all clang-tidy warnings and
+are willing to spend more time on all files in a project
+should not define `TIDY_TIMEOUT` and should build only the wanted project directories.
+
 ## Capabilities for Android.bp and Android.mk
 
 Some of the previously mentioned features are defined only
diff --git a/etc/prebuilt_etc.go b/etc/prebuilt_etc.go
index a142833..719771f 100644
--- a/etc/prebuilt_etc.go
+++ b/etc/prebuilt_etc.go
@@ -474,6 +474,7 @@
 	// This module is device-only
 	android.InitAndroidArchModule(module, android.DeviceSupported, android.MultilibFirst)
 	android.InitDefaultableModule(module)
+	android.InitBazelModule(module)
 	return module
 }
 
@@ -668,25 +669,17 @@
 
 // For Bazel / bp2build
 
-type bazelPrebuiltEtcAttributes struct {
+type bazelPrebuiltFileAttributes struct {
 	Src         bazel.LabelAttribute
 	Filename    string
-	Sub_dir     string
+	Dir         string
 	Installable bazel.BoolAttribute
 }
 
 // ConvertWithBp2build performs bp2build conversion of PrebuiltEtc
-func (p *PrebuiltEtc) ConvertWithBp2build(ctx android.TopDownMutatorContext) {
-	// All prebuilt_* modules are PrebuiltEtc, but at this time, we only convert prebuilt_etc modules.
-	if p.installDirBase != "etc" {
-		return
-	}
-
-	prebuiltEtcBp2BuildInternal(ctx, p)
-}
-
-func prebuiltEtcBp2BuildInternal(ctx android.TopDownMutatorContext, module *PrebuiltEtc) {
-	var srcLabelAttribute bazel.LabelAttribute
+// All prebuilt_* modules are PrebuiltEtc, which we treat uniformly as *PrebuiltFile*
+func (module *PrebuiltEtc) ConvertWithBp2build(ctx android.TopDownMutatorContext) {
+	var src bazel.LabelAttribute
 	for axis, configToProps := range module.GetArchVariantProperties(ctx, &prebuiltEtcProperties{}) {
 		for config, p := range configToProps {
 			props, ok := p.(*prebuiltEtcProperties)
@@ -695,7 +688,7 @@
 			}
 			if props.Src != nil {
 				label := android.BazelLabelForModuleSrcSingle(ctx, *props.Src)
-				srcLabelAttribute.SetSelectValue(axis, config, label)
+				src.SetSelectValue(axis, config, label)
 			}
 		}
 	}
@@ -705,26 +698,30 @@
 		filename = *module.properties.Filename
 	}
 
-	var subDir string
-	if module.subdirProperties.Sub_dir != nil {
-		subDir = *module.subdirProperties.Sub_dir
+	var dir = module.installDirBase
+	// prebuilt_file supports only `etc` or `usr/share`
+	if !(dir == "etc" || dir == "usr/share") {
+		return
+	}
+	if subDir := module.subdirProperties.Sub_dir; subDir != nil {
+		dir = dir + "/" + *subDir
 	}
 
-	var installableBoolAttribute bazel.BoolAttribute
-	if module.properties.Installable != nil {
-		installableBoolAttribute.Value = module.properties.Installable
+	var installable bazel.BoolAttribute
+	if install := module.properties.Installable; install != nil {
+		installable.Value = install
 	}
 
-	attrs := &bazelPrebuiltEtcAttributes{
-		Src:         srcLabelAttribute,
+	attrs := &bazelPrebuiltFileAttributes{
+		Src:         src,
 		Filename:    filename,
-		Sub_dir:     subDir,
-		Installable: installableBoolAttribute,
+		Dir:         dir,
+		Installable: installable,
 	}
 
 	props := bazel.BazelTargetModuleProperties{
-		Rule_class:        "prebuilt_etc",
-		Bzl_load_location: "//build/bazel/rules:prebuilt_etc.bzl",
+		Rule_class:        "prebuilt_file",
+		Bzl_load_location: "//build/bazel/rules:prebuilt_file.bzl",
 	}
 
 	ctx.CreateBazelTargetModule(props, android.CommonAttributes{Name: module.Name()}, attrs)
diff --git a/filesystem/bootimg.go b/filesystem/bootimg.go
index 33beb37..352b451 100644
--- a/filesystem/bootimg.go
+++ b/filesystem/bootimg.go
@@ -228,6 +228,15 @@
 	return output
 }
 
+// Calculates avb_salt from some input for deterministic output.
+func (b *bootimg) salt() string {
+	var input []string
+	input = append(input, b.properties.Cmdline...)
+	input = append(input, proptools.StringDefault(b.properties.Partition_name, b.Name()))
+	input = append(input, proptools.String(b.properties.Header_version))
+	return sha1sum(input)
+}
+
 func (b *bootimg) buildPropFile(ctx android.ModuleContext) (propFile android.OutputPath, toolDeps android.Paths) {
 	var sb strings.Builder
 	var deps android.Paths
@@ -248,6 +257,7 @@
 	addStr("avb_add_hash_footer_args", "") // TODO(jiyong): add --rollback_index
 	partitionName := proptools.StringDefault(b.properties.Partition_name, b.Name())
 	addStr("partition_name", partitionName)
+	addStr("avb_salt", b.salt())
 
 	propFile = android.PathForModuleOut(ctx, "prop").OutputPath
 	android.WriteFileRule(ctx, propFile, sb.String())
diff --git a/filesystem/filesystem.go b/filesystem/filesystem.go
index 0796258..6e1e78a 100644
--- a/filesystem/filesystem.go
+++ b/filesystem/filesystem.go
@@ -15,7 +15,9 @@
 package filesystem
 
 import (
+	"crypto/sha256"
 	"fmt"
+	"io"
 	"path/filepath"
 	"strings"
 
@@ -43,8 +45,14 @@
 	// Function that builds extra files under the root directory and returns the files
 	buildExtraFiles func(ctx android.ModuleContext, root android.OutputPath) android.OutputPaths
 
+	// Function that filters PackagingSpecs returned by PackagingBase.GatherPackagingSpecs()
+	filterPackagingSpecs func(specs map[string]android.PackagingSpec)
+
 	output     android.OutputPath
 	installDir android.InstallPath
+
+	// For testing. Keeps the result of CopyDepsToZip()
+	entries []string
 }
 
 type symlinkDefinition struct {
@@ -82,6 +90,13 @@
 
 	// Symbolic links to be created under root with "ln -sf <target> <name>".
 	Symlinks []symlinkDefinition
+
+	// Seconds since unix epoch to override timestamps of file entries
+	Fake_timestamp *string
+
+	// When set, passed to mkuserimg_mke2fs --mke2fs_uuid & --mke2fs_hash_seed.
+	// Otherwise, they'll be set to random values, which might cause non-deterministic build output.
+	Uuid *string
 }
 
 // android_filesystem packages a set of modules and their transitive dependencies into a filesystem
@@ -226,7 +241,7 @@
 
 func (f *filesystem) buildImageUsingBuildImage(ctx android.ModuleContext) android.OutputPath {
 	depsZipFile := android.PathForModuleOut(ctx, "deps.zip").OutputPath
-	f.CopyDepsToZip(ctx, depsZipFile)
+	f.entries = f.CopyDepsToZip(ctx, f.gatherFilteredPackagingSpecs(ctx), depsZipFile)
 
 	builder := android.NewRuleBuilder(pctx, ctx)
 	depsBase := proptools.StringDefault(f.properties.Base_dir, ".")
@@ -270,6 +285,11 @@
 	return fcBin.OutputPath
 }
 
+// Calculates avb_salt from entry list (sorted) for deterministic output.
+func (f *filesystem) salt() string {
+	return sha1sum(f.entries)
+}
+
 func (f *filesystem) buildPropFile(ctx android.ModuleContext) (propFile android.OutputPath, toolDeps android.Paths) {
 	type prop struct {
 		name  string
@@ -315,12 +335,19 @@
 		addStr("avb_add_hashtree_footer_args", "--do_not_generate_fec")
 		partitionName := proptools.StringDefault(f.properties.Partition_name, f.Name())
 		addStr("partition_name", partitionName)
+		addStr("avb_salt", f.salt())
 	}
 
 	if proptools.String(f.properties.File_contexts) != "" {
 		addPath("selinux_fc", f.buildFileContexts(ctx))
 	}
-
+	if timestamp := proptools.String(f.properties.Fake_timestamp); timestamp != "" {
+		addStr("timestamp", timestamp)
+	}
+	if uuid := proptools.String(f.properties.Uuid); uuid != "" {
+		addStr("uuid", uuid)
+		addStr("hash_seed", uuid)
+	}
 	propFile = android.PathForModuleOut(ctx, "prop").OutputPath
 	builder := android.NewRuleBuilder(pctx, ctx)
 	builder.Command().Text("rm").Flag("-rf").Output(propFile)
@@ -345,7 +372,7 @@
 	}
 
 	depsZipFile := android.PathForModuleOut(ctx, "deps.zip").OutputPath
-	f.CopyDepsToZip(ctx, depsZipFile)
+	f.entries = f.CopyDepsToZip(ctx, f.gatherFilteredPackagingSpecs(ctx), depsZipFile)
 
 	builder := android.NewRuleBuilder(pctx, ctx)
 	depsBase := proptools.StringDefault(f.properties.Base_dir, ".")
@@ -434,3 +461,22 @@
 	}
 	return nil
 }
+
+// Filter the result of GatherPackagingSpecs to discard items targeting outside the "system" partition.
+// Note that the "apex" module installs its contents to the "apex" (fake) partition as well,
+// for symbol lookup by imitating "activated" paths.
+func (f *filesystem) gatherFilteredPackagingSpecs(ctx android.ModuleContext) map[string]android.PackagingSpec {
+	specs := f.PackagingBase.GatherPackagingSpecs(ctx)
+	if f.filterPackagingSpecs != nil {
+		f.filterPackagingSpecs(specs)
+	}
+	return specs
+}
+
+func sha1sum(values []string) string {
+	h := sha256.New()
+	for _, value := range values {
+		io.WriteString(h, value)
+	}
+	return fmt.Sprintf("%x", h.Sum(nil))
+}
diff --git a/filesystem/filesystem_test.go b/filesystem/filesystem_test.go
index e78fdff..cda06d9 100644
--- a/filesystem/filesystem_test.go
+++ b/filesystem/filesystem_test.go
@@ -45,11 +45,11 @@
 
 func TestFileSystemFillsLinkerConfigWithStubLibs(t *testing.T) {
 	result := fixture.RunTestWithBp(t, `
-	        android_system_image {
+		android_system_image {
 			name: "myfilesystem",
 			deps: [
 				"libfoo",
-                                "libbar",
+				"libbar",
 			],
 			linker_config_src: "linker.config.json",
 		}
@@ -74,3 +74,54 @@
 	android.AssertStringDoesNotContain(t, "linker.config.pb should not have libbar",
 		output.RuleParams.Command, "libbar.so")
 }
+
+func registerComponent(ctx android.RegistrationContext) {
+	ctx.RegisterModuleType("component", componentFactory)
+}
+
+func componentFactory() android.Module {
+	m := &component{}
+	m.AddProperties(&m.properties)
+	android.InitAndroidArchModule(m, android.DeviceSupported, android.MultilibCommon)
+	return m
+}
+
+type component struct {
+	android.ModuleBase
+	properties struct {
+		Install_copy_in_data []string
+	}
+}
+
+func (c *component) GenerateAndroidBuildActions(ctx android.ModuleContext) {
+	output := android.PathForModuleOut(ctx, c.Name())
+	dir := android.PathForModuleInstall(ctx, "components")
+	ctx.InstallFile(dir, c.Name(), output)
+
+	dataDir := android.PathForModuleInPartitionInstall(ctx, "data", "components")
+	for _, d := range c.properties.Install_copy_in_data {
+		ctx.InstallFile(dataDir, d, output)
+	}
+}
+
+func TestFileSystemGathersItemsOnlyInSystemPartition(t *testing.T) {
+	f := android.GroupFixturePreparers(fixture, android.FixtureRegisterWithContext(registerComponent))
+	result := f.RunTestWithBp(t, `
+		android_system_image {
+			name: "myfilesystem",
+			multilib: {
+				common: {
+					deps: ["foo"],
+				},
+			},
+			linker_config_src: "linker.config.json",
+		}
+		component {
+			name: "foo",
+			install_copy_in_data: ["bar"],
+		}
+	`)
+
+	module := result.ModuleForTests("myfilesystem", "android_common").Module().(*systemImage)
+	android.AssertDeepEquals(t, "entries should have foo only", []string{"components/foo"}, module.entries)
+}
diff --git a/filesystem/system_image.go b/filesystem/system_image.go
index 1d24d6d..75abf70 100644
--- a/filesystem/system_image.go
+++ b/filesystem/system_image.go
@@ -37,6 +37,7 @@
 	module := &systemImage{}
 	module.AddProperties(&module.properties)
 	module.filesystem.buildExtraFiles = module.buildExtraFiles
+	module.filesystem.filterPackagingSpecs = module.filterPackagingSpecs
 	initFilesystemModule(&module.filesystem)
 	return module
 }
@@ -53,7 +54,7 @@
 
 	// we need "Module"s for packaging items
 	var otherModules []android.Module
-	deps := s.GatherPackagingSpecs(ctx)
+	deps := s.gatherFilteredPackagingSpecs(ctx)
 	ctx.WalkDeps(func(child, parent android.Module) bool {
 		for _, ps := range child.PackagingSpecs() {
 			if _, ok := deps[ps.RelPathInPackage()]; ok {
@@ -68,3 +69,14 @@
 	builder.Build("conv_linker_config", "Generate linker config protobuf "+output.String())
 	return output
 }
+
+// Filter the result of GatherPackagingSpecs to discard items targeting outside the "system" partition.
+// Note that the "apex" module installs its contents to the "apex" (fake) partition as well,
+// for symbol lookup by imitating "activated" paths.
+func (s *systemImage) filterPackagingSpecs(specs map[string]android.PackagingSpec) {
+	for k, ps := range specs {
+		if ps.Partition() != "system" {
+			delete(specs, k)
+		}
+	}
+}
diff --git a/genrule/genrule.go b/genrule/genrule.go
index 1679a57..c52ddee 100644
--- a/genrule/genrule.go
+++ b/genrule/genrule.go
@@ -294,7 +294,7 @@
 		if _, exists := locationLabels[label]; !exists {
 			locationLabels[label] = loc
 		} else {
-			ctx.ModuleErrorf("multiple labels for %q, %q and %q",
+			ctx.ModuleErrorf("multiple locations for label %q: %q and %q (do you have duplicate srcs entries?)",
 				label, locationLabels[label], loc)
 		}
 	}
@@ -382,9 +382,12 @@
 		addLocationLabel(toolFile, toolLocation{paths})
 	}
 
+	includeDirInPaths := ctx.DeviceConfig().BuildBrokenInputDir(g.Name())
 	var srcFiles android.Paths
 	for _, in := range g.properties.Srcs {
-		paths, missingDeps := android.PathsAndMissingDepsForModuleSrcExcludes(ctx, []string{in}, g.properties.Exclude_srcs)
+		paths, missingDeps := android.PathsAndMissingDepsRelativeToModuleSourceDir(android.SourceInput{
+			Context: ctx, Paths: []string{in}, ExcludePaths: g.properties.Exclude_srcs, IncludeDirs: includeDirInPaths,
+		})
 		if len(missingDeps) > 0 {
 			if !ctx.Config().AllowMissingDependencies() {
 				panic(fmt.Errorf("should never get here, the missing dependencies %q should have been reported in DepsMutator",
@@ -496,7 +499,7 @@
 						}
 						return paths[0], nil
 					} else {
-						return reportError("unknown location label %q", label)
+						return reportError("unknown location label %q is not in srcs, out, tools or tool_files.", label)
 					}
 				} else if strings.HasPrefix(name, "locations ") {
 					label := strings.TrimSpace(strings.TrimPrefix(name, "locations "))
@@ -507,7 +510,7 @@
 						}
 						return strings.Join(paths, " "), nil
 					} else {
-						return reportError("unknown locations label %q", label)
+						return reportError("unknown locations label %q is not in srcs, out, tools or tool_files.", label)
 					}
 				} else {
 					return reportError("unknown variable '$(%s)'", name)
diff --git a/genrule/genrule_test.go b/genrule/genrule_test.go
index 04c97fd..1b5cef2 100644
--- a/genrule/genrule_test.go
+++ b/genrule/genrule_test.go
@@ -341,7 +341,7 @@
 				out: ["out"],
 				cmd: "echo foo > $(location missing)",
 			`,
-			err: `unknown location label "missing"`,
+			err: `unknown location label "missing" is not in srcs, out, tools or tool_files.`,
 		},
 		{
 			name: "error locations",
@@ -349,7 +349,7 @@
 					out: ["out"],
 					cmd: "echo foo > $(locations missing)",
 			`,
-			err: `unknown locations label "missing"`,
+			err: `unknown locations label "missing" is not in srcs, out, tools or tool_files`,
 		},
 		{
 			name: "error location no files",
diff --git a/java/Android.bp b/java/Android.bp
index c062941..df0d1eb 100644
--- a/java/Android.bp
+++ b/java/Android.bp
@@ -15,6 +15,7 @@
         "soong-dexpreopt",
         "soong-genrule",
         "soong-java-config",
+        "soong-provenance",
         "soong-python",
         "soong-remoteexec",
         "soong-tradefed",
@@ -81,6 +82,7 @@
         "app_test.go",
         "bootclasspath_fragment_test.go",
         "device_host_converter_test.go",
+        "dex_test.go",
         "dexpreopt_test.go",
         "dexpreopt_bootjars_test.go",
         "droiddoc_test.go",
diff --git a/java/aar.go b/java/aar.go
index 8e10253..00ff7e7 100644
--- a/java/aar.go
+++ b/java/aar.go
@@ -598,16 +598,26 @@
 // AAR (android library) prebuilts
 //
 
+// Properties for android_library_import
 type AARImportProperties struct {
+	// AAR (Android library prebuilt) filepath. Exactly one AAR is required.
 	Aars []string `android:"path"`
-
-	Sdk_version     *string
+	// If not blank, set to the version of the sdk to compile against.
+	// Defaults to private.
+	// Values are of one of the following forms:
+	// 1) numerical API level, "current", "none", or "core_platform"
+	// 2) An SDK kind with an API level: "<sdk kind>_<API level>"
+	// See build/soong/android/sdk_version.go for the complete and up to date list of SDK kinds.
+	// If the SDK kind is empty, it will be set to public
+	Sdk_version *string
+	// If not blank, set the minimum version of the sdk that the compiled artifacts will run against.
+	// Defaults to sdk_version if not set. See sdk_version for possible values.
 	Min_sdk_version *string
-
+	// List of Java static libraries that the included AAR (Android library prebuilt) depends on.
 	Static_libs []string
-	Libs        []string
-
-	// if set to true, run Jetifier against .aar file. Defaults to false.
+	// List of Java libraries that the included AAR (Android library prebuilt) depends on.
+	Libs []string
+	// If set to true, run Jetifier against .aar file. Defaults to false.
 	Jetifier *bool
 }
 
diff --git a/java/android_manifest.go b/java/android_manifest.go
index 7772b70..3fa3520 100644
--- a/java/android_manifest.go
+++ b/java/android_manifest.go
@@ -82,8 +82,8 @@
 		if minSdkVersion.FinalOrFutureInt() >= 23 {
 			args = append(args, fmt.Sprintf("--extract-native-libs=%v", !params.UseEmbeddedNativeLibs))
 		} else if params.UseEmbeddedNativeLibs {
-			ctx.ModuleErrorf("module attempted to store uncompressed native libraries, but minSdkVersion=%d doesn't support it",
-				minSdkVersion)
+			ctx.ModuleErrorf("module attempted to store uncompressed native libraries, but minSdkVersion=%s doesn't support it",
+				minSdkVersion.String())
 		}
 	}
 
diff --git a/java/androidmk.go b/java/androidmk.go
index b930441..80b828d 100644
--- a/java/androidmk.go
+++ b/java/androidmk.go
@@ -409,22 +409,6 @@
 				entries.SetOptionalPaths("LOCAL_SOONG_LINT_REPORTS", app.linter.reports)
 			},
 		},
-		ExtraFooters: []android.AndroidMkExtraFootersFunc{
-			func(w io.Writer, name, prefix, moduleDir string) {
-				if app.noticeOutputs.Merged.Valid() {
-					fmt.Fprintf(w, "$(call dist-for-goals,%s,%s:%s)\n",
-						app.installApkName, app.noticeOutputs.Merged.String(), app.installApkName+"_NOTICE")
-				}
-				if app.noticeOutputs.TxtOutput.Valid() {
-					fmt.Fprintf(w, "$(call dist-for-goals,%s,%s:%s)\n",
-						app.installApkName, app.noticeOutputs.TxtOutput.String(), app.installApkName+"_NOTICE.txt")
-				}
-				if app.noticeOutputs.HtmlOutput.Valid() {
-					fmt.Fprintf(w, "$(call dist-for-goals,%s,%s:%s)\n",
-						app.installApkName, app.noticeOutputs.HtmlOutput.String(), app.installApkName+"_NOTICE.html")
-				}
-			},
-		},
 	}}
 }
 
diff --git a/java/app.go b/java/app.go
index e4432ff..2b52eab 100755
--- a/java/app.go
+++ b/java/app.go
@@ -19,7 +19,6 @@
 
 import (
 	"path/filepath"
-	"sort"
 	"strings"
 
 	"github.com/google/blueprint"
@@ -164,8 +163,6 @@
 
 	additionalAaptFlags []string
 
-	noticeOutputs android.NoticeOutputs
-
 	overriddenManifestPackageName string
 
 	android.ApexBundleDepsInfo
@@ -523,53 +520,6 @@
 	return jniSymbols
 }
 
-func (a *AndroidApp) noticeBuildActions(ctx android.ModuleContext) {
-	// Collect NOTICE files from all dependencies.
-	seenModules := make(map[android.Module]bool)
-	noticePathSet := make(map[android.Path]bool)
-
-	ctx.WalkDeps(func(child android.Module, parent android.Module) bool {
-		// Have we already seen this?
-		if _, ok := seenModules[child]; ok {
-			return false
-		}
-		seenModules[child] = true
-
-		// Skip host modules.
-		if child.Target().Os.Class == android.Host {
-			return false
-		}
-
-		paths := child.(android.Module).NoticeFiles()
-		if len(paths) > 0 {
-			for _, path := range paths {
-				noticePathSet[path] = true
-			}
-		}
-		return true
-	})
-
-	// If the app has one, add it too.
-	if len(a.NoticeFiles()) > 0 {
-		for _, path := range a.NoticeFiles() {
-			noticePathSet[path] = true
-		}
-	}
-
-	if len(noticePathSet) == 0 {
-		return
-	}
-	var noticePaths []android.Path
-	for path := range noticePathSet {
-		noticePaths = append(noticePaths, path)
-	}
-	sort.Slice(noticePaths, func(i, j int) bool {
-		return noticePaths[i].String() < noticePaths[j].String()
-	})
-
-	a.noticeOutputs = android.BuildNoticeOutput(ctx, a.installDir, a.installApkName+".apk", noticePaths)
-}
-
 // Reads and prepends a main cert from the default cert dir if it hasn't been set already, i.e. it
 // isn't a cert module reference. Also checks and enforces system cert restriction if applicable.
 func processMainCert(m android.ModuleBase, certPropValue string, certificates []Certificate, ctx android.ModuleContext) []Certificate {
@@ -636,9 +586,16 @@
 	}
 	a.onDeviceDir = android.InstallPathToOnDevicePath(ctx, a.installDir)
 
-	a.noticeBuildActions(ctx)
 	if Bool(a.appProperties.Embed_notices) || ctx.Config().IsEnvTrue("ALWAYS_EMBED_NOTICES") {
-		a.aapt.noticeFile = a.noticeOutputs.HtmlGzOutput
+		noticeFile := android.PathForModuleOut(ctx, "NOTICE.html.gz")
+		android.BuildNoticeHtmlOutputFromLicenseMetadata(ctx, noticeFile)
+		noticeAssetPath := android.PathForModuleOut(ctx, "NOTICE", "NOTICE.html.gz")
+		builder := android.NewRuleBuilder(pctx, ctx)
+		builder.Command().Text("cp").
+			Input(noticeFile).
+			Output(noticeAssetPath)
+		builder.Build("notice_dir", "Building notice dir")
+		a.aapt.noticeFile = android.OptionalPathForPath(noticeAssetPath)
 	}
 
 	a.classLoaderContexts = a.usesLibrary.classLoaderContextForUsesLibDeps(ctx)
@@ -681,7 +638,21 @@
 	}
 
 	certificates := processMainCert(a.ModuleBase, a.getCertString(ctx), certificateDeps, ctx)
-	a.certificate = certificates[0]
+
+	// This can be reached with an empty certificate list if AllowMissingDependencies is set
+	// and the certificate property for this module is a module reference to a missing module.
+	if len(certificates) > 0 {
+		a.certificate = certificates[0]
+	} else {
+		if !ctx.Config().AllowMissingDependencies() && len(ctx.GetMissingDependencies()) > 0 {
+			panic("Should only get here if AllowMissingDependencies set and there are missing dependencies")
+		}
+		// Set a certificate to avoid panics later when accessing it.
+		a.certificate = Certificate{
+			Key: android.PathForModuleOut(ctx, "missing.pk8"),
+			Pem: android.PathForModuleOut(ctx, "missing.pem"),
+		}
+	}
 
 	// Build a final signed app package.
 	packageFile := android.PathForModuleOut(ctx, a.installApkName+".apk")
@@ -1443,22 +1414,34 @@
 
 	props := bazel.BazelTargetModuleProperties{
 		Rule_class:        "android_app_certificate",
-		Bzl_load_location: "//build/bazel/rules:android_app_certificate.bzl",
+		Bzl_load_location: "//build/bazel/rules/android:android_app_certificate.bzl",
 	}
 
 	ctx.CreateBazelTargetModule(props, android.CommonAttributes{Name: module.Name()}, attrs)
 }
 
 type bazelAndroidAppAttributes struct {
-	*javaLibraryAttributes
-	Manifest       bazel.Label
-	Custom_package *string
-	Resource_files bazel.LabelListAttribute
+	*javaCommonAttributes
+	Deps             bazel.LabelListAttribute
+	Manifest         bazel.Label
+	Custom_package   *string
+	Resource_files   bazel.LabelListAttribute
+	Certificate      *bazel.Label
+	Certificate_name *string
 }
 
 // ConvertWithBp2build is used to convert android_app to Bazel.
 func (a *AndroidApp) ConvertWithBp2build(ctx android.TopDownMutatorContext) {
-	libAttrs := a.convertLibraryAttrsBp2Build(ctx)
+	commonAttrs, depLabels := a.convertLibraryAttrsBp2Build(ctx)
+
+	deps := depLabels.Deps
+	if !commonAttrs.Srcs.IsEmpty() {
+		deps.Append(depLabels.StaticDeps) // we should only append these if there are sources to use them
+	} else if !deps.IsEmpty() || !depLabels.StaticDeps.IsEmpty() {
+		ctx.ModuleErrorf("android_app has dynamic or static dependencies but no sources." +
+			" Bazel does not allow direct dependencies without sources nor exported" +
+			" dependencies on android_binary rule.")
+	}
 
 	manifest := proptools.StringDefault(a.aaptProperties.Manifest, "AndroidManifest.xml")
 
@@ -1470,15 +1453,31 @@
 		resourceFiles.Includes = append(resourceFiles.Includes, files...)
 	}
 
+	var certificate *bazel.Label
+	certificateNamePtr := a.overridableAppProperties.Certificate
+	certificateName := proptools.StringDefault(certificateNamePtr, "")
+	certModule := android.SrcIsModule(certificateName)
+	if certModule != "" {
+		c := android.BazelLabelForModuleDepSingle(ctx, certificateName)
+		certificate = &c
+		certificateNamePtr = nil
+	}
+
 	attrs := &bazelAndroidAppAttributes{
-		libAttrs,
+		commonAttrs,
+		deps,
 		android.BazelLabelForModuleSrcSingle(ctx, manifest),
 		// TODO(b/209576404): handle package name override by product variable PRODUCT_MANIFEST_PACKAGE_NAME_OVERRIDES
 		a.overridableAppProperties.Package_name,
 		bazel.MakeLabelListAttribute(resourceFiles),
+		certificate,
+		certificateNamePtr,
 	}
-	props := bazel.BazelTargetModuleProperties{Rule_class: "android_binary",
-		Bzl_load_location: "@rules_android//rules:rules.bzl"}
+
+	props := bazel.BazelTargetModuleProperties{
+		Rule_class:        "android_binary",
+		Bzl_load_location: "//build/bazel/rules/android:android_binary.bzl",
+	}
 
 	ctx.CreateBazelTargetModule(props, android.CommonAttributes{Name: a.Name()}, attrs)
 
diff --git a/java/app_import.go b/java/app_import.go
index 3e5f972..b017eca 100644
--- a/java/app_import.go
+++ b/java/app_import.go
@@ -22,6 +22,7 @@
 	"github.com/google/blueprint/proptools"
 
 	"android/soong/android"
+	"android/soong/provenance"
 )
 
 func init() {
@@ -57,6 +58,8 @@
 	installPath android.InstallPath
 
 	hideApexVariantFromMake bool
+
+	provenanceMetaDataFile android.OutputPath
 }
 
 type AndroidAppImportProperties struct {
@@ -343,6 +346,8 @@
 
 	if apexInfo.IsForPlatform() {
 		a.installPath = ctx.InstallFile(installDir, apkFilename, a.outputFile)
+		artifactPath := android.PathForModuleSrc(ctx, *a.properties.Apk)
+		a.provenanceMetaDataFile = provenance.GenerateArtifactProvenanceMetaData(ctx, artifactPath, a.installPath)
 	}
 
 	// TODO: androidmk converter jni libs
@@ -368,6 +373,10 @@
 	return a.certificate
 }
 
+func (a *AndroidAppImport) ProvenanceMetaDataFile() android.OutputPath {
+	return a.provenanceMetaDataFile
+}
+
 var dpiVariantGroupType reflect.Type
 var archVariantGroupType reflect.Type
 var supportedDpis = []string{"ldpi", "mdpi", "hdpi", "xhdpi", "xxhdpi", "xxxhdpi"}
@@ -457,7 +466,7 @@
 //                 apk: "prebuilts/example_xhdpi.apk",
 //             },
 //         },
-//         certificate: "PRESIGNED",
+//         presigned: true,
 //     }
 func AndroidAppImportFactory() android.Module {
 	module := &AndroidAppImport{}
diff --git a/java/app_import_test.go b/java/app_import_test.go
index efa52c1..8f6c75f 100644
--- a/java/app_import_test.go
+++ b/java/app_import_test.go
@@ -53,6 +53,11 @@
 	if expected != signingFlag {
 		t.Errorf("Incorrect signing flags, expected: %q, got: %q", expected, signingFlag)
 	}
+	rule := variant.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "prebuilts/apk/app.apk", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "foo", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/app/foo/foo.apk", rule.Args["install_path"])
 }
 
 func TestAndroidAppImport_NoDexPreopt(t *testing.T) {
@@ -74,6 +79,12 @@
 		variant.MaybeOutput("dexpreopt/oat/arm64/package.odex").Rule != nil {
 		t.Errorf("dexpreopt shouldn't have run.")
 	}
+
+	rule := variant.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "prebuilts/apk/app.apk", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "foo", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/app/foo/foo.apk", rule.Args["install_path"])
 }
 
 func TestAndroidAppImport_Presigned(t *testing.T) {
@@ -102,6 +113,12 @@
 	if variant.MaybeOutput("zip-aligned/foo.apk").Rule == nil {
 		t.Errorf("can't find aligning rule")
 	}
+
+	rule := variant.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "prebuilts/apk/app.apk", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "foo", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/app/foo/foo.apk", rule.Args["install_path"])
 }
 
 func TestAndroidAppImport_SigningLineage(t *testing.T) {
@@ -137,6 +154,12 @@
 	if expected != signingFlag {
 		t.Errorf("Incorrect signing flags, expected: %q, got: %q", expected, signingFlag)
 	}
+
+	rule := variant.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "prebuilts/apk/app.apk", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "foo", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/app/foo/foo.apk", rule.Args["install_path"])
 }
 
 func TestAndroidAppImport_SigningLineageFilegroup(t *testing.T) {
@@ -163,6 +186,12 @@
 	if expected != signingFlag {
 		t.Errorf("Incorrect signing flags, expected: %q, got: %q", expected, signingFlag)
 	}
+
+	rule := variant.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "prebuilts/apk/app.apk", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "foo", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/app/foo/foo.apk", rule.Args["install_path"])
 }
 
 func TestAndroidAppImport_DefaultDevCert(t *testing.T) {
@@ -192,6 +221,12 @@
 	if expected != signingFlag {
 		t.Errorf("Incorrect signing flags, expected: %q, got: %q", expected, signingFlag)
 	}
+
+	rule := variant.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "prebuilts/apk/app.apk", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "foo", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/app/foo/foo.apk", rule.Args["install_path"])
 }
 
 func TestAndroidAppImport_DpiVariants(t *testing.T) {
@@ -214,40 +249,46 @@
 		}
 		`
 	testCases := []struct {
-		name                string
-		aaptPreferredConfig *string
-		aaptPrebuiltDPI     []string
-		expected            string
+		name                                   string
+		aaptPreferredConfig                    *string
+		aaptPrebuiltDPI                        []string
+		expected                               string
+		expectedProvenanceMetaDataArtifactPath string
 	}{
 		{
-			name:                "no preferred",
-			aaptPreferredConfig: nil,
-			aaptPrebuiltDPI:     []string{},
-			expected:            "verify_uses_libraries/apk/app.apk",
+			name:                                   "no preferred",
+			aaptPreferredConfig:                    nil,
+			aaptPrebuiltDPI:                        []string{},
+			expected:                               "verify_uses_libraries/apk/app.apk",
+			expectedProvenanceMetaDataArtifactPath: "prebuilts/apk/app.apk",
 		},
 		{
-			name:                "AAPTPreferredConfig matches",
-			aaptPreferredConfig: proptools.StringPtr("xhdpi"),
-			aaptPrebuiltDPI:     []string{"xxhdpi", "ldpi"},
-			expected:            "verify_uses_libraries/apk/app_xhdpi.apk",
+			name:                                   "AAPTPreferredConfig matches",
+			aaptPreferredConfig:                    proptools.StringPtr("xhdpi"),
+			aaptPrebuiltDPI:                        []string{"xxhdpi", "ldpi"},
+			expected:                               "verify_uses_libraries/apk/app_xhdpi.apk",
+			expectedProvenanceMetaDataArtifactPath: "prebuilts/apk/app_xhdpi.apk",
 		},
 		{
-			name:                "AAPTPrebuiltDPI matches",
-			aaptPreferredConfig: proptools.StringPtr("mdpi"),
-			aaptPrebuiltDPI:     []string{"xxhdpi", "xhdpi"},
-			expected:            "verify_uses_libraries/apk/app_xxhdpi.apk",
+			name:                                   "AAPTPrebuiltDPI matches",
+			aaptPreferredConfig:                    proptools.StringPtr("mdpi"),
+			aaptPrebuiltDPI:                        []string{"xxhdpi", "xhdpi"},
+			expected:                               "verify_uses_libraries/apk/app_xxhdpi.apk",
+			expectedProvenanceMetaDataArtifactPath: "prebuilts/apk/app_xxhdpi.apk",
 		},
 		{
-			name:                "non-first AAPTPrebuiltDPI matches",
-			aaptPreferredConfig: proptools.StringPtr("mdpi"),
-			aaptPrebuiltDPI:     []string{"ldpi", "xhdpi"},
-			expected:            "verify_uses_libraries/apk/app_xhdpi.apk",
+			name:                                   "non-first AAPTPrebuiltDPI matches",
+			aaptPreferredConfig:                    proptools.StringPtr("mdpi"),
+			aaptPrebuiltDPI:                        []string{"ldpi", "xhdpi"},
+			expected:                               "verify_uses_libraries/apk/app_xhdpi.apk",
+			expectedProvenanceMetaDataArtifactPath: "prebuilts/apk/app_xhdpi.apk",
 		},
 		{
-			name:                "no matches",
-			aaptPreferredConfig: proptools.StringPtr("mdpi"),
-			aaptPrebuiltDPI:     []string{"ldpi", "xxxhdpi"},
-			expected:            "verify_uses_libraries/apk/app.apk",
+			name:                                   "no matches",
+			aaptPreferredConfig:                    proptools.StringPtr("mdpi"),
+			aaptPrebuiltDPI:                        []string{"ldpi", "xxxhdpi"},
+			expected:                               "verify_uses_libraries/apk/app.apk",
+			expectedProvenanceMetaDataArtifactPath: "prebuilts/apk/app.apk",
 		},
 	}
 
@@ -270,6 +311,12 @@
 		if !strings.HasSuffix(matches[1], test.expected) {
 			t.Errorf("wrong src apk, expected: %q got: %q", test.expected, matches[1])
 		}
+
+		provenanceMetaDataRule := variant.Rule("genProvenanceMetaData")
+		android.AssertStringEquals(t, "Invalid input", test.expectedProvenanceMetaDataArtifactPath, provenanceMetaDataRule.Inputs[0].String())
+		android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto", provenanceMetaDataRule.Output.String())
+		android.AssertStringEquals(t, "Invalid args", "foo", provenanceMetaDataRule.Args["module_name"])
+		android.AssertStringEquals(t, "Invalid args", "/system/app/foo/foo.apk", provenanceMetaDataRule.Args["install_path"])
 	}
 }
 
@@ -290,16 +337,25 @@
 		`)
 
 	testCases := []struct {
-		name     string
-		expected string
+		name                 string
+		expected             string
+		onDevice             string
+		expectedArtifactPath string
+		expectedMetaDataPath string
 	}{
 		{
-			name:     "foo",
-			expected: "foo.apk",
+			name:                 "foo",
+			expected:             "foo.apk",
+			onDevice:             "/system/app/foo/foo.apk",
+			expectedArtifactPath: "prebuilts/apk/app.apk",
+			expectedMetaDataPath: "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto",
 		},
 		{
-			name:     "bar",
-			expected: "bar_sample.apk",
+			name:                 "bar",
+			expected:             "bar_sample.apk",
+			onDevice:             "/system/app/bar/bar_sample.apk",
+			expectedArtifactPath: "prebuilts/apk/app.apk",
+			expectedMetaDataPath: "out/soong/.intermediates/provenance_metadata/bar/provenance_metadata.textproto",
 		},
 	}
 
@@ -316,15 +372,23 @@
 			t.Errorf("Incorrect LOCAL_INSTALLED_MODULE_STEM value '%s', expected '%s'",
 				actualValues, expectedValues)
 		}
+		rule := variant.Rule("genProvenanceMetaData")
+		android.AssertStringEquals(t, "Invalid input", test.expectedArtifactPath, rule.Inputs[0].String())
+		android.AssertStringEquals(t, "Invalid output", test.expectedMetaDataPath, rule.Output.String())
+		android.AssertStringEquals(t, "Invalid args", test.name, rule.Args["module_name"])
+		android.AssertStringEquals(t, "Invalid args", test.onDevice, rule.Args["install_path"])
 	}
 }
 
 func TestAndroidAppImport_ArchVariants(t *testing.T) {
 	// The test config's target arch is ARM64.
 	testCases := []struct {
-		name     string
-		bp       string
-		expected string
+		name         string
+		bp           string
+		expected     string
+		artifactPath string
+		metaDataPath string
+		installPath  string
 	}{
 		{
 			name: "matching arch",
@@ -343,7 +407,9 @@
 					},
 				}
 			`,
-			expected: "verify_uses_libraries/apk/app_arm64.apk",
+			expected:     "verify_uses_libraries/apk/app_arm64.apk",
+			artifactPath: "prebuilts/apk/app_arm64.apk",
+			installPath:  "/system/app/foo/foo.apk",
 		},
 		{
 			name: "no matching arch",
@@ -362,7 +428,9 @@
 					},
 				}
 			`,
-			expected: "verify_uses_libraries/apk/app.apk",
+			expected:     "verify_uses_libraries/apk/app.apk",
+			artifactPath: "prebuilts/apk/app.apk",
+			installPath:  "/system/app/foo/foo.apk",
 		},
 		{
 			name: "no matching arch without default",
@@ -380,7 +448,9 @@
 					},
 				}
 			`,
-			expected: "",
+			expected:     "",
+			artifactPath: "prebuilts/apk/app_arm.apk",
+			installPath:  "/system/app/foo/foo.apk",
 		},
 	}
 
@@ -393,6 +463,8 @@
 			if variant.Module().Enabled() {
 				t.Error("module should have been disabled, but wasn't")
 			}
+			rule := variant.MaybeRule("genProvenanceMetaData")
+			android.AssertDeepEquals(t, "Provenance metadata is not empty", android.TestingBuildParams{}, rule)
 			continue
 		}
 		jniRuleCommand := variant.Output("jnis-uncompressed/foo.apk").RuleParams.Command
@@ -403,6 +475,11 @@
 		if !strings.HasSuffix(matches[1], test.expected) {
 			t.Errorf("wrong src apk, expected: %q got: %q", test.expected, matches[1])
 		}
+		rule := variant.Rule("genProvenanceMetaData")
+		android.AssertStringEquals(t, "Invalid input", test.artifactPath, rule.Inputs[0].String())
+		android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto", rule.Output.String())
+		android.AssertStringEquals(t, "Invalid args", "foo", rule.Args["module_name"])
+		android.AssertStringEquals(t, "Invalid args", test.installPath, rule.Args["install_path"])
 	}
 }
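
Each test above repeats the same four genProvenanceMetaData assertions; a shared helper along these lines (not part of this change) would express the check once:

```go
// Hypothetical helper; the assertions are exactly those repeated in the tests above.
func assertProvenanceMetaData(t *testing.T, variant android.TestingModule, module, artifact, installPath string) {
	rule := variant.Rule("genProvenanceMetaData")
	android.AssertStringEquals(t, "Invalid input", artifact, rule.Inputs[0].String())
	android.AssertStringEquals(t, "Invalid output",
		"out/soong/.intermediates/provenance_metadata/"+module+"/provenance_metadata.textproto",
		rule.Output.String())
	android.AssertStringEquals(t, "Invalid args", module, rule.Args["module_name"])
	android.AssertStringEquals(t, "Invalid args", installPath, rule.Args["install_path"])
}
```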
 
diff --git a/java/app_test.go b/java/app_test.go
index 16bbec1..6a4508c 100644
--- a/java/app_test.go
+++ b/java/app_test.go
@@ -27,7 +27,6 @@
 	"android/soong/android"
 	"android/soong/cc"
 	"android/soong/dexpreopt"
-	"android/soong/genrule"
 )
 
 // testApp runs tests using the prepareForJavaTest
@@ -2722,116 +2721,6 @@
 	}
 }
 
-func TestEmbedNotice(t *testing.T) {
-	result := android.GroupFixturePreparers(
-		PrepareForTestWithJavaDefaultModules,
-		cc.PrepareForTestWithCcDefaultModules,
-		genrule.PrepareForTestWithGenRuleBuildComponents,
-		android.MockFS{
-			"APP_NOTICE":     nil,
-			"GENRULE_NOTICE": nil,
-			"LIB_NOTICE":     nil,
-			"TOOL_NOTICE":    nil,
-		}.AddToFixture(),
-	).RunTestWithBp(t, `
-		android_app {
-			name: "foo",
-			srcs: ["a.java"],
-			static_libs: ["javalib"],
-			jni_libs: ["libjni"],
-			notice: "APP_NOTICE",
-			embed_notices: true,
-			sdk_version: "current",
-		}
-
-		// No embed_notice flag
-		android_app {
-			name: "bar",
-			srcs: ["a.java"],
-			jni_libs: ["libjni"],
-			notice: "APP_NOTICE",
-			sdk_version: "current",
-		}
-
-		// No NOTICE files
-		android_app {
-			name: "baz",
-			srcs: ["a.java"],
-			embed_notices: true,
-			sdk_version: "current",
-		}
-
-		cc_library {
-			name: "libjni",
-			system_shared_libs: [],
-			stl: "none",
-			notice: "LIB_NOTICE",
-			sdk_version: "current",
-		}
-
-		java_library {
-			name: "javalib",
-			srcs: [
-				":gen",
-			],
-			sdk_version: "current",
-		}
-
-		genrule {
-			name: "gen",
-			tools: ["gentool"],
-			out: ["gen.java"],
-			notice: "GENRULE_NOTICE",
-		}
-
-		java_binary_host {
-			name: "gentool",
-			srcs: ["b.java"],
-			notice: "TOOL_NOTICE",
-		}
-	`)
-
-	// foo has NOTICE files to process, and embed_notices is true.
-	foo := result.ModuleForTests("foo", "android_common")
-	// verify merge notices rule.
-	mergeNotices := foo.Rule("mergeNoticesRule")
-	noticeInputs := mergeNotices.Inputs.Strings()
-	// TOOL_NOTICE should be excluded as it's a host module.
-	if len(mergeNotices.Inputs) != 3 {
-		t.Errorf("number of input notice files: expected = 3, actual = %q", noticeInputs)
-	}
-	if !inList("APP_NOTICE", noticeInputs) {
-		t.Errorf("APP_NOTICE is missing from notice files, %q", noticeInputs)
-	}
-	if !inList("LIB_NOTICE", noticeInputs) {
-		t.Errorf("LIB_NOTICE is missing from notice files, %q", noticeInputs)
-	}
-	if !inList("GENRULE_NOTICE", noticeInputs) {
-		t.Errorf("GENRULE_NOTICE is missing from notice files, %q", noticeInputs)
-	}
-	// aapt2 flags should include -A <NOTICE dir> so that its contents are put in the APK's /assets.
-	res := foo.Output("package-res.apk")
-	aapt2Flags := res.Args["flags"]
-	e := "-A out/soong/.intermediates/foo/android_common/NOTICE"
-	android.AssertStringDoesContain(t, "expected.apkPath", aapt2Flags, e)
-
-	// bar has NOTICE files to process, but embed_notices is not set.
-	bar := result.ModuleForTests("bar", "android_common")
-	res = bar.Output("package-res.apk")
-	aapt2Flags = res.Args["flags"]
-	e = "-A out/soong/.intermediates/bar/android_common/NOTICE"
-	android.AssertStringDoesNotContain(t, "bar shouldn't have the asset dir flag for NOTICE", aapt2Flags, e)
-
-	// baz's embed_notice is true, but it doesn't have any NOTICE files.
-	baz := result.ModuleForTests("baz", "android_common")
-	res = baz.Output("package-res.apk")
-	aapt2Flags = res.Args["flags"]
-	e = "-A out/soong/.intermediates/baz/android_common/NOTICE"
-	if strings.Contains(aapt2Flags, e) {
-		t.Errorf("baz shouldn't have the asset dir flag for NOTICE: %q", e)
-	}
-}
-
 func TestUncompressDex(t *testing.T) {
 	testCases := []struct {
 		name string
@@ -3059,3 +2948,24 @@
 		android.AssertStringDoesContain(t, testCase.name, manifestFixerArgs, "--targetSdkVersion  "+testCase.targetSdkVersionExpected)
 	}
 }
+
+func TestAppMissingCertificateAllowMissingDependencies(t *testing.T) {
+	result := android.GroupFixturePreparers(
+		PrepareForTestWithJavaDefaultModules,
+		android.PrepareForTestWithAllowMissingDependencies,
+		android.PrepareForTestWithAndroidMk,
+	).RunTestWithBp(t, `
+		android_app {
+			name: "foo",
+			srcs: ["a.java"],
+			certificate: ":missing_certificate",
+			sdk_version: "current",
+		}`)
+
+	foo := result.ModuleForTests("foo", "android_common")
+	fooApk := foo.Output("foo.apk")
+	if fooApk.Rule != android.ErrorRule {
+		t.Fatalf("expected ErrorRule for foo.apk, got %s", fooApk.Rule.String())
+	}
+	android.AssertStringDoesContain(t, "expected error rule message", fooApk.Args["error"], "missing dependencies: missing_certificate\n")
+}
diff --git a/java/base.go b/java/base.go
index 42d7733..b925350 100644
--- a/java/base.go
+++ b/java/base.go
@@ -185,12 +185,12 @@
 // constructing a new module.
 type DeviceProperties struct {
 	// If not blank, set to the version of the sdk to compile against.
-	// Defaults to compiling against the current platform.
+	// Defaults to private.
 	// Values are of one of the following forms:
-	// 1) numerical API level or "current"
-	// 2) An SDK kind with an API level: "<sdk kind>_<API level>". See
-	// build/soong/android/sdk_version.go for the complete and up to date list of
-	// SDK kinds. If the SDK kind value is empty, it will be set to public.
+	// 1) numerical API level, "current", "none", or "core_platform"
+	// 2) An SDK kind with an API level: "<sdk kind>_<API level>"
+	// See build/soong/android/sdk_version.go for the complete and up to date list of SDK kinds.
+	// If the SDK kind is empty, it will be set to public.
 	Sdk_version *string
 
 	// if not blank, set the minimum version of the sdk that the compiled artifacts will run against.
@@ -207,7 +207,7 @@
 
 	// Whether to compile against the platform APIs instead of an SDK.
 	// If true, then sdk_version must be empty. The value of this field
-	// is ignored when module's type isn't android_app.
+	// is ignored when the module's type isn't android_app, android_test, or android_test_helper_app.
 	Platform_apis *bool
 
 	Aidl struct {
@@ -227,6 +227,12 @@
 		// whether to generate Binder#GetTransaction name method.
 		Generate_get_transaction_name *bool
 
+		// whether all interfaces should be annotated with required permissions.
+		Enforce_permissions *bool
+
+		// allowlist for interfaces that (temporarily) do not require annotation for permissions.
+		Enforce_permissions_exceptions []string `android:"path"`
+
 		// list of flags that will be passed to the AIDL compiler
 		Flags []string
 	}
@@ -418,7 +424,8 @@
 	outputFile       android.Path
 	extraOutputFiles android.Paths
 
-	exportAidlIncludeDirs android.Paths
+	exportAidlIncludeDirs     android.Paths
+	ignoredAidlPermissionList android.Paths
 
 	logtagsSrcs android.Paths
 
@@ -474,6 +481,8 @@
 	sdkVersion    android.SdkSpec
 	minSdkVersion android.SdkSpec
 	maxSdkVersion android.SdkSpec
+
+	sourceExtensions []string
 }
 
 func (j *Module) CheckStableSdkVersion(ctx android.BaseModuleContext) error {
@@ -772,6 +781,17 @@
 	return hasSrcExt(j.properties.Srcs, ext)
 }
 
+func (j *Module) individualAidlFlags(ctx android.ModuleContext, aidlFile android.Path) string {
+	var flags string
+
+	if Bool(j.deviceProperties.Aidl.Enforce_permissions) {
+		if !android.InList(aidlFile.String(), j.ignoredAidlPermissionList.Strings()) {
+			flags = "-Wmissing-permission-annotation -Werror"
+		}
+	}
+	return flags
+}
+
 func (j *Module) aidlFlags(ctx android.ModuleContext, aidlPreprocess android.OptionalPath,
 	aidlIncludeDirs android.Paths) (string, android.Paths) {
 
@@ -814,6 +834,11 @@
 		flags = append(flags, "--transaction_names")
 	}
 
+	if Bool(j.deviceProperties.Aidl.Enforce_permissions) {
+		exceptions := j.deviceProperties.Aidl.Enforce_permissions_exceptions
+		j.ignoredAidlPermissionList = android.PathsForModuleSrcExcludes(ctx, exceptions, nil)
+	}
+
 	aidlMinSdkVersion := j.MinSdkVersion(ctx).ApiLevel.String()
 	flags = append(flags, "--min_sdk_version="+aidlMinSdkVersion)
 
@@ -839,7 +864,7 @@
 		}
 		errorProneFlags = append(errorProneFlags, j.properties.Errorprone.Javacflags...)
 
-		flags.errorProneExtraJavacFlags = "${config.ErrorProneFlags} " +
+		flags.errorProneExtraJavacFlags = "${config.ErrorProneHeapFlags} ${config.ErrorProneFlags} " +
 			"'" + strings.Join(errorProneFlags, " ") + "'"
 		flags.errorProneProcessorPath = classpath(android.PathsForSource(ctx, config.ErrorProneClasspath))
 	}
@@ -847,6 +872,7 @@
 	// classpath
 	flags.bootClasspath = append(flags.bootClasspath, deps.bootClasspath...)
 	flags.classpath = append(flags.classpath, deps.classpath...)
+	flags.dexClasspath = append(flags.dexClasspath, deps.dexClasspath...)
 	flags.java9Classpath = append(flags.java9Classpath, deps.java9Classpath...)
 	flags.processorPath = append(flags.processorPath, deps.processorPath...)
 	flags.errorProneProcessorPath = append(flags.errorProneProcessorPath, deps.errorProneProcessorPath...)
@@ -959,6 +985,14 @@
 	return flags
 }
 
+func (j *Module) AddJSONData(d *map[string]interface{}) {
+	(&j.ModuleBase).AddJSONData(d)
+	(*d)["Java"] = map[string]interface{}{
+		"SourceExtensions": j.sourceExtensions,
+	}
+}
+
 func (j *Module) compile(ctx android.ModuleContext, aaptSrcJar android.Path) {
 	j.exportAidlIncludeDirs = android.PathsForModuleSrc(ctx, j.deviceProperties.Aidl.Export_include_dirs)
 
@@ -970,6 +1004,12 @@
 	}
 
 	srcFiles := android.PathsForModuleSrcExcludes(ctx, j.properties.Srcs, j.properties.Exclude_srcs)
+	j.sourceExtensions = []string{}
+	for _, ext := range []string{".kt", ".proto", ".aidl", ".java", ".logtags"} {
+		if hasSrcExt(srcFiles.Strings(), ext) {
+			j.sourceExtensions = append(j.sourceExtensions, ext)
+		}
+	}
 	if hasSrcExt(srcFiles.Strings(), ".proto") {
 		flags = protoFlags(ctx, &j.properties, &j.protoProperties, flags)
 	}
@@ -1008,12 +1048,24 @@
 		}
 	}
 
+	// We don't currently run annotation processors in turbine, which means we can't use
+	// turbine-generated header jars when an annotation processor that generates API is enabled.
+	// One exception (handled further below) is when kotlin sources are enabled, in which case
+	// turbine is used to run all of the annotation processors.
+	disableTurbine := deps.disableTurbine
+
 	// Collect .java files for AIDEGen
 	j.expandIDEInfoCompiledSrcs = append(j.expandIDEInfoCompiledSrcs, uniqueSrcFiles.Strings()...)
 
 	var kotlinJars android.Paths
+	var kotlinHeaderJars android.Paths
 
 	if srcFiles.HasExt(".kt") {
+		// When using kotlin sources, turbine is used to generate annotation processor sources,
+		// including for annotation processors that generate API, so we can use turbine for
+		// java sources too.
+		disableTurbine = false
+
 		// user defined kotlin flags.
 		kotlincFlags := j.properties.Kotlincflags
 		CheckKotlincFlags(ctx, kotlincFlags)
@@ -1051,6 +1103,8 @@
 		flags.classpath = append(flags.classpath, deps.kotlinStdlib...)
 		flags.classpath = append(flags.classpath, deps.kotlinAnnotations...)
 
+		flags.dexClasspath = append(flags.dexClasspath, deps.kotlinAnnotations...)
+
 		flags.kotlincClasspath = append(flags.kotlincClasspath, flags.bootClasspath...)
 		flags.kotlincClasspath = append(flags.kotlincClasspath, flags.classpath...)
 
@@ -1067,18 +1121,24 @@
 		}
 
 		kotlinJar := android.PathForModuleOut(ctx, "kotlin", jarName)
-		kotlinCompile(ctx, kotlinJar, kotlinSrcFiles, kotlinCommonSrcFiles, srcJars, flags)
+		kotlinHeaderJar := android.PathForModuleOut(ctx, "kotlin_headers", jarName)
+		kotlinCompile(ctx, kotlinJar, kotlinHeaderJar, kotlinSrcFiles, kotlinCommonSrcFiles, srcJars, flags)
 		if ctx.Failed() {
 			return
 		}
 
 		// Make javac rule depend on the kotlinc rule
-		flags.classpath = append(flags.classpath, kotlinJar)
+		flags.classpath = append(classpath{kotlinHeaderJar}, flags.classpath...)
 
 		kotlinJars = append(kotlinJars, kotlinJar)
+		kotlinHeaderJars = append(kotlinHeaderJars, kotlinHeaderJar)
+
 		// Jar kotlin classes into the final jar after javac
 		if BoolDefault(j.properties.Static_kotlin_stdlib, true) {
 			kotlinJars = append(kotlinJars, deps.kotlinStdlib...)
+			kotlinHeaderJars = append(kotlinHeaderJars, deps.kotlinStdlib...)
+		} else {
+			flags.dexClasspath = append(flags.dexClasspath, deps.kotlinStdlib...)
 		}
 	}
 
@@ -1090,7 +1150,7 @@
 
 	enableSharding := false
 	var headerJarFileWithoutDepsOrJarjar android.Path
-	if ctx.Device() && !ctx.Config().IsEnvFalse("TURBINE_ENABLED") && !deps.disableTurbine {
+	if ctx.Device() && !ctx.Config().IsEnvFalse("TURBINE_ENABLED") && !disableTurbine {
 		if j.properties.Javac_shard_size != nil && *(j.properties.Javac_shard_size) > 0 {
 			enableSharding = true
 			// Formerly, there was a check here that prevented annotation processors
@@ -1100,7 +1160,7 @@
 			// with sharding enabled. See: b/77284273.
 		}
 		headerJarFileWithoutDepsOrJarjar, j.headerJarFile =
-			j.compileJavaHeader(ctx, uniqueSrcFiles, srcJars, deps, flags, jarName, kotlinJars)
+			j.compileJavaHeader(ctx, uniqueSrcFiles, srcJars, deps, flags, jarName, kotlinHeaderJars)
 		if ctx.Failed() {
 			return
 		}
@@ -1634,6 +1694,8 @@
 		dpInfo.Jarjar_rules = append(dpInfo.Jarjar_rules, j.expandJarjarRules.String())
 	}
 	dpInfo.Paths = append(dpInfo.Paths, j.modulePaths...)
+	dpInfo.Static_libs = append(dpInfo.Static_libs, j.properties.Static_libs...)
+	dpInfo.Libs = append(dpInfo.Libs, j.properties.Libs...)
 }
 
 func (j *Module) CompilerDeps() []string {
@@ -1803,6 +1865,7 @@
 		} else if sdkDep.useFiles {
 			// sdkDep.jar is actually equivalent to turbine header.jar.
 			deps.classpath = append(deps.classpath, sdkDep.jars...)
+			deps.dexClasspath = append(deps.dexClasspath, sdkDep.jars...)
 			deps.aidlPreprocess = sdkDep.aidl
 		} else {
 			deps.aidlPreprocess = sdkDep.aidl
@@ -1827,7 +1890,9 @@
 		if dep, ok := module.(SdkLibraryDependency); ok {
 			switch tag {
 			case libTag:
-				deps.classpath = append(deps.classpath, dep.SdkHeaderJars(ctx, j.SdkVersion(ctx))...)
+				depHeaderJars := dep.SdkHeaderJars(ctx, j.SdkVersion(ctx))
+				deps.classpath = append(deps.classpath, depHeaderJars...)
+				deps.dexClasspath = append(deps.dexClasspath, depHeaderJars...)
 			case staticLibTag:
 				ctx.ModuleErrorf("dependency on java_sdk_library %q can only be in libs", otherName)
 			}
@@ -1846,6 +1911,7 @@
 				deps.bootClasspath = append(deps.bootClasspath, dep.HeaderJars...)
 			case libTag, instrumentationForTag:
 				deps.classpath = append(deps.classpath, dep.HeaderJars...)
+				deps.dexClasspath = append(deps.dexClasspath, dep.HeaderJars...)
 				deps.aidlIncludeDirs = append(deps.aidlIncludeDirs, dep.AidlIncludeDirs...)
 				addPlugins(&deps, dep.ExportedPlugins, dep.ExportedPluginClasses...)
 				deps.disableTurbine = deps.disableTurbine || dep.ExportedPluginDisableTurbine
@@ -1913,6 +1979,7 @@
 			case libTag:
 				checkProducesJars(ctx, dep)
 				deps.classpath = append(deps.classpath, dep.Srcs()...)
+				deps.dexClasspath = append(deps.dexClasspath, dep.Srcs()...)
 			case staticLibTag:
 				checkProducesJars(ctx, dep)
 				deps.classpath = append(deps.classpath, dep.Srcs()...)
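
The classpath/dexClasspath split added throughout this hunk follows one rule: `libs` contribute header jars to both the javac and the d8/r8 classpaths, while `static_libs` only reach javac because their classes are merged into the program jar before dexing. A condensed sketch of that rule (not the actual dependency walk):

```go
// Sketch of the split described above; libTag/staticLibTag and the deps struct
// are the real names, but this helper does not exist in the change.
func addHeaderJars(d *deps, tag blueprint.DependencyTag, headerJars android.Paths) {
	switch tag {
	case libTag:
		// Shared deps stay visible to both javac and d8/r8.
		d.classpath = append(d.classpath, headerJars...)
		d.dexClasspath = append(d.dexClasspath, headerJars...)
	case staticLibTag:
		// Static deps are merged into the program jar, so d8/r8 must not
		// also see them as --lib/-libraryjars.
		d.classpath = append(d.classpath, headerJars...)
	}
}
```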
diff --git a/java/bootclasspath_fragment.go b/java/bootclasspath_fragment.go
index 5fe409e..c3a5d5f 100644
--- a/java/bootclasspath_fragment.go
+++ b/java/bootclasspath_fragment.go
@@ -16,6 +16,7 @@
 
 import (
 	"fmt"
+	"io"
 	"path/filepath"
 	"reflect"
 	"strings"
@@ -139,7 +140,7 @@
 	BootclasspathFragmentsDepsProperties
 }
 
-type SourceOnlyBootclasspathProperties struct {
+type HiddenApiPackageProperties struct {
 	Hidden_api struct {
 		// Contains prefixes of a package hierarchy that is provided solely by this
 		// bootclasspath_fragment.
@@ -148,6 +149,14 @@
 		// hidden API flags. See split_packages property for more details.
 		Package_prefixes []string
 
+		// A list of individual packages that are provided solely by this
+		// bootclasspath_fragment but which cannot be listed in package_prefixes
+		// because there are sub-packages which are provided by other modules.
+		//
+		// This should only be used for legacy packages. New packages should be
+		// covered by a package prefix.
+		Single_packages []string
+
 		// The list of split packages provided by this bootclasspath_fragment.
 		//
 		// A split package is one that contains classes which are provided by multiple
@@ -207,6 +216,11 @@
 	}
 }
 
+type SourceOnlyBootclasspathProperties struct {
+	HiddenApiPackageProperties
+	Coverage HiddenApiPackageProperties
+}
+
 type BootclasspathFragmentModule struct {
 	android.ModuleBase
 	android.ApexModuleBase
@@ -270,6 +284,12 @@
 				ctx.PropertyErrorf("coverage", "error trying to append coverage specific properties: %s", err)
 				return
 			}
+
+			err = proptools.AppendProperties(&m.sourceOnlyProperties.HiddenApiPackageProperties, &m.sourceOnlyProperties.Coverage, nil)
+			if err != nil {
+				ctx.PropertyErrorf("coverage", "error trying to append hidden api coverage specific properties: %s", err)
+				return
+			}
 		}
 
 		// Initialize the contents property from the image_name.
@@ -588,6 +608,19 @@
 			// Provide the apex content info.
 			b.provideApexContentInfo(ctx, imageConfig, hiddenAPIOutput, bootImageFilesByArch)
 		}
+	} else {
+		// Versioned fragments are not needed by make.
+		b.HideFromMake()
+	}
+
+	// In order for information about bootclasspath_fragment modules to be added to module-info.json,
+	// an entry has to be output to Make. As bootclasspath_fragment modules are part of an APEX there
+	// can be multiple variants, but only one can be output to Make and it does not really matter
+	// which one. The default/platform variant is the first (ctx.PrimaryModule()) and is usually
+	// hidden from Make, so this just picks the last variant (ctx.FinalModule()).
+	if ctx.Module() != ctx.FinalModule() {
+		b.HideFromMake()
 	}
 }
 
@@ -717,7 +750,8 @@
 	// TODO(b/192868581): Remove once the source and prebuilts provide a signature patterns file of
 	//  their own.
 	if output.SignaturePatternsPath == nil {
-		output.SignaturePatternsPath = buildRuleSignaturePatternsFile(ctx, output.AllFlagsPath, []string{"*"}, nil)
+		output.SignaturePatternsPath = buildRuleSignaturePatternsFile(
+			ctx, output.AllFlagsPath, []string{"*"}, nil, nil)
 	}
 
 	// Initialize a HiddenAPIInfo structure.
@@ -792,11 +826,13 @@
 	// signature patterns.
 	splitPackages := b.sourceOnlyProperties.Hidden_api.Split_packages
 	packagePrefixes := b.sourceOnlyProperties.Hidden_api.Package_prefixes
-	if splitPackages != nil || packagePrefixes != nil {
+	singlePackages := b.sourceOnlyProperties.Hidden_api.Single_packages
+	if splitPackages != nil || packagePrefixes != nil || singlePackages != nil {
 		if splitPackages == nil {
 			splitPackages = []string{"*"}
 		}
-		output.SignaturePatternsPath = buildRuleSignaturePatternsFile(ctx, output.AllFlagsPath, splitPackages, packagePrefixes)
+		output.SignaturePatternsPath = buildRuleSignaturePatternsFile(
+			ctx, output.AllFlagsPath, splitPackages, packagePrefixes, singlePackages)
 	}
 
 	return output
@@ -849,7 +885,22 @@
 }
 
 func (b *BootclasspathFragmentModule) AndroidMkEntries() []android.AndroidMkEntries {
-	var entriesList []android.AndroidMkEntries
+	// Use the generated classpath proto as the output.
+	outputFile := b.outputFilepath
+	// Create a fake entry that will cause this to be added to the module-info.json file.
+	entriesList := []android.AndroidMkEntries{{
+		Class:      "FAKE",
+		OutputFile: android.OptionalPathForPath(outputFile),
+		Include:    "$(BUILD_PHONY_PACKAGE)",
+		ExtraFooters: []android.AndroidMkExtraFootersFunc{
+			func(w io.Writer, name, prefix, moduleDir string) {
+				// Allow the bootclasspath_fragment to be built by simply passing its name on the command
+				// line.
+				fmt.Fprintln(w, ".PHONY:", b.Name())
+				fmt.Fprintln(w, b.Name()+":", outputFile.String())
+			},
+		},
+	}}
 	for _, install := range b.bootImageDeviceInstalls {
 		entriesList = append(entriesList, install.ToMakeEntries())
 	}
diff --git a/java/builder.go b/java/builder.go
index e64a61f..c0fadd4 100644
--- a/java/builder.go
+++ b/java/builder.go
@@ -131,31 +131,28 @@
 
 	turbine, turbineRE = pctx.RemoteStaticRules("turbine",
 		blueprint.RuleParams{
-			Command: `rm -rf "$outDir" && mkdir -p "$outDir" && ` +
-				`$reTemplate${config.JavaCmd} ${config.JavaVmFlags} -jar ${config.TurbineJar} --output $out.tmp ` +
-				`--temp_dir "$outDir" --sources @$out.rsp  --source_jars $srcJars ` +
+			Command: `$reTemplate${config.JavaCmd} ${config.JavaVmFlags} -jar ${config.TurbineJar} $outputFlags ` +
+				`--sources @$out.rsp  --source_jars $srcJars ` +
 				`--javacopts ${config.CommonJdkFlags} ` +
-				`$javacFlags -source $javaVersion -target $javaVersion -- $bootClasspath $classpath && ` +
-				`${config.Ziptime} $out.tmp && ` +
-				`(if cmp -s $out.tmp $out ; then rm $out.tmp ; else mv $out.tmp $out ; fi )`,
+				`$javacFlags -source $javaVersion -target $javaVersion -- $turbineFlags && ` +
+				`(for o in $outputs; do if cmp -s $${o}.tmp $${o} ; then rm $${o}.tmp ; else mv $${o}.tmp $${o} ; fi; done )`,
 			CommandDeps: []string{
 				"${config.TurbineJar}",
 				"${config.JavaCmd}",
-				"${config.Ziptime}",
 			},
 			Rspfile:        "$out.rsp",
 			RspfileContent: "$in",
 			Restat:         true,
 		},
 		&remoteexec.REParams{Labels: map[string]string{"type": "tool", "name": "turbine"},
-			ExecStrategy:      "${config.RETurbineExecStrategy}",
-			Inputs:            []string{"${config.TurbineJar}", "${out}.rsp", "$implicits"},
-			RSPFiles:          []string{"${out}.rsp"},
-			OutputFiles:       []string{"$out.tmp"},
-			OutputDirectories: []string{"$outDir"},
-			ToolchainInputs:   []string{"${config.JavaCmd}"},
-			Platform:          map[string]string{remoteexec.PoolKey: "${config.REJavaPool}"},
-		}, []string{"javacFlags", "bootClasspath", "classpath", "srcJars", "outDir", "javaVersion"}, []string{"implicits"})
+			ExecStrategy:    "${config.RETurbineExecStrategy}",
+			Inputs:          []string{"${config.TurbineJar}", "${out}.rsp", "$implicits"},
+			RSPFiles:        []string{"${out}.rsp"},
+			OutputFiles:     []string{"$rbeOutputs"},
+			ToolchainInputs: []string{"${config.JavaCmd}"},
+			Platform:        map[string]string{remoteexec.PoolKey: "${config.REJavaPool}"},
+		},
+		[]string{"javacFlags", "turbineFlags", "outputFlags", "javaVersion", "outputs", "rbeOutputs", "srcJars"}, []string{"implicits"})
 
 	jar, jarRE = pctx.RemoteStaticRules("jar",
 		blueprint.RuleParams{
@@ -247,16 +244,33 @@
 }
 
 type javaBuilderFlags struct {
-	javacFlags     string
-	bootClasspath  classpath
-	classpath      classpath
+	javacFlags string
+
+	// bootClasspath is the list of jars that form the boot classpath (generally the java.* and
+	// android.* classes) for tools that still use it.  javac targeting 1.9 or higher uses
+	// systemModules and java9Classpath instead.
+	bootClasspath classpath
+
+	// classpath is the list of jars that form the classpath for javac and kotlinc rules.  It
+	// contains header jars for all static and non-static dependencies.
+	classpath classpath
+
+	// dexClasspath is the list of jars that form the classpath for d8 and r8 rules.  It contains
+	// header jars for all non-static dependencies.  Static dependencies have already been
+	// combined into the program jar.
+	dexClasspath classpath
+
+	// java9Classpath is the list of jars that will be added to the classpath when targeting
+	// 1.9 or higher.  It generally contains the android.* classes, while the java.* classes
+	// are provided by systemModules.
 	java9Classpath classpath
-	processorPath  classpath
-	processors     []string
-	systemModules  *systemModules
-	aidlFlags      string
-	aidlDeps       android.Paths
-	javaVersion    javaVersion
+
+	processorPath classpath
+	processors    []string
+	systemModules *systemModules
+	aidlFlags     string
+	aidlDeps      android.Paths
+	javaVersion   javaVersion
 
 	errorProneExtraJavacFlags string
 	errorProneProcessorPath   classpath
@@ -341,11 +355,8 @@
 		})
 }
 
-func TransformJavaToHeaderClasses(ctx android.ModuleContext, outputFile android.WritablePath,
-	srcFiles, srcJars android.Paths, flags javaBuilderFlags) {
-
+func turbineFlags(ctx android.ModuleContext, flags javaBuilderFlags) (string, android.Paths) {
 	var deps android.Paths
-	deps = append(deps, srcJars...)
 
 	classpath := flags.classpath
 
@@ -367,20 +378,31 @@
 	}
 
 	deps = append(deps, classpath...)
-	deps = append(deps, flags.processorPath...)
+	turbineFlags := bootClasspath + " " + classpath.FormTurbineClassPath("--classpath ")
+
+	return turbineFlags, deps
+}
+
+func TransformJavaToHeaderClasses(ctx android.ModuleContext, outputFile android.WritablePath,
+	srcFiles, srcJars android.Paths, flags javaBuilderFlags) {
+
+	turbineFlags, deps := turbineFlags(ctx, flags)
+
+	deps = append(deps, srcJars...)
 
 	rule := turbine
 	args := map[string]string{
-		"javacFlags":    flags.javacFlags,
-		"bootClasspath": bootClasspath,
-		"srcJars":       strings.Join(srcJars.Strings(), " "),
-		"classpath":     classpath.FormTurbineClassPath("--classpath "),
-		"outDir":        android.PathForModuleOut(ctx, "turbine", "classes").String(),
-		"javaVersion":   flags.javaVersion.String(),
+		"javacFlags":   flags.javacFlags,
+		"srcJars":      strings.Join(srcJars.Strings(), " "),
+		"javaVersion":  flags.javaVersion.String(),
+		"turbineFlags": turbineFlags,
+		"outputFlags":  "--output " + outputFile.String() + ".tmp",
+		"outputs":      outputFile.String(),
 	}
 	if ctx.Config().UseRBE() && ctx.Config().IsEnvTrue("RBE_TURBINE") {
 		rule = turbineRE
 		args["implicits"] = strings.Join(deps.Strings(), ",")
+		args["rbeOutputs"] = outputFile.String() + ".tmp"
 	}
 	ctx.Build(pctx, android.BuildParams{
 		Rule:        rule,
@@ -392,6 +414,47 @@
 	})
 }
 
+// TurbineApt produces a rule to run annotation processors using turbine.
+func TurbineApt(ctx android.ModuleContext, outputSrcJar, outputResJar android.WritablePath,
+	srcFiles, srcJars android.Paths, flags javaBuilderFlags) {
+
+	turbineFlags, deps := turbineFlags(ctx, flags)
+
+	deps = append(deps, srcJars...)
+
+	deps = append(deps, flags.processorPath...)
+	turbineFlags += " " + flags.processorPath.FormTurbineClassPath("--processorpath ")
+	turbineFlags += " --processors " + strings.Join(flags.processors, " ")
+
+	outputs := android.WritablePaths{outputSrcJar, outputResJar}
+	outputFlags := "--gensrc_output " + outputSrcJar.String() + ".tmp " +
+		"--resource_output " + outputResJar.String() + ".tmp"
+
+	rule := turbine
+	args := map[string]string{
+		"javacFlags":   flags.javacFlags,
+		"srcJars":      strings.Join(srcJars.Strings(), " "),
+		"javaVersion":  flags.javaVersion.String(),
+		"turbineFlags": turbineFlags,
+		"outputFlags":  outputFlags,
+		"outputs":      strings.Join(outputs.Strings(), " "),
+	}
+	if ctx.Config().UseRBE() && ctx.Config().IsEnvTrue("RBE_TURBINE") {
+		rule = turbineRE
+		args["implicits"] = strings.Join(deps.Strings(), ",")
+		args["rbeOutputs"] = outputSrcJar.String() + ".tmp," + outputResJar.String() + ".tmp"
+	}
+	ctx.Build(pctx, android.BuildParams{
+		Rule:            rule,
+		Description:     "turbine apt",
+		Output:          outputs[0],
+		ImplicitOutputs: outputs[1:],
+		Inputs:          srcFiles,
+		Implicits:       deps,
+		Args:            args,
+	})
+}
+
 // transformJavaToClasses takes source files and converts them to a jar containing .class files.
 // srcFiles is a list of paths to sources, srcJars is a list of paths to jar files that contain
 // sources.  flags contains various command line flags to be passed to the compiler.
@@ -653,6 +716,6 @@
 	} else if forceEmpty {
 		return `--bootclasspath ""`, nil
 	} else {
-		return "", nil
+		return "--system ${config.JavaHome}", nil
 	}
 }
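
TurbineApt reuses the same turbine rule as header-jar generation and only swaps the $outputFlags/$outputs values, so annotation-processing outputs get the same cmp/restat behaviour. A hypothetical caller (the annotation-processing module code is not part of this diff) would look roughly like:

```go
// Hypothetical caller; the output locations are illustrative.
func runTurbineApt(ctx android.ModuleContext, srcFiles, srcJars android.Paths,
	flags javaBuilderFlags) (android.WritablePath, android.WritablePath) {

	srcJar := android.PathForModuleOut(ctx, "turbine-apt", "anno-sources.srcjar")
	resJar := android.PathForModuleOut(ctx, "turbine-apt", "anno-res.jar")
	// Runs the annotation processors via turbine; unchanged outputs keep their
	// timestamps thanks to the cmp/mv loop in the rule above.
	TurbineApt(ctx, srcJar, resJar, srcFiles, srcJars, flags)
	return srcJar, resJar
}
```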
diff --git a/java/config/config.go b/java/config/config.go
index ea2f934..46c91a2 100644
--- a/java/config/config.go
+++ b/java/config/config.go
@@ -26,7 +26,8 @@
 )
 
 var (
-	pctx = android.NewPackageContext("android/soong/java/config")
+	pctx         = android.NewPackageContext("android/soong/java/config")
+	exportedVars = android.NewExportedVariables(pctx)
 
 	LegacyCorePlatformBootclasspathLibraries = []string{"legacy.core.platform.api.stubs", "core-lambda-stubs"}
 	LegacyCorePlatformSystemModules          = "legacy-core-platform-api-stubs-system-modules"
@@ -50,27 +51,53 @@
 		"core-icu4j",
 		"core-oj",
 		"core-libart",
-		// TODO: Could this be all updatable bootclasspath jars?
-		"updatable-media",
-		"framework-mediaprovider",
-		"framework-sdkextensions",
-		"android.net.ipsec.ike",
 	}
 )
 
-const (
-	JavaVmFlags  = `-XX:OnError="cat hs_err_pid%p.log" -XX:CICompilerCount=6 -XX:+UseDynamicNumberOfGCThreads`
-	JavacVmFlags = `-J-XX:OnError="cat hs_err_pid%p.log" -J-XX:CICompilerCount=6 -J-XX:+UseDynamicNumberOfGCThreads -J-XX:+TieredCompilation -J-XX:TieredStopAtLevel=1`
+var (
+	JavacVmFlags    = strings.Join(javacVmFlagsList, " ")
+	javaVmFlagsList = []string{
+		`-XX:OnError="cat hs_err_pid%p.log"`,
+		"-XX:CICompilerCount=6",
+		"-XX:+UseDynamicNumberOfGCThreads",
+	}
+	javacVmFlagsList = []string{
+		`-J-XX:OnError="cat hs_err_pid%p.log"`,
+		"-J-XX:CICompilerCount=6",
+		"-J-XX:+UseDynamicNumberOfGCThreads",
+		"-J-XX:+TieredCompilation",
+		"-J-XX:TieredStopAtLevel=1",
+	}
 )
 
 func init() {
 	pctx.Import("github.com/google/blueprint/bootstrap")
 
-	pctx.StaticVariable("JavacHeapSize", "2048M")
-	pctx.StaticVariable("JavacHeapFlags", "-J-Xmx${JavacHeapSize}")
-	pctx.StaticVariable("DexFlags", "-JXX:OnError='cat hs_err_pid%p.log' -JXX:CICompilerCount=6 -JXX:+UseDynamicNumberOfGCThreads")
+	exportedVars.ExportStringStaticVariable("JavacHeapSize", "2048M")
+	exportedVars.ExportStringStaticVariable("JavacHeapFlags", "-J-Xmx${JavacHeapSize}")
 
-	pctx.StaticVariable("CommonJdkFlags", strings.Join([]string{
+	// ErrorProne can use significantly more memory than javac alone, so give it a larger heap
+	// size (b/221480398).
+	exportedVars.ExportStringStaticVariable("ErrorProneHeapSize", "4096M")
+	exportedVars.ExportStringStaticVariable("ErrorProneHeapFlags", "-J-Xmx${ErrorProneHeapSize}")
+
+	// D8 invocations are shorter lived, so we restrict their JIT tiering relative to R8.
+	// Note that the `-JXX` prefix syntax is specific to the R8/D8 invocation wrappers.
+	exportedVars.ExportStringListStaticVariable("D8Flags", []string{
+		`-JXX:OnError="cat hs_err_pid%p.log"`,
+		"-JXX:CICompilerCount=6",
+		"-JXX:+UseDynamicNumberOfGCThreads",
+		"-JXX:+TieredCompilation",
+		"-JXX:TieredStopAtLevel=1",
+	})
+
+	exportedVars.ExportStringListStaticVariable("R8Flags", []string{
+		`-JXX:OnError="cat hs_err_pid%p.log"`,
+		"-JXX:CICompilerCount=6",
+		"-JXX:+UseDynamicNumberOfGCThreads",
+	})
+
+	exportedVars.ExportStringListStaticVariable("CommonJdkFlags", []string{
 		`-Xmaxerrs 9999999`,
 		`-encoding UTF-8`,
 		`-sourcepath ""`,
@@ -84,10 +111,10 @@
 
 		// b/65004097: prevent using java.lang.invoke.StringConcatFactory when using -target 1.9
 		`-XDstringConcat=inline`,
-	}, " "))
+	})
 
-	pctx.StaticVariable("JavaVmFlags", JavaVmFlags)
-	pctx.StaticVariable("JavacVmFlags", JavacVmFlags)
+	exportedVars.ExportStringListStaticVariable("JavaVmFlags", javaVmFlagsList)
+	exportedVars.ExportStringListStaticVariable("JavacVmFlags", javacVmFlagsList)
 
 	pctx.VariableConfigMethod("hostPrebuiltTag", android.Config.PrebuiltOS)
 
@@ -99,7 +126,12 @@
 		if override := ctx.Config().Getenv("OVERRIDE_JLINK_VERSION_NUMBER"); override != "" {
 			return override
 		}
-		return "11"
+		switch ctx.Config().Getenv("EXPERIMENTAL_USE_OPENJDK17_TOOLCHAIN") {
+		case "true":
+			return "17"
+		default:
+			return "11"
+		}
 	})
 
 	pctx.SourcePathVariable("JavaToolchain", "${JavaHome}/bin")
@@ -178,6 +210,10 @@
 	hostJNIToolVariableWithSdkToolsPrebuilt("SignapkJniLibrary", "libconscrypt_openjdk_jni")
 }
 
+func BazelJavaToolchainVars(config android.Config) string {
+	return android.BazelToolchainVars(config, exportedVars)
+}
+
 func hostBinToolVariableWithSdkToolsPrebuilt(name, tool string) {
 	pctx.VariableFunc(name, func(ctx android.PackageVarContext) string {
 		if ctx.Config().AlwaysUsePrebuiltSdks() {
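
The exportedVars helpers used above register a value twice: as a ninja variable on pctx and in the set that BazelJavaToolchainVars later serializes for Bazel. A minimal sketch of the pattern with a made-up variable name:

```go
// "ExampleFlags" is a made-up name; only the exportedVars call pattern is real.
func init() {
	exportedVars.ExportStringListStaticVariable("ExampleFlags", []string{
		"-Xexample",
	})
	// After this, build rules can reference ${ExampleFlags} and the same value
	// is included in the BazelJavaToolchainVars(config) output.
}
```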
diff --git a/java/config/error_prone.go b/java/config/error_prone.go
index 48681b5..5f853c8 100644
--- a/java/config/error_prone.go
+++ b/java/config/error_prone.go
@@ -16,8 +16,6 @@
 
 import (
 	"strings"
-
-	"android/soong/android"
 )
 
 var (
@@ -31,23 +29,23 @@
 )
 
 // Wrapper that grabs value of val late so it can be initialized by a later module's init function
-func errorProneVar(name string, val *[]string, sep string) {
-	pctx.VariableFunc(name, func(android.PackageVarContext) string {
+func errorProneVar(val *[]string, sep string) func() string {
+	return func() string {
 		return strings.Join(*val, sep)
-	})
+	}
 }
 
 func init() {
-	errorProneVar("ErrorProneClasspath", &ErrorProneClasspath, ":")
-	errorProneVar("ErrorProneChecksError", &ErrorProneChecksError, " ")
-	errorProneVar("ErrorProneChecksWarning", &ErrorProneChecksWarning, " ")
-	errorProneVar("ErrorProneChecksDefaultDisabled", &ErrorProneChecksDefaultDisabled, " ")
-	errorProneVar("ErrorProneChecksOff", &ErrorProneChecksOff, " ")
-	errorProneVar("ErrorProneFlags", &ErrorProneFlags, " ")
-	pctx.StaticVariable("ErrorProneChecks", strings.Join([]string{
+	exportedVars.ExportVariableFuncVariable("ErrorProneClasspath", errorProneVar(&ErrorProneClasspath, ":"))
+	exportedVars.ExportVariableFuncVariable("ErrorProneChecksError", errorProneVar(&ErrorProneChecksError, " "))
+	exportedVars.ExportVariableFuncVariable("ErrorProneChecksWarning", errorProneVar(&ErrorProneChecksWarning, " "))
+	exportedVars.ExportVariableFuncVariable("ErrorProneChecksDefaultDisabled", errorProneVar(&ErrorProneChecksDefaultDisabled, " "))
+	exportedVars.ExportVariableFuncVariable("ErrorProneChecksOff", errorProneVar(&ErrorProneChecksOff, " "))
+	exportedVars.ExportVariableFuncVariable("ErrorProneFlags", errorProneVar(&ErrorProneFlags, " "))
+	exportedVars.ExportStringListStaticVariable("ErrorProneChecks", []string{
 		"${ErrorProneChecksOff}",
 		"${ErrorProneChecksError}",
 		"${ErrorProneChecksWarning}",
 		"${ErrorProneChecksDefaultDisabled}",
-	}, " "))
+	})
 }
diff --git a/java/config/kotlin.go b/java/config/kotlin.go
index 6cb61f3..fc63f4d 100644
--- a/java/config/kotlin.go
+++ b/java/config/kotlin.go
@@ -34,6 +34,7 @@
 	pctx.SourcePathVariable("KotlinKaptJar", "external/kotlinc/lib/kotlin-annotation-processing.jar")
 	pctx.SourcePathVariable("KotlinAnnotationJar", "external/kotlinc/lib/annotations-13.0.jar")
 	pctx.SourcePathVariable("KotlinStdlibJar", KotlinStdlibJar)
+	pctx.SourcePathVariable("KotlinAbiGenPluginJar", "external/kotlinc/lib/jvm-abi-gen.jar")
 
 	// These flags silence "Illegal reflective access" warnings when running kapt in OpenJDK9+
 	pctx.StaticVariable("KaptSuppressJDK9Warnings", strings.Join([]string{
@@ -47,4 +48,9 @@
 	pctx.StaticVariable("KotlincSuppressJDK9Warnings", strings.Join([]string{
 		"-J--add-opens=java.base/java.util=ALL-UNNAMED", // https://youtrack.jetbrains.com/issue/KT-43704
 	}, " "))
+
+	pctx.StaticVariable("KotlincGlobalFlags", strings.Join([]string{
+		// b/222162908: prevent kotlinc from reading /tmp/build.txt
+		"-Didea.plugins.compatible.build=999.SNAPSHOT",
+	}, " "))
 }
diff --git a/java/config/makevars.go b/java/config/makevars.go
index df447a1..bc6848f 100644
--- a/java/config/makevars.go
+++ b/java/config/makevars.go
@@ -78,7 +78,8 @@
 	ctx.Strict("CLASS2NONSDKLIST", "${Class2NonSdkList}")
 	ctx.Strict("HIDDENAPI", "${HiddenAPI}")
 
-	ctx.Strict("DEX_FLAGS", "${DexFlags}")
+	ctx.Strict("D8_FLAGS", "${D8Flags}")
+	ctx.Strict("R8_FLAGS", "${R8Flags}")
 
 	ctx.Strict("AIDL", "${AidlCmd}")
 	ctx.Strict("AAPT2", "${Aapt2Cmd}")
diff --git a/java/dex.go b/java/dex.go
index 474694a..13d6e4a 100644
--- a/java/dex.go
+++ b/java/dex.go
@@ -95,7 +95,7 @@
 		Command: `rm -rf "$outDir" && mkdir -p "$outDir" && ` +
 			`mkdir -p $$(dirname $tmpJar) && ` +
 			`${config.Zip2ZipCmd} -i $in -o $tmpJar -x '**/*.dex' && ` +
-			`$d8Template${config.D8Cmd} ${config.DexFlags} --output $outDir $d8Flags $tmpJar && ` +
+			`$d8Template${config.D8Cmd} ${config.D8Flags} --output $outDir $d8Flags $tmpJar && ` +
 			`$zipTemplate${config.SoongZipCmd} $zipFlags -o $outDir/classes.dex.jar -C $outDir -f "$outDir/classes*.dex" && ` +
 			`${config.MergeZipsCmd} -D -stripFile "**/*.class" $mergeZipsFlags $out $outDir/classes.dex.jar $in`,
 		CommandDeps: []string{
@@ -128,16 +128,19 @@
 			`mkdir -p $$(dirname ${outUsage}) && ` +
 			`mkdir -p $$(dirname $tmpJar) && ` +
 			`${config.Zip2ZipCmd} -i $in -o $tmpJar -x '**/*.dex' && ` +
-			`$r8Template${config.R8Cmd} ${config.DexFlags} -injars $tmpJar --output $outDir ` +
+			`$r8Template${config.R8Cmd} ${config.R8Flags} -injars $tmpJar --output $outDir ` +
 			`--no-data-resources ` +
 			`-printmapping ${outDict} ` +
 			`-printusage ${outUsage} ` +
+			`--deps-file ${out}.d ` +
 			`$r8Flags && ` +
 			`touch "${outDict}" "${outUsage}" && ` +
 			`${config.SoongZipCmd} -o ${outUsageZip} -C ${outUsageDir} -f ${outUsage} && ` +
 			`rm -rf ${outUsageDir} && ` +
 			`$zipTemplate${config.SoongZipCmd} $zipFlags -o $outDir/classes.dex.jar -C $outDir -f "$outDir/classes*.dex" && ` +
 			`${config.MergeZipsCmd} -D -stripFile "**/*.class" $mergeZipsFlags $out $outDir/classes.dex.jar $in`,
+		Depfile: "${out}.d",
+		Deps:    blueprint.DepsGCC,
 		CommandDeps: []string{
 			"${config.R8Cmd}",
 			"${config.Zip2ZipCmd}",
@@ -205,10 +208,10 @@
 
 func d8Flags(flags javaBuilderFlags) (d8Flags []string, d8Deps android.Paths) {
 	d8Flags = append(d8Flags, flags.bootClasspath.FormRepeatedClassPath("--lib ")...)
-	d8Flags = append(d8Flags, flags.classpath.FormRepeatedClassPath("--lib ")...)
+	d8Flags = append(d8Flags, flags.dexClasspath.FormRepeatedClassPath("--lib ")...)
 
 	d8Deps = append(d8Deps, flags.bootClasspath...)
-	d8Deps = append(d8Deps, flags.classpath...)
+	d8Deps = append(d8Deps, flags.dexClasspath...)
 
 	return d8Flags, d8Deps
 }
@@ -231,11 +234,11 @@
 
 	r8Flags = append(r8Flags, proguardRaiseDeps.FormJavaClassPath("-libraryjars"))
 	r8Flags = append(r8Flags, flags.bootClasspath.FormJavaClassPath("-libraryjars"))
-	r8Flags = append(r8Flags, flags.classpath.FormJavaClassPath("-libraryjars"))
+	r8Flags = append(r8Flags, flags.dexClasspath.FormJavaClassPath("-libraryjars"))
 
 	r8Deps = append(r8Deps, proguardRaiseDeps...)
 	r8Deps = append(r8Deps, flags.bootClasspath...)
-	r8Deps = append(r8Deps, flags.classpath...)
+	r8Deps = append(r8Deps, flags.dexClasspath...)
 
 	flagFiles := android.Paths{
 		android.PathForSource(ctx, "build/make/core/proguard.flags"),
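
The new --deps-file flag pairs with the Depfile/Deps settings so ninja re-runs r8 whenever a file listed in ${out}.d changes. The same wiring in isolation, with a made-up rule name:

```go
// Made-up rule name and command; only the Depfile/Deps wiring mirrors the r8 rule above.
var exampleDepfileRule = pctx.AndroidStaticRule("exampleDepfileRule",
	blueprint.RuleParams{
		Command: "sometool --deps-file ${out}.d --output $out $in",
		Depfile: "${out}.d",
		// DepsGCC tells ninja the depfile uses the Makefile-style format emitted here.
		Deps: blueprint.DepsGCC,
	})
```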
diff --git a/java/dex_test.go b/java/dex_test.go
new file mode 100644
index 0000000..fbdccb6
--- /dev/null
+++ b/java/dex_test.go
@@ -0,0 +1,103 @@
+// Copyright 2022 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package java
+
+import (
+	"testing"
+
+	"android/soong/android"
+)
+
+func TestR8(t *testing.T) {
+	result := PrepareForTestWithJavaDefaultModulesWithoutFakeDex2oatd.RunTestWithBp(t, `
+		android_app {
+			name: "app",
+			srcs: ["foo.java"],
+			libs: ["lib"],
+			static_libs: ["static_lib"],
+			platform_apis: true,
+		}
+
+		java_library {
+			name: "lib",
+			srcs: ["foo.java"],
+		}
+
+		java_library {
+			name: "static_lib",
+			srcs: ["foo.java"],
+		}
+	`)
+
+	app := result.ModuleForTests("app", "android_common")
+	lib := result.ModuleForTests("lib", "android_common")
+	staticLib := result.ModuleForTests("static_lib", "android_common")
+
+	appJavac := app.Rule("javac")
+	appR8 := app.Rule("r8")
+	libHeader := lib.Output("turbine-combined/lib.jar").Output
+	staticLibHeader := staticLib.Output("turbine-combined/static_lib.jar").Output
+
+	android.AssertStringDoesContain(t, "expected lib header jar in app javac classpath",
+		appJavac.Args["classpath"], libHeader.String())
+	android.AssertStringDoesContain(t, "expected static_lib header jar in app javac classpath",
+		appJavac.Args["classpath"], staticLibHeader.String())
+
+	android.AssertStringDoesContain(t, "expected lib header jar in app r8 classpath",
+		appR8.Args["r8Flags"], libHeader.String())
+	android.AssertStringDoesNotContain(t, "expected no static_lib header jar in app r8 classpath",
+		appR8.Args["r8Flags"], staticLibHeader.String())
+}
+
+func TestD8(t *testing.T) {
+	result := PrepareForTestWithJavaDefaultModulesWithoutFakeDex2oatd.RunTestWithBp(t, `
+		java_library {
+			name: "foo",
+			srcs: ["foo.java"],
+			libs: ["lib"],
+			static_libs: ["static_lib"],
+			installable: true,
+		}
+
+		java_library {
+			name: "lib",
+			srcs: ["foo.java"],
+		}
+
+		java_library {
+			name: "static_lib",
+			srcs: ["foo.java"],
+		}
+	`)
+
+	foo := result.ModuleForTests("foo", "android_common")
+	lib := result.ModuleForTests("lib", "android_common")
+	staticLib := result.ModuleForTests("static_lib", "android_common")
+
+	fooJavac := foo.Rule("javac")
+	fooD8 := foo.Rule("d8")
+	libHeader := lib.Output("turbine-combined/lib.jar").Output
+	staticLibHeader := staticLib.Output("turbine-combined/static_lib.jar").Output
+
+	android.AssertStringDoesContain(t, "expected lib header jar in foo javac classpath",
+		fooJavac.Args["classpath"], libHeader.String())
+	android.AssertStringDoesContain(t, "expected static_lib header jar in foo javac classpath",
+		fooJavac.Args["classpath"], staticLibHeader.String())
+
+	android.AssertStringDoesContain(t, "expected lib header jar in foo d8 classpath",
+		fooD8.Args["d8Flags"], libHeader.String())
+	android.AssertStringDoesNotContain(t, "expected no static_lib header jar in foo d8 classpath",
+		fooD8.Args["d8Flags"], staticLibHeader.String())
+}
diff --git a/java/dexpreopt_bootjars.go b/java/dexpreopt_bootjars.go
index cad9c33..3d91aec 100644
--- a/java/dexpreopt_bootjars.go
+++ b/java/dexpreopt_bootjars.go
@@ -276,11 +276,17 @@
 	// Rules which should be used in make to install the outputs.
 	profileInstalls android.RuleBuilderInstalls
 
+	// Path to the license metadata file for the module that built the profile.
+	profileLicenseMetadataFile android.OptionalPath
+
 	// Path to the image profile file on host (or empty, if profile is not generated).
 	profilePathOnHost android.Path
 
 	// Target-dependent fields.
 	variants []*bootImageVariant
+
+	// Path of the preloaded classes file.
+	preloadedClassesFile string
 }
 
 // Target-dependent description of a boot image.
@@ -320,6 +326,9 @@
 
 	// Rules which should be used in make to install the outputs on device.
 	deviceInstalls android.RuleBuilderInstalls
+
+	// Path to the license metadata file for the module that built the image.
+	licenseMetadataFile android.OptionalPath
 }
 
 // Get target-specific boot image variant for the given boot image config and target.
@@ -680,6 +689,13 @@
 		cmd.FlagWithArg("--base=", ctx.Config().LibartImgDeviceBaseAddress())
 	}
 
+	// We always expect a preloaded classes file to be available. However, if we cannot find it, it's
+	// OK to not pass the flag to dex2oat.
+	preloadedClassesPath := android.ExistentPathForSource(ctx, image.preloadedClassesFile)
+	if preloadedClassesPath.Valid() {
+		cmd.FlagWithInput("--preloaded-classes=", preloadedClassesPath.Path())
+	}
+
 	cmd.
 		FlagForEachInput("--dex-file=", image.dexPaths.Paths()).
 		FlagForEachArg("--dex-location=", image.dexLocations).
@@ -759,6 +775,7 @@
 	image.vdexInstalls = vdexInstalls
 	image.unstrippedInstalls = unstrippedInstalls
 	image.deviceInstalls = deviceInstalls
+	image.licenseMetadataFile = android.OptionalPathForPath(ctx.LicenseMetadataFile())
 }
 
 const failureMessage = `ERROR: Dex2oat failed to compile a boot image.
@@ -807,6 +824,7 @@
 	if image == defaultBootImageConfig(ctx) {
 		rule.Install(profile, "/system/etc/boot-image.prof")
 		image.profileInstalls = append(image.profileInstalls, rule.Installs()...)
+		image.profileLicenseMetadataFile = android.OptionalPathForPath(ctx.LicenseMetadataFile())
 	}
 
 	rule.Build("bootJarsProfile", "profile boot jars")
@@ -844,6 +862,7 @@
 	rule.Install(profile, "/system/etc/boot-image.bprof")
 	rule.Build("bootFrameworkProfile", "profile boot framework jars")
 	image.profileInstalls = append(image.profileInstalls, rule.Installs()...)
+	image.profileLicenseMetadataFile = android.OptionalPathForPath(ctx.LicenseMetadataFile())
 
 	return profile
 }
@@ -909,6 +928,9 @@
 	image := d.defaultBootImage
 	if image != nil {
 		ctx.Strict("DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED", image.profileInstalls.String())
+		if image.profileLicenseMetadataFile.Valid() {
+			ctx.Strict("DEXPREOPT_IMAGE_PROFILE_LICENSE_METADATA", image.profileLicenseMetadataFile.String())
+		}
 
 		global := dexpreopt.GetGlobalConfig(ctx)
 		dexPaths, dexLocations := bcpForDexpreopt(ctx, global.PreoptWithUpdatableBcp)
@@ -934,6 +956,9 @@
 				ctx.Strict("DEXPREOPT_IMAGE_DEPS_"+sfx, strings.Join(variant.imagesDeps.Strings(), " "))
 				ctx.Strict("DEXPREOPT_IMAGE_BUILT_INSTALLED_"+sfx, variant.installs.String())
 				ctx.Strict("DEXPREOPT_IMAGE_UNSTRIPPED_BUILT_INSTALLED_"+sfx, variant.unstrippedInstalls.String())
+				if variant.licenseMetadataFile.Valid() {
+					ctx.Strict("DEXPREOPT_IMAGE_LICENSE_METADATA_"+sfx, variant.licenseMetadataFile.String())
+				}
 			}
 			imageLocationsOnHost, imageLocationsOnDevice := current.getAnyAndroidVariant().imageLocations()
 			ctx.Strict("DEXPREOPT_IMAGE_LOCATIONS_ON_HOST"+current.name, strings.Join(imageLocationsOnHost, ":"))
diff --git a/java/dexpreopt_config.go b/java/dexpreopt_config.go
index 21e1d12..4d0bd09 100644
--- a/java/dexpreopt_config.go
+++ b/java/dexpreopt_config.go
@@ -62,18 +62,20 @@
 			installDirOnDevice:       "system/framework",
 			profileInstallPathInApex: "etc/boot-image.prof",
 			modules:                  artModules,
+			preloadedClassesFile:     "art/build/boot/preloaded-classes",
 		}
 
 		// Framework config for the boot image extension.
 		// It includes framework libraries and depends on the ART config.
 		frameworkSubdir := "system/framework"
 		frameworkCfg := bootImageConfig{
-			extends:            &artCfg,
-			name:               frameworkBootImageName,
-			stem:               "boot",
-			installDirOnHost:   frameworkSubdir,
-			installDirOnDevice: frameworkSubdir,
-			modules:            frameworkModules,
+			extends:              &artCfg,
+			name:                 frameworkBootImageName,
+			stem:                 "boot",
+			installDirOnHost:     frameworkSubdir,
+			installDirOnDevice:   frameworkSubdir,
+			modules:              frameworkModules,
+			preloadedClassesFile: "frameworks/base/config/preloaded-classes",
 		}
 
 		return map[string]*bootImageConfig{
diff --git a/java/droiddoc.go b/java/droiddoc.go
index c84a15c..023d619 100644
--- a/java/droiddoc.go
+++ b/java/droiddoc.go
@@ -330,7 +330,7 @@
 
 	// Process all aidl files together to support sharding them into one or more rules that produce srcjars.
 	if len(aidlSrcs) > 0 {
-		srcJarFiles := genAidl(ctx, aidlSrcs, flags.aidlFlags+aidlIncludeFlags, flags.aidlDeps)
+		srcJarFiles := genAidl(ctx, aidlSrcs, flags.aidlFlags+aidlIncludeFlags, nil, flags.aidlDeps)
 		outSrcFiles = append(outSrcFiles, srcJarFiles...)
 	}
 
diff --git a/java/droidstubs.go b/java/droidstubs.go
index f9dcfd6..3b1f7c0 100644
--- a/java/droidstubs.go
+++ b/java/droidstubs.go
@@ -335,10 +335,10 @@
 		cmd.FlagWithArg("--hide ", "HiddenTypedefConstant").
 			FlagWithArg("--hide ", "SuperfluousPrefix").
 			FlagWithArg("--hide ", "AnnotationExtraction").
-			// (b/217545629)
-			FlagWithArg("--hide ", "ChangedThrows").
-			// (b/217552813)
-			FlagWithArg("--hide ", "ChangedAbstract")
+			// b/222738070
+			FlagWithArg("--hide ", "BannedThrow").
+			// b/223382732
+			FlagWithArg("--hide ", "ChangedDefault")
 	}
 }
 
@@ -433,6 +433,10 @@
 	}
 }
 
+func metalavaUseRbe(ctx android.ModuleContext) bool {
+	return ctx.Config().UseRBE() && ctx.Config().IsEnvTrue("RBE_METALAVA")
+}
+
 func metalavaCmd(ctx android.ModuleContext, rule *android.RuleBuilder, javaVersion javaVersion, srcs android.Paths,
 	srcJarList android.Path, bootclasspath, classpath classpath, homeDir android.WritablePath) *android.RuleBuilderCommand {
 	rule.Command().Text("rm -rf").Flag(homeDir.String())
@@ -441,7 +445,7 @@
 	cmd := rule.Command()
 	cmd.FlagWithArg("ANDROID_PREFS_ROOT=", homeDir.String())
 
-	if ctx.Config().UseRBE() && ctx.Config().IsEnvTrue("RBE_METALAVA") {
+	if metalavaUseRbe(ctx) {
 		rule.Remoteable(android.RemoteRuleSupports{RBE: true})
 		execStrategy := ctx.Config().GetenvWithDefault("RBE_METALAVA_EXEC_STRATEGY", remoteexec.LocalExecStrategy)
 		labels := map[string]string{"type": "tool", "name": "metalava"}
@@ -476,7 +480,10 @@
 		Flag("--quiet").
 		Flag("--format=v2").
 		FlagWithArg("--repeat-errors-max ", "10").
-		FlagWithArg("--hide ", "UnresolvedImport")
+		FlagWithArg("--hide ", "UnresolvedImport").
+		FlagWithArg("--hide ", "InvalidNullabilityOverride").
+		// b/223382732
+		FlagWithArg("--hide ", "ChangedDefault")
 
 	return cmd
 }
@@ -662,7 +669,9 @@
 	}
 
 	// TODO(b/183630617): rewrapper doesn't support restat rules
-	// rule.Restat()
+	if !metalavaUseRbe(ctx) {
+		rule.Restat()
+	}
 
 	zipSyncCleanupCmd(rule, srcJarDir)
 
diff --git a/java/gen.go b/java/gen.go
index 445a2d8..1572bf0 100644
--- a/java/gen.go
+++ b/java/gen.go
@@ -44,7 +44,7 @@
 		})
 )
 
-func genAidl(ctx android.ModuleContext, aidlFiles android.Paths, aidlFlags string, deps android.Paths) android.Paths {
+func genAidl(ctx android.ModuleContext, aidlFiles android.Paths, aidlGlobalFlags string, aidlIndividualFlags map[string]string, deps android.Paths) android.Paths {
 	// Shard aidl files into groups of 50 to avoid having to recompile all of them if one changes and to avoid
 	// hitting command line length limits.
 	shards := android.ShardPaths(aidlFiles, 50)
@@ -61,15 +61,17 @@
 
 		rule.Command().Text("rm -rf").Flag(outDir.String())
 		rule.Command().Text("mkdir -p").Flag(outDir.String())
-		rule.Command().Text("FLAGS=' " + aidlFlags + "'")
+		rule.Command().Text("FLAGS=' " + aidlGlobalFlags + "'")
 
 		for _, aidlFile := range shard {
+			localFlag := aidlIndividualFlags[aidlFile.String()]
 			depFile := srcJarFile.InSameDir(ctx, aidlFile.String()+".d")
 			javaFile := outDir.Join(ctx, pathtools.ReplaceExtension(aidlFile.String(), "java"))
 			rule.Command().
 				Tool(ctx.Config().HostToolPath(ctx, "aidl")).
 				FlagWithDepFile("-d", depFile).
 				Flag("$FLAGS").
+				Flag(localFlag).
 				Input(aidlFile).
 				Output(javaFile).
 				Implicits(deps)
@@ -159,7 +161,14 @@
 
 	// Process all aidl files together to support sharding them into one or more rules that produce srcjars.
 	if len(aidlSrcs) > 0 {
-		srcJarFiles := genAidl(ctx, aidlSrcs, flags.aidlFlags+aidlIncludeFlags, flags.aidlDeps)
+		individualFlags := make(map[string]string)
+		for _, aidlSrc := range aidlSrcs {
+			flags := j.individualAidlFlags(ctx, aidlSrc)
+			if flags != "" {
+				individualFlags[aidlSrc.String()] = flags
+			}
+		}
+		srcJarFiles := genAidl(ctx, aidlSrcs, flags.aidlFlags+aidlIncludeFlags, individualFlags, flags.aidlDeps)
 		outSrcFiles = append(outSrcFiles, srcJarFiles...)
 	}
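genAidl now takes the shared flags plus a per-file flag map and appends each file's extra flags after $FLAGS on its aidl command line. The map itself is built by the caller in java.go (individualAidlFlags, which is not part of this hunk); the following is a hedged sketch of such a caller with illustrative names, based on the enforce_permissions tests in java_test.go:

package java

import "android/soong/android"

// buildIndividualAidlFlags is an illustrative sketch, not the real java.go helper: it maps each
// AIDL source file to extra per-file flags, here enabling permission-annotation enforcement for
// every file that is not listed as an exception.
func buildIndividualAidlFlags(srcs android.Paths, exceptions map[string]bool) map[string]string {
	flags := make(map[string]string)
	for _, src := range srcs {
		if !exceptions[src.String()] {
			flags[src.String()] = "-Wmissing-permission-annotation -Werror"
		}
	}
	return flags
}

The result is passed straight through as the aidlIndividualFlags argument of genAidl, so files with no entry simply get an empty extra flag on their command line.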
 
diff --git a/java/hiddenapi_modular.go b/java/hiddenapi_modular.go
index 0cc960d..534a814 100644
--- a/java/hiddenapi_modular.go
+++ b/java/hiddenapi_modular.go
@@ -295,6 +295,12 @@
 	return dexJar.Path()
 }
 
+// HIDDENAPI_STUB_FLAGS_IMPL_FLAGS is the set of flags that identify implementation only signatures,
+// i.e. those signatures that are not part of any API (including the hidden API).
+var HIDDENAPI_STUB_FLAGS_IMPL_FLAGS = []string{}
+
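+// HIDDENAPI_FLAGS_CSV_IMPL_FLAGS is the corresponding set of flags for the flags CSV file: a
+// signature whose only flag is "blocked" is treated as implementation only.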
+var HIDDENAPI_FLAGS_CSV_IMPL_FLAGS = []string{"blocked"}
+
 // buildRuleToGenerateHiddenAPIStubFlagsFile creates a rule to create a hidden API stub flags file.
 //
 // The rule is initialized but not built so that the caller can modify it and select an appropriate
@@ -345,7 +351,8 @@
 	// If there are stub flag files that have been generated by fragments on which this depends then
 	// use them to validate the stub flag file generated by the rules created by this method.
 	if len(stubFlagSubsets) > 0 {
-		validFile := buildRuleValidateOverlappingCsvFiles(ctx, name, desc, outputPath, stubFlagSubsets)
+		validFile := buildRuleValidateOverlappingCsvFiles(ctx, name, desc, outputPath, stubFlagSubsets,
+			HIDDENAPI_STUB_FLAGS_IMPL_FLAGS)
 
 		// Add the file that indicates that the file generated by this is valid.
 		//
@@ -904,7 +911,8 @@
 	// If there are flag files that have been generated by fragments on which this depends then use
 	// them to validate the flag file generated by the rules created by this method.
 	if len(flagSubsets) > 0 {
-		validFile := buildRuleValidateOverlappingCsvFiles(ctx, name, desc, outputPath, flagSubsets)
+		validFile := buildRuleValidateOverlappingCsvFiles(ctx, name, desc, outputPath, flagSubsets,
+			HIDDENAPI_FLAGS_CSV_IMPL_FLAGS)
 
 		// Add the file that indicates that the file generated by this is valid.
 		//
@@ -943,7 +951,9 @@
 
 // buildRuleSignaturePatternsFile creates a rule to generate a file containing the set of signature
 // patterns that will select a subset of the monolithic flags.
-func buildRuleSignaturePatternsFile(ctx android.ModuleContext, flagsPath android.Path, splitPackages []string, packagePrefixes []string) android.Path {
+func buildRuleSignaturePatternsFile(
+	ctx android.ModuleContext, flagsPath android.Path,
+	splitPackages []string, packagePrefixes []string, singlePackages []string) android.Path {
 	patternsFile := android.PathForModuleOut(ctx, "modular-hiddenapi", "signature-patterns.csv")
 	// Create a rule to validate the output from the following rule.
 	rule := android.NewRuleBuilder(pctx, ctx)
@@ -959,19 +969,36 @@
 		FlagWithInput("--flags ", flagsPath).
 		FlagForEachArg("--split-package ", quotedSplitPackages).
 		FlagForEachArg("--package-prefix ", packagePrefixes).
+		FlagForEachArg("--single-package ", singlePackages).
 		FlagWithOutput("--output ", patternsFile)
 	rule.Build("hiddenAPISignaturePatterns", "hidden API signature patterns")
 
 	return patternsFile
 }
 
-// buildRuleRemoveBlockedFlag creates a rule that will remove entries from the input path which
-// only have blocked flags. It will not remove entries that have blocked as well as other flags,
-// e.g. blocked,core-platform-api.
-func buildRuleRemoveBlockedFlag(ctx android.BuilderContext, name string, desc string, inputPath android.Path, filteredPath android.WritablePath) {
+// buildRuleRemoveSignaturesWithImplementationFlags creates a rule that will remove signatures from
+// the input flags file which have only the implementation flags, i.e. are not part of an API.
+//
+// implementationFlags specifies the set of default flags that identify the signature of a
+// private, implementation-only member. Signatures that match those flags are removed from the
+// flags file, as they are implementation only.
+//
+// This is used to remove implementation-only signatures from the signature files that are
+// persisted in the sdk snapshot, as sdk snapshots should not include implementation details. The
+// signatures produced by this method are then compared by the buildRuleValidateOverlappingCsvFiles
+// method, which treats any missing signatures as if they were implementation only.
+func buildRuleRemoveSignaturesWithImplementationFlags(ctx android.BuilderContext,
+	name string, desc string, inputPath android.Path, filteredPath android.WritablePath,
+	implementationFlags []string) {
+
 	rule := android.NewRuleBuilder(pctx, ctx)
+	implementationFlagPattern := ""
+	for _, implementationFlag := range implementationFlags {
+		implementationFlagPattern = implementationFlagPattern + "," + implementationFlag
+	}
 	rule.Command().
-		Text(`grep -vE "^[^,]+,blocked$"`).Input(inputPath).Text(">").Output(filteredPath).
+		Text(`grep -vE "^[^,]+` + implementationFlagPattern + `$"`).Input(inputPath).
+		Text(">").Output(filteredPath).
 		// Grep's exit code depends on whether it finds anything. It is 0 (build success) when it finds
 		// something and 1 (build failure) when it does not and 2 (when it encounters an error).
 		// However, while it is unlikely it is not an error if this does not find any matches. The
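For the two implementation flag sets defined earlier in this file, the pattern built above expands as follows (derived directly from the loop over implementationFlags):

// HIDDENAPI_STUB_FLAGS_IMPL_FLAGS  ([]string{}):         grep -vE "^[^,]+$"
//     drops signatures that carry no flags at all in the stub-flags CSV.
// HIDDENAPI_FLAGS_CSV_IMPL_FLAGS   ([]string{"blocked"}): grep -vE "^[^,]+,blocked$"
//     drops signatures whose only flag is "blocked", matching the behaviour of the old
//     buildRuleRemoveBlockedFlag.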
@@ -983,7 +1010,14 @@
 
 // buildRuleValidateOverlappingCsvFiles checks that the modular CSV files, i.e. the files generated
 // by the individual bootclasspath_fragment modules are subsets of the monolithic CSV file.
-func buildRuleValidateOverlappingCsvFiles(ctx android.BuilderContext, name string, desc string, monolithicFilePath android.WritablePath, csvSubsets SignatureCsvSubsets) android.WritablePath {
+//
+// implementationFlags specifies the set of default flags that identify the signature of a
+// private, implementation-only member. A signature that is present in a monolithic flags subset
+// defined by SignatureCsvSubset but not present in the flags file from the corresponding
+// module is assumed to be an implementation-only member and so must have these flags.
+func buildRuleValidateOverlappingCsvFiles(ctx android.BuilderContext, name string, desc string,
+	monolithicFilePath android.WritablePath, csvSubsets SignatureCsvSubsets,
+	implementationFlags []string) android.WritablePath {
 	// The file which is used to record that the flags file is valid.
 	validFile := pathForValidation(ctx, monolithicFilePath)
 
@@ -991,14 +1025,19 @@
 	rule := android.NewRuleBuilder(pctx, ctx)
 	command := rule.Command().
 		BuiltTool("verify_overlaps").
-		Input(monolithicFilePath)
+		FlagWithInput("--monolithic-flags ", monolithicFilePath)
 
 	for _, subset := range csvSubsets {
 		command.
+			Flag("--module-flags ").
 			Textf("%s:%s", subset.CsvFile, subset.SignaturePatternsFile).
 			Implicit(subset.CsvFile).Implicit(subset.SignaturePatternsFile)
 	}
 
+	for _, implementationFlag := range implementationFlags {
+		command.FlagWithArg("--implementation-flag ", implementationFlag)
+	}
+
 	// If validation passes then update the file that records that.
 	command.Text("&& touch").Output(validFile)
 	rule.Build(name+"Validation", desc+" validation")
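Putting the pieces above together, the generated command has roughly this shape (file names are illustrative; exact spacing comes from RuleBuilder joining the flag fragments):

// verify_overlaps
//     --monolithic-flags out/soong/hiddenapi/hiddenapi-flags.csv
//     --module-flags  my-fragment-flags.csv:my-fragment-signature-patterns.csv
//     --implementation-flag blocked
//   && touch <validation marker file>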
@@ -1072,12 +1111,16 @@
 	// Generate the filtered-stub-flags.csv file which contains the filtered stub flags that will be
 	// compared against the monolithic stub flags.
 	filteredStubFlagsCSV := android.PathForModuleOut(ctx, hiddenApiSubDir, "filtered-stub-flags.csv")
-	buildRuleRemoveBlockedFlag(ctx, "modularHiddenApiFilteredStubFlags", "modular hiddenapi filtered stub flags", stubFlagsCSV, filteredStubFlagsCSV)
+	buildRuleRemoveSignaturesWithImplementationFlags(ctx, "modularHiddenApiFilteredStubFlags",
+		"modular hiddenapi filtered stub flags", stubFlagsCSV, filteredStubFlagsCSV,
+		HIDDENAPI_STUB_FLAGS_IMPL_FLAGS)
 
 	// Generate the filtered-flags.csv file which contains the filtered flags that will be compared
 	// against the monolithic flags.
 	filteredFlagsCSV := android.PathForModuleOut(ctx, hiddenApiSubDir, "filtered-flags.csv")
-	buildRuleRemoveBlockedFlag(ctx, "modularHiddenApiFilteredFlags", "modular hiddenapi filtered flags", allFlagsCSV, filteredFlagsCSV)
+	buildRuleRemoveSignaturesWithImplementationFlags(ctx, "modularHiddenApiFilteredFlags",
+		"modular hiddenapi filtered flags", allFlagsCSV, filteredFlagsCSV,
+		HIDDENAPI_FLAGS_CSV_IMPL_FLAGS)
 
 	// Store the paths in the info for use by other modules and sdk snapshot generation.
 	output := HiddenAPIOutput{
diff --git a/java/java.go b/java/java.go
index e55f045..b34d6de 100644
--- a/java/java.go
+++ b/java/java.go
@@ -421,9 +421,25 @@
 }
 
 type deps struct {
-	classpath               classpath
-	java9Classpath          classpath
-	bootClasspath           classpath
+	// bootClasspath is the list of jars that form the boot classpath (generally the java.* and
+	// android.* classes) for tools that still use it.  javac targeting 1.9 or higher uses
+	// systemModules and java9Classpath instead.
+	bootClasspath classpath
+
+	// classpath is the list of jars that form the classpath for javac and kotlinc rules.  It
+	// contains header jars for all static and non-static dependencies.
+	classpath classpath
+
+	// dexClasspath is the list of jars that form the classpath for d8 and r8 rules.  It contains
+	// header jars for all non-static dependencies.  Static dependencies have already been
+	// combined into the program jar.
+	dexClasspath classpath
+
+	// java9Classpath is the list of jars that will be added to the classpath when targeting
+	// 1.9 or higher.  It generally contains the android.* classes, while the java.* classes
+	// are provided by systemModules.
+	java9Classpath classpath
+
 	processorPath           classpath
 	errorProneProcessorPath classpath
 	processorClasses        []string
@@ -1458,7 +1474,10 @@
 		if ctx.OtherModuleHasProvider(module, JavaInfoProvider) {
 			dep := ctx.OtherModuleProvider(module, JavaInfoProvider).(JavaInfo)
 			switch tag {
-			case libTag, staticLibTag:
+			case libTag:
+				flags.classpath = append(flags.classpath, dep.HeaderJars...)
+				flags.dexClasspath = append(flags.dexClasspath, dep.HeaderJars...)
+			case staticLibTag:
 				flags.classpath = append(flags.classpath, dep.HeaderJars...)
 			case bootClasspathTag:
 				flags.bootClasspath = append(flags.bootClasspath, dep.HeaderJars...)
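The doc comments added to the deps struct above and this switch describe the same split: header jars of shared (libTag) dependencies land on both the javac classpath and the new dexClasspath, while static (staticLibTag) dependencies only feed the javac classpath, because their classes are merged into the program jar before d8/r8 run. A hedged test-style sketch, written in the style of java_test.go and assuming its helpers (module names are hypothetical):

func TestSharedVsStaticClasspathSketch(t *testing.T) {
	// Illustrative only: "app" links "shared" dynamically and "static" statically.
	ctx, _ := testJava(t, `
		java_library { name: "app", srcs: ["a.java"], libs: ["shared"], static_libs: ["static"] }
		java_library { name: "shared", srcs: ["s.java"] }
		java_library { name: "static", srcs: ["t.java"] }
	`)
	appJavac := ctx.ModuleForTests("app", "android_common").Rule("javac")
	// Both header jars are visible to javac...
	android.AssertStringDoesContain(t, "app javac classpath", appJavac.Args["classpath"], "shared.jar")
	android.AssertStringDoesContain(t, "app javac classpath", appJavac.Args["classpath"], "static.jar")
	// ...but only the shared header jar is expected on the dex (d8/r8) classpath; see the
	// d8Flags assertions in java_test.go for the exact argument name.
}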
@@ -1706,6 +1725,7 @@
 
 	android.InitPrebuiltModule(module, &module.properties.Jars)
 	android.InitApexModule(module)
+	android.InitBazelModule(module)
 	InitJavaModule(module, android.HostSupported)
 	return module
 }
@@ -2004,38 +2024,139 @@
 	}
 }
 
-type javaLibraryAttributes struct {
+type javaCommonAttributes struct {
 	Srcs      bazel.LabelListAttribute
-	Deps      bazel.LabelListAttribute
+	Plugins   bazel.LabelListAttribute
 	Javacopts bazel.StringListAttribute
 }
 
-func (m *Library) convertLibraryAttrsBp2Build(ctx android.TopDownMutatorContext) *javaLibraryAttributes {
-	//TODO(b/209577426): Support multiple arch variants
-	srcs := bazel.MakeLabelListAttribute(android.BazelLabelForModuleSrcExcludes(ctx, m.properties.Srcs, m.properties.Exclude_srcs))
-	attrs := &javaLibraryAttributes{
-		Srcs: srcs,
+type javaDependencyLabels struct {
+	// Dependencies which DO NOT contribute to the API visible to upstream dependencies.
+	Deps bazel.LabelListAttribute
+	// Dependencies which DO contribute to the API visible to upstream dependencies.
+	StaticDeps bazel.LabelListAttribute
+}
+
+type eventLogTagsAttributes struct {
+	Srcs bazel.LabelListAttribute
+}
+
+// convertLibraryAttrsBp2Build converts a few shared attributes from java_* modules
+// and also separates dependencies into dynamic and static dependencies.
+// Each corresponding Bazel target type can have a different method for handling
+// dynamic vs. static dependencies, and so these are returned to the calling function.
+func (m *Library) convertLibraryAttrsBp2Build(ctx android.TopDownMutatorContext) (*javaCommonAttributes, *javaDependencyLabels) {
+	var srcs bazel.LabelListAttribute
+	archVariantProps := m.GetArchVariantProperties(ctx, &CommonProperties{})
+	for axis, configToProps := range archVariantProps {
+		for config, _props := range configToProps {
+			if archProps, ok := _props.(*CommonProperties); ok {
+				archSrcs := android.BazelLabelForModuleSrcExcludes(ctx, archProps.Srcs, archProps.Exclude_srcs)
+				srcs.SetSelectValue(axis, config, archSrcs)
+			}
+		}
 	}
 
-	if m.properties.Javacflags != nil {
-		attrs.Javacopts = bazel.MakeStringListAttribute(m.properties.Javacflags)
+	javaSrcPartition := "java"
+	protoSrcPartition := "proto"
+	logtagSrcPartition := "logtag"
+	srcPartitions := bazel.PartitionLabelListAttribute(ctx, &srcs, bazel.LabelPartitions{
+		javaSrcPartition:   bazel.LabelPartition{Extensions: []string{".java"}, Keep_remainder: true},
+		logtagSrcPartition: bazel.LabelPartition{Extensions: []string{".logtags", ".logtag"}},
+		protoSrcPartition:  android.ProtoSrcLabelPartition,
+	})
+
+	javaSrcs := srcPartitions[javaSrcPartition]
+
+	var logtagsSrcs bazel.LabelList
+	if !srcPartitions[logtagSrcPartition].IsEmpty() {
+		logtagsLibName := m.Name() + "_logtags"
+		logtagsSrcs = bazel.MakeLabelList([]bazel.Label{{Label: ":" + logtagsLibName}})
+		ctx.CreateBazelTargetModule(
+			bazel.BazelTargetModuleProperties{
+				Rule_class:        "event_log_tags",
+				Bzl_load_location: "//build/make/tools:event_log_tags.bzl",
+			},
+			android.CommonAttributes{Name: logtagsLibName},
+			&eventLogTagsAttributes{
+				Srcs: srcPartitions[logtagSrcPartition],
+			},
+		)
 	}
+	javaSrcs.Append(bazel.MakeLabelListAttribute(logtagsSrcs))
+
+	var javacopts []string
+	if m.properties.Javacflags != nil {
+		javacopts = append(javacopts, m.properties.Javacflags...)
+	}
+	epEnabled := m.properties.Errorprone.Enabled
+	//TODO(b/227504307) add configuration that depends on RUN_ERROR_PRONE environment variable
+	if Bool(epEnabled) {
+		javacopts = append(javacopts, m.properties.Errorprone.Javacflags...)
+	}
+
+	commonAttrs := &javaCommonAttributes{
+		Srcs: javaSrcs,
+		Plugins: bazel.MakeLabelListAttribute(
+			android.BazelLabelForModuleDeps(ctx, m.properties.Plugins),
+		),
+		Javacopts: bazel.MakeStringListAttribute(javacopts),
+	}
+
+	depLabels := &javaDependencyLabels{}
 
 	var deps bazel.LabelList
 	if m.properties.Libs != nil {
 		deps.Append(android.BazelLabelForModuleDeps(ctx, m.properties.Libs))
 	}
-	if m.properties.Static_libs != nil {
-		//TODO(b/217236083) handle static libs similarly to Soong
-		deps.Append(android.BazelLabelForModuleDeps(ctx, m.properties.Static_libs))
-	}
-	attrs.Deps = bazel.MakeLabelListAttribute(deps)
 
-	return attrs
+	var staticDeps bazel.LabelList
+	if m.properties.Static_libs != nil {
+		staticDeps.Append(android.BazelLabelForModuleDeps(ctx, m.properties.Static_libs))
+	}
+
+	protoDepLabel := bp2buildProto(ctx, &m.Module, srcPartitions[protoSrcPartition])
+	// Soong does not differentiate between a java_library and the Bazel equivalent of
+	// a java_proto_library + proto_library pair. Instead, in Soong proto sources are
+	// listed directly in the srcs of a java_library, and the classes produced
+	// by protoc are included directly in the resulting JAR. Thus upstream dependencies
+	// that depend on a java_library with proto sources can link directly to the protobuf API,
+	// and so this should be a static dependency.
+	staticDeps.Add(protoDepLabel)
+
+	depLabels.Deps = bazel.MakeLabelListAttribute(deps)
+	depLabels.StaticDeps = bazel.MakeLabelListAttribute(staticDeps)
+
+	return commonAttrs, depLabels
+}
+
+type javaLibraryAttributes struct {
+	*javaCommonAttributes
+	Deps    bazel.LabelListAttribute
+	Exports bazel.LabelListAttribute
 }
 
 func javaLibraryBp2Build(ctx android.TopDownMutatorContext, m *Library) {
-	attrs := m.convertLibraryAttrsBp2Build(ctx)
+	commonAttrs, depLabels := m.convertLibraryAttrsBp2Build(ctx)
+
+	deps := depLabels.Deps
+	if !commonAttrs.Srcs.IsEmpty() {
+		deps.Append(depLabels.StaticDeps) // only append static deps if there are sources that can consume them
+
+		sdkVersion := m.SdkVersion(ctx)
+		if sdkVersion.Kind == android.SdkPublic && sdkVersion.ApiLevel == android.FutureApiLevel {
+			// TODO(b/220869005) remove forced dependency on current public android.jar
+			deps.Add(bazel.MakeLabelAttribute("//prebuilts/sdk:public_current_android_sdk_java_import"))
+		}
+	} else if !depLabels.Deps.IsEmpty() {
+		ctx.ModuleErrorf("Module has direct dependencies but no sources. Bazel will not allow this.")
+	}
+
+	attrs := &javaLibraryAttributes{
+		javaCommonAttributes: commonAttrs,
+		Deps:                 deps,
+		Exports:              depLabels.StaticDeps,
+	}
 
 	props := bazel.BazelTargetModuleProperties{
 		Rule_class:        "java_library",
@@ -2046,15 +2167,30 @@
 }
 
 type javaBinaryHostAttributes struct {
-	Srcs       bazel.LabelListAttribute
-	Deps       bazel.LabelListAttribute
-	Main_class string
-	Jvm_flags  bazel.StringListAttribute
-	Javacopts  bazel.StringListAttribute
+	*javaCommonAttributes
+	Deps         bazel.LabelListAttribute
+	Runtime_deps bazel.LabelListAttribute
+	Main_class   string
+	Jvm_flags    bazel.StringListAttribute
 }
 
 // JavaBinaryHostBp2Build is for java_binary_host bp2build.
 func javaBinaryHostBp2Build(ctx android.TopDownMutatorContext, m *Binary) {
+	commonAttrs, depLabels := m.convertLibraryAttrsBp2Build(ctx)
+
+	deps := depLabels.Deps
+	deps.Append(depLabels.StaticDeps)
+	if m.binaryProperties.Jni_libs != nil {
+		deps.Append(bazel.MakeLabelListAttribute(android.BazelLabelForModuleDeps(ctx, m.binaryProperties.Jni_libs)))
+	}
+
+	var runtimeDeps bazel.LabelListAttribute
+	if commonAttrs.Srcs.IsEmpty() {
+		// if there are no sources, then the dependencies can only be used at runtime
+		runtimeDeps = deps
+		deps = bazel.LabelListAttribute{}
+	}
+
 	mainClass := ""
 	if m.binaryProperties.Main_class != nil {
 		mainClass = *m.binaryProperties.Main_class
@@ -2066,26 +2202,12 @@
 		}
 		mainClass = mainClassInManifest
 	}
-	srcs := bazel.MakeLabelListAttribute(android.BazelLabelForModuleSrcExcludes(ctx, m.properties.Srcs, m.properties.Exclude_srcs))
+
 	attrs := &javaBinaryHostAttributes{
-		Srcs:       srcs,
-		Main_class: mainClass,
-	}
-
-	if m.properties.Javacflags != nil {
-		attrs.Javacopts = bazel.MakeStringListAttribute(m.properties.Javacflags)
-	}
-
-	// Attribute deps
-	deps := []string{}
-	if m.properties.Static_libs != nil {
-		deps = append(deps, m.properties.Static_libs...)
-	}
-	if m.binaryProperties.Jni_libs != nil {
-		deps = append(deps, m.binaryProperties.Jni_libs...)
-	}
-	if len(deps) > 0 {
-		attrs.Deps = bazel.MakeLabelListAttribute(android.BazelLabelForModuleDeps(ctx, deps))
+		javaCommonAttributes: commonAttrs,
+		Deps:                 deps,
+		Runtime_deps:         runtimeDeps,
+		Main_class:           mainClass,
 	}
 
 	// Attribute jvm_flags
@@ -2128,8 +2250,16 @@
 
 // java_import bp2Build converter.
 func (i *Import) ConvertWithBp2build(ctx android.TopDownMutatorContext) {
-	//TODO(b/209577426): Support multiple arch variants
-	jars := bazel.MakeLabelListAttribute(android.BazelLabelForModuleSrcExcludes(ctx, i.properties.Jars, []string(nil)))
+	var jars bazel.LabelListAttribute
+	archVariantProps := i.GetArchVariantProperties(ctx, &ImportProperties{})
+	for axis, configToProps := range archVariantProps {
+		for config, _props := range configToProps {
+			if archProps, ok := _props.(*ImportProperties); ok {
+				archJars := android.BazelLabelForModuleSrcExcludes(ctx, archProps.Jars, []string(nil))
+				jars.SetSelectValue(axis, config, archJars)
+			}
+		}
+	}
 
 	attrs := &bazelJavaImportAttributes{
 		Jars: jars,
diff --git a/java/java_test.go b/java/java_test.go
index 21c76b6..4c93824 100644
--- a/java/java_test.go
+++ b/java/java_test.go
@@ -973,7 +973,7 @@
 
 	fooHeaderJar := filepath.Join("out", "soong", ".intermediates", "foo", "android_common", "turbine-combined", "foo.jar")
 	barTurbineJar := filepath.Join("out", "soong", ".intermediates", "bar", "android_common", "turbine", "bar.jar")
-	android.AssertStringDoesContain(t, "bar turbine classpath", barTurbine.Args["classpath"], fooHeaderJar)
+	android.AssertStringDoesContain(t, "bar turbine classpath", barTurbine.Args["turbineFlags"], fooHeaderJar)
 	android.AssertStringDoesContain(t, "bar javac classpath", barJavac.Args["classpath"], fooHeaderJar)
 	android.AssertPathsRelativeToTopEquals(t, "bar turbine combineJar", []string{barTurbineJar, fooHeaderJar}, barTurbineCombined.Inputs)
 	android.AssertStringDoesContain(t, "baz javac classpath", bazJavac.Args["classpath"], "prebuilts/sdk/14/public/android.jar")
@@ -1333,6 +1333,42 @@
 	}
 }
 
+func TestAidlEnforcePermissions(t *testing.T) {
+	ctx, _ := testJava(t, `
+		java_library {
+			name: "foo",
+			srcs: ["aidl/foo/IFoo.aidl"],
+			aidl: { enforce_permissions: true },
+		}
+	`)
+
+	aidlCommand := ctx.ModuleForTests("foo", "android_common").Rule("aidl").RuleParams.Command
+	expectedAidlFlag := "-Wmissing-permission-annotation -Werror"
+	if !strings.Contains(aidlCommand, expectedAidlFlag) {
+		t.Errorf("aidl command %q does not contain %q", aidlCommand, expectedAidlFlag)
+	}
+}
+
+func TestAidlEnforcePermissionsException(t *testing.T) {
+	ctx, _ := testJava(t, `
+		java_library {
+			name: "foo",
+			srcs: ["aidl/foo/IFoo.aidl", "aidl/foo/IFoo2.aidl"],
+			aidl: { enforce_permissions: true, enforce_permissions_exceptions: ["aidl/foo/IFoo2.aidl"] },
+		}
+	`)
+
+	aidlCommand := ctx.ModuleForTests("foo", "android_common").Rule("aidl").RuleParams.Command
+	expectedAidlFlag := "$$FLAGS -Wmissing-permission-annotation -Werror aidl/foo/IFoo.aidl"
+	if !strings.Contains(aidlCommand, expectedAidlFlag) {
+		t.Errorf("aidl command %q does not contain %q", aidlCommand, expectedAidlFlag)
+	}
+	expectedAidlFlag = "$$FLAGS  aidl/foo/IFoo2.aidl"
+	if !strings.Contains(aidlCommand, expectedAidlFlag) {
+		t.Errorf("aidl command %q does not contain %q", aidlCommand, expectedAidlFlag)
+	}
+}
+
 func TestDataNativeBinaries(t *testing.T) {
 	ctx, _ := testJava(t, `
 		java_test_host {
diff --git a/java/jdeps.go b/java/jdeps.go
index eff9a31..3734335 100644
--- a/java/jdeps.go
+++ b/java/jdeps.go
@@ -71,6 +71,8 @@
 		dpInfo.Jars = android.FirstUniqueStrings(dpInfo.Jars)
 		dpInfo.SrcJars = android.FirstUniqueStrings(dpInfo.SrcJars)
 		dpInfo.Paths = android.FirstUniqueStrings(dpInfo.Paths)
+		dpInfo.Static_libs = android.FirstUniqueStrings(dpInfo.Static_libs)
+		dpInfo.Libs = android.FirstUniqueStrings(dpInfo.Libs)
 		moduleInfos[name] = dpInfo
 
 		mkProvider, ok := module.(android.AndroidMkDataProvider)
diff --git a/java/kotlin.go b/java/kotlin.go
index e4f1bc1..eff5bb5 100644
--- a/java/kotlin.go
+++ b/java/kotlin.go
@@ -28,16 +28,20 @@
 
 var kotlinc = pctx.AndroidRemoteStaticRule("kotlinc", android.RemoteRuleSupports{Goma: true},
 	blueprint.RuleParams{
-		Command: `rm -rf "$classesDir" "$srcJarDir" "$kotlinBuildFile" "$emptyDir" && ` +
-			`mkdir -p "$classesDir" "$srcJarDir" "$emptyDir" && ` +
+		Command: `rm -rf "$classesDir" "$headerClassesDir" "$srcJarDir" "$kotlinBuildFile" "$emptyDir" && ` +
+			`mkdir -p "$classesDir" "$headerClassesDir" "$srcJarDir" "$emptyDir" && ` +
 			`${config.ZipSyncCmd} -d $srcJarDir -l $srcJarDir/list -f "*.java" $srcJars && ` +
 			`${config.GenKotlinBuildFileCmd} --classpath "$classpath" --name "$name"` +
 			` --out_dir "$classesDir" --srcs "$out.rsp" --srcs "$srcJarDir/list"` +
 			` $commonSrcFilesArg --out "$kotlinBuildFile" && ` +
-			`${config.KotlincCmd} ${config.KotlincSuppressJDK9Warnings} ${config.JavacHeapFlags} ` +
-			`$kotlincFlags -jvm-target $kotlinJvmTarget -Xbuild-file=$kotlinBuildFile ` +
-			`-kotlin-home $emptyDir && ` +
-			`${config.SoongZipCmd} -jar -o $out -C $classesDir -D $classesDir && ` +
+			`${config.KotlincCmd} ${config.KotlincGlobalFlags} ` +
+			` ${config.KotlincSuppressJDK9Warnings} ${config.JavacHeapFlags} ` +
+			` $kotlincFlags -jvm-target $kotlinJvmTarget -Xbuild-file=$kotlinBuildFile ` +
+			` -kotlin-home $emptyDir ` +
+			` -Xplugin=${config.KotlinAbiGenPluginJar} ` +
+			` -P plugin:org.jetbrains.kotlin.jvm.abi:outputDir=$headerClassesDir && ` +
+			`${config.SoongZipCmd} -jar -o $out -C $classesDir -D $classesDir -write_if_changed && ` +
+			`${config.SoongZipCmd} -jar -o $headerJar -C $headerClassesDir -D $headerClassesDir -write_if_changed && ` +
 			`rm -rf "$srcJarDir"`,
 		CommandDeps: []string{
 			"${config.KotlincCmd}",
@@ -48,15 +52,17 @@
 			"${config.KotlinStdlibJar}",
 			"${config.KotlinTrove4jJar}",
 			"${config.KotlinAnnotationJar}",
+			"${config.KotlinAbiGenPluginJar}",
 			"${config.GenKotlinBuildFileCmd}",
 			"${config.SoongZipCmd}",
 			"${config.ZipSyncCmd}",
 		},
 		Rspfile:        "$out.rsp",
 		RspfileContent: `$in`,
+		Restat:         true,
 	},
 	"kotlincFlags", "classpath", "srcJars", "commonSrcFilesArg", "srcJarDir", "classesDir",
-	"kotlinJvmTarget", "kotlinBuildFile", "emptyDir", "name")
+	"headerClassesDir", "headerJar", "kotlinJvmTarget", "kotlinBuildFile", "emptyDir", "name")
 
 func kotlinCommonSrcsList(ctx android.ModuleContext, commonSrcFiles android.Paths) android.OptionalPath {
 	if len(commonSrcFiles) > 0 {
@@ -75,7 +81,7 @@
 }
 
 // kotlinCompile takes .java and .kt sources and srcJars, and compiles the .kt sources into a classes jar in outputFile.
-func kotlinCompile(ctx android.ModuleContext, outputFile android.WritablePath,
+func kotlinCompile(ctx android.ModuleContext, outputFile, headerOutputFile android.WritablePath,
 	srcFiles, commonSrcFiles, srcJars android.Paths,
 	flags javaBuilderFlags) {
 
@@ -96,17 +102,20 @@
 	}
 
 	ctx.Build(pctx, android.BuildParams{
-		Rule:        kotlinc,
-		Description: "kotlinc",
-		Output:      outputFile,
-		Inputs:      srcFiles,
-		Implicits:   deps,
+		Rule:           kotlinc,
+		Description:    "kotlinc",
+		Output:         outputFile,
+		ImplicitOutput: headerOutputFile,
+		Inputs:         srcFiles,
+		Implicits:      deps,
 		Args: map[string]string{
 			"classpath":         flags.kotlincClasspath.FormJavaClassPath(""),
 			"kotlincFlags":      flags.kotlincFlags,
 			"commonSrcFilesArg": commonSrcFilesArg,
 			"srcJars":           strings.Join(srcJars.Strings(), " "),
 			"classesDir":        android.PathForModuleOut(ctx, "kotlinc", "classes").String(),
+			"headerClassesDir":  android.PathForModuleOut(ctx, "kotlinc", "header_classes").String(),
+			"headerJar":         headerOutputFile.String(),
 			"srcJarDir":         android.PathForModuleOut(ctx, "kotlinc", "srcJars").String(),
 			"kotlinBuildFile":   android.PathForModuleOut(ctx, "kotlinc-build.xml").String(),
 			"emptyDir":          android.PathForModuleOut(ctx, "kotlinc", "empty").String(),
@@ -117,7 +126,7 @@
 	})
 }
 
-var kapt = pctx.AndroidRemoteStaticRule("kapt", android.RemoteRuleSupports{Goma: true},
+var kaptStubs = pctx.AndroidRemoteStaticRule("kaptStubs", android.RemoteRuleSupports{Goma: true},
 	blueprint.RuleParams{
 		Command: `rm -rf "$srcJarDir" "$kotlinBuildFile" "$kaptDir" && ` +
 			`mkdir -p "$srcJarDir" "$kaptDir/sources" "$kaptDir/classes" && ` +
@@ -125,19 +134,19 @@
 			`${config.GenKotlinBuildFileCmd} --classpath "$classpath" --name "$name"` +
 			` --srcs "$out.rsp" --srcs "$srcJarDir/list"` +
 			` $commonSrcFilesArg --out "$kotlinBuildFile" && ` +
-			`${config.KotlincCmd} ${config.KaptSuppressJDK9Warnings} ${config.KotlincSuppressJDK9Warnings} ` +
+			`${config.KotlincCmd} ${config.KotlincGlobalFlags} ` +
+			`${config.KaptSuppressJDK9Warnings} ${config.KotlincSuppressJDK9Warnings} ` +
 			`${config.JavacHeapFlags} $kotlincFlags -Xplugin=${config.KotlinKaptJar} ` +
 			`-P plugin:org.jetbrains.kotlin.kapt3:sources=$kaptDir/sources ` +
 			`-P plugin:org.jetbrains.kotlin.kapt3:classes=$kaptDir/classes ` +
 			`-P plugin:org.jetbrains.kotlin.kapt3:stubs=$kaptDir/stubs ` +
 			`-P plugin:org.jetbrains.kotlin.kapt3:correctErrorTypes=true ` +
-			`-P plugin:org.jetbrains.kotlin.kapt3:aptMode=stubsAndApt ` +
+			`-P plugin:org.jetbrains.kotlin.kapt3:aptMode=stubs ` +
 			`-P plugin:org.jetbrains.kotlin.kapt3:javacArguments=$encodedJavacFlags ` +
 			`$kaptProcessorPath ` +
 			`$kaptProcessor ` +
 			`-Xbuild-file=$kotlinBuildFile && ` +
-			`${config.SoongZipCmd} -jar -o $out -C $kaptDir/sources -D $kaptDir/sources && ` +
-			`${config.SoongZipCmd} -jar -o $classesJarOut -C $kaptDir/classes -D $kaptDir/classes && ` +
+			`${config.SoongZipCmd} -jar -o $out -C $kaptDir/stubs -D $kaptDir/stubs && ` +
 			`rm -rf "$srcJarDir"`,
 		CommandDeps: []string{
 			"${config.KotlincCmd}",
@@ -195,13 +204,14 @@
 	kotlinName := filepath.Join(ctx.ModuleDir(), ctx.ModuleSubDir(), ctx.ModuleName())
 	kotlinName = strings.ReplaceAll(kotlinName, "/", "__")
 
+	// First run kapt to generate .java stubs from .kt files
+	kaptStubsJar := android.PathForModuleOut(ctx, "kapt", "stubs.jar")
 	ctx.Build(pctx, android.BuildParams{
-		Rule:           kapt,
-		Description:    "kapt",
-		Output:         srcJarOutputFile,
-		ImplicitOutput: resJarOutputFile,
-		Inputs:         srcFiles,
-		Implicits:      deps,
+		Rule:        kaptStubs,
+		Description: "kapt stubs",
+		Output:      kaptStubsJar,
+		Inputs:      srcFiles,
+		Implicits:   deps,
 		Args: map[string]string{
 			"classpath":         flags.kotlincClasspath.FormJavaClassPath(""),
 			"kotlincFlags":      flags.kotlincFlags,
@@ -217,6 +227,11 @@
 			"classesJarOut":     resJarOutputFile.String(),
 		},
 	})
+
+	// Then run turbine to perform annotation processing on the stubs and any .java srcFiles.
+	javaSrcFiles := srcFiles.FilterByExt(".java")
+	turbineSrcJars := append(android.Paths{kaptStubsJar}, srcJars...)
+	TurbineApt(ctx, srcJarOutputFile, resJarOutputFile, javaSrcFiles, turbineSrcJars, flags)
 }
 
 // kapt converts a list of key, value pairs into a base64 encoded Java serialization, which is what kapt expects.
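In summary, annotation processing is now split into a stub-generation step and a turbine step, and the generated sources then flow back into kotlinc and javac. A rough sketch of the data flow (output names follow the rules above; the sample inputs are illustrative):

// 1. kaptStubs:   a.java, b.kt --------------------------> kapt/stubs.jar   (Java stubs for Kotlin sources)
// 2. TurbineApt:  kapt/stubs.jar + a.java + srcJars ------> srcJarOutputFile (generated sources)
//                                                           resJarOutputFile (generated resources/classes)
// 3. kotlinc/javac: consume srcJarOutputFile as an extra srcjar, as asserted in kotlin_test.go.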
diff --git a/java/kotlin_test.go b/java/kotlin_test.go
index cac0af3..f9ff982 100644
--- a/java/kotlin_test.go
+++ b/java/kotlin_test.go
@@ -45,6 +45,10 @@
 	fooKotlinc := ctx.ModuleForTests("foo", "android_common").Rule("kotlinc")
 	fooJavac := ctx.ModuleForTests("foo", "android_common").Rule("javac")
 	fooJar := ctx.ModuleForTests("foo", "android_common").Output("combined/foo.jar")
+	fooHeaderJar := ctx.ModuleForTests("foo", "android_common").Output("turbine-combined/foo.jar")
+
+	fooKotlincClasses := fooKotlinc.Output
+	fooKotlincHeaderClasses := fooKotlinc.ImplicitOutput
 
 	if len(fooKotlinc.Inputs) != 2 || fooKotlinc.Inputs[0].String() != "a.java" ||
 		fooKotlinc.Inputs[1].String() != "b.kt" {
@@ -55,17 +59,21 @@
 		t.Errorf(`foo inputs %v != ["a.java"]`, fooJavac.Inputs)
 	}
 
-	if !strings.Contains(fooJavac.Args["classpath"], fooKotlinc.Output.String()) {
+	if !strings.Contains(fooJavac.Args["classpath"], fooKotlincHeaderClasses.String()) {
 		t.Errorf("foo classpath %v does not contain %q",
-			fooJavac.Args["classpath"], fooKotlinc.Output.String())
+			fooJavac.Args["classpath"], fooKotlincHeaderClasses.String())
 	}
 
-	if !inList(fooKotlinc.Output.String(), fooJar.Inputs.Strings()) {
+	if !inList(fooKotlincClasses.String(), fooJar.Inputs.Strings()) {
 		t.Errorf("foo jar inputs %v does not contain %q",
-			fooJar.Inputs.Strings(), fooKotlinc.Output.String())
+			fooJar.Inputs.Strings(), fooKotlincClasses.String())
 	}
 
-	fooHeaderJar := ctx.ModuleForTests("foo", "android_common").Output("turbine-combined/foo.jar")
+	if !inList(fooKotlincHeaderClasses.String(), fooHeaderJar.Inputs.Strings()) {
+		t.Errorf("foo header jar inputs %v does not contain %q",
+			fooHeaderJar.Inputs.Strings(), fooKotlincHeaderClasses.String())
+	}
+
 	bazHeaderJar := ctx.ModuleForTests("baz", "android_common").Output("turbine-combined/baz.jar")
 	barKotlinc := ctx.ModuleForTests("bar", "android_common").Rule("kotlinc")
 
@@ -117,51 +125,71 @@
 
 		buildOS := ctx.Config().BuildOS.String()
 
-		kapt := ctx.ModuleForTests("foo", "android_common").Rule("kapt")
-		kotlinc := ctx.ModuleForTests("foo", "android_common").Rule("kotlinc")
-		javac := ctx.ModuleForTests("foo", "android_common").Rule("javac")
+		foo := ctx.ModuleForTests("foo", "android_common")
+		kaptStubs := foo.Rule("kapt")
+		turbineApt := foo.Description("turbine apt")
+		kotlinc := foo.Rule("kotlinc")
+		javac := foo.Rule("javac")
 
 		bar := ctx.ModuleForTests("bar", buildOS+"_common").Rule("javac").Output.String()
 		baz := ctx.ModuleForTests("baz", buildOS+"_common").Rule("javac").Output.String()
 
 		// Test that the kotlin and java sources are passed to kapt and kotlinc
-		if len(kapt.Inputs) != 2 || kapt.Inputs[0].String() != "a.java" || kapt.Inputs[1].String() != "b.kt" {
-			t.Errorf(`foo kapt inputs %v != ["a.java", "b.kt"]`, kapt.Inputs)
+		if len(kaptStubs.Inputs) != 2 || kaptStubs.Inputs[0].String() != "a.java" || kaptStubs.Inputs[1].String() != "b.kt" {
+			t.Errorf(`foo kapt inputs %v != ["a.java", "b.kt"]`, kaptStubs.Inputs)
 		}
 		if len(kotlinc.Inputs) != 2 || kotlinc.Inputs[0].String() != "a.java" || kotlinc.Inputs[1].String() != "b.kt" {
 			t.Errorf(`foo kotlinc inputs %v != ["a.java", "b.kt"]`, kotlinc.Inputs)
 		}
 
-		// Test that only the java sources are passed to javac
+		// Test that only the java sources are passed to turbine-apt and javac
+		if len(turbineApt.Inputs) != 1 || turbineApt.Inputs[0].String() != "a.java" {
+			t.Errorf(`foo turbine apt inputs %v != ["a.java"]`, turbineApt.Inputs)
+		}
 		if len(javac.Inputs) != 1 || javac.Inputs[0].String() != "a.java" {
 			t.Errorf(`foo inputs %v != ["a.java"]`, javac.Inputs)
 		}
 
-		// Test that the kapt srcjar is a dependency of kotlinc and javac rules
-		if !inList(kapt.Output.String(), kotlinc.Implicits.Strings()) {
-			t.Errorf("expected %q in kotlinc implicits %v", kapt.Output.String(), kotlinc.Implicits.Strings())
-		}
-		if !inList(kapt.Output.String(), javac.Implicits.Strings()) {
-			t.Errorf("expected %q in javac implicits %v", kapt.Output.String(), javac.Implicits.Strings())
+		// Test that the kapt stubs jar is a dependency of turbine-apt
+		if !inList(kaptStubs.Output.String(), turbineApt.Implicits.Strings()) {
+			t.Errorf("expected %q in turbine-apt implicits %v", kaptStubs.Output.String(), turbineApt.Implicits.Strings())
 		}
 
-		// Test that the kapt srcjar is extracted by the kotlinc and javac rules
-		if kotlinc.Args["srcJars"] != kapt.Output.String() {
-			t.Errorf("expected %q in kotlinc srcjars %v", kapt.Output.String(), kotlinc.Args["srcJars"])
+		// Test that the turbine-apt srcjar is a dependency of kotlinc and javac rules
+		if !inList(turbineApt.Output.String(), kotlinc.Implicits.Strings()) {
+			t.Errorf("expected %q in kotlinc implicits %v", turbineApt.Output.String(), kotlinc.Implicits.Strings())
 		}
-		if javac.Args["srcJars"] != kapt.Output.String() {
-			t.Errorf("expected %q in javac srcjars %v", kapt.Output.String(), kotlinc.Args["srcJars"])
+		if !inList(turbineApt.Output.String(), javac.Implicits.Strings()) {
+			t.Errorf("expected %q in javac implicits %v", turbineApt.Output.String(), javac.Implicits.Strings())
+		}
+
+		// Test that the turbine-apt srcjar is extracted by the kotlinc and javac rules
+		if kotlinc.Args["srcJars"] != turbineApt.Output.String() {
+			t.Errorf("expected %q in kotlinc srcjars %v", turbineApt.Output.String(), kotlinc.Args["srcJars"])
+		}
+		if javac.Args["srcJars"] != turbineApt.Output.String() {
+			t.Errorf("expected %q in javac srcjars %v", turbineApt.Output.String(), javac.Args["srcJars"])
 		}
 
 		// Test that the processors are passed to kapt
 		expectedProcessorPath := "-P plugin:org.jetbrains.kotlin.kapt3:apclasspath=" + bar +
 			" -P plugin:org.jetbrains.kotlin.kapt3:apclasspath=" + baz
-		if kapt.Args["kaptProcessorPath"] != expectedProcessorPath {
-			t.Errorf("expected kaptProcessorPath %q, got %q", expectedProcessorPath, kapt.Args["kaptProcessorPath"])
+		if kaptStubs.Args["kaptProcessorPath"] != expectedProcessorPath {
+			t.Errorf("expected kaptProcessorPath %q, got %q", expectedProcessorPath, kaptStubs.Args["kaptProcessorPath"])
 		}
 		expectedProcessor := "-P plugin:org.jetbrains.kotlin.kapt3:processors=com.bar -P plugin:org.jetbrains.kotlin.kapt3:processors=com.baz"
-		if kapt.Args["kaptProcessor"] != expectedProcessor {
-			t.Errorf("expected kaptProcessor %q, got %q", expectedProcessor, kapt.Args["kaptProcessor"])
+		if kaptStubs.Args["kaptProcessor"] != expectedProcessor {
+			t.Errorf("expected kaptProcessor %q, got %q", expectedProcessor, kaptStubs.Args["kaptProcessor"])
+		}
+
+		// Test that the processors are passed to turbine-apt
+		expectedProcessorPath = "--processorpath " + bar + " " + baz
+		if !strings.Contains(turbineApt.Args["turbineFlags"], expectedProcessorPath) {
+			t.Errorf("expected turbine-apt processorpath %q, got %q", expectedProcessorPath, turbineApt.Args["turbineFlags"])
+		}
+		expectedProcessor = "--processors com.bar com.baz"
+		if !strings.Contains(turbineApt.Args["turbineFlags"], expectedProcessor) {
+			t.Errorf("expected turbine-apt processor %q, got %q", expectedProcessor, turbineApt.Args["turbineFlags"])
 		}
 
 		// Test that the processors are not passed to javac
diff --git a/java/legacy_core_platform_api_usage.go b/java/legacy_core_platform_api_usage.go
index e3396c1..8e22491 100644
--- a/java/legacy_core_platform_api_usage.go
+++ b/java/legacy_core_platform_api_usage.go
@@ -81,7 +81,6 @@
 	"ds-car-docs", // for AAOS API documentation only
 	"DynamicSystemInstallationService",
 	"EmergencyInfo-lib",
-	"ethernet-service",
 	"EthernetServiceTests",
 	"ExternalStorageProvider",
 	"face-V1-0-javalib",
diff --git a/java/lint.go b/java/lint.go
index 7845c33..e97c9c2 100644
--- a/java/lint.go
+++ b/java/lint.go
@@ -102,12 +102,12 @@
 	lintOutputs() *lintOutputs
 }
 
-type lintDepSetsIntf interface {
+type LintDepSetsIntf interface {
 	LintDepSets() LintDepSets
 
 	// Methods used to propagate strict_updatability_linting values.
-	getStrictUpdatabilityLinting() bool
-	setStrictUpdatabilityLinting(bool)
+	GetStrictUpdatabilityLinting() bool
+	SetStrictUpdatabilityLinting(bool)
 }
 
 type LintDepSets struct {
@@ -158,15 +158,15 @@
 	return l.outputs.depSets
 }
 
-func (l *linter) getStrictUpdatabilityLinting() bool {
+func (l *linter) GetStrictUpdatabilityLinting() bool {
 	return BoolDefault(l.properties.Lint.Strict_updatability_linting, false)
 }
 
-func (l *linter) setStrictUpdatabilityLinting(strictLinting bool) {
+func (l *linter) SetStrictUpdatabilityLinting(strictLinting bool) {
 	l.properties.Lint.Strict_updatability_linting = &strictLinting
 }
 
-var _ lintDepSetsIntf = (*linter)(nil)
+var _ LintDepSetsIntf = (*linter)(nil)
 
 var _ lintOutputsIntf = (*linter)(nil)
 
@@ -273,7 +273,7 @@
 	cmd.FlagForEachArg("--error_check ", l.properties.Lint.Error_checks)
 	cmd.FlagForEachArg("--fatal_check ", l.properties.Lint.Fatal_checks)
 
-	if l.getStrictUpdatabilityLinting() {
+	if l.GetStrictUpdatabilityLinting() {
 		// Verify the module does not baseline issues that endanger safe updatability.
 		if baselinePath := l.getBaselineFilepath(ctx); baselinePath.Valid() {
 			cmd.FlagWithInput("--baseline ", baselinePath.Path())
@@ -382,7 +382,7 @@
 	depSetsBuilder := NewLintDepSetBuilder().Direct(html, text, xml)
 
 	ctx.VisitDirectDepsWithTag(staticLibTag, func(dep android.Module) {
-		if depLint, ok := dep.(lintDepSetsIntf); ok {
+		if depLint, ok := dep.(LintDepSetsIntf); ok {
 			depSetsBuilder.Transitive(depLint.LintDepSets())
 		}
 	})
@@ -660,10 +660,10 @@
 // Enforce the strict updatability linting to all applicable transitive dependencies.
 func enforceStrictUpdatabilityLintingMutator(ctx android.TopDownMutatorContext) {
 	m := ctx.Module()
-	if d, ok := m.(lintDepSetsIntf); ok && d.getStrictUpdatabilityLinting() {
+	if d, ok := m.(LintDepSetsIntf); ok && d.GetStrictUpdatabilityLinting() {
 		ctx.VisitDirectDepsWithTag(staticLibTag, func(d android.Module) {
-			if a, ok := d.(lintDepSetsIntf); ok {
-				a.setStrictUpdatabilityLinting(true)
+			if a, ok := d.(LintDepSetsIntf); ok {
+				a.SetStrictUpdatabilityLinting(true)
 			}
 		})
 	}
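Exporting lintDepSetsIntf as LintDepSetsIntf, together with its getter and setter, means the interface can now be implemented or consumed by module types defined outside the java package. A minimal sketch, assuming a hypothetical module type in another package:

package example

import "android/soong/java"

// strictLintModule is purely illustrative; it shows the surface another package now has to
// implement to participate in strict updatability lint propagation.
type strictLintModule struct {
	strict  bool
	depSets java.LintDepSets
}

func (m *strictLintModule) LintDepSets() java.LintDepSets       { return m.depSets }
func (m *strictLintModule) GetStrictUpdatabilityLinting() bool  { return m.strict }
func (m *strictLintModule) SetStrictUpdatabilityLinting(s bool) { m.strict = s }

var _ java.LintDepSetsIntf = (*strictLintModule)(nil)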
diff --git a/java/plugin.go b/java/plugin.go
index 4b174b9..123dbd4 100644
--- a/java/plugin.go
+++ b/java/plugin.go
@@ -58,27 +58,32 @@
 }
 
 type pluginAttributes struct {
-	*javaLibraryAttributes
-	Processor_class        *string
-	Target_compatible_with bazel.LabelListAttribute
+	*javaCommonAttributes
+	Deps            bazel.LabelListAttribute
+	Processor_class *string
 }
 
 // ConvertWithBp2build is used to convert android_app to Bazel.
 func (p *Plugin) ConvertWithBp2build(ctx android.TopDownMutatorContext) {
-	libAttrs := p.convertLibraryAttrsBp2Build(ctx)
-	attrs := &pluginAttributes{
-		libAttrs,
-		nil,
-		bazel.LabelListAttribute{},
+	pluginName := p.Name()
+	commonAttrs, depLabels := p.convertLibraryAttrsBp2Build(ctx)
+
+	deps := depLabels.Deps
+	deps.Append(depLabels.StaticDeps)
+
+	var processorClass *string
+	if p.pluginProperties.Processor_class != nil {
+		processorClass = p.pluginProperties.Processor_class
 	}
 
-	if p.pluginProperties.Processor_class != nil {
-		attrs.Processor_class = p.pluginProperties.Processor_class
+	attrs := &pluginAttributes{
+		javaCommonAttributes: commonAttrs,
+		Deps:                 deps,
+		Processor_class:      processorClass,
 	}
 
 	props := bazel.BazelTargetModuleProperties{
 		Rule_class: "java_plugin",
 	}
-
-	ctx.CreateBazelTargetModule(props, android.CommonAttributes{Name: p.Name()}, attrs)
+	ctx.CreateBazelTargetModule(props, android.CommonAttributes{Name: pluginName}, attrs)
 }
diff --git a/java/prebuilt_apis.go b/java/prebuilt_apis.go
index c67e2bd..44650a6 100644
--- a/java/prebuilt_apis.go
+++ b/java/prebuilt_apis.go
@@ -16,6 +16,7 @@
 
 import (
 	"fmt"
+	"path"
 	"strconv"
 	"strings"
 
@@ -37,6 +38,11 @@
 	// list of api version directories
 	Api_dirs []string
 
+	// Directory containing finalized api txt files for extension versions.
+	// Extension versions higher than the base sdk extension version will
+	// be assumed to be finalized later than all Api_dirs.
+	Extensions_dir *string
+
 	// The next API directory can optionally point to a directory where
 	// files incompatibility-tracking files are stored for the current
 	// "in progress" API. Each module present in one of the api_dirs will have
@@ -60,36 +66,45 @@
 	// no need to implement
 }
 
-func parseJarPath(path string) (module string, apiver string, scope string) {
-	elements := strings.Split(path, "/")
+// parsePrebuiltPath parses the relevant variables out of a variety of paths, e.g.
+// <version>/<scope>/<module>.jar
+// <version>/<scope>/api/<module>.txt
+// extensions/<version>/<scope>/<module>.jar
+// extensions/<version>/<scope>/api/<module>.txt
+func parsePrebuiltPath(ctx android.LoadHookContext, p string) (module string, version string, scope string) {
+	elements := strings.Split(p, "/")
 
-	apiver = elements[0]
-	scope = elements[1]
-
-	module = strings.TrimSuffix(elements[2], ".jar")
-	return
-}
-
-func parseApiFilePath(ctx android.LoadHookContext, path string) (module string, apiver string, scope string) {
-	elements := strings.Split(path, "/")
-	apiver = elements[0]
-
-	scope = elements[1]
-	if scope != "public" && scope != "system" && scope != "test" && scope != "module-lib" && scope != "system-server" {
-		ctx.ModuleErrorf("invalid scope %q found in path: %q", scope, path)
+	scopeIdx := len(elements) - 2
+	if elements[scopeIdx] == "api" {
+		scopeIdx--
+	}
+	scope = elements[scopeIdx]
+	if scope != "core" && scope != "public" && scope != "system" && scope != "test" && scope != "module-lib" && scope != "system-server" {
+		ctx.ModuleErrorf("invalid scope %q found in path: %q", scope, p)
 		return
 	}
+	version = elements[scopeIdx-1]
 
-	// elements[2] is string literal "api". skipping.
-	module = strings.TrimSuffix(elements[3], ".txt")
+	module = strings.TrimSuffix(path.Base(p), path.Ext(p))
 	return
 }
 
-func prebuiltApiModuleName(mctx android.LoadHookContext, module string, scope string, apiver string) string {
-	return mctx.ModuleName() + "_" + scope + "_" + apiver + "_" + module
+// parseFinalizedPrebuiltPath is like parsePrebuiltPath, but verifies the version is numeric (a finalized version).
+func parseFinalizedPrebuiltPath(ctx android.LoadHookContext, p string) (module string, version int, scope string) {
+	module, v, scope := parsePrebuiltPath(ctx, p)
+	version, err := strconv.Atoi(v)
+	if err != nil {
+		ctx.ModuleErrorf("Found finalized API files in non-numeric dir '%v'", v)
+		return
+	}
+	return
 }
 
-func createImport(mctx android.LoadHookContext, module, scope, apiver, path, sdkVersion string, compileDex bool) {
+func prebuiltApiModuleName(mctx android.LoadHookContext, module, scope, version string) string {
+	return fmt.Sprintf("%s_%s_%s_%s", mctx.ModuleName(), scope, version, module)
+}
+
+func createImport(mctx android.LoadHookContext, module, scope, version, path, sdkVersion string, compileDex bool) {
 	props := struct {
 		Name        *string
 		Jars        []string
@@ -97,7 +112,7 @@
 		Installable *bool
 		Compile_dex *bool
 	}{}
-	props.Name = proptools.StringPtr(prebuiltApiModuleName(mctx, module, scope, apiver))
+	props.Name = proptools.StringPtr(prebuiltApiModuleName(mctx, module, scope, version))
 	props.Jars = append(props.Jars, path)
 	props.Sdk_version = proptools.StringPtr(sdkVersion)
 	props.Installable = proptools.BoolPtr(false)
@@ -132,111 +147,125 @@
 	mctx.CreateModule(genrule.GenRuleFactory, &props)
 }
 
-func getPrebuiltFiles(mctx android.LoadHookContext, p *prebuiltApis, name string) []string {
+// globApiDirs collects all the files in all api_dirs and all scopes that match the given glob, e.g. '*.jar' or 'api/*.txt'.
+// <api-dir>/<scope>/<glob> for all api-dir and scope.
+func globApiDirs(mctx android.LoadHookContext, p *prebuiltApis, api_dir_glob string) []string {
 	var files []string
 	for _, apiver := range p.properties.Api_dirs {
-		files = append(files, getPrebuiltFilesInSubdir(mctx, apiver, name)...)
+		files = append(files, globScopeDir(mctx, apiver, api_dir_glob)...)
 	}
 	return files
 }
 
-func getPrebuiltFilesInSubdir(mctx android.LoadHookContext, subdir string, name string) []string {
+// globExtensionDirs collects all the files under the extension dir (for all versions and scopes) that match the given glob
+// <extension-dir>/<version>/<scope>/<glob> for all version and scope.
+func globExtensionDirs(mctx android.LoadHookContext, p *prebuiltApis, extension_dir_glob string) []string {
+	// <extensions-dir>/<num>/<extension-dir-glob>
+	return globScopeDir(mctx, *p.properties.Extensions_dir+"/*", extension_dir_glob)
+}
+
+// globScopeDir collects all the files in the given subdir across all scopes that match the given glob, e.g. '*.jar' or 'api/*.txt'.
+// <subdir>/<scope>/<glob> for all scope.
+func globScopeDir(mctx android.LoadHookContext, subdir string, subdir_glob string) []string {
 	var files []string
 	dir := mctx.ModuleDir() + "/" + subdir
 	for _, scope := range []string{"public", "system", "test", "core", "module-lib", "system-server"} {
-		glob := fmt.Sprintf("%s/%s/%s", dir, scope, name)
+		glob := fmt.Sprintf("%s/%s/%s", dir, scope, subdir_glob)
 		vfiles, err := mctx.GlobWithDeps(glob, nil)
 		if err != nil {
-			mctx.ModuleErrorf("failed to glob %s files under %q: %s", name, dir+"/"+scope, err)
+			mctx.ModuleErrorf("failed to glob %s files under %q: %s", subdir_glob, dir+"/"+scope, err)
 		}
 		files = append(files, vfiles...)
 	}
+	for i, f := range files {
+		files[i] = strings.TrimPrefix(f, mctx.ModuleDir()+"/")
+	}
 	return files
 }
 
 func prebuiltSdkStubs(mctx android.LoadHookContext, p *prebuiltApis) {
-	mydir := mctx.ModuleDir() + "/"
 	// <apiver>/<scope>/<module>.jar
-	files := getPrebuiltFiles(mctx, p, "*.jar")
+	files := globApiDirs(mctx, p, "*.jar")
 
 	sdkVersion := proptools.StringDefault(p.properties.Imports_sdk_version, "current")
 	compileDex := proptools.BoolDefault(p.properties.Imports_compile_dex, false)
 
 	for _, f := range files {
 		// create a Import module for each jar file
-		localPath := strings.TrimPrefix(f, mydir)
-		module, apiver, scope := parseJarPath(localPath)
-		createImport(mctx, module, scope, apiver, localPath, sdkVersion, compileDex)
+		module, version, scope := parsePrebuiltPath(mctx, f)
+		createImport(mctx, module, scope, version, f, sdkVersion, compileDex)
 
 		if module == "core-for-system-modules" {
-			createSystemModules(mctx, apiver, scope)
+			createSystemModules(mctx, version, scope)
 		}
 	}
 }
 
-func createSystemModules(mctx android.LoadHookContext, apiver string, scope string) {
+func createSystemModules(mctx android.LoadHookContext, version, scope string) {
 	props := struct {
 		Name *string
 		Libs []string
 	}{}
-	props.Name = proptools.StringPtr(prebuiltApiModuleName(mctx, "system_modules", scope, apiver))
-	props.Libs = append(props.Libs, prebuiltApiModuleName(mctx, "core-for-system-modules", scope, apiver))
+	props.Name = proptools.StringPtr(prebuiltApiModuleName(mctx, "system_modules", scope, version))
+	props.Libs = append(props.Libs, prebuiltApiModuleName(mctx, "core-for-system-modules", scope, version))
 
 	mctx.CreateModule(systemModulesImportFactory, &props)
 }
 
 func prebuiltApiFiles(mctx android.LoadHookContext, p *prebuiltApis) {
-	mydir := mctx.ModuleDir() + "/"
 	// <apiver>/<scope>/api/<module>.txt
-	files := getPrebuiltFiles(mctx, p, "api/*.txt")
-
-	if len(files) == 0 {
-		mctx.ModuleErrorf("no api file found under %q", mydir)
-	}
-
-	// construct a map to find out the latest api file path
-	// for each (<module>, <scope>) pair.
-	type latestApiInfo struct {
-		module  string
-		scope   string
-		version int
-		path    string
+	apiLevelFiles := globApiDirs(mctx, p, "api/*.txt")
+	if len(apiLevelFiles) == 0 {
+		mctx.ModuleErrorf("no api file found under %q", mctx.ModuleDir())
 	}
 
 	// Create modules for all (<module>, <scope, <version>) triplets,
-	// and a "latest" module variant for each (<module>, <scope>) pair
 	apiModuleName := func(module, scope, version string) string {
 		return module + ".api." + scope + "." + version
 	}
-	m := make(map[string]latestApiInfo)
-	for _, f := range files {
-		localPath := strings.TrimPrefix(f, mydir)
-		module, apiver, scope := parseApiFilePath(mctx, localPath)
-		createApiModule(mctx, apiModuleName(module, scope, apiver), localPath)
+	for _, f := range apiLevelFiles {
+		module, version, scope := parseFinalizedPrebuiltPath(mctx, f)
+		createApiModule(mctx, apiModuleName(module, scope, strconv.Itoa(version)), f)
+	}
 
-		version, err := strconv.Atoi(apiver)
-		if err != nil {
-			mctx.ModuleErrorf("Found finalized API files in non-numeric dir %v", apiver)
-			return
-		}
+	// Figure out the latest version of each module/scope
+	type latestApiInfo struct {
+		module, scope, path string
+		version             int
+	}
 
-		// Track latest version of each module/scope, except for incompatibilities
-		if !strings.HasSuffix(module, "incompatibilities") {
+	getLatest := func(files []string) map[string]latestApiInfo {
+		m := make(map[string]latestApiInfo)
+		for _, f := range files {
+			module, version, scope := parseFinalizedPrebuiltPath(mctx, f)
+			if strings.HasSuffix(module, "incompatibilities") {
+				continue
+			}
 			key := module + "." + scope
-			info, ok := m[key]
-			if !ok {
-				m[key] = latestApiInfo{module, scope, version, localPath}
-			} else if version > info.version {
-				info.version = version
-				info.path = localPath
-				m[key] = info
+			info, exists := m[key]
+			if !exists || version > info.version {
+				m[key] = latestApiInfo{module, scope, f, version}
+			}
+		}
+		return m
+	}
+
+	latest := getLatest(apiLevelFiles)
+	if p.properties.Extensions_dir != nil {
+		extensionApiFiles := globExtensionDirs(mctx, p, "api/*.txt")
+		for k, v := range getLatest(extensionApiFiles) {
+			if v.version > mctx.Config().PlatformBaseSdkExtensionVersion() {
+				if _, exists := latest[k]; !exists {
+					mctx.ModuleErrorf("Module %v finalized for extension %d but never during an API level; likely error", v.module, v.version)
+				}
+				latest[k] = v
 			}
 		}
 	}
 
 	// Sort the keys in order to make build.ninja stable
-	for _, k := range android.SortedStringKeys(m) {
-		info := m[k]
+	for _, k := range android.SortedStringKeys(latest) {
+		info := latest[k]
 		name := apiModuleName(info.module, info.scope, "latest")
 		createApiModule(mctx, name, info.path)
 	}
@@ -244,21 +273,20 @@
 	// Create incompatibilities tracking files for all modules, if we have a "next" api.
 	incompatibilities := make(map[string]bool)
 	if nextApiDir := String(p.properties.Next_api_dir); nextApiDir != "" {
-		files := getPrebuiltFilesInSubdir(mctx, nextApiDir, "api/*incompatibilities.txt")
+		files := globScopeDir(mctx, nextApiDir, "api/*incompatibilities.txt")
 		for _, f := range files {
-			localPath := strings.TrimPrefix(f, mydir)
-			filename, _, scope := parseApiFilePath(mctx, localPath)
+			filename, _, scope := parsePrebuiltPath(mctx, f)
 			referencedModule := strings.TrimSuffix(filename, "-incompatibilities")
 
-			createApiModule(mctx, apiModuleName(referencedModule+"-incompatibilities", scope, "latest"), localPath)
+			createApiModule(mctx, apiModuleName(referencedModule+"-incompatibilities", scope, "latest"), f)
 
 			incompatibilities[referencedModule+"."+scope] = true
 		}
 	}
 	// Create empty incompatibilities files for remaining modules
-	for _, k := range android.SortedStringKeys(m) {
+	for _, k := range android.SortedStringKeys(latest) {
 		if _, ok := incompatibilities[k]; !ok {
-			createEmptyFile(mctx, apiModuleName(m[k].module+"-incompatibilities", m[k].scope, "latest"))
+			createEmptyFile(mctx, apiModuleName(latest[k].module+"-incompatibilities", latest[k].scope, "latest"))
 		}
 	}
 }
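For reference, the hunk above changes how the "latest" API module is chosen once an extensions directory is configured. The following is a minimal, standalone sketch of that selection rule (pickLatest and baseExtensionVersion are illustrative names, not part of the patch): finalized API-level entries win by default, and an extension entry only replaces one when its level exceeds the platform's base SDK extension version. The real code additionally reports an error when an extension-only module has no API-level entry at all.

package main

import "fmt"

// latestApiInfo mirrors the struct used above: one entry per (module, scope).
type latestApiInfo struct {
	module, scope, path string
	version             int
}

// pickLatest merges finalized API-level entries with extension entries,
// letting an extension win only when it is newer than the base extension
// version already implied by the latest API level.
func pickLatest(apiLevels, extensions map[string]latestApiInfo, baseExtensionVersion int) map[string]latestApiInfo {
	latest := make(map[string]latestApiInfo)
	for k, v := range apiLevels {
		latest[k] = v
	}
	for k, v := range extensions {
		if v.version > baseExtensionVersion {
			latest[k] = v
		}
	}
	return latest
}

func main() {
	apiLevels := map[string]latestApiInfo{
		"foo.public": {"foo", "public", "prebuilts/sdk/32/public/api/foo.txt", 32},
	}
	extensions := map[string]latestApiInfo{
		"foo.public": {"foo", "public", "prebuilts/sdk/extensions/2/public/api/foo.txt", 2},
	}
	// Base extension version 1: extension 2 is newer, so it becomes "latest".
	fmt.Println(pickLatest(apiLevels, extensions, 1)["foo.public"].path)
	// Base extension version 2: extension 2 is not newer, API level 32 stays.
	fmt.Println(pickLatest(apiLevels, extensions, 2)["foo.public"].path)
}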
diff --git a/java/prebuilt_apis_test.go b/java/prebuilt_apis_test.go
index 79f4225..75422ad 100644
--- a/java/prebuilt_apis_test.go
+++ b/java/prebuilt_apis_test.go
@@ -20,9 +20,14 @@
 	"testing"
 
 	"android/soong/android"
+
 	"github.com/google/blueprint"
 )
 
+func intPtr(v int) *int {
+	return &v
+}
+
 func TestPrebuiltApis_SystemModulesCreation(t *testing.T) {
 	result := android.GroupFixturePreparers(
 		prepareForJavaTest,
@@ -54,3 +59,34 @@
 	sort.Strings(expected)
 	android.AssertArrayString(t, "sdk system modules", expected, sdkSystemModules)
 }
+
+func TestPrebuiltApis_WithExtensions(t *testing.T) {
+	runTestWithBaseExtensionLevel := func(v int) (foo_input string, bar_input string) {
+		result := android.GroupFixturePreparers(
+			prepareForJavaTest,
+			android.FixtureModifyProductVariables(func(variables android.FixtureProductVariables) {
+				variables.Platform_base_sdk_extension_version = intPtr(v)
+			}),
+			FixtureWithPrebuiltApisAndExtensions(map[string][]string{
+				"31":      {"foo"},
+				"32":      {"foo", "bar"},
+				"current": {"foo", "bar"},
+			}, map[string][]string{
+				"1": {"foo"},
+				"2": {"foo", "bar"},
+			}),
+		).RunTest(t)
+		foo_input = result.ModuleForTests("foo.api.public.latest", "").Rule("generator").Implicits[0].String()
+		bar_input = result.ModuleForTests("bar.api.public.latest", "").Rule("generator").Implicits[0].String()
+		return
+	}
+	// Here, the base extension level is 1, so extension level 2 is the latest
+	foo_input, bar_input := runTestWithBaseExtensionLevel(1)
+	android.AssertStringEquals(t, "Expected latest = extension level 2", "prebuilts/sdk/extensions/2/public/api/foo.txt", foo_input)
+	android.AssertStringEquals(t, "Expected latest = extension level 2", "prebuilts/sdk/extensions/2/public/api/bar.txt", bar_input)
+
+	// Here, the base extension level is 2, so 2 is not later than 32.
+	foo_input, bar_input = runTestWithBaseExtensionLevel(2)
+	android.AssertStringEquals(t, "Expected latest = api level 32", "prebuilts/sdk/32/public/api/foo.txt", foo_input)
+	android.AssertStringEquals(t, "Expected latest = api level 32", "prebuilts/sdk/32/public/api/bar.txt", bar_input)
+}
diff --git a/java/proto.go b/java/proto.go
index 8d23803..5ba486f 100644
--- a/java/proto.go
+++ b/java/proto.go
@@ -19,6 +19,13 @@
 	"strconv"
 
 	"android/soong/android"
+	"android/soong/bazel"
+
+	"github.com/google/blueprint/proptools"
+)
+
+const (
+	protoTypeDefault = "lite"
 )
 
 func genProto(ctx android.ModuleContext, protoFiles android.Paths, flags android.ProtoFlags) android.Paths {
@@ -134,3 +141,52 @@
 
 	return flags
 }
+
+type protoAttributes struct {
+	Deps bazel.LabelListAttribute
+}
+
+func bp2buildProto(ctx android.Bp2buildMutatorContext, m *Module, protoSrcs bazel.LabelListAttribute) *bazel.Label {
+	protoInfo, ok := android.Bp2buildProtoProperties(ctx, &m.ModuleBase, protoSrcs)
+	if !ok {
+		return nil
+	}
+
+	typ := proptools.StringDefault(protoInfo.Type, protoTypeDefault)
+	var rule_class string
+	suffix := "_java_proto"
+	switch typ {
+	case "nano":
+		suffix += "_nano"
+		rule_class = "java_nano_proto_library"
+	case "micro":
+		suffix += "_micro"
+		rule_class = "java_micro_proto_library"
+	case "lite":
+		suffix += "_lite"
+		rule_class = "java_lite_proto_library"
+	case "stream":
+		suffix += "_stream"
+		rule_class = "java_stream_proto_library"
+	case "full":
+		rule_class = "java_proto_library"
+	default:
+		ctx.PropertyErrorf("proto.type", "cannot handle conversion at this time: %q", typ)
+	}
+
+	protoLabel := bazel.Label{Label: ":" + m.Name() + "_proto"}
+	var protoAttrs protoAttributes
+	protoAttrs.Deps.SetValue(bazel.LabelList{Includes: []bazel.Label{protoLabel}})
+
+	name := m.Name() + suffix
+
+	ctx.CreateBazelTargetModule(
+		bazel.BazelTargetModuleProperties{
+			Rule_class:        rule_class,
+			Bzl_load_location: "//build/bazel/rules/java:proto.bzl",
+		},
+		android.CommonAttributes{Name: name},
+		&protoAttrs)
+
+	return &bazel.Label{Label: ":" + name}
+}
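For quick orientation, the proto.type-to-rule mapping added above can be summarized with a small standalone sketch (javaProtoTarget is an illustrative helper, not part of the patch, and the printed BUILD-style line is only an approximation of the generated target); the actual conversion also wires the target's deps to the module's ":<name>_proto" label.

package main

import "fmt"

// javaProtoTarget returns the Bazel rule class and target name that the
// conversion above selects for a java module with the given proto.type.
func javaProtoTarget(moduleName, protoType string) (ruleClass, targetName string) {
	suffix := "_java_proto"
	switch protoType {
	case "nano":
		suffix += "_nano"
		ruleClass = "java_nano_proto_library"
	case "micro":
		suffix += "_micro"
		ruleClass = "java_micro_proto_library"
	case "lite", "": // lite is the default when proto.type is unset
		suffix += "_lite"
		ruleClass = "java_lite_proto_library"
	case "stream":
		suffix += "_stream"
		ruleClass = "java_stream_proto_library"
	case "full":
		ruleClass = "java_proto_library"
	}
	return ruleClass, moduleName + suffix
}

func main() {
	ruleClass, name := javaProtoTarget("libfoo", "lite")
	fmt.Printf("%s(name = %q, deps = [\":libfoo_proto\"])\n", ruleClass, name)
}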
diff --git a/java/sdk_library.go b/java/sdk_library.go
index e794a48..c37ed1a 100644
--- a/java/sdk_library.go
+++ b/java/sdk_library.go
@@ -1511,15 +1511,15 @@
 	}
 	droidstubsArgs = append(droidstubsArgs, module.sdkLibraryProperties.Droiddoc_options...)
 	disabledWarnings := []string{
-		"MissingPermission",
 		"BroadcastBehavior",
-		"HiddenSuperclass",
 		"DeprecationMismatch",
-		"UnavailableSymbol",
-		"SdkConstant",
+		"HiddenSuperclass",
 		"HiddenTypeParameter",
+		"MissingPermission",
+		"SdkConstant",
 		"Todo",
 		"Typo",
+		"UnavailableSymbol",
 	}
 	droidstubsArgs = append(droidstubsArgs, android.JoinWithPrefix(disabledWarnings, "--hide "))
 
@@ -2362,17 +2362,17 @@
 	}
 }
 
-func (module *SdkLibraryImport) getStrictUpdatabilityLinting() bool {
+func (module *SdkLibraryImport) GetStrictUpdatabilityLinting() bool {
 	if module.implLibraryModule == nil {
 		return false
 	} else {
-		return module.implLibraryModule.getStrictUpdatabilityLinting()
+		return module.implLibraryModule.GetStrictUpdatabilityLinting()
 	}
 }
 
-func (module *SdkLibraryImport) setStrictUpdatabilityLinting(strictLinting bool) {
+func (module *SdkLibraryImport) SetStrictUpdatabilityLinting(strictLinting bool) {
 	if module.implLibraryModule != nil {
-		module.implLibraryModule.setStrictUpdatabilityLinting(strictLinting)
+		module.implLibraryModule.SetStrictUpdatabilityLinting(strictLinting)
 	}
 }
 
diff --git a/java/testing.go b/java/testing.go
index 6c49bc8..4000334 100644
--- a/java/testing.go
+++ b/java/testing.go
@@ -70,6 +70,10 @@
 		defaultJavaDir + "/framework/aidl": nil,
 		// Needed for various deps defined in GatherRequiredDepsForTest()
 		defaultJavaDir + "/a.java": nil,
+
+		// Needed for R8 rules on apps
+		"build/make/core/proguard.flags":             nil,
+		"build/make/core/proguard_basic_keeps.flags": nil,
 	}.AddToFixture(),
 	// The java default module definitions.
 	android.FixtureAddTextFile(defaultJavaDir+"/Android.bp", gatherRequiredDepsForTest()),
@@ -146,6 +150,10 @@
 // This defines a file in the mock file system in a predefined location (prebuilts/sdk/Android.bp)
 // and so only one instance of this can be used in each fixture.
 func FixtureWithPrebuiltApis(release2Modules map[string][]string) android.FixturePreparer {
+	return FixtureWithPrebuiltApisAndExtensions(release2Modules, nil)
+}
+
+func FixtureWithPrebuiltApisAndExtensions(apiLevel2Modules map[string][]string, extensionLevel2Modules map[string][]string) android.FixturePreparer {
 	mockFS := android.MockFS{}
 	path := "prebuilts/sdk/Android.bp"
 
@@ -153,14 +161,20 @@
 			prebuilt_apis {
 				name: "sdk",
 				api_dirs: ["%s"],
+				extensions_dir: "extensions",
 				imports_sdk_version: "none",
 				imports_compile_dex: true,
 			}
-		`, strings.Join(android.SortedStringKeys(release2Modules), `", "`))
+		`, strings.Join(android.SortedStringKeys(apiLevel2Modules), `", "`))
 
-	for release, modules := range release2Modules {
+	for release, modules := range apiLevel2Modules {
 		mockFS.Merge(prebuiltApisFilesForModules([]string{release}, modules))
 	}
+	if extensionLevel2Modules != nil {
+		for release, modules := range extensionLevel2Modules {
+			mockFS.Merge(prebuiltExtensionApiFiles([]string{release}, modules))
+		}
+	}
 	return android.GroupFixturePreparers(
 		android.FixtureAddTextFile(path, bp),
 		android.FixtureMergeMockFs(mockFS),
@@ -198,6 +212,19 @@
 	return fs
 }
 
+func prebuiltExtensionApiFiles(extensionLevels []string, modules []string) map[string][]byte {
+	fs := make(map[string][]byte)
+	for _, level := range extensionLevels {
+		for _, sdkKind := range []android.SdkKind{android.SdkPublic, android.SdkSystem, android.SdkModule, android.SdkSystemServer} {
+			for _, lib := range modules {
+				fs[fmt.Sprintf("prebuilts/sdk/extensions/%s/%s/api/%s.txt", level, sdkKind, lib)] = nil
+				fs[fmt.Sprintf("prebuilts/sdk/extensions/%s/%s/api/%s-removed.txt", level, sdkKind, lib)] = nil
+			}
+		}
+	}
+	return fs
+}
+
 // FixtureConfigureBootJars configures the boot jars in both the dexpreopt.GlobalConfig and
 // Config.productVariables structs. As a side effect that enables dexpreopt.
 func FixtureConfigureBootJars(bootJars ...string) android.FixturePreparer {
diff --git a/linkerconfig/linkerconfig.go b/linkerconfig/linkerconfig.go
index dbc112e..003b275 100644
--- a/linkerconfig/linkerconfig.go
+++ b/linkerconfig/linkerconfig.go
@@ -158,7 +158,6 @@
 				entries.SetString("LOCAL_MODULE_PATH", l.installDirPath.String())
 				entries.SetString("LOCAL_INSTALLED_MODULE_STEM", l.outputFilePath.Base())
 				entries.SetBoolIfTrue("LOCAL_UNINSTALLABLE_MODULE", !installable)
-				entries.SetString("LINKER_CONFIG_PATH_"+l.Name(), l.OutputFile().String())
 			},
 		},
 	}}
diff --git a/mk2rbc/expr.go b/mk2rbc/expr.go
index 07f7ca1..9266520 100644
--- a/mk2rbc/expr.go
+++ b/mk2rbc/expr.go
@@ -221,11 +221,9 @@
 }
 
 func (xi *interpolateExpr) transform(transformer func(expr starlarkExpr) starlarkExpr) starlarkExpr {
-	argsCopy := make([]starlarkExpr, len(xi.args))
-	for i, arg := range xi.args {
-		argsCopy[i] = arg.transform(transformer)
+	for i := range xi.args {
+		xi.args[i] = xi.args[i].transform(transformer)
 	}
-	xi.args = argsCopy
 	if replacement := transformer(xi); replacement != nil {
 		return replacement
 	} else {
@@ -234,19 +232,18 @@
 }
 
 type variableRefExpr struct {
-	ref       variable
-	isDefined bool
+	ref variable
 }
 
-func NewVariableRefExpr(ref variable, isDefined bool) starlarkExpr {
+func NewVariableRefExpr(ref variable) starlarkExpr {
 	if predefined, ok := ref.(*predefinedVariable); ok {
 		return predefined.value
 	}
-	return &variableRefExpr{ref, isDefined}
+	return &variableRefExpr{ref}
 }
 
 func (v *variableRefExpr) emit(gctx *generationContext) {
-	v.ref.emitGet(gctx, v.isDefined)
+	v.ref.emitGet(gctx)
 }
 
 func (v *variableRefExpr) typ() starlarkType {
@@ -591,9 +588,8 @@
 	if cx.object != nil {
 		cx.object = cx.object.transform(transformer)
 	}
-	argsCopy := make([]starlarkExpr, len(cx.args))
-	for i, arg := range cx.args {
-		argsCopy[i] = arg.transform(transformer)
+	for i := range cx.args {
+		cx.args[i] = cx.args[i].transform(transformer)
 	}
 	if replacement := transformer(cx); replacement != nil {
 		return replacement
@@ -768,3 +764,35 @@
 	x, ok := expr.(*stringLiteralExpr)
 	return ok && x.literal == ""
 }
+
+func negateExpr(expr starlarkExpr) starlarkExpr {
+	switch typedExpr := expr.(type) {
+	case *notExpr:
+		return typedExpr.expr
+	case *inExpr:
+		typedExpr.isNot = !typedExpr.isNot
+		return typedExpr
+	case *eqExpr:
+		typedExpr.isEq = !typedExpr.isEq
+		return typedExpr
+	case *binaryOpExpr:
+		switch typedExpr.op {
+		case ">":
+			typedExpr.op = "<="
+			return typedExpr
+		case "<":
+			typedExpr.op = ">="
+			return typedExpr
+		case ">=":
+			typedExpr.op = "<"
+			return typedExpr
+		case "<=":
+			typedExpr.op = ">"
+			return typedExpr
+		default:
+			return &notExpr{expr: expr}
+		}
+	default:
+		return &notExpr{expr: expr}
+	}
+}
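The new negateExpr helper pushes negation into the expression where possible (inExpr and eqExpr just flip their isNot/isEq flags) instead of always wrapping in a not-expression. A small standalone sketch of the comparison-operator branch follows (negateOp is an illustrative name, not part of the patch).

package main

import "fmt"

// negateOp mirrors the binaryOpExpr branch of negateExpr above: relational
// operators are inverted in place; anything else falls back to a not-wrapper.
func negateOp(op string) (string, bool) {
	switch op {
	case ">":
		return "<=", true
	case "<":
		return ">=", true
	case ">=":
		return "<", true
	case "<=":
		return ">", true
	default:
		return op, false
	}
}

func main() {
	for _, op := range []string{">", "<", ">=", "<=", "%"} {
		if flipped, ok := negateOp(op); ok {
			fmt.Printf("not (a %s b)  ->  a %s b\n", op, flipped)
		} else {
			fmt.Printf("not (a %s b)  ->  not (a %s b)\n", op, op)
		}
	}
}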
diff --git a/mk2rbc/mk2rbc.go b/mk2rbc/mk2rbc.go
index cb50a50..8f4fea4 100644
--- a/mk2rbc/mk2rbc.go
+++ b/mk2rbc/mk2rbc.go
@@ -32,6 +32,7 @@
 	"os"
 	"path/filepath"
 	"regexp"
+	"sort"
 	"strconv"
 	"strings"
 	"text/scanner"
@@ -50,15 +51,12 @@
 	soongNsPrefix = "SOONG_CONFIG_"
 
 	// And here are the functions and variables:
-	cfnGetCfg          = baseName + ".cfg"
-	cfnMain            = baseName + ".product_configuration"
-	cfnBoardMain       = baseName + ".board_configuration"
-	cfnPrintVars       = baseName + ".printvars"
-	cfnWarning         = baseName + ".warning"
-	cfnLocalAppend     = baseName + ".local_append"
-	cfnLocalSetDefault = baseName + ".local_set_default"
-	cfnInherit         = baseName + ".inherit"
-	cfnSetListDefault  = baseName + ".setdefault"
+	cfnGetCfg         = baseName + ".cfg"
+	cfnMain           = baseName + ".product_configuration"
+	cfnBoardMain      = baseName + ".board_configuration"
+	cfnPrintVars      = baseName + ".printvars"
+	cfnInherit        = baseName + ".inherit"
+	cfnSetListDefault = baseName + ".setdefault"
 )
 
 const (
@@ -69,54 +67,67 @@
 var knownFunctions = map[string]interface {
 	parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) starlarkExpr
 }{
-	"abspath":                             &simpleCallParser{name: baseName + ".abspath", returnType: starlarkTypeString, addGlobals: false},
-	"add_soong_config_namespace":          &simpleCallParser{name: baseName + ".soong_config_namespace", returnType: starlarkTypeVoid, addGlobals: true},
-	"add_soong_config_var_value":          &simpleCallParser{name: baseName + ".soong_config_set", returnType: starlarkTypeVoid, addGlobals: true},
-	soongConfigAssign:                     &simpleCallParser{name: baseName + ".soong_config_set", returnType: starlarkTypeVoid, addGlobals: true},
-	soongConfigAppend:                     &simpleCallParser{name: baseName + ".soong_config_append", returnType: starlarkTypeVoid, addGlobals: true},
-	"soong_config_get":                    &simpleCallParser{name: baseName + ".soong_config_get", returnType: starlarkTypeString, addGlobals: true},
-	"add-to-product-copy-files-if-exists": &simpleCallParser{name: baseName + ".copy_if_exists", returnType: starlarkTypeList, addGlobals: false},
-	"addprefix":                           &simpleCallParser{name: baseName + ".addprefix", returnType: starlarkTypeList, addGlobals: false},
-	"addsuffix":                           &simpleCallParser{name: baseName + ".addsuffix", returnType: starlarkTypeList, addGlobals: false},
-	"copy-files":                          &simpleCallParser{name: baseName + ".copy_files", returnType: starlarkTypeList, addGlobals: false},
-	"dir":                                 &simpleCallParser{name: baseName + ".dir", returnType: starlarkTypeList, addGlobals: false},
-	"dist-for-goals":                      &simpleCallParser{name: baseName + ".mkdist_for_goals", returnType: starlarkTypeVoid, addGlobals: true},
-	"enforce-product-packages-exist":      &simpleCallParser{name: baseName + ".enforce_product_packages_exist", returnType: starlarkTypeVoid, addGlobals: false},
-	"error":                               &makeControlFuncParser{name: baseName + ".mkerror"},
-	"findstring":                          &simpleCallParser{name: baseName + ".findstring", returnType: starlarkTypeInt, addGlobals: false},
-	"find-copy-subdir-files":              &simpleCallParser{name: baseName + ".find_and_copy", returnType: starlarkTypeList, addGlobals: false},
-	"filter":                              &simpleCallParser{name: baseName + ".filter", returnType: starlarkTypeList, addGlobals: false},
-	"filter-out":                          &simpleCallParser{name: baseName + ".filter_out", returnType: starlarkTypeList, addGlobals: false},
-	"firstword":                           &firstOrLastwordCallParser{isLastWord: false},
-	"foreach":                             &foreachCallPaser{},
-	"if":                                  &ifCallParser{},
-	"info":                                &makeControlFuncParser{name: baseName + ".mkinfo"},
-	"is-board-platform":                   &simpleCallParser{name: baseName + ".board_platform_is", returnType: starlarkTypeBool, addGlobals: true},
-	"is-board-platform2":                  &simpleCallParser{name: baseName + ".board_platform_is", returnType: starlarkTypeBool, addGlobals: true},
-	"is-board-platform-in-list":           &simpleCallParser{name: baseName + ".board_platform_in", returnType: starlarkTypeBool, addGlobals: true},
-	"is-board-platform-in-list2":          &simpleCallParser{name: baseName + ".board_platform_in", returnType: starlarkTypeBool, addGlobals: true},
-	"is-product-in-list":                  &isProductInListCallParser{},
-	"is-vendor-board-platform":            &isVendorBoardPlatformCallParser{},
-	"is-vendor-board-qcom":                &isVendorBoardQcomCallParser{},
-	"lastword":                            &firstOrLastwordCallParser{isLastWord: true},
-	"notdir":                              &simpleCallParser{name: baseName + ".notdir", returnType: starlarkTypeString, addGlobals: false},
-	"math_max":                            &mathMaxOrMinCallParser{function: "max"},
-	"math_min":                            &mathMaxOrMinCallParser{function: "min"},
-	"math_gt_or_eq":                       &mathComparisonCallParser{op: ">="},
-	"math_gt":                             &mathComparisonCallParser{op: ">"},
-	"math_lt":                             &mathComparisonCallParser{op: "<"},
-	"my-dir":                              &myDirCallParser{},
-	"patsubst":                            &substCallParser{fname: "patsubst"},
-	"product-copy-files-by-pattern":       &simpleCallParser{name: baseName + ".product_copy_files_by_pattern", returnType: starlarkTypeList, addGlobals: false},
-	"require-artifacts-in-path":           &simpleCallParser{name: baseName + ".require_artifacts_in_path", returnType: starlarkTypeVoid, addGlobals: false},
-	"require-artifacts-in-path-relaxed":   &simpleCallParser{name: baseName + ".require_artifacts_in_path_relaxed", returnType: starlarkTypeVoid, addGlobals: false},
+	"abspath":                              &simpleCallParser{name: baseName + ".abspath", returnType: starlarkTypeString},
+	"add-product-dex-preopt-module-config": &simpleCallParser{name: baseName + ".add_product_dex_preopt_module_config", returnType: starlarkTypeString, addHandle: true},
+	"add_soong_config_namespace":           &simpleCallParser{name: baseName + ".soong_config_namespace", returnType: starlarkTypeVoid, addGlobals: true},
+	"add_soong_config_var_value":           &simpleCallParser{name: baseName + ".soong_config_set", returnType: starlarkTypeVoid, addGlobals: true},
+	soongConfigAssign:                      &simpleCallParser{name: baseName + ".soong_config_set", returnType: starlarkTypeVoid, addGlobals: true},
+	soongConfigAppend:                      &simpleCallParser{name: baseName + ".soong_config_append", returnType: starlarkTypeVoid, addGlobals: true},
+	"soong_config_get":                     &simpleCallParser{name: baseName + ".soong_config_get", returnType: starlarkTypeString, addGlobals: true},
+	"add-to-product-copy-files-if-exists":  &simpleCallParser{name: baseName + ".copy_if_exists", returnType: starlarkTypeList},
+	"addprefix":                            &simpleCallParser{name: baseName + ".addprefix", returnType: starlarkTypeList},
+	"addsuffix":                            &simpleCallParser{name: baseName + ".addsuffix", returnType: starlarkTypeList},
+	"copy-files":                           &simpleCallParser{name: baseName + ".copy_files", returnType: starlarkTypeList},
+	"dir":                                  &simpleCallParser{name: baseName + ".dir", returnType: starlarkTypeString},
+	"dist-for-goals":                       &simpleCallParser{name: baseName + ".mkdist_for_goals", returnType: starlarkTypeVoid, addGlobals: true},
+	"enforce-product-packages-exist":       &simpleCallParser{name: baseName + ".enforce_product_packages_exist", returnType: starlarkTypeVoid, addHandle: true},
+	"error":                                &makeControlFuncParser{name: baseName + ".mkerror"},
+	"findstring":                           &simpleCallParser{name: baseName + ".findstring", returnType: starlarkTypeInt},
+	"find-copy-subdir-files":               &simpleCallParser{name: baseName + ".find_and_copy", returnType: starlarkTypeList},
+	"filter":                               &simpleCallParser{name: baseName + ".filter", returnType: starlarkTypeList},
+	"filter-out":                           &simpleCallParser{name: baseName + ".filter_out", returnType: starlarkTypeList},
+	"firstword":                            &firstOrLastwordCallParser{isLastWord: false},
+	"foreach":                              &foreachCallParser{},
+	"if":                                   &ifCallParser{},
+	"info":                                 &makeControlFuncParser{name: baseName + ".mkinfo"},
+	"is-board-platform":                    &simpleCallParser{name: baseName + ".board_platform_is", returnType: starlarkTypeBool, addGlobals: true},
+	"is-board-platform2":                   &simpleCallParser{name: baseName + ".board_platform_is", returnType: starlarkTypeBool, addGlobals: true},
+	"is-board-platform-in-list":            &simpleCallParser{name: baseName + ".board_platform_in", returnType: starlarkTypeBool, addGlobals: true},
+	"is-board-platform-in-list2":           &simpleCallParser{name: baseName + ".board_platform_in", returnType: starlarkTypeBool, addGlobals: true},
+	"is-product-in-list":                   &isProductInListCallParser{},
+	"is-vendor-board-platform":             &isVendorBoardPlatformCallParser{},
+	"is-vendor-board-qcom":                 &isVendorBoardQcomCallParser{},
+	"lastword":                             &firstOrLastwordCallParser{isLastWord: true},
+	"notdir":                               &simpleCallParser{name: baseName + ".notdir", returnType: starlarkTypeString},
+	"math_max":                             &mathMaxOrMinCallParser{function: "max"},
+	"math_min":                             &mathMaxOrMinCallParser{function: "min"},
+	"math_gt_or_eq":                        &mathComparisonCallParser{op: ">="},
+	"math_gt":                              &mathComparisonCallParser{op: ">"},
+	"math_lt":                              &mathComparisonCallParser{op: "<"},
+	"my-dir":                               &myDirCallParser{},
+	"patsubst":                             &substCallParser{fname: "patsubst"},
+	"product-copy-files-by-pattern":        &simpleCallParser{name: baseName + ".product_copy_files_by_pattern", returnType: starlarkTypeList},
+	"require-artifacts-in-path":            &simpleCallParser{name: baseName + ".require_artifacts_in_path", returnType: starlarkTypeVoid, addHandle: true},
+	"require-artifacts-in-path-relaxed":    &simpleCallParser{name: baseName + ".require_artifacts_in_path_relaxed", returnType: starlarkTypeVoid, addHandle: true},
 	// TODO(asmundak): remove it once all calls are removed from configuration makefiles. see b/183161002
 	"shell":    &shellCallParser{},
-	"strip":    &simpleCallParser{name: baseName + ".mkstrip", returnType: starlarkTypeString, addGlobals: false},
+	"sort":     &simpleCallParser{name: baseName + ".mksort", returnType: starlarkTypeList},
+	"strip":    &simpleCallParser{name: baseName + ".mkstrip", returnType: starlarkTypeString},
 	"subst":    &substCallParser{fname: "subst"},
 	"warning":  &makeControlFuncParser{name: baseName + ".mkwarning"},
 	"word":     &wordCallParser{},
-	"wildcard": &simpleCallParser{name: baseName + ".expand_wildcard", returnType: starlarkTypeList, addGlobals: false},
+	"wildcard": &simpleCallParser{name: baseName + ".expand_wildcard", returnType: starlarkTypeList},
+}
+
+// The same as knownFunctions, but returns a []starlarkNode instead of a starlarkExpr
+var knownNodeFunctions = map[string]interface {
+	parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) []starlarkNode
+}{
+	"eval":                      &evalNodeParser{},
+	"if":                        &ifCallNodeParser{},
+	"inherit-product":           &inheritProductCallParser{loadAlways: true},
+	"inherit-product-if-exists": &inheritProductCallParser{loadAlways: false},
+	"foreach":                   &foreachCallNodeParser{},
 }
 
 // These are functions that we don't implement conversions for, but
@@ -186,17 +197,57 @@
 	return s == "error" || s == "warning" || s == "info"
 }
 
+// varAssignmentScope tracks which variables have been assigned so far in the
+// current block. It is used during Starlark generation to decide whether a
+// variable reference may still be unset at that point.
+type varAssignmentScope struct {
+	outer *varAssignmentScope
+	vars  map[string]bool
+}
+
 // Starlark output generation context
 type generationContext struct {
-	buf          strings.Builder
-	starScript   *StarlarkScript
-	indentLevel  int
-	inAssignment bool
-	tracedCount  int
+	buf            strings.Builder
+	starScript     *StarlarkScript
+	indentLevel    int
+	inAssignment   bool
+	tracedCount    int
+	varAssignments *varAssignmentScope
 }
 
 func NewGenerateContext(ss *StarlarkScript) *generationContext {
-	return &generationContext{starScript: ss}
+	return &generationContext{
+		starScript: ss,
+		varAssignments: &varAssignmentScope{
+			outer: nil,
+			vars:  make(map[string]bool),
+		},
+	}
+}
+
+func (gctx *generationContext) pushVariableAssignments() {
+	va := &varAssignmentScope{
+		outer: gctx.varAssignments,
+		vars:  make(map[string]bool),
+	}
+	gctx.varAssignments = va
+}
+
+func (gctx *generationContext) popVariableAssignments() {
+	gctx.varAssignments = gctx.varAssignments.outer
+}
+
+func (gctx *generationContext) hasBeenAssigned(v variable) bool {
+	for va := gctx.varAssignments; va != nil; va = va.outer {
+		if _, ok := va.vars[v.name()]; ok {
+			return true
+		}
+	}
+	return false
+}
+
+func (gctx *generationContext) setHasBeenAssigned(v variable) {
+	gctx.varAssignments.vars[v.name()] = true
 }
 
 // emit returns generated script
@@ -383,14 +434,6 @@
 	nodeLocator    func(pos mkparser.Pos) int
 }
 
-// varAssignmentScope points to the last assignment for each variable
-// in the current block. It is used during the parsing to chain
-// the assignments to a variable together.
-type varAssignmentScope struct {
-	outer *varAssignmentScope
-	vars  map[string]*assignmentNode
-}
-
 // parseContext holds the script we are generating and all the ephemeral data
 // needed during the parsing.
 type parseContext struct {
@@ -404,11 +447,12 @@
 	errorLogger      ErrorLogger
 	tracedVariables  map[string]bool // variables to be traced in the generated script
 	variables        map[string]variable
-	varAssignments   *varAssignmentScope
 	outputDir        string
 	dependentModules map[string]*moduleInfo
 	soongNamespaces  map[string]map[string]bool
 	includeTops      []string
+	typeHints        map[string]starlarkType
+	atTopOfMakefile  bool
 }
 
 func newParseContext(ss *StarlarkScript, nodes []mkparser.Node) *parseContext {
@@ -430,7 +474,6 @@
 		{"TARGET_COPY_OUT_RECOVERY", "recovery"},
 		{"TARGET_COPY_OUT_VENDOR_RAMDISK", "vendor_ramdisk"},
 		// TODO(asmundak): to process internal config files, we need the following variables:
-		//    BOARD_CONFIG_VENDOR_PATH
 		//    TARGET_VENDOR
 		//    target_base_product
 		//
@@ -453,8 +496,9 @@
 		dependentModules: make(map[string]*moduleInfo),
 		soongNamespaces:  make(map[string]map[string]bool),
 		includeTops:      []string{},
+		typeHints:        make(map[string]starlarkType),
+		atTopOfMakefile:  true,
 	}
-	ctx.pushVarAssignments()
 	for _, item := range predefined {
 		ctx.variables[item.name] = &predefinedVariable{
 			baseVariable: baseVariable{nam: item.name, typ: starlarkTypeString},
@@ -465,31 +509,6 @@
 	return ctx
 }
 
-func (ctx *parseContext) lastAssignment(name string) *assignmentNode {
-	for va := ctx.varAssignments; va != nil; va = va.outer {
-		if v, ok := va.vars[name]; ok {
-			return v
-		}
-	}
-	return nil
-}
-
-func (ctx *parseContext) setLastAssignment(name string, asgn *assignmentNode) {
-	ctx.varAssignments.vars[name] = asgn
-}
-
-func (ctx *parseContext) pushVarAssignments() {
-	va := &varAssignmentScope{
-		outer: ctx.varAssignments,
-		vars:  make(map[string]*assignmentNode),
-	}
-	ctx.varAssignments = va
-}
-
-func (ctx *parseContext) popVarAssignments() {
-	ctx.varAssignments = ctx.varAssignments.outer
-}
-
 func (ctx *parseContext) hasNodes() bool {
 	return ctx.currentNodeIndex < len(ctx.nodes)
 }
@@ -532,7 +551,7 @@
 	if lhs == nil {
 		return []starlarkNode{ctx.newBadNode(a, "unknown variable %s", name)}
 	}
-	_, isTraced := ctx.tracedVariables[name]
+	_, isTraced := ctx.tracedVariables[lhs.name()]
 	asgn := &assignmentNode{lhs: lhs, mkValue: a.Value, isTraced: isTraced, location: ctx.errorLocation(a)}
 	if lhs.valueType() == starlarkTypeUnknown {
 		// Try to divine variable type from the RHS
@@ -565,17 +584,17 @@
 		}
 	}
 
-	asgn.previous = ctx.lastAssignment(name)
-	ctx.setLastAssignment(name, asgn)
+	if asgn.lhs.valueType() == starlarkTypeString &&
+		asgn.value.typ() != starlarkTypeUnknown &&
+		asgn.value.typ() != starlarkTypeString {
+		asgn.value = &toStringExpr{expr: asgn.value}
+	}
+
 	switch a.Type {
 	case "=", ":=":
 		asgn.flavor = asgnSet
 	case "+=":
-		if asgn.previous == nil && !asgn.lhs.isPreset() {
-			asgn.flavor = asgnMaybeAppend
-		} else {
-			asgn.flavor = asgnAppend
-		}
+		asgn.flavor = asgnAppend
 	case "?=":
 		asgn.flavor = asgnMaybeSet
 	default:
@@ -744,6 +763,16 @@
 func (ctx *parseContext) handleSubConfig(
 	v mkparser.Node, pathExpr starlarkExpr, loadAlways bool, processModule func(inheritedModule) starlarkNode) []starlarkNode {
 
+	// Allow seeing $(sort $(wildcard realPathExpr)) or $(wildcard realPathExpr)
+	// because those are functionally the same as not having the sort/wildcard calls.
+	if ce, ok := pathExpr.(*callExpr); ok && ce.name == "rblf.mksort" && len(ce.args) == 1 {
+		if ce2, ok2 := ce.args[0].(*callExpr); ok2 && ce2.name == "rblf.expand_wildcard" && len(ce2.args) == 1 {
+			pathExpr = ce2.args[0]
+		}
+	} else if ce2, ok2 := pathExpr.(*callExpr); ok2 && ce2.name == "rblf.expand_wildcard" && len(ce2.args) == 1 {
+		pathExpr = ce2.args[0]
+	}
+
 	// In a simple case, the name of a module to inherit/include is known statically.
 	if path, ok := maybeString(pathExpr); ok {
 		// Note that even if this directive loads a module unconditionally, a module may be
@@ -751,6 +780,7 @@
 		moduleShouldExist := loadAlways && ctx.ifNestLevel == 0
 		if strings.Contains(path, "*") {
 			if paths, err := fs.Glob(ctx.script.sourceFS, path); err == nil {
+				sort.Strings(paths)
 				result := make([]starlarkNode, 0)
 				for _, p := range paths {
 					mi := ctx.newDependentModule(p, !moduleShouldExist)
@@ -807,20 +837,16 @@
 	if len(matchingPaths) > maxMatchingFiles {
 		return []starlarkNode{ctx.newBadNode(v, "there are >%d files matching the pattern, please rewrite it", maxMatchingFiles)}
 	}
-	if len(matchingPaths) == 1 {
-		res := inheritedStaticModule{ctx.newDependentModule(matchingPaths[0], loadAlways && ctx.ifNestLevel == 0), loadAlways}
-		return []starlarkNode{processModule(res)}
-	} else {
-		needsWarning := pathPattern[0] == "" && len(ctx.includeTops) == 0
-		res := inheritedDynamicModule{*varPath, []*moduleInfo{}, loadAlways, ctx.errorLocation(v), needsWarning}
-		for _, p := range matchingPaths {
-			// A product configuration files discovered dynamically may attempt to inherit
-			// from another one which does not exist in this source tree. Prevent load errors
-			// by always loading the dynamic files as optional.
-			res.candidateModules = append(res.candidateModules, ctx.newDependentModule(p, true))
-		}
-		return []starlarkNode{processModule(res)}
+
+	needsWarning := pathPattern[0] == "" && len(ctx.includeTops) == 0
+	res := inheritedDynamicModule{*varPath, []*moduleInfo{}, loadAlways, ctx.errorLocation(v), needsWarning}
+	for _, p := range matchingPaths {
+		// A product configuration file discovered dynamically may attempt to inherit
+		// from another one which does not exist in this source tree. Prevent load errors
+		// by always loading the dynamic files as optional.
+		res.candidateModules = append(res.candidateModules, ctx.newDependentModule(p, true))
 	}
+	return []starlarkNode{processModule(res)}
 }
 
 func (ctx *parseContext) findMatchingPaths(pattern []string) []string {
@@ -847,15 +873,19 @@
 	return res
 }
 
-func (ctx *parseContext) handleInheritModule(v mkparser.Node, args *mkparser.MakeString, loadAlways bool) []starlarkNode {
+type inheritProductCallParser struct {
+	loadAlways bool
+}
+
+func (p *inheritProductCallParser) parse(ctx *parseContext, v mkparser.Node, args *mkparser.MakeString) []starlarkNode {
 	args.TrimLeftSpaces()
 	args.TrimRightSpaces()
 	pathExpr := ctx.parseMakeString(v, args)
 	if _, ok := pathExpr.(*badExpr); ok {
 		return []starlarkNode{ctx.newBadNode(v, "Unable to parse argument to inherit")}
 	}
-	return ctx.handleSubConfig(v, pathExpr, loadAlways, func(im inheritedModule) starlarkNode {
-		return &inheritNode{im, loadAlways}
+	return ctx.handleSubConfig(v, pathExpr, p.loadAlways, func(im inheritedModule) starlarkNode {
+		return &inheritNode{im, p.loadAlways}
 	})
 }
 
@@ -874,19 +904,12 @@
 	//   $(error xxx)
 	//   $(call other-custom-functions,...)
 
-	// inherit-product(-if-exists) gets converted to a series of statements,
-	// not just a single expression like parseReference returns. So handle it
-	// separately at the beginning here.
-	if strings.HasPrefix(v.Name.Dump(), "call inherit-product,") {
-		args := v.Name.Clone()
-		args.ReplaceLiteral("call inherit-product,", "")
-		return ctx.handleInheritModule(v, args, true)
+	if name, args, ok := ctx.maybeParseFunctionCall(v, v.Name); ok {
+		if kf, ok := knownNodeFunctions[name]; ok {
+			return kf.parse(ctx, v, args)
+		}
 	}
-	if strings.HasPrefix(v.Name.Dump(), "call inherit-product-if-exists,") {
-		args := v.Name.Clone()
-		args.ReplaceLiteral("call inherit-product-if-exists,", "")
-		return ctx.handleInheritModule(v, args, false)
-	}
+
 	return []starlarkNode{&exprNode{expr: ctx.parseReference(v, v.Name)}}
 }
 
@@ -932,11 +955,8 @@
 func (ctx *parseContext) processBranch(check *mkparser.Directive) *switchCase {
 	block := &switchCase{gate: ctx.parseCondition(check)}
 	defer func() {
-		ctx.popVarAssignments()
 		ctx.ifNestLevel--
-
 	}()
-	ctx.pushVarAssignments()
 	ctx.ifNestLevel++
 
 	for ctx.hasNodes() {
@@ -960,7 +980,7 @@
 		if !check.Args.Const() {
 			return ctx.newBadNode(check, "ifdef variable ref too complex: %s", check.Args.Dump())
 		}
-		v := NewVariableRefExpr(ctx.addVariable(check.Args.Strings[0]), false)
+		v := NewVariableRefExpr(ctx.addVariable(check.Args.Strings[0]))
 		if strings.HasSuffix(check.Name, "ndef") {
 			v = &notExpr{v}
 		}
@@ -1031,49 +1051,19 @@
 		otherOperand = xLeft
 	}
 
-	not := func(expr starlarkExpr) starlarkExpr {
-		switch typedExpr := expr.(type) {
-		case *inExpr:
-			typedExpr.isNot = !typedExpr.isNot
-			return typedExpr
-		case *eqExpr:
-			typedExpr.isEq = !typedExpr.isEq
-			return typedExpr
-		case *binaryOpExpr:
-			switch typedExpr.op {
-			case ">":
-				typedExpr.op = "<="
-				return typedExpr
-			case "<":
-				typedExpr.op = ">="
-				return typedExpr
-			case ">=":
-				typedExpr.op = "<"
-				return typedExpr
-			case "<=":
-				typedExpr.op = ">"
-				return typedExpr
-			default:
-				return &notExpr{expr: expr}
-			}
-		default:
-			return &notExpr{expr: expr}
-		}
-	}
-
 	// If we've identified one of the operands as being a string literal, check
 	// for some special cases we can do to simplify the resulting expression.
 	if otherOperand != nil {
 		if stringOperand == "" {
 			if isEq {
-				return not(otherOperand)
+				return negateExpr(otherOperand)
 			} else {
 				return otherOperand
 			}
 		}
 		if stringOperand == "true" && otherOperand.typ() == starlarkTypeBool {
 			if !isEq {
-				return not(otherOperand)
+				return negateExpr(otherOperand)
 			} else {
 				return otherOperand
 			}
@@ -1229,6 +1219,37 @@
 		right: xValue, isEq: !negate}
 }
 
+func (ctx *parseContext) maybeParseFunctionCall(node mkparser.Node, ref *mkparser.MakeString) (name string, args *mkparser.MakeString, ok bool) {
+	ref.TrimLeftSpaces()
+	ref.TrimRightSpaces()
+
+	words := ref.SplitN(" ", 2)
+	if !words[0].Const() {
+		return "", nil, false
+	}
+
+	name = words[0].Dump()
+	args = mkparser.SimpleMakeString("", words[0].Pos())
+	if len(words) >= 2 {
+		args = words[1]
+	}
+	args.TrimLeftSpaces()
+	if name == "call" {
+		words = args.SplitN(",", 2)
+		if words[0].Empty() || !words[0].Const() {
+			return "", nil, false
+		}
+		name = words[0].Dump()
+		if len(words) < 2 {
+			args = mkparser.SimpleMakeString("", words[0].Pos())
+		} else {
+			args = words[1]
+		}
+	}
+	ok = true
+	return
+}
+
 // parses $(...), returning an expression
 func (ctx *parseContext) parseReference(node mkparser.Node, ref *mkparser.MakeString) starlarkExpr {
 	ref.TrimLeftSpaces()
@@ -1243,7 +1264,7 @@
 
 	// If it is a single word, it can be a simple variable
 	// reference or a function call
-	if len(words) == 1 && !isMakeControlFunc(refDump) && refDump != "shell" {
+	if len(words) == 1 && !isMakeControlFunc(refDump) && refDump != "shell" && refDump != "eval" {
 		if strings.HasPrefix(refDump, soongNsPrefix) {
 			// TODO (asmundak): if we find many, maybe handle them.
 			return ctx.newBadExpr(node, "SOONG_CONFIG_ variables cannot be referenced, use soong_config_get instead: %s", refDump)
@@ -1272,38 +1293,24 @@
 				args: []starlarkExpr{
 					&stringLiteralExpr{literal: substParts[0]},
 					&stringLiteralExpr{literal: substParts[1]},
-					NewVariableRefExpr(v, ctx.lastAssignment(v.name()) != nil),
+					NewVariableRefExpr(v),
 				},
 			}
 		}
 		if v := ctx.addVariable(refDump); v != nil {
-			return NewVariableRefExpr(v, ctx.lastAssignment(v.name()) != nil)
+			return NewVariableRefExpr(v)
 		}
 		return ctx.newBadExpr(node, "unknown variable %s", refDump)
 	}
 
-	expr := &callExpr{name: words[0].Dump(), returnType: starlarkTypeUnknown}
-	args := mkparser.SimpleMakeString("", words[0].Pos())
-	if len(words) >= 2 {
-		args = words[1]
-	}
-	args.TrimLeftSpaces()
-	if expr.name == "call" {
-		words = args.SplitN(",", 2)
-		if words[0].Empty() || !words[0].Const() {
-			return ctx.newBadExpr(node, "cannot handle %s", refDump)
-		}
-		expr.name = words[0].Dump()
-		if len(words) < 2 {
-			args = &mkparser.MakeString{}
+	if name, args, ok := ctx.maybeParseFunctionCall(node, ref); ok {
+		if kf, found := knownFunctions[name]; found {
+			return kf.parse(ctx, node, args)
 		} else {
-			args = words[1]
+			return ctx.newBadExpr(node, "cannot handle invoking %s", name)
 		}
-	}
-	if kf, found := knownFunctions[expr.name]; found {
-		return kf.parse(ctx, node, args)
 	} else {
-		return ctx.newBadExpr(node, "cannot handle invoking %s", expr.name)
+		return ctx.newBadExpr(node, "cannot handle %s", refDump)
 	}
 }
 
@@ -1311,6 +1318,7 @@
 	name       string
 	returnType starlarkType
 	addGlobals bool
+	addHandle  bool
 }
 
 func (p *simpleCallParser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) starlarkExpr {
@@ -1318,6 +1326,9 @@
 	if p.addGlobals {
 		expr.args = append(expr.args, &globalsExpr{})
 	}
+	if p.addHandle {
+		expr.args = append(expr.args, &identifierExpr{name: "handle"})
+	}
 	for _, arg := range args.Split(",") {
 		arg.TrimLeftSpaces()
 		arg.TrimRightSpaces()
@@ -1373,7 +1384,7 @@
 	if !args.Empty() {
 		return ctx.newBadExpr(node, "my-dir function cannot have any arguments passed to it.")
 	}
-	return &variableRefExpr{ctx.addVariable("LOCAL_PATH"), true}
+	return &stringLiteralExpr{literal: filepath.Dir(ctx.script.mkFile)}
 }
 
 type isProductInListCallParser struct{}
@@ -1383,7 +1394,7 @@
 		return ctx.newBadExpr(node, "is-product-in-list requires an argument")
 	}
 	return &inExpr{
-		expr:  &variableRefExpr{ctx.addVariable("TARGET_PRODUCT"), true},
+		expr:  NewVariableRefExpr(ctx.addVariable("TARGET_PRODUCT")),
 		list:  maybeConvertToStringList(ctx.parseMakeString(node, args)),
 		isNot: false,
 	}
@@ -1396,8 +1407,8 @@
 		return ctx.newBadExpr(node, "cannot handle non-constant argument to is-vendor-board-platform")
 	}
 	return &inExpr{
-		expr:  &variableRefExpr{ctx.addVariable("TARGET_BOARD_PLATFORM"), false},
-		list:  &variableRefExpr{ctx.addVariable(args.Dump() + "_BOARD_PLATFORMS"), true},
+		expr:  NewVariableRefExpr(ctx.addVariable("TARGET_BOARD_PLATFORM")),
+		list:  NewVariableRefExpr(ctx.addVariable(args.Dump() + "_BOARD_PLATFORMS")),
 		isNot: false,
 	}
 }
@@ -1409,8 +1420,8 @@
 		return ctx.newBadExpr(node, "is-vendor-board-qcom does not accept any arguments")
 	}
 	return &inExpr{
-		expr:  &variableRefExpr{ctx.addVariable("TARGET_BOARD_PLATFORM"), false},
-		list:  &variableRefExpr{ctx.addVariable("QCOM_BOARD_PLATFORMS"), true},
+		expr:  NewVariableRefExpr(ctx.addVariable("TARGET_BOARD_PLATFORM")),
+		list:  NewVariableRefExpr(ctx.addVariable("QCOM_BOARD_PLATFORMS")),
 		isNot: false,
 	}
 }
@@ -1483,9 +1494,46 @@
 	}
 }
 
-type foreachCallPaser struct{}
+type ifCallNodeParser struct{}
 
-func (p *foreachCallPaser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) starlarkExpr {
+func (p *ifCallNodeParser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) []starlarkNode {
+	words := args.Split(",")
+	if len(words) != 2 && len(words) != 3 {
+		return []starlarkNode{ctx.newBadNode(node, "if function should have 2 or 3 arguments, found "+strconv.Itoa(len(words)))}
+	}
+
+	ifn := &ifNode{expr: ctx.parseMakeString(node, words[0])}
+	cases := []*switchCase{
+		{
+			gate:  ifn,
+			nodes: ctx.parseNodeMakeString(node, words[1]),
+		},
+	}
+	if len(words) == 3 {
+		cases = append(cases, &switchCase{
+			gate:  &elseNode{},
+			nodes: ctx.parseNodeMakeString(node, words[2]),
+		})
+	}
+	if len(cases) == 2 {
+		if len(cases[1].nodes) == 0 {
+			// Remove else branch if it has no contents
+			cases = cases[:1]
+		} else if len(cases[0].nodes) == 0 {
+			// If the if branch has no contents but the else does,
+			// move them to the if and negate its condition
+			ifn.expr = negateExpr(ifn.expr)
+			cases[0].nodes = cases[1].nodes
+			cases = cases[:1]
+		}
+	}
+
+	return []starlarkNode{&switchNode{ssCases: cases}}
+}
+
+type foreachCallParser struct{}
+
+func (p *foreachCallParser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) starlarkExpr {
 	words := args.Split(",")
 	if len(words) != 3 {
 		return ctx.newBadExpr(node, "foreach function should have 3 arguments, found "+strconv.Itoa(len(words)))
@@ -1517,6 +1565,71 @@
 	}
 }
 
+func transformNode(node starlarkNode, transformer func(expr starlarkExpr) starlarkExpr) {
+	switch a := node.(type) {
+	case *ifNode:
+		a.expr = a.expr.transform(transformer)
+	case *switchCase:
+		transformNode(a.gate, transformer)
+		for _, n := range a.nodes {
+			transformNode(n, transformer)
+		}
+	case *switchNode:
+		for _, n := range a.ssCases {
+			transformNode(n, transformer)
+		}
+	case *exprNode:
+		a.expr = a.expr.transform(transformer)
+	case *assignmentNode:
+		a.value = a.value.transform(transformer)
+	case *foreachNode:
+		a.list = a.list.transform(transformer)
+		for _, n := range a.actions {
+			transformNode(n, transformer)
+		}
+	}
+}
+
+type foreachCallNodeParser struct{}
+
+func (p *foreachCallNodeParser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) []starlarkNode {
+	words := args.Split(",")
+	if len(words) != 3 {
+		return []starlarkNode{ctx.newBadNode(node, "foreach function should have 3 arguments, found "+strconv.Itoa(len(words)))}
+	}
+	if !words[0].Const() || words[0].Empty() || !identifierFullMatchRegex.MatchString(words[0].Strings[0]) {
+		return []starlarkNode{ctx.newBadNode(node, "first argument to foreach function must be a simple string identifier")}
+	}
+
+	loopVarName := words[0].Strings[0]
+
+	list := ctx.parseMakeString(node, words[1])
+	if list.typ() != starlarkTypeList {
+		list = &callExpr{
+			name:       baseName + ".words",
+			returnType: starlarkTypeList,
+			args:       []starlarkExpr{list},
+		}
+	}
+
+	actions := ctx.parseNodeMakeString(node, words[2])
+	// TODO(colefaust): Replace transforming code with something more elegant
+	for _, action := range actions {
+		transformNode(action, func(expr starlarkExpr) starlarkExpr {
+			if varRefExpr, ok := expr.(*variableRefExpr); ok && varRefExpr.ref.name() == loopVarName {
+				return &identifierExpr{loopVarName}
+			}
+			return nil
+		})
+	}
+
+	return []starlarkNode{&foreachNode{
+		varName: loopVarName,
+		list:    list,
+		actions: actions,
+	}}
+}
+
 type wordCallParser struct{}
 
 func (p *wordCallParser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) starlarkExpr {
@@ -1627,6 +1740,31 @@
 	}
 }
 
+type evalNodeParser struct{}
+
+func (p *evalNodeParser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) []starlarkNode {
+	parser := mkparser.NewParser("Eval expression", strings.NewReader(args.Dump()))
+	nodes, errs := parser.Parse()
+	if errs != nil {
+		return []starlarkNode{ctx.newBadNode(node, "Unable to parse eval statement")}
+	}
+
+	if len(nodes) == 0 {
+		return []starlarkNode{}
+	} else if len(nodes) == 1 {
+		switch n := nodes[0].(type) {
+		case *mkparser.Assignment:
+			if n.Name.Const() {
+				return ctx.handleAssignment(n)
+			}
+		case *mkparser.Comment:
+			return []starlarkNode{&commentNode{strings.TrimSpace("#" + n.Comment)}}
+		}
+	}
+
+	return []starlarkNode{ctx.newBadNode(node, "Eval expression too complex; only assignments and comments are supported")}
+}
+
 func (ctx *parseContext) parseMakeString(node mkparser.Node, mk *mkparser.MakeString) starlarkExpr {
 	if mk.Const() {
 		return &stringLiteralExpr{mk.Dump()}
@@ -1651,6 +1789,16 @@
 	return NewInterpolateExpr(parts)
 }
 
+func (ctx *parseContext) parseNodeMakeString(node mkparser.Node, mk *mkparser.MakeString) []starlarkNode {
+	// Discard any constant values in the make string, as they would be top level
+	// string literals and do nothing.
+	result := make([]starlarkNode, 0, len(mk.Variables))
+	for i := range mk.Variables {
+		result = append(result, ctx.handleVariable(&mk.Variables[i])...)
+	}
+	return result
+}
+
 // Handles the statements whose treatment is the same in all contexts: comment,
 // assignment, variable (which is a macro call in reality) and all constructs that
 // do not handle in any context ('define directive and any unrecognized stuff).
@@ -1687,16 +1835,24 @@
 	// Clear the includeTops after each non-comment statement
 	// so that include annotations placed on certain statements don't apply
 	// globally for the rest of the makefile as well.
-	if _, wasComment := node.(*mkparser.Comment); !wasComment && len(ctx.includeTops) > 0 {
+	if _, wasComment := node.(*mkparser.Comment); !wasComment {
+		ctx.atTopOfMakefile = false
 		ctx.includeTops = []string{}
 	}
 
 	if result == nil {
 		result = []starlarkNode{}
 	}
+
 	return result
 }
 
+// The types allowed in a type_hint
+var typeHintMap = map[string]starlarkType{
+	"string": starlarkTypeString,
+	"list":   starlarkTypeList,
+}
+
 // Processes annotation. An annotation is a comment that starts with #RBC# and provides
 // a conversion hint -- say, where to look for the dynamically calculated inherit/include
 // paths. Returns true if the comment was a successfully-handled annotation.
@@ -1721,6 +1877,35 @@
 		}
 		ctx.includeTops = append(ctx.includeTops, p)
 		return nil, true
+	} else if p, ok := maybeTrim(annotation, "type_hint"); ok {
+	// Type hints must come at the beginning of the file, to avoid confusion
+		// if a type hint was specified later and thus only takes effect for half
+		// of the file.
+		if !ctx.atTopOfMakefile {
+			return ctx.newBadNode(cnode, "type_hint annotations must come before the first Makefile statement"), true
+		}
+
+		parts := strings.Fields(p)
+		if len(parts) <= 1 {
+			return ctx.newBadNode(cnode, "Invalid type_hint annotation: %s. Must be a variable type followed by a list of variables of that type", p), true
+		}
+
+		var varType starlarkType
+		if varType, ok = typeHintMap[parts[0]]; !ok {
+			varType = starlarkTypeUnknown
+		}
+		if varType == starlarkTypeUnknown {
+			return ctx.newBadNode(cnode, "Invalid type_hint annotation. Only list/string types are accepted, found %s", parts[0]), true
+		}
+
+		for _, name := range parts[1:] {
+			// Don't allow duplicate type hints
+			if _, ok := ctx.typeHints[name]; ok {
+				return ctx.newBadNode(cnode, "Duplicate type hint for variable %s", name), true
+			}
+			ctx.typeHints[name] = varType
+		}
+		return nil, true
 	}
 	return ctx.newBadNode(cnode, "unsupported annotation %s", cnode.Comment), true
 }
@@ -1871,9 +2056,7 @@
 	fmt.Fprintf(&buf, "load(%q, %q)\n", baseUri, baseName)
 	fmt.Fprintf(&buf, "load(%q, \"init\")\n", mainModuleUri)
 	fmt.Fprintf(&buf, "load(%q, input_variables_init = \"init\")\n", inputVariablesUri)
-	fmt.Fprintf(&buf, "globals, cfg, globals_base = %s(init, input_variables_init)\n", cfnBoardMain)
-	fmt.Fprintf(&buf, "# TODO: Some product config variables need to be printed, but most are readonly so we can't just print cfg here.\n")
-	fmt.Fprintf(&buf, "%s((globals, cfg, globals_base))\n", cfnPrintVars)
+	fmt.Fprintf(&buf, "%s(%s(init, input_variables_init))\n", cfnPrintVars, cfnBoardMain)
 	return buf.String()
 }
 
diff --git a/mk2rbc/mk2rbc_test.go b/mk2rbc/mk2rbc_test.go
index 447f658..de75129 100644
--- a/mk2rbc/mk2rbc_test.go
+++ b/mk2rbc/mk2rbc_test.go
@@ -65,6 +65,10 @@
 PRODUCT_NAME := Pixel 3
 PRODUCT_MODEL :=
 local_var = foo
+local-var-with-dashes := bar
+$(warning local-var-with-dashes: $(local-var-with-dashes))
+GLOBAL-VAR-WITH-DASHES := baz
+$(warning GLOBAL-VAR-WITH-DASHES: $(GLOBAL-VAR-WITH-DASHES))
 `,
 		expected: `load("//build/make/core:product_config.rbc", "rblf")
 
@@ -73,6 +77,10 @@
   cfg["PRODUCT_NAME"] = "Pixel 3"
   cfg["PRODUCT_MODEL"] = ""
   _local_var = "foo"
+  _local_var_with_dashes = "bar"
+  rblf.mkwarning("pixel3.mk", "local-var-with-dashes: %s" % _local_var_with_dashes)
+  g["GLOBAL-VAR-WITH-DASHES"] = "baz"
+  rblf.mkwarning("pixel3.mk", "GLOBAL-VAR-WITH-DASHES: %s" % g["GLOBAL-VAR-WITH-DASHES"])
 `,
 	},
 	{
@@ -189,15 +197,31 @@
 		mkname: "path/product.mk",
 		in: `
 $(call inherit-product, */font.mk)
+$(call inherit-product, $(sort $(wildcard */font.mk)))
+$(call inherit-product, $(wildcard */font.mk))
+
+include */font.mk
+include $(sort $(wildcard */font.mk))
+include $(wildcard */font.mk)
 `,
 		expected: `load("//build/make/core:product_config.rbc", "rblf")
-load("//foo:font.star", _font_init = "init")
-load("//bar:font.star", _font1_init = "init")
+load("//bar:font.star", _font_init = "init")
+load("//foo:font.star", _font1_init = "init")
 
 def init(g, handle):
   cfg = rblf.cfg(handle)
-  rblf.inherit(handle, "foo/font", _font_init)
-  rblf.inherit(handle, "bar/font", _font1_init)
+  rblf.inherit(handle, "bar/font", _font_init)
+  rblf.inherit(handle, "foo/font", _font1_init)
+  rblf.inherit(handle, "bar/font", _font_init)
+  rblf.inherit(handle, "foo/font", _font1_init)
+  rblf.inherit(handle, "bar/font", _font_init)
+  rblf.inherit(handle, "foo/font", _font1_init)
+  _font_init(g, handle)
+  _font1_init(g, handle)
+  _font_init(g, handle)
+  _font1_init(g, handle)
+  _font_init(g, handle)
+  _font1_init(g, handle)
 `,
 	},
 	{
@@ -246,6 +270,8 @@
 		in: `
 $(warning this is the warning)
 $(warning)
+$(warning # this warning starts with a pound)
+$(warning this warning has a # in the middle)
 $(info this is the info)
 $(error this is the error)
 PRODUCT_NAME:=$(shell echo *)
@@ -256,6 +282,8 @@
   cfg = rblf.cfg(handle)
   rblf.mkwarning("product.mk", "this is the warning")
   rblf.mkwarning("product.mk", "")
+  rblf.mkwarning("product.mk", "# this warning starts with a pound")
+  rblf.mkwarning("product.mk", "this warning has a # in the middle")
   rblf.mkinfo("product.mk", "this is the info")
   rblf.mkerror("product.mk", "this is the error")
   cfg["PRODUCT_NAME"] = rblf.shell("echo *")
@@ -614,7 +642,7 @@
     pass
   elif not rblf.board_platform_is(g, "copper"):
     pass
-  elif g.get("TARGET_BOARD_PLATFORM", "") not in g["QCOM_BOARD_PLATFORMS"]:
+  elif g.get("TARGET_BOARD_PLATFORM", "") not in g.get("QCOM_BOARD_PLATFORMS", ""):
     pass
   elif g["TARGET_PRODUCT"] in g.get("PLATFORM_LIST", []):
     pass
@@ -637,7 +665,7 @@
     pass
   elif not rblf.board_platform_is(g, "copper"):
     pass
-  elif g.get("TARGET_BOARD_PLATFORM", "") in g["QCOM_BOARD_PLATFORMS"]:
+  elif g.get("TARGET_BOARD_PLATFORM", "") in g.get("QCOM_BOARD_PLATFORMS", ""):
     pass
 `,
 	},
@@ -739,16 +767,18 @@
 $(call require-artifacts-in-path, foo, bar)
 $(call require-artifacts-in-path-relaxed, foo, bar)
 $(call dist-for-goals, goal, from:to)
+$(call add-product-dex-preopt-module-config,MyModule,disable)
 `,
 		expected: `load("//build/make/core:product_config.rbc", "rblf")
 
 def init(g, handle):
   cfg = rblf.cfg(handle)
-  rblf.enforce_product_packages_exist("")
-  rblf.enforce_product_packages_exist("foo")
-  rblf.require_artifacts_in_path("foo", "bar")
-  rblf.require_artifacts_in_path_relaxed("foo", "bar")
+  rblf.enforce_product_packages_exist(handle, "")
+  rblf.enforce_product_packages_exist(handle, "foo")
+  rblf.require_artifacts_in_path(handle, "foo", "bar")
+  rblf.require_artifacts_in_path_relaxed(handle, "foo", "bar")
   rblf.mkdist_for_goals(g, "goal", "from:to")
+  rblf.add_product_dex_preopt_module_config(handle, "MyModule", "disable")
 `,
 	},
 	{
@@ -779,7 +809,7 @@
 PRODUCT_COPY_FILES := $(addsuffix .sff, a b c)
 PRODUCT_NAME := $(word 1, $(subst ., ,$(TARGET_BOARD_PLATFORM)))
 $(info $(patsubst %.pub,$(PRODUCT_NAME)%,$(PRODUCT_ADB_KEYS)))
-$(info $(dir foo/bar))
+$(info $$(dir foo/bar): $(dir foo/bar))
 $(info $(firstword $(PRODUCT_COPY_FILES)))
 $(info $(lastword $(PRODUCT_COPY_FILES)))
 $(info $(dir $(lastword $(MAKEFILE_LIST))))
@@ -802,7 +832,7 @@
   cfg["PRODUCT_COPY_FILES"] = rblf.addsuffix(".sff", "a b c")
   cfg["PRODUCT_NAME"] = ((g.get("TARGET_BOARD_PLATFORM", "")).replace(".", " ")).split()[0]
   rblf.mkinfo("product.mk", rblf.mkpatsubst("%.pub", "%s%%" % cfg["PRODUCT_NAME"], g.get("PRODUCT_ADB_KEYS", "")))
-  rblf.mkinfo("product.mk", rblf.dir("foo/bar"))
+  rblf.mkinfo("product.mk", "$(dir foo/bar): %s" % rblf.dir("foo/bar"))
   rblf.mkinfo("product.mk", cfg["PRODUCT_COPY_FILES"][0])
   rblf.mkinfo("product.mk", cfg["PRODUCT_COPY_FILES"][-1])
   rblf.mkinfo("product.mk", rblf.dir("product.mk"))
@@ -891,6 +921,43 @@
 `,
 	},
 	{
+		desc:   "assignment setdefaults",
+		mkname: "product.mk",
+		in: `
+# All of these should have a setdefault because they're self-referential and not defined before
+PRODUCT_LIST1 = a $(PRODUCT_LIST1)
+PRODUCT_LIST2 ?= a $(PRODUCT_LIST2)
+PRODUCT_LIST3 += a
+
+# Now doing them again should not have a setdefault because they've already been set
+PRODUCT_LIST1 = a $(PRODUCT_LIST1)
+PRODUCT_LIST2 ?= a $(PRODUCT_LIST2)
+PRODUCT_LIST3 += a
+`,
+		expected: `# All of these should have a setdefault because they're self-referential and not defined before
+load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+  rblf.setdefault(handle, "PRODUCT_LIST1")
+  cfg["PRODUCT_LIST1"] = (["a"] +
+      cfg.get("PRODUCT_LIST1", []))
+  if cfg.get("PRODUCT_LIST2") == None:
+    rblf.setdefault(handle, "PRODUCT_LIST2")
+    cfg["PRODUCT_LIST2"] = (["a"] +
+        cfg.get("PRODUCT_LIST2", []))
+  rblf.setdefault(handle, "PRODUCT_LIST3")
+  cfg["PRODUCT_LIST3"] += ["a"]
+  # Now doing them again should not have a setdefault because they've already been set
+  cfg["PRODUCT_LIST1"] = (["a"] +
+      cfg["PRODUCT_LIST1"])
+  if cfg.get("PRODUCT_LIST2") == None:
+    cfg["PRODUCT_LIST2"] = (["a"] +
+        cfg["PRODUCT_LIST2"])
+  cfg["PRODUCT_LIST3"] += ["a"]
+`,
+	},
+	{
 		desc:   "soong namespace assignments",
 		mkname: "product.mk",
 		in: `
@@ -971,19 +1038,22 @@
 `,
 	},
 	{
-		desc:   "strip function",
+		desc:   "strip/sort functions",
 		mkname: "product.mk",
 		in: `
 ifeq ($(filter hwaddress,$(PRODUCT_PACKAGES)),)
    PRODUCT_PACKAGES := $(strip $(PRODUCT_PACKAGES) hwaddress)
 endif
+MY_VAR := $(sort b a c)
 `,
 		expected: `load("//build/make/core:product_config.rbc", "rblf")
 
 def init(g, handle):
   cfg = rblf.cfg(handle)
   if "hwaddress" not in cfg.get("PRODUCT_PACKAGES", []):
+    rblf.setdefault(handle, "PRODUCT_PACKAGES")
     cfg["PRODUCT_PACKAGES"] = (rblf.mkstrip("%s hwaddress" % " ".join(cfg.get("PRODUCT_PACKAGES", [])))).split()
+  g["MY_VAR"] = rblf.mksort("b a c")
 `,
 	},
 	{
@@ -1079,7 +1149,13 @@
 def init(g, handle):
   cfg = rblf.cfg(handle)
   g["MY_PATH"] = "foo"
-  rblf.inherit(handle, "vendor/foo1/cfg", _cfg_init)
+  _entry = {
+    "vendor/foo1/cfg.mk": ("vendor/foo1/cfg", _cfg_init),
+  }.get("%s/cfg.mk" % g["MY_PATH"])
+  (_varmod, _varmod_init) = _entry if _entry else (None, None)
+  if not _varmod_init:
+    rblf.mkerror("product.mk", "Cannot find %s" % ("%s/cfg.mk" % g["MY_PATH"]))
+  rblf.inherit(handle, _varmod, _varmod_init)
 `,
 	},
 	{
@@ -1099,8 +1175,20 @@
 def init(g, handle):
   cfg = rblf.cfg(handle)
   g["MY_PATH"] = "foo"
-  rblf.inherit(handle, "vendor/foo1/cfg", _cfg_init)
-  rblf.inherit(handle, "vendor/foo1/cfg", _cfg_init)
+  _entry = {
+    "vendor/foo1/cfg.mk": ("vendor/foo1/cfg", _cfg_init),
+  }.get("%s/cfg.mk" % g["MY_PATH"])
+  (_varmod, _varmod_init) = _entry if _entry else (None, None)
+  if not _varmod_init:
+    rblf.mkerror("product.mk", "Cannot find %s" % ("%s/cfg.mk" % g["MY_PATH"]))
+  rblf.inherit(handle, _varmod, _varmod_init)
+  _entry = {
+    "vendor/foo1/cfg.mk": ("vendor/foo1/cfg", _cfg_init),
+  }.get("%s/cfg.mk" % g["MY_PATH"])
+  (_varmod, _varmod_init) = _entry if _entry else (None, None)
+  if not _varmod_init:
+    rblf.mkerror("product.mk", "Cannot find %s" % ("%s/cfg.mk" % g["MY_PATH"]))
+  rblf.inherit(handle, _varmod, _varmod_init)
 `,
 	},
 	{
@@ -1124,9 +1212,21 @@
 
 def init(g, handle):
   cfg = rblf.cfg(handle)
-  rblf.inherit(handle, "foo/font", _font_init)
+  _entry = {
+    "foo/font.mk": ("foo/font", _font_init),
+  }.get("%s/font.mk" % g.get("MY_VAR", ""))
+  (_varmod, _varmod_init) = _entry if _entry else (None, None)
+  if not _varmod_init:
+    rblf.mkerror("product.mk", "Cannot find %s" % ("%s/font.mk" % g.get("MY_VAR", "")))
+  rblf.inherit(handle, _varmod, _varmod_init)
   # There's some space and even this comment between the include_top and the inherit-product
-  rblf.inherit(handle, "foo/font", _font_init)
+  _entry = {
+    "foo/font.mk": ("foo/font", _font_init),
+  }.get("%s/font.mk" % g.get("MY_VAR", ""))
+  (_varmod, _varmod_init) = _entry if _entry else (None, None)
+  if not _varmod_init:
+    rblf.mkerror("product.mk", "Cannot find %s" % ("%s/font.mk" % g.get("MY_VAR", "")))
+  rblf.inherit(handle, _varmod, _varmod_init)
   rblf.mkwarning("product.mk:11", "Please avoid starting an include path with a variable. See https://source.android.com/setup/build/bazel/product_config/issues/includes for details.")
   _entry = {
     "foo/font.mk": ("foo/font", _font_init),
@@ -1187,7 +1287,7 @@
 TEST_VAR_LIST += bar
 TEST_VAR_2 := $(if $(TEST_VAR),bar)
 TEST_VAR_3 := $(if $(TEST_VAR),bar,baz)
-TEST_VAR_3 := $(if $(TEST_VAR),$(TEST_VAR_LIST))
+TEST_VAR_4 := $(if $(TEST_VAR),$(TEST_VAR_LIST))
 `,
 		expected: `load("//build/make/core:product_config.rbc", "rblf")
 
@@ -1198,7 +1298,7 @@
   g["TEST_VAR_LIST"] += ["bar"]
   g["TEST_VAR_2"] = ("bar" if g["TEST_VAR"] else "")
   g["TEST_VAR_3"] = ("bar" if g["TEST_VAR"] else "baz")
-  g["TEST_VAR_3"] = (g["TEST_VAR_LIST"] if g["TEST_VAR"] else [])
+  g["TEST_VAR_4"] = (g["TEST_VAR_LIST"] if g["TEST_VAR"] else [])
 `,
 	},
 	{
@@ -1228,6 +1328,14 @@
 BOOT_KERNEL_MODULES_LIST += bar.ko
 BOOT_KERNEL_MODULES_FILTER_2 := $(foreach m,$(BOOT_KERNEL_MODULES_LIST),%/$(m))
 
+FOREACH_WITH_IF := $(foreach module,\
+  $(BOOT_KERNEL_MODULES_LIST),\
+  $(if $(filter $(module),foo.ko),,$(error module "$(module)" has an error!)))
+
+# Same as above, but not assigning it to a variable allows it to be converted to statements
+$(foreach module,\
+  $(BOOT_KERNEL_MODULES_LIST),\
+  $(if $(filter $(module),foo.ko),,$(error module "$(module)" has an error!)))
 `,
 		expected: `load("//build/make/core:product_config.rbc", "rblf")
 
@@ -1238,6 +1346,11 @@
   g["BOOT_KERNEL_MODULES_LIST"] = ["foo.ko"]
   g["BOOT_KERNEL_MODULES_LIST"] += ["bar.ko"]
   g["BOOT_KERNEL_MODULES_FILTER_2"] = ["%%/%s" % m for m in g["BOOT_KERNEL_MODULES_LIST"]]
+  g["FOREACH_WITH_IF"] = [("" if rblf.filter(module, "foo.ko") else rblf.mkerror("product.mk", "module \"%s\" has an error!" % module)) for module in g["BOOT_KERNEL_MODULES_LIST"]]
+  # Same as above, but not assigning it to a variable allows it to be converted to statements
+  for module in g["BOOT_KERNEL_MODULES_LIST"]:
+    if not rblf.filter(module, "foo.ko"):
+      rblf.mkerror("product.mk", "module \"%s\" has an error!" % module)
 `,
 	},
 	{
@@ -1324,6 +1437,98 @@
     pass
 `,
 	},
+	{
+		desc:   "Type hints",
+		mkname: "product.mk",
+		in: `
+# Test type hints
+#RBC# type_hint list MY_VAR MY_VAR_2
+# Unsupported type
+#RBC# type_hint bool MY_VAR_3
+# Invalid syntax
+#RBC# type_hint list
+# Duplicated variable
+#RBC# type_hint list MY_VAR_2
+#RBC# type_hint list my-local-var-with-dashes
+#RBC# type_hint string MY_STRING_VAR
+
+MY_VAR := foo
+MY_VAR_UNHINTED := foo
+
+# Vars set after other statements still get the hint
+MY_VAR_2 := foo
+
+# You can't specify a type hint after the first statement
+#RBC# type_hint list MY_VAR_4
+MY_VAR_4 := foo
+
+my-local-var-with-dashes := foo
+
+MY_STRING_VAR := $(wildcard foo/bar.mk)
+`,
+		expected: `# Test type hints
+# Unsupported type
+load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+  rblf.mk2rbc_error("product.mk:5", "Invalid type_hint annotation. Only list/string types are accepted, found bool")
+  # Invalid syntax
+  rblf.mk2rbc_error("product.mk:7", "Invalid type_hint annotation: list. Must be a variable type followed by a list of variables of that type")
+  # Duplicated variable
+  rblf.mk2rbc_error("product.mk:9", "Duplicate type hint for variable MY_VAR_2")
+  g["MY_VAR"] = ["foo"]
+  g["MY_VAR_UNHINTED"] = "foo"
+  # Vars set after other statements still get the hint
+  g["MY_VAR_2"] = ["foo"]
+  # You can't specify a type hint after the first statement
+  rblf.mk2rbc_error("product.mk:20", "type_hint annotations must come before the first Makefile statement")
+  g["MY_VAR_4"] = "foo"
+  _my_local_var_with_dashes = ["foo"]
+  g["MY_STRING_VAR"] = " ".join(rblf.expand_wildcard("foo/bar.mk"))
+`,
+	},
+	{
+		desc:   "Set LOCAL_PATH to my-dir",
+		mkname: "product.mk",
+		in: `
+LOCAL_PATH := $(call my-dir)
+`,
+		expected: `load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+  
+`,
+	},
+	{
+		desc:   "Evals",
+		mkname: "product.mk",
+		in: `
+$(eval)
+$(eval MY_VAR := foo)
+$(eval # This is a test of eval functions)
+$(eval $(TOO_COMPLICATED) := bar)
+$(foreach x,$(MY_LIST_VAR), \
+  $(eval PRODUCT_COPY_FILES += foo/bar/$(x):$(TARGET_COPY_OUT_VENDOR)/etc/$(x)) \
+  $(if $(MY_OTHER_VAR),$(eval PRODUCT_COPY_FILES += $(MY_OTHER_VAR):foo/bar/$(x))) \
+)
+
+`,
+		expected: `load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+  g["MY_VAR"] = "foo"
+  # This is a test of eval functions
+  rblf.mk2rbc_error("product.mk:5", "Eval expression too complex; only assignments and comments are supported")
+  for x in rblf.words(g.get("MY_LIST_VAR", "")):
+    rblf.setdefault(handle, "PRODUCT_COPY_FILES")
+    cfg["PRODUCT_COPY_FILES"] += ("foo/bar/%s:%s/etc/%s" % (x, g.get("TARGET_COPY_OUT_VENDOR", ""), x)).split()
+    if g.get("MY_OTHER_VAR", ""):
+      cfg["PRODUCT_COPY_FILES"] += ("%s:foo/bar/%s" % (g.get("MY_OTHER_VAR", ""), x)).split()
+`,
+	},
 }
 
 var known_variables = []struct {
diff --git a/mk2rbc/node.go b/mk2rbc/node.go
index 5d98d7b..7c39b9e 100644
--- a/mk2rbc/node.go
+++ b/mk2rbc/node.go
@@ -184,10 +184,9 @@
 
 const (
 	// Assignment flavors
-	asgnSet         assignmentFlavor = iota // := or =
-	asgnMaybeSet    assignmentFlavor = iota // ?= and variable may be unset
-	asgnAppend      assignmentFlavor = iota // += and variable has been set before
-	asgnMaybeAppend assignmentFlavor = iota // += and variable may be unset
+	asgnSet      assignmentFlavor = iota // := or =
+	asgnMaybeSet assignmentFlavor = iota // ?=
+	asgnAppend   assignmentFlavor = iota // +=
 )
 
 type assignmentNode struct {
@@ -197,7 +196,6 @@
 	flavor   assignmentFlavor
 	location ErrorLocation
 	isTraced bool
-	previous *assignmentNode
 }
 
 func (asgn *assignmentNode) emit(gctx *generationContext) {
@@ -210,11 +208,25 @@
 		gctx.newLine()
 		gctx.tracedCount++
 		gctx.writef(`print("%s.%d: %s := ", `, gctx.starScript.mkFile, gctx.tracedCount, asgn.lhs.name())
-		asgn.lhs.emitGet(gctx, true)
+		asgn.lhs.emitGet(gctx)
 		gctx.writef(")")
 	}
 }
 
+func (asgn *assignmentNode) isSelfReferential() bool {
+	if asgn.flavor == asgnAppend {
+		return true
+	}
+	isSelfReferential := false
+	asgn.value.transform(func(expr starlarkExpr) starlarkExpr {
+		if ref, ok := expr.(*variableRefExpr); ok && ref.ref.name() == asgn.lhs.name() {
+			isSelfReferential = true
+		}
+		return nil
+	})
+	return isSelfReferential
+}
+
 type exprNode struct {
 	expr starlarkExpr
 }
@@ -258,6 +270,7 @@
 func (cb *switchCase) emit(gctx *generationContext) {
 	cb.gate.emit(gctx)
 	gctx.indentLevel++
+	gctx.pushVariableAssignments()
 	hasStatements := false
 	for _, node := range cb.nodes {
 		if _, ok := node.(*commentNode); !ok {
@@ -269,6 +282,7 @@
 		gctx.emitPass()
 	}
 	gctx.indentLevel--
+	gctx.popVariableAssignments()
 }
 
 // A single complete if ... elseif ... else ... endif sequences
@@ -281,3 +295,30 @@
 		ssCase.emit(gctx)
 	}
 }
+
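+// foreachNode represents a makefile $(foreach ...) call that is emitted as a
+// Starlark for loop.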
+type foreachNode struct {
+	varName string
+	list    starlarkExpr
+	actions []starlarkNode
+}
+
+func (f *foreachNode) emit(gctx *generationContext) {
+	gctx.pushVariableAssignments()
+	gctx.newLine()
+	gctx.writef("for %s in ", f.varName)
+	f.list.emit(gctx)
+	gctx.write(":")
+	gctx.indentLevel++
+	hasStatements := false
+	for _, a := range f.actions {
+		if _, ok := a.(*commentNode); !ok {
+			hasStatements = true
+		}
+		a.emit(gctx)
+	}
+	if !hasStatements {
+		gctx.emitPass()
+	}
+	gctx.indentLevel--
+	gctx.popVariableAssignments()
+}
diff --git a/mk2rbc/variable.go b/mk2rbc/variable.go
index f7adca5..0a26ed8 100644
--- a/mk2rbc/variable.go
+++ b/mk2rbc/variable.go
@@ -21,9 +21,8 @@
 
 type variable interface {
 	name() string
-	emitGet(gctx *generationContext, isDefined bool)
+	emitGet(gctx *generationContext)
 	emitSet(gctx *generationContext, asgn *assignmentNode)
-	emitDefined(gctx *generationContext)
 	valueType() starlarkType
 	setValueType(t starlarkType)
 	defaultValueString() string
@@ -74,13 +73,11 @@
 
 func (pcv productConfigVariable) emitSet(gctx *generationContext, asgn *assignmentNode) {
 	emitAssignment := func() {
-		pcv.emitGet(gctx, true)
-		gctx.write(" = ")
+		gctx.writef("cfg[%q] = ", pcv.nam)
 		asgn.value.emitListVarCopy(gctx)
 	}
 	emitAppend := func() {
-		pcv.emitGet(gctx, true)
-		gctx.write(" += ")
+		gctx.writef("cfg[%q] += ", pcv.nam)
 		value := asgn.value
 		if pcv.valueType() == starlarkTypeString {
 			gctx.writef(`" " + `)
@@ -88,56 +85,63 @@
 		}
 		value.emit(gctx)
 	}
-
-	switch asgn.flavor {
-	case asgnSet:
-		emitAssignment()
-	case asgnAppend:
-		emitAppend()
-	case asgnMaybeAppend:
-		// If we are not sure variable has been assigned before, emit setdefault
+	emitSetDefault := func() {
 		if pcv.typ == starlarkTypeList {
 			gctx.writef("%s(handle, %q)", cfnSetListDefault, pcv.name())
 		} else {
 			gctx.writef("cfg.setdefault(%q, %s)", pcv.name(), pcv.defaultValueString())
 		}
 		gctx.newLine()
+	}
+
+	// If we are not sure the variable has been assigned before, emit setdefault
+	needsSetDefault := !gctx.hasBeenAssigned(&pcv) && !pcv.isPreset() && asgn.isSelfReferential()
+
+	switch asgn.flavor {
+	case asgnSet:
+		if needsSetDefault {
+			emitSetDefault()
+		}
+		emitAssignment()
+	case asgnAppend:
+		if needsSetDefault {
+			emitSetDefault()
+		}
 		emitAppend()
 	case asgnMaybeSet:
 		gctx.writef("if cfg.get(%q) == None:", pcv.nam)
 		gctx.indentLevel++
 		gctx.newLine()
+		if needsSetDefault {
+			emitSetDefault()
+		}
 		emitAssignment()
 		gctx.indentLevel--
 	}
+
+	gctx.setHasBeenAssigned(&pcv)
 }
 
-func (pcv productConfigVariable) emitGet(gctx *generationContext, isDefined bool) {
-	if isDefined || pcv.isPreset() {
+func (pcv productConfigVariable) emitGet(gctx *generationContext) {
+	if gctx.hasBeenAssigned(&pcv) || pcv.isPreset() {
 		gctx.writef("cfg[%q]", pcv.nam)
 	} else {
 		gctx.writef("cfg.get(%q, %s)", pcv.nam, pcv.defaultValueString())
 	}
 }
 
-func (pcv productConfigVariable) emitDefined(gctx *generationContext) {
-	gctx.writef("g.get(%q) != None", pcv.name())
-}
-
 type otherGlobalVariable struct {
 	baseVariable
 }
 
 func (scv otherGlobalVariable) emitSet(gctx *generationContext, asgn *assignmentNode) {
 	emitAssignment := func() {
-		scv.emitGet(gctx, true)
-		gctx.write(" = ")
+		gctx.writef("g[%q] = ", scv.nam)
 		asgn.value.emitListVarCopy(gctx)
 	}
 
 	emitAppend := func() {
-		scv.emitGet(gctx, true)
-		gctx.write(" += ")
+		gctx.writef("g[%q] += ", scv.nam)
 		value := asgn.value
 		if scv.valueType() == starlarkTypeString {
 			gctx.writef(`" " + `)
@@ -146,75 +150,70 @@
 		value.emit(gctx)
 	}
 
+	// If we are not sure the variable has been assigned before, emit setdefault
+	needsSetDefault := !gctx.hasBeenAssigned(&scv) && !scv.isPreset() && asgn.isSelfReferential()
+
 	switch asgn.flavor {
 	case asgnSet:
+		if needsSetDefault {
+			gctx.writef("g.setdefault(%q, %s)", scv.name(), scv.defaultValueString())
+			gctx.newLine()
+		}
 		emitAssignment()
 	case asgnAppend:
-		emitAppend()
-	case asgnMaybeAppend:
-		// If we are not sure variable has been assigned before, emit setdefault
-		gctx.writef("g.setdefault(%q, %s)", scv.name(), scv.defaultValueString())
-		gctx.newLine()
+		if needsSetDefault {
+			gctx.writef("g.setdefault(%q, %s)", scv.name(), scv.defaultValueString())
+			gctx.newLine()
+		}
 		emitAppend()
 	case asgnMaybeSet:
 		gctx.writef("if g.get(%q) == None:", scv.nam)
 		gctx.indentLevel++
 		gctx.newLine()
+		if needsSetDefault {
+			gctx.writef("g.setdefault(%q, %s)", scv.name(), scv.defaultValueString())
+			gctx.newLine()
+		}
 		emitAssignment()
 		gctx.indentLevel--
 	}
+
+	gctx.setHasBeenAssigned(&scv)
 }
 
-func (scv otherGlobalVariable) emitGet(gctx *generationContext, isDefined bool) {
-	if isDefined || scv.isPreset() {
+func (scv otherGlobalVariable) emitGet(gctx *generationContext) {
+	if gctx.hasBeenAssigned(&scv) || scv.isPreset() {
 		gctx.writef("g[%q]", scv.nam)
 	} else {
 		gctx.writef("g.get(%q, %s)", scv.nam, scv.defaultValueString())
 	}
 }
 
-func (scv otherGlobalVariable) emitDefined(gctx *generationContext) {
-	gctx.writef("g.get(%q) != None", scv.name())
-}
-
 type localVariable struct {
 	baseVariable
 }
 
-func (lv localVariable) emitDefined(gctx *generationContext) {
-	gctx.writef(lv.String())
-}
-
 func (lv localVariable) String() string {
 	return "_" + lv.nam
 }
 
 func (lv localVariable) emitSet(gctx *generationContext, asgn *assignmentNode) {
 	switch asgn.flavor {
-	case asgnSet:
+	case asgnSet, asgnMaybeSet:
 		gctx.writef("%s = ", lv)
 		asgn.value.emitListVarCopy(gctx)
 	case asgnAppend:
-		lv.emitGet(gctx, false)
-		gctx.write(" += ")
+		gctx.writef("%s += ", lv)
 		value := asgn.value
 		if lv.valueType() == starlarkTypeString {
 			gctx.writef(`" " + `)
 			value = &toStringExpr{expr: value}
 		}
 		value.emit(gctx)
-	case asgnMaybeAppend:
-		gctx.writef("%s(%q, ", cfnLocalAppend, lv)
-		asgn.value.emit(gctx)
-		gctx.write(")")
-	case asgnMaybeSet:
-		gctx.writef("%s(%q, ", cfnLocalSetDefault, lv)
-		asgn.value.emit(gctx)
-		gctx.write(")")
 	}
 }
 
-func (lv localVariable) emitGet(gctx *generationContext, _ bool) {
+func (lv localVariable) emitGet(gctx *generationContext) {
 	gctx.writef("%s", lv)
 }
 
@@ -223,7 +222,7 @@
 	value starlarkExpr
 }
 
-func (pv predefinedVariable) emitGet(gctx *generationContext, _ bool) {
+func (pv predefinedVariable) emitGet(gctx *generationContext) {
 	pv.value.emit(gctx)
 }
 
@@ -244,10 +243,6 @@
 	panic(fmt.Errorf("cannot set predefined variable %s to %q", pv.name(), asgn.mkValue.Dump()))
 }
 
-func (pv predefinedVariable) emitDefined(gctx *generationContext) {
-	gctx.write("True")
-}
-
 var localProductConfigVariables = map[string]string{
 	"LOCAL_AUDIO_PRODUCT_PACKAGE":         "PRODUCT_PACKAGES",
 	"LOCAL_AUDIO_PRODUCT_COPY_FILES":      "PRODUCT_COPY_FILES",
@@ -278,31 +273,41 @@
 // addVariable returns a variable with a given name. A variable is
 // added if it does not exist yet.
 func (ctx *parseContext) addVariable(name string) variable {
+	// Get the hintType before potentially changing the variable name
+	var hintType starlarkType
+	var ok bool
+	if hintType, ok = ctx.typeHints[name]; !ok {
+		hintType = starlarkTypeUnknown
+	}
+	// Heuristics: if the variable's name is all lowercase, consider it a
+	// local string variable.
+	isLocalVariable := name == strings.ToLower(name)
+	// Local variables can't have special characters in them, because they
+	// will be used as starlark identifiers
+	if isLocalVariable {
+		name = strings.ReplaceAll(strings.TrimSpace(name), "-", "_")
+	}
 	v, found := ctx.variables[name]
 	if !found {
-		_, preset := presetVariables[name]
 		if vi, found := KnownVariables[name]; found {
+			_, preset := presetVariables[name]
 			switch vi.class {
 			case VarClassConfig:
 				v = &productConfigVariable{baseVariable{nam: name, typ: vi.valueType, preset: preset}}
 			case VarClassSoong:
 				v = &otherGlobalVariable{baseVariable{nam: name, typ: vi.valueType, preset: preset}}
 			}
-		} else if name == strings.ToLower(name) {
-			// Heuristics: if variable's name is all lowercase, consider it local
-			// string variable.
-			v = &localVariable{baseVariable{nam: name, typ: starlarkTypeUnknown}}
+		} else if isLocalVariable {
+			v = &localVariable{baseVariable{nam: name, typ: hintType}}
 		} else {
-			vt := starlarkTypeUnknown
-			if strings.HasPrefix(name, "LOCAL_") {
-				// Heuristics: local variables that contribute to corresponding config variables
-				if cfgVarName, found := localProductConfigVariables[name]; found {
-					vi, found2 := KnownVariables[cfgVarName]
-					if !found2 {
-						panic(fmt.Errorf("unknown config variable %s for %s", cfgVarName, name))
-					}
-					vt = vi.valueType
+			vt := hintType
+			// Heuristics: local variables that contribute to corresponding config variables
+			if cfgVarName, found := localProductConfigVariables[name]; found && vt == starlarkTypeUnknown {
+				vi, found2 := KnownVariables[cfgVarName]
+				if !found2 {
+					panic(fmt.Errorf("unknown config variable %s for %s", cfgVarName, name))
 				}
+				vt = vi.valueType
 			}
 			if strings.HasSuffix(name, "_LIST") && vt == starlarkTypeUnknown {
 				// Heuristics: Variables with "_LIST" suffix are lists
diff --git a/provenance/Android.bp b/provenance/Android.bp
new file mode 100644
index 0000000..6fd67aa
--- /dev/null
+++ b/provenance/Android.bp
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+bootstrap_go_package {
+    name: "soong-provenance",
+    pkgPath: "android/soong/provenance",
+    srcs: [
+        "provenance_singleton.go",
+    ],
+    deps: [
+        "soong-android",
+    ],
+    testSrcs: [
+        "provenance_singleton_test.go",
+    ],
+    pluginFor: [
+        "soong_build",
+    ],
+}
diff --git a/provenance/provenance_metadata_proto/Android.bp b/provenance/provenance_metadata_proto/Android.bp
new file mode 100644
index 0000000..7fc47a9
--- /dev/null
+++ b/provenance/provenance_metadata_proto/Android.bp
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+python_library_host {
+    name: "provenance_metadata_proto",
+    version: {
+        py3: {
+            enabled: true,
+        },
+    },
+    srcs: [
+        "provenance_metadata.proto",
+    ],
+    proto: {
+        canonical_path_from_root: false,
+    },
+}
diff --git a/provenance/provenance_metadata_proto/provenance_metadata.proto b/provenance/provenance_metadata_proto/provenance_metadata.proto
new file mode 100644
index 0000000..f42aba7
--- /dev/null
+++ b/provenance/provenance_metadata_proto/provenance_metadata.proto
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+syntax = "proto3";
+
+package provenance_metadata_proto;
+option go_package = "android/soong/provenance/provenance_metadata_proto";
+
+// Provenance metadata of artifacts.
+message ProvenanceMetadata {
+  // Name of the module/target that creates the artifact.
+  // It is either a Soong module name or Bazel target label.
+  string module_name = 1;
+
+  // The path to the prebuilt artifacts, which is relative to the source tree
+  // directory. For example, “prebuilts/runtime/mainline/i18n/apex/com.android.i18n-arm.apex”.
+  string artifact_path = 2;
+
+  // The SHA256 hash of the artifact.
+  string artifact_sha256 = 3;
+
+  // The install path of the artifact in filesystem images.
+  // This is the absolute path of the artifact on the device.
+  string artifact_install_path = 4;
+
+  // Path of the attestation file of a prebuilt artifact, which is relative to
+  // the source tree directory. This is for prebuilt artifacts which have
+  // corresponding attestation files checked in the source tree.
+  string attestation_path = 5;
+}
+
+message ProvenanceMetaDataList {
+  repeated ProvenanceMetadata metadata = 1;
+}
\ No newline at end of file
diff --git a/provenance/provenance_singleton.go b/provenance/provenance_singleton.go
new file mode 100644
index 0000000..e49f3d4
--- /dev/null
+++ b/provenance/provenance_singleton.go
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package provenance
+
+import (
+	"android/soong/android"
+	"github.com/google/blueprint"
+)
+
+var (
+	pctx = android.NewPackageContext("android/soong/provenance")
+	rule = pctx.HostBinToolVariable("gen_provenance_metadata", "gen_provenance_metadata")
+
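+	// genProvenanceMetaData runs the gen_provenance_metadata tool to record the
+	// module name, artifact path, install path and SHA-256 of one prebuilt artifact.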
+	genProvenanceMetaData = pctx.AndroidStaticRule("genProvenanceMetaData",
+		blueprint.RuleParams{
+			Command: `rm -rf "$out" && ` +
+				`${gen_provenance_metadata} --module_name=${module_name} ` +
+				`--artifact_path=$in --install_path=${install_path} --metadata_path=$out`,
+			CommandDeps: []string{"${gen_provenance_metadata}"},
+		}, "module_name", "install_path")
+
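+	// mergeProvenanceMetaData concatenates the per-artifact textprotos under a
+	// single header, dropping their individual comment headers.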
+	mergeProvenanceMetaData = pctx.AndroidStaticRule("mergeProvenanceMetaData",
+		blueprint.RuleParams{
+			Command: `rm -rf $out $out.temp && ` +
+				`echo -e "# proto-file: build/soong/provenance/proto/provenance_metadata.proto\n# proto-message: ProvenanceMetaDataList" > $out && ` +
+				`touch $out.temp && cat $out.temp $in | grep -v "^#.*" >> $out && rm -rf $out.temp`,
+		})
+)
+
+type ProvenanceMetadata interface {
+	ProvenanceMetaDataFile() android.OutputPath
+}
+
+func init() {
+	RegisterProvenanceSingleton(android.InitRegistrationContext)
+}
+
+func RegisterProvenanceSingleton(ctx android.RegistrationContext) {
+	ctx.RegisterSingletonType("provenance_metadata_singleton", provenanceInfoSingletonFactory)
+}
+
+var PrepareForTestWithProvenanceSingleton = android.FixtureRegisterWithContext(RegisterProvenanceSingleton)
+
+func provenanceInfoSingletonFactory() android.Singleton {
+	return &provenanceInfoSingleton{}
+}
+
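+// provenanceInfoSingleton merges the provenance metadata of all eligible modules
+// into a single textproto that is dist'ed with the droidcore goal.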
+type provenanceInfoSingleton struct {
+	mergedMetaDataFile android.OutputPath
+}
+
+func (p *provenanceInfoSingleton) GenerateBuildActions(context android.SingletonContext) {
+	allMetaDataFiles := make([]android.Path, 0)
+	context.VisitAllModulesIf(moduleFilter, func(module android.Module) {
+		if p, ok := module.(ProvenanceMetadata); ok {
+			allMetaDataFiles = append(allMetaDataFiles, p.ProvenanceMetaDataFile())
+		}
+	})
+	p.mergedMetaDataFile = android.PathForOutput(context, "provenance_metadata.textproto")
+	context.Build(pctx, android.BuildParams{
+		Rule:        mergeProvenanceMetaData,
+		Description: "merge provenance metadata",
+		Inputs:      allMetaDataFiles,
+		Output:      p.mergedMetaDataFile,
+	})
+
+	context.Build(pctx, android.BuildParams{
+		Rule:        blueprint.Phony,
+		Description: "phony rule for merged provenance metadata",
+		Inputs:      []android.Path{p.mergedMetaDataFile},
+		Output:      android.PathForPhony(context, "provenance_metadata"),
+	})
+
+	context.Phony("droidcore", android.PathForPhony(context, "provenance_metadata"))
+}
+
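+// moduleFilter selects enabled, installable modules that declare a provenance
+// metadata file.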
+func moduleFilter(module android.Module) bool {
+	if !module.Enabled() || module.IsSkipInstall() {
+		return false
+	}
+	if p, ok := module.(ProvenanceMetadata); ok {
+		return p.ProvenanceMetaDataFile().String() != ""
+	}
+	return false
+}
+
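+// GenerateArtifactProvenanceMetaData emits the build rule that writes the
+// provenance metadata file for a single prebuilt artifact.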
+func GenerateArtifactProvenanceMetaData(ctx android.ModuleContext, artifactPath android.Path, installedFile android.InstallPath) android.OutputPath {
+	onDevicePathOfInstalledFile := android.InstallPathToOnDevicePath(ctx, installedFile)
+	artifactMetaDataFile := android.PathForIntermediates(ctx, "provenance_metadata", ctx.ModuleDir(), ctx.ModuleName(), "provenance_metadata.textproto")
+	ctx.Build(pctx, android.BuildParams{
+		Rule:        genProvenanceMetaData,
+		Description: "generate artifact provenance metadata",
+		Inputs:      []android.Path{artifactPath},
+		Output:      artifactMetaDataFile,
+		Args: map[string]string{
+			"module_name":  ctx.ModuleName(),
+			"install_path": onDevicePathOfInstalledFile,
+		}})
+
+	return artifactMetaDataFile
+}
+
+func (p *provenanceInfoSingleton) MakeVars(ctx android.MakeVarsContext) {
+	ctx.DistForGoal("droidcore", p.mergedMetaDataFile)
+}
+
+var _ android.SingletonMakeVarsProvider = (*provenanceInfoSingleton)(nil)
diff --git a/provenance/provenance_singleton_test.go b/provenance/provenance_singleton_test.go
new file mode 100644
index 0000000..0f1eae2
--- /dev/null
+++ b/provenance/provenance_singleton_test.go
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package provenance
+
+import (
+	"strings"
+	"testing"
+
+	"android/soong/android"
+)
+
+func TestProvenanceSingleton(t *testing.T) {
+	result := android.GroupFixturePreparers(
+		PrepareForTestWithProvenanceSingleton,
+		android.PrepareForTestWithAndroidMk).RunTestWithBp(t, "")
+
+	outputs := result.SingletonForTests("provenance_metadata_singleton").AllOutputs()
+	for _, output := range outputs {
+		testingBuildParam := result.SingletonForTests("provenance_metadata_singleton").Output(output)
+		switch {
+		case strings.Contains(output, "soong/provenance_metadata.textproto"):
+			android.AssertStringEquals(t, "Invalid build rule", "android/soong/provenance.mergeProvenanceMetaData", testingBuildParam.Rule.String())
+			android.AssertIntEquals(t, "Invalid input", len(testingBuildParam.Inputs), 0)
+			android.AssertStringDoesContain(t, "Invalid output path", output, "soong/provenance_metadata.textproto")
+			android.AssertIntEquals(t, "Invalid args", len(testingBuildParam.Args), 0)
+
+		case strings.HasSuffix(output, "provenance_metadata"):
+			android.AssertStringEquals(t, "Invalid build rule", "<builtin>:phony", testingBuildParam.Rule.String())
+			android.AssertStringEquals(t, "Invalid input", testingBuildParam.Inputs[0].String(), "out/soong/provenance_metadata.textproto")
+			android.AssertStringEquals(t, "Invalid output path", output, "provenance_metadata")
+			android.AssertIntEquals(t, "Invalid args", len(testingBuildParam.Args), 0)
+		}
+	}
+}
diff --git a/provenance/tools/Android.bp b/provenance/tools/Android.bp
new file mode 100644
index 0000000..0eddd76
--- /dev/null
+++ b/provenance/tools/Android.bp
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+python_binary_host {
+    name: "gen_provenance_metadata",
+    srcs: [
+        "gen_provenance_metadata.py",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+    libs: [
+        "provenance_metadata_proto",
+        "libprotobuf-python",
+    ],
+}
+
+python_test_host {
+    name: "gen_provenance_metadata_test",
+    main: "gen_provenance_metadata_test.py",
+    srcs: [
+        "gen_provenance_metadata_test.py",
+    ],
+    data: [
+        ":gen_provenance_metadata",
+    ],
+    libs: [
+        "provenance_metadata_proto",
+        "libprotobuf-python",
+    ],
+    test_suites: ["general-tests"],
+}
diff --git a/provenance/tools/gen_provenance_metadata.py b/provenance/tools/gen_provenance_metadata.py
new file mode 100644
index 0000000..b33f911
--- /dev/null
+++ b/provenance/tools/gen_provenance_metadata.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import hashlib
+import sys
+
+import google.protobuf.text_format as text_format
+import provenance_metadata_pb2
+
+def Log(*info):
+  if args.verbose:
+    for i in info:
+      print(i)
+
+def ParseArgs(argv):
+  parser = argparse.ArgumentParser(description='Create provenance metadata for a prebuilt artifact')
+  parser.add_argument('-v', '--verbose', action='store_true', help='Print more information in execution')
+  parser.add_argument('--module_name', help='Module name', required=True)
+  parser.add_argument('--artifact_path', help='Relative path of the prebuilt artifact in source tree', required=True)
+  parser.add_argument('--install_path', help='Absolute path of the artifact in the filesystem images', required=True)
+  parser.add_argument('--metadata_path', help='Path of the provenance metadata file created for the artifact', required=True)
+  return parser.parse_args(argv)
+
+def main(argv):
+  global args
+  args = ParseArgs(argv)
+  Log("Args:", vars(args))
+
+  provenance_metadata = provenance_metadata_pb2.ProvenanceMetadata()
+  provenance_metadata.module_name = args.module_name
+  provenance_metadata.artifact_path = args.artifact_path
+  provenance_metadata.artifact_install_path = args.install_path
+
+  Log("Generating SHA256 hash")
+  h = hashlib.sha256()
+  with open(args.artifact_path, "rb") as artifact_file:
+    h.update(artifact_file.read())
+  provenance_metadata.artifact_sha256 = h.hexdigest()
+
+  text_proto = [
+      "# proto-file: build/soong/provenance/proto/provenance_metadata.proto",
+      "# proto-message: ProvenanceMetaData",
+      "",
+      text_format.MessageToString(provenance_metadata)
+  ]
+  with open(args.metadata_path, "wt") as metadata_file:
+    file_content = "\n".join(text_proto)
+    Log("Writing provenance metadata in textproto:", file_content)
+    metadata_file.write(file_content)
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/provenance/tools/gen_provenance_metadata_test.py b/provenance/tools/gen_provenance_metadata_test.py
new file mode 100644
index 0000000..2fc04bf
--- /dev/null
+++ b/provenance/tools/gen_provenance_metadata_test.py
@@ -0,0 +1,125 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import hashlib
+import logging
+import os
+import subprocess
+import tempfile
+import unittest
+
+import google.protobuf.text_format as text_format
+import provenance_metadata_pb2
+
+logger = logging.getLogger(__name__)
+
+def run(args, verbose=None, **kwargs):
+  """Creates and returns a subprocess.Popen object.
+
+  Args:
+    args: The command represented as a list of strings.
+    verbose: Whether the commands should be shown. Defaults to the global
+        verbosity if unspecified.
+    kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
+        stdin, etc. stdout and stderr will default to subprocess.PIPE and
+        subprocess.STDOUT respectively unless caller specifies any of them.
+        universal_newlines will default to True, as most of the users in
+        releasetools expect string output.
+
+  Returns:
+    A subprocess.Popen object.
+  """
+  if 'stdout' not in kwargs and 'stderr' not in kwargs:
+    kwargs['stdout'] = subprocess.PIPE
+    kwargs['stderr'] = subprocess.STDOUT
+  if 'universal_newlines' not in kwargs:
+    kwargs['universal_newlines'] = True
+  if verbose:
+    logger.info("  Running: \"%s\"", " ".join(args))
+  return subprocess.Popen(args, **kwargs)
+
+
+def run_and_check_output(args, verbose=None, **kwargs):
+  """Runs the given command and returns the output.
+
+  Args:
+    args: The command represented as a list of strings.
+    verbose: Whether the commands should be shown. Defaults to the global
+        verbosity if unspecified.
+    kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
+        stdin, etc. stdout and stderr will default to subprocess.PIPE and
+        subprocess.STDOUT respectively unless caller specifies any of them.
+
+  Returns:
+    The output string.
+
+  Raises:
+    ExternalError: On non-zero exit from the command.
+  """
+  proc = run(args, verbose=verbose, **kwargs)
+  output, _ = proc.communicate()
+  if output is None:
+    output = ""
+  if verbose:
+    logger.info("%s", output.rstrip())
+  if proc.returncode != 0:
+    raise RuntimeError(
+        "Failed to run command '{}' (exit code {}):\n{}".format(
+            args, proc.returncode, output))
+  return output
+
+def run_host_command(args, verbose=None, **kwargs):
+  host_build_top = os.environ.get("ANDROID_BUILD_TOP")
+  if host_build_top:
+    host_command_dir = os.path.join(host_build_top, "out/host/linux-x86/bin")
+    args[0] = os.path.join(host_command_dir, args[0])
+  return run_and_check_output(args, verbose, **kwargs)
+
+def sha256(s):
+  h = hashlib.sha256()
+  h.update(bytearray(s, 'utf-8'))
+  return h.hexdigest()
+
+class ProvenanceMetaDataToolTest(unittest.TestCase):
+
+  def test_gen_provenance_metadata(self):
+    artifact_content = "test artifact"
+    artifact_file = tempfile.mktemp()
+    with open(artifact_file,"wt") as f:
+      f.write(artifact_content)
+    metadata_file = tempfile.mktemp()
+    cmd = ["gen_provenance_metadata"]
+    cmd.extend(["--module_name", "a"])
+    cmd.extend(["--artifact_path", artifact_file])
+    cmd.extend(["--install_path", "b"])
+    cmd.extend(["--metadata_path", metadata_file])
+    output = run_host_command(cmd)
+    self.assertEqual(output, "")
+
+    with open(metadata_file,"rt") as f:
+      data = f.read()
+      provenance_metadata = provenance_metadata_pb2.ProvenanceMetadata()
+      text_format.Parse(data, provenance_metadata)
+      self.assertEqual(provenance_metadata.module_name, "a")
+      self.assertEqual(provenance_metadata.artifact_path, artifact_file)
+      self.assertEqual(provenance_metadata.artifact_install_path, "b")
+      self.assertEqual(provenance_metadata.artifact_sha256, sha256(artifact_content))
+
+    os.remove(artifact_file)
+    os.remove(metadata_file)
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
\ No newline at end of file
diff --git a/python/python.go b/python/python.go
index 734ac57..b100cc3 100644
--- a/python/python.go
+++ b/python/python.go
@@ -423,6 +423,9 @@
 		if ctx.Target().Os.Bionic() {
 			launcherSharedLibDeps = append(launcherSharedLibDeps, "libc", "libdl", "libm")
 		}
+		if ctx.Target().Os == android.LinuxMusl && !ctx.Config().HostStaticBinaries() {
+			launcherSharedLibDeps = append(launcherSharedLibDeps, "libc_musl")
+		}
 
 		switch p.properties.Actual_version {
 		case pyVersion2:
@@ -432,6 +435,7 @@
 			if p.bootstrapper.autorun() {
 				launcherModule = "py2-launcher-autorun"
 			}
+
 			launcherSharedLibDeps = append(launcherSharedLibDeps, "libc++")
 
 		case pyVersion3:
@@ -441,6 +445,9 @@
 			if p.bootstrapper.autorun() {
 				launcherModule = "py3-launcher-autorun"
 			}
+			if ctx.Config().HostStaticBinaries() && ctx.Target().Os == android.LinuxMusl {
+				launcherModule += "-static"
+			}
 
 			if ctx.Device() {
 				launcherSharedLibDeps = append(launcherSharedLibDeps, "liblog")
diff --git a/rust/OWNERS b/rust/OWNERS
index d07ef7e..ddaebc5 100644
--- a/rust/OWNERS
+++ b/rust/OWNERS
@@ -1,5 +1,5 @@
 # Additional owner/reviewers for rust rules, including parent directory owners.
-per-file * = chh@google.com, ivanlozano@google.com, jeffv@google.com, mmaurer@google.com, srhines@google.com
+per-file * = chiw@google.com, chriswailes@google.com, ivanlozano@google.com, jeffv@google.com, mmaurer@google.com, srhines@google.com
 
 # Limited owners/reviewers of the allowed list.
-per-file allowed_list.go = chh@google.com, ivanlozano@google.com, jeffv@google.com, mmaurer@google.com, srhines@google.com
+per-file allowed_list.go = chiw@google.com, chriswailes@google.com, ivanlozano@google.com, jeffv@google.com, mmaurer@google.com, srhines@google.com
diff --git a/rust/bindgen.go b/rust/bindgen.go
index f4c337d..c2b0512 100644
--- a/rust/bindgen.go
+++ b/rust/bindgen.go
@@ -30,7 +30,7 @@
 	defaultBindgenFlags = []string{""}
 
 	// bindgen should specify its own Clang revision so updating Clang isn't potentially blocked on bindgen failures.
-	bindgenClangVersion = "clang-r437112b"
+	bindgenClangVersion = "clang-r445002"
 
 	_ = pctx.VariableFunc("bindgenClangVersion", func(ctx android.PackageVarContext) string {
 		if override := ctx.Config().Getenv("LLVM_BINDGEN_PREBUILTS_VERSION"); override != "" {
diff --git a/rust/builder.go b/rust/builder.go
index 00035b9..20ca5db 100644
--- a/rust/builder.go
+++ b/rust/builder.go
@@ -274,7 +274,7 @@
 		implicits = append(implicits, outputs.Paths()...)
 	}
 
-	envVars = append(envVars, "ANDROID_RUST_VERSION="+config.RustDefaultVersion)
+	envVars = append(envVars, "ANDROID_RUST_VERSION="+config.GetRustVersion(ctx))
 
 	if ctx.RustModule().compiler.CargoEnvCompat() {
 		if _, ok := ctx.RustModule().compiler.(*binaryDecorator); ok {
diff --git a/rust/compiler.go b/rust/compiler.go
index c5d40f4..19499fa 100644
--- a/rust/compiler.go
+++ b/rust/compiler.go
@@ -121,6 +121,12 @@
 	// include all of the static libraries symbols in any dylibs or binaries which use this rlib as well.
 	Whole_static_libs []string `android:"arch_variant"`
 
+	// list of Rust system library dependencies.
+	//
+	// This is usually only needed when `no_stdlibs` is true, in which case it can be used to depend on system crates
+	// like `core` and `alloc`.
+	Stdlibs []string `android:"arch_variant"`
+
 	// crate name, required for modules which produce Rust libraries: rust_library, rust_ffi and SourceProvider
 	// modules which create library variants (rust_bindgen). This must be the expected extern crate name used in
 	// source, and is required to conform to an enforced format matching library output files (if the output file is
@@ -360,6 +366,7 @@
 	deps.StaticLibs = append(deps.StaticLibs, compiler.Properties.Static_libs...)
 	deps.WholeStaticLibs = append(deps.WholeStaticLibs, compiler.Properties.Whole_static_libs...)
 	deps.SharedLibs = append(deps.SharedLibs, compiler.Properties.Shared_libs...)
+	deps.Stdlibs = append(deps.Stdlibs, compiler.Properties.Stdlibs...)
 
 	if !Bool(compiler.Properties.No_stdlibs) {
 		for _, stdlib := range config.Stdlibs {
diff --git a/rust/config/allowed_list.go b/rust/config/allowed_list.go
index 14fcb02..802e1da 100644
--- a/rust/config/allowed_list.go
+++ b/rust/config/allowed_list.go
@@ -24,8 +24,11 @@
 		"packages/modules/DnsResolver",
 		"packages/modules/Uwb",
 		"packages/modules/Virtualization",
+		"platform_testing/tests/codecoverage/native/rust",
 		"prebuilts/rust",
+		"system/core/debuggerd/rust",
 		"system/core/libstats/pull_rust",
+		"system/core/trusty/libtrusty-rs",
 		"system/extras/profcollectd",
 		"system/extras/simpleperf",
 		"system/hardware/interfaces/keystore2",
diff --git a/rust/config/global.go b/rust/config/global.go
index c1ce13f..2d5fa99 100644
--- a/rust/config/global.go
+++ b/rust/config/global.go
@@ -24,7 +24,7 @@
 var pctx = android.NewPackageContext("android/soong/rust/config")
 
 var (
-	RustDefaultVersion = "1.58.1"
+	RustDefaultVersion = "1.59.0"
 	RustDefaultBase    = "prebuilts/rust/"
 	DefaultEdition     = "2021"
 	Stdlibs            = []string{
@@ -49,7 +49,7 @@
 		"-C overflow-checks=on",
 		"-C force-unwind-tables=yes",
 		// Use v0 mangling to distinguish from C++ symbols
-		"-Z symbol-mangling-version=v0",
+		"-C symbol-mangling-version=v0",
 	}
 
 	deviceGlobalRustFlags = []string{
@@ -86,12 +86,7 @@
 		return "${RustDefaultBase}"
 	})
 
-	pctx.VariableFunc("RustVersion", func(ctx android.PackageVarContext) string {
-		if override := ctx.Config().Getenv("RUST_PREBUILTS_VERSION"); override != "" {
-			return override
-		}
-		return RustDefaultVersion
-	})
+	pctx.VariableFunc("RustVersion", getRustVersionPctx)
 
 	pctx.StaticVariable("RustPath", "${RustBase}/${HostPrebuiltTag}/${RustVersion}")
 	pctx.StaticVariable("RustBin", "${RustPath}/bin")
@@ -103,3 +98,14 @@
 	pctx.StaticVariable("DeviceGlobalLinkFlags", strings.Join(deviceGlobalLinkFlags, " "))
 
 }
+
+func getRustVersionPctx(ctx android.PackageVarContext) string {
+	return GetRustVersion(ctx)
+}
+
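+// GetRustVersion returns the Rust toolchain version, honoring the
+// RUST_PREBUILTS_VERSION environment override.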
+func GetRustVersion(ctx android.PathContext) string {
+	if override := ctx.Config().Getenv("RUST_PREBUILTS_VERSION"); override != "" {
+		return override
+	}
+	return RustDefaultVersion
+}
diff --git a/rust/config/toolchain.go b/rust/config/toolchain.go
index a769f12..9c9d572 100644
--- a/rust/config/toolchain.go
+++ b/rust/config/toolchain.go
@@ -121,14 +121,7 @@
 }
 
 func LibclangRuntimeLibrary(t Toolchain, library string) string {
-	arch := t.LibclangRuntimeLibraryArch()
-	if arch == "" {
-		return ""
-	}
-	if !t.Bionic() {
-		return "libclang_rt." + library + "-" + arch
-	}
-	return "libclang_rt." + library + "-" + arch + "-android"
+	return "libclang_rt." + library
 }
 
 func LibRustRuntimeLibrary(t Toolchain, library string) string {
diff --git a/rust/config/x86_linux_host.go b/rust/config/x86_linux_host.go
index 7608349..4d7c422 100644
--- a/rust/config/x86_linux_host.go
+++ b/rust/config/x86_linux_host.go
@@ -42,8 +42,6 @@
 		"-nodefaultlibs",
 		"-nostdlib",
 		"-Wl,--no-dynamic-linker",
-		// for unwind
-		"-lgcc", "-lgcc_eh",
 	}
 	linuxX86Rustflags   = []string{}
 	linuxX86Linkflags   = []string{}
diff --git a/rust/coverage.go b/rust/coverage.go
index 050b811..651ce6e 100644
--- a/rust/coverage.go
+++ b/rust/coverage.go
@@ -22,6 +22,7 @@
 
 var CovLibraryName = "libprofile-clang-extras"
 
+// Add '%c' to default specifier after we resolve http://b/210012154
 const profileInstrFlag = "-fprofile-instr-generate=/data/misc/trace/clang-%p-%m.profraw"
 
 type coverage struct {
@@ -59,6 +60,10 @@
 		flags.LinkFlags = append(flags.LinkFlags,
 			profileInstrFlag, "-g", coverage.OutputFile().Path().String(), "-Wl,--wrap,open")
 		deps.StaticLibs = append(deps.StaticLibs, coverage.OutputFile().Path())
+		if cc.EnableContinuousCoverage(ctx) {
+			flags.RustFlags = append(flags.RustFlags, "-C llvm-args=--runtime-counter-relocation")
+			flags.LinkFlags = append(flags.LinkFlags, "-Wl,-mllvm,-runtime-counter-relocation")
+		}
 	}
 
 	return flags, deps
diff --git a/rust/fuzz_test.go b/rust/fuzz_test.go
index 98be7c2..865665e 100644
--- a/rust/fuzz_test.go
+++ b/rust/fuzz_test.go
@@ -47,17 +47,17 @@
 	// Check that compiler flags are set appropriately .
 	fuzz_libtest := ctx.ModuleForTests("fuzz_libtest", "android_arm64_armv8-a_fuzzer").Rule("rustc")
 	if !strings.Contains(fuzz_libtest.Args["rustcFlags"], "-Z sanitizer=hwaddress") ||
-		!strings.Contains(fuzz_libtest.Args["rustcFlags"], "-C passes='sancov'") ||
+		!strings.Contains(fuzz_libtest.Args["rustcFlags"], "-C passes='sancov-module'") ||
 		!strings.Contains(fuzz_libtest.Args["rustcFlags"], "--cfg fuzzing") {
-		t.Errorf("rust_fuzz module does not contain the expected flags (sancov, cfg fuzzing, hwaddress sanitizer).")
+		t.Errorf("rust_fuzz module does not contain the expected flags (sancov-module, cfg fuzzing, hwaddress sanitizer).")
 
 	}
 
 	// Check that dependencies have 'fuzzer' variants produced for them as well.
 	libtest_fuzzer := ctx.ModuleForTests("libtest_fuzzing", "android_arm64_armv8-a_rlib_rlib-std_fuzzer").Output("libtest_fuzzing.rlib")
 	if !strings.Contains(libtest_fuzzer.Args["rustcFlags"], "-Z sanitizer=hwaddress") ||
-		!strings.Contains(libtest_fuzzer.Args["rustcFlags"], "-C passes='sancov'") ||
+		!strings.Contains(libtest_fuzzer.Args["rustcFlags"], "-C passes='sancov-module'") ||
 		!strings.Contains(libtest_fuzzer.Args["rustcFlags"], "--cfg fuzzing") {
-		t.Errorf("rust_fuzz dependent library does not contain the expected flags (sancov, cfg fuzzing, hwaddress sanitizer).")
+		t.Errorf("rust_fuzz dependent library does not contain the expected flags (sancov-module, cfg fuzzing, hwaddress sanitizer).")
 	}
 }
diff --git a/rust/image.go b/rust/image.go
index 5d57f15..dfc7f74 100644
--- a/rust/image.go
+++ b/rust/image.go
@@ -149,6 +149,10 @@
 	return mod.ModuleBase.InRecovery() || mod.ModuleBase.InstallInRecovery()
 }
 
+func (mod *Module) InRamdisk() bool {
+	return mod.ModuleBase.InRamdisk() || mod.ModuleBase.InstallInRamdisk()
+}
+
 func (mod *Module) InVendorRamdisk() bool {
 	return mod.ModuleBase.InVendorRamdisk() || mod.ModuleBase.InstallInVendorRamdisk()
 }
diff --git a/rust/library_test.go b/rust/library_test.go
index d78dcdd..4633cc7 100644
--- a/rust/library_test.go
+++ b/rust/library_test.go
@@ -200,23 +200,34 @@
 func TestAutoDeps(t *testing.T) {
 
 	ctx := testRust(t, `
-                rust_library_host {
-                        name: "libbar",
-                        srcs: ["bar.rs"],
-                        crate_name: "bar",
-                }
+		rust_library_host {
+			name: "libbar",
+			srcs: ["bar.rs"],
+			crate_name: "bar",
+		}
+		rust_library_host_rlib {
+			name: "librlib_only",
+			srcs: ["bar.rs"],
+			crate_name: "rlib_only",
+		}
 		rust_library_host {
 			name: "libfoo",
 			srcs: ["foo.rs"],
 			crate_name: "foo",
-                        rustlibs: ["libbar"],
+			rustlibs: [
+				"libbar",
+				"librlib_only",
+			],
 		}
-                rust_ffi_host {
-                        name: "libfoo.ffi",
-                        srcs: ["foo.rs"],
-                        crate_name: "foo",
-                        rustlibs: ["libbar"],
-                }`)
+		rust_ffi_host {
+			name: "libfoo.ffi",
+			srcs: ["foo.rs"],
+			crate_name: "foo",
+			rustlibs: [
+				"libbar",
+				"librlib_only",
+			],
+		}`)
 
 	libfooRlib := ctx.ModuleForTests("libfoo", "linux_glibc_x86_64_rlib_rlib-std")
 	libfooDylib := ctx.ModuleForTests("libfoo", "linux_glibc_x86_64_dylib")
@@ -239,7 +250,9 @@
 		if android.InList("libbar.dylib-std", dyn.Module().(*Module).Properties.AndroidMkRlibs) {
 			t.Errorf("libbar present as rlib dependency in dynamic lib")
 		}
-
+		if !android.InList("librlib_only.dylib-std", dyn.Module().(*Module).Properties.AndroidMkRlibs) {
+			t.Errorf("librlib_only should be selected by rustlibs as an rlib.")
+		}
 	}
 }
 
diff --git a/rust/prebuilt.go b/rust/prebuilt.go
index 6f17272..6cdd07d 100644
--- a/rust/prebuilt.go
+++ b/rust/prebuilt.go
@@ -22,6 +22,7 @@
 	android.RegisterModuleType("rust_prebuilt_library", PrebuiltLibraryFactory)
 	android.RegisterModuleType("rust_prebuilt_dylib", PrebuiltDylibFactory)
 	android.RegisterModuleType("rust_prebuilt_rlib", PrebuiltRlibFactory)
+	android.RegisterModuleType("rust_prebuilt_proc_macro", PrebuiltProcMacroFactory)
 }
 
 type PrebuiltProperties struct {
@@ -38,8 +39,42 @@
 	Properties PrebuiltProperties
 }
 
+type prebuiltProcMacroDecorator struct {
+	android.Prebuilt
+
+	*procMacroDecorator
+	Properties PrebuiltProperties
+}
+
+func PrebuiltProcMacroFactory() android.Module {
+	module, _ := NewPrebuiltProcMacro(android.HostSupportedNoCross)
+	return module.Init()
+}
+
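+// rustPrebuilt is implemented by the prebuilt library and prebuilt proc-macro
+// decorators that wrap checked-in artifacts.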
+type rustPrebuilt interface {
+	prebuiltSrcs() []string
+	prebuilt() *android.Prebuilt
+}
+
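+// NewPrebuiltProcMacro returns a proc-macro module whose compiler is the
+// prebuilt proc-macro decorator.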
+func NewPrebuiltProcMacro(hod android.HostOrDeviceSupported) (*Module, *prebuiltProcMacroDecorator) {
+	module, library := NewProcMacro(hod)
+	prebuilt := &prebuiltProcMacroDecorator{
+		procMacroDecorator: library,
+	}
+	module.compiler = prebuilt
+
+	addSrcSupplier(module, prebuilt)
+
+	return module, prebuilt
+}
+
 var _ compiler = (*prebuiltLibraryDecorator)(nil)
 var _ exportedFlagsProducer = (*prebuiltLibraryDecorator)(nil)
+var _ rustPrebuilt = (*prebuiltLibraryDecorator)(nil)
+
+var _ compiler = (*prebuiltProcMacroDecorator)(nil)
+var _ exportedFlagsProducer = (*prebuiltProcMacroDecorator)(nil)
+var _ rustPrebuilt = (*prebuiltProcMacroDecorator)(nil)
 
 func PrebuiltLibraryFactory() android.Module {
 	module, _ := NewPrebuiltLibrary(android.HostAndDeviceSupported)
@@ -56,7 +91,7 @@
 	return module.Init()
 }
 
-func addSrcSupplier(module android.PrebuiltInterface, prebuilt *prebuiltLibraryDecorator) {
+func addSrcSupplier(module android.PrebuiltInterface, prebuilt rustPrebuilt) {
 	srcsSupplier := func(_ android.BaseModuleContext, _ android.Module) []string {
 		return prebuilt.prebuiltSrcs()
 	}
@@ -152,3 +187,44 @@
 func (prebuilt *prebuiltLibraryDecorator) prebuilt() *android.Prebuilt {
 	return &prebuilt.Prebuilt
 }
+
+func (prebuilt *prebuiltProcMacroDecorator) prebuiltSrcs() []string {
+	srcs := prebuilt.Properties.Srcs
+	return srcs
+}
+
+func (prebuilt *prebuiltProcMacroDecorator) prebuilt() *android.Prebuilt {
+	return &prebuilt.Prebuilt
+}
+
+func (prebuilt *prebuiltProcMacroDecorator) compilerProps() []interface{} {
+	return append(prebuilt.procMacroDecorator.compilerProps(),
+		&prebuilt.Properties)
+}
+
+func (prebuilt *prebuiltProcMacroDecorator) compile(ctx ModuleContext, flags Flags, deps PathDeps) android.Path {
+	prebuilt.flagExporter.exportLinkDirs(android.PathsForModuleSrc(ctx, prebuilt.Properties.Link_dirs).Strings()...)
+	prebuilt.flagExporter.setProvider(ctx)
+
+	srcPath, paths := srcPathFromModuleSrcs(ctx, prebuilt.prebuiltSrcs())
+	if len(paths) > 0 {
+		ctx.PropertyErrorf("srcs", "prebuilt libraries can only have one entry in srcs (the prebuilt path)")
+	}
+	prebuilt.baseCompiler.unstrippedOutputFile = srcPath
+	return srcPath
+}
+
+func (prebuilt *prebuiltProcMacroDecorator) rustdoc(ctx ModuleContext, flags Flags,
+	deps PathDeps) android.OptionalPath {
+
+	return android.OptionalPath{}
+}
+
+func (prebuilt *prebuiltProcMacroDecorator) compilerDeps(ctx DepsContext, deps Deps) Deps {
+	deps = prebuilt.baseCompiler.compilerDeps(ctx, deps)
+	return deps
+}
+
+func (prebuilt *prebuiltProcMacroDecorator) nativeCoverage() bool {
+	return false
+}
diff --git a/rust/proc_macro.go b/rust/proc_macro.go
index 974c096..f8a4bbd 100644
--- a/rust/proc_macro.go
+++ b/rust/proc_macro.go
@@ -33,6 +33,7 @@
 }
 
 type procMacroInterface interface {
+	ProcMacro() bool
 }
 
 var _ compiler = (*procMacroDecorator)(nil)
@@ -90,6 +91,10 @@
 	return rlibAutoDep
 }
 
+func (procMacro *procMacroDecorator) ProcMacro() bool {
+	return true
+}
+
 func (procMacro *procMacroDecorator) everInstallable() bool {
 	// Proc_macros are never installed
 	return false
diff --git a/rust/rust.go b/rust/rust.go
index f40f1a8..c4fd148 100644
--- a/rust/rust.go
+++ b/rust/rust.go
@@ -27,6 +27,7 @@
 	cc_config "android/soong/cc/config"
 	"android/soong/fuzz"
 	"android/soong/rust/config"
+	"android/soong/snapshot"
 )
 
 var pctx = android.NewPackageContext("android/soong/rust")
@@ -331,6 +332,20 @@
 	return false
 }
 
+func (mod *Module) IsVndkPrebuiltLibrary() bool {
+	// Rust modules do not provide VNDK prebuilts
+	return false
+}
+
+func (mod *Module) IsVendorPublicLibrary() bool {
+	return mod.VendorProperties.IsVendorPublicLibrary
+}
+
+func (mod *Module) SdkAndPlatformVariantVisibleToMake() bool {
+	// Rust modules do not provide Sdk variants
+	return false
+}
+
 func (c *Module) IsVndkPrivate() bool {
 	return false
 }
@@ -806,6 +821,13 @@
 	return mod.Properties.Installable
 }
 
+func (mod *Module) ProcMacro() bool {
+	if pm, ok := mod.compiler.(procMacroInterface); ok {
+		return pm.ProcMacro()
+	}
+	return false
+}
+
 func (mod *Module) toolchain(ctx android.BaseModuleContext) config.Toolchain {
 	if mod.cachedToolchain == nil {
 		mod.cachedToolchain = config.FindToolchain(ctx.Os(), ctx.Arch())
@@ -833,24 +855,7 @@
 	toolchain := mod.toolchain(ctx)
 	mod.makeLinkType = cc.GetMakeLinkType(actx, mod)
 
-	// Differentiate static libraries that are vendor available
-	if mod.UseVndk() {
-		if mod.InProduct() && !mod.OnlyInProduct() {
-			mod.Properties.SubName += cc.ProductSuffix
-		} else {
-			mod.Properties.SubName += cc.VendorSuffix
-		}
-	} else if mod.InRamdisk() && !mod.OnlyInRamdisk() {
-		mod.Properties.SubName += cc.RamdiskSuffix
-	} else if mod.InVendorRamdisk() && !mod.OnlyInVendorRamdisk() {
-		mod.Properties.SubName += cc.VendorRamdiskSuffix
-	} else if mod.InRecovery() && !mod.OnlyInRecovery() {
-		mod.Properties.SubName += cc.RecoverySuffix
-	}
-
-	if mod.Target().NativeBridge == android.NativeBridgeEnabled {
-		mod.Properties.SubName += cc.NativeBridgeSuffix
-	}
+	mod.Properties.SubName = cc.GetSubnameProperty(actx, mod)
 
 	if !toolchain.Supported() {
 		// This toolchain's unsupported, there's nothing to do for this mod.
@@ -920,12 +925,13 @@
 		}
 
 		apexInfo := actx.Provider(android.ApexInfoProvider).(android.ApexInfo)
-		if !proptools.BoolDefault(mod.Installable(), mod.EverInstallable()) {
+		if !proptools.BoolDefault(mod.Installable(), mod.EverInstallable()) && !mod.ProcMacro() {
 			// If the module has been specifically configured to not be installed then
 			// hide from make as otherwise it will break when running inside make as the
 			// output path to install will not be specified. Not all uninstallable
 			// modules can be hidden from make as some are needed for resolving make
-			// side dependencies.
+			// side dependencies. In particular, proc-macros need to be captured in the
+			// host snapshot.
 			mod.HideFromMake()
 		} else if !mod.installable(apexInfo) {
 			mod.SkipInstall()
@@ -968,6 +974,7 @@
 	deps.ProcMacros = android.LastUniqueStrings(deps.ProcMacros)
 	deps.SharedLibs = android.LastUniqueStrings(deps.SharedLibs)
 	deps.StaticLibs = android.LastUniqueStrings(deps.StaticLibs)
+	deps.Stdlibs = android.LastUniqueStrings(deps.Stdlibs)
 	deps.WholeStaticLibs = android.LastUniqueStrings(deps.WholeStaticLibs)
 	return deps
 
@@ -1045,7 +1052,7 @@
 }
 
 func (mod *Module) Prebuilt() *android.Prebuilt {
-	if p, ok := mod.compiler.(*prebuiltLibraryDecorator); ok {
+	if p, ok := mod.compiler.(rustPrebuilt); ok {
 		return p.prebuilt()
 	}
 	return nil
@@ -1361,13 +1368,12 @@
 	}
 
 	// rlibs
+	rlibDepVariations = append(rlibDepVariations, blueprint.Variation{Mutator: "rust_libraries", Variation: rlibVariation})
 	for _, lib := range deps.Rlibs {
 		depTag := rlibDepTag
 		lib = cc.RewriteSnapshotLib(lib, cc.GetSnapshot(mod, &snapshotInfo, actx).Rlibs)
 
-		actx.AddVariationDependencies(append(rlibDepVariations, []blueprint.Variation{
-			{Mutator: "rust_libraries", Variation: rlibVariation},
-		}...), depTag, lib)
+		actx.AddVariationDependencies(rlibDepVariations, depTag, lib)
 	}
 
 	// dylibs
@@ -1379,21 +1385,25 @@
 	// rustlibs
 	if deps.Rustlibs != nil && !mod.compiler.Disabled() {
 		autoDep := mod.compiler.(autoDeppable).autoDep(ctx)
-		if autoDep.depTag == rlibDepTag {
-			for _, lib := range deps.Rustlibs {
-				depTag := autoDep.depTag
-				lib = cc.RewriteSnapshotLib(lib, cc.GetSnapshot(mod, &snapshotInfo, actx).Rlibs)
-				actx.AddVariationDependencies(append(rlibDepVariations, []blueprint.Variation{
-					{Mutator: "rust_libraries", Variation: autoDep.variation},
-				}...), depTag, lib)
+		for _, lib := range deps.Rustlibs {
+			if autoDep.depTag == rlibDepTag {
+				// Handle the rlib deptag case
+				addRlibDependency(actx, lib, mod, snapshotInfo, rlibDepVariations)
+			} else {
+				// autoDep.depTag is a dylib depTag, but not all rustlibs may be available as a dylib.
+				// Check for the existence of the dylib variant; select it if available,
+				// otherwise fall back to the rlib variant.
+				autoDepVariations := append(commonDepVariations,
+					blueprint.Variation{Mutator: "rust_libraries", Variation: autoDep.variation})
+				if actx.OtherModuleDependencyVariantExists(autoDepVariations, lib) {
+					actx.AddVariationDependencies(autoDepVariations, autoDep.depTag, lib)
+				} else {
+					// If there's no dylib dependency available, try to add the rlib dependency instead.
+					addRlibDependency(actx, lib, mod, snapshotInfo, rlibDepVariations)
+				}
 			}
-		} else {
-			actx.AddVariationDependencies(
-				append(commonDepVariations, blueprint.Variation{Mutator: "rust_libraries", Variation: autoDep.variation}),
-				autoDep.depTag, deps.Rustlibs...)
 		}
 	}
-
 	// stdlibs
 	if deps.Stdlibs != nil {
 		if mod.compiler.stdLinkage(ctx) == RlibLinkage {
@@ -1469,6 +1479,12 @@
 	actx.AddFarVariationDependencies(ctx.Config().BuildOSTarget.Variations(), procMacroDepTag, deps.ProcMacros...)
 }
 
+// addRlibDependency will add an rlib dependency, rewriting to the snapshot library if available.
+func addRlibDependency(actx android.BottomUpMutatorContext, lib string, mod *Module, snapshotInfo *cc.SnapshotInfo, variations []blueprint.Variation) {
+	lib = cc.RewriteSnapshotLib(lib, cc.GetSnapshot(mod, &snapshotInfo, actx).Rlibs)
+	actx.AddVariationDependencies(variations, rlibDepTag, lib)
+}
+
 func BeginMutator(ctx android.BottomUpMutatorContext) {
 	if mod, ok := ctx.Module().(*Module); ok && mod.Enabled() {
 		mod.beginMutator(ctx)
@@ -1500,6 +1516,7 @@
 }
 
 var _ android.HostToolProvider = (*Module)(nil)
+var _ snapshot.RelativeInstallPath = (*Module)(nil)
 
 func (mod *Module) HostToolPath() android.OptionalPath {
 	if !mod.Host() {
@@ -1507,6 +1524,10 @@
 	}
 	if binary, ok := mod.compiler.(*binaryDecorator); ok {
 		return android.OptionalPathForPath(binary.baseCompiler.path)
+	} else if pm, ok := mod.compiler.(*procMacroDecorator); ok {
+		// Even though proc-macros aren't strictly "tools", they target the compiler
+		// and act as compiler plugins, so we treat them similarly.
+		return android.OptionalPathForPath(pm.baseCompiler.path)
 	}
 	return android.OptionalPath{}
 }
diff --git a/rust/sanitize.go b/rust/sanitize.go
index be9dc42..39aaf33 100644
--- a/rust/sanitize.go
+++ b/rust/sanitize.go
@@ -57,7 +57,7 @@
 }
 
 var fuzzerFlags = []string{
-	"-C passes='sancov'",
+	"-C passes='sancov-module'",
 
 	"--cfg fuzzing",
 	"-C llvm-args=-sanitizer-coverage-level=3",
@@ -70,7 +70,7 @@
 	"-C link-dead-code",
 
 	// Sancov breaks with lto
-	// TODO: Remove when https://bugs.llvm.org/show_bug.cgi?id=41734 is resolved and sancov works with LTO
+	// TODO: Remove when https://bugs.llvm.org/show_bug.cgi?id=41734 is resolved and sancov-module works with LTO
 	"-C lto=no",
 }
 
diff --git a/rust/testing.go b/rust/testing.go
index 1b34dfe..cb98bed 100644
--- a/rust/testing.go
+++ b/rust/testing.go
@@ -88,13 +88,13 @@
 			export_include_dirs: ["libprotobuf-cpp-full-includes"],
 		}
 		cc_library {
-			name: "libclang_rt.asan-aarch64-android",
+			name: "libclang_rt.asan",
 			no_libcrt: true,
 			nocrt: true,
 			system_shared_libs: [],
 		}
 		cc_library {
-			name: "libclang_rt.hwasan_static-aarch64-android",
+			name: "libclang_rt.hwasan_static",
 			no_libcrt: true,
 			nocrt: true,
 			system_shared_libs: [],
diff --git a/rust/vendor_snapshot_test.go b/rust/vendor_snapshot_test.go
index 03bd867..7be0042 100644
--- a/rust/vendor_snapshot_test.go
+++ b/rust/vendor_snapshot_test.go
@@ -561,7 +561,7 @@
 				static_libs: [
 					"libvendor",
 					"libvndk",
-					"libclang_rt.builtins-aarch64-android",
+					"libclang_rt.builtins",
 					"note_memtag_heap_sync",
 				],
 				shared_libs: [
@@ -589,7 +589,7 @@
 				static_libs: [
 					"libvendor",
 					"libvndk",
-					"libclang_rt.builtins-arm-android",
+					"libclang_rt.builtins",
 				],
 				shared_libs: [
 					"libvendor_available",
@@ -731,19 +731,7 @@
 	}
 
 	vendor_snapshot_static {
-		name: "libclang_rt.builtins-aarch64-android",
-		version: "30",
-		target_arch: "arm64",
-		vendor: true,
-		arch: {
-			arm64: {
-				src: "libclang_rt.builtins-aarch64-android.a",
-			},
-		},
-    }
-
-    vendor_snapshot_static {
-		name: "libclang_rt.builtins-arm-android",
+		name: "libclang_rt.builtins",
 		version: "30",
 		target_arch: "arm64",
 		vendor: true,
@@ -751,6 +739,9 @@
 			arm: {
 				src: "libclang_rt.builtins-arm-android.a",
 			},
+			arm64: {
+				src: "libclang_rt.builtins-aarch64-android.a",
+			},
 		},
     }
 
@@ -967,7 +958,7 @@
 	}
 
 	libclientAndroidMkStaticLibs := ctx.ModuleForTests("libclient", sharedVariant).Module().(*Module).Properties.AndroidMkStaticLibs
-	if g, w := libclientAndroidMkStaticLibs, []string{"libvendor", "libvendor_without_snapshot", "libclang_rt.builtins-aarch64-android.vendor"}; !reflect.DeepEqual(g, w) {
+	if g, w := libclientAndroidMkStaticLibs, []string{"libvendor", "libvendor_without_snapshot", "libclang_rt.builtins.vendor"}; !reflect.DeepEqual(g, w) {
 		t.Errorf("wanted libclient AndroidMkStaticLibs %q, got %q", w, g)
 	}
 
@@ -1024,7 +1015,7 @@
 	}
 
 	memtagStaticLibs := ctx.ModuleForTests("memtag_binary", "android_vendor.30_arm64_armv8-a").Module().(*Module).Properties.AndroidMkStaticLibs
-	if g, w := memtagStaticLibs, []string{"libclang_rt.builtins-aarch64-android.vendor", "note_memtag_heap_sync.vendor"}; !reflect.DeepEqual(g, w) {
+	if g, w := memtagStaticLibs, []string{"libclang_rt.builtins.vendor", "note_memtag_heap_sync.vendor"}; !reflect.DeepEqual(g, w) {
 		t.Errorf("wanted memtag_binary AndroidMkStaticLibs %q, got %q", w, g)
 	}
 }
diff --git a/scripts/Android.bp b/scripts/Android.bp
index 4c847a1..4773579 100644
--- a/scripts/Android.bp
+++ b/scripts/Android.bp
@@ -188,3 +188,8 @@
         "get_clang_version.py",
     ],
 }
+
+sh_binary_host {
+    name: "list_image",
+    src: "list_image.sh",
+}
diff --git a/scripts/build-ndk-prebuilts.sh b/scripts/build-ndk-prebuilts.sh
index 4783037..b57963b 100755
--- a/scripts/build-ndk-prebuilts.sh
+++ b/scripts/build-ndk-prebuilts.sh
@@ -19,54 +19,9 @@
     exit 1
 fi
 
-TOP=$(pwd)
-
-source build/envsetup.sh
-PLATFORM_SDK_VERSION=$(get_build_var PLATFORM_SDK_VERSION)
-PLATFORM_VERSION_ALL_CODENAMES=$(get_build_var PLATFORM_VERSION_ALL_CODENAMES)
-
-# PLATFORM_VERSION_ALL_CODENAMES is a comma separated list like O,P. We need to
-# turn this into ["O","P"].
-PLATFORM_VERSION_ALL_CODENAMES=${PLATFORM_VERSION_ALL_CODENAMES/,/'","'}
-PLATFORM_VERSION_ALL_CODENAMES="[\"${PLATFORM_VERSION_ALL_CODENAMES}\"]"
-
-# Get the list of missing <uses-library> modules and convert it to a JSON array
-# (quote module names, add comma separator and wrap in brackets).
-MISSING_USES_LIBRARIES="$(get_build_var INTERNAL_PLATFORM_MISSING_USES_LIBRARIES)"
-MISSING_USES_LIBRARIES="[$(echo $MISSING_USES_LIBRARIES | sed -e 's/\([^ ]\+\)/\"\1\"/g' -e 's/[ ]\+/, /g')]"
-
-SOONG_OUT=${OUT_DIR}/soong
-SOONG_NDK_OUT=${OUT_DIR}/soong/ndk
-rm -rf ${SOONG_OUT}
-mkdir -p ${SOONG_OUT}
-
-# We only really need to set some of these variables, but soong won't merge this
-# with the defaults, so we need to write out all the defaults with our values
-# added.
-cat > ${SOONG_OUT}/soong.variables << EOF
-{
-    "Platform_sdk_version": ${PLATFORM_SDK_VERSION},
-    "Platform_version_active_codenames": ${PLATFORM_VERSION_ALL_CODENAMES},
-
-    "DeviceName": "generic_arm64",
-    "HostArch": "x86_64",
-    "Malloc_not_svelte": false,
-    "Safestack": false,
-
-    "Ndk_abis": true,
-
-    "VendorVars": {
-        "art_module": {
-            "source_build": "true"
-        }
-    },
-
-    "MissingUsesLibraries": ${MISSING_USES_LIBRARIES}
-}
-EOF
-m --soong-only --skip-config ${SOONG_OUT}/ndk.timestamp
+TARGET_PRODUCT=ndk build/soong/soong_ui.bash --make-mode --soong-only ${OUT_DIR}/soong/ndk.timestamp
 
 if [ -n "${DIST_DIR}" ]; then
     mkdir -p ${DIST_DIR} || true
-    tar cjf ${DIST_DIR}/ndk_platform.tar.bz2 -C ${SOONG_OUT} ndk
+    tar cjf ${DIST_DIR}/ndk_platform.tar.bz2 -C ${OUT_DIR}/soong ndk
 fi
diff --git a/scripts/generate-notice-files.py b/scripts/generate-notice-files.py
deleted file mode 100755
index 1b4acfa..0000000
--- a/scripts/generate-notice-files.py
+++ /dev/null
@@ -1,272 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2012 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Usage: generate-notice-files --text-output [plain text output file] \
-               --html-output [html output file] \
-               --xml-output [xml output file] \
-               -t [file title] -s [directory of notices]
-
-Generate the Android notice files, including both text and html files.
-
--h to display this usage message and exit.
-"""
-from collections import defaultdict
-import argparse
-import hashlib
-import itertools
-import os
-import os.path
-import re
-import struct
-import sys
-
-MD5_BLOCKSIZE = 1024 * 1024
-HTML_ESCAPE_TABLE = {
-    b"&": b"&amp;",
-    b'"': b"&quot;",
-    b"'": b"&apos;",
-    b">": b"&gt;",
-    b"<": b"&lt;",
-    }
-
-def md5sum(filename):
-    """Calculate an MD5 of the file given by FILENAME,
-    and return hex digest as a string.
-    Output should be compatible with md5sum command"""
-
-    f = open(filename, "rb")
-    sum = hashlib.md5()
-    while 1:
-        block = f.read(MD5_BLOCKSIZE)
-        if not block:
-            break
-        sum.update(block)
-    f.close()
-    return sum.hexdigest()
-
-
-def html_escape(text):
-    """Produce entities within text."""
-    # Using for i in text doesn't work since i will be an int, not a byte.
-    # There are multiple ways to solve this, but the most performant way
-    # to iterate over a byte array is to use unpack. Using the
-    # for i in range(len(text)) and using that to get a byte using array
-    # slices is twice as slow as this method.
-    return b"".join(HTML_ESCAPE_TABLE.get(i,i) for i in struct.unpack(str(len(text)) + 'c', text))
-
-HTML_OUTPUT_CSS=b"""
-<style type="text/css">
-body { padding: 0; font-family: sans-serif; }
-.same-license { background-color: #eeeeee; border-top: 20px solid white; padding: 10px; }
-.label { font-weight: bold; }
-.file-list { margin-left: 1em; color: blue; }
-</style>
-
-"""
-
-def combine_notice_files_html(file_hash, input_dir, output_filename):
-    """Combine notice files in FILE_HASH and output a HTML version to OUTPUT_FILENAME."""
-
-    SRC_DIR_STRIP_RE = re.compile(input_dir + "(/.*).txt")
-
-    # Set up a filename to row id table (anchors inside tables don't work in
-    # most browsers, but href's to table row ids do)
-    id_table = {}
-    id_count = 0
-    for value in file_hash:
-        for filename in value:
-             id_table[filename] = id_count
-        id_count += 1
-
-    # Open the output file, and output the header pieces
-    output_file = open(output_filename, "wb")
-
-    output_file.write(b"<html><head>\n")
-    output_file.write(HTML_OUTPUT_CSS)
-    output_file.write(b'</head><body topmargin="0" leftmargin="0" rightmargin="0" bottommargin="0">\n')
-
-    # Output our table of contents
-    output_file.write(b'<div class="toc">\n')
-    output_file.write(b"<ul>\n")
-
-    # Flatten the list of lists into a single list of filenames
-    sorted_filenames = sorted(itertools.chain.from_iterable(file_hash))
-
-    # Print out a nice table of contents
-    for filename in sorted_filenames:
-        stripped_filename = SRC_DIR_STRIP_RE.sub(r"\1", filename)
-        output_file.write(('<li><a href="#id%d">%s</a></li>\n' % (id_table.get(filename), stripped_filename)).encode())
-
-    output_file.write(b"</ul>\n")
-    output_file.write(b"</div><!-- table of contents -->\n")
-    # Output the individual notice file lists
-    output_file.write(b'<table cellpadding="0" cellspacing="0" border="0">\n')
-    for value in file_hash:
-        output_file.write(('<tr id="id%d"><td class="same-license">\n' % id_table.get(value[0])).encode())
-        output_file.write(b'<div class="label">Notices for file(s):</div>\n')
-        output_file.write(b'<div class="file-list">\n')
-        for filename in value:
-            output_file.write(("%s <br/>\n" % (SRC_DIR_STRIP_RE.sub(r"\1", filename))).encode())
-        output_file.write(b"</div><!-- file-list -->\n\n")
-        output_file.write(b'<pre class="license-text">\n')
-        with open(value[0], "rb") as notice_file:
-            output_file.write(html_escape(notice_file.read()))
-        output_file.write(b"\n</pre><!-- license-text -->\n")
-        output_file.write(b"</td></tr><!-- same-license -->\n\n\n\n")
-
-    # Finish off the file output
-    output_file.write(b"</table>\n")
-    output_file.write(b"</body></html>\n")
-    output_file.close()
-
-def combine_notice_files_text(file_hash, input_dir, output_filename, file_title):
-    """Combine notice files in FILE_HASH and output a text version to OUTPUT_FILENAME."""
-
-    SRC_DIR_STRIP_RE = re.compile(input_dir + "(/.*).txt")
-    output_file = open(output_filename, "wb")
-    output_file.write(file_title.encode())
-    output_file.write(b"\n")
-    for value in file_hash:
-        output_file.write(b"============================================================\n")
-        output_file.write(b"Notices for file(s):\n")
-        for filename in value:
-            output_file.write(SRC_DIR_STRIP_RE.sub(r"\1", filename).encode())
-            output_file.write(b"\n")
-        output_file.write(b"------------------------------------------------------------\n")
-        with open(value[0], "rb") as notice_file:
-            output_file.write(notice_file.read())
-            output_file.write(b"\n")
-    output_file.close()
-
-def combine_notice_files_xml(files_with_same_hash, input_dir, output_filename):
-    """Combine notice files in FILE_HASH and output a XML version to OUTPUT_FILENAME."""
-
-    SRC_DIR_STRIP_RE = re.compile(input_dir + "(/.*).txt")
-
-    # Set up a filename to row id table (anchors inside tables don't work in
-    # most browsers, but href's to table row ids do)
-    id_table = {}
-    for file_key, files in files_with_same_hash.items():
-        for filename in files:
-             id_table[filename] = file_key
-
-    # Open the output file, and output the header pieces
-    output_file = open(output_filename, "wb")
-
-    output_file.write(b'<?xml version="1.0" encoding="utf-8"?>\n')
-    output_file.write(b"<licenses>\n")
-
-    # Flatten the list of lists into a single list of filenames
-    sorted_filenames = sorted(list(id_table))
-
-    # Print out a nice table of contents
-    for filename in sorted_filenames:
-        stripped_filename = SRC_DIR_STRIP_RE.sub(r"\1", filename)
-        output_file.write(('<file-name contentId="%s">%s</file-name>\n' % (id_table.get(filename), stripped_filename)).encode())
-    output_file.write(b"\n\n")
-
-    processed_file_keys = []
-    # Output the individual notice file lists
-    for filename in sorted_filenames:
-        file_key = id_table.get(filename)
-        if file_key in processed_file_keys:
-            continue
-        processed_file_keys.append(file_key)
-
-        output_file.write(('<file-content contentId="%s"><![CDATA[' % file_key).encode())
-        with open(filename, "rb") as notice_file:
-            output_file.write(html_escape(notice_file.read()))
-        output_file.write(b"]]></file-content>\n\n")
-
-    # Finish off the file output
-    output_file.write(b"</licenses>\n")
-    output_file.close()
-
-def get_args():
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        '--text-output', required=True,
-        help='The text output file path.')
-    parser.add_argument(
-        '--html-output',
-        help='The html output file path.')
-    parser.add_argument(
-        '--xml-output',
-        help='The xml output file path.')
-    parser.add_argument(
-        '-t', '--title', required=True,
-        help='The file title.')
-    parser.add_argument(
-        '-s', '--source-dir', required=True,
-        help='The directory containing notices.')
-    parser.add_argument(
-        '-i', '--included-subdirs', action='append',
-        help='The sub directories which should be included.')
-    parser.add_argument(
-        '-e', '--excluded-subdirs', action='append',
-        help='The sub directories which should be excluded.')
-    return parser.parse_args()
-
-def main(argv):
-    args = get_args()
-
-    txt_output_file = args.text_output
-    html_output_file = args.html_output
-    xml_output_file = args.xml_output
-    file_title = args.title
-    included_subdirs = []
-    excluded_subdirs = []
-    if args.included_subdirs is not None:
-        included_subdirs = args.included_subdirs
-    if args.excluded_subdirs is not None:
-        excluded_subdirs = args.excluded_subdirs
-
-    # Find all the notice files and md5 them
-    input_dir = os.path.normpath(args.source_dir)
-    files_with_same_hash = defaultdict(list)
-    for root, dir, files in os.walk(input_dir):
-        for file in files:
-            matched = True
-            if len(included_subdirs) > 0:
-                matched = False
-                for subdir in included_subdirs:
-                    if (root == (input_dir + '/' + subdir) or
-                        root.startswith(input_dir + '/' + subdir + '/')):
-                        matched = True
-                        break
-            elif len(excluded_subdirs) > 0:
-                for subdir in excluded_subdirs:
-                    if (root == (input_dir + '/' + subdir) or
-                        root.startswith(input_dir + '/' + subdir + '/')):
-                        matched = False
-                        break
-            if matched and file.endswith(".txt"):
-                filename = os.path.join(root, file)
-                file_md5sum = md5sum(filename)
-                files_with_same_hash[file_md5sum].append(filename)
-
-    filesets = [sorted(files_with_same_hash[md5]) for md5 in sorted(list(files_with_same_hash))]
-
-    combine_notice_files_text(filesets, input_dir, txt_output_file, file_title)
-
-    if html_output_file is not None:
-        combine_notice_files_html(filesets, input_dir, html_output_file)
-
-    if xml_output_file is not None:
-        combine_notice_files_xml(files_with_same_hash, input_dir, xml_output_file)
-
-if __name__ == "__main__":
-    main(sys.argv)
diff --git a/scripts/hiddenapi/Android.bp b/scripts/hiddenapi/Android.bp
index 7ffda62..07878f9 100644
--- a/scripts/hiddenapi/Android.bp
+++ b/scripts/hiddenapi/Android.bp
@@ -19,6 +19,52 @@
 }
 
 python_binary_host {
+    name: "analyze_bcpf",
+    main: "analyze_bcpf.py",
+    srcs: ["analyze_bcpf.py"],
+    // Make sure that the bpmodify tool is built.
+    data: [":bpmodify"],
+    libs: [
+        "signature_trie",
+    ],
+    version: {
+        py2: {
+            enabled: false,
+        },
+        py3: {
+            enabled: true,
+            embedded_launcher: true,
+        },
+    },
+}
+
+python_test_host {
+    name: "analyze_bcpf_test",
+    main: "analyze_bcpf_test.py",
+    srcs: [
+        "analyze_bcpf.py",
+        "analyze_bcpf_test.py",
+    ],
+    // Make sure that the bpmodify tool is built.
+    data: [":bpmodify"],
+    libs: [
+        "signature_trie",
+    ],
+    version: {
+        py2: {
+            enabled: false,
+        },
+        py3: {
+            enabled: true,
+            embedded_launcher: true,
+        },
+    },
+    test_options: {
+        unit_test: true,
+    },
+}
+
+python_binary_host {
     name: "merge_csv",
     main: "merge_csv.py",
     srcs: ["merge_csv.py"],
@@ -69,10 +115,37 @@
     },
 }
 
+python_library_host {
+    name: "signature_trie",
+    srcs: ["signature_trie.py"],
+}
+
+python_test_host {
+    name: "signature_trie_test",
+    main: "signature_trie_test.py",
+    srcs: ["signature_trie_test.py"],
+    libs: ["signature_trie"],
+    version: {
+        py2: {
+            enabled: false,
+        },
+        py3: {
+            enabled: true,
+            embedded_launcher: true,
+        },
+    },
+    test_options: {
+        unit_test: true,
+    },
+}
+
 python_binary_host {
     name: "verify_overlaps",
     main: "verify_overlaps.py",
     srcs: ["verify_overlaps.py"],
+    libs: [
+        "signature_trie",
+    ],
     version: {
         py2: {
             enabled: false,
@@ -91,6 +164,9 @@
         "verify_overlaps.py",
         "verify_overlaps_test.py",
     ],
+    libs: [
+        "signature_trie",
+    ],
     version: {
         py2: {
             enabled: false,
diff --git a/scripts/hiddenapi/analyze_bcpf.py b/scripts/hiddenapi/analyze_bcpf.py
new file mode 100644
index 0000000..595343b
--- /dev/null
+++ b/scripts/hiddenapi/analyze_bcpf.py
@@ -0,0 +1,1477 @@
+#!/usr/bin/env -S python -u
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Analyze bootclasspath_fragment usage."""
+import argparse
+import dataclasses
+import enum
+import json
+import logging
+import os
+import re
+import shutil
+import subprocess
+import tempfile
+import textwrap
+import typing
+from enum import Enum
+
+import sys
+
+from signature_trie import signature_trie
+
+_STUB_FLAGS_FILE = "out/soong/hiddenapi/hiddenapi-stub-flags.txt"
+
+_FLAGS_FILE = "out/soong/hiddenapi/hiddenapi-flags.csv"
+
+_INCONSISTENT_FLAGS = "ERROR: Hidden API flags are inconsistent:"
+
+
+class BuildOperation:
+
+    def __init__(self, popen):
+        self.popen = popen
+        self.returncode = None
+
+    def lines(self):
+        """Return an iterator over the lines output by the build operation.
+
+        The lines have had any trailing white space, including the newline
+        stripped.
+        """
+        return newline_stripping_iter(self.popen.stdout.readline)
+
+    def wait(self, *args, **kwargs):
+        self.popen.wait(*args, **kwargs)
+        self.returncode = self.popen.returncode
+
+
+@dataclasses.dataclass()
+class FlagDiffs:
+    """Encapsulates differences in flags reported by the build"""
+
+    # Map from member signature to the (module flags, monolithic flags)
+    diffs: typing.Dict[str, typing.Tuple[str, str]]
+
+
+@dataclasses.dataclass()
+class ModuleInfo:
+    """Provides access to the generated module-info.json file.
+
+    This is used to find the location of the file within which specific modules
+    are defined.
+    """
+
+    modules: typing.Dict[str, typing.Dict[str, typing.Any]]
+
+    @staticmethod
+    def load(filename):
+        with open(filename, "r", encoding="utf8") as f:
+            j = json.load(f)
+            return ModuleInfo(j)
+
+    def _module(self, module_name):
+        """Find module by name in module-info.json file"""
+        if module_name in self.modules:
+            return self.modules[module_name]
+
+        raise Exception(f"Module {module_name} could not be found")
+
+    def module_path(self, module_name):
+        module = self._module(module_name)
+        # The "path" is actually a list of paths, one for each class of module
+        # but as the modules are all created from bp files if a module does
+        # create multiple classes of make modules they should all have the same
+        # path.
+        paths = module["path"]
+        unique_paths = set(paths)
+        if len(unique_paths) != 1:
+            raise Exception(f"Expected module '{module_name}' to have a "
+                            f"single unique path but found {unique_paths}")
+        return paths[0]
+
+
+def extract_indent(line):
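+    # Return the leading whitespace of the line, e.g. the four spaces at the
+    # start of '    "android/os",'.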
+    return re.match(r"([ \t]*)", line).group(1)
+
+
+_SPECIAL_PLACEHOLDER: str = "SPECIAL_PLACEHOLDER"
+
+
+@dataclasses.dataclass
+class BpModifyRunner:
+
+    bpmodify_path: str
+
+    def add_values_to_property(self, property_name, values, module_name,
+                               bp_file):
+        cmd = [
+            self.bpmodify_path, "-a", values, "-property", property_name, "-m",
+            module_name, "-w", bp_file, bp_file
+        ]
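+        # An illustrative invocation (module and file names here are placeholders):
+        #   bpmodify -a "android/os" -property hidden_api.split_packages \
+        #     -m some-bootclasspath-fragment -w path/to/Android.bp path/to/Android.bp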
+
+        logging.debug(" ".join(cmd))
+        subprocess.run(
+            cmd,
+            stderr=subprocess.STDOUT,
+            stdout=log_stream_for_subprocess(),
+            check=True)
+
+
+@dataclasses.dataclass
+class FileChange:
+    path: str
+
+    description: str
+
+    def __lt__(self, other):
+        return self.path < other.path
+
+
+class PropertyChangeAction(Enum):
+    """Allowable actions that are supported by HiddenApiPropertyChange."""
+
+    # New values are appended to any existing values.
+    APPEND = 1
+
+    # New values replace any existing values.
+    REPLACE = 2
+
+
+@dataclasses.dataclass
+class HiddenApiPropertyChange:
+
+    property_name: str
+
+    values: typing.List[str]
+
+    property_comment: str = ""
+
+    # The action that indicates how this change is applied.
+    action: PropertyChangeAction = PropertyChangeAction.APPEND
+
+    def snippet(self, indent):
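+        # Sketch of the output, assuming property_name="split_packages", a short
+        # property_comment and values=["android/os"] with a four-space indent:
+        #
+        #     // <property_comment rendered as a line comment>
+        #     split_packages: [
+        #         "android/os",
+        #     ],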
+        snippet = "\n"
+        snippet += format_comment_as_text(self.property_comment, indent)
+        snippet += f"{indent}{self.property_name}: ["
+        if self.values:
+            snippet += "\n"
+            for value in self.values:
+                snippet += f'{indent}    "{value}",\n'
+            snippet += f"{indent}"
+        snippet += "],\n"
+        return snippet
+
+    def fix_bp_file(self, bcpf_bp_file, bcpf, bpmodify_runner: BpModifyRunner):
+        # Add an additional placeholder value to identify the modification that
+        # bpmodify makes.
+        bpmodify_values = [_SPECIAL_PLACEHOLDER]
+
+        if self.action == PropertyChangeAction.APPEND:
+            # If adding the values to the existing values then pass the new
+            # values to bpmodify.
+            bpmodify_values.extend(self.values)
+        elif self.action == PropertyChangeAction.REPLACE:
+            # If replacing the existing values then it is not possible to use
+            # bpmodify for that directly. It could be used twice to remove the
+            # existing property and then add a new one but that does not remove
+            # any related comments and loses the position of the existing
+            # property as the new property is always added to the end of the
+            # containing block.
+            #
+            # So, instead of passing the new values to bpmodify, this just
+            # adds an extra placeholder to force bpmodify to format the list
+            # across multiple lines to ensure a consistent structure for the
+            # code that removes all the existing values and adds the new ones.
+            #
+            # This placeholder has to be different to the other placeholder as
+            # bpmodify dedups values.
+            bpmodify_values.append(_SPECIAL_PLACEHOLDER + "_REPLACE")
+        else:
+            raise ValueError(f"unknown action {self.action}")
+
+        packages = ",".join(bpmodify_values)
+        bpmodify_runner.add_values_to_property(
+            f"hidden_api.{self.property_name}", packages, bcpf, bcpf_bp_file)
+
+        with open(bcpf_bp_file, "r", encoding="utf8") as tio:
+            lines = tio.readlines()
+            lines = [line.rstrip("\n") for line in lines]
+
+        if self.fixup_bpmodify_changes(bcpf_bp_file, lines):
+            with open(bcpf_bp_file, "w", encoding="utf8") as tio:
+                for line in lines:
+                    print(line, file=tio)
+
+    def fixup_bpmodify_changes(self, bcpf_bp_file, lines):
+        """Fixup the output of bpmodify.
+
+        The bpmodify tool does not support all the capabilities that this needs,
+        so it is used to do what it can, including marking the place in the
+        Android.bp file where it makes its changes. This method is then passed a
+        list of lines from that file, which it modifies to complete the change.
+
+        This analyzes the list of lines to find the indices of the significant
+        lines and then applies some changes. As those changes can insert and
+        delete lines (changing the indices of following lines) the changes are
+        generally done in reverse order starting from the end and working
+        towards the beginning. That ensures that the changes do not invalidate
+        the indices of following lines.
+        """
+
+        # Find the line containing the placeholder that has been inserted.
+        place_holder_index = -1
+        for i, line in enumerate(lines):
+            if _SPECIAL_PLACEHOLDER in line:
+                place_holder_index = i
+                break
+        if place_holder_index == -1:
+            logging.debug("Could not find %s in %s", _SPECIAL_PLACEHOLDER,
+                          bcpf_bp_file)
+            return False
+
+        # Remove the place holder. Do this before inserting the comment as that
+        # would change the location of the place holder in the list.
+        place_holder_line = lines[place_holder_index]
+        if place_holder_line.endswith("],"):
+            place_holder_line = place_holder_line.replace(
+                f'"{_SPECIAL_PLACEHOLDER}"', "")
+            lines[place_holder_index] = place_holder_line
+        else:
+            del lines[place_holder_index]
+
+        # Scan forward to the end of the property block to remove a blank line
+        # that bpmodify inserts.
+        end_property_array_index = -1
+        for i in range(place_holder_index, len(lines)):
+            line = lines[i]
+            if line.endswith("],"):
+                end_property_array_index = i
+                break
+        if end_property_array_index == -1:
+            logging.debug("Could not find end of property array in %s",
+                          bcpf_bp_file)
+            return False
+
+        # If bpmodify inserted a blank line afterwards then remove it.
+        if (not lines[end_property_array_index + 1] and
+                lines[end_property_array_index + 2].endswith("},")):
+            del lines[end_property_array_index + 1]
+
+        # Scan back to find the preceding property line.
+        property_line_index = -1
+        for i in range(place_holder_index, 0, -1):
+            line = lines[i]
+            if line.lstrip().startswith(f"{self.property_name}: ["):
+                property_line_index = i
+                break
+        if property_line_index == -1:
+            logging.debug("Could not find property line in %s", bcpf_bp_file)
+            return False
+
+        # If this change is replacing the existing values then they need to be
+        # removed and replaced with the new values. That will change the lines
+        # after the property but it is necessary to do here as the following
+        # code operates on earlier lines.
+        if self.action == PropertyChangeAction.REPLACE:
+            # This removes the existing values and replaces them with the new
+            # values.
+            indent = extract_indent(lines[property_line_index + 1])
+            insert = [f'{indent}"{x}",' for x in self.values]
+            lines[property_line_index + 1:end_property_array_index] = insert
+            if not self.values:
+                # If the property has no values then merge the ], onto the
+                # same line as the property name.
+                del lines[property_line_index + 1]
+                lines[property_line_index] = lines[property_line_index] + "],"
+
+        # Only insert a comment if the property does not already have a comment.
+        line_preceding_property = lines[(property_line_index - 1)]
+        if (self.property_comment and
+                not re.match("([ \t]+)// ", line_preceding_property)):
+            # Extract the indent from the property line and use it to format the
+            # comment.
+            indent = extract_indent(lines[property_line_index])
+            comment_lines = format_comment_as_lines(self.property_comment,
+                                                    indent)
+
+            # If the line before the comment is not blank then insert an extra
+            # blank line at the beginning of the comment.
+            if line_preceding_property:
+                comment_lines.insert(0, "")
+
+            # Insert the comment before the property.
+            lines[property_line_index:property_line_index] = comment_lines
+        return True
+
+
+@dataclasses.dataclass()
+class PackagePropertyReason:
+    """Provides the reasons why a package was added to a specific property.
+
+    A split package is one that contains classes from the bootclasspath_fragment
+    and other bootclasspath modules. So, for a split package this contains the
+    corresponding lists of classes.
+
+    A single package is one that contains classes/sub-packages from the
+    bootclasspath_fragment but not from any other bootclasspath module. For a
+    single package this contains the list of classes/sub-packages in that
+    package that are provided by the bootclasspath_fragment; the corresponding
+    list from other modules is empty.
+    """
+
+    # The list of classes/sub-packages that is provided by the
+    # bootclasspath_fragment.
+    bcpf: typing.List[str]
+
+    # The list of classes/sub-packages that is provided by other modules on the
+    # bootclasspath.
+    other: typing.List[str]
+
+
+@dataclasses.dataclass()
+class Result:
+    """Encapsulates the result of the analysis."""
+
+    # The diffs in the flags.
+    diffs: typing.Optional[FlagDiffs] = None
+
+    # A map from package name to the reason why it belongs in the
+    # split_packages property.
+    split_packages: typing.Dict[str, PackagePropertyReason] = dataclasses.field(
+        default_factory=dict)
+
+    # A map from package name to the reason why it belongs in the
+    # single_packages property.
+    single_packages: typing.Dict[str,
+                                 PackagePropertyReason] = dataclasses.field(
+                                     default_factory=dict)
+
+    # The list of packages to add to the package_prefixes property.
+    package_prefixes: typing.List[str] = dataclasses.field(default_factory=list)
+
+    # The bootclasspath_fragment hidden API properties changes.
+    property_changes: typing.List[HiddenApiPropertyChange] = dataclasses.field(
+        default_factory=list)
+
+    # The list of file changes.
+    file_changes: typing.List[FileChange] = dataclasses.field(
+        default_factory=list)
+
+
+class ClassProvider(enum.Enum):
+    """The source of a class found during the hidden API processing"""
+    BCPF = "bcpf"
+    OTHER = "other"
+
+
+# A fake member to use when using the signature trie to compute the package
+# properties from hidden API flags. This is needed because, while that
+# computation only cares about classes, the trie expects a class to be an
+# interior node, but without a member the class becomes a leaf node. That causes
+# problems when analyzing inner classes as the outer class is a leaf node for
+# its own entry but is used as an interior node for inner classes.
+_FAKE_MEMBER = ";->fake()V"
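+# For example, a class entry such as "Lcom/example/Outer;" can be combined with
+# this to form "Lcom/example/Outer;->fake()V" before being added to the trie,
+# keeping the class as an interior node (the class name here is illustrative).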
+
+
+@dataclasses.dataclass()
+class BcpfAnalyzer:
+    # Path to this tool.
+    tool_path: str
+
+    # Directory pointed to by ANDROID_BUILD_TOP.
+    top_dir: str
+
+    # Directory pointed to by OUT_DIR or {top_dir}/out if that is not set.
+    out_dir: str
+
+    # Directory pointed to by ANDROID_PRODUCT_OUT.
+    product_out_dir: str
+
+    # The name of the bootclasspath_fragment module.
+    bcpf: str
+
+    # The name of the apex module containing {bcpf}, only used for
+    # informational purposes.
+    apex: str
+
+    # The name of the sdk module containing {bcpf}, only used for
+    # informational purposes.
+    sdk: str
+
+    # If true then this will attempt to automatically fix any issues that are
+    # found.
+    fix: bool = False
+
+    # All the signatures, loaded from all-flags.csv, initialized by
+    # load_all_flags().
+    _signatures: typing.Set[str] = dataclasses.field(default_factory=set)
+
+    # All the classes, loaded from all-flags.csv, initialized by
+    # load_all_flags().
+    _classes: typing.Set[str] = dataclasses.field(default_factory=set)
+
+    # Information loaded from module-info.json, initialized by
+    # load_module_info().
+    module_info: ModuleInfo = None
+
+    @staticmethod
+    def reformat_report_test(text):
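+        # Join a line onto the previous one whenever the next line starts with a
+        # non-whitespace character, e.g. "foo\nbar" becomes "foo bar" while
+        # "foo\n\nbar" is left untouched.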
+        return re.sub(r"(.)\n([^\s])", r"\1 \2", text)
+
+    def report(self, text="", **kwargs):
+        # Concatenate lines that are not separated by a blank line, to undo the
+        # wrapping that was applied to the supplied text to adhere to python
+        # line length limitations.
+        text = self.reformat_report_test(text)
+        logging.info("%s", text, **kwargs)
+
+    def report_dedent(self, text, **kwargs):
+        text = textwrap.dedent(text)
+        self.report(text, **kwargs)
+
+    def run_command(self, cmd, *args, **kwargs):
+        cmd_line = " ".join(cmd)
+        logging.debug("Running %s", cmd_line)
+        subprocess.run(
+            cmd,
+            *args,
+            check=True,
+            cwd=self.top_dir,
+            stderr=subprocess.STDOUT,
+            stdout=log_stream_for_subprocess(),
+            text=True,
+            **kwargs)
+
+    @property
+    def signatures(self):
+        if not self._signatures:
+            raise Exception("signatures has not been initialized")
+        return self._signatures
+
+    @property
+    def classes(self):
+        if not self._classes:
+            raise Exception("classes has not been initialized")
+        return self._classes
+
+    def load_all_flags(self):
+        all_flags = self.find_bootclasspath_fragment_output_file(
+            "all-flags.csv")
+
+        # Extract the set of signatures and a separate set of classes produced
+        # by the bootclasspath_fragment.
+        with open(all_flags, "r", encoding="utf8") as f:
+            for line in newline_stripping_iter(f.readline):
+                signature = self.line_to_signature(line)
+                self._signatures.add(signature)
+                class_name = self.signature_to_class(signature)
+                self._classes.add(class_name)
+
+    def load_module_info(self):
+        module_info_file = os.path.join(self.product_out_dir,
+                                        "module-info.json")
+        self.report(f"\nMaking sure that {module_info_file} is up to date.\n")
+        output = self.build_file_read_output(module_info_file)
+        lines = output.lines()
+        for line in lines:
+            logging.debug("%s", line)
+        output.wait(timeout=10)
+        if output.returncode:
+            raise Exception(f"Error building {module_info_file}")
+        abs_module_info_file = os.path.join(self.top_dir, module_info_file)
+        self.module_info = ModuleInfo.load(abs_module_info_file)
+
+    @staticmethod
+    def line_to_signature(line):
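+        # e.g. "Lfoo/Bar;->baz()V,blocked" -> "Lfoo/Bar;->baz()V"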
+        return line.split(",")[0]
+
+    @staticmethod
+    def signature_to_class(signature):
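+        # e.g. "Lfoo/Bar;->baz()V" -> "Lfoo/Bar"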
+        return signature.split(";->")[0]
+
+    @staticmethod
+    def to_parent_package(pkg_or_class):
+        return pkg_or_class.rsplit("/", 1)[0]
+
+    def module_path(self, module_name):
+        return self.module_info.module_path(module_name)
+
+    def module_out_dir(self, module_name):
+        module_path = self.module_path(module_name)
+        return os.path.join(self.out_dir, "soong/.intermediates", module_path,
+                            module_name)
+
+    def find_bootclasspath_fragment_output_file(self, basename, required=True):
+        # Find the output file of the bootclasspath_fragment with the specified
+        # base name.
+        found_file = ""
+        bcpf_out_dir = self.module_out_dir(self.bcpf)
+        for (dirpath, _, filenames) in os.walk(bcpf_out_dir):
+            for f in filenames:
+                if f == basename:
+                    found_file = os.path.join(dirpath, f)
+                    break
+        if not found_file and required:
+            raise Exception(f"Could not find {basename} in {bcpf_out_dir}")
+        return found_file
+
+    def analyze(self):
+        """Analyze a bootclasspath_fragment module.
+
+        Provides help in resolving any existing issues and provides
+        optimizations that can be applied.
+        """
+        self.report(f"Analyzing bootclasspath_fragment module {self.bcpf}")
+        self.report_dedent(f"""
+            Run this tool to help initialize a bootclasspath_fragment module.
+            Before you start make sure that:
+
+            1. The current checkout is up to date.
+
+            2. The environment has been initialized using lunch, e.g.
+               lunch aosp_arm64-userdebug
+
+            3. You have added a bootclasspath_fragment module to the appropriate
+            Android.bp file. Something like this:
+
+               bootclasspath_fragment {{
+                 name: "{self.bcpf}",
+                 contents: [
+                   "...",
+                 ],
+            
+                 // The bootclasspath_fragments that provide APIs on which this
+                 // depends.
+                 fragments: [
+                   {{
+                     apex: "com.android.art",
+                     module: "art-bootclasspath-fragment",
+                   }},
+                 ],
+               }}
+            
+            4. You have added it to the platform_bootclasspath module in
+            frameworks/base/boot/Android.bp. Something like this:
+
+               platform_bootclasspath {{
+                 name: "platform-bootclasspath",
+                 fragments: [
+                   ...
+                   {{
+                     apex: "{self.apex}",
+                     module: "{self.bcpf}",
+                   }},
+                 ],
+               }}
+
+            5. You have added an sdk module. Something like this:
+
+               sdk {{
+                 name: "{self.sdk}",
+                 bootclasspath_fragments: ["{self.bcpf}"],
+               }}
+            """)
+
+        # Make sure that the module-info.json file is up to date.
+        self.load_module_info()
+
+        self.report_dedent("""
+            Cleaning potentially stale files.
+            """)
+        # Remove the out/soong/hiddenapi files.
+        shutil.rmtree(f"{self.out_dir}/soong/hiddenapi", ignore_errors=True)
+
+        # Remove any bootclasspath_fragment output files.
+        shutil.rmtree(self.module_out_dir(self.bcpf), ignore_errors=True)
+
+        self.build_monolithic_stubs_flags()
+
+        result = Result()
+
+        self.build_monolithic_flags(result)
+        self.analyze_hiddenapi_package_properties(result)
+        self.explain_how_to_check_signature_patterns()
+
+        # If there were any changes that need to be made to the Android.bp
+        # file then either apply or report them.
+        if result.property_changes:
+            bcpf_dir = self.module_info.module_path(self.bcpf)
+            bcpf_bp_file = os.path.join(self.top_dir, bcpf_dir, "Android.bp")
+            if self.fix:
+                tool_dir = os.path.dirname(self.tool_path)
+                bpmodify_path = os.path.join(tool_dir, "bpmodify")
+                bpmodify_runner = BpModifyRunner(bpmodify_path)
+                for property_change in result.property_changes:
+                    property_change.fix_bp_file(bcpf_bp_file, self.bcpf,
+                                                bpmodify_runner)
+
+                result.file_changes.append(
+                    self.new_file_change(
+                        bcpf_bp_file,
+                        f"Updated hidden_api properties of '{self.bcpf}'"))
+
+            else:
+                hiddenapi_snippet = ""
+                for property_change in result.property_changes:
+                    hiddenapi_snippet += property_change.snippet("        ")
+
+                # Remove leading and trailing blank lines.
+                hiddenapi_snippet = hiddenapi_snippet.strip("\n")
+
+                result.file_changes.append(
+                    self.new_file_change(
+                        bcpf_bp_file, f"""
+Add the following snippet into the {self.bcpf} bootclasspath_fragment module
+in the {bcpf_dir}/Android.bp file. If the hidden_api block already exists then
+merge these properties into it.
+
+    hidden_api: {{
+{hiddenapi_snippet}
+    }},
+"""))
+
+        if result.file_changes:
+            if self.fix:
+                file_change_message = textwrap.dedent("""
+                    The following files were modified by this script:
+                    """)
+            else:
+                file_change_message = textwrap.dedent("""
+                    The following modifications need to be made:
+                    """)
+
+            self.report(file_change_message)
+            result.file_changes.sort()
+            for file_change in result.file_changes:
+                self.report(f"    {file_change.path}")
+                self.report(f"        {file_change.description}")
+                self.report()
+
+            if not self.fix:
+                self.report_dedent("""
+                    Run the command again with the --fix option to automatically
+                    make the above changes.
+                    """.lstrip("\n"))
+
+    def new_file_change(self, file, description):
+        return FileChange(
+            path=os.path.relpath(file, self.top_dir), description=description)
+
+    def check_inconsistent_flag_lines(self, significant, module_line,
+                                      monolithic_line, separator_line):
+        if not (module_line.startswith("< ") and
+                monolithic_line.startswith("> ") and not separator_line):
+            # Something went wrong.
+            self.report("Invalid build output detected:")
+            self.report(f"  module_line: '{module_line}'")
+            self.report(f"  monolithic_line: '{monolithic_line}'")
+            self.report(f"  separator_line: '{separator_line}'")
+            sys.exit(1)
+
+        if significant:
+            logging.debug("%s", module_line)
+            logging.debug("%s", monolithic_line)
+            logging.debug("%s", separator_line)
+
+    def scan_inconsistent_flags_report(self, lines):
+        """Scans a hidden API flags report
+
+        The hidden API inconsistent flags report looks something like
+        this.
+
+        < out/soong/.intermediates/.../filtered-stub-flags.csv
+        > out/soong/hiddenapi/hiddenapi-stub-flags.txt
+
+        < Landroid/compat/Compatibility;->clearOverrides()V
+        > Landroid/compat/Compatibility;->clearOverrides()V,core-platform-api
+
+        """
+
+        # The basic format of an entry in the inconsistent flags report is:
+        #   <module specific flag>
+        #   <monolithic flag>
+        #   <separator>
+        #
+        # Wrap the lines iterator in an iterator which returns a tuple
+        # consisting of the three separate lines.
+        triples = zip(lines, lines, lines)
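+        # zip() pulls from the same iterator three times, so a stream of lines
+        # [a, b, c, d, e, f] is consumed as the tuples (a, b, c) and (d, e, f).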
+
+        module_line, monolithic_line, separator_line = next(triples)
+        significant = False
+        bcpf_dir = self.module_info.module_path(self.bcpf)
+        if os.path.join(bcpf_dir, self.bcpf) in module_line:
+            # These errors are related to the bcpf being analyzed so
+            # keep them.
+            significant = True
+        else:
+            self.report(f"Filtering out errors related to {module_line}")
+
+        self.check_inconsistent_flag_lines(significant, module_line,
+                                           monolithic_line, separator_line)
+
+        diffs = {}
+        for module_line, monolithic_line, separator_line in triples:
+            self.check_inconsistent_flag_lines(significant, module_line,
+                                               monolithic_line, "")
+
+            module_parts = module_line.removeprefix("< ").split(",")
+            module_signature = module_parts[0]
+            module_flags = module_parts[1:]
+
+            monolithic_parts = monolithic_line.removeprefix("> ").split(",")
+            monolithic_signature = monolithic_parts[0]
+            monolithic_flags = monolithic_parts[1:]
+
+            if module_signature != monolithic_signature:
+                # Something went wrong.
+                self.report("Inconsistent signatures detected:")
+                self.report(f"  module_signature: '{module_signature}'")
+                self.report(f"  monolithic_signature: '{monolithic_signature}'")
+                sys.exit(1)
+
+            diffs[module_signature] = (module_flags, monolithic_flags)
+
+            if separator_line:
+                # If the separator line is not blank then it is the end of the
+                # current report, and possibly the start of another.
+                return separator_line, diffs
+
+        return "", diffs
+
+    def build_file_read_output(self, filename):
+        # Make sure the filename is relative to top if possible as the build
+        # may be using relative paths as the target.
+        rel_filename = filename.removeprefix(self.top_dir)
+        cmd = ["build/soong/soong_ui.bash", "--make-mode", rel_filename]
+        cmd_line = " ".join(cmd)
+        logging.debug("%s", cmd_line)
+        # pylint: disable=consider-using-with
+        output = subprocess.Popen(
+            cmd,
+            cwd=self.top_dir,
+            stderr=subprocess.STDOUT,
+            stdout=subprocess.PIPE,
+            text=True,
+        )
+        return BuildOperation(popen=output)
+
+    def build_hiddenapi_flags(self, filename):
+        output = self.build_file_read_output(filename)
+
+        lines = output.lines()
+        diffs = None
+        for line in lines:
+            logging.debug("%s", line)
+            while line == _INCONSISTENT_FLAGS:
+                line, diffs = self.scan_inconsistent_flags_report(lines)
+
+        output.wait(timeout=10)
+        if output.returncode != 0:
+            logging.debug("Command failed with %s", output.returncode)
+        else:
+            logging.debug("Command succeeded")
+
+        return diffs
+
+    def build_monolithic_stubs_flags(self):
+        self.report_dedent(f"""
+            Attempting to build {_STUB_FLAGS_FILE} to verify that the
+            bootclasspath_fragment has the correct API stubs available...
+            """)
+
+        # Build the hiddenapi-stubs-flags.txt file.
+        diffs = self.build_hiddenapi_flags(_STUB_FLAGS_FILE)
+        if diffs:
+            self.report_dedent(f"""
+                There is a discrepancy between the stub API derived flags
+                created by the bootclasspath_fragment and the
+                platform_bootclasspath. See preceding error messages to see
+                which flags are inconsistent. The inconsistencies can occur for
+                a couple of reasons:
+
+                If you are building against prebuilts of the Android SDK, e.g.
+                by using TARGET_BUILD_APPS then the prebuilt versions of the
+                APIs this bootclasspath_fragment depends upon are out of date
+                and need updating. See go/update-prebuilts for help.
+
+                Otherwise, this is happening because there are some stub APIs
+                that are either provided by or used by the contents of the
+                bootclasspath_fragment but which are not available to it. There
+                are 4 ways to handle this:
+
+                1. A java_sdk_library in the contents property will
+                automatically make its stub APIs available to the
+                bootclasspath_fragment so nothing needs to be done.
+
+                2. If the API provided by the bootclasspath_fragment is created
+                by an api_only java_sdk_library (or a java_library that compiles
+                files generated by a separate droidstubs module) then it cannot
+                be added to the contents and instead must be added to the
+                api.stubs property, e.g.
+
+                   bootclasspath_fragment {{
+                     name: "{self.bcpf}",
+                     ...
+                     api: {{
+                       stubs: ["$MODULE-api-only"],
+                     }},
+                   }}
+
+                3. If the contents use APIs provided by another
+                bootclasspath_fragment then it needs to be added to the
+                fragments property, e.g.
+                
+                   bootclasspath_fragment {{
+                     name: "{self.bcpf}",
+                     ...
+                     // The bootclasspath_fragments that provide APIs on which this depends.
+                     fragments: [
+                       ...
+                       {{
+                         apex: "com.android.other",
+                         module: "com.android.other-bootclasspath-fragment",
+                       }},
+                     ],
+                   }}
+                
+                4. If the contents use APIs from a module that is not part of
+                another bootclasspath_fragment then it must be added to the
+                additional_stubs property, e.g.
+
+                   bootclasspath_fragment {{
+                     name: "{self.bcpf}",
+                     ...
+                     additional_stubs: ["android-non-updatable"],
+                   }}
+
+                   Like the api.stubs property these are typically
+                   java_sdk_library modules but can be java_library too.
+
+                   Note: The "android-non-updatable" is treated as if it was a
+                   java_sdk_library which it is not at the moment but will be in
+                   future.
+                """)
+
+        return diffs
+
+    def build_monolithic_flags(self, result):
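+        """Build the monolithic hidden API flags file, record any differences
+        in the supplied result and check for related custom flags in
+        frameworks/base/boot/hiddenapi."""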
+        self.report_dedent(f"""
+            Attempting to build {_FLAGS_FILE} to verify that the
+            bootclasspath_fragment has the correct hidden API flags...
+            """)
+
+        # Build the hiddenapi-flags.csv file and extract any differences in
+        # the flags between this bootclasspath_fragment and the monolithic
+        # files.
+        result.diffs = self.build_hiddenapi_flags(_FLAGS_FILE)
+
+        # Load information from the bootclasspath_fragment's all-flags.csv file.
+        self.load_all_flags()
+
+        if result.diffs:
+            self.report_dedent(f"""
+                There is a discrepancy between the hidden API flags created by
+                the bootclasspath_fragment and the platform_bootclasspath. See
+                preceding error messages to see which flags are inconsistent.
+                The inconsistencies can occur for a couple of reasons:
+
+                If you are building against prebuilts of this
+                bootclasspath_fragment then the prebuilt version of the sdk
+                snapshot (specifically the hidden API flag files) is
+                inconsistent with the prebuilt version of the apex {self.apex}.
+                Please ensure that they are both updated from the same build.
+
+                1. There are custom hidden API flags specified in one of the
+                files in frameworks/base/boot/hiddenapi which apply to the
+                bootclasspath_fragment but which are not supplied to the
+                bootclasspath_fragment module.
+
+                2. The bootclasspath_fragment specifies invalid
+                "split_packages", "single_packages" and/of "package_prefixes"
+                properties that match packages and classes that it does not
+                provide.
+                """)
+
+            # Check to see if there are any hiddenapi related properties that
+            # need to be added to the bootclasspath_fragment.
+            self.report_dedent("""
+                Checking custom hidden API flags....
+                """)
+            self.check_frameworks_base_boot_hidden_api_files(result)
+
+    def report_hidden_api_flag_file_changes(self, result, property_name,
+                                            flags_file, rel_bcpf_flags_file,
+                                            bcpf_flags_file):
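+        """Report the file and property changes needed to move any flags that
+        are provided by the bootclasspath_fragment out of the supplied flags
+        file and into the bootclasspath_fragment specific file."""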
+        matched_signatures = set()
+        # Open the flags file to read the flags from.
+        with open(flags_file, "r", encoding="utf8") as f:
+            for signature in newline_stripping_iter(f.readline):
+                if signature in self.signatures:
+                    # The signature is provided by the bootclasspath_fragment so
+                    # it will need to be moved to the bootclasspath_fragment
+                    # specific file.
+                    matched_signatures.add(signature)
+
+        # If the bootclasspath_fragment specific flags file is not empty
+        # then it contains flags. That could either be new flags just moved
+        # from frameworks/base or previous contents of the file. In either
+        # case the file must not be removed.
+        if matched_signatures:
+            insert = textwrap.indent("\n".join(matched_signatures),
+                                     "            ")
+            result.file_changes.append(
+                self.new_file_change(
+                    flags_file, f"""Remove the following entries:
+{insert}
+"""))
+
+            result.file_changes.append(
+                self.new_file_change(
+                    bcpf_flags_file, f"""Add the following entries:
+{insert}
+"""))
+
+            result.property_changes.append(
+                HiddenApiPropertyChange(
+                    property_name=property_name,
+                    values=[rel_bcpf_flags_file],
+                ))
+
+    def fix_hidden_api_flag_files(self, result, property_name, flags_file,
+                                  rel_bcpf_flags_file, bcpf_flags_file):
+        # Read the file in frameworks/base/boot/hiddenapi/<file> copy any
+        # flags that relate to the bootclasspath_fragment into a local
+        # file in the hiddenapi subdirectory.
+        tmp_flags_file = flags_file + ".tmp"
+
+        # Make sure the directory containing the bootclasspath_fragment specific
+        # hidden api flags exists.
+        os.makedirs(os.path.dirname(bcpf_flags_file), exist_ok=True)
+
+        bcpf_flags_file_exists = os.path.exists(bcpf_flags_file)
+
+        matched_signatures = set()
+        # Open the flags file to read the flags from.
+        with open(flags_file, "r", encoding="utf8") as f:
+            # Open a temporary file to write the flags (minus any removed
+            # flags).
+            with open(tmp_flags_file, "w", encoding="utf8") as t:
+                # Open the bootclasspath_fragment file for append just in
+                # case it already exists.
+                with open(bcpf_flags_file, "a", encoding="utf8") as b:
+                    for line in iter(f.readline, ""):
+                        signature = line.rstrip()
+                        if signature in self.signatures:
+                            # The signature is provided by the
+                            # bootclasspath_fragment so write it to the new
+                            # bootclasspath_fragment specific file.
+                            print(line, file=b, end="")
+                            matched_signatures.add(signature)
+                        else:
+                            # The signature is NOT provided by the
+                            # bootclasspath_fragment. Copy it to the new
+                            # monolithic file.
+                            print(line, file=t, end="")
+
+        # If the bootclasspath_fragment specific flags file is not empty
+        # then it contains flags. That could either be new flags just moved
+        # from frameworks/base or previous contents of the file. In either
+        # case the file must not be removed.
+        if matched_signatures:
+            # There are custom flags related to the bootclasspath_fragment
+            # so replace the frameworks/base/boot/hiddenapi file with the
+            # file that does not contain those flags.
+            shutil.move(tmp_flags_file, flags_file)
+
+            result.file_changes.append(
+                self.new_file_change(flags_file,
+                                     f"Removed '{self.bcpf}' specific entries"))
+
+            result.property_changes.append(
+                HiddenApiPropertyChange(
+                    property_name=property_name,
+                    values=[rel_bcpf_flags_file],
+                ))
+
+            # Make sure that the files are sorted.
+            self.run_command([
+                "tools/platform-compat/hiddenapi/sort_api.sh",
+                bcpf_flags_file,
+            ])
+
+            if bcpf_flags_file_exists:
+                desc = f"Added '{self.bcpf}' specific entries"
+            else:
+                desc = f"Created with '{self.bcpf}' specific entries"
+            result.file_changes.append(
+                self.new_file_change(bcpf_flags_file, desc))
+        else:
+            # There are no custom flags related to the
+            # bootclasspath_fragment so clean up the working files.
+            os.remove(tmp_flags_file)
+            if not bcpf_flags_file_exists:
+                os.remove(bcpf_flags_file)
+
+    def check_frameworks_base_boot_hidden_api_files(self, result):
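+        """Check the hiddenapi-*.txt files in frameworks/base/boot/hiddenapi
+        for flags that relate to this bootclasspath_fragment and either fix
+        them or report the changes that are needed."""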
+        hiddenapi_dir = os.path.join(self.top_dir,
+                                     "frameworks/base/boot/hiddenapi")
+        for basename in sorted(os.listdir(hiddenapi_dir)):
+            if not (basename.startswith("hiddenapi-") and
+                    basename.endswith(".txt")):
+                continue
+
+            flags_file = os.path.join(hiddenapi_dir, basename)
+
+            logging.debug("Checking %s for flags related to %s", flags_file,
+                          self.bcpf)
+
+            # Map the file name in frameworks/base/boot/hiddenapi into a
+            # slightly more meaningful name for use by the
+            # bootclasspath_fragment.
+            if basename == "hiddenapi-max-target-o.txt":
+                basename = "hiddenapi-max-target-o-low-priority.txt"
+            elif basename == "hiddenapi-max-target-r-loprio.txt":
+                basename = "hiddenapi-max-target-r-low-priority.txt"
+
+            property_name = basename.removeprefix("hiddenapi-")
+            property_name = property_name.removesuffix(".txt")
+            property_name = property_name.replace("-", "_")
+
+            rel_bcpf_flags_file = f"hiddenapi/{basename}"
+            bcpf_dir = self.module_info.module_path(self.bcpf)
+            bcpf_flags_file = os.path.join(self.top_dir, bcpf_dir,
+                                           rel_bcpf_flags_file)
+
+            if self.fix:
+                self.fix_hidden_api_flag_files(result, property_name,
+                                               flags_file, rel_bcpf_flags_file,
+                                               bcpf_flags_file)
+            else:
+                self.report_hidden_api_flag_file_changes(
+                    result, property_name, flags_file, rel_bcpf_flags_file,
+                    bcpf_flags_file)
+
+    @staticmethod
+    def split_package_comment(split_packages):
+        if split_packages:
+            return textwrap.dedent("""
+                The following packages contain classes from other modules on the
+                bootclasspath. That means that the hidden API flags for this
+                module have to explicitly list every single class this module
+                provides in that package to differentiate them from the classes
+                provided by other modules. That can include private classes that
+                are not part of the API.
+            """).strip("\n")
+
+        return "This module does not contain any split packages."
+
+    @staticmethod
+    def package_prefixes_comment():
+        return textwrap.dedent("""
+            The following packages and all their subpackages currently only
+            contain classes from this bootclasspath_fragment. Listing a package
+            here won't prevent other bootclasspath modules from adding classes
+            in any of those packages but it will prevent them from adding those
+            classes into an API surface, e.g. public, system, etc. Doing so
+            will result in a build failure due to inconsistent flags.
+        """).strip("\n")
+
+    def analyze_hiddenapi_package_properties(self, result):
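+        """Analyze the packages provided by the bootclasspath_fragment and
+        populate the result with the split_packages, single_packages and
+        package_prefixes property changes that it needs."""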
+        self.compute_hiddenapi_package_properties(result)
+
+        def indent_lines(lines):
+            return "\n".join([f"        {cls}" for cls in lines])
+
+        # TODO(b/202154151): Find those classes in split packages that are not
+        #  part of an API, i.e. are an internal implementation class, and so
+        #  can, and should, be safely moved out of the split packages.
+
+        split_packages = result.split_packages.keys()
+        result.property_changes.append(
+            HiddenApiPropertyChange(
+                property_name="split_packages",
+                values=split_packages,
+                property_comment=self.split_package_comment(split_packages),
+                action=PropertyChangeAction.REPLACE,
+            ))
+
+        if split_packages:
+            self.report_dedent(f"""
+                bootclasspath_fragment {self.bcpf} contains classes in packages
+                that also contain classes provided by other bootclasspath
+                modules. Those packages are called split packages. Split
+                packages should be avoided where possible but are often
+                unavoidable when modularizing existing code.
+
+                The hidden api processing needs to know which packages are split
+                (and conversely which are not) so that it can optimize the
+                hidden API flags to remove unnecessary implementation details.
+
+                By default (for backwards compatibility) the
+                bootclasspath_fragment assumes that all packages are split
+                unless one of the package_prefixes or split_packages properties
+                is specified. While that is safe, it is not optimal and can lead
+                to unnecessary implementation details leaking into the hidden
+                API flags. Adding an empty split_packages property allows the
+                flags to be optimized and remove any unnecessary implementation
+                details.
+                """)
+
+            for package in split_packages:
+                reason = result.split_packages[package]
+                self.report(f"""
+    Package {package} is split because while this bootclasspath_fragment
+    provides the following classes:
+{indent_lines(reason.bcpf)}
+
+    Other module(s) on the bootclasspath provide the following classes in
+    that package:
+{indent_lines(reason.other)}
+""")
+
+        single_packages = result.single_packages.keys()
+        if single_packages:
+            result.property_changes.append(
+                HiddenApiPropertyChange(
+                    property_name="single_packages",
+                    values=single_packages,
+                    property_comment=textwrap.dedent("""
+                    The following packages currently only contain classes from
+                    this bootclasspath_fragment but some of their sub-packages
+                    contain classes from other bootclasspath modules. Packages
+                    should only be listed here when necessary for legacy
+                    purposes; new packages should match a package prefix.
+                    """),
+                    action=PropertyChangeAction.REPLACE,
+                ))
+
+            self.report_dedent(f"""
+                bootclasspath_fragment {self.bcpf} contains classes from
+                packages that have sub-packages which contain classes provided by
+                other bootclasspath modules. Those packages are called single
+                packages. Single packages should be avoided where possible but
+                are often unavoidable when modularizing existing code.
+
+                Because some sub-packages contain classes from other
+                bootclasspath modules, it is not possible to use the package as a
+                package prefix as that treats the package and all its
+                sub-packages as being provided by this module.
+                """)
+            for package in single_packages:
+                reason = result.single_packages[package]
+                self.report(f"""
+    Package {package} is not a package prefix because while this
+    bootclasspath_fragment provides the following sub-packages:
+{indent_lines(reason.bcpf)}
+
+    Other module(s) on the bootclasspath provide the following sub-packages:
+{indent_lines(reason.other)}
+""")
+
+        package_prefixes = result.package_prefixes
+        if package_prefixes:
+            result.property_changes.append(
+                HiddenApiPropertyChange(
+                    property_name="package_prefixes",
+                    values=package_prefixes,
+                    property_comment=self.package_prefixes_comment(),
+                    action=PropertyChangeAction.REPLACE,
+                ))
+
+    def explain_how_to_check_signature_patterns(self):
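+        """Explain how to build and review the signature-patterns.csv file."""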
+        signature_patterns_files = self.find_bootclasspath_fragment_output_file(
+            "signature-patterns.csv", required=False)
+        if signature_patterns_files:
+            signature_patterns_files = signature_patterns_files.removeprefix(
+                self.top_dir)
+
+            self.report_dedent(f"""
+                The purpose of the hiddenapi split_packages and package_prefixes
+                properties is to allow the removal of implementation details
+                from the hidden API flags to reduce the coupling between sdk
+                snapshots and the APEX runtime. It cannot eliminate that
+                coupling completely though. Doing so may require changes to the
+                code.
+
+                This tool provides support for managing those properties but it
+                cannot decide whether the set of package prefixes suggested is
+                appropriate; that needs the input of the developer.
+
+                Please run the following command:
+                    m {signature_patterns_files}
+
+                And then check the '{signature_patterns_files}' file for any mention
+                of implementation classes and packages (i.e. those
+                classes/packages that do not contain any part of an API surface,
+                including the hidden API). If they are found then the code
+                should ideally be moved to a package unique to this module that
+                is contained within a package that is part of an API surface.
+
+                The format of the file is a list of patterns:
+
+                * Patterns for split packages will list every class in that package.
+
+                * Patterns for package prefixes will end with .../**.
+
+                * Patterns for packages which are not split but cannot use a
+                package prefix because there are sub-packages which are provided
+                by another module will end with .../*.
+                """)
+
+    def compute_hiddenapi_package_properties(self, result):
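+        """Build a signature trie from the bootclasspath_fragment and
+        monolithic classes and use it to classify each package."""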
+        trie = signature_trie()
+        # Populate the trie with the classes that are provided by the
+        # bootclasspath_fragment, tagging them to make it clear where they
+        # are from.
+        sorted_classes = sorted(self.classes)
+        for class_name in sorted_classes:
+            trie.add(class_name + _FAKE_MEMBER, ClassProvider.BCPF)
+
+        # Now the same for monolithic classes.
+        monolithic_classes = set()
+        abs_flags_file = os.path.join(self.top_dir, _FLAGS_FILE)
+        with open(abs_flags_file, "r", encoding="utf8") as f:
+            for line in iter(f.readline, ""):
+                signature = self.line_to_signature(line)
+                class_name = self.signature_to_class(signature)
+                if (class_name not in monolithic_classes and
+                        class_name not in self.classes):
+                    trie.add(
+                        class_name + _FAKE_MEMBER,
+                        ClassProvider.OTHER,
+                        only_if_matches=True)
+                    monolithic_classes.add(class_name)
+
+        self.recurse_hiddenapi_packages_trie(trie, result)
+
+    @staticmethod
+    def selector_to_java_reference(node):
+        return node.selector.replace("/", ".")
+
+    @staticmethod
+    def determine_reason_for_single_package(node):
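+        """Partition the sub-packages of the node into those provided by the
+        bootclasspath_fragment and those provided by other modules."""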
+        bcpf_packages = []
+        other_packages = []
+
+        def recurse(n):
+            if n.type != "package":
+                return
+
+            providers = n.get_matching_rows("*")
+            package_ref = BcpfAnalyzer.selector_to_java_reference(n)
+            if ClassProvider.BCPF in providers:
+                bcpf_packages.append(package_ref)
+            else:
+                other_packages.append(package_ref)
+
+            children = n.child_nodes()
+            if children:
+                for child in children:
+                    recurse(child)
+
+        recurse(node)
+        return PackagePropertyReason(bcpf=bcpf_packages, other=other_packages)
+
+    @staticmethod
+    def determine_reason_for_split_package(node):
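+        """Partition the classes in the node's package into those provided by
+        the bootclasspath_fragment and those provided by other modules."""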
+        bcpf_classes = []
+        other_classes = []
+        for child in node.child_nodes():
+            if child.type != "class":
+                continue
+
+            providers = child.values(lambda _: True)
+            class_ref = BcpfAnalyzer.selector_to_java_reference(child)
+            if ClassProvider.BCPF in providers:
+                bcpf_classes.append(class_ref)
+            else:
+                other_classes.append(class_ref)
+
+        return PackagePropertyReason(bcpf=bcpf_classes, other=other_classes)
+
+    def recurse_hiddenapi_packages_trie(self, node, result):
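+        """Recursively walk the package trie, classifying each package as a
+        package prefix, a single package, a split package or neither."""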
+        nodes = node.child_nodes()
+        if nodes:
+            for child in nodes:
+                # Ignore any non-package nodes.
+                if child.type != "package":
+                    continue
+
+                package = self.selector_to_java_reference(child)
+
+                providers = set(child.get_matching_rows("**"))
+                if not providers:
+                    # The package and all its sub packages contain no
+                    # classes. This should never happen.
+                    pass
+                elif providers == {ClassProvider.BCPF}:
+                    # The package and all its sub packages only contain
+                    # classes provided by the bootclasspath_fragment.
+                    logging.debug("Package '%s.**' is not split", package)
+                    result.package_prefixes.append(package)
+                    # There is no point traversing into the sub packages.
+                    continue
+                elif providers == {ClassProvider.OTHER}:
+                    # The package and all its sub packages contain no
+                    # classes provided by the bootclasspath_fragment.
+                    # There is no point traversing into the sub packages.
+                    logging.debug("Package '%s.**' contains no classes from %s",
+                                  package, self.bcpf)
+                    continue
+                elif ClassProvider.BCPF in providers:
+                    # The package and all its sub packages contain classes
+                    # provided by the bootclasspath_fragment and other
+                    # sources.
+                    logging.debug(
+                        "Package '%s.**' contains classes from "
+                        "%s and other sources", package, self.bcpf)
+
+                providers = set(child.get_matching_rows("*"))
+                if not providers:
+                    # The package contains no classes.
+                    logging.debug("Package: %s contains no classes", package)
+                elif providers == {ClassProvider.BCPF}:
+                    # The package only contains classes provided by the
+                    # bootclasspath_fragment.
+                    logging.debug(
+                        "Package '%s.*' is not split but does have "
+                        "sub-packages from other modules", package)
+
+                    # Partition the sub-packages into those that are provided by
+                    # this bootclasspath_fragment and those provided by other
+                    # modules. They can be used to explain the reason for the
+                    # single package to developers.
+                    reason = self.determine_reason_for_single_package(child)
+                    result.single_packages[package] = reason
+
+                elif providers == {ClassProvider.OTHER}:
+                    # The package contains no classes provided by the
+                    # bootclasspath_fragment. Child nodes may contain such
+                    # classes.
+                    logging.debug("Package '%s.*' contains no classes from %s",
+                                  package, self.bcpf)
+                elif ClassProvider.BCPF in providers:
+                    # The package contains classes provided by both the
+                    # bootclasspath_fragment and some other source.
+                    logging.debug("Package '%s.*' is split", package)
+
+                    # Partition the classes in this split package into those
+                    # that come from this bootclasspath_fragment and those that
+                    # come from other modules. That can be used to explain the
+                    # reason for the split package to developers.
+                    reason = self.determine_reason_for_split_package(child)
+                    result.split_packages[package] = reason
+
+                self.recurse_hiddenapi_packages_trie(child, result)
+
+
+def newline_stripping_iter(iterator):
+    """Return an iterator over the iterator that strips trailing white space."""
+    lines = iter(iterator, "")
+    lines = (line.rstrip() for line in lines)
+    return lines
+
+
+def format_comment_as_text(text, indent):
+    return "".join(
+        [f"{line}\n" for line in format_comment_as_lines(text, indent)])
+
+
+def format_comment_as_lines(text, indent):
+    lines = textwrap.wrap(text.strip("\n"), width=77 - len(indent))
+    lines = [f"{indent}// {line}" for line in lines]
+    return lines
+
+
+def log_stream_for_subprocess():
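+    """Return the stream of a DEBUG level StreamHandler if one exists,
+    otherwise subprocess.DEVNULL."""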
+    stream = subprocess.DEVNULL
+    for handler in logging.root.handlers:
+        if handler.level == logging.DEBUG:
+            if isinstance(handler, logging.StreamHandler):
+                stream = handler.stream
+    return stream
+
+
+def main(argv):
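+    """Parse the command line arguments and run the BcpfAnalyzer.
+
+    An example invocation (the --bcpf and --apex values here are purely
+    illustrative) might look like:
+        analyze_bcpf.py --bcpf my-fragment --apex com.android.myapex
+    """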
+    args_parser = argparse.ArgumentParser(
+        description="Analyze a bootclasspath_fragment module.")
+    args_parser.add_argument(
+        "--bcpf",
+        help="The bootclasspath_fragment module to analyze",
+        required=True,
+    )
+    args_parser.add_argument(
+        "--apex",
+        help="The apex module to which the bootclasspath_fragment belongs. It "
+        "is not strictly necessary at the moment but providing it will "
+        "allow this script to give more useful messages and it may be"
+        "required in future.",
+        default="SPECIFY-APEX-OPTION")
+    args_parser.add_argument(
+        "--sdk",
+        help="The sdk module to which the bootclasspath_fragment belongs. It "
+        "is not strictly necessary at the moment but providing it will "
+        "allow this script to give more useful messages and it may be"
+        "required in future.",
+        default="SPECIFY-SDK-OPTION")
+    args_parser.add_argument(
+        "--fix",
+        help="Attempt to fix any issues found automatically.",
+        action="store_true",
+        default=False)
+    args = args_parser.parse_args(argv[1:])
+    top_dir = os.environ["ANDROID_BUILD_TOP"] + "/"
+    out_dir = os.environ.get("OUT_DIR", os.path.join(top_dir, "out"))
+    product_out_dir = os.environ.get("ANDROID_PRODUCT_OUT", top_dir)
+    # Make product_out_dir relative to the top so it can be used as part of a
+    # build target.
+    product_out_dir = product_out_dir.removeprefix(top_dir)
+    log_fd, abs_log_file = tempfile.mkstemp(
+        suffix="_analyze_bcpf.log", text=True)
+
+    with os.fdopen(log_fd, "w") as log_file:
+        # Set up debug logging to the log file.
+        logging.basicConfig(
+            level=logging.DEBUG,
+            format="%(levelname)-8s %(message)s",
+            stream=log_file)
+
+        # Define a handler which writes INFO messages or higher to
+        # sys.stdout with just the message.
+        console = logging.StreamHandler()
+        console.setLevel(logging.INFO)
+        console.setFormatter(logging.Formatter("%(message)s"))
+        # add the handler to the root logger
+        logging.getLogger("").addHandler(console)
+
+        print(f"Writing log to {abs_log_file}")
+        try:
+            analyzer = BcpfAnalyzer(
+                tool_path=argv[0],
+                top_dir=top_dir,
+                out_dir=out_dir,
+                product_out_dir=product_out_dir,
+                bcpf=args.bcpf,
+                apex=args.apex,
+                sdk=args.sdk,
+                fix=args.fix,
+            )
+            analyzer.analyze()
+        finally:
+            print(f"Log written to {abs_log_file}")
+
+
+if __name__ == "__main__":
+    main(sys.argv)
diff --git a/scripts/hiddenapi/analyze_bcpf_test.py b/scripts/hiddenapi/analyze_bcpf_test.py
new file mode 100644
index 0000000..a32ffd0
--- /dev/null
+++ b/scripts/hiddenapi/analyze_bcpf_test.py
@@ -0,0 +1,661 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Unit tests for analyzing bootclasspath_fragment modules."""
+import os.path
+import shutil
+import tempfile
+import unittest
+import unittest.mock
+
+import sys
+
+import analyze_bcpf as ab
+
+_FRAMEWORK_HIDDENAPI = "frameworks/base/boot/hiddenapi"
+_MAX_TARGET_O = f"{_FRAMEWORK_HIDDENAPI}/hiddenapi-max-target-o.txt"
+_MAX_TARGET_P = f"{_FRAMEWORK_HIDDENAPI}/hiddenapi-max-target-p.txt"
+_MAX_TARGET_Q = f"{_FRAMEWORK_HIDDENAPI}/hiddenapi-max-target-q.txt"
+_MAX_TARGET_R = f"{_FRAMEWORK_HIDDENAPI}/hiddenapi-max-target-r-loprio.txt"
+
+_MULTI_LINE_COMMENT = """
+Lorem ipsum dolor sit amet, consectetur adipiscing elit. Ut arcu justo,
+bibendum eu malesuada vel, fringilla in odio. Etiam gravida ultricies sem
+tincidunt luctus.""".replace("\n", " ").strip()
+
+
+class FakeBuildOperation(ab.BuildOperation):
+
+    def __init__(self, lines, return_code):
+        ab.BuildOperation.__init__(self, None)
+        self._lines = lines
+        self.returncode = return_code
+
+    def lines(self):
+        return iter(self._lines)
+
+    def wait(self, *args, **kwargs):
+        return
+
+
+class TestAnalyzeBcpf(unittest.TestCase):
+
+    def setUp(self):
+        # Create a temporary directory
+        self.test_dir = tempfile.mkdtemp()
+
+    def tearDown(self):
+        # Remove the directory after the test
+        shutil.rmtree(self.test_dir)
+
+    @staticmethod
+    def write_abs_file(abs_path, contents):
+        os.makedirs(os.path.dirname(abs_path), exist_ok=True)
+        with open(abs_path, "w", encoding="utf8") as f:
+            print(contents.removeprefix("\n"), file=f, end="")
+
+    def populate_fs(self, fs):
+        for path, contents in fs.items():
+            abs_path = os.path.join(self.test_dir, path)
+            self.write_abs_file(abs_path, contents)
+
+    def create_analyzer_for_test(self,
+                                 fs=None,
+                                 bcpf="bcpf",
+                                 apex="apex",
+                                 sdk="sdk",
+                                 fix=False):
+        if fs:
+            self.populate_fs(fs)
+
+        top_dir = self.test_dir
+        out_dir = os.path.join(self.test_dir, "out")
+        product_out_dir = "out/product"
+
+        bcpf_dir = f"{bcpf}-dir"
+        modules = {bcpf: {"path": [bcpf_dir]}}
+        module_info = ab.ModuleInfo(modules)
+
+        analyzer = ab.BcpfAnalyzer(
+            tool_path=os.path.join(out_dir, "bin"),
+            top_dir=top_dir,
+            out_dir=out_dir,
+            product_out_dir=product_out_dir,
+            bcpf=bcpf,
+            apex=apex,
+            sdk=sdk,
+            fix=fix,
+            module_info=module_info,
+        )
+        analyzer.load_all_flags()
+        return analyzer
+
+    def test_reformat_report_text(self):
+        lines = """
+99. An item in a numbered list
+that traverses multiple lines.
+
+   An indented example
+   that should not be reformatted.
+"""
+        reformatted = ab.BcpfAnalyzer.reformat_report_test(lines)
+        self.assertEqual(
+            """
+99. An item in a numbered list that traverses multiple lines.
+
+   An indented example
+   that should not be reformatted.
+""", reformatted)
+
+    def do_test_build_flags(self, fix):
+        lines = """
+ERROR: Hidden API flags are inconsistent:
+< out/soong/.intermediates/bcpf-dir/bcpf-dir/filtered-flags.csv
+> out/soong/hiddenapi/hiddenapi-flags.csv
+
+< Lacme/test/Class;-><init>()V,blocked
+> Lacme/test/Class;-><init>()V,max-target-o
+
+< Lacme/test/Other;->getThing()Z,blocked
+> Lacme/test/Other;->getThing()Z,max-target-p
+
+< Lacme/test/Widget;-><init()V,blocked
+> Lacme/test/Widget;-><init()V,max-target-q
+
+< Lacme/test/Gadget;->NAME:Ljava/lang/String;,blocked
+> Lacme/test/Gadget;->NAME:Ljava/lang/String;,lo-prio,max-target-r
+16:37:32 ninja failed with: exit status 1
+""".strip().splitlines()
+        operation = FakeBuildOperation(lines=lines, return_code=1)
+
+        fs = {
+            _MAX_TARGET_O:
+                """
+Lacme/items/Magnet;->size:I
+Lacme/test/Class;-><init>()V
+""",
+            _MAX_TARGET_P:
+                """
+Lacme/items/Rocket;->size:I
+Lacme/test/Other;->getThing()Z
+""",
+            _MAX_TARGET_Q:
+                """
+Lacme/items/Rock;->size:I
+Lacme/test/Widget;-><init()V
+""",
+            _MAX_TARGET_R:
+                """
+Lacme/items/Lever;->size:I
+Lacme/test/Gadget;->NAME:Ljava/lang/String;
+""",
+            "bcpf-dir/hiddenapi/hiddenapi-max-target-p.txt":
+                """
+Lacme/old/Class;->getWidget()Lacme/test/Widget;
+""",
+            "out/soong/.intermediates/bcpf-dir/bcpf/all-flags.csv":
+                """
+Lacme/test/Gadget;->NAME:Ljava/lang/String;,blocked
+Lacme/test/Widget;-><init()V,blocked
+Lacme/test/Class;-><init>()V,blocked
+Lacme/test/Other;->getThing()Z,blocked
+""",
+        }
+
+        analyzer = self.create_analyzer_for_test(fs, fix=fix)
+
+        # Override the build_file_read_output() method to just return a fake
+        # build operation.
+        analyzer.build_file_read_output = unittest.mock.Mock(
+            return_value=operation)
+
+        # Override the run_command() method to do nothing.
+        analyzer.run_command = unittest.mock.Mock()
+
+        result = ab.Result()
+
+        analyzer.build_monolithic_flags(result)
+        expected_diffs = {
+            "Lacme/test/Gadget;->NAME:Ljava/lang/String;":
+                (["blocked"], ["lo-prio", "max-target-r"]),
+            "Lacme/test/Widget;-><init()V": (["blocked"], ["max-target-q"]),
+            "Lacme/test/Class;-><init>()V": (["blocked"], ["max-target-o"]),
+            "Lacme/test/Other;->getThing()Z": (["blocked"], ["max-target-p"])
+        }
+        self.assertEqual(expected_diffs, result.diffs, msg="flag differences")
+
+        expected_property_changes = [
+            ab.HiddenApiPropertyChange(
+                property_name="max_target_o_low_priority",
+                values=["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+                property_comment=""),
+            ab.HiddenApiPropertyChange(
+                property_name="max_target_p",
+                values=["hiddenapi/hiddenapi-max-target-p.txt"],
+                property_comment=""),
+            ab.HiddenApiPropertyChange(
+                property_name="max_target_q",
+                values=["hiddenapi/hiddenapi-max-target-q.txt"],
+                property_comment=""),
+            ab.HiddenApiPropertyChange(
+                property_name="max_target_r_low_priority",
+                values=["hiddenapi/hiddenapi-max-target-r-low-priority.txt"],
+                property_comment=""),
+        ]
+        self.assertEqual(
+            expected_property_changes,
+            result.property_changes,
+            msg="property changes")
+
+        return result
+
+    def test_build_flags_report(self):
+        result = self.do_test_build_flags(fix=False)
+
+        expected_file_changes = [
+            ab.FileChange(
+                path="bcpf-dir/hiddenapi/"
+                "hiddenapi-max-target-o-low-priority.txt",
+                description="""Add the following entries:
+            Lacme/test/Class;-><init>()V
+""",
+            ),
+            ab.FileChange(
+                path="bcpf-dir/hiddenapi/hiddenapi-max-target-p.txt",
+                description="""Add the following entries:
+            Lacme/test/Other;->getThing()Z
+""",
+            ),
+            ab.FileChange(
+                path="bcpf-dir/hiddenapi/hiddenapi-max-target-q.txt",
+                description="""Add the following entries:
+            Lacme/test/Widget;-><init()V
+"""),
+            ab.FileChange(
+                path="bcpf-dir/hiddenapi/"
+                "hiddenapi-max-target-r-low-priority.txt",
+                description="""Add the following entries:
+            Lacme/test/Gadget;->NAME:Ljava/lang/String;
+"""),
+            ab.FileChange(
+                path="frameworks/base/boot/hiddenapi/"
+                "hiddenapi-max-target-o.txt",
+                description="""Remove the following entries:
+            Lacme/test/Class;-><init>()V
+"""),
+            ab.FileChange(
+                path="frameworks/base/boot/hiddenapi/"
+                "hiddenapi-max-target-p.txt",
+                description="""Remove the following entries:
+            Lacme/test/Other;->getThing()Z
+"""),
+            ab.FileChange(
+                path="frameworks/base/boot/hiddenapi/"
+                "hiddenapi-max-target-q.txt",
+                description="""Remove the following entries:
+            Lacme/test/Widget;-><init()V
+"""),
+            ab.FileChange(
+                path="frameworks/base/boot/hiddenapi/"
+                "hiddenapi-max-target-r-loprio.txt",
+                description="""Remove the following entries:
+            Lacme/test/Gadget;->NAME:Ljava/lang/String;
+""")
+        ]
+        result.file_changes.sort()
+        self.assertEqual(
+            expected_file_changes, result.file_changes, msg="file_changes")
+
+    def test_build_flags_fix(self):
+        result = self.do_test_build_flags(fix=True)
+
+        expected_file_changes = [
+            ab.FileChange(
+                path="bcpf-dir/hiddenapi/"
+                "hiddenapi-max-target-o-low-priority.txt",
+                description="Created with 'bcpf' specific entries"),
+            ab.FileChange(
+                path="bcpf-dir/hiddenapi/hiddenapi-max-target-p.txt",
+                description="Added 'bcpf' specific entries"),
+            ab.FileChange(
+                path="bcpf-dir/hiddenapi/hiddenapi-max-target-q.txt",
+                description="Created with 'bcpf' specific entries"),
+            ab.FileChange(
+                path="bcpf-dir/hiddenapi/"
+                "hiddenapi-max-target-r-low-priority.txt",
+                description="Created with 'bcpf' specific entries"),
+            ab.FileChange(
+                path=_MAX_TARGET_O,
+                description="Removed 'bcpf' specific entries"),
+            ab.FileChange(
+                path=_MAX_TARGET_P,
+                description="Removed 'bcpf' specific entries"),
+            ab.FileChange(
+                path=_MAX_TARGET_Q,
+                description="Removed 'bcpf' specific entries"),
+            ab.FileChange(
+                path=_MAX_TARGET_R,
+                description="Removed 'bcpf' specific entries")
+        ]
+
+        result.file_changes.sort()
+        self.assertEqual(
+            expected_file_changes, result.file_changes, msg="file_changes")
+
+        expected_file_contents = {
+            "bcpf-dir/hiddenapi/hiddenapi-max-target-o-low-priority.txt":
+                """
+Lacme/test/Class;-><init>()V
+""",
+            "bcpf-dir/hiddenapi/hiddenapi-max-target-p.txt":
+                """
+Lacme/old/Class;->getWidget()Lacme/test/Widget;
+Lacme/test/Other;->getThing()Z
+""",
+            "bcpf-dir/hiddenapi/hiddenapi-max-target-q.txt":
+                """
+Lacme/test/Widget;-><init()V
+""",
+            "bcpf-dir/hiddenapi/hiddenapi-max-target-r-low-priority.txt":
+                """
+Lacme/test/Gadget;->NAME:Ljava/lang/String;
+""",
+            _MAX_TARGET_O:
+                """
+Lacme/items/Magnet;->size:I
+""",
+            _MAX_TARGET_P:
+                """
+Lacme/items/Rocket;->size:I
+""",
+            _MAX_TARGET_Q:
+                """
+Lacme/items/Rock;->size:I
+""",
+            _MAX_TARGET_R:
+                """
+Lacme/items/Lever;->size:I
+""",
+        }
+        for file_change in result.file_changes:
+            path = file_change.path
+            expected_contents = expected_file_contents[path].lstrip()
+            abs_path = os.path.join(self.test_dir, path)
+            with open(abs_path, "r", encoding="utf8") as tio:
+                contents = tio.read()
+                self.assertEqual(
+                    expected_contents, contents, msg=f"{path} contents")
+
+    def test_compute_hiddenapi_package_properties(self):
+        fs = {
+            "out/soong/.intermediates/bcpf-dir/bcpf/all-flags.csv":
+                """
+La/b/C;->m()V
+La/b/c/D;->m()V
+La/b/c/E;->m()V
+Lb/c/D;->m()V
+Lb/c/E;->m()V
+Lb/c/d/E;->m()V
+""",
+            "out/soong/hiddenapi/hiddenapi-flags.csv":
+                """
+La/b/C;->m()V
+La/b/D;->m()V
+La/b/E;->m()V
+La/b/c/D;->m()V
+La/b/c/E;->m()V
+La/b/c/d/E;->m()V
+La/b/c/d/e/F;->m()V
+Lb/c/D;->m()V
+Lb/c/E;->m()V
+Lb/c/d/E;->m()V
+"""
+        }
+        analyzer = self.create_analyzer_for_test(fs)
+        analyzer.load_all_flags()
+
+        result = ab.Result()
+        analyzer.compute_hiddenapi_package_properties(result)
+        self.assertEqual(["a.b"], list(result.split_packages.keys()))
+
+        reason = result.split_packages["a.b"]
+        self.assertEqual(["a.b.C"], reason.bcpf)
+        self.assertEqual(["a.b.D", "a.b.E"], reason.other)
+
+        self.assertEqual(["a.b.c"], list(result.single_packages.keys()))
+
+        reason = result.single_packages["a.b.c"]
+        self.assertEqual(["a.b.c"], reason.bcpf)
+        self.assertEqual(["a.b.c.d", "a.b.c.d.e"], reason.other)
+
+        self.assertEqual(["b"], result.package_prefixes)
+
+
+class TestHiddenApiPropertyChange(unittest.TestCase):
+
+    def setUp(self):
+        # Create a temporary directory
+        self.test_dir = tempfile.mkdtemp()
+
+    def tearDown(self):
+        # Remove the directory after the test
+        shutil.rmtree(self.test_dir)
+
+    def check_change_fix(self, change, bpmodify_output, expected):
+        file = os.path.join(self.test_dir, "Android.bp")
+
+        with open(file, "w", encoding="utf8") as tio:
+            tio.write(bpmodify_output.strip("\n"))
+
+        bpmodify_runner = ab.BpModifyRunner(
+            os.path.join(os.path.dirname(sys.argv[0]), "bpmodify"))
+        change.fix_bp_file(file, "bcpf", bpmodify_runner)
+
+        with open(file, "r", encoding="utf8") as tio:
+            contents = tio.read()
+            self.assertEqual(expected.lstrip("\n"), contents)
+
+    def check_change_snippet(self, change, expected):
+        snippet = change.snippet("        ")
+        self.assertEqual(expected, snippet)
+
+    def test_change_property_with_value_no_comment(self):
+        change = ab.HiddenApiPropertyChange(
+            property_name="split_packages",
+            values=["android.provider"],
+        )
+
+        self.check_change_snippet(
+            change, """
+        split_packages: [
+            "android.provider",
+        ],
+""")
+
+        self.check_change_fix(
+            change, """
+bootclasspath_fragment {
+    name: "bcpf",
+
+    // modified by the Soong or platform compat team.
+    hidden_api: {
+        max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+        split_packages: [
+            "android.provider",
+        ],
+    },
+}
+""", """
+bootclasspath_fragment {
+    name: "bcpf",
+
+    // modified by the Soong or platform compat team.
+    hidden_api: {
+        max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+        split_packages: [
+            "android.provider",
+        ],
+    },
+}
+""")
+
+    def test_change_property_with_value_and_comment(self):
+        change = ab.HiddenApiPropertyChange(
+            property_name="split_packages",
+            values=["android.provider"],
+            property_comment=_MULTI_LINE_COMMENT,
+        )
+
+        self.check_change_snippet(
+            change, """
+        // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Ut arcu
+        // justo, bibendum eu malesuada vel, fringilla in odio. Etiam gravida
+        // ultricies sem tincidunt luctus.
+        split_packages: [
+            "android.provider",
+        ],
+""")
+
+        self.check_change_fix(
+            change, """
+bootclasspath_fragment {
+    name: "bcpf",
+
+    // modified by the Soong or platform compat team.
+    hidden_api: {
+        max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+        split_packages: [
+            "android.provider",
+        ],
+
+        single_packages: [
+            "android.system",
+        ],
+
+    },
+}
+""", """
+bootclasspath_fragment {
+    name: "bcpf",
+
+    // modified by the Soong or platform compat team.
+    hidden_api: {
+        max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+
+        // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Ut arcu
+        // justo, bibendum eu malesuada vel, fringilla in odio. Etiam gravida
+        // ultricies sem tincidunt luctus.
+        split_packages: [
+            "android.provider",
+        ],
+
+        single_packages: [
+            "android.system",
+        ],
+
+    },
+}
+""")
+
+    def test_set_property_with_value_and_comment(self):
+        change = ab.HiddenApiPropertyChange(
+            property_name="split_packages",
+            values=["another.provider", "other.system"],
+            property_comment=_MULTI_LINE_COMMENT,
+            action=ab.PropertyChangeAction.REPLACE,
+        )
+
+        self.check_change_snippet(
+            change, """
+        // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Ut arcu
+        // justo, bibendum eu malesuada vel, fringilla in odio. Etiam gravida
+        // ultricies sem tincidunt luctus.
+        split_packages: [
+            "another.provider",
+            "other.system",
+        ],
+""")
+
+        self.check_change_fix(
+            change, """
+bootclasspath_fragment {
+    name: "bcpf",
+
+    // modified by the Soong or platform compat team.
+    hidden_api: {
+        max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+        split_packages: [
+            "another.provider",
+            "other.system",
+        ],
+    },
+}
+""", """
+bootclasspath_fragment {
+    name: "bcpf",
+
+    // modified by the Soong or platform compat team.
+    hidden_api: {
+        max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+
+        // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Ut arcu
+        // justo, bibendum eu malesuada vel, fringilla in odio. Etiam gravida
+        // ultricies sem tincidunt luctus.
+        split_packages: [
+            "another.provider",
+            "other.system",
+        ],
+    },
+}
+""")
+
+    def test_set_property_with_no_value_or_comment(self):
+        change = ab.HiddenApiPropertyChange(
+            property_name="split_packages",
+            values=[],
+            action=ab.PropertyChangeAction.REPLACE,
+        )
+
+        self.check_change_snippet(change, """
+        split_packages: [],
+""")
+
+        self.check_change_fix(
+            change, """
+bootclasspath_fragment {
+    name: "bcpf",
+
+    // modified by the Soong or platform compat team.
+    hidden_api: {
+        max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+        split_packages: [
+            "another.provider",
+            "other.system",
+        ],
+        package_prefixes: ["android.provider"],
+    },
+}
+""", """
+bootclasspath_fragment {
+    name: "bcpf",
+
+    // modified by the Soong or platform compat team.
+    hidden_api: {
+        max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+        split_packages: [],
+        package_prefixes: ["android.provider"],
+    },
+}
+""")
+
+    def test_set_empty_property_with_no_value_or_comment(self):
+        change = ab.HiddenApiPropertyChange(
+            property_name="split_packages",
+            values=[],
+            action=ab.PropertyChangeAction.REPLACE,
+        )
+
+        self.check_change_snippet(change, """
+        split_packages: [],
+""")
+
+        self.check_change_fix(
+            change, """
+bootclasspath_fragment {
+    name: "bcpf",
+
+    // modified by the Soong or platform compat team.
+    hidden_api: {
+        max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+        split_packages: [],
+        package_prefixes: ["android.provider"],
+    },
+}
+""", """
+bootclasspath_fragment {
+    name: "bcpf",
+
+    // modified by the Soong or platform compat team.
+    hidden_api: {
+        max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+        split_packages: [],
+        package_prefixes: ["android.provider"],
+    },
+}
+""")
+
+
+if __name__ == "__main__":
+    unittest.main(verbosity=3)
diff --git a/scripts/hiddenapi/signature_patterns.py b/scripts/hiddenapi/signature_patterns.py
index e75ee95..5a82be7 100755
--- a/scripts/hiddenapi/signature_patterns.py
+++ b/scripts/hiddenapi/signature_patterns.py
@@ -25,92 +25,138 @@
 import sys
 
 
-def dict_reader(csvfile):
+def dict_reader(csv_file):
     return csv.DictReader(
-        csvfile, delimiter=',', quotechar='|', fieldnames=['signature'])
+        csv_file, delimiter=',', quotechar='|', fieldnames=['signature'])
 
 
-def dotPackageToSlashPackage(pkg):
+def dot_package_to_slash_package(pkg):
     return pkg.replace('.', '/')
 
 
-def slashPackageToDotPackage(pkg):
+def dot_packages_to_slash_packages(pkgs):
+    return [dot_package_to_slash_package(p) for p in pkgs]
+
+
+def slash_package_to_dot_package(pkg):
     return pkg.replace('/', '.')
 
 
-def isSplitPackage(splitPackages, pkg):
-    return splitPackages and (pkg in splitPackages or '*' in splitPackages)
+def slash_packages_to_dot_packages(pkgs):
+    return [slash_package_to_dot_package(p) for p in pkgs]
 
 
-def matchedByPackagePrefixPattern(packagePrefixes, prefix):
-    for packagePrefix in packagePrefixes:
+def is_split_package(split_packages, pkg):
+    return split_packages and (pkg in split_packages or '*' in split_packages)
+
+
+def matched_by_package_prefix_pattern(package_prefixes, prefix):
+    for packagePrefix in package_prefixes:
         if prefix == packagePrefix:
             return packagePrefix
-        elif prefix.startswith(packagePrefix) and prefix[len(
-                packagePrefix)] == '/':
+        if (prefix.startswith(packagePrefix) and
+                prefix[len(packagePrefix)] == '/'):
             return packagePrefix
     return False
 
 
-def validate_package_prefixes(splitPackages, packagePrefixes):
+def validate_package_is_not_matched_by_package_prefix(package_type, pkg,
+                                                      package_prefixes):
+    package_prefix = matched_by_package_prefix_pattern(package_prefixes, pkg)
+    if package_prefix:
+        # A package prefix matches the package.
+        package_for_output = slash_package_to_dot_package(pkg)
+        package_prefix_for_output = slash_package_to_dot_package(package_prefix)
+        return [
+            f'{package_type} {package_for_output} is matched by '
+            f'package prefix {package_prefix_for_output}'
+        ]
+    return []
+
+
+def validate_package_prefixes(split_packages, single_packages,
+                              package_prefixes):
     # If there are no package prefixes then there is no possible conflict
     # between them and the split packages.
-    if len(packagePrefixes) == 0:
-        return
+    if len(package_prefixes) == 0:
+        return []
 
     # Check to make sure that the split packages and package prefixes do not
     # overlap.
     errors = []
-    for splitPackage in splitPackages:
-        if splitPackage == '*':
+    for split_package in split_packages:
+        if split_package == '*':
             # A package prefix matches a split package.
-            packagePrefixesForOutput = ', '.join(
-                map(slashPackageToDotPackage, packagePrefixes))
+            package_prefixes_for_output = ', '.join(
+                slash_packages_to_dot_packages(package_prefixes))
             errors.append(
-                'split package "*" conflicts with all package prefixes %s\n'
-                '    add split_packages:[] to fix' % packagePrefixesForOutput)
+                "split package '*' conflicts with all package prefixes "
+                f'{package_prefixes_for_output}\n'
+                '    add split_packages:[] to fix')
         else:
-            packagePrefix = matchedByPackagePrefixPattern(
-                packagePrefixes, splitPackage)
-            if packagePrefix:
-                # A package prefix matches a split package.
-                splitPackageForOutput = slashPackageToDotPackage(splitPackage)
-                packagePrefixForOutput = slashPackageToDotPackage(packagePrefix)
-                errors.append(
-                    'split package %s is matched by package prefix %s' %
-                    (splitPackageForOutput, packagePrefixForOutput))
+            errs = validate_package_is_not_matched_by_package_prefix(
+                'split package', split_package, package_prefixes)
+            errors.extend(errs)
+
+    # Check to make sure that the single packages and package prefixes do not
+    # overlap.
+    for single_package in single_packages:
+        errs = validate_package_is_not_matched_by_package_prefix(
+            'single package', single_package, package_prefixes)
+        errors.extend(errs)
     return errors
 
 
-def validate_split_packages(splitPackages):
+def validate_split_packages(split_packages):
     errors = []
-    if '*' in splitPackages and len(splitPackages) > 1:
+    if '*' in split_packages and len(split_packages) > 1:
         errors.append('split packages are invalid as they contain both the'
                       ' wildcard (*) and specific packages, use the wildcard or'
                       ' specific packages, not a mixture')
     return errors
 
 
-def produce_patterns_from_file(file, splitPackages=None, packagePrefixes=None):
-    with open(file, 'r') as f:
-        return produce_patterns_from_stream(f, splitPackages, packagePrefixes)
+def validate_single_packages(split_packages, single_packages):
+    overlaps = []
+    for single_package in single_packages:
+        if single_package in split_packages:
+            overlaps.append(single_package)
+    if overlaps:
+        indented = ''.join([f'\n    {o}' for o in overlaps])
+        return [
+            f'single_packages and split_packages overlap, please ensure the '
+            f'following packages are only present in one:{indented}'
+        ]
+    return []
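+# Examples (illustrative only):
+#   validate_package_prefixes({'java/split'}, [], ['java']) reports that split
+#   package java.split is matched by package prefix java.
+#   validate_single_packages({'java/pkg'}, ['java/pkg']) reports the overlap.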
+
+
+def produce_patterns_from_file(file,
+                               split_packages=None,
+                               single_packages=None,
+                               package_prefixes=None):
+    with open(file, 'r', encoding='utf8') as f:
+        return produce_patterns_from_stream(f, split_packages, single_packages,
+                                            package_prefixes)
 
 
 def produce_patterns_from_stream(stream,
-                                 splitPackages=None,
-                                 packagePrefixes=None):
-    splitPackages = set(splitPackages or [])
-    packagePrefixes = list(packagePrefixes or [])
+                                 split_packages=None,
+                                 single_packages=None,
+                                 package_prefixes=None):
+    split_packages = set(split_packages or [])
+    single_packages = set(single_packages or [])
+    package_prefixes = list(package_prefixes or [])
     # Read in all the signatures into a list and remove any unnecessary class
     # and member names.
     patterns = set()
+    unmatched_packages = set()
     for row in dict_reader(stream):
         signature = row['signature']
         text = signature.removeprefix('L')
         # Remove the class specific member signature
         pieces = text.split(';->')
-        qualifiedClassName = pieces[0]
-        pieces = qualifiedClassName.rsplit('/', maxsplit=1)
+        qualified_class_name = pieces[0]
+        pieces = qualified_class_name.rsplit('/', maxsplit=1)
         pkg = pieces[0]
         # If the package is split across multiple modules then it cannot be used
         # to select the subset of the monolithic flags that this module
@@ -121,27 +167,54 @@
         # If the package is not split then every class in the package must be
         # provided by this module so there is no need to list the classes
         # explicitly so just use the package name instead.
-        if isSplitPackage(splitPackages, pkg):
+        if is_split_package(split_packages, pkg):
             # Remove inner class names.
-            pieces = qualifiedClassName.split('$', maxsplit=1)
+            pieces = qualified_class_name.split('$', maxsplit=1)
             pattern = pieces[0]
-        else:
+            patterns.add(pattern)
+        elif pkg in single_packages:
             # Add a * to ensure that the pattern matches the classes in that
             # package.
             pattern = pkg + '/*'
-        patterns.add(pattern)
+            patterns.add(pattern)
+        else:
+            unmatched_packages.add(pkg)
+
+    # Remove any unmatched packages that would be matched by a package prefix
+    # pattern.
+    unmatched_packages = [
+        p for p in unmatched_packages
+        if not matched_by_package_prefix_pattern(package_prefixes, p)
+    ]
+    errors = []
+    if unmatched_packages:
+        unmatched_packages.sort()
+        indented = ''.join([
+            f'\n    {slash_package_to_dot_package(p)}'
+            for p in unmatched_packages
+        ])
+        errors.append('The following packages were unexpected, please add them '
+                      'to one of the hidden_api properties, split_packages, '
+                      f'single_packages or package_prefixes:{indented}')
 
     # Remove any patterns that would be matched by a package prefix pattern.
-    patterns = list(
-        filter(lambda p: not matchedByPackagePrefixPattern(packagePrefixes, p),
-               patterns))
+    patterns = [
+        p for p in patterns
+        if not matched_by_package_prefix_pattern(package_prefixes, p)
+    ]
     # Add the package prefix patterns to the list. Add a ** to ensure that each
     # package prefix pattern will match the classes in that package and all
     # sub-packages.
-    patterns = patterns + list(map(lambda x: x + '/**', packagePrefixes))
+    patterns = patterns + [f'{p}/**' for p in package_prefixes]
     # Sort the patterns.
     patterns.sort()
-    return patterns
+    return patterns, errors
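+# Example (illustrative only): for a row with signature
+# Ljava/lang/Object;->hashCode()I, split_packages={'java/lang'} produces the
+# pattern 'java/lang/Object', single_packages={'java/lang'} produces
+# 'java/lang/*', and package_prefixes=['java'] produces 'java/**'; a package
+# matched by none of them is reported in the returned errors list.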
+
+
+def print_and_exit(errors):
+    for error in errors:
+        print(error)
+    sys.exit(1)
 
 
 def main(args):
@@ -155,35 +228,50 @@
     args_parser.add_argument(
         '--split-package',
         action='append',
-        help='A package that is split across multiple bootclasspath_fragment modules'
-    )
+        help='A package that is split across multiple bootclasspath_fragment '
+        'modules')
     args_parser.add_argument(
         '--package-prefix',
         action='append',
         help='A package prefix unique to this set of flags')
+    args_parser.add_argument(
+        '--single-package',
+        action='append',
+        help='A single package unique to this set of flags')
     args_parser.add_argument('--output', help='Generated signature prefixes')
     args = args_parser.parse_args(args)
 
-    splitPackages = set(map(dotPackageToSlashPackage, args.split_package or []))
-    errors = validate_split_packages(splitPackages)
+    split_packages = set(
+        dot_packages_to_slash_packages(args.split_package or []))
+    errors = validate_split_packages(split_packages)
+    if errors:
+        print_and_exit(errors)
 
-    packagePrefixes = list(
-        map(dotPackageToSlashPackage, args.package_prefix or []))
+    single_packages = list(
+        dot_packages_to_slash_packages(args.single_package or []))
 
-    if not errors:
-        errors = validate_package_prefixes(splitPackages, packagePrefixes)
+    errors = validate_single_packages(split_packages, single_packages)
+    if errors:
+        print_and_exit(errors)
+
+    package_prefixes = dot_packages_to_slash_packages(args.package_prefix or [])
+
+    errors = validate_package_prefixes(split_packages, single_packages,
+                                       package_prefixes)
+    if errors:
+        print_and_exit(errors)
+
+    # Read in all the patterns into a list.
+    patterns, errors = produce_patterns_from_file(args.flags, split_packages,
+                                                  single_packages,
+                                                  package_prefixes)
 
     if errors:
-        for error in errors:
-            print(error)
-        sys.exit(1)
-
-    # Read in all the patterns into a list.
-    patterns = produce_patterns_from_file(args.flags, splitPackages,
-                                          packagePrefixes)
+        print_and_exit(errors)
 
     # Write out all the patterns.
-    with open(args.output, 'w') as outputFile:
+    with open(args.output, 'w', encoding='utf8') as outputFile:
         for pattern in patterns:
             outputFile.write(pattern)
             outputFile.write('\n')
diff --git a/scripts/hiddenapi/signature_patterns_test.py b/scripts/hiddenapi/signature_patterns_test.py
index b59dfd7..90b3d0b 100755
--- a/scripts/hiddenapi/signature_patterns_test.py
+++ b/scripts/hiddenapi/signature_patterns_test.py
@@ -17,37 +17,52 @@
 import io
 import unittest
 
-from signature_patterns import *  #pylint: disable=unused-wildcard-import,wildcard-import
+import signature_patterns
 
 
 class TestGeneratedPatterns(unittest.TestCase):
 
-    csvFlags = """
+    csv_flags = """
 Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V,blocked
 Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;,public-api
 Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api
 Ljava/lang/Object;->toString()Ljava/lang/String;,blocked
 """
 
-    def produce_patterns_from_string(self,
-                                     csv,
-                                     splitPackages=None,
-                                     packagePrefixes=None):
-        with io.StringIO(csv) as f:
-            return produce_patterns_from_stream(f, splitPackages,
-                                                packagePrefixes)
+    @staticmethod
+    def produce_patterns_from_string(csv_text,
+                                     split_packages=None,
+                                     single_packages=None,
+                                     package_prefixes=None):
+        with io.StringIO(csv_text) as f:
+            return signature_patterns.produce_patterns_from_stream(
+                f, split_packages, single_packages, package_prefixes)
+
+    def test_generate_unmatched(self):
+        _, errors = self.produce_patterns_from_string(
+            TestGeneratedPatterns.csv_flags)
+        self.assertEqual([
+            'The following packages were unexpected, please add them to one of '
+            'the hidden_api properties, split_packages, single_packages or '
+            'package_prefixes:\n'
+            '    java.lang'
+        ], errors)
 
     def test_generate_default(self):
-        patterns = self.produce_patterns_from_string(
-            TestGeneratedPatterns.csvFlags)
+        patterns, errors = self.produce_patterns_from_string(
+            TestGeneratedPatterns.csv_flags, single_packages=['java/lang'])
+        self.assertEqual([], errors)
+
         expected = [
             'java/lang/*',
         ]
         self.assertEqual(expected, patterns)
 
     def test_generate_split_package(self):
-        patterns = self.produce_patterns_from_string(
-            TestGeneratedPatterns.csvFlags, splitPackages={'java/lang'})
+        patterns, errors = self.produce_patterns_from_string(
+            TestGeneratedPatterns.csv_flags, split_packages={'java/lang'})
+        self.assertEqual([], errors)
+
         expected = [
             'java/lang/Character',
             'java/lang/Object',
@@ -56,8 +71,10 @@
         self.assertEqual(expected, patterns)
 
     def test_generate_split_package_wildcard(self):
-        patterns = self.produce_patterns_from_string(
-            TestGeneratedPatterns.csvFlags, splitPackages={'*'})
+        patterns, errors = self.produce_patterns_from_string(
+            TestGeneratedPatterns.csv_flags, split_packages={'*'})
+        self.assertEqual([], errors)
+
         expected = [
             'java/lang/Character',
             'java/lang/Object',
@@ -66,23 +83,27 @@
         self.assertEqual(expected, patterns)
 
     def test_generate_package_prefix(self):
-        patterns = self.produce_patterns_from_string(
-            TestGeneratedPatterns.csvFlags, packagePrefixes={'java/lang'})
+        patterns, errors = self.produce_patterns_from_string(
+            TestGeneratedPatterns.csv_flags, package_prefixes={'java/lang'})
+        self.assertEqual([], errors)
+
         expected = [
             'java/lang/**',
         ]
         self.assertEqual(expected, patterns)
 
     def test_generate_package_prefix_top_package(self):
-        patterns = self.produce_patterns_from_string(
-            TestGeneratedPatterns.csvFlags, packagePrefixes={'java'})
+        patterns, errors = self.produce_patterns_from_string(
+            TestGeneratedPatterns.csv_flags, package_prefixes={'java'})
+        self.assertEqual([], errors)
+
         expected = [
             'java/**',
         ]
         self.assertEqual(expected, patterns)
 
     def test_split_package_wildcard_conflicts_with_other_split_packages(self):
-        errors = validate_split_packages({'*', 'java'})
+        errors = signature_patterns.validate_split_packages({'*', 'java'})
         expected = [
             'split packages are invalid as they contain both the wildcard (*)'
             ' and specific packages, use the wildcard or specific packages,'
@@ -91,21 +112,39 @@
         self.assertEqual(expected, errors)
 
     def test_split_package_wildcard_conflicts_with_package_prefixes(self):
-        errors = validate_package_prefixes({'*'}, packagePrefixes={'java'})
+        errors = signature_patterns.validate_package_prefixes(
+            {'*'}, [], package_prefixes={'java'})
         expected = [
-            'split package "*" conflicts with all package prefixes java\n'
+            "split package '*' conflicts with all package prefixes java\n"
             '    add split_packages:[] to fix',
         ]
         self.assertEqual(expected, errors)
 
-    def test_split_package_conflict(self):
-        errors = validate_package_prefixes({'java/split'},
-                                           packagePrefixes={'java'})
+    def test_split_package_conflicts_with_package_prefixes(self):
+        errors = signature_patterns.validate_package_prefixes(
+            {'java/split'}, [], package_prefixes={'java'})
         expected = [
             'split package java.split is matched by package prefix java',
         ]
         self.assertEqual(expected, errors)
 
+    def test_single_package_conflicts_with_package_prefixes(self):
+        errors = signature_patterns.validate_package_prefixes(
+            {}, ['java/single'], package_prefixes={'java'})
+        expected = [
+            'single package java.single is matched by package prefix java',
+        ]
+        self.assertEqual(expected, errors)
+
+    def test_single_package_conflicts_with_split_packages(self):
+        errors = signature_patterns.validate_single_packages({'java/pkg'},
+                                                             ['java/pkg'])
+        expected = [
+            'single_packages and split_packages overlap, please ensure the '
+            'following packages are only present in one:\n    java/pkg'
+        ]
+        self.assertEqual(expected, errors)
+
 
 if __name__ == '__main__':
     unittest.main(verbosity=2)
diff --git a/scripts/hiddenapi/signature_trie.py b/scripts/hiddenapi/signature_trie.py
new file mode 100644
index 0000000..3650fa1
--- /dev/null
+++ b/scripts/hiddenapi/signature_trie.py
@@ -0,0 +1,345 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Verify that one set of hidden API flags is a subset of another."""
+import dataclasses
+import typing
+
+from itertools import chain
+
+
+@dataclasses.dataclass()
+class Node:
+    """A node in the signature trie."""
+
+    # The type of the node.
+    #
+    # Leaf nodes are of type "member".
+    # Interior nodes can be either "package", or "class".
+    type: str
+
+    # The selector of the node.
+    #
+    # That is a string that can be used to select the node, e.g. in a pattern
+    # that is passed to InteriorNode.get_matching_rows().
+    selector: str
+
+    def values(self, selector):
+        """Get the values from a set of selected nodes.
+
+        :param selector: a function that can be applied to a key in the nodes
+            attribute to determine whether to return its values.
+
+        :return: A list of iterables of all the values associated with
+            this node and its children.
+        """
+        values = []
+        self.append_values(values, selector)
+        return values
+
+    def append_values(self, values, selector):
+        """Append the values associated with this node and its children.
+
+        For each item (key, child) in nodes the child node's values are returned
+        if and only if the selector returns True when called on its key. A child
+        node's values are all the values associated with it and all its
+        descendant nodes.
+
+        :param selector: a function that can be applied to a key in the nodes
+        attribute to determine whether to return its values.
+        :param values: a list of iterables of values.
+        """
+        raise NotImplementedError("Please Implement this method")
+
+    def child_nodes(self):
+        """Get an iterable of the child nodes of this node."""
+        raise NotImplementedError("Please Implement this method")
+
+
+# pylint: disable=line-too-long
+@dataclasses.dataclass()
+class InteriorNode(Node):
+    """An interior node in a trie.
+
+    Each interior node has a dict that maps from an element of a signature to
+    either another interior node or a leaf. Each interior node represents either
+    a package, class or nested class. Class members are represented by a Leaf.
+
+    Associating the set of flags [public-api] with the signature
+    "Ljava/lang/Object;->String()Ljava/lang/String;" will cause the following
+    nodes to be created:
+    Node()
+    ^- package:java -> Node()
+       ^- package:lang -> Node()
+           ^- class:Object -> Node()
+              ^- member:String()Ljava/lang/String; -> Leaf([public-api])
+
+    Associating the set of flags [blocked,core-platform-api] with the signature
+    "Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;"
+    will cause the following nodes to be created:
+    Node()
+    ^- package:java -> Node()
+       ^- package:lang -> Node()
+           ^- class:Character -> Node()
+              ^- class:UnicodeScript -> Node()
+                 ^- member:of(I)Ljava/lang/Character$UnicodeScript;
+                    -> Leaf([blocked,core-platform-api])
+    """
+
+    # pylint: enable=line-too-long
+
+    # A dict from an element of the signature to the Node/Leaf containing the
+    # next element/value.
+    nodes: typing.Dict[str, Node] = dataclasses.field(default_factory=dict)
+
+    # pylint: disable=line-too-long
+    @staticmethod
+    def signature_to_elements(signature):
+        """Split a signature or a prefix into a number of elements:
+
+        1. The packages (excluding the leading L preceding the first package).
+        2. The class names, from outermost to innermost.
+        3. The member signature.
+        e.g.
+        Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;
+        will be broken down into these elements:
+        1. package:java
+        2. package:lang
+        3. class:Character
+        4. class:UnicodeScript
+        5. member:of(I)Ljava/lang/Character$UnicodeScript;
+        """
+        # Remove the leading L.
+        #  - java/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;
+        text = signature.removeprefix("L")
+        # Split the signature between qualified class name and the class member
+        # signature.
+        #  0 - java/lang/Character$UnicodeScript
+        #  1 - of(I)Ljava/lang/Character$UnicodeScript;
+        parts = text.split(";->")
+        # If there is no member then this will be an empty list.
+        member = parts[1:]
+        # Split the qualified class name into packages, and class name.
+        #  0 - java
+        #  1 - lang
+        #  2 - Character$UnicodeScript
+        elements = parts[0].split("/")
+        last_element = elements[-1]
+        wildcard = []
+        classes = []
+        if "*" in last_element:
+            if last_element not in ("*", "**"):
+                raise Exception(f"Invalid signature '{signature}': invalid "
+                                f"wildcard '{last_element}'")
+            packages = elements[0:-1]
+            # Cannot specify a wildcard and target a specific member
+            if member:
+                raise Exception(f"Invalid signature '{signature}': contains "
+                                f"wildcard '{last_element}' and "
+                                f"member signature '{member[0]}'")
+            wildcard = [last_element]
+        elif last_element.islower():
+            raise Exception(f"Invalid signature '{signature}': last element "
+                            f"'{last_element}' is lower case but should be an "
+                            f"upper case class name or wildcard")
+        else:
+            packages = elements[0:-1]
+            # Split the class name into outer / inner classes
+            #  0 - Character
+            #  1 - UnicodeScript
+            classes = last_element.removesuffix(";").split("$")
+
+        # Assemble the parts into a single list, adding prefixes to identify
+        # the different parts. If a wildcard is provided then it looks something
+        # like this:
+        #  0 - package:java
+        #  1 - package:lang
+        #  2 - *
+        #
+        # Otherwise, it looks something like this:
+        #  0 - package:java
+        #  1 - package:lang
+        #  2 - class:Character
+        #  3 - class:UnicodeScript
+        #  4 - member:of(I)Ljava/lang/Character$UnicodeScript;
+        return list(
+            chain([("package", x) for x in packages],
+                  [("class", x) for x in classes],
+                  [("member", x) for x in member],
+                  [("wildcard", x) for x in wildcard]))
+
+    # pylint: enable=line-too-long
+
+    @staticmethod
+    def split_element(element):
+        element_type, element_value = element
+        return element_type, element_value
+
+    @staticmethod
+    def element_type(element):
+        element_type, _ = InteriorNode.split_element(element)
+        return element_type
+
+    @staticmethod
+    def elements_to_selector(elements):
+        """Compute a selector for a set of elements.
+
+        A selector uniquely identifies a specific Node in the trie. It is
+        essentially a prefix of a signature (without the leading L).
+
+        e.g. a trie containing "Ljava/lang/Object;->String()Ljava/lang/String;"
+        would contain nodes with the following selectors:
+        * "java"
+        * "java/lang"
+        * "java/lang/Object"
+        * "java/lang/Object;->String()Ljava/lang/String;"
+        """
+        signature = ""
+        preceding_type = ""
+        for element in elements:
+            element_type, element_value = InteriorNode.split_element(element)
+            separator = ""
+            if element_type == "package":
+                separator = "/"
+            elif element_type == "class":
+                if preceding_type == "class":
+                    separator = "$"
+                else:
+                    separator = "/"
+            elif element_type == "wildcard":
+                separator = "/"
+            elif element_type == "member":
+                separator += ";->"
+
+            if signature:
+                signature += separator
+
+            signature += element_value
+
+            preceding_type = element_type
+
+        return signature
+
+    def add(self, signature, value, only_if_matches=False):
+        """Associate the value with the specific signature.
+
+        :param signature: the member signature
+        :param value: the value to associate with the signature
+        :param only_if_matches: True if the value is added only if the signature
+             matches at least one of the existing top level packages.
+        :return: n/a
+        """
+        # Split the signature into elements.
+        elements = self.signature_to_elements(signature)
+        # Find the Node associated with the deepest class.
+        node = self
+        for index, element in enumerate(elements[:-1]):
+            if element in node.nodes:
+                node = node.nodes[element]
+            elif only_if_matches and index == 0:
+                return
+            else:
+                selector = self.elements_to_selector(elements[0:index + 1])
+                next_node = InteriorNode(
+                    type=InteriorNode.element_type(element), selector=selector)
+                node.nodes[element] = next_node
+                node = next_node
+        # Add a Leaf containing the value and associate it with the member
+        # signature within the class.
+        last_element = elements[-1]
+        last_element_type = self.element_type(last_element)
+        if last_element_type != "member":
+            raise Exception(
+                f"Invalid signature: {signature}, does not identify a "
+                "specific member")
+        if last_element in node.nodes:
+            raise Exception(f"Duplicate signature: {signature}")
+        leaf = Leaf(
+            type=last_element_type,
+            selector=signature,
+            value=value,
+        )
+        node.nodes[last_element] = leaf
+
+    def get_matching_rows(self, pattern):
+        """Get the values (plural) associated with the pattern.
+
+        e.g. If the pattern is a full signature then this will return a list
+        containing the value associated with that signature.
+
+        If the pattern is a class then this will return a list containing the
+        values associated with all members of that class.
+
+        If the pattern ends with "*" then the preceding part is treated as a
+        package and this will return a list containing the values associated
+        with all the members of all the classes in that package.
+
+        If the pattern ends with "**" then the preceding part is treated
+        as a package and this will return a list containing the values
+        associated with all the members of all the classes in that package and
+        all sub-packages.
+
+        :param pattern: the pattern which could be a complete signature or a
+        class, or package wildcard.
+        :return: an iterable containing all the values associated with the
+        pattern.
+        """
+        elements = self.signature_to_elements(pattern)
+        node = self
+
+        # Include all values from this node and all its children.
+        selector = lambda x: True
+
+        last_element = elements[-1]
+        last_element_type, last_element_value = self.split_element(last_element)
+        if last_element_type == "wildcard":
+            elements = elements[:-1]
+            if last_element_value == "*":
+                # Do not include values from sub-packages.
+                selector = lambda x: InteriorNode.element_type(x) != "package"
+
+        for element in elements:
+            if element in node.nodes:
+                node = node.nodes[element]
+            else:
+                return []
+
+        return node.values(selector)
+
+    def append_values(self, values, selector):
+        for key, node in self.nodes.items():
+            if selector(key):
+                node.append_values(values, lambda x: True)
+
+    def child_nodes(self):
+        return self.nodes.values()
+
+
+@dataclasses.dataclass()
+class Leaf(Node):
+    """A leaf of the trie"""
+
+    # The value associated with this leaf.
+    value: typing.Any
+
+    def append_values(self, values, selector):
+        values.append(self.value)
+
+    def child_nodes(self):
+        return []
+
+
+def signature_trie():
+    return InteriorNode(type="root", selector="")
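+# Example usage (illustrative only, not used by the build):
+#   trie = signature_trie()
+#   trie.add("Ljava/lang/Object;->hashCode()I", "public-api")
+#   trie.get_matching_rows("java/lang/*")   # -> ["public-api"]
+#   trie.get_matching_rows("java/**")       # -> ["public-api"]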
diff --git a/scripts/hiddenapi/signature_trie_test.py b/scripts/hiddenapi/signature_trie_test.py
new file mode 100755
index 0000000..6d4e660
--- /dev/null
+++ b/scripts/hiddenapi/signature_trie_test.py
@@ -0,0 +1,253 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Unit tests for verify_overlaps_test.py."""
+import io
+import unittest
+
+from signature_trie import InteriorNode
+from signature_trie import signature_trie
+
+
+class TestSignatureToElements(unittest.TestCase):
+
+    @staticmethod
+    def signature_to_elements(signature):
+        return InteriorNode.signature_to_elements(signature)
+
+    @staticmethod
+    def elements_to_signature(elements):
+        return InteriorNode.elements_to_selector(elements)
+
+    def test_nested_inner_classes(self):
+        elements = [
+            ("package", "java"),
+            ("package", "lang"),
+            ("class", "ProcessBuilder"),
+            ("class", "Redirect"),
+            ("class", "1"),
+            ("member", "<init>()V"),
+        ]
+        signature = "Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V"
+        self.assertEqual(elements, self.signature_to_elements(signature))
+        self.assertEqual(signature, "L" + self.elements_to_signature(elements))
+
+    def test_basic_member(self):
+        elements = [
+            ("package", "java"),
+            ("package", "lang"),
+            ("class", "Object"),
+            ("member", "hashCode()I"),
+        ]
+        signature = "Ljava/lang/Object;->hashCode()I"
+        self.assertEqual(elements, self.signature_to_elements(signature))
+        self.assertEqual(signature, "L" + self.elements_to_signature(elements))
+
+    def test_double_dollar_class(self):
+        elements = [
+            ("package", "java"),
+            ("package", "lang"),
+            ("class", "CharSequence"),
+            ("class", ""),
+            ("class", "ExternalSyntheticLambda0"),
+            ("member", "<init>(Ljava/lang/CharSequence;)V"),
+        ]
+        signature = "Ljava/lang/CharSequence$$ExternalSyntheticLambda0;" \
+                    "-><init>(Ljava/lang/CharSequence;)V"
+        self.assertEqual(elements, self.signature_to_elements(signature))
+        self.assertEqual(signature, "L" + self.elements_to_signature(elements))
+
+    def test_no_member(self):
+        elements = [
+            ("package", "java"),
+            ("package", "lang"),
+            ("class", "CharSequence"),
+            ("class", ""),
+            ("class", "ExternalSyntheticLambda0"),
+        ]
+        signature = "Ljava/lang/CharSequence$$ExternalSyntheticLambda0"
+        self.assertEqual(elements, self.signature_to_elements(signature))
+        self.assertEqual(signature, "L" + self.elements_to_signature(elements))
+
+    def test_wildcard(self):
+        elements = [
+            ("package", "java"),
+            ("package", "lang"),
+            ("wildcard", "*"),
+        ]
+        signature = "java/lang/*"
+        self.assertEqual(elements, self.signature_to_elements(signature))
+        self.assertEqual(signature, self.elements_to_signature(elements))
+
+    def test_recursive_wildcard(self):
+        elements = [
+            ("package", "java"),
+            ("package", "lang"),
+            ("wildcard", "**"),
+        ]
+        signature = "java/lang/**"
+        self.assertEqual(elements, self.signature_to_elements(signature))
+        self.assertEqual(signature, self.elements_to_signature(elements))
+
+    def test_no_packages_wildcard(self):
+        elements = [
+            ("wildcard", "*"),
+        ]
+        signature = "*"
+        self.assertEqual(elements, self.signature_to_elements(signature))
+        self.assertEqual(signature, self.elements_to_signature(elements))
+
+    def test_no_packages_recursive_wildcard(self):
+        elements = [
+            ("wildcard", "**"),
+        ]
+        signature = "**"
+        self.assertEqual(elements, self.signature_to_elements(signature))
+        self.assertEqual(signature, self.elements_to_signature(elements))
+
+    def test_invalid_no_class_or_wildcard(self):
+        signature = "java/lang"
+        with self.assertRaises(Exception) as context:
+            self.signature_to_elements(signature)
+        self.assertIn(
+            "last element 'lang' is lower case but should be an "
+            "upper case class name or wildcard", str(context.exception))
+
+    def test_non_standard_class_name(self):
+        elements = [
+            ("package", "javax"),
+            ("package", "crypto"),
+            ("class", "extObjectInputStream"),
+        ]
+        signature = "Ljavax/crypto/extObjectInputStream"
+        self.assertEqual(elements, self.signature_to_elements(signature))
+        self.assertEqual(signature, "L" + self.elements_to_signature(elements))
+
+    def test_invalid_pattern_wildcard(self):
+        pattern = "Ljava/lang/Class*"
+        with self.assertRaises(Exception) as context:
+            self.signature_to_elements(pattern)
+        self.assertIn("invalid wildcard 'Class*'", str(context.exception))
+
+    def test_invalid_pattern_wildcard_and_member(self):
+        pattern = "Ljava/lang/*;->hashCode()I"
+        with self.assertRaises(Exception) as context:
+            self.signature_to_elements(pattern)
+        self.assertIn(
+            "contains wildcard '*' and member signature 'hashCode()I'",
+            str(context.exception))
+
+
+class TestValues(unittest.TestCase):
+    def test_add_then_get(self):
+        trie = signature_trie()
+        trie.add("La/b/C;->l()", 1)
+        trie.add("La/b/C$D;->m()", "A")
+        trie.add("La/b/C$D;->n()", {})
+
+        package_a_node = next(iter(trie.child_nodes()))
+        self.assertEqual("package", package_a_node.type)
+        self.assertEqual("a", package_a_node.selector)
+
+        package_b_node = next(iter(package_a_node.child_nodes()))
+        self.assertEqual("package", package_b_node.type)
+        self.assertEqual("a/b", package_b_node.selector)
+
+        class_c_node = next(iter(package_b_node.child_nodes()))
+        self.assertEqual("class", class_c_node.type)
+        self.assertEqual("a/b/C", class_c_node.selector)
+
+        self.assertEqual([1, "A", {}], class_c_node.values(lambda _: True))
+
+class TestGetMatchingRows(unittest.TestCase):
+    extractInput = """
+Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;
+Ljava/lang/Character;->serialVersionUID:J
+Ljava/lang/Object;->hashCode()I
+Ljava/lang/Object;->toString()Ljava/lang/String;
+Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V
+Ljava/util/zip/ZipFile;-><clinit>()V
+"""
+
+    def read_trie(self):
+        trie = signature_trie()
+        with io.StringIO(self.extract_input.strip()) as f:
+            for line in iter(f.readline, ""):
+                line = line.rstrip()
+                trie.add(line, line)
+        return trie
+
+    def check_patterns(self, pattern, expected):
+        trie = self.read_trie()
+        self.check_node_patterns(trie, pattern, expected)
+
+    def check_node_patterns(self, node, pattern, expected):
+        actual = list(node.get_matching_rows(pattern))
+        actual.sort()
+        self.assertEqual(expected, actual)
+
+    def test_member_pattern(self):
+        self.check_patterns("java/util/zip/ZipFile;-><clinit>()V",
+                            ["Ljava/util/zip/ZipFile;-><clinit>()V"])
+
+    def test_class_pattern(self):
+        self.check_patterns("java/lang/Object", [
+            "Ljava/lang/Object;->hashCode()I",
+            "Ljava/lang/Object;->toString()Ljava/lang/String;",
+        ])
+
+    # pylint: disable=line-too-long
+    def test_nested_class_pattern(self):
+        self.check_patterns("java/lang/Character", [
+            "Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;",
+            "Ljava/lang/Character;->serialVersionUID:J",
+        ])
+
+    def test_wildcard(self):
+        self.check_patterns("java/lang/*", [
+            "Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;",
+            "Ljava/lang/Character;->serialVersionUID:J",
+            "Ljava/lang/Object;->hashCode()I",
+            "Ljava/lang/Object;->toString()Ljava/lang/String;",
+            "Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V",
+        ])
+
+    def test_recursive_wildcard(self):
+        self.check_patterns("java/**", [
+            "Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;",
+            "Ljava/lang/Character;->serialVersionUID:J",
+            "Ljava/lang/Object;->hashCode()I",
+            "Ljava/lang/Object;->toString()Ljava/lang/String;",
+            "Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V",
+            "Ljava/util/zip/ZipFile;-><clinit>()V",
+        ])
+
+    def test_node_wildcard(self):
+        trie = self.read_trie()
+        node = list(trie.child_nodes())[0]
+        self.check_node_patterns(node, "**", [
+            "Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;",
+            "Ljava/lang/Character;->serialVersionUID:J",
+            "Ljava/lang/Object;->hashCode()I",
+            "Ljava/lang/Object;->toString()Ljava/lang/String;",
+            "Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V",
+            "Ljava/util/zip/ZipFile;-><clinit>()V",
+        ])
+
+    # pylint: enable=line-too-long
+
+
+if __name__ == "__main__":
+    unittest.main(verbosity=2)
diff --git a/scripts/hiddenapi/verify_overlaps.py b/scripts/hiddenapi/verify_overlaps.py
index 4cd7e63..f985a49 100755
--- a/scripts/hiddenapi/verify_overlaps.py
+++ b/scripts/hiddenapi/verify_overlaps.py
@@ -13,253 +13,28 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-"""Verify that one set of hidden API flags is a subset of another.
-"""
+"""Verify that one set of hidden API flags is a subset of another."""
 
 import argparse
 import csv
 import sys
 from itertools import chain
 
-#pylint: disable=line-too-long
-class InteriorNode:
-    """An interior node in a trie.
-
-    Each interior node has a dict that maps from an element of a signature to
-    either another interior node or a leaf. Each interior node represents either
-    a package, class or nested class. Class members are represented by a Leaf.
-
-    Associating the set of flags [public-api] with the signature
-    "Ljava/lang/Object;->String()Ljava/lang/String;" will cause the following
-    nodes to be created:
-    Node()
-    ^- package:java -> Node()
-       ^- package:lang -> Node()
-           ^- class:Object -> Node()
-              ^- member:String()Ljava/lang/String; -> Leaf([public-api])
-
-    Associating the set of flags [blocked,core-platform-api] with the signature
-    "Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;"
-    will cause the following nodes to be created:
-    Node()
-    ^- package:java -> Node()
-       ^- package:lang -> Node()
-           ^- class:Character -> Node()
-              ^- class:UnicodeScript -> Node()
-                 ^- member:of(I)Ljava/lang/Character$UnicodeScript;
-                    -> Leaf([blocked,core-platform-api])
-
-    Attributes:
-        nodes: a dict from an element of the signature to the Node/Leaf
-          containing the next element/value.
-    """
-    #pylint: enable=line-too-long
-
-    def __init__(self):
-        self.nodes = {}
-
-    #pylint: disable=line-too-long
-    def signatureToElements(self, signature):
-        """Split a signature or a prefix into a number of elements:
-        1. The packages (excluding the leading L preceding the first package).
-        2. The class names, from outermost to innermost.
-        3. The member signature.
-        e.g.
-        Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;
-        will be broken down into these elements:
-        1. package:java
-        2. package:lang
-        3. class:Character
-        4. class:UnicodeScript
-        5. member:of(I)Ljava/lang/Character$UnicodeScript;
-        """
-        # Remove the leading L.
-        #  - java/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;
-        text = signature.removeprefix("L")
-        # Split the signature between qualified class name and the class member
-        # signature.
-        #  0 - java/lang/Character$UnicodeScript
-        #  1 - of(I)Ljava/lang/Character$UnicodeScript;
-        parts = text.split(";->")
-        member = parts[1:]
-        # Split the qualified class name into packages, and class name.
-        #  0 - java
-        #  1 - lang
-        #  2 - Character$UnicodeScript
-        elements = parts[0].split("/")
-        packages = elements[0:-1]
-        className = elements[-1]
-        if className in ("*" , "**"): #pylint: disable=no-else-return
-            # Cannot specify a wildcard and target a specific member
-            if len(member) != 0:
-                raise Exception(
-                    "Invalid signature %s: contains wildcard %s and member " \
-                    "signature %s"
-                    % (signature, className, member[0]))
-            wildcard = [className]
-            # Assemble the parts into a single list, adding prefixes to identify
-            # the different parts.
-            #  0 - package:java
-            #  1 - package:lang
-            #  2 - *
-            return list(
-                chain(["package:" + x for x in packages], wildcard))
-        else:
-            # Split the class name into outer / inner classes
-            #  0 - Character
-            #  1 - UnicodeScript
-            classes = className.split("$")
-            # Assemble the parts into a single list, adding prefixes to identify
-            # the different parts.
-            #  0 - package:java
-            #  1 - package:lang
-            #  2 - class:Character
-            #  3 - class:UnicodeScript
-            #  4 - member:of(I)Ljava/lang/Character$UnicodeScript;
-            return list(
-                chain(
-                    ["package:" + x for x in packages],
-                    ["class:" + x for x in classes],
-                    ["member:" + x for x in member]))
-    #pylint: enable=line-too-long
-
-    def add(self, signature, value):
-        """Associate the value with the specific signature.
-
-        :param signature: the member signature
-        :param value: the value to associated with the signature
-        :return: n/a
-        """
-        # Split the signature into elements.
-        elements = self.signatureToElements(signature)
-        # Find the Node associated with the deepest class.
-        node = self
-        for element in elements[:-1]:
-            if element in node.nodes:
-                node = node.nodes[element]
-            else:
-                next_node = InteriorNode()
-                node.nodes[element] = next_node
-                node = next_node
-        # Add a Leaf containing the value and associate it with the member
-        # signature within the class.
-        lastElement = elements[-1]
-        if not lastElement.startswith("member:"):
-            raise Exception(
-                "Invalid signature: %s, does not identify a specific member" %
-                signature)
-        if lastElement in node.nodes:
-            raise Exception("Duplicate signature: %s" % signature)
-        node.nodes[lastElement] = Leaf(value)
-
-    def getMatchingRows(self, pattern):
-        """Get the values (plural) associated with the pattern.
-
-        e.g. If the pattern is a full signature then this will return a list
-        containing the value associated with that signature.
-
-        If the pattern is a class then this will return a list containing the
-        values associated with all members of that class.
-
-        If the pattern is a package then this will return a list containing the
-        values associated with all the members of all the classes in that
-        package and sub-packages.
-
-        If the pattern ends with "*" then the preceding part is treated as a
-        package and this will return a list containing the values associated
-        with all the members of all the classes in that package.
-
-        If the pattern ends with "**" then the preceding part is treated
-        as a package and this will return a list containing the values
-        associated with all the members of all the classes in that package and
-        all sub-packages.
-
-        :param pattern: the pattern which could be a complete signature or a
-        class, or package wildcard.
-        :return: an iterable containing all the values associated with the
-        pattern.
-        """
-        elements = self.signatureToElements(pattern)
-        node = self
-        # Include all values from this node and all its children.
-        selector = lambda x: True
-        lastElement = elements[-1]
-        if lastElement in ("*", "**"):
-            elements = elements[:-1]
-            if lastElement == "*":
-                # Do not include values from sub-packages.
-                selector = lambda x: not x.startswith("package:")
-        for element in elements:
-            if element in node.nodes:
-                node = node.nodes[element]
-            else:
-                return []
-        return chain.from_iterable(node.values(selector))
-
-    def values(self, selector):
-        """:param selector: a function that can be applied to a key in the nodes
-        attribute to determine whether to return its values.
-
-        :return: A list of iterables of all the values associated with
-        this node and its children.
-        """
-        values = []
-        self.appendValues(values, selector)
-        return values
-
-    def appendValues(self, values, selector):
-        """Append the values associated with this node and its children to the
-        list.
-
-        For each item (key, child) in nodes the child node's values are returned
-        if and only if the selector returns True when called on its key. A child
-        node's values are all the values associated with it and all its
-        descendant nodes.
-
-        :param selector: a function that can be applied to a key in the nodes
-        attribute to determine whether to return its values.
-        :param values: a list of a iterables of values.
-        """
-        for key, node in self.nodes.items():
-            if selector(key):
-                node.appendValues(values, lambda x: True)
+from signature_trie import signature_trie
 
 
-class Leaf:
-    """A leaf of the trie
-
-    Attributes:
-        value: the value associated with this leaf.
-    """
-
-    def __init__(self, value):
-        self.value = value
-
-    def values(self, selector): #pylint: disable=unused-argument
-        """:return: A list of a list of the value associated with this node.
-        """
-        return [[self.value]]
-
-    def appendValues(self, values, selector): #pylint: disable=unused-argument
-        """Appends a list of the value associated with this node to the list.
-
-        :param values: a list of a iterables of values.
-        """
-        values.append([self.value])
-
-
-def dict_reader(csvfile):
+def dict_reader(csv_file):
     return csv.DictReader(
-        csvfile, delimiter=",", quotechar="|", fieldnames=["signature"])
+        csv_file, delimiter=",", quotechar="|", fieldnames=["signature"])
 
 
 def read_flag_trie_from_file(file):
-    with open(file, "r") as stream:
+    with open(file, "r", encoding="utf8") as stream:
         return read_flag_trie_from_stream(stream)
 
 
 def read_flag_trie_from_stream(stream):
-    trie = InteriorNode()
+    trie = signature_trie()
     reader = dict_reader(stream)
     for row in reader:
         signature = row["signature"]
@@ -268,26 +43,24 @@
 
 
 def extract_subset_from_monolithic_flags_as_dict_from_file(
-        monolithicTrie, patternsFile):
-    """Extract a subset of flags from the dict containing all the monolithic
-    flags.
+        monolithic_trie, patterns_file):
+    """Extract a subset of flags from the dict of monolithic flags.
 
-    :param monolithicFlagsDict: the dict containing all the monolithic flags.
-    :param patternsFile: a file containing a list of signature patterns that
+    :param monolithic_trie: the trie containing all the monolithic flags.
+    :param patterns_file: a file containing a list of signature patterns that
     define the subset.
     :return: the dict from signature to row.
     """
-    with open(patternsFile, "r") as stream:
+    with open(patterns_file, "r", encoding="utf8") as stream:
         return extract_subset_from_monolithic_flags_as_dict_from_stream(
-            monolithicTrie, stream)
+            monolithic_trie, stream)
 
 
 def extract_subset_from_monolithic_flags_as_dict_from_stream(
-        monolithicTrie, stream):
-    """Extract a subset of flags from the trie containing all the monolithic
-    flags.
+        monolithic_trie, stream):
+    """Extract a subset of flags from the trie of monolithic flags.
 
-    :param monolithicTrie: the trie containing all the monolithic flags.
+    :param monolithic_trie: the trie containing all the monolithic flags.
     :param stream: a stream containing a list of signature patterns that define
     the subset.
     :return: the dict from signature to row.
@@ -295,7 +68,7 @@
     dict_signature_to_row = {}
     for pattern in stream:
         pattern = pattern.rstrip()
-        rows = monolithicTrie.getMatchingRows(pattern)
+        rows = monolithic_trie.get_matching_rows(pattern)
         for row in rows:
             signature = row["signature"]
             dict_signature_to_row[signature] = row
@@ -303,8 +76,10 @@
 
 
 def read_signature_csv_from_stream_as_dict(stream):
-    """Read the csv contents from the stream into a dict. The first column is
-    assumed to be the signature and used as the key.
+    """Read the csv contents from the stream into a dict.
+
+    The first column is assumed to be the signature and used as the
+    key.
 
     The whole row is stored as the value.
     :param stream: the csv contents to read
@@ -318,85 +93,110 @@
     return dict_signature_to_row
 
 
-def read_signature_csv_from_file_as_dict(csvFile):
-    """Read the csvFile into a dict. The first column is assumed to be the
-    signature and used as the key.
+def read_signature_csv_from_file_as_dict(csv_file):
+    """Read the csvFile into a dict.
+
+    The first column is assumed to be the signature and used as the
+    key.
 
     The whole row is stored as the value.
-    :param csvFile: the csv file to read
+    :param csv_file: the csv file to read
     :return: the dict from signature to row.
     """
-    with open(csvFile, "r") as f:
+    with open(csv_file, "r", encoding="utf8") as f:
         return read_signature_csv_from_stream_as_dict(f)
 
 
-def compare_signature_flags(monolithicFlagsDict, modularFlagsDict):
+def compare_signature_flags(monolithic_flags_dict, modular_flags_dict,
+                            implementation_flags):
     """Compare the signature flags between the two dicts.
 
-    :param monolithicFlagsDict: the dict containing the subset of the monolithic
-    flags that should be equal to the modular flags.
-    :param modularFlagsDict:the dict containing the flags produced by a single
+    :param monolithic_flags_dict: the dict containing the subset of the
+    monolithic flags that should be equal to the modular flags.
+    :param modular_flags_dict: the dict containing the flags produced by a single
     bootclasspath_fragment module.
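+    :param implementation_flags: the flags assumed for any signature that the
+    module has filtered out as a private implementation detail.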
     :return: list of mismatches., each mismatch is a tuple where the first item
     is the signature, and the second and third items are lists of the flags from
     modular dict, and monolithic dict respectively.
     """
-    mismatchingSignatures = []
+    mismatching_signatures = []
     # Create a sorted set of all the signatures from both the monolithic and
     # modular dicts.
-    allSignatures = sorted(
-        set(chain(monolithicFlagsDict.keys(), modularFlagsDict.keys())))
-    for signature in allSignatures:
-        monolithicRow = monolithicFlagsDict.get(signature, {})
-        monolithicFlags = monolithicRow.get(None, [])
-        if signature in modularFlagsDict:
-            modularRow = modularFlagsDict.get(signature, {})
-            modularFlags = modularRow.get(None, [])
+    all_signatures = sorted(
+        set(chain(monolithic_flags_dict.keys(), modular_flags_dict.keys())))
+    for signature in all_signatures:
+        monolithic_row = monolithic_flags_dict.get(signature, {})
+        monolithic_flags = monolithic_row.get(None, [])
+        if signature in modular_flags_dict:
+            modular_row = modular_flags_dict.get(signature, {})
+            modular_flags = modular_row.get(None, [])
         else:
-            modularFlags = ["blocked"]
-        if monolithicFlags != modularFlags:
-            mismatchingSignatures.append(
-                (signature, modularFlags, monolithicFlags))
-    return mismatchingSignatures
+            modular_flags = implementation_flags
+        if monolithic_flags != modular_flags:
+            mismatching_signatures.append(
+                (signature, modular_flags, monolithic_flags))
+    return mismatching_signatures
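+# Illustrative example: with implementation_flags=['blocked'], a signature that
+# appears only in the monolithic dict is compared against ['blocked'], so it is
+# reported as a mismatch only if its monolithic flags differ from ['blocked'].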
 
 
 def main(argv):
     args_parser = argparse.ArgumentParser(
         description="Verify that sets of hidden API flags are each a subset of "
-        "the monolithic flag file."
-    )
-    args_parser.add_argument("monolithicFlags", help="The monolithic flag file")
+        "the monolithic flag file. For each module this uses the provided "
+        "signature patterns to select a subset of the monolithic flags and "
+        "then it compares that subset against the filtered flags provided by "
+        "the module. If the module's filtered flags does not contain flags for "
+        "a signature then it is assumed to have been filtered out because it "
+        "was not part of an API and so is assumed to have the implementation "
+        "flags.")
     args_parser.add_argument(
-        "modularFlags",
-        nargs=argparse.REMAINDER,
-        help="Flags produced by individual bootclasspath_fragment modules")
+        "--monolithic-flags", help="The monolithic flag file")
+    args_parser.add_argument(
+        "--module-flags",
+        action="append",
+        help="A colon separated pair of paths. The first is a path to a "
+        "filtered set of flags, and the second is a path to a set of "
+        "signature patterns that identify the set of classes belonging to "
+        "a single bootclasspath_fragment module. Specify once for each module "
+        "that needs to be checked.")
+    args_parser.add_argument(
+        "--implementation-flag",
+        action="append",
+        help="A flag in the set of flags that identifies a signature which is "
+        "not part of an API, i.e. is the signature of a private implementation "
+        "member. Specify as many times as necessary to define the "
+        "implementation flag set. If this is not specified then the "
+        "implementation flag set is empty.")
     args = args_parser.parse_args(argv[1:])
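+    # Example invocation (illustrative paths):
+    #   verify_overlaps.py --monolithic-flags out/all-flags.csv \
+    #       --module-flags art-flags.csv:art-patterns.txt \
+    #       --implementation-flag blocked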
 
     # Read in all the flags into the trie
-    monolithicFlagsPath = args.monolithicFlags
-    monolithicTrie = read_flag_trie_from_file(monolithicFlagsPath)
+    monolithic_flags_path = args.monolithic_flags
+    monolithic_trie = read_flag_trie_from_file(monolithic_flags_path)
+
+    implementation_flags = args.implementation_flag or []
 
     # For each subset specified on the command line, create dicts for the flags
     # provided by the subset and the corresponding flags from the complete set
     # of flags and compare them.
     failed = False
-    for modularPair in args.modularFlags:
-        parts = modularPair.split(":")
-        modularFlagsPath = parts[0]
-        modularPatternsPath = parts[1]
-        modularFlagsDict = read_signature_csv_from_file_as_dict(
-            modularFlagsPath)
-        monolithicFlagsSubsetDict = \
+    module_pairs = args.module_flags or []
+    for modular_pair in module_pairs:
+        parts = modular_pair.split(":")
+        modular_flags_path = parts[0]
+        modular_patterns_path = parts[1]
+        modular_flags_dict = read_signature_csv_from_file_as_dict(
+            modular_flags_path)
+        monolithic_flags_subset_dict = \
             extract_subset_from_monolithic_flags_as_dict_from_file(
-            monolithicTrie, modularPatternsPath)
-        mismatchingSignatures = compare_signature_flags(
-            monolithicFlagsSubsetDict, modularFlagsDict)
-        if mismatchingSignatures:
+                monolithic_trie, modular_patterns_path)
+        mismatching_signatures = compare_signature_flags(
+            monolithic_flags_subset_dict, modular_flags_dict,
+            implementation_flags)
+        if mismatching_signatures:
             failed = True
             print("ERROR: Hidden API flags are inconsistent:")
-            print("< " + modularFlagsPath)
-            print("> " + monolithicFlagsPath)
-            for mismatch in mismatchingSignatures:
+            print("< " + modular_flags_path)
+            print("> " + monolithic_flags_path)
+            for mismatch in mismatching_signatures:
                 signature = mismatch[0]
                 print()
                 print("< " + ",".join([signature] + mismatch[1]))
diff --git a/scripts/hiddenapi/verify_overlaps_test.py b/scripts/hiddenapi/verify_overlaps_test.py
index 22a1cdf..0a489ee 100755
--- a/scripts/hiddenapi/verify_overlaps_test.py
+++ b/scripts/hiddenapi/verify_overlaps_test.py
@@ -17,69 +17,26 @@
 import io
 import unittest
 
-from verify_overlaps import * #pylint: disable=unused-wildcard-import,wildcard-import
+import verify_overlaps as vo
 
 
-class TestSignatureToElements(unittest.TestCase):
-
-    def signatureToElements(self, signature):
-        return InteriorNode().signatureToElements(signature)
-
-    def test_signatureToElements_1(self):
-        expected = [
-            'package:java',
-            'package:lang',
-            'class:ProcessBuilder',
-            'class:Redirect',
-            'class:1',
-            'member:<init>()V',
-        ]
-        self.assertEqual(
-            expected,
-            self.signatureToElements(
-                'Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V'))
-
-    def test_signatureToElements_2(self):
-        expected = [
-            'package:java',
-            'package:lang',
-            'class:Object',
-            'member:hashCode()I',
-        ]
-        self.assertEqual(
-            expected,
-            self.signatureToElements('Ljava/lang/Object;->hashCode()I'))
-
-    def test_signatureToElements_3(self):
-        expected = [
-            'package:java',
-            'package:lang',
-            'class:CharSequence',
-            'class:',
-            'class:ExternalSyntheticLambda0',
-            'member:<init>(Ljava/lang/CharSequence;)V',
-        ]
-        self.assertEqual(
-            expected,
-            self.signatureToElements(
-                'Ljava/lang/CharSequence$$ExternalSyntheticLambda0;'
-                '-><init>(Ljava/lang/CharSequence;)V'))
-
-#pylint: disable=line-too-long
 class TestDetectOverlaps(unittest.TestCase):
 
-    def read_flag_trie_from_string(self, csvdata):
+    @staticmethod
+    def read_flag_trie_from_string(csvdata):
         with io.StringIO(csvdata) as f:
-            return read_flag_trie_from_stream(f)
+            return vo.read_flag_trie_from_stream(f)
 
-    def read_signature_csv_from_string_as_dict(self, csvdata):
+    @staticmethod
+    def read_signature_csv_from_string_as_dict(csvdata):
         with io.StringIO(csvdata) as f:
-            return read_signature_csv_from_stream_as_dict(f)
+            return vo.read_signature_csv_from_stream_as_dict(f)
 
+    @staticmethod
     def extract_subset_from_monolithic_flags_as_dict_from_string(
-            self, monolithic, patterns):
+            monolithic, patterns):
         with io.StringIO(patterns) as f:
-            return extract_subset_from_monolithic_flags_as_dict_from_stream(
+            return vo.extract_subset_from_monolithic_flags_as_dict_from_stream(
                 monolithic, f)
 
     extractInput = """
@@ -95,14 +52,14 @@
         monolithic = self.read_flag_trie_from_string(
             TestDetectOverlaps.extractInput)
 
-        patterns = 'Ljava/lang/Object;->hashCode()I'
+        patterns = "Ljava/lang/Object;->hashCode()I"
 
         subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
             monolithic, patterns)
         expected = {
-            'Ljava/lang/Object;->hashCode()I': {
-                None: ['public-api', 'system-api', 'test-api'],
-                'signature': 'Ljava/lang/Object;->hashCode()I',
+            "Ljava/lang/Object;->hashCode()I": {
+                None: ["public-api", "system-api", "test-api"],
+                "signature": "Ljava/lang/Object;->hashCode()I",
             },
         }
         self.assertEqual(expected, subset)
@@ -111,18 +68,18 @@
         monolithic = self.read_flag_trie_from_string(
             TestDetectOverlaps.extractInput)
 
-        patterns = 'java/lang/Object'
+        patterns = "java/lang/Object"
 
         subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
             monolithic, patterns)
         expected = {
-            'Ljava/lang/Object;->hashCode()I': {
-                None: ['public-api', 'system-api', 'test-api'],
-                'signature': 'Ljava/lang/Object;->hashCode()I',
+            "Ljava/lang/Object;->hashCode()I": {
+                None: ["public-api", "system-api", "test-api"],
+                "signature": "Ljava/lang/Object;->hashCode()I",
             },
-            'Ljava/lang/Object;->toString()Ljava/lang/String;': {
-                None: ['blocked'],
-                'signature': 'Ljava/lang/Object;->toString()Ljava/lang/String;',
+            "Ljava/lang/Object;->toString()Ljava/lang/String;": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/Object;->toString()Ljava/lang/String;",
             },
         }
         self.assertEqual(expected, subset)
@@ -131,20 +88,20 @@
         monolithic = self.read_flag_trie_from_string(
             TestDetectOverlaps.extractInput)
 
-        patterns = 'java/lang/Character'
+        patterns = "java/lang/Character"
 
         subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
             monolithic, patterns)
         expected = {
-            'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;':
-                {
-                    None: ['blocked'],
-                    'signature':
-                        'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;',
-                },
-            'Ljava/lang/Character;->serialVersionUID:J': {
-                None: ['sdk'],
-                'signature': 'Ljava/lang/Character;->serialVersionUID:J',
+            "Ljava/lang/Character$UnicodeScript;"
+            "->of(I)Ljava/lang/Character$UnicodeScript;": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/Character$UnicodeScript;"
+                             "->of(I)Ljava/lang/Character$UnicodeScript;",
+            },
+            "Ljava/lang/Character;->serialVersionUID:J": {
+                None: ["sdk"],
+                "signature": "Ljava/lang/Character;->serialVersionUID:J",
             },
         }
         self.assertEqual(expected, subset)
@@ -153,17 +110,17 @@
         monolithic = self.read_flag_trie_from_string(
             TestDetectOverlaps.extractInput)
 
-        patterns = 'java/lang/Character$UnicodeScript'
+        patterns = "java/lang/Character$UnicodeScript"
 
         subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
             monolithic, patterns)
         expected = {
-            'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;':
-                {
-                    None: ['blocked'],
-                    'signature':
-                        'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;',
-                },
+            "Ljava/lang/Character$UnicodeScript;"
+            "->of(I)Ljava/lang/Character$UnicodeScript;": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/Character$UnicodeScript;"
+                             "->of(I)Ljava/lang/Character$UnicodeScript;",
+            },
         }
         self.assertEqual(expected, subset)
 
@@ -171,32 +128,32 @@
         monolithic = self.read_flag_trie_from_string(
             TestDetectOverlaps.extractInput)
 
-        patterns = 'java/lang/*'
+        patterns = "java/lang/*"
 
         subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
             monolithic, patterns)
         expected = {
-            'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;':
-                {
-                    None: ['blocked'],
-                    'signature':
-                        'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;',
-                },
-            'Ljava/lang/Character;->serialVersionUID:J': {
-                None: ['sdk'],
-                'signature': 'Ljava/lang/Character;->serialVersionUID:J',
+            "Ljava/lang/Character$UnicodeScript;"
+            "->of(I)Ljava/lang/Character$UnicodeScript;": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/Character$UnicodeScript;"
+                             "->of(I)Ljava/lang/Character$UnicodeScript;",
             },
-            'Ljava/lang/Object;->hashCode()I': {
-                None: ['public-api', 'system-api', 'test-api'],
-                'signature': 'Ljava/lang/Object;->hashCode()I',
+            "Ljava/lang/Character;->serialVersionUID:J": {
+                None: ["sdk"],
+                "signature": "Ljava/lang/Character;->serialVersionUID:J",
             },
-            'Ljava/lang/Object;->toString()Ljava/lang/String;': {
-                None: ['blocked'],
-                'signature': 'Ljava/lang/Object;->toString()Ljava/lang/String;',
+            "Ljava/lang/Object;->hashCode()I": {
+                None: ["public-api", "system-api", "test-api"],
+                "signature": "Ljava/lang/Object;->hashCode()I",
             },
-            'Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V': {
-                None: ['blocked'],
-                'signature': 'Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V',
+            "Ljava/lang/Object;->toString()Ljava/lang/String;": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/Object;->toString()Ljava/lang/String;",
+            },
+            "Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V",
             },
         }
         self.assertEqual(expected, subset)
@@ -205,59 +162,47 @@
         monolithic = self.read_flag_trie_from_string(
             TestDetectOverlaps.extractInput)
 
-        patterns = 'java/**'
+        patterns = "java/**"
 
         subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
             monolithic, patterns)
         expected = {
-            'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;':
-                {
-                    None: ['blocked'],
-                    'signature':
-                        'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;',
-                },
-            'Ljava/lang/Character;->serialVersionUID:J': {
-                None: ['sdk'],
-                'signature': 'Ljava/lang/Character;->serialVersionUID:J',
+            "Ljava/lang/Character$UnicodeScript;"
+            "->of(I)Ljava/lang/Character$UnicodeScript;": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/Character$UnicodeScript;"
+                             "->of(I)Ljava/lang/Character$UnicodeScript;",
             },
-            'Ljava/lang/Object;->hashCode()I': {
-                None: ['public-api', 'system-api', 'test-api'],
-                'signature': 'Ljava/lang/Object;->hashCode()I',
+            "Ljava/lang/Character;->serialVersionUID:J": {
+                None: ["sdk"],
+                "signature": "Ljava/lang/Character;->serialVersionUID:J",
             },
-            'Ljava/lang/Object;->toString()Ljava/lang/String;': {
-                None: ['blocked'],
-                'signature': 'Ljava/lang/Object;->toString()Ljava/lang/String;',
+            "Ljava/lang/Object;->hashCode()I": {
+                None: ["public-api", "system-api", "test-api"],
+                "signature": "Ljava/lang/Object;->hashCode()I",
             },
-            'Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V': {
-                None: ['blocked'],
-                'signature': 'Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V',
+            "Ljava/lang/Object;->toString()Ljava/lang/String;": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/Object;->toString()Ljava/lang/String;",
             },
-            'Ljava/util/zip/ZipFile;-><clinit>()V': {
-                None: ['blocked'],
-                'signature': 'Ljava/util/zip/ZipFile;-><clinit>()V',
+            "Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V",
+            },
+            "Ljava/util/zip/ZipFile;-><clinit>()V": {
+                None: ["blocked"],
+                "signature": "Ljava/util/zip/ZipFile;-><clinit>()V",
             },
         }
         self.assertEqual(expected, subset)
 
-    def test_extract_subset_invalid_pattern_wildcard_and_member(self):
-        monolithic = self.read_flag_trie_from_string(
-            TestDetectOverlaps.extractInput)
-
-        patterns = 'Ljava/lang/*;->hashCode()I'
-
-        with self.assertRaises(Exception) as context:
-            self.extract_subset_from_monolithic_flags_as_dict_from_string(
-                monolithic, patterns)
-        self.assertTrue('contains wildcard * and member signature hashCode()I'
-                        in str(context.exception))
-
     def test_read_trie_duplicate(self):
         with self.assertRaises(Exception) as context:
             self.read_flag_trie_from_string("""
 Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api
 Ljava/lang/Object;->hashCode()I,blocked
 """)
-        self.assertTrue('Duplicate signature: Ljava/lang/Object;->hashCode()I'
+        self.assertTrue("Duplicate signature: Ljava/lang/Object;->hashCode()I"
                         in str(context.exception))
 
     def test_read_trie_missing_member(self):
@@ -266,8 +211,8 @@
 Ljava/lang/Object,public-api,system-api,test-api
 """)
         self.assertTrue(
-            'Invalid signature: Ljava/lang/Object, does not identify a specific member'
-            in str(context.exception))
+            "Invalid signature: Ljava/lang/Object, "
+            "does not identify a specific member" in str(context.exception))
 
     def test_match(self):
         monolithic = self.read_signature_csv_from_string_as_dict("""
@@ -276,7 +221,8 @@
         modular = self.read_signature_csv_from_string_as_dict("""
 Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api
 """)
-        mismatches = compare_signature_flags(monolithic, modular)
+        mismatches = vo.compare_signature_flags(monolithic, modular,
+                                                ["blocked"])
         expected = []
         self.assertEqual(expected, mismatches)
 
@@ -287,12 +233,13 @@
         modular = self.read_signature_csv_from_string_as_dict("""
 Ljava/lang/Object;->toString()Ljava/lang/String;,public-api,system-api,test-api
 """)
-        mismatches = compare_signature_flags(monolithic, modular)
+        mismatches = vo.compare_signature_flags(monolithic, modular,
+                                                ["blocked"])
         expected = [
             (
-                'Ljava/lang/Object;->toString()Ljava/lang/String;',
-                ['public-api', 'system-api', 'test-api'],
-                ['public-api'],
+                "Ljava/lang/Object;->toString()Ljava/lang/String;",
+                ["public-api", "system-api", "test-api"],
+                ["public-api"],
             ),
         ]
         self.assertEqual(expected, mismatches)
@@ -304,12 +251,13 @@
         modular = self.read_signature_csv_from_string_as_dict("""
 Ljava/lang/Object;->toString()Ljava/lang/String;,public-api,system-api,test-api
 """)
-        mismatches = compare_signature_flags(monolithic, modular)
+        mismatches = vo.compare_signature_flags(monolithic, modular,
+                                                ["blocked"])
         expected = [
             (
-                'Ljava/lang/Object;->toString()Ljava/lang/String;',
-                ['public-api', 'system-api', 'test-api'],
-                ['blocked'],
+                "Ljava/lang/Object;->toString()Ljava/lang/String;",
+                ["public-api", "system-api", "test-api"],
+                ["blocked"],
             ),
         ]
         self.assertEqual(expected, mismatches)
@@ -321,26 +269,28 @@
         modular = self.read_signature_csv_from_string_as_dict("""
 Ljava/lang/Object;->toString()Ljava/lang/String;,blocked
 """)
-        mismatches = compare_signature_flags(monolithic, modular)
+        mismatches = vo.compare_signature_flags(monolithic, modular,
+                                                ["blocked"])
         expected = [
             (
-                'Ljava/lang/Object;->toString()Ljava/lang/String;',
-                ['blocked'],
-                ['public-api', 'system-api', 'test-api'],
+                "Ljava/lang/Object;->toString()Ljava/lang/String;",
+                ["blocked"],
+                ["public-api", "system-api", "test-api"],
             ),
         ]
         self.assertEqual(expected, mismatches)
 
     def test_match_treat_missing_from_modular_as_blocked(self):
-        monolithic = self.read_signature_csv_from_string_as_dict('')
+        monolithic = self.read_signature_csv_from_string_as_dict("")
         modular = self.read_signature_csv_from_string_as_dict("""
 Ljava/lang/Object;->toString()Ljava/lang/String;,public-api,system-api,test-api
 """)
-        mismatches = compare_signature_flags(monolithic, modular)
+        mismatches = vo.compare_signature_flags(monolithic, modular,
+                                                ["blocked"])
         expected = [
             (
-                'Ljava/lang/Object;->toString()Ljava/lang/String;',
-                ['public-api', 'system-api', 'test-api'],
+                "Ljava/lang/Object;->toString()Ljava/lang/String;",
+                ["public-api", "system-api", "test-api"],
                 [],
             ),
         ]
@@ -351,12 +301,13 @@
 Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api
 """)
         modular = {}
-        mismatches = compare_signature_flags(monolithic, modular)
+        mismatches = vo.compare_signature_flags(monolithic, modular,
+                                                ["blocked"])
         expected = [
             (
-                'Ljava/lang/Object;->hashCode()I',
-                ['blocked'],
-                ['public-api', 'system-api', 'test-api'],
+                "Ljava/lang/Object;->hashCode()I",
+                ["blocked"],
+                ["public-api", "system-api", "test-api"],
             ),
         ]
         self.assertEqual(expected, mismatches)
@@ -366,10 +317,50 @@
 Ljava/lang/Object;->hashCode()I,blocked
 """)
         modular = {}
-        mismatches = compare_signature_flags(monolithic, modular)
+        mismatches = vo.compare_signature_flags(monolithic, modular,
+                                                ["blocked"])
         expected = []
         self.assertEqual(expected, mismatches)
-#pylint: enable=line-too-long
 
-if __name__ == '__main__':
+    def test_match_treat_missing_from_modular_as_empty(self):
+        monolithic = self.read_signature_csv_from_string_as_dict("")
+        modular = self.read_signature_csv_from_string_as_dict("""
+Ljava/lang/Object;->toString()Ljava/lang/String;,public-api,system-api,test-api
+""")
+        mismatches = vo.compare_signature_flags(monolithic, modular, [])
+        expected = [
+            (
+                "Ljava/lang/Object;->toString()Ljava/lang/String;",
+                ["public-api", "system-api", "test-api"],
+                [],
+            ),
+        ]
+        self.assertEqual(expected, mismatches)
+
+    def test_mismatch_treat_missing_from_modular_as_empty(self):
+        monolithic = self.read_signature_csv_from_string_as_dict("""
+Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api
+""")
+        modular = {}
+        mismatches = vo.compare_signature_flags(monolithic, modular, [])
+        expected = [
+            (
+                "Ljava/lang/Object;->hashCode()I",
+                [],
+                ["public-api", "system-api", "test-api"],
+            ),
+        ]
+        self.assertEqual(expected, mismatches)
+
+    def test_empty_missing_from_modular(self):
+        monolithic = self.read_signature_csv_from_string_as_dict("""
+Ljava/lang/Object;->hashCode()I
+""")
+        modular = {}
+        mismatches = vo.compare_signature_flags(monolithic, modular, [])
+        expected = []
+        self.assertEqual(expected, mismatches)
+
+
+if __name__ == "__main__":
     unittest.main(verbosity=2)
diff --git a/scripts/list_image.sh b/scripts/list_image.sh
new file mode 100755
index 0000000..0542fa6
--- /dev/null
+++ b/scripts/list_image.sh
@@ -0,0 +1,51 @@
+#! /bin/bash
+
+# Recursively list Android image directory.
+set -eu
+set -o pipefail
+
+function die() { format=$1; shift; printf "$format\n" "$@"; exit 1; }
+
+# Figure out the filer utility.
+declare filer=
+[[ -z "${ANDROID_HOST_OUT:-}" ]] || filer=${ANDROID_HOST_OUT}/bin/debugfs_static
+if [[ "${1:-}" =~ --debugfs_path=(.*) ]]; then
+  filer=${BASH_REMATCH[1]}
+  shift
+fi
+if [[ -z "${filer:-}" ]]; then
+  maybefiler="$(dirname $0)/debugfs_static"
+  [[ ! -x "$maybefiler" ]] || filer="$maybefiler"
+fi
+
+(( $# >0 )) || die "%s [--debugfs_path=<path>] IMAGE" "$0"
+
+[[ -n "${filer:-}" ]] || die "cannot locate 'debugfs' executable: \
+--debugfs_path= is missing, ANDROID_HOST_OUT is not set, \
+and 'debugfs_static' is not colocated with this script"
+declare -r image="$1"
+
+function dolevel() {
+  printf "%s/\n" "$1"
+  # Each line of the file output consists of 6 fields separated with '/'.
+  # The second one contains the file's attributes, and the fifth its name.
+  $filer -R "ls -l -p $1" "$image" 2>/dev/null |\
+    sed -nr 's|^/.*/(.*)/.*/.*/(.+)/.*/$|\2 \1|p' | LANG=C sort | \
+  while read name attr; do
+    [[ "$name" != '.' && "$name" != '..' ]] || continue
+    path="$1/$name"
+    # If the second char of the attributes is '4', it is a directory.
+    if [[ $attr =~ ^.4 ]]; then
+      dolevel "$path"
+    else
+      printf "%s\n" "$path"
+    fi
+  done
+}
+
+# The filer always prints its version on stderr, so we are going
+# to redirect it to the bit bucket. On the other hand, the filer's
+# return code on error is still 0. Let's run it once without
+# redirecting stderr to see that there is at least one entry.
+$filer -R "ls -l -p" "$image" | grep -q -m1 -P '^/.*/.*/.*/.*/.+/.*/$'
+dolevel .
diff --git a/scripts/mergenotice.py b/scripts/mergenotice.py
deleted file mode 100755
index fe99073..0000000
--- a/scripts/mergenotice.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""
-Merges input notice files to the output file while ignoring duplicated files
-This script shouldn't be confused with build/soong/scripts/generate-notice-files.py
-which is responsible for creating the final notice file for all artifacts
-installed. This script has rather limited scope; it is meant to create a merged
-notice file for a set of modules that are packaged together, e.g. in an APEX.
-The merged notice file does not reveal the individual files in the package.
-"""
-
-import sys
-import argparse
-
-def get_args():
-  parser = argparse.ArgumentParser(description='Merge notice files.')
-  parser.add_argument('--output', help='output file path.')
-  parser.add_argument('inputs', metavar='INPUT', nargs='+',
-                      help='input notice file')
-  return parser.parse_args()
-
-def main(argv):
-  args = get_args()
-
-  processed = set()
-  with open(args.output, 'w+') as output:
-    for input in args.inputs:
-      with open(input, 'r') as f:
-        data = f.read().strip()
-        if data not in processed:
-          processed.add(data)
-          output.write('%s\n\n' % data)
-
-if __name__ == '__main__':
-  main(sys.argv)
diff --git a/scripts/microfactory.bash b/scripts/microfactory.bash
index 5e702e0..192b38f 100644
--- a/scripts/microfactory.bash
+++ b/scripts/microfactory.bash
@@ -59,7 +59,7 @@
     BUILDDIR=$(getoutdir) \
       SRCDIR=${TOP} \
       BLUEPRINTDIR=${TOP}/build/blueprint \
-      EXTRA_ARGS="-pkg-path android/soong=${TOP}/build/soong -pkg-path google.golang.org/protobuf=${TOP}/external/golang-protobuf" \
+      EXTRA_ARGS="-pkg-path android/soong=${TOP}/build/soong -pkg-path rbcrun=${TOP}/build/make/tools/rbcrun -pkg-path google.golang.org/protobuf=${TOP}/external/golang-protobuf -pkg-path go.starlark.net=${TOP}/external/starlark-go" \
       build_go $@
 }
 
diff --git a/scripts/rbc-run b/scripts/rbc-run
index b8a6c0c..8d93f0e 100755
--- a/scripts/rbc-run
+++ b/scripts/rbc-run
@@ -6,8 +6,8 @@
 set -eu
 
 declare -r output_root="${OUT_DIR:-out}"
-declare -r runner="${output_root}/soong/rbcrun"
-declare -r converter="${output_root}/soong/mk2rbc"
+declare -r runner="${output_root}/rbcrun"
+declare -r converter="${output_root}/mk2rbc"
 declare -r launcher="${output_root}/rbc/launcher.rbc"
 declare -r makefile_list="${output_root}/.module_paths/configuration.list"
 declare -r makefile="$1"
diff --git a/scripts/test_config_fixer.py b/scripts/test_config_fixer.py
index c150e8c..3dbc22e 100644
--- a/scripts/test_config_fixer.py
+++ b/scripts/test_config_fixer.py
@@ -28,6 +28,8 @@
 from manifest import parse_test_config
 from manifest import write_xml
 
+KNOWN_PREPARERS = ['com.android.tradefed.targetprep.TestAppInstallSetup',
+                   'com.android.tradefed.targetprep.suite.SuiteApkInstaller']
 
 def parse_args():
   """Parse commandline arguments."""
@@ -64,7 +66,7 @@
   tests = get_children_with_tag(test_config, 'target_preparer')
 
   for test in tests:
-    if test.getAttribute('class') == "com.android.tradefed.targetprep.TestAppInstallSetup":
+    if test.getAttribute('class') in KNOWN_PREPARERS:
       options = get_children_with_tag(test, 'option')
       for option in options:
         if option.getAttribute('name') == "test-file-name":
diff --git a/scripts/test_config_fixer_test.py b/scripts/test_config_fixer_test.py
index d00a593..39ce5b3 100644
--- a/scripts/test_config_fixer_test.py
+++ b/scripts/test_config_fixer_test.py
@@ -70,7 +70,7 @@
 class OverwriteTestFileNameTest(unittest.TestCase):
   """ Unit tests for overwrite_test_file_name function """
 
-  test_config = (
+  test_config_test_app_install_setup = (
       '<?xml version="1.0" encoding="utf-8"?>\n'
       '<configuration description="Runs some tests.">\n'
       '    <target_preparer class="com.android.tradefed.targetprep.TestAppInstallSetup">\n'
@@ -82,15 +82,38 @@
       '    </test>\n'
       '</configuration>\n')
 
-  def test_all(self):
-    doc = minidom.parseString(self.test_config % ("foo.apk"))
+  test_config_suite_apk_installer = (
+      '<?xml version="1.0" encoding="utf-8"?>\n'
+      '<configuration description="Runs some tests.">\n'
+      '    <target_preparer class="com.android.tradefed.targetprep.suite.SuiteApkInstaller">\n'
+      '        <option name="test-file-name" value="%s"/>\n'
+      '    </target_preparer>\n'
+      '    <test class="com.android.tradefed.testtype.AndroidJUnitTest">\n'
+      '        <option name="package" value="com.android.foo"/>\n'
+      '        <option name="runtime-hint" value="20s"/>\n'
+      '    </test>\n'
+      '</configuration>\n')
+
+  def test_testappinstallsetup(self):
+    doc = minidom.parseString(self.test_config_test_app_install_setup % ("foo.apk"))
 
     test_config_fixer.overwrite_test_file_name(doc, "bar.apk")
     output = io.StringIO()
     test_config_fixer.write_xml(output, doc)
 
     # Only the matching package name in a test node should be updated.
-    expected = self.test_config % ("bar.apk")
+    expected = self.test_config_test_app_install_setup % ("bar.apk")
+    self.assertEqual(expected, output.getvalue())
+
+  def test_suiteapkinstaller(self):
+    doc = minidom.parseString(self.test_config_suite_apk_installer % ("foo.apk"))
+
+    test_config_fixer.overwrite_test_file_name(doc, "bar.apk")
+    output = io.StringIO()
+    test_config_fixer.write_xml(output, doc)
+
+    # Only the matching package name in a test node should be updated.
+    expected = self.test_config_suite_apk_installer % ("bar.apk")
     self.assertEqual(expected, output.getvalue())
 
 
diff --git a/sdk/testing.go b/sdk/testing.go
index 294f1a5..062f200 100644
--- a/sdk/testing.go
+++ b/sdk/testing.go
@@ -25,6 +25,8 @@
 	"android/soong/cc"
 	"android/soong/genrule"
 	"android/soong/java"
+
+	"github.com/google/blueprint/proptools"
 )
 
 // Prepare for running an sdk test with an apex.
@@ -81,6 +83,11 @@
 		}
 	}),
 
+	// Add a build number file.
+	android.FixtureModifyProductVariables(func(variables android.FixtureProductVariables) {
+		variables.BuildNumberFile = proptools.StringPtr(BUILD_NUMBER_FILE)
+	}),
+
 	// Make sure that every test provides all the source files.
 	android.PrepareForTestDisallowNonExistentPaths,
 	android.MockFS{
@@ -143,6 +150,8 @@
 	copyRules := &strings.Builder{}
 	otherCopyRules := &strings.Builder{}
 	snapshotDirPrefix := sdk.builderForTests.snapshotDir.String() + "/"
+
+	seenBuildNumberFile := false
 	for _, bp := range buildParams {
 		switch bp.Rule.String() {
 		case android.Cp.String():
@@ -152,8 +161,14 @@
 			src := android.NormalizePathForTesting(bp.Input)
 			// We differentiate between copy rules for the snapshot, and copy rules for the install file.
 			if strings.HasPrefix(output.String(), snapshotDirPrefix) {
-				// Get source relative to build directory.
-				_, _ = fmt.Fprintf(copyRules, "%s -> %s\n", src, dest)
+				// Don't include the build-number.txt file in the copy rules as that would break lots of
+				// tests, just verify that it is copied here as it should appear in every snapshot.
+				if output.Base() == BUILD_NUMBER_FILE {
+					seenBuildNumberFile = true
+				} else {
+					// Get source relative to build directory.
+					_, _ = fmt.Fprintf(copyRules, "%s -> %s\n", src, dest)
+				}
 				info.snapshotContents = append(info.snapshotContents, dest)
 			} else {
 				_, _ = fmt.Fprintf(otherCopyRules, "%s -> %s\n", src, dest)
@@ -189,6 +204,10 @@
 		}
 	}
 
+	if !seenBuildNumberFile {
+		panic(fmt.Sprintf("Every snapshot must include the %s file", BUILD_NUMBER_FILE))
+	}
+
 	info.copyRules = copyRules.String()
 	info.otherCopyRules = otherCopyRules.String()
 
diff --git a/sdk/update.go b/sdk/update.go
index 389e845..5db604b 100644
--- a/sdk/update.go
+++ b/sdk/update.go
@@ -281,6 +281,10 @@
 	return append(variants, newVariant)
 }
 
+// BUILD_NUMBER_FILE is the name of the file in the snapshot zip that will contain the number of
+// the build from which the snapshot was produced.
+const BUILD_NUMBER_FILE = "snapshot-creation-build-number.txt"
+
 // SDK directory structure
 // <sdk_root>/
 //     Android.bp   : definition of a 'sdk' module is here. This is a hand-made one.
@@ -479,6 +483,9 @@
 
 	bp.build(pctx, ctx, nil)
 
+	// Copy the build number file into the snapshot.
+	builder.CopyToSnapshot(ctx.Config().BuildNumberFile(ctx), BUILD_NUMBER_FILE)
+
 	filesToZip := builder.filesToZip
 
 	// zip them all
diff --git a/sh/sh_binary.go b/sh/sh_binary.go
index 4190e84..d1beaba 100644
--- a/sh/sh_binary.go
+++ b/sh/sh_binary.go
@@ -149,6 +149,9 @@
 	// Only available for host sh_test modules.
 	Data_device_libs []string `android:"path,arch_variant"`
 
+	// Install the test into a folder named for the module in all test suites.
+	Per_testcase_directory *bool
+
 	// Test options.
 	Test_options TestOptions
 }
@@ -464,6 +467,7 @@
 				if Bool(s.testProperties.Test_options.Unit_test) {
 					entries.SetBool("LOCAL_IS_UNIT_TEST", true)
 				}
+				entries.SetBoolIfTrue("LOCAL_COMPATIBILITY_PER_TESTCASE_DIRECTORY", Bool(s.testProperties.Per_testcase_directory))
 			},
 		},
 	}}
diff --git a/snapshot/host_fake_snapshot.go b/snapshot/host_fake_snapshot.go
index 6b4e12b..b04657d 100644
--- a/snapshot/host_fake_snapshot.go
+++ b/snapshot/host_fake_snapshot.go
@@ -68,6 +68,12 @@
 	registerHostSnapshotComponents(android.InitRegistrationContext)
 }
 
+// Add prebuilt information to snapshot data
+type hostSnapshotFakeJsonFlags struct {
+	SnapshotJsonFlags
+	Prebuilt bool `json:",omitempty"`
+}
+
 func registerHostSnapshotComponents(ctx android.RegistrationContext) {
 	ctx.RegisterSingletonType("host-fake-snapshot", HostToolsFakeAndroidSingleton)
 }
@@ -94,7 +100,9 @@
 	// Find all host binary modules add 'fake' versions to snapshot
 	var outputs android.Paths
 	seen := make(map[string]bool)
-	var jsonData []SnapshotJsonFlags
+	var jsonData []hostSnapshotFakeJsonFlags
+	prebuilts := make(map[string]bool)
+
 	ctx.VisitAllModules(func(module android.Module) {
 		if module.Target().Os != ctx.Config().BuildOSTarget.Os {
 			return
@@ -104,9 +112,10 @@
 		}
 
 		if android.IsModulePrebuilt(module) {
+			// Add non-prebuilt module name to map of prebuilts
+			prebuilts[android.RemoveOptionalPrebuiltPrefix(module.Name())] = true
 			return
 		}
-
 		if !module.Enabled() || module.IsHideFromMake() {
 			return
 		}
@@ -114,17 +123,23 @@
 		if !apexInfo.IsForPlatform() {
 			return
 		}
-		path := hostBinToolPath(module)
+		path := hostToolPath(module)
 		if path.Valid() && path.String() != "" {
 			outFile := filepath.Join(c.snapshotDir, path.String())
 			if !seen[outFile] {
 				seen[outFile] = true
 				outputs = append(outputs, WriteStringToFileRule(ctx, "", outFile))
-				jsonData = append(jsonData, *hostBinJsonDesc(module))
+				jsonData = append(jsonData, hostSnapshotFakeJsonFlags{*hostJsonDesc(module), false})
 			}
 		}
 	})
-
+	// Update any module prebuilt information
+	for idx := range jsonData {
+		if _, ok := prebuilts[jsonData[idx].ModuleName]; ok {
+			// Prebuilt exists for this module
+			jsonData[idx].Prebuilt = true
+		}
+	}
 	marsh, err := json.Marshal(jsonData)
 	if err != nil {
 		ctx.Errorf("host fake snapshot json marshal failure: %#v", err)
diff --git a/snapshot/host_snapshot.go b/snapshot/host_snapshot.go
index 09a382e..9793218 100644
--- a/snapshot/host_snapshot.go
+++ b/snapshot/host_snapshot.go
@@ -19,6 +19,7 @@
 	"fmt"
 	"path/filepath"
 	"sort"
+	"strings"
 
 	"github.com/google/blueprint"
 	"github.com/google/blueprint/proptools"
@@ -62,6 +63,11 @@
 	installDir android.InstallPath
 }
 
+type ProcMacro interface {
+	ProcMacro() bool
+	CrateName() string
+}
+
 func hostSnapshotFactory() android.Module {
 	module := &hostSnapshot{}
 	initHostToolsModule(module)
@@ -94,7 +100,7 @@
 
 	// Create JSON file based on the direct dependencies
 	ctx.VisitDirectDeps(func(dep android.Module) {
-		desc := hostBinJsonDesc(dep)
+		desc := hostJsonDesc(dep)
 		if desc != nil {
 			jsonData = append(jsonData, *desc)
 		}
@@ -145,7 +151,7 @@
 
 	f.installDir = android.PathForModuleInstall(ctx)
 
-	f.CopyDepsToZip(ctx, depsZipFile)
+	f.CopyDepsToZip(ctx, f.GatherPackagingSpecs(ctx), depsZipFile)
 
 	builder := android.NewRuleBuilder(pctx, ctx)
 	builder.Command().
@@ -183,7 +189,7 @@
 }
 
 // Get host tools path and relative install string helpers
-func hostBinToolPath(m android.Module) android.OptionalPath {
+func hostToolPath(m android.Module) android.OptionalPath {
 	if provider, ok := m.(android.HostToolProvider); ok {
 		return provider.HostToolPath()
 	}
@@ -198,18 +204,30 @@
 	return outString
 }
 
-// Create JSON description for given module, only create descriptions for binary modueles which
-// provide a valid HostToolPath
-func hostBinJsonDesc(m android.Module) *SnapshotJsonFlags {
-	path := hostBinToolPath(m)
+// Create a JSON description for the given module; descriptions are only created for binary
+// modules and rust_proc_macro modules which provide a valid HostToolPath.
+func hostJsonDesc(m android.Module) *SnapshotJsonFlags {
+	path := hostToolPath(m)
 	relPath := hostRelativePathString(m)
+	procMacro := false
+	moduleStem := filepath.Base(path.String())
+	crateName := ""
+
+	if pm, ok := m.(ProcMacro); ok && pm.ProcMacro() {
+		procMacro = pm.ProcMacro()
+		moduleStem = strings.TrimSuffix(moduleStem, filepath.Ext(moduleStem))
+		crateName = pm.CrateName()
+	}
+
 	if path.Valid() && path.String() != "" {
 		return &SnapshotJsonFlags{
 			ModuleName:          m.Name(),
-			ModuleStemName:      filepath.Base(path.String()),
+			ModuleStemName:      moduleStem,
 			Filename:            path.String(),
 			Required:            append(m.HostRequiredModuleNames(), m.RequiredModuleNames()...),
 			RelativeInstallPath: relPath,
+			RustProcMacro:       procMacro,
+			CrateName:           crateName,
 		}
 	}
 	return nil
diff --git a/snapshot/snapshot_base.go b/snapshot/snapshot_base.go
index 79d3cf6..4a14f2e 100644
--- a/snapshot/snapshot_base.go
+++ b/snapshot/snapshot_base.go
@@ -114,6 +114,8 @@
 	RelativeInstallPath string `json:",omitempty"`
 	Filename            string `json:",omitempty"`
 	ModuleStemName      string `json:",omitempty"`
+	RustProcMacro       bool   `json:",omitempty"`
+	CrateName           string `json:",omitempty"`
 
 	// dependencies
 	Required []string `json:",omitempty"`
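The two new fields above carry `json:",omitempty"` tags, so they only show up in the snapshot JSON for Rust proc-macro entries. A minimal, self-contained sketch of what such an entry could look like; the struct is a trimmed-down stand-in for SnapshotJsonFlags, and the module name, path and crate name are hypothetical:

    package main

    import (
    	"encoding/json"
    	"fmt"
    )

    // Trimmed-down stand-in for SnapshotJsonFlags with only the fields used here.
    type snapshotJSONFlags struct {
    	ModuleName     string `json:",omitempty"`
    	ModuleStemName string `json:",omitempty"`
    	Filename       string `json:",omitempty"`
    	RustProcMacro  bool   `json:",omitempty"`
    	CrateName      string `json:",omitempty"`
    }

    func main() {
    	// Hypothetical rust_proc_macro module: the stem has its extension trimmed
    	// and the crate name is recorded, mirroring what hostJsonDesc now does.
    	entry := snapshotJSONFlags{
    		ModuleName:     "libfoo_proc_macro",
    		ModuleStemName: "libfoo_proc_macro",
    		Filename:       "out/host/linux-x86/bin/libfoo_proc_macro.so",
    		RustProcMacro:  true,
    		CrateName:      "foo_proc_macro",
    	}
    	b, _ := json.MarshalIndent(entry, "", "  ")
    	fmt.Println(string(b))
    }

Running this prints a JSON object with RustProcMacro set to true and the crate name recorded; ordinary host binaries omit both fields thanks to omitempty.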
diff --git a/soong_ui.bash b/soong_ui.bash
index c1c236b..49c4b78 100755
--- a/soong_ui.bash
+++ b/soong_ui.bash
@@ -53,6 +53,8 @@
 source ${TOP}/build/soong/scripts/microfactory.bash
 
 soong_build_go soong_ui android/soong/cmd/soong_ui
+soong_build_go mk2rbc android/soong/mk2rbc/cmd
+soong_build_go rbcrun rbcrun/cmd
 
 cd ${TOP}
 exec "$(getoutdir)/soong_ui" "$@"
diff --git a/starlark_fmt/format.go b/starlark_fmt/format.go
index 23eee59..3e51fa1 100644
--- a/starlark_fmt/format.go
+++ b/starlark_fmt/format.go
@@ -39,21 +39,26 @@
 
 // PrintStringList returns a Starlark-compatible string of a list of Strings/Labels.
 func PrintStringList(items []string, indentLevel int) string {
-	return PrintList(items, indentLevel, `"%s"`)
+	return PrintList(items, indentLevel, func(s string) string {
+		if strings.Contains(s, "\"") {
+			return `'''%s'''`
+		}
+		return `"%s"`
+	})
 }
 
 // PrintList returns a Starlark-compatible string of a list formatted as requested.
-func PrintList(items []string, indentLevel int, formatString string) string {
+func PrintList(items []string, indentLevel int, formatString func(string) string) string {
 	if len(items) == 0 {
 		return "[]"
 	} else if len(items) == 1 {
-		return fmt.Sprintf("["+formatString+"]", items[0])
+		return fmt.Sprintf("["+formatString(items[0])+"]", items[0])
 	}
 	list := make([]string, 0, len(items)+2)
 	list = append(list, "[")
 	innerIndent := Indention(indentLevel + 1)
 	for _, item := range items {
-		list = append(list, fmt.Sprintf(`%s`+formatString+`,`, innerIndent, item))
+		list = append(list, fmt.Sprintf(`%s`+formatString(item)+`,`, innerIndent, item))
 	}
 	list = append(list, Indention(indentLevel)+"]")
 	return strings.Join(list, "\n")
diff --git a/starlark_fmt/format_test.go b/starlark_fmt/format_test.go
index 90f78ef..9450a31 100644
--- a/starlark_fmt/format_test.go
+++ b/starlark_fmt/format_test.go
@@ -18,6 +18,10 @@
 	"testing"
 )
 
+func simpleFormat(s string) string {
+	return "%s"
+}
+
 func TestPrintEmptyStringList(t *testing.T) {
 	in := []string{}
 	indentLevel := 0
@@ -54,7 +58,7 @@
 func TestPrintEmptyList(t *testing.T) {
 	in := []string{}
 	indentLevel := 0
-	out := PrintList(in, indentLevel, "%s")
+	out := PrintList(in, indentLevel, simpleFormat)
 	expectedOut := "[]"
 	if out != expectedOut {
 		t.Errorf("Expected %q, got %q", expectedOut, out)
@@ -64,7 +68,7 @@
 func TestPrintSingleElementList(t *testing.T) {
 	in := []string{"1"}
 	indentLevel := 0
-	out := PrintList(in, indentLevel, "%s")
+	out := PrintList(in, indentLevel, simpleFormat)
 	expectedOut := `[1]`
 	if out != expectedOut {
 		t.Errorf("Expected %q, got %q", expectedOut, out)
@@ -74,7 +78,7 @@
 func TestPrintMultiElementList(t *testing.T) {
 	in := []string{"1", "2"}
 	indentLevel := 0
-	out := PrintList(in, indentLevel, "%s")
+	out := PrintList(in, indentLevel, simpleFormat)
 	expectedOut := `[
     1,
     2,
@@ -87,7 +91,7 @@
 func TestListWithNonZeroIndent(t *testing.T) {
 	in := []string{"1", "2"}
 	indentLevel := 1
-	out := PrintList(in, indentLevel, "%s")
+	out := PrintList(in, indentLevel, simpleFormat)
 	expectedOut := `[
         1,
         2,
diff --git a/sysprop/sysprop_test.go b/sysprop/sysprop_test.go
index f935f06..88ef615 100644
--- a/sysprop/sysprop_test.go
+++ b/sysprop/sysprop_test.go
@@ -98,9 +98,6 @@
 
 		"build/soong/scripts/jar-wrapper.sh": nil,
 
-		"build/make/core/proguard.flags":             nil,
-		"build/make/core/proguard_basic_keeps.flags": nil,
-
 		"jdk8/jre/lib/jce.jar": nil,
 		"jdk8/jre/lib/rt.jar":  nil,
 		"jdk8/lib/tools.jar":   nil,
diff --git a/tests/lib.sh b/tests/lib.sh
index e6074f8..1bb2df9 100644
--- a/tests/lib.sh
+++ b/tests/lib.sh
@@ -83,11 +83,14 @@
 function create_mock_soong {
   copy_directory build/blueprint
   copy_directory build/soong
+  copy_directory build/make/tools/rbcrun
 
   symlink_directory prebuilts/go
   symlink_directory prebuilts/build-tools
+  symlink_directory prebuilts/clang/host
   symlink_directory external/go-cmp
   symlink_directory external/golang-protobuf
+  symlink_directory external/starlark-go
 
   touch "$MOCK_TOP/Android.bp"
 }
diff --git a/ui/build/build.go b/ui/build/build.go
index 2e44aaa..aadf4af 100644
--- a/ui/build/build.go
+++ b/ui/build/build.go
@@ -18,6 +18,7 @@
 	"io/ioutil"
 	"os"
 	"path/filepath"
+	"sync"
 	"text/template"
 
 	"android/soong/ui/metrics"
@@ -205,6 +206,8 @@
 		return
 	}
 
+	defer waitForDist(ctx)
+
 	// checkProblematicFiles aborts the build if Android.mk or CleanSpec.mk are found at the root of the tree.
 	checkProblematicFiles(ctx)
 
@@ -264,6 +267,7 @@
 
 	if config.StartRBE() {
 		startRBE(ctx, config)
+		defer DumpRBEMetrics(ctx, config, filepath.Join(config.LogsDir(), "rbe_metrics.pb"))
 	}
 
 	if what&RunProductConfig != 0 {
@@ -328,8 +332,18 @@
 	}
 }
 
+var distWaitGroup sync.WaitGroup
+
+// waitForDist waits for all backgrounded distGzipFile and distFile writes to finish
+func waitForDist(ctx Context) {
+	ctx.BeginTrace("soong_ui", "dist")
+	defer ctx.EndTrace()
+
+	distWaitGroup.Wait()
+}
+
 // distGzipFile writes a compressed copy of src to the distDir if dist is enabled.  Failures
-// are printed but non-fatal.
+// are printed but non-fatal. The write is backgrounded on a goroutine tracked by distWaitGroup (optimization).
 func distGzipFile(ctx Context, config Config, src string, subDirs ...string) {
 	if !config.Dist() {
 		return
@@ -342,13 +356,17 @@
 		ctx.Printf("failed to mkdir %s: %s", destDir, err.Error())
 	}
 
-	if err := gzipFileToDir(src, destDir); err != nil {
-		ctx.Printf("failed to dist %s: %s", filepath.Base(src), err.Error())
-	}
+	distWaitGroup.Add(1)
+	go func() {
+		defer distWaitGroup.Done()
+		if err := gzipFileToDir(src, destDir); err != nil {
+			ctx.Printf("failed to dist %s: %s", filepath.Base(src), err.Error())
+		}
+	}()
 }
 
 // distFile writes a copy of src to the distDir if dist is enabled.  Failures are printed but
-// non-fatal.
+// non-fatal. The write is backgrounded on a goroutine tracked by distWaitGroup (optimization).
 func distFile(ctx Context, config Config, src string, subDirs ...string) {
 	if !config.Dist() {
 		return
@@ -361,7 +379,11 @@
 		ctx.Printf("failed to mkdir %s: %s", destDir, err.Error())
 	}
 
-	if _, err := copyFile(src, filepath.Join(destDir, filepath.Base(src))); err != nil {
-		ctx.Printf("failed to dist %s: %s", filepath.Base(src), err.Error())
-	}
+	distWaitGroup.Add(1)
+	go func() {
+		defer distWaitGroup.Done()
+		if _, err := copyFile(src, filepath.Join(destDir, filepath.Base(src))); err != nil {
+			ctx.Printf("failed to dist %s: %s", filepath.Base(src), err.Error())
+		}
+	}()
 }
diff --git a/ui/build/config.go b/ui/build/config.go
index 1dd948c..e271bfc 100644
--- a/ui/build/config.go
+++ b/ui/build/config.go
@@ -35,10 +35,10 @@
 )
 
 const (
-	envConfigDir  = "vendor/google/tools/soong_config"
-	jsonSuffix    = "json"
+	envConfigDir = "vendor/google/tools/soong_config"
+	jsonSuffix   = "json"
 
-	configFetcher = "vendor/google/tools/soong/expconfigfetcher"
+	configFetcher         = "vendor/google/tools/soong/expconfigfetcher"
 	envConfigFetchTimeout = 10 * time.Second
 )
 
@@ -62,6 +62,7 @@
 	jsonModuleGraph bool
 	bp2build        bool
 	queryview       bool
+	reportMkMetrics bool // Collect and report mk2bp migration progress metrics.
 	soongDocs       bool
 	skipConfig      bool
 	skipKati        bool
@@ -143,6 +144,12 @@
 // fetchEnvConfig optionally fetches environment config from an
 // experiments system to control Soong features dynamically.
 func fetchEnvConfig(ctx Context, config *configImpl, envConfigName string) error {
+	configName := envConfigName + "." + jsonSuffix
+	expConfigFetcher := &smpb.ExpConfigFetcher{}
+	defer func() {
+		ctx.Metrics.ExpConfigFetcher(expConfigFetcher)
+	}()
+
 	s, err := os.Stat(configFetcher)
 	if err != nil {
 		if os.IsNotExist(err) {
@@ -151,31 +158,38 @@
 		return err
 	}
 	if s.Mode()&0111 == 0 {
+		status := smpb.ExpConfigFetcher_ERROR
+		expConfigFetcher.Status = &status
 		return fmt.Errorf("configuration fetcher binary %v is not executable: %v", configFetcher, s.Mode())
 	}
 
-	configExists := false
-	outConfigFilePath := filepath.Join(config.OutDir(), envConfigName + jsonSuffix)
-	if _, err := os.Stat(outConfigFilePath); err == nil {
-		configExists = true
-	}
-
 	tCtx, cancel := context.WithTimeout(ctx, envConfigFetchTimeout)
 	defer cancel()
-	cmd := exec.CommandContext(tCtx, configFetcher, "-output_config_dir", config.OutDir())
+	fetchStart := time.Now()
+	cmd := exec.CommandContext(tCtx, configFetcher, "-output_config_dir", config.OutDir(),
+		"-output_config_name", configName)
 	if err := cmd.Start(); err != nil {
+		status := smpb.ExpConfigFetcher_ERROR
+		expConfigFetcher.Status = &status
 		return err
 	}
 
-	// If a config file already exists, return immediately and run the config file
-	// fetch in the background. Otherwise, wait for the config file to be fetched.
-	if configExists {
-		go cmd.Wait()
-		return nil
-	}
 	if err := cmd.Wait(); err != nil {
+		status := smpb.ExpConfigFetcher_ERROR
+		expConfigFetcher.Status = &status
 		return err
 	}
+	fetchEnd := time.Now()
+	expConfigFetcher.Micros = proto.Uint64(uint64(fetchEnd.Sub(fetchStart).Microseconds()))
+	outConfigFilePath := filepath.Join(config.OutDir(), configName)
+	expConfigFetcher.Filename = proto.String(outConfigFilePath)
+	if _, err := os.Stat(outConfigFilePath); err == nil {
+		status := smpb.ExpConfigFetcher_CONFIG
+		expConfigFetcher.Status = &status
+	} else {
+		status := smpb.ExpConfigFetcher_NO_CONFIG
+		expConfigFetcher.Status = &status
+	}
 	return nil
 }
 
@@ -186,7 +200,7 @@
 	}
 
 	if err := fetchEnvConfig(ctx, config, bc); err != nil {
-		fmt.Fprintf(os.Stderr, "Failed to fetch config file: %v", err)
+		fmt.Fprintf(os.Stderr, "Failed to fetch config file: %v\n", err)
 	}
 
 	configDirs := []string{
@@ -367,10 +381,14 @@
 	java8Home := filepath.Join("prebuilts/jdk/jdk8", ret.HostPrebuiltTag())
 	java9Home := filepath.Join("prebuilts/jdk/jdk9", ret.HostPrebuiltTag())
 	java11Home := filepath.Join("prebuilts/jdk/jdk11", ret.HostPrebuiltTag())
+	java17Home := filepath.Join("prebuilts/jdk/jdk17", ret.HostPrebuiltTag())
 	javaHome := func() string {
 		if override, ok := ret.environ.Get("OVERRIDE_ANDROID_JAVA_HOME"); ok {
 			return override
 		}
+		if ret.environ.IsEnvTrue("EXPERIMENTAL_USE_OPENJDK17_TOOLCHAIN") {
+			return java17Home
+		}
 		if toolchain11, ok := ret.environ.Get("EXPERIMENTAL_USE_OPENJDK11_TOOLCHAIN"); ok && toolchain11 != "true" {
 			ctx.Fatalln("The environment variable EXPERIMENTAL_USE_OPENJDK11_TOOLCHAIN is no longer supported. An OpenJDK 11 toolchain is now the global default.")
 		}
@@ -711,6 +729,8 @@
 			c.skipConfig = true
 		} else if arg == "--skip-soong-tests" {
 			c.skipSoongTests = true
+		} else if arg == "--mk-metrics" {
+			c.reportMkMetrics = true
 		} else if len(arg) > 0 && arg[0] == '-' {
 			parseArgNum := func(def int) int {
 				if len(arg) > 2 {
@@ -1184,7 +1204,12 @@
 }
 
 func (c *configImpl) rbeAuth() (string, string) {
-	credFlags := []string{"use_application_default_credentials", "use_gce_credentials", "credential_file"}
+	credFlags := []string{
+		"use_application_default_credentials",
+		"use_gce_credentials",
+		"credential_file",
+		"use_google_prod_creds",
+	}
 	for _, cf := range credFlags {
 		for _, f := range []string{"RBE_" + cf, "FLAG_" + cf} {
 			if v, ok := c.environ.Get(f); ok {
@@ -1381,6 +1406,11 @@
 	return filepath.Join(c.LogsDir(), "bazel_metrics")
 }
 
+// MkFileMetrics returns the file path for make-related metrics.
+func (c *configImpl) MkMetrics() string {
+	return filepath.Join(c.LogsDir(), "mk_metrics.pb")
+}
+
 func (c *configImpl) SetEmptyNinjaFile(v bool) {
 	c.emptyNinjaFile = v
 }
diff --git a/ui/build/dumpvars.go b/ui/build/dumpvars.go
index 3f10f75..285f156 100644
--- a/ui/build/dumpvars.go
+++ b/ui/build/dumpvars.go
@@ -205,6 +205,9 @@
 		"CCACHE_SLOPPINESS",
 		"CCACHE_BASEDIR",
 		"CCACHE_CPP2",
+
+		// LLVM compiler wrapper options
+		"TOOLCHAIN_RUSAGE_OUTPUT",
 	}
 
 	allVars := append(append([]string{
@@ -262,12 +265,6 @@
 		"BUILD_BROKEN_USES_BUILD_STATIC_LIBRARY",
 	}, exportEnvVars...), BannerVars...)
 
-	// We need Roboleaf converter and runner in the mixed mode
-	runMicrofactory(ctx, config, "mk2rbc", "android/soong/mk2rbc/cmd",
-		map[string]string{"android/soong": "build/soong"})
-	runMicrofactory(ctx, config, "rbcrun", "rbcrun/cmd",
-		map[string]string{"go.starlark.net": "external/starlark-go", "rbcrun": "build/make/tools/rbcrun"})
-
 	makeVars, err := dumpMakeVars(ctx, config, config.Arguments(), allVars, true, "")
 	if err != nil {
 		ctx.Fatalln("Error dumping make vars:", err)
diff --git a/ui/build/finder.go b/ui/build/finder.go
index 68efe21..262de3d 100644
--- a/ui/build/finder.go
+++ b/ui/build/finder.go
@@ -138,6 +138,17 @@
 		ctx.Fatalf("Could not export module list: %v", err)
 	}
 
+	// Gate collecting/reporting mk metrics on builds that specifically request
+	// it, as identifying the total number of mk files adds 4-5ms onto null
+	// builds.
+	if config.reportMkMetrics {
+		androidMksTotal := f.FindNamedAt(".", "Android.mk")
+
+		ctx.Metrics.SetToplevelMakefiles(len(androidMks))
+		ctx.Metrics.SetTotalMakefiles(len(androidMksTotal))
+		ctx.Metrics.DumpMkMetrics(config.MkMetrics())
+	}
+
 	// Stop searching a subdirectory recursively after finding a CleanSpec.mk.
 	cleanSpecs := f.FindFirstNamedAt(".", "CleanSpec.mk")
 	err = dumpListToFile(ctx, config, cleanSpecs, filepath.Join(dumpDir, "CleanSpec.mk.list"))
diff --git a/ui/build/ninja.go b/ui/build/ninja.go
index 41de6bd..dab1a9b 100644
--- a/ui/build/ninja.go
+++ b/ui/build/ninja.go
@@ -169,6 +169,9 @@
 			"CCACHE_BASEDIR",
 			"CCACHE_CPP2",
 			"CCACHE_DIR",
+
+			// LLVM compiler wrapper options
+			"TOOLCHAIN_RUSAGE_OUTPUT",
 		}, config.BuildBrokenNinjaUsesEnvVars()...)...)
 	}
 
diff --git a/ui/build/paths/config.go b/ui/build/paths/config.go
index 81c500d..831a80f 100644
--- a/ui/build/paths/config.go
+++ b/ui/build/paths/config.go
@@ -91,6 +91,7 @@
 	"pstree":  Allowed,
 	"rsync":   Allowed,
 	"sh":      Allowed,
+	"stubby":  Allowed,
 	"tr":      Allowed,
 	"unzip":   Allowed,
 	"zip":     Allowed,
diff --git a/ui/build/soong.go b/ui/build/soong.go
index 117a2a5..8992b4f 100644
--- a/ui/build/soong.go
+++ b/ui/build/soong.go
@@ -15,7 +15,9 @@
 package build
 
 import (
+	"errors"
 	"fmt"
+	"io/fs"
 	"io/ioutil"
 	"os"
 	"path/filepath"
@@ -491,10 +493,14 @@
 
 	ninja("bootstrap", "bootstrap.ninja", targets...)
 
-	var soongBuildMetrics *soong_metrics_proto.SoongBuildMetrics
 	if shouldCollectBuildSoongMetrics(config) {
 		soongBuildMetrics := loadSoongBuildMetrics(ctx, config)
-		logSoongBuildMetrics(ctx, soongBuildMetrics)
+		if soongBuildMetrics != nil {
+			logSoongBuildMetrics(ctx, soongBuildMetrics)
+			if ctx.Metrics != nil {
+				ctx.Metrics.SetSoongBuildMetrics(soongBuildMetrics)
+			}
+		}
 	}
 
 	distGzipFile(ctx, config, config.SoongNinjaFile(), "soong")
@@ -504,8 +510,8 @@
 		distGzipFile(ctx, config, config.SoongMakeVarsMk(), "soong")
 	}
 
-	if shouldCollectBuildSoongMetrics(config) && ctx.Metrics != nil {
-		ctx.Metrics.SetSoongBuildMetrics(soongBuildMetrics)
+	if config.JsonModuleGraph() {
+		distGzipFile(ctx, config, config.ModuleGraphFile(), "soong")
 	}
 }
 
@@ -534,9 +540,13 @@
 }
 
 func loadSoongBuildMetrics(ctx Context, config Config) *soong_metrics_proto.SoongBuildMetrics {
-	soongBuildMetricsFile := filepath.Join(config.OutDir(), "soong", "soong_build_metrics.pb")
-	buf, err := ioutil.ReadFile(soongBuildMetricsFile)
-	if err != nil {
+	soongBuildMetricsFile := filepath.Join(config.LogsDir(), "soong_build_metrics.pb")
+	buf, err := os.ReadFile(soongBuildMetricsFile)
+	if errors.Is(err, fs.ErrNotExist) {
+		// Soong may not have run during this invocation
+          ctx.Verbosef("Failed to read metrics file, %s: %s", soongBuildMetricsFile, err)
+		return nil
+	} else if err != nil {
 		ctx.Fatalf("Failed to load %s: %s", soongBuildMetricsFile, err)
 	}
 	soongBuildMetrics := &soong_metrics_proto.SoongBuildMetrics{}
diff --git a/ui/metrics/Android.bp b/ui/metrics/Android.bp
index 3ba3907..05db1d7 100644
--- a/ui/metrics/Android.bp
+++ b/ui/metrics/Android.bp
@@ -21,9 +21,10 @@
     pkgPath: "android/soong/ui/metrics",
     deps: [
         "golang-protobuf-proto",
+        "soong-ui-bp2build_metrics_proto",
         "soong-ui-metrics_upload_proto",
         "soong-ui-metrics_proto",
-        "soong-ui-bp2build_metrics_proto",
+        "soong-ui-mk_metrics_proto",
         "soong-ui-tracer",
         "soong-shared",
     ],
@@ -71,3 +72,15 @@
         "bp2build_metrics_proto/bp2build_metrics.pb.go",
     ],
 }
+
+bootstrap_go_package {
+    name: "soong-ui-mk_metrics_proto",
+    pkgPath: "android/soong/ui/metrics/mk_metrics_proto",
+    deps: [
+        "golang-protobuf-reflect-protoreflect",
+        "golang-protobuf-runtime-protoimpl",
+    ],
+    srcs: [
+        "mk_metrics_proto/mk_metrics.pb.go",
+    ],
+}
diff --git a/ui/metrics/bp2build_metrics_proto/bp2build_metrics.pb.go b/ui/metrics/bp2build_metrics_proto/bp2build_metrics.pb.go
index 95f02ca..93f3471 100644
--- a/ui/metrics/bp2build_metrics_proto/bp2build_metrics.pb.go
+++ b/ui/metrics/bp2build_metrics_proto/bp2build_metrics.pb.go
@@ -53,6 +53,9 @@
 	ConvertedModuleTypeCount map[string]uint64 `protobuf:"bytes,6,rep,name=convertedModuleTypeCount,proto3" json:"convertedModuleTypeCount,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
 	// Counts of total modules by module type.
 	TotalModuleTypeCount map[string]uint64 `protobuf:"bytes,7,rep,name=totalModuleTypeCount,proto3" json:"totalModuleTypeCount,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
+	// List of traced runtime events of bp2build, useful for tracking bp2build
+	// runtime.
+	Events []*Event `protobuf:"bytes,8,rep,name=events,proto3" json:"events,omitempty"`
 }
 
 func (x *Bp2BuildMetrics) Reset() {
@@ -136,13 +139,89 @@
 	return nil
 }
 
+func (x *Bp2BuildMetrics) GetEvents() []*Event {
+	if x != nil {
+		return x.Events
+	}
+	return nil
+}
+
+// Traced runtime event of bp2build.
+type Event struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// The event name.
+	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+	// The absolute start time of the event.
+	// The number of nanoseconds elapsed since January 1, 1970 UTC.
+	StartTime uint64 `protobuf:"varint,2,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"`
+	// The real running time.
+	// The number of nanoseconds elapsed since start_time.
+	RealTime uint64 `protobuf:"varint,3,opt,name=real_time,json=realTime,proto3" json:"real_time,omitempty"`
+}
+
+func (x *Event) Reset() {
+	*x = Event{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_bp2build_metrics_proto_msgTypes[1]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *Event) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*Event) ProtoMessage() {}
+
+func (x *Event) ProtoReflect() protoreflect.Message {
+	mi := &file_bp2build_metrics_proto_msgTypes[1]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use Event.ProtoReflect.Descriptor instead.
+func (*Event) Descriptor() ([]byte, []int) {
+	return file_bp2build_metrics_proto_rawDescGZIP(), []int{1}
+}
+
+func (x *Event) GetName() string {
+	if x != nil {
+		return x.Name
+	}
+	return ""
+}
+
+func (x *Event) GetStartTime() uint64 {
+	if x != nil {
+		return x.StartTime
+	}
+	return 0
+}
+
+func (x *Event) GetRealTime() uint64 {
+	if x != nil {
+		return x.RealTime
+	}
+	return 0
+}
+
 var File_bp2build_metrics_proto protoreflect.FileDescriptor
 
 var file_bp2build_metrics_proto_rawDesc = []byte{
 	0x0a, 0x16, 0x62, 0x70, 0x32, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69,
 	0x63, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x1c, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x5f,
 	0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x62, 0x70, 0x32, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x6d,
-	0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x22, 0xac, 0x06, 0x0a, 0x0f, 0x42, 0x70, 0x32, 0x42, 0x75,
+	0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x22, 0xe9, 0x06, 0x0a, 0x0f, 0x42, 0x70, 0x32, 0x42, 0x75,
 	0x69, 0x6c, 0x64, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x32, 0x0a, 0x14, 0x67, 0x65,
 	0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x43, 0x6f, 0x75,
 	0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x14, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61,
@@ -179,24 +258,34 @@
 	0x42, 0x75, 0x69, 0x6c, 0x64, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x54, 0x6f, 0x74,
 	0x61, 0x6c, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x54, 0x79, 0x70, 0x65, 0x43, 0x6f, 0x75, 0x6e,
 	0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x14, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x4d, 0x6f, 0x64,
-	0x75, 0x6c, 0x65, 0x54, 0x79, 0x70, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x1a, 0x41, 0x0a, 0x13,
-	0x52, 0x75, 0x6c, 0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x45, 0x6e,
-	0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
-	0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02,
-	0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a,
-	0x4b, 0x0a, 0x1d, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x64, 0x4d, 0x6f, 0x64, 0x75,
-	0x6c, 0x65, 0x54, 0x79, 0x70, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79,
+	0x75, 0x6c, 0x65, 0x54, 0x79, 0x70, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x3b, 0x0a, 0x06,
+	0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x73,
+	0x6f, 0x6f, 0x6e, 0x67, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x62, 0x70, 0x32, 0x62, 0x75,
+	0x69, 0x6c, 0x64, 0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x45, 0x76, 0x65, 0x6e,
+	0x74, 0x52, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0x41, 0x0a, 0x13, 0x52, 0x75, 0x6c,
+	0x65, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79,
 	0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b,
 	0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28,
-	0x04, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x47, 0x0a, 0x19,
-	0x54, 0x6f, 0x74, 0x61, 0x6c, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x54, 0x79, 0x70, 0x65, 0x43,
-	0x6f, 0x75, 0x6e, 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79,
-	0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76,
-	0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75,
-	0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x31, 0x5a, 0x2f, 0x61, 0x6e, 0x64, 0x72, 0x6f, 0x69, 0x64,
-	0x2f, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x2f, 0x75, 0x69, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63,
-	0x73, 0x2f, 0x62, 0x70, 0x32, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69,
-	0x63, 0x73, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
+	0x04, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x4b, 0x0a, 0x1d,
+	0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x64, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x54,
+	0x79, 0x70, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a,
+	0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12,
+	0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05,
+	0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x47, 0x0a, 0x19, 0x54, 0x6f, 0x74,
+	0x61, 0x6c, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x54, 0x79, 0x70, 0x65, 0x43, 0x6f, 0x75, 0x6e,
+	0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20,
+	0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75,
+	0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02,
+	0x38, 0x01, 0x22, 0x57, 0x0a, 0x05, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e,
+	0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12,
+	0x1d, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x02, 0x20,
+	0x01, 0x28, 0x04, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x1b,
+	0x0a, 0x09, 0x72, 0x65, 0x61, 0x6c, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28,
+	0x04, 0x52, 0x08, 0x72, 0x65, 0x61, 0x6c, 0x54, 0x69, 0x6d, 0x65, 0x42, 0x31, 0x5a, 0x2f, 0x61,
+	0x6e, 0x64, 0x72, 0x6f, 0x69, 0x64, 0x2f, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x2f, 0x75, 0x69, 0x2f,
+	0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2f, 0x62, 0x70, 0x32, 0x62, 0x75, 0x69, 0x6c, 0x64,
+	0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06,
+	0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
 }
 
 var (
@@ -211,22 +300,24 @@
 	return file_bp2build_metrics_proto_rawDescData
 }
 
-var file_bp2build_metrics_proto_msgTypes = make([]protoimpl.MessageInfo, 4)
+var file_bp2build_metrics_proto_msgTypes = make([]protoimpl.MessageInfo, 5)
 var file_bp2build_metrics_proto_goTypes = []interface{}{
 	(*Bp2BuildMetrics)(nil), // 0: soong_build_bp2build_metrics.Bp2BuildMetrics
-	nil,                     // 1: soong_build_bp2build_metrics.Bp2BuildMetrics.RuleClassCountEntry
-	nil,                     // 2: soong_build_bp2build_metrics.Bp2BuildMetrics.ConvertedModuleTypeCountEntry
-	nil,                     // 3: soong_build_bp2build_metrics.Bp2BuildMetrics.TotalModuleTypeCountEntry
+	(*Event)(nil),           // 1: soong_build_bp2build_metrics.Event
+	nil,                     // 2: soong_build_bp2build_metrics.Bp2BuildMetrics.RuleClassCountEntry
+	nil,                     // 3: soong_build_bp2build_metrics.Bp2BuildMetrics.ConvertedModuleTypeCountEntry
+	nil,                     // 4: soong_build_bp2build_metrics.Bp2BuildMetrics.TotalModuleTypeCountEntry
 }
 var file_bp2build_metrics_proto_depIdxs = []int32{
-	1, // 0: soong_build_bp2build_metrics.Bp2BuildMetrics.ruleClassCount:type_name -> soong_build_bp2build_metrics.Bp2BuildMetrics.RuleClassCountEntry
-	2, // 1: soong_build_bp2build_metrics.Bp2BuildMetrics.convertedModuleTypeCount:type_name -> soong_build_bp2build_metrics.Bp2BuildMetrics.ConvertedModuleTypeCountEntry
-	3, // 2: soong_build_bp2build_metrics.Bp2BuildMetrics.totalModuleTypeCount:type_name -> soong_build_bp2build_metrics.Bp2BuildMetrics.TotalModuleTypeCountEntry
-	3, // [3:3] is the sub-list for method output_type
-	3, // [3:3] is the sub-list for method input_type
-	3, // [3:3] is the sub-list for extension type_name
-	3, // [3:3] is the sub-list for extension extendee
-	0, // [0:3] is the sub-list for field type_name
+	2, // 0: soong_build_bp2build_metrics.Bp2BuildMetrics.ruleClassCount:type_name -> soong_build_bp2build_metrics.Bp2BuildMetrics.RuleClassCountEntry
+	3, // 1: soong_build_bp2build_metrics.Bp2BuildMetrics.convertedModuleTypeCount:type_name -> soong_build_bp2build_metrics.Bp2BuildMetrics.ConvertedModuleTypeCountEntry
+	4, // 2: soong_build_bp2build_metrics.Bp2BuildMetrics.totalModuleTypeCount:type_name -> soong_build_bp2build_metrics.Bp2BuildMetrics.TotalModuleTypeCountEntry
+	1, // 3: soong_build_bp2build_metrics.Bp2BuildMetrics.events:type_name -> soong_build_bp2build_metrics.Event
+	4, // [4:4] is the sub-list for method output_type
+	4, // [4:4] is the sub-list for method input_type
+	4, // [4:4] is the sub-list for extension type_name
+	4, // [4:4] is the sub-list for extension extendee
+	0, // [0:4] is the sub-list for field type_name
 }
 
 func init() { file_bp2build_metrics_proto_init() }
@@ -247,6 +338,18 @@
 				return nil
 			}
 		}
+		file_bp2build_metrics_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*Event); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
 	}
 	type x struct{}
 	out := protoimpl.TypeBuilder{
@@ -254,7 +357,7 @@
 			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
 			RawDescriptor: file_bp2build_metrics_proto_rawDesc,
 			NumEnums:      0,
-			NumMessages:   4,
+			NumMessages:   5,
 			NumExtensions: 0,
 			NumServices:   0,
 		},
diff --git a/ui/metrics/bp2build_metrics_proto/bp2build_metrics.proto b/ui/metrics/bp2build_metrics_proto/bp2build_metrics.proto
index 6d98a3d..19a7827 100644
--- a/ui/metrics/bp2build_metrics_proto/bp2build_metrics.proto
+++ b/ui/metrics/bp2build_metrics_proto/bp2build_metrics.proto
@@ -38,4 +38,22 @@
 
   // Counts of total modules by module type.
   map<string, uint64> totalModuleTypeCount = 7;
+
+  // List of traced runtime events of bp2build, useful for tracking bp2build
+  // runtime.
+  repeated Event events = 8;
+}
+
+// Traced runtime event of bp2build.
+message Event {
+  // The event name.
+  string name = 1;
+
+  // The absolute start time of the event.
+  // The number of nanoseconds elapsed since January 1, 1970 UTC.
+  uint64 start_time = 2;
+
+  // The real running time.
+  // The number of nanoseconds elapsed since start_time.
+  uint64 real_time = 3;
 }
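The new Event message is a plain name/start/duration record. A minimal sketch of how one might be appended through the generated Go API (the event name and timing are illustrative, not taken from this change):

package main

import (
	"fmt"
	"time"

	bp2build_metrics_proto "android/soong/ui/metrics/bp2build_metrics_proto"
)

func main() {
	start := time.Now()
	// ... conversion work being traced would run here ...
	m := &bp2build_metrics_proto.Bp2BuildMetrics{}
	m.Events = append(m.Events, &bp2build_metrics_proto.Event{
		Name:      "bp2build.convert", // hypothetical event name
		StartTime: uint64(start.UnixNano()),
		RealTime:  uint64(time.Since(start).Nanoseconds()),
	})
	fmt.Println(m.GetEvents()[0].GetName())
}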
diff --git a/ui/metrics/metrics.go b/ui/metrics/metrics.go
index 80f8c1a..0c62865 100644
--- a/ui/metrics/metrics.go
+++ b/ui/metrics/metrics.go
@@ -38,9 +38,11 @@
 	"time"
 
 	"android/soong/shared"
+
 	"google.golang.org/protobuf/proto"
 
 	soong_metrics_proto "android/soong/ui/metrics/metrics_proto"
+	mk_metrics_proto "android/soong/ui/metrics/mk_metrics_proto"
 )
 
 const (
@@ -62,14 +64,22 @@
 	Total = "total"
 )
 
-// Metrics is a struct that stores collected metrics during the course
-// of a build which later is dumped to a MetricsBase protobuf file.
-// See ui/metrics/metrics_proto/metrics.proto for further details
-// on what information is collected.
+// Metrics is a struct that stores collected metrics during the course of a
+// build. It is later dumped to protobuf files. See underlying metrics protos
+// for further details on what information is collected.
 type Metrics struct {
-	// The protobuf message that is later written to the file.
+	// Protobuf containing various top-level build metrics. These include:
+	// 1. Build identifiers (ex: branch ID, requested product, hostname,
+	//    originating command)
+	// 2. Per-subprocess top-level metrics (ex: ninja process IO and runtime).
+	//    Note that, since these metrics are reported by soong_ui, they provide
+	//    little insight into performance breakdowns of individual
+	//    subprocesses.
 	metrics soong_metrics_proto.MetricsBase
 
+	// Protobuf containing metrics pertaining to the number of makefiles in a build.
+	mkMetrics mk_metrics_proto.MkMetrics
+
 	// A list of pending build events.
 	EventTracer *EventTracer
 }
@@ -78,11 +88,24 @@
 func New() (metrics *Metrics) {
 	m := &Metrics{
 		metrics:     soong_metrics_proto.MetricsBase{},
+		mkMetrics:   mk_metrics_proto.MkMetrics{},
 		EventTracer: &EventTracer{},
 	}
 	return m
 }
 
+func (m *Metrics) SetTotalMakefiles(total int) {
+	m.mkMetrics.TotalMakefiles = uint32(total)
+}
+
+func (m *Metrics) SetToplevelMakefiles(total int) {
+	m.mkMetrics.ToplevelMakefiles = uint32(total)
+}
+
+func (m *Metrics) DumpMkMetrics(outPath string) {
+	shared.Save(&m.mkMetrics, outPath)
+}
+
 // SetTimeMetrics stores performance information from an executed block of
 // code.
 func (m *Metrics) SetTimeMetrics(perf soong_metrics_proto.PerfInfo) {
@@ -113,6 +136,11 @@
 	m.metrics.SystemResourceInfo = b
 }
 
+// ExpConfigFetcher stores information about the expconfigfetcher.
+func (m *Metrics) ExpConfigFetcher(b *soong_metrics_proto.ExpConfigFetcher) {
+	m.metrics.ExpConfigFetcher = b
+}
+
 // SetMetadataMetrics sets information about the build such as the target
 // product, host architecture and out directory.
 func (m *Metrics) SetMetadataMetrics(metadata map[string]string) {
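A minimal sketch of how the new makefile-count setters and DumpMkMetrics added above could be driven; the counts and the output path are placeholders, not values from this change:

package main

import "android/soong/ui/metrics"

func main() {
	m := metrics.New()
	// Hypothetical counts; soong_ui would derive these from the source tree.
	m.SetTotalMakefiles(4231)
	m.SetToplevelMakefiles(987)
	// Serializes the MkMetrics proto to the given (hypothetical) path.
	m.DumpMkMetrics("out/soong/logs/mk_metrics.pb")
}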
diff --git a/ui/metrics/metrics_proto/metrics.pb.go b/ui/metrics/metrics_proto/metrics.pb.go
index 2e530b0..69f5689 100644
--- a/ui/metrics/metrics_proto/metrics.pb.go
+++ b/ui/metrics/metrics_proto/metrics.pb.go
@@ -217,6 +217,65 @@
 	return file_metrics_proto_rawDescGZIP(), []int{5, 0}
 }
 
+type ExpConfigFetcher_ConfigStatus int32
+
+const (
+	ExpConfigFetcher_NO_CONFIG ExpConfigFetcher_ConfigStatus = 0
+	ExpConfigFetcher_CONFIG    ExpConfigFetcher_ConfigStatus = 1
+	ExpConfigFetcher_ERROR     ExpConfigFetcher_ConfigStatus = 2
+)
+
+// Enum value maps for ExpConfigFetcher_ConfigStatus.
+var (
+	ExpConfigFetcher_ConfigStatus_name = map[int32]string{
+		0: "NO_CONFIG",
+		1: "CONFIG",
+		2: "ERROR",
+	}
+	ExpConfigFetcher_ConfigStatus_value = map[string]int32{
+		"NO_CONFIG": 0,
+		"CONFIG":    1,
+		"ERROR":     2,
+	}
+)
+
+func (x ExpConfigFetcher_ConfigStatus) Enum() *ExpConfigFetcher_ConfigStatus {
+	p := new(ExpConfigFetcher_ConfigStatus)
+	*p = x
+	return p
+}
+
+func (x ExpConfigFetcher_ConfigStatus) String() string {
+	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
+}
+
+func (ExpConfigFetcher_ConfigStatus) Descriptor() protoreflect.EnumDescriptor {
+	return file_metrics_proto_enumTypes[3].Descriptor()
+}
+
+func (ExpConfigFetcher_ConfigStatus) Type() protoreflect.EnumType {
+	return &file_metrics_proto_enumTypes[3]
+}
+
+func (x ExpConfigFetcher_ConfigStatus) Number() protoreflect.EnumNumber {
+	return protoreflect.EnumNumber(x)
+}
+
+// Deprecated: Do not use.
+func (x *ExpConfigFetcher_ConfigStatus) UnmarshalJSON(b []byte) error {
+	num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)
+	if err != nil {
+		return err
+	}
+	*x = ExpConfigFetcher_ConfigStatus(num)
+	return nil
+}
+
+// Deprecated: Use ExpConfigFetcher_ConfigStatus.Descriptor instead.
+func (ExpConfigFetcher_ConfigStatus) EnumDescriptor() ([]byte, []int) {
+	return file_metrics_proto_rawDescGZIP(), []int{9, 0}
+}
+
 type MetricsBase struct {
 	state         protoimpl.MessageState
 	sizeCache     protoimpl.SizeCache
@@ -274,6 +333,8 @@
 	BuildCommand *string `protobuf:"bytes,26,opt,name=build_command,json=buildCommand" json:"build_command,omitempty"`
 	// The metrics for calling Bazel.
 	BazelRuns []*PerfInfo `protobuf:"bytes,27,rep,name=bazel_runs,json=bazelRuns" json:"bazel_runs,omitempty"`
+	// The metrics of the experiment config fetcher.
+	ExpConfigFetcher *ExpConfigFetcher `protobuf:"bytes,28,opt,name=exp_config_fetcher,json=expConfigFetcher" json:"exp_config_fetcher,omitempty"`
 }
 
 // Default values for MetricsBase fields.
@@ -505,6 +566,13 @@
 	return nil
 }
 
+func (x *MetricsBase) GetExpConfigFetcher() *ExpConfigFetcher {
+	if x != nil {
+		return x.ExpConfigFetcher
+	}
+	return nil
+}
+
 type BuildConfig struct {
 	state         protoimpl.MessageState
 	sizeCache     protoimpl.SizeCache
@@ -1072,6 +1140,8 @@
 	TotalAllocSize *uint64 `protobuf:"varint,4,opt,name=total_alloc_size,json=totalAllocSize" json:"total_alloc_size,omitempty"`
 	// The approximate maximum size of the heap in soong_build in bytes.
 	MaxHeapSize *uint64 `protobuf:"varint,5,opt,name=max_heap_size,json=maxHeapSize" json:"max_heap_size,omitempty"`
+	// Runtime metrics for soong_build execution.
+	Events []*PerfInfo `protobuf:"bytes,6,rep,name=events" json:"events,omitempty"`
 }
 
 func (x *SoongBuildMetrics) Reset() {
@@ -1141,12 +1211,88 @@
 	return 0
 }
 
+func (x *SoongBuildMetrics) GetEvents() []*PerfInfo {
+	if x != nil {
+		return x.Events
+	}
+	return nil
+}
+
+type ExpConfigFetcher struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// The result of the call to expconfigfetcher
+	// NO_CONFIG - Not part of experiment
+	// CONFIG - Part of experiment, config copied successfully
+	// ERROR - expconfigfetcher failed
+	Status *ExpConfigFetcher_ConfigStatus `protobuf:"varint,1,opt,name=status,enum=soong_build_metrics.ExpConfigFetcher_ConfigStatus" json:"status,omitempty"`
+	// The output config filename
+	Filename *string `protobuf:"bytes,2,opt,name=filename" json:"filename,omitempty"`
+	// Time, in microseconds, taken by the expconfigfetcher
+	Micros *uint64 `protobuf:"varint,3,opt,name=micros" json:"micros,omitempty"`
+}
+
+func (x *ExpConfigFetcher) Reset() {
+	*x = ExpConfigFetcher{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_metrics_proto_msgTypes[9]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *ExpConfigFetcher) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*ExpConfigFetcher) ProtoMessage() {}
+
+func (x *ExpConfigFetcher) ProtoReflect() protoreflect.Message {
+	mi := &file_metrics_proto_msgTypes[9]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use ExpConfigFetcher.ProtoReflect.Descriptor instead.
+func (*ExpConfigFetcher) Descriptor() ([]byte, []int) {
+	return file_metrics_proto_rawDescGZIP(), []int{9}
+}
+
+func (x *ExpConfigFetcher) GetStatus() ExpConfigFetcher_ConfigStatus {
+	if x != nil && x.Status != nil {
+		return *x.Status
+	}
+	return ExpConfigFetcher_NO_CONFIG
+}
+
+func (x *ExpConfigFetcher) GetFilename() string {
+	if x != nil && x.Filename != nil {
+		return *x.Filename
+	}
+	return ""
+}
+
+func (x *ExpConfigFetcher) GetMicros() uint64 {
+	if x != nil && x.Micros != nil {
+		return *x.Micros
+	}
+	return 0
+}
+
 var File_metrics_proto protoreflect.FileDescriptor
 
 var file_metrics_proto_rawDesc = []byte{
 	0x0a, 0x0d, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12,
 	0x13, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x6d, 0x65, 0x74,
-	0x72, 0x69, 0x63, 0x73, 0x22, 0xd8, 0x0c, 0x0a, 0x0b, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73,
+	0x72, 0x69, 0x63, 0x73, 0x22, 0xad, 0x0d, 0x0a, 0x0b, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73,
 	0x42, 0x61, 0x73, 0x65, 0x12, 0x30, 0x0a, 0x14, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x64, 0x61,
 	0x74, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01,
 	0x28, 0x03, 0x52, 0x12, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x44, 0x61, 0x74, 0x65, 0x54, 0x69, 0x6d,
@@ -1240,121 +1386,143 @@
 	0x64, 0x12, 0x3c, 0x0a, 0x0a, 0x62, 0x61, 0x7a, 0x65, 0x6c, 0x5f, 0x72, 0x75, 0x6e, 0x73, 0x18,
 	0x1b, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x5f, 0x62, 0x75,
 	0x69, 0x6c, 0x64, 0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x50, 0x65, 0x72, 0x66,
-	0x49, 0x6e, 0x66, 0x6f, 0x52, 0x09, 0x62, 0x61, 0x7a, 0x65, 0x6c, 0x52, 0x75, 0x6e, 0x73, 0x22,
-	0x30, 0x0a, 0x0c, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x56, 0x61, 0x72, 0x69, 0x61, 0x6e, 0x74, 0x12,
-	0x08, 0x0a, 0x04, 0x55, 0x53, 0x45, 0x52, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x55, 0x53, 0x45,
-	0x52, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x45, 0x4e, 0x47, 0x10,
-	0x02, 0x22, 0x3c, 0x0a, 0x04, 0x41, 0x72, 0x63, 0x68, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b,
-	0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x07, 0x0a, 0x03, 0x41, 0x52, 0x4d, 0x10, 0x01, 0x12,
-	0x09, 0x0a, 0x05, 0x41, 0x52, 0x4d, 0x36, 0x34, 0x10, 0x02, 0x12, 0x07, 0x0a, 0x03, 0x58, 0x38,
-	0x36, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x58, 0x38, 0x36, 0x5f, 0x36, 0x34, 0x10, 0x04, 0x22,
-	0xd3, 0x01, 0x0a, 0x0b, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12,
-	0x19, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x5f, 0x67, 0x6f, 0x6d, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28,
-	0x08, 0x52, 0x07, 0x75, 0x73, 0x65, 0x47, 0x6f, 0x6d, 0x61, 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73,
-	0x65, 0x5f, 0x72, 0x62, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x75, 0x73, 0x65,
-	0x52, 0x62, 0x65, 0x12, 0x24, 0x0a, 0x0e, 0x66, 0x6f, 0x72, 0x63, 0x65, 0x5f, 0x75, 0x73, 0x65,
-	0x5f, 0x67, 0x6f, 0x6d, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0c, 0x66, 0x6f, 0x72,
-	0x63, 0x65, 0x55, 0x73, 0x65, 0x47, 0x6f, 0x6d, 0x61, 0x12, 0x24, 0x0a, 0x0e, 0x62, 0x61, 0x7a,
-	0x65, 0x6c, 0x5f, 0x61, 0x73, 0x5f, 0x6e, 0x69, 0x6e, 0x6a, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28,
-	0x08, 0x52, 0x0c, 0x62, 0x61, 0x7a, 0x65, 0x6c, 0x41, 0x73, 0x4e, 0x69, 0x6e, 0x6a, 0x61, 0x12,
-	0x2a, 0x0a, 0x11, 0x62, 0x61, 0x7a, 0x65, 0x6c, 0x5f, 0x6d, 0x69, 0x78, 0x65, 0x64, 0x5f, 0x62,
-	0x75, 0x69, 0x6c, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x62, 0x61, 0x7a, 0x65,
-	0x6c, 0x4d, 0x69, 0x78, 0x65, 0x64, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x74,
-	0x61, 0x72, 0x67, 0x65, 0x74, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x74, 0x61,
-	0x72, 0x67, 0x65, 0x74, 0x73, 0x22, 0x6f, 0x0a, 0x12, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x52,
-	0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x32, 0x0a, 0x15, 0x74,
-	0x6f, 0x74, 0x61, 0x6c, 0x5f, 0x70, 0x68, 0x79, 0x73, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x6d, 0x65,
-	0x6d, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x13, 0x74, 0x6f, 0x74, 0x61,
-	0x6c, 0x50, 0x68, 0x79, 0x73, 0x69, 0x63, 0x61, 0x6c, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x12,
-	0x25, 0x0a, 0x0e, 0x61, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x63, 0x70, 0x75,
-	0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0d, 0x61, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62,
-	0x6c, 0x65, 0x43, 0x70, 0x75, 0x73, 0x22, 0x81, 0x02, 0x0a, 0x08, 0x50, 0x65, 0x72, 0x66, 0x49,
-	0x6e, 0x66, 0x6f, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69,
-	0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69,
-	0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20,
-	0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x74, 0x61,
-	0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x73,
-	0x74, 0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x72, 0x65, 0x61, 0x6c,
-	0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x08, 0x72, 0x65, 0x61,
-	0x6c, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x21, 0x0a, 0x0a, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x5f,
-	0x75, 0x73, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x02, 0x18, 0x01, 0x52, 0x09, 0x6d,
-	0x65, 0x6d, 0x6f, 0x72, 0x79, 0x55, 0x73, 0x65, 0x12, 0x60, 0x0a, 0x17, 0x70, 0x72, 0x6f, 0x63,
-	0x65, 0x73, 0x73, 0x65, 0x73, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x69,
-	0x6e, 0x66, 0x6f, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x73, 0x6f, 0x6f, 0x6e,
-	0x67, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e,
-	0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49,
-	0x6e, 0x66, 0x6f, 0x52, 0x15, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x73, 0x52, 0x65,
-	0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x22, 0xb9, 0x03, 0x0a, 0x13, 0x50,
-	0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, 0x6e,
-	0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
-	0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x28, 0x0a, 0x10, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74,
-	0x69, 0x6d, 0x65, 0x5f, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04,
-	0x52, 0x0e, 0x75, 0x73, 0x65, 0x72, 0x54, 0x69, 0x6d, 0x65, 0x4d, 0x69, 0x63, 0x72, 0x6f, 0x73,
-	0x12, 0x2c, 0x0a, 0x12, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f,
-	0x6d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x10, 0x73, 0x79,
-	0x73, 0x74, 0x65, 0x6d, 0x54, 0x69, 0x6d, 0x65, 0x4d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x12, 0x1c,
-	0x0a, 0x0a, 0x6d, 0x61, 0x78, 0x5f, 0x72, 0x73, 0x73, 0x5f, 0x6b, 0x62, 0x18, 0x04, 0x20, 0x01,
-	0x28, 0x04, 0x52, 0x08, 0x6d, 0x61, 0x78, 0x52, 0x73, 0x73, 0x4b, 0x62, 0x12, 0x2a, 0x0a, 0x11,
-	0x6d, 0x69, 0x6e, 0x6f, 0x72, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x66, 0x61, 0x75, 0x6c, 0x74,
-	0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0f, 0x6d, 0x69, 0x6e, 0x6f, 0x72, 0x50, 0x61,
-	0x67, 0x65, 0x46, 0x61, 0x75, 0x6c, 0x74, 0x73, 0x12, 0x2a, 0x0a, 0x11, 0x6d, 0x61, 0x6a, 0x6f,
-	0x72, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x06, 0x20,
-	0x01, 0x28, 0x04, 0x52, 0x0f, 0x6d, 0x61, 0x6a, 0x6f, 0x72, 0x50, 0x61, 0x67, 0x65, 0x46, 0x61,
-	0x75, 0x6c, 0x74, 0x73, 0x12, 0x1e, 0x0a, 0x0b, 0x69, 0x6f, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74,
-	0x5f, 0x6b, 0x62, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x69, 0x6f, 0x49, 0x6e, 0x70,
-	0x75, 0x74, 0x4b, 0x62, 0x12, 0x20, 0x0a, 0x0c, 0x69, 0x6f, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75,
-	0x74, 0x5f, 0x6b, 0x62, 0x18, 0x08, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0a, 0x69, 0x6f, 0x4f, 0x75,
-	0x74, 0x70, 0x75, 0x74, 0x4b, 0x62, 0x12, 0x3c, 0x0a, 0x1a, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74,
-	0x61, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x5f, 0x73, 0x77, 0x69, 0x74,
-	0x63, 0x68, 0x65, 0x73, 0x18, 0x09, 0x20, 0x01, 0x28, 0x04, 0x52, 0x18, 0x76, 0x6f, 0x6c, 0x75,
-	0x6e, 0x74, 0x61, 0x72, 0x79, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x53, 0x77, 0x69, 0x74,
-	0x63, 0x68, 0x65, 0x73, 0x12, 0x40, 0x0a, 0x1c, 0x69, 0x6e, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74,
-	0x61, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x5f, 0x73, 0x77, 0x69, 0x74,
-	0x63, 0x68, 0x65, 0x73, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x04, 0x52, 0x1a, 0x69, 0x6e, 0x76, 0x6f,
-	0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x53, 0x77,
-	0x69, 0x74, 0x63, 0x68, 0x65, 0x73, 0x22, 0xe5, 0x01, 0x0a, 0x0e, 0x4d, 0x6f, 0x64, 0x75, 0x6c,
-	0x65, 0x54, 0x79, 0x70, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x5b, 0x0a, 0x0c, 0x62, 0x75, 0x69,
-	0x6c, 0x64, 0x5f, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32,
-	0x2f, 0x2e, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x6d, 0x65,
-	0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x54, 0x79, 0x70, 0x65,
-	0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d,
-	0x3a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x52, 0x0b, 0x62, 0x75, 0x69, 0x6c, 0x64,
-	0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65,
-	0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x6f, 0x64,
-	0x75, 0x6c, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x24, 0x0a, 0x0e, 0x6e, 0x75, 0x6d, 0x5f, 0x6f,
-	0x66, 0x5f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52,
-	0x0c, 0x6e, 0x75, 0x6d, 0x4f, 0x66, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x22, 0x2f, 0x0a,
-	0x0b, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x12, 0x0b, 0x0a, 0x07,
-	0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x4f, 0x4f,
-	0x4e, 0x47, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x4d, 0x41, 0x4b, 0x45, 0x10, 0x02, 0x22, 0x6c,
-	0x0a, 0x1a, 0x43, 0x72, 0x69, 0x74, 0x69, 0x63, 0x61, 0x6c, 0x55, 0x73, 0x65, 0x72, 0x4a, 0x6f,
-	0x75, 0x72, 0x6e, 0x65, 0x79, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x12, 0x0a, 0x04,
-	0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65,
-	0x12, 0x3a, 0x0a, 0x07, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28,
-	0x0b, 0x32, 0x20, 0x2e, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f,
-	0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x42,
-	0x61, 0x73, 0x65, 0x52, 0x07, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x22, 0x62, 0x0a, 0x1b,
-	0x43, 0x72, 0x69, 0x74, 0x69, 0x63, 0x61, 0x6c, 0x55, 0x73, 0x65, 0x72, 0x4a, 0x6f, 0x75, 0x72,
-	0x6e, 0x65, 0x79, 0x73, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x43, 0x0a, 0x04, 0x63,
-	0x75, 0x6a, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x73, 0x6f, 0x6f, 0x6e,
-	0x67, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e,
-	0x43, 0x72, 0x69, 0x74, 0x69, 0x63, 0x61, 0x6c, 0x55, 0x73, 0x65, 0x72, 0x4a, 0x6f, 0x75, 0x72,
-	0x6e, 0x65, 0x79, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52, 0x04, 0x63, 0x75, 0x6a, 0x73,
-	0x22, 0xc3, 0x01, 0x0a, 0x11, 0x53, 0x6f, 0x6f, 0x6e, 0x67, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x4d,
-	0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65,
-	0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73,
-	0x12, 0x1a, 0x0a, 0x08, 0x76, 0x61, 0x72, 0x69, 0x61, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01,
-	0x28, 0x0d, 0x52, 0x08, 0x76, 0x61, 0x72, 0x69, 0x61, 0x6e, 0x74, 0x73, 0x12, 0x2a, 0x0a, 0x11,
-	0x74, 0x6f, 0x74, 0x61, 0x6c, 0x5f, 0x61, 0x6c, 0x6c, 0x6f, 0x63, 0x5f, 0x63, 0x6f, 0x75, 0x6e,
-	0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0f, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x41, 0x6c,
-	0x6c, 0x6f, 0x63, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x28, 0x0a, 0x10, 0x74, 0x6f, 0x74, 0x61,
-	0x6c, 0x5f, 0x61, 0x6c, 0x6c, 0x6f, 0x63, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x04, 0x20, 0x01,
-	0x28, 0x04, 0x52, 0x0e, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x41, 0x6c, 0x6c, 0x6f, 0x63, 0x53, 0x69,
-	0x7a, 0x65, 0x12, 0x22, 0x0a, 0x0d, 0x6d, 0x61, 0x78, 0x5f, 0x68, 0x65, 0x61, 0x70, 0x5f, 0x73,
-	0x69, 0x7a, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x6d, 0x61, 0x78, 0x48, 0x65,
-	0x61, 0x70, 0x53, 0x69, 0x7a, 0x65, 0x42, 0x28, 0x5a, 0x26, 0x61, 0x6e, 0x64, 0x72, 0x6f, 0x69,
-	0x64, 0x2f, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x2f, 0x75, 0x69, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69,
-	0x63, 0x73, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f,
+	0x49, 0x6e, 0x66, 0x6f, 0x52, 0x09, 0x62, 0x61, 0x7a, 0x65, 0x6c, 0x52, 0x75, 0x6e, 0x73, 0x12,
+	0x53, 0x0a, 0x12, 0x65, 0x78, 0x70, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x5f, 0x66, 0x65,
+	0x74, 0x63, 0x68, 0x65, 0x72, 0x18, 0x1c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x73, 0x6f,
+	0x6f, 0x6e, 0x67, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63,
+	0x73, 0x2e, 0x45, 0x78, 0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x65, 0x74, 0x63, 0x68,
+	0x65, 0x72, 0x52, 0x10, 0x65, 0x78, 0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x65, 0x74,
+	0x63, 0x68, 0x65, 0x72, 0x22, 0x30, 0x0a, 0x0c, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x56, 0x61, 0x72,
+	0x69, 0x61, 0x6e, 0x74, 0x12, 0x08, 0x0a, 0x04, 0x55, 0x53, 0x45, 0x52, 0x10, 0x00, 0x12, 0x0d,
+	0x0a, 0x09, 0x55, 0x53, 0x45, 0x52, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x01, 0x12, 0x07, 0x0a,
+	0x03, 0x45, 0x4e, 0x47, 0x10, 0x02, 0x22, 0x3c, 0x0a, 0x04, 0x41, 0x72, 0x63, 0x68, 0x12, 0x0b,
+	0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x07, 0x0a, 0x03, 0x41,
+	0x52, 0x4d, 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x41, 0x52, 0x4d, 0x36, 0x34, 0x10, 0x02, 0x12,
+	0x07, 0x0a, 0x03, 0x58, 0x38, 0x36, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x58, 0x38, 0x36, 0x5f,
+	0x36, 0x34, 0x10, 0x04, 0x22, 0xd3, 0x01, 0x0a, 0x0b, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x43, 0x6f,
+	0x6e, 0x66, 0x69, 0x67, 0x12, 0x19, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x5f, 0x67, 0x6f, 0x6d, 0x61,
+	0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x75, 0x73, 0x65, 0x47, 0x6f, 0x6d, 0x61, 0x12,
+	0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x5f, 0x72, 0x62, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08,
+	0x52, 0x06, 0x75, 0x73, 0x65, 0x52, 0x62, 0x65, 0x12, 0x24, 0x0a, 0x0e, 0x66, 0x6f, 0x72, 0x63,
+	0x65, 0x5f, 0x75, 0x73, 0x65, 0x5f, 0x67, 0x6f, 0x6d, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08,
+	0x52, 0x0c, 0x66, 0x6f, 0x72, 0x63, 0x65, 0x55, 0x73, 0x65, 0x47, 0x6f, 0x6d, 0x61, 0x12, 0x24,
+	0x0a, 0x0e, 0x62, 0x61, 0x7a, 0x65, 0x6c, 0x5f, 0x61, 0x73, 0x5f, 0x6e, 0x69, 0x6e, 0x6a, 0x61,
+	0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0c, 0x62, 0x61, 0x7a, 0x65, 0x6c, 0x41, 0x73, 0x4e,
+	0x69, 0x6e, 0x6a, 0x61, 0x12, 0x2a, 0x0a, 0x11, 0x62, 0x61, 0x7a, 0x65, 0x6c, 0x5f, 0x6d, 0x69,
+	0x78, 0x65, 0x64, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52,
+	0x0f, 0x62, 0x61, 0x7a, 0x65, 0x6c, 0x4d, 0x69, 0x78, 0x65, 0x64, 0x42, 0x75, 0x69, 0x6c, 0x64,
+	0x12, 0x18, 0x0a, 0x07, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28,
+	0x09, 0x52, 0x07, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x73, 0x22, 0x6f, 0x0a, 0x12, 0x53, 0x79,
+	0x73, 0x74, 0x65, 0x6d, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, 0x6e, 0x66, 0x6f,
+	0x12, 0x32, 0x0a, 0x15, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x5f, 0x70, 0x68, 0x79, 0x73, 0x69, 0x63,
+	0x61, 0x6c, 0x5f, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52,
+	0x13, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x50, 0x68, 0x79, 0x73, 0x69, 0x63, 0x61, 0x6c, 0x4d, 0x65,
+	0x6d, 0x6f, 0x72, 0x79, 0x12, 0x25, 0x0a, 0x0e, 0x61, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c,
+	0x65, 0x5f, 0x63, 0x70, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0d, 0x61, 0x76,
+	0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x70, 0x75, 0x73, 0x22, 0x81, 0x02, 0x0a, 0x08,
+	0x50, 0x65, 0x72, 0x66, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63,
+	0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64,
+	0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61,
+	0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1d,
+	0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01,
+	0x28, 0x04, 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x1b, 0x0a,
+	0x09, 0x72, 0x65, 0x61, 0x6c, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04,
+	0x52, 0x08, 0x72, 0x65, 0x61, 0x6c, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x21, 0x0a, 0x0a, 0x6d, 0x65,
+	0x6d, 0x6f, 0x72, 0x79, 0x5f, 0x75, 0x73, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x42, 0x02,
+	0x18, 0x01, 0x52, 0x09, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x55, 0x73, 0x65, 0x12, 0x60, 0x0a,
+	0x17, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x73, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75,
+	0x72, 0x63, 0x65, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x28,
+	0x2e, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x6d, 0x65, 0x74,
+	0x72, 0x69, 0x63, 0x73, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x52, 0x65, 0x73, 0x6f,
+	0x75, 0x72, 0x63, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x15, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73,
+	0x73, 0x65, 0x73, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x22,
+	0xb9, 0x03, 0x0a, 0x13, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x52, 0x65, 0x73, 0x6f, 0x75,
+	0x72, 0x63, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18,
+	0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x28, 0x0a, 0x10, 0x75,
+	0x73, 0x65, 0x72, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x18,
+	0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0e, 0x75, 0x73, 0x65, 0x72, 0x54, 0x69, 0x6d, 0x65, 0x4d,
+	0x69, 0x63, 0x72, 0x6f, 0x73, 0x12, 0x2c, 0x0a, 0x12, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x5f,
+	0x74, 0x69, 0x6d, 0x65, 0x5f, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28,
+	0x04, 0x52, 0x10, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x54, 0x69, 0x6d, 0x65, 0x4d, 0x69, 0x63,
+	0x72, 0x6f, 0x73, 0x12, 0x1c, 0x0a, 0x0a, 0x6d, 0x61, 0x78, 0x5f, 0x72, 0x73, 0x73, 0x5f, 0x6b,
+	0x62, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x08, 0x6d, 0x61, 0x78, 0x52, 0x73, 0x73, 0x4b,
+	0x62, 0x12, 0x2a, 0x0a, 0x11, 0x6d, 0x69, 0x6e, 0x6f, 0x72, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f,
+	0x66, 0x61, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0f, 0x6d, 0x69,
+	0x6e, 0x6f, 0x72, 0x50, 0x61, 0x67, 0x65, 0x46, 0x61, 0x75, 0x6c, 0x74, 0x73, 0x12, 0x2a, 0x0a,
+	0x11, 0x6d, 0x61, 0x6a, 0x6f, 0x72, 0x5f, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x66, 0x61, 0x75, 0x6c,
+	0x74, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0f, 0x6d, 0x61, 0x6a, 0x6f, 0x72, 0x50,
+	0x61, 0x67, 0x65, 0x46, 0x61, 0x75, 0x6c, 0x74, 0x73, 0x12, 0x1e, 0x0a, 0x0b, 0x69, 0x6f, 0x5f,
+	0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x6b, 0x62, 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09,
+	0x69, 0x6f, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x4b, 0x62, 0x12, 0x20, 0x0a, 0x0c, 0x69, 0x6f, 0x5f,
+	0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x6b, 0x62, 0x18, 0x08, 0x20, 0x01, 0x28, 0x04, 0x52,
+	0x0a, 0x69, 0x6f, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x4b, 0x62, 0x12, 0x3c, 0x0a, 0x1a, 0x76,
+	0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74,
+	0x5f, 0x73, 0x77, 0x69, 0x74, 0x63, 0x68, 0x65, 0x73, 0x18, 0x09, 0x20, 0x01, 0x28, 0x04, 0x52,
+	0x18, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78,
+	0x74, 0x53, 0x77, 0x69, 0x74, 0x63, 0x68, 0x65, 0x73, 0x12, 0x40, 0x0a, 0x1c, 0x69, 0x6e, 0x76,
+	0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74,
+	0x5f, 0x73, 0x77, 0x69, 0x74, 0x63, 0x68, 0x65, 0x73, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x04, 0x52,
+	0x1a, 0x69, 0x6e, 0x76, 0x6f, 0x6c, 0x75, 0x6e, 0x74, 0x61, 0x72, 0x79, 0x43, 0x6f, 0x6e, 0x74,
+	0x65, 0x78, 0x74, 0x53, 0x77, 0x69, 0x74, 0x63, 0x68, 0x65, 0x73, 0x22, 0xe5, 0x01, 0x0a, 0x0e,
+	0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x54, 0x79, 0x70, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x5b,
+	0x0a, 0x0c, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x18, 0x01,
+	0x20, 0x01, 0x28, 0x0e, 0x32, 0x2f, 0x2e, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x5f, 0x62, 0x75, 0x69,
+	0x6c, 0x64, 0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x4d, 0x6f, 0x64, 0x75, 0x6c,
+	0x65, 0x54, 0x79, 0x70, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x53,
+	0x79, 0x73, 0x74, 0x65, 0x6d, 0x3a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x52, 0x0b,
+	0x62, 0x75, 0x69, 0x6c, 0x64, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x12, 0x1f, 0x0a, 0x0b, 0x6d,
+	0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09,
+	0x52, 0x0a, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x24, 0x0a, 0x0e,
+	0x6e, 0x75, 0x6d, 0x5f, 0x6f, 0x66, 0x5f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x03,
+	0x20, 0x01, 0x28, 0x0d, 0x52, 0x0c, 0x6e, 0x75, 0x6d, 0x4f, 0x66, 0x4d, 0x6f, 0x64, 0x75, 0x6c,
+	0x65, 0x73, 0x22, 0x2f, 0x0a, 0x0b, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x53, 0x79, 0x73, 0x74, 0x65,
+	0x6d, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x09,
+	0x0a, 0x05, 0x53, 0x4f, 0x4f, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x4d, 0x41, 0x4b,
+	0x45, 0x10, 0x02, 0x22, 0x6c, 0x0a, 0x1a, 0x43, 0x72, 0x69, 0x74, 0x69, 0x63, 0x61, 0x6c, 0x55,
+	0x73, 0x65, 0x72, 0x4a, 0x6f, 0x75, 0x72, 0x6e, 0x65, 0x79, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63,
+	0x73, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
+	0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x3a, 0x0a, 0x07, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73,
+	0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x5f, 0x62,
+	0x75, 0x69, 0x6c, 0x64, 0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x4d, 0x65, 0x74,
+	0x72, 0x69, 0x63, 0x73, 0x42, 0x61, 0x73, 0x65, 0x52, 0x07, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63,
+	0x73, 0x22, 0x62, 0x0a, 0x1b, 0x43, 0x72, 0x69, 0x74, 0x69, 0x63, 0x61, 0x6c, 0x55, 0x73, 0x65,
+	0x72, 0x4a, 0x6f, 0x75, 0x72, 0x6e, 0x65, 0x79, 0x73, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73,
+	0x12, 0x43, 0x0a, 0x04, 0x63, 0x75, 0x6a, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f,
+	0x2e, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x6d, 0x65, 0x74,
+	0x72, 0x69, 0x63, 0x73, 0x2e, 0x43, 0x72, 0x69, 0x74, 0x69, 0x63, 0x61, 0x6c, 0x55, 0x73, 0x65,
+	0x72, 0x4a, 0x6f, 0x75, 0x72, 0x6e, 0x65, 0x79, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x52,
+	0x04, 0x63, 0x75, 0x6a, 0x73, 0x22, 0xfa, 0x01, 0x0a, 0x11, 0x53, 0x6f, 0x6f, 0x6e, 0x67, 0x42,
+	0x75, 0x69, 0x6c, 0x64, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x6d,
+	0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x07, 0x6d, 0x6f,
+	0x64, 0x75, 0x6c, 0x65, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x76, 0x61, 0x72, 0x69, 0x61, 0x6e, 0x74,
+	0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x08, 0x76, 0x61, 0x72, 0x69, 0x61, 0x6e, 0x74,
+	0x73, 0x12, 0x2a, 0x0a, 0x11, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x5f, 0x61, 0x6c, 0x6c, 0x6f, 0x63,
+	0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0f, 0x74, 0x6f,
+	0x74, 0x61, 0x6c, 0x41, 0x6c, 0x6c, 0x6f, 0x63, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x28, 0x0a,
+	0x10, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x5f, 0x61, 0x6c, 0x6c, 0x6f, 0x63, 0x5f, 0x73, 0x69, 0x7a,
+	0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0e, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x41, 0x6c,
+	0x6c, 0x6f, 0x63, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x22, 0x0a, 0x0d, 0x6d, 0x61, 0x78, 0x5f, 0x68,
+	0x65, 0x61, 0x70, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b,
+	0x6d, 0x61, 0x78, 0x48, 0x65, 0x61, 0x70, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x35, 0x0a, 0x06, 0x65,
+	0x76, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x6f,
+	0x6f, 0x6e, 0x67, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63,
+	0x73, 0x2e, 0x50, 0x65, 0x72, 0x66, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x06, 0x65, 0x76, 0x65, 0x6e,
+	0x74, 0x73, 0x22, 0xc8, 0x01, 0x0a, 0x10, 0x45, 0x78, 0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67,
+	0x46, 0x65, 0x74, 0x63, 0x68, 0x65, 0x72, 0x12, 0x4a, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75,
+	0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x32, 0x2e, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x5f,
+	0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x45, 0x78,
+	0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x65, 0x74, 0x63, 0x68, 0x65, 0x72, 0x2e, 0x43,
+	0x6f, 0x6e, 0x66, 0x69, 0x67, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61,
+	0x74, 0x75, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x18,
+	0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x12,
+	0x16, 0x0a, 0x06, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52,
+	0x06, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x22, 0x34, 0x0a, 0x0c, 0x43, 0x6f, 0x6e, 0x66, 0x69,
+	0x67, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0d, 0x0a, 0x09, 0x4e, 0x4f, 0x5f, 0x43, 0x4f,
+	0x4e, 0x46, 0x49, 0x47, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x43, 0x4f, 0x4e, 0x46, 0x49, 0x47,
+	0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x02, 0x42, 0x28, 0x5a,
+	0x26, 0x61, 0x6e, 0x64, 0x72, 0x6f, 0x69, 0x64, 0x2f, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x2f, 0x75,
+	0x69, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63,
+	0x73, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f,
 }
 
 var (
@@ -1369,45 +1537,50 @@
 	return file_metrics_proto_rawDescData
 }
 
-var file_metrics_proto_enumTypes = make([]protoimpl.EnumInfo, 3)
-var file_metrics_proto_msgTypes = make([]protoimpl.MessageInfo, 9)
+var file_metrics_proto_enumTypes = make([]protoimpl.EnumInfo, 4)
+var file_metrics_proto_msgTypes = make([]protoimpl.MessageInfo, 10)
 var file_metrics_proto_goTypes = []interface{}{
 	(MetricsBase_BuildVariant)(0),       // 0: soong_build_metrics.MetricsBase.BuildVariant
 	(MetricsBase_Arch)(0),               // 1: soong_build_metrics.MetricsBase.Arch
 	(ModuleTypeInfo_BuildSystem)(0),     // 2: soong_build_metrics.ModuleTypeInfo.BuildSystem
-	(*MetricsBase)(nil),                 // 3: soong_build_metrics.MetricsBase
-	(*BuildConfig)(nil),                 // 4: soong_build_metrics.BuildConfig
-	(*SystemResourceInfo)(nil),          // 5: soong_build_metrics.SystemResourceInfo
-	(*PerfInfo)(nil),                    // 6: soong_build_metrics.PerfInfo
-	(*ProcessResourceInfo)(nil),         // 7: soong_build_metrics.ProcessResourceInfo
-	(*ModuleTypeInfo)(nil),              // 8: soong_build_metrics.ModuleTypeInfo
-	(*CriticalUserJourneyMetrics)(nil),  // 9: soong_build_metrics.CriticalUserJourneyMetrics
-	(*CriticalUserJourneysMetrics)(nil), // 10: soong_build_metrics.CriticalUserJourneysMetrics
-	(*SoongBuildMetrics)(nil),           // 11: soong_build_metrics.SoongBuildMetrics
+	(ExpConfigFetcher_ConfigStatus)(0),  // 3: soong_build_metrics.ExpConfigFetcher.ConfigStatus
+	(*MetricsBase)(nil),                 // 4: soong_build_metrics.MetricsBase
+	(*BuildConfig)(nil),                 // 5: soong_build_metrics.BuildConfig
+	(*SystemResourceInfo)(nil),          // 6: soong_build_metrics.SystemResourceInfo
+	(*PerfInfo)(nil),                    // 7: soong_build_metrics.PerfInfo
+	(*ProcessResourceInfo)(nil),         // 8: soong_build_metrics.ProcessResourceInfo
+	(*ModuleTypeInfo)(nil),              // 9: soong_build_metrics.ModuleTypeInfo
+	(*CriticalUserJourneyMetrics)(nil),  // 10: soong_build_metrics.CriticalUserJourneyMetrics
+	(*CriticalUserJourneysMetrics)(nil), // 11: soong_build_metrics.CriticalUserJourneysMetrics
+	(*SoongBuildMetrics)(nil),           // 12: soong_build_metrics.SoongBuildMetrics
+	(*ExpConfigFetcher)(nil),            // 13: soong_build_metrics.ExpConfigFetcher
 }
 var file_metrics_proto_depIdxs = []int32{
 	0,  // 0: soong_build_metrics.MetricsBase.target_build_variant:type_name -> soong_build_metrics.MetricsBase.BuildVariant
 	1,  // 1: soong_build_metrics.MetricsBase.target_arch:type_name -> soong_build_metrics.MetricsBase.Arch
 	1,  // 2: soong_build_metrics.MetricsBase.host_arch:type_name -> soong_build_metrics.MetricsBase.Arch
 	1,  // 3: soong_build_metrics.MetricsBase.host_2nd_arch:type_name -> soong_build_metrics.MetricsBase.Arch
-	6,  // 4: soong_build_metrics.MetricsBase.setup_tools:type_name -> soong_build_metrics.PerfInfo
-	6,  // 5: soong_build_metrics.MetricsBase.kati_runs:type_name -> soong_build_metrics.PerfInfo
-	6,  // 6: soong_build_metrics.MetricsBase.soong_runs:type_name -> soong_build_metrics.PerfInfo
-	6,  // 7: soong_build_metrics.MetricsBase.ninja_runs:type_name -> soong_build_metrics.PerfInfo
-	6,  // 8: soong_build_metrics.MetricsBase.total:type_name -> soong_build_metrics.PerfInfo
-	11, // 9: soong_build_metrics.MetricsBase.soong_build_metrics:type_name -> soong_build_metrics.SoongBuildMetrics
-	4,  // 10: soong_build_metrics.MetricsBase.build_config:type_name -> soong_build_metrics.BuildConfig
-	5,  // 11: soong_build_metrics.MetricsBase.system_resource_info:type_name -> soong_build_metrics.SystemResourceInfo
-	6,  // 12: soong_build_metrics.MetricsBase.bazel_runs:type_name -> soong_build_metrics.PerfInfo
-	7,  // 13: soong_build_metrics.PerfInfo.processes_resource_info:type_name -> soong_build_metrics.ProcessResourceInfo
-	2,  // 14: soong_build_metrics.ModuleTypeInfo.build_system:type_name -> soong_build_metrics.ModuleTypeInfo.BuildSystem
-	3,  // 15: soong_build_metrics.CriticalUserJourneyMetrics.metrics:type_name -> soong_build_metrics.MetricsBase
-	9,  // 16: soong_build_metrics.CriticalUserJourneysMetrics.cujs:type_name -> soong_build_metrics.CriticalUserJourneyMetrics
-	17, // [17:17] is the sub-list for method output_type
-	17, // [17:17] is the sub-list for method input_type
-	17, // [17:17] is the sub-list for extension type_name
-	17, // [17:17] is the sub-list for extension extendee
-	0,  // [0:17] is the sub-list for field type_name
+	7,  // 4: soong_build_metrics.MetricsBase.setup_tools:type_name -> soong_build_metrics.PerfInfo
+	7,  // 5: soong_build_metrics.MetricsBase.kati_runs:type_name -> soong_build_metrics.PerfInfo
+	7,  // 6: soong_build_metrics.MetricsBase.soong_runs:type_name -> soong_build_metrics.PerfInfo
+	7,  // 7: soong_build_metrics.MetricsBase.ninja_runs:type_name -> soong_build_metrics.PerfInfo
+	7,  // 8: soong_build_metrics.MetricsBase.total:type_name -> soong_build_metrics.PerfInfo
+	12, // 9: soong_build_metrics.MetricsBase.soong_build_metrics:type_name -> soong_build_metrics.SoongBuildMetrics
+	5,  // 10: soong_build_metrics.MetricsBase.build_config:type_name -> soong_build_metrics.BuildConfig
+	6,  // 11: soong_build_metrics.MetricsBase.system_resource_info:type_name -> soong_build_metrics.SystemResourceInfo
+	7,  // 12: soong_build_metrics.MetricsBase.bazel_runs:type_name -> soong_build_metrics.PerfInfo
+	13, // 13: soong_build_metrics.MetricsBase.exp_config_fetcher:type_name -> soong_build_metrics.ExpConfigFetcher
+	8,  // 14: soong_build_metrics.PerfInfo.processes_resource_info:type_name -> soong_build_metrics.ProcessResourceInfo
+	2,  // 15: soong_build_metrics.ModuleTypeInfo.build_system:type_name -> soong_build_metrics.ModuleTypeInfo.BuildSystem
+	4,  // 16: soong_build_metrics.CriticalUserJourneyMetrics.metrics:type_name -> soong_build_metrics.MetricsBase
+	10, // 17: soong_build_metrics.CriticalUserJourneysMetrics.cujs:type_name -> soong_build_metrics.CriticalUserJourneyMetrics
+	7,  // 18: soong_build_metrics.SoongBuildMetrics.events:type_name -> soong_build_metrics.PerfInfo
+	3,  // 19: soong_build_metrics.ExpConfigFetcher.status:type_name -> soong_build_metrics.ExpConfigFetcher.ConfigStatus
+	20, // [20:20] is the sub-list for method output_type
+	20, // [20:20] is the sub-list for method input_type
+	20, // [20:20] is the sub-list for extension type_name
+	20, // [20:20] is the sub-list for extension extendee
+	0,  // [0:20] is the sub-list for field type_name
 }
 
 func init() { file_metrics_proto_init() }
@@ -1524,14 +1697,26 @@
 				return nil
 			}
 		}
+		file_metrics_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*ExpConfigFetcher); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
 	}
 	type x struct{}
 	out := protoimpl.TypeBuilder{
 		File: protoimpl.DescBuilder{
 			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
 			RawDescriptor: file_metrics_proto_rawDesc,
-			NumEnums:      3,
-			NumMessages:   9,
+			NumEnums:      4,
+			NumMessages:   10,
 			NumExtensions: 0,
 			NumServices:   0,
 		},
diff --git a/ui/metrics/metrics_proto/metrics.proto b/ui/metrics/metrics_proto/metrics.proto
index db0a14a..814eb67 100644
--- a/ui/metrics/metrics_proto/metrics.proto
+++ b/ui/metrics/metrics_proto/metrics.proto
@@ -108,6 +108,9 @@
 
   // The metrics for calling Bazel.
   repeated PerfInfo bazel_runs = 27;
+
+  // The metrics of the experiment config fetcher.
+  optional ExpConfigFetcher exp_config_fetcher = 28;
 }
 
 message BuildConfig {
@@ -235,4 +238,26 @@
 
   // The approximate maximum size of the heap in soong_build in bytes.
   optional uint64 max_heap_size = 5;
+
+  // Runtime metrics for soong_build execution.
+  repeated PerfInfo events = 6;
+}
+
+message ExpConfigFetcher {
+  enum ConfigStatus {
+    NO_CONFIG = 0;
+    CONFIG = 1;
+    ERROR = 2;
+  }
+  // The result of the call to expconfigfetcher
+  // NO_CONFIG - Not part of experiment
+  // CONFIG - Part of experiment, config copied successfully
+  // ERROR - expconfigfetcher failed
+  optional ConfigStatus status = 1;
+
+  // The output config filename
+  optional string filename = 2;
+
+  // Time, in microseconds, taken by the expconfigfetcher
+  optional uint64 micros = 3;
 }
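A minimal sketch of recording an ExpConfigFetcher result through the new Metrics.ExpConfigFetcher setter, assuming the generated metrics_proto package; the filename and duration are illustrative:

package main

import (
	"google.golang.org/protobuf/proto"

	"android/soong/ui/metrics"
	soong_metrics_proto "android/soong/ui/metrics/metrics_proto"
)

func main() {
	m := metrics.New()
	// A successful fetch: status CONFIG, plus the copied config file and timing.
	m.ExpConfigFetcher(&soong_metrics_proto.ExpConfigFetcher{
		Status:   soong_metrics_proto.ExpConfigFetcher_CONFIG.Enum(),
		Filename: proto.String("experiment_config.json"), // hypothetical filename
		Micros:   proto.Uint64(1520),                     // hypothetical duration
	})
}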
diff --git a/ui/metrics/mk_metrics_proto/mk_metrics.pb.go b/ui/metrics/mk_metrics_proto/mk_metrics.pb.go
new file mode 100644
index 0000000..32e136a
--- /dev/null
+++ b/ui/metrics/mk_metrics_proto/mk_metrics.pb.go
@@ -0,0 +1,177 @@
+// Copyright 2022 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// versions:
+// 	protoc-gen-go v1.27.1
+// 	protoc        v3.9.1
+// source: mk_metrics.proto
+
+package mk_metrics_proto
+
+import (
+	protoreflect "google.golang.org/protobuf/reflect/protoreflect"
+	protoimpl "google.golang.org/protobuf/runtime/protoimpl"
+	reflect "reflect"
+	sync "sync"
+)
+
+const (
+	// Verify that this generated code is sufficiently up-to-date.
+	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
+	// Verify that runtime/protoimpl is sufficiently up-to-date.
+	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
+)
+
+// Contains metrics pertaining to makefiles.
+type MkMetrics struct {
+	state         protoimpl.MessageState
+	sizeCache     protoimpl.SizeCache
+	unknownFields protoimpl.UnknownFields
+
+	// Total number of mk files present in the workspace.
+	TotalMakefiles uint32 `protobuf:"varint,1,opt,name=totalMakefiles,proto3" json:"totalMakefiles,omitempty"`
+	// Number of top-level mk files present in the workspace.
+	// A mk file is "top level" if there are no mk files in its parent
+	// directories.
+	// This value is equivalent to the number of entries in Android.mk.list.
+	ToplevelMakefiles uint32 `protobuf:"varint,2,opt,name=toplevelMakefiles,proto3" json:"toplevelMakefiles,omitempty"`
+}
+
+func (x *MkMetrics) Reset() {
+	*x = MkMetrics{}
+	if protoimpl.UnsafeEnabled {
+		mi := &file_mk_metrics_proto_msgTypes[0]
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		ms.StoreMessageInfo(mi)
+	}
+}
+
+func (x *MkMetrics) String() string {
+	return protoimpl.X.MessageStringOf(x)
+}
+
+func (*MkMetrics) ProtoMessage() {}
+
+func (x *MkMetrics) ProtoReflect() protoreflect.Message {
+	mi := &file_mk_metrics_proto_msgTypes[0]
+	if protoimpl.UnsafeEnabled && x != nil {
+		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+		if ms.LoadMessageInfo() == nil {
+			ms.StoreMessageInfo(mi)
+		}
+		return ms
+	}
+	return mi.MessageOf(x)
+}
+
+// Deprecated: Use MkMetrics.ProtoReflect.Descriptor instead.
+func (*MkMetrics) Descriptor() ([]byte, []int) {
+	return file_mk_metrics_proto_rawDescGZIP(), []int{0}
+}
+
+func (x *MkMetrics) GetTotalMakefiles() uint32 {
+	if x != nil {
+		return x.TotalMakefiles
+	}
+	return 0
+}
+
+func (x *MkMetrics) GetToplevelMakefiles() uint32 {
+	if x != nil {
+		return x.ToplevelMakefiles
+	}
+	return 0
+}
+
+var File_mk_metrics_proto protoreflect.FileDescriptor
+
+var file_mk_metrics_proto_rawDesc = []byte{
+	0x0a, 0x10, 0x6d, 0x6b, 0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2e, 0x70, 0x72, 0x6f,
+	0x74, 0x6f, 0x12, 0x16, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f,
+	0x6d, 0x6b, 0x5f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x22, 0x61, 0x0a, 0x09, 0x4d, 0x6b,
+	0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x12, 0x26, 0x0a, 0x0e, 0x74, 0x6f, 0x74, 0x61, 0x6c,
+	0x4d, 0x61, 0x6b, 0x65, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52,
+	0x0e, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x4d, 0x61, 0x6b, 0x65, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x12,
+	0x2c, 0x0a, 0x11, 0x74, 0x6f, 0x70, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x4d, 0x61, 0x6b, 0x65, 0x66,
+	0x69, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x11, 0x74, 0x6f, 0x70, 0x6c,
+	0x65, 0x76, 0x65, 0x6c, 0x4d, 0x61, 0x6b, 0x65, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x42, 0x2b, 0x5a,
+	0x29, 0x61, 0x6e, 0x64, 0x72, 0x6f, 0x69, 0x64, 0x2f, 0x73, 0x6f, 0x6f, 0x6e, 0x67, 0x2f, 0x75,
+	0x69, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2f, 0x6d, 0x6b, 0x5f, 0x6d, 0x65, 0x74,
+	0x72, 0x69, 0x63, 0x73, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74,
+	0x6f, 0x33,
+}
+
+var (
+	file_mk_metrics_proto_rawDescOnce sync.Once
+	file_mk_metrics_proto_rawDescData = file_mk_metrics_proto_rawDesc
+)
+
+func file_mk_metrics_proto_rawDescGZIP() []byte {
+	file_mk_metrics_proto_rawDescOnce.Do(func() {
+		file_mk_metrics_proto_rawDescData = protoimpl.X.CompressGZIP(file_mk_metrics_proto_rawDescData)
+	})
+	return file_mk_metrics_proto_rawDescData
+}
+
+var file_mk_metrics_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
+var file_mk_metrics_proto_goTypes = []interface{}{
+	(*MkMetrics)(nil), // 0: soong_build_mk_metrics.MkMetrics
+}
+var file_mk_metrics_proto_depIdxs = []int32{
+	0, // [0:0] is the sub-list for method output_type
+	0, // [0:0] is the sub-list for method input_type
+	0, // [0:0] is the sub-list for extension type_name
+	0, // [0:0] is the sub-list for extension extendee
+	0, // [0:0] is the sub-list for field type_name
+}
+
+func init() { file_mk_metrics_proto_init() }
+func file_mk_metrics_proto_init() {
+	if File_mk_metrics_proto != nil {
+		return
+	}
+	if !protoimpl.UnsafeEnabled {
+		file_mk_metrics_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
+			switch v := v.(*MkMetrics); i {
+			case 0:
+				return &v.state
+			case 1:
+				return &v.sizeCache
+			case 2:
+				return &v.unknownFields
+			default:
+				return nil
+			}
+		}
+	}
+	type x struct{}
+	out := protoimpl.TypeBuilder{
+		File: protoimpl.DescBuilder{
+			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
+			RawDescriptor: file_mk_metrics_proto_rawDesc,
+			NumEnums:      0,
+			NumMessages:   1,
+			NumExtensions: 0,
+			NumServices:   0,
+		},
+		GoTypes:           file_mk_metrics_proto_goTypes,
+		DependencyIndexes: file_mk_metrics_proto_depIdxs,
+		MessageInfos:      file_mk_metrics_proto_msgTypes,
+	}.Build()
+	File_mk_metrics_proto = out.File
+	file_mk_metrics_proto_rawDesc = nil
+	file_mk_metrics_proto_goTypes = nil
+	file_mk_metrics_proto_depIdxs = nil
+}
diff --git a/ui/metrics/mk_metrics_proto/mk_metrics.proto b/ui/metrics/mk_metrics_proto/mk_metrics.proto
new file mode 100644
index 0000000..df7bca3
--- /dev/null
+++ b/ui/metrics/mk_metrics_proto/mk_metrics.proto
@@ -0,0 +1,30 @@
+// Copyright 2022 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package soong_build_mk_metrics;
+option go_package = "android/soong/ui/metrics/mk_metrics_proto";
+
+// Contains metrics pertaining to makefiles.
+message MkMetrics {
+  // Total number of mk files present in the workspace.
+  uint32 totalMakefiles = 1;
+
+  // Number of top-level mk files present in the workspace.
+  // A mk file is "top level" if there are no mk files in its parent
+  // directories.
+  // This value is equivalent to the number of entries in Android.mk.list.
+  uint32 toplevelMakefiles = 2;
+}
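
For orientation only, a minimal Go sketch of producing one of these messages, using the go_package declared above; the helper name writeMkMetrics and the output path are hypothetical, not part of this change.

// Illustrative sketch; helper name and output path are assumptions.
package example

import (
	"os"

	"google.golang.org/protobuf/proto"

	mk_metrics_proto "android/soong/ui/metrics/mk_metrics_proto"
)

// writeMkMetrics serializes the makefile counts to a binary proto at path.
func writeMkMetrics(path string, totalMakefiles, toplevelMakefiles uint32) error {
	msg := &mk_metrics_proto.MkMetrics{
		TotalMakefiles:    totalMakefiles,
		ToplevelMakefiles: toplevelMakefiles,
	}
	data, err := proto.Marshal(msg)
	if err != nil {
		return err
	}
	return os.WriteFile(path, data, 0o644)
}
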
diff --git a/ui/metrics/mk_metrics_proto/regen.sh b/ui/metrics/mk_metrics_proto/regen.sh
new file mode 100755
index 0000000..64018d4
--- /dev/null
+++ b/ui/metrics/mk_metrics_proto/regen.sh
@@ -0,0 +1,29 @@
+#!/bin/bash -e
+
+# Copyright 2022 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generates the Go source file from the mk_metrics.proto protobuf file.
+
+function die() { echo "ERROR: $1" >&2; exit 1; }
+
+readonly error_msg="Maybe you need to run 'lunch aosp_arm-eng && m aprotoc blueprint_tools'?"
+
+if ! hash aprotoc &>/dev/null; then
+  die "could not find aprotoc. ${error_msg}"
+fi
+
+if ! aprotoc --go_out=paths=source_relative:. mk_metrics.proto; then
+  die "build failed. ${error_msg}"
+fi