Merge changes from topic "export-vars"

* changes:
  export Java variables to Bazel
  refactor Bazel variable export
diff --git a/android/bazel.go b/android/bazel.go
index e3fb0a6..edf67d4 100644
--- a/android/bazel.go
+++ b/android/bazel.go
@@ -435,144 +435,103 @@
 
 	// Per-module denylist to always opt modules out of both bp2build and mixed builds.
 	bp2buildModuleDoNotConvertList = []string{
-		"libnativehelper_compat_libc", // Broken compile: implicit declaration of function 'strerror_r' is invalid in C99
+		// cc bugs
+		"libsepol",                                  // TODO(b/207408632): Unsupported case of .l sources in cc library rules
+		"libactivitymanager_aidl",                   // TODO(b/207426160): Unsupported use of aidl sources (via Dactivity_manager_procstate_aidl) in a cc_library
+		"gen-kotlin-build-file.py",                  // TODO(b/198619163) module has same name as source
+		"libgtest_ndk_c++", "libgtest_main_ndk_c++", // TODO(b/201816222): Requires sdk_version support.
+		"linkerconfig", "mdnsd", // TODO(b/202876379): has arch-variant static_executable
+		"linker",       // TODO(b/228316882): cc_binary uses link_crt
+		"libdebuggerd", // TODO(b/228314770): support product variable-specific header_libs
+		"versioner",    // TODO(b/228313961):  depends on prebuilt shared library libclang-cpp_host as a shared library, which does not supply expected providers for a shared library
 
-		"libart",                             // depends on unconverted modules: art_operator_srcs, libodrstatslog, libelffile, art_cmdlineparser_headers, cpp-define-generator-definitions, libcpu_features, libdexfile, libartpalette, libbacktrace, libnativebridge, libnativeloader, libsigchain, libunwindstack, libartbase, libprofile, cpp-define-generator-asm-support, apex-info-list-tinyxml, libtinyxml2, libnativeloader-headers, libstatssocket, heapprofd_client_api
-		"libart-runtime-gtest",               // depends on unconverted modules: libgtest_isolated, libart-compiler, libdexfile, libprofile, libartbase, libbacktrace, libartbase-art-gtest
-		"libart_headers",                     // depends on unconverted modules: art_libartbase_headers
-		"libartd",                            // depends on unconverted modules: apex-info-list-tinyxml, libtinyxml2, libnativeloader-headers, libstatssocket, heapprofd_client_api, art_operator_srcs, libodrstatslog, libelffiled, art_cmdlineparser_headers, cpp-define-generator-definitions, libcpu_features, libdexfiled, libartpalette, libbacktrace, libnativebridge, libnativeloader, libsigchain, libunwindstack, libartbased, libprofiled, cpp-define-generator-asm-support
-		"libartd-runtime-gtest",              // depends on unconverted modules: libgtest_isolated, libartd-compiler, libdexfiled, libprofiled, libartbased, libbacktrace, libartbased-art-gtest
-		"libstatslog_art",                    // depends on unconverted modules: statslog_art.cpp, statslog_art.h
-		"statslog_art.h", "statslog_art.cpp", // depends on unconverted modules: stats-log-api-gen
-
-		"libandroid_runtime_lazy", // depends on unconverted modules: libbinder_headers
-		"libcmd",                  // depends on unconverted modules: libbinder
-
-		"libdexfile_support_static",                                                       // Depends on unconverted module: libdexfile_external_headers
-		"libunwindstack_local", "libunwindstack_utils", "libc_malloc_debug", "libfdtrack", // Depends on unconverted module: libunwindstack
-
-		"libdexfile_support",          // TODO(b/210546943): Enabled based on product variables.
-		"libdexfile_external_headers", // TODO(b/210546943): Enabled based on product variables.
-
-		"libunwindstack",                // Depends on unconverted module libdexfile_support.
-		"libnativehelper_compat_libc++", // Broken compile: implicit declaration of function 'strerror_r' is invalid in C99
-
-		"chkcon", "sefcontext_compile", // depends on unconverted modules: libsepol
-
-		"libsepol", // TODO(b/207408632): Unsupported case of .l sources in cc library rules
-
-		"gen-kotlin-build-file.py", // module has same name as source
-
-		"libactivitymanager_aidl", // TODO(b/207426160): Depends on activity_manager_procstate_aidl, which is an aidl filegroup.
-
-		"libnativehelper_lazy_mts_jni", "libnativehelper_mts_jni", // depends on unconverted modules: libgmock_ndk
-		"libnativetesthelper_jni", "libgmock_main_ndk", "libgmock_ndk", // depends on unconverted module: libgtest_ndk_c++
-
-		"statslog-framework-java-gen", "statslog.cpp", "statslog.h", "statslog.rs", "statslog_header.rs", // depends on unconverted modules: stats-log-api-gen
-
-		"stats-log-api-gen", // depends on unconverted modules: libstats_proto_host, libprotobuf-cpp-full
-
-		"libstatslog", // depends on unconverted modules: statslog.cpp, statslog.h, ...
-
-		"cmd",                                                        // depends on unconverted module packagemanager_aidl-cpp, of unsupported type aidl_interface
-		"servicedispatcher",                                          // depends on unconverted module android.debug_aidl, of unsupported type aidl_interface
-		"libutilscallstack",                                          // depends on unconverted module libbacktrace
-		"libbacktrace",                                               // depends on unconverted module libunwindstack
-		"libdebuggerd_handler",                                       // depends on unconverted module libdebuggerd_handler_core
-		"libdebuggerd_handler_core", "libdebuggerd_handler_fallback", // depends on unconverted module libdebuggerd
-		"unwind_for_offline", // depends on unconverted module libunwindstack_utils
-		"libdebuggerd",       // depends on unconverted modules libdexfile_support, libunwindstack, gwp_asan_crash_handler, libtombstone_proto, libprotobuf-cpp-lite
-		"libdexfile_static",  // depends on libartpalette, libartbase, libdexfile, which are of unsupported type: art_cc_library.
-
-		"static_crasher", // depends on unconverted modules: libdebuggerd_handler
-
-		"pbtombstone", "crash_dump", // depends on libdebuggerd, libunwindstack
-
-		"libbase_ndk", // http://b/186826477, fails to link libctscamera2_jni for device (required for CtsCameraTestCases)
-
-		"libprotobuf-internal-protos",      // b/210751803, we don't handle path property for filegroups
-		"libprotobuf-internal-python-srcs", // b/210751803, we don't handle path property for filegroups
-		"libprotobuf-java-full",            // b/210751803, we don't handle path property for filegroups
-		"host-libprotobuf-java-full",       // b/210751803, we don't handle path property for filegroups
-		"libprotobuf-java-util-full",       // b/210751803, we don't handle path property for filegroups
-		"apex_manifest_proto_java",         // b/210751803, depends on libprotobuf-java-full
-		"conscrypt",                        // b/210751803, we don't handle path property for filegroups
-		"conscrypt-for-host",               // b/210751803, we don't handle path property for filegroups
-
-		"libprotobuf-java-nano",      // b/220869005, depends on non-public_current SDK
-		"host-libprotobuf-java-nano", // b/220869005, depends on libprotobuf-java-nano
-
-		"libc_musl_sysroot_bionic_arch_headers", // b/218405924, depends on soong_zip
-		"libc_musl_sysroot_bionic_headers",      // b/218405924, depends on soong_zip and generates duplicate srcs
+		// java bugs
+		"libbase_ndk", // TODO(b/186826477): fails to link libctscamera2_jni for device (required for CtsCameraTestCases)
 
 		// python protos
-		"libprotobuf-python",                           // contains .proto sources
-		"conv_linker_config",                           // depends on linker_config_proto, a python lib with proto sources
-		"apex_build_info_proto", "apex_manifest_proto", // a python lib with proto sources
-		"linker_config_proto", // contains .proto sources
+		"libprotobuf-python",                           // TODO(b/196084681): contains .proto sources
+		"apex_build_info_proto", "apex_manifest_proto", // TODO(b/196084681): a python lib with proto sources
+		"linker_config_proto", // TODO(b/196084681): contains .proto sources
 
-		"brotli-fuzzer-corpus", // b/202015218: outputs are in location incompatible with bazel genrule handling.
+		// genrule incompatibilities
+		"brotli-fuzzer-corpus",                                       // TODO(b/202015218): outputs are in location incompatible with bazel genrule handling.
+		"platform_tools_properties", "build_tools_source_properties", // TODO(b/203369847): multiple genrules in the same package creating the same file
 
-		// python modules
-		"analyze_bcpf", // depends on bpmodify a blueprint_go_binary.
+		// aar support
+		"prebuilt_car-ui-androidx-core-common",         // TODO(b/224773339), genrule dependency creates an .aar, not a .jar
+		"prebuilt_platform-robolectric-4.4-prebuilt",   // aosp/1999250, needs .aar support in Jars
+		"prebuilt_platform-robolectric-4.5.1-prebuilt", // aosp/1999250, needs .aar support in Jars
 
-		// b/203369847: multiple genrules in the same package creating the same file
-		// //development/sdk/...
-		"platform_tools_properties",
-		"build_tools_source_properties",
-
-		// APEX support
-		"com.android.runtime", // depends on unconverted modules: bionic-linker-config, linkerconfig
-
-		"libgtest_ndk_c++",      // b/201816222: Requires sdk_version support.
-		"libgtest_main_ndk_c++", // b/201816222: Requires sdk_version support.
-
-		"abb",                     // depends on unconverted modules: libcmd, libbinder
-		"adb",                     // depends on unconverted modules: AdbWinApi, libadb_host, libandroidfw, libapp_processes_protos_full, libfastdeploy_host, libopenscreen-discovery, libopenscreen-platform-impl, libusb, bin2c_fastdeployagent, AdbWinUsbApi
-		"libadb_host",             // depends on unconverted modules: libopenscreen-discovery, libopenscreen-platform-impl, libusb, AdbWinApi
-		"libfastdeploy_host",      // depends on unconverted modules: libandroidfw, libusb, AdbWinApi
-		"linker",                  // depends on unconverted modules: libdebuggerd_handler_fallback
-		"linker_reloc_bench_main", // depends on unconverted modules: liblinker_reloc_bench_*
-		"versioner",               // depends on unconverted modules: libclang_cxx_host, libLLVM_host, of unsupported type llvm_host_prebuilt_library_shared
-
-		"linkerconfig", // http://b/202876379 has arch-variant static_executable
-		"mdnsd",        // http://b/202876379 has arch-variant static_executable
-
-		"CarHTMLViewer", // depends on unconverted modules android.car-stubs, car-ui-lib
-
-		"libdexfile",  // depends on unconverted modules: dexfile_operator_srcs, libartbase, libartpalette,
-		"libdexfiled", // depends on unconverted modules: dexfile_operator_srcs, libartbased, libartpalette
+		// path property for filegroups
+		"conscrypt",                        // TODO(b/210751803), we don't handle path property for filegroups
+		"conscrypt-for-host",               // TODO(b/210751803), we don't handle path property for filegroups
+		"host-libprotobuf-java-full",       // TODO(b/210751803), we don't handle path property for filegroups
+		"libprotobuf-internal-protos",      // TODO(b/210751803), we don't handle path property for filegroups
+		"libprotobuf-internal-python-srcs", // TODO(b/210751803), we don't handle path property for filegroups
+		"libprotobuf-java-full",            // TODO(b/210751803), we don't handle path property for filegroups
+		"libprotobuf-java-util-full",       // TODO(b/210751803), we don't handle path property for filegroups
 
 		// go deps:
+		"analyze_bcpf",                                                                               // depends on bpmodify a blueprint_go_binary.
 		"apex-protos",                                                                                // depends on soong_zip, a go binary
 		"generated_android_icu4j_src_files", "generated_android_icu4j_test_files", "icu4c_test_data", // depends on unconverted modules: soong_zip
 		"host_bionic_linker_asm",                                                  // depends on extract_linker, a go binary.
 		"host_bionic_linker_script",                                               // depends on extract_linker, a go binary.
-		"robolectric-sqlite4java-native",                                          // depends on soong_zip, a go binary
-		"robolectric_tzdata",                                                      // depends on soong_zip, a go binary
+		"libc_musl_sysroot_bionic_arch_headers",                                   // depends on soong_zip
+		"libc_musl_sysroot_bionic_headers",                                        // 218405924, depends on soong_zip and generates duplicate srcs
 		"libc_musl_sysroot_libc++_headers", "libc_musl_sysroot_libc++abi_headers", // depends on soong_zip, zip2zip
+		"robolectric-sqlite4java-native", // depends on soong_zip, a go binary
+		"robolectric_tzdata",             // depends on soong_zip, a go binary
 
-		"android_icu4j_srcgen_binary", // Bazel build error: deps not allowed without srcs; move to runtime_deps
-		"core-icu4j-for-host",         // Bazel build error: deps not allowed without srcs; move to runtime_deps
+		// rust support
+		"libtombstoned_client_rust_bridge_code", "libtombstoned_client_wrapper", // rust conversions are not supported
 
-		// java deps
-		"android_icu4j_srcgen",          // depends on unconverted modules: currysrc
-		"bin2c_fastdeployagent",         // depends on deployagent, a java binary
-		"currysrc",                      // depends on unconverted modules: currysrc_org.eclipse, guavalib, jopt-simple-4.9
-		"robolectric-sqlite4java-0.282", // depends on unconverted modules: robolectric-sqlite4java-import, robolectric-sqlite4java-native
-		"timezone-host",                 // depends on unconverted modules: art.module.api.annotations
-		"truth-host-prebuilt",           // depends on unconverted modules: truth-prebuilt
-		"truth-prebuilt",                // depends on unconverted modules: asm-7.0, guava
-
-		"generated_android_icu4j_resources",      // depends on unconverted modules: android_icu4j_srcgen_binary, soong_zip
-		"generated_android_icu4j_test_resources", // depends on unconverted modules: android_icu4j_srcgen_binary, soong_zip
-
-		"art-script",     // depends on unconverted modules: dalvikvm, dex2oat
-		"dex2oat-script", // depends on unconverted modules: dex2oat
-
-		"prebuilt_car-ui-androidx-core-common",         // b/224773339, genrule dependency creates an .aar, not a .jar
-		"prebuilt_platform-robolectric-4.4-prebuilt",   // aosp/1999250, needs .aar support in Jars
-		"prebuilt_platform-robolectric-4.5.1-prebuilt", // aosp/1999250, needs .aar support in Jars
+		// unconverted deps
+		"CarHTMLViewer",                // depends on unconverted modules android.car-stubs, car-ui-lib
+		"abb",                          // depends on unconverted modules: libcmd, libbinder
+		"adb",                          // depends on unconverted modules: AdbWinApi, libandroidfw, libopenscreen-discovery, libopenscreen-platform-impl, libusb, bin2c_fastdeployagent, AdbWinUsbApi
+		"android_icu4j_srcgen",         // depends on unconverted modules: currysrc
+		"android_icu4j_srcgen_binary",  // depends on unconverted modules: android_icu4j_srcgen, currysrc
+		"apex_manifest_proto_java",     // b/210751803, depends on libprotobuf-java-full
+		"art-script",                   // depends on unconverted modules: dalvikvm, dex2oat
+		"bin2c_fastdeployagent",        // depends on unconverted modules: deployagent
+		"chkcon", "sefcontext_compile", // depends on unconverted modules: libsepol
+		"com.android.runtime",                                        // depends on unconverted modules: bionic-linker-config, linkerconfig
+		"conv_linker_config",                                         // depends on unconverted modules: linker_config_proto
+		"currysrc",                                                   // depends on unconverted modules: currysrc_org.eclipse, guavalib, jopt-simple-4.9
+		"dex2oat-script",                                             // depends on unconverted modules: dex2oat
+		"generated_android_icu4j_resources",                          // depends on unconverted modules: android_icu4j_srcgen_binary, soong_zip
+		"generated_android_icu4j_test_resources",                     // depends on unconverted modules: android_icu4j_srcgen_binary, soong_zip
+		"host-libprotobuf-java-nano",                                 // b/220869005, depends on libprotobuf-java-nano
+		"libadb_host",                                                // depends on unconverted modules: AdbWinApi, libopenscreen-discovery, libopenscreen-platform-impl, libusb
+		"libart",                                                     // depends on unconverted modules: apex-info-list-tinyxml, libtinyxml2, libnativeloader-headers, heapprofd_client_api, art_operator_srcs, libcpu_features, libodrstatslog, libelffile, art_cmdlineparser_headers, cpp-define-generator-definitions, libdexfile, libnativebridge, libnativeloader, libsigchain, libartbase, libprofile, cpp-define-generator-asm-support
+		"libart-runtime-gtest",                                       // depends on unconverted modules: libgtest_isolated, libart-compiler, libdexfile, libprofile, libartbase, libartbase-art-gtest
+		"libart_headers",                                             // depends on unconverted modules: art_libartbase_headers
+		"libartd",                                                    // depends on unconverted modules: art_operator_srcs, libcpu_features, libodrstatslog, libelffiled, art_cmdlineparser_headers, cpp-define-generator-definitions, libdexfiled, libnativebridge, libnativeloader, libsigchain, libartbased, libprofiled, cpp-define-generator-asm-support, apex-info-list-tinyxml, libtinyxml2, libnativeloader-headers, heapprofd_client_api
+		"libartd-runtime-gtest",                                      // depends on unconverted modules: libgtest_isolated, libartd-compiler, libdexfiled, libprofiled, libartbased, libartbased-art-gtest
+		"libdebuggerd_handler",                                       // depends on unconverted module libdebuggerd_handler_core
+		"libdebuggerd_handler_core", "libdebuggerd_handler_fallback", // depends on unconverted module libdebuggerd
+		"libdexfile",                                              // depends on unconverted modules: dexfile_operator_srcs, libartbase, libartpalette,
+		"libdexfile_static",                                       // depends on unconverted modules: libartbase, libdexfile
+		"libdexfiled",                                             // depends on unconverted modules: dexfile_operator_srcs, libartbased, libartpalette
+		"libfastdeploy_host",                                      // depends on unconverted modules: libandroidfw, libusb, AdbWinApi
+		"libgmock_main_ndk",                                       // depends on unconverted modules: libgtest_ndk_c++
+		"libgmock_ndk",                                            // depends on unconverted modules: libgtest_ndk_c++
+		"libnativehelper_lazy_mts_jni", "libnativehelper_mts_jni", // depends on unconverted modules: libnativetesthelper_jni, libgmock_ndk
+		"libnativetesthelper_jni",   // depends on unconverted modules: libgtest_ndk_c++
+		"libprotobuf-java-nano",     // b/220869005, depends on non-public_current SDK
+		"libstatslog",               // depends on unconverted modules: libstatspull, statsd-aidl-ndk, libbinder_ndk
+		"libstatslog_art",           // depends on unconverted modules: statslog_art.cpp, statslog_art.h
+		"linker_reloc_bench_main",   // depends on unconverted modules: liblinker_reloc_bench_*
+		"pbtombstone", "crash_dump", // depends on libdebuggerd, libunwindstack
+		"robolectric-sqlite4java-0.282",             // depends on unconverted modules: robolectric-sqlite4java-import, robolectric-sqlite4java-native
+		"static_crasher",                            // depends on unconverted modules: libdebuggerd_handler
+		"stats-log-api-gen",                         // depends on unconverted modules: libstats_proto_host
+		"statslog.cpp", "statslog.h", "statslog.rs", // depends on unconverted modules: stats-log-api-gen
+		"statslog_art.cpp", "statslog_art.h", "statslog_header.rs", // depends on unconverted modules: stats-log-api-gen
+		"timezone-host",       // depends on unconverted modules: art.module.api.annotations
+		"truth-host-prebuilt", // depends on unconverted modules: truth-prebuilt
+		"truth-prebuilt",      // depends on unconverted modules: asm-7.0, guava
 	}
 
 	// Per-module denylist of cc_library modules to only generate the static
diff --git a/android/neverallow.go b/android/neverallow.go
index f87cebb..aa47bca 100644
--- a/android/neverallow.go
+++ b/android/neverallow.go
@@ -57,6 +57,7 @@
 	AddNeverAllowRules(createUncompressDexRules()...)
 	AddNeverAllowRules(createMakefileGoalRules()...)
 	AddNeverAllowRules(createInitFirstStageRules()...)
+	AddNeverAllowRules(createProhibitFrameworkAccessRules()...)
 }
 
 // Add a NeverAllow rule to the set of rules to apply.
@@ -228,6 +229,15 @@
 	}
 }
 
+func createProhibitFrameworkAccessRules() []Rule {
+	return []Rule{
+		NeverAllow().
+			With("libs", "framework").
+			WithoutMatcher("sdk_version", Regexp("(core_.*|^$)")).
+			Because("framework can't be used when building against SDK"),
+	}
+}
+
 func neverallowMutator(ctx BottomUpMutatorContext) {
 	m, ok := ctx.Module().(Module)
 	if !ok {
diff --git a/android/neverallow_test.go b/android/neverallow_test.go
index 8afe9e0..86f1a37 100644
--- a/android/neverallow_test.go
+++ b/android/neverallow_test.go
@@ -327,6 +327,21 @@
 			"Only boot images may be imported as a makefile goal.",
 		},
 	},
+	// Tests for the rule prohibiting the use of framework
+	{
+		name: "prohibit framework",
+		fs: map[string][]byte{
+			"Android.bp": []byte(`
+				java_library {
+					name: "foo",
+					libs: ["framework"],
+					sdk_version: "current",
+				}`),
+		},
+		expectedErrors: []string{
+			"framework can't be used when building against SDK",
+		},
+	},
 }
 
 var prepareForNeverAllowTest = GroupFixturePreparers(
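For context, the rule above only fires when sdk_version fails to match Regexp("(core_.*|^$)"), so an empty sdk_version or a core_* SDK may still list "framework" in libs. A minimal sketch of an additional table entry, in the same shape as the case above, that would be expected to produce no errors (module name and sdk_version value are illustrative, not taken from this patch):

    {
        name: "prohibit framework: core sdk_version is allowed",
        fs: map[string][]byte{
            "Android.bp": []byte(`
                java_library {
                    name: "bar",
                    libs: ["framework"],
                    sdk_version: "core_current",
                }`),
        },
        // no expectedErrors: "core_current" matches (core_.*|^$), so the rule does not apply
    },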
diff --git a/android/rule_builder.go b/android/rule_builder.go
index 098c1fc..11da36c 100644
--- a/android/rule_builder.go
+++ b/android/rule_builder.go
@@ -101,12 +101,7 @@
 }
 
 // Restat marks the rule as a restat rule, which will be passed to ModuleContext.Rule in BuildParams.Restat.
-//
-// Restat is not compatible with Sbox()
 func (r *RuleBuilder) Restat() *RuleBuilder {
-	if r.sbox {
-		panic("Restat() is not compatible with Sbox()")
-	}
 	r.restat = true
 	return r
 }
@@ -141,8 +136,6 @@
 // point to a location where sbox's manifest will be written and must be outside outputDir. sbox
 // will ensure that all outputs have been written, and will discard any output files that were not
 // specified.
-//
-// Sbox is not compatible with Restat()
 func (r *RuleBuilder) Sbox(outputDir WritablePath, manifestPath WritablePath) *RuleBuilder {
 	if r.sbox {
 		panic("Sbox() may not be called more than once")
@@ -150,9 +143,6 @@
 	if len(r.commands) > 0 {
 		panic("Sbox() may not be called after Command()")
 	}
-	if r.restat {
-		panic("Sbox() is not compatible with Restat()")
-	}
 	r.sbox = true
 	r.outDir = outputDir
 	r.sboxManifestPath = manifestPath
@@ -636,11 +626,14 @@
 				ctx: r.ctx,
 			},
 		}
-		sboxCmd.Text("rm -rf").Output(r.outDir)
-		sboxCmd.Text("&&")
 		sboxCmd.builtToolWithoutDeps("sbox").
-			Flag("--sandbox-path").Text(shared.TempDirForOutDir(PathForOutput(r.ctx).String())).
-			Flag("--manifest").Input(r.sboxManifestPath)
+			FlagWithArg("--sandbox-path ", shared.TempDirForOutDir(PathForOutput(r.ctx).String())).
+			FlagWithArg("--output-dir ", r.outDir.String()).
+			FlagWithInput("--manifest ", r.sboxManifestPath)
+
+		if r.restat {
+			sboxCmd.Flag("--write-if-changed")
+		}
 
 		// Replace the command string, and add the sbox tool and manifest textproto to the
 		// dependencies of the final sbox rule.
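Removing the Restat/Sbox incompatibility means a sandboxed RuleBuilder rule can now also be a restat rule: sbox is passed --output-dir and, when Restat() is set, --write-if-changed, so unchanged outputs keep their timestamps and ninja can prune downstream actions. A minimal sketch of such a rule, assuming a surrounding module context with pctx, ctx and srcPath in scope (the rule contents are illustrative, not taken from this patch):

    // Sketch only: sandbox the command and mark the rule restat.
    outDir := android.PathForModuleOut(ctx, "gen")
    manifest := android.PathForModuleOut(ctx, "gen.sbox.textproto")

    rule := android.NewRuleBuilder(pctx, ctx).
        Sbox(outDir, manifest). // outputs are collected under outDir
        Restat()                // no longer panics when combined with Sbox()

    rule.Command().
        Text("cp").
        Input(srcPath).
        Output(android.PathForModuleOut(ctx, "gen", "out.txt"))

    rule.Build("gen_out_txt", "generate out.txt")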
diff --git a/android/rule_builder_test.go b/android/rule_builder_test.go
index 3766bb0..86647eb 100644
--- a/android/rule_builder_test.go
+++ b/android/rule_builder_test.go
@@ -678,32 +678,32 @@
 	})
 	t.Run("sbox", func(t *testing.T) {
 		outDir := "out/soong/.intermediates/foo_sbox"
-		outFile := filepath.Join(outDir, "gen/foo_sbox")
-		depFile := filepath.Join(outDir, "gen/foo_sbox.d")
+		sboxOutDir := filepath.Join(outDir, "gen")
+		outFile := filepath.Join(sboxOutDir, "foo_sbox")
+		depFile := filepath.Join(sboxOutDir, "foo_sbox.d")
 		rspFile := filepath.Join(outDir, "rsp")
 		rspFile2 := filepath.Join(outDir, "rsp2")
 		manifest := filepath.Join(outDir, "sbox.textproto")
 		sbox := filepath.Join("out", "soong", "host", result.Config.PrebuiltOS(), "bin/sbox")
 		sandboxPath := shared.TempDirForOutDir("out/soong")
 
-		cmd := `rm -rf ` + outDir + `/gen && ` +
-			sbox + ` --sandbox-path ` + sandboxPath + ` --manifest ` + manifest
+		cmd := sbox + ` --sandbox-path ` + sandboxPath + ` --output-dir ` + sboxOutDir + ` --manifest ` + manifest
 		module := result.ModuleForTests("foo_sbox", "")
 		check(t, module.Output("gen/foo_sbox"), module.Output(rspFile2),
 			cmd, outFile, depFile, rspFile, rspFile2, false, []string{manifest}, []string{sbox})
 	})
 	t.Run("sbox_inputs", func(t *testing.T) {
 		outDir := "out/soong/.intermediates/foo_sbox_inputs"
-		outFile := filepath.Join(outDir, "gen/foo_sbox_inputs")
-		depFile := filepath.Join(outDir, "gen/foo_sbox_inputs.d")
+		sboxOutDir := filepath.Join(outDir, "gen")
+		outFile := filepath.Join(sboxOutDir, "foo_sbox_inputs")
+		depFile := filepath.Join(sboxOutDir, "foo_sbox_inputs.d")
 		rspFile := filepath.Join(outDir, "rsp")
 		rspFile2 := filepath.Join(outDir, "rsp2")
 		manifest := filepath.Join(outDir, "sbox.textproto")
 		sbox := filepath.Join("out", "soong", "host", result.Config.PrebuiltOS(), "bin/sbox")
 		sandboxPath := shared.TempDirForOutDir("out/soong")
 
-		cmd := `rm -rf ` + outDir + `/gen && ` +
-			sbox + ` --sandbox-path ` + sandboxPath + ` --manifest ` + manifest
+		cmd := sbox + ` --sandbox-path ` + sandboxPath + ` --output-dir ` + sboxOutDir + ` --manifest ` + manifest
 
 		module := result.ModuleForTests("foo_sbox_inputs", "")
 		check(t, module.Output("gen/foo_sbox_inputs"), module.Output(rspFile2),
diff --git a/androidmk/androidmk/androidmk_test.go b/androidmk/androidmk/androidmk_test.go
index 2176361..afde68b 100644
--- a/androidmk/androidmk/androidmk_test.go
+++ b/androidmk/androidmk/androidmk_test.go
@@ -1690,6 +1690,21 @@
 }
 `,
 	},
+	{
+		desc: "convert android_app to android_test when having test_suites",
+		in: `
+include $(CLEAR_VARS)
+LOCAL_MODULE := foo
+LOCAL_COMPATIBILITY_SUITE := bar
+include $(BUILD_PACKAGE)
+		`,
+		expected: `
+android_test {
+	name: "foo",
+	test_suites: ["bar"],
+}
+`,
+	},
 }
 
 func TestEndToEnd(t *testing.T) {
diff --git a/apex/Android.bp b/apex/Android.bp
index b9b5428..41224ec 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -14,6 +14,7 @@
         "soong-cc",
         "soong-filesystem",
         "soong-java",
+        "soong-provenance",
         "soong-python",
         "soong-rust",
         "soong-sh",
diff --git a/apex/apex_test.go b/apex/apex_test.go
index 5706a2c..3e01f26 100644
--- a/apex/apex_test.go
+++ b/apex/apex_test.go
@@ -3889,7 +3889,7 @@
 		}),
 		withBinder32bit,
 		withTargets(map[android.OsType][]android.Target{
-			android.Android: []android.Target{
+			android.Android: {
 				{Os: android.Android, Arch: android.Arch{ArchType: android.Arm, ArchVariant: "armv7-a-neon", Abi: []string{"armeabi-v7a"}},
 					NativeBridge: android.NativeBridgeDisabled, NativeBridgeHostArchName: "", NativeBridgeRelativePath: ""},
 			},
@@ -4570,12 +4570,20 @@
 		}
 	`)
 
-	prebuilt := ctx.ModuleForTests("myapex", "android_common_myapex").Module().(*Prebuilt)
+	testingModule := ctx.ModuleForTests("myapex", "android_common_myapex")
+	prebuilt := testingModule.Module().(*Prebuilt)
 
 	expectedInput := "myapex-arm64.apex"
 	if prebuilt.inputApex.String() != expectedInput {
 		t.Errorf("inputApex invalid. expected: %q, actual: %q", expectedInput, prebuilt.inputApex.String())
 	}
+	android.AssertStringDoesContain(t, "Invalid provenance metadata file",
+		prebuilt.ProvenanceMetaDataFile().String(), "soong/.intermediates/provenance_metadata/myapex/provenance_metadata.textproto")
+	rule := testingModule.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "myapex-arm64.apex", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/myapex/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "myapex", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/apex/myapex.apex", rule.Args["install_path"])
 }
 
 func TestPrebuiltMissingSrc(t *testing.T) {
@@ -4595,12 +4603,18 @@
 		}
 	`)
 
-	p := ctx.ModuleForTests("myapex", "android_common_myapex").Module().(*Prebuilt)
+	testingModule := ctx.ModuleForTests("myapex", "android_common_myapex")
+	p := testingModule.Module().(*Prebuilt)
 
 	expected := "notmyapex.apex"
 	if p.installFilename != expected {
 		t.Errorf("installFilename invalid. expected: %q, actual: %q", expected, p.installFilename)
 	}
+	rule := testingModule.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "myapex-arm.apex", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/myapex/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "myapex", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/apex/notmyapex.apex", rule.Args["install_path"])
 }
 
 func TestApexSetFilenameOverride(t *testing.T) {
@@ -4643,13 +4657,19 @@
 		}
 	`)
 
-	p := ctx.ModuleForTests("myapex.prebuilt", "android_common_myapex.prebuilt").Module().(*Prebuilt)
+	testingModule := ctx.ModuleForTests("myapex.prebuilt", "android_common_myapex.prebuilt")
+	p := testingModule.Module().(*Prebuilt)
 
 	expected := []string{"myapex"}
 	actual := android.AndroidMkEntriesForTest(t, ctx, p)[0].EntryMap["LOCAL_OVERRIDES_MODULES"]
 	if !reflect.DeepEqual(actual, expected) {
 		t.Errorf("Incorrect LOCAL_OVERRIDES_MODULES value '%s', expected '%s'", actual, expected)
 	}
+	rule := testingModule.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "myapex-arm.apex", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/myapex.prebuilt/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "myapex.prebuilt", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/apex/myapex.prebuilt.apex", rule.Args["install_path"])
 }
 
 func TestPrebuiltApexName(t *testing.T) {
diff --git a/apex/prebuilt.go b/apex/prebuilt.go
index 158c804..187e0df 100644
--- a/apex/prebuilt.go
+++ b/apex/prebuilt.go
@@ -23,7 +23,7 @@
 
 	"android/soong/android"
 	"android/soong/java"
-
+	"android/soong/provenance"
 	"github.com/google/blueprint"
 	"github.com/google/blueprint/proptools"
 )
@@ -482,6 +482,8 @@
 	properties PrebuiltProperties
 
 	inputApex android.Path
+
+	provenanceMetaDataFile android.OutputPath
 }
 
 type ApexFileProperties struct {
@@ -778,9 +780,14 @@
 
 	if p.installable() {
 		p.installedFile = ctx.InstallFile(p.installDir, p.installFilename, p.inputApex, p.compatSymlinks.Paths()...)
+		p.provenanceMetaDataFile = provenance.GenerateArtifactProvenanceMetaData(ctx, p.inputApex, p.installedFile)
 	}
 }
 
+func (p *Prebuilt) ProvenanceMetaDataFile() android.OutputPath {
+	return p.provenanceMetaDataFile
+}
+
 // prebuiltApexExtractorModule is a private module type that is only created by the prebuilt_apex
 // module. It extracts the correct apex to use and makes it available for use by apex_set.
 type prebuiltApexExtractorModule struct {
diff --git a/bp2build/java_library_conversion_test.go b/bp2build/java_library_conversion_test.go
index 2f6bce2..4b75e3b 100644
--- a/bp2build/java_library_conversion_test.go
+++ b/bp2build/java_library_conversion_test.go
@@ -30,6 +30,7 @@
 }
 
 func runJavaLibraryTestCase(t *testing.T, tc bp2buildTestCase) {
+	t.Helper()
 	runJavaLibraryTestCaseWithRegistrationCtxFunc(t, tc, func(ctx android.RegistrationContext) {})
 }
 
@@ -156,3 +157,65 @@
 		ctx.RegisterModuleType("java_plugin", java.PluginFactory)
 	})
 }
+
+func TestJavaLibraryErrorproneJavacflagsEnabledManually(t *testing.T) {
+	runJavaLibraryTestCase(t, bp2buildTestCase{
+		blueprint: `java_library {
+    name: "java-lib-1",
+    srcs: ["a.java"],
+    javacflags: ["-Xsuper-fast"],
+    errorprone: {
+        enabled: true,
+        javacflags: ["-Xep:SpeedLimit:OFF"],
+    },
+}`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("java_library", "java-lib-1", attrNameToString{
+				"javacopts": `[
+        "-Xsuper-fast",
+        "-Xep:SpeedLimit:OFF",
+    ]`,
+				"srcs": `["a.java"]`,
+			}),
+		},
+	})
+}
+
+func TestJavaLibraryErrorproneJavacflagsErrorproneDisabledByDefault(t *testing.T) {
+	runJavaLibraryTestCase(t, bp2buildTestCase{
+		blueprint: `java_library {
+    name: "java-lib-1",
+    srcs: ["a.java"],
+    javacflags: ["-Xsuper-fast"],
+    errorprone: {
+        javacflags: ["-Xep:SpeedLimit:OFF"],
+    },
+}`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("java_library", "java-lib-1", attrNameToString{
+				"javacopts": `["-Xsuper-fast"]`,
+				"srcs":      `["a.java"]`,
+			}),
+		},
+	})
+}
+
+func TestJavaLibraryErrorproneJavacflagsErrorproneDisabledManually(t *testing.T) {
+	runJavaLibraryTestCase(t, bp2buildTestCase{
+		blueprint: `java_library {
+    name: "java-lib-1",
+    srcs: ["a.java"],
+    javacflags: ["-Xsuper-fast"],
+    errorprone: {
+        enabled: false,
+        javacflags: ["-Xep:SpeedLimit:OFF"],
+    },
+}`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("java_library", "java-lib-1", attrNameToString{
+				"javacopts": `["-Xsuper-fast"]`,
+				"srcs":      `["a.java"]`,
+			}),
+		},
+	})
+}
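The bp2build converter change these three tests exercise is not shown in this diff; as a rough sketch under that assumption (helper name and signature are hypothetical), the behavior they pin down amounts to folding errorprone-specific javacflags into the emitted javacopts only when errorprone is explicitly enabled:

    // Hypothetical helper, for illustration only; names do not correspond to
    // the real converter code.
    func javacoptsForBp2build(javacflags []string, errorproneEnabled *bool, errorproneJavacflags []string) []string {
        opts := append([]string{}, javacflags...)
        if errorproneEnabled != nil && *errorproneEnabled {
            // errorprone.javacflags are kept only when errorprone.enabled is true
            opts = append(opts, errorproneJavacflags...)
        }
        return opts
    }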
diff --git a/bpfix/bpfix/bpfix.go b/bpfix/bpfix/bpfix.go
index 4f7d88c..94b28dc 100644
--- a/bpfix/bpfix/bpfix.go
+++ b/bpfix/bpfix/bpfix.go
@@ -449,6 +449,7 @@
 		}
 
 		hasInstrumentationFor := hasNonEmptyLiteralStringProperty(mod, "instrumentation_for")
+		hasTestSuites := hasNonEmptyLiteralListProperty(mod, "test_suites")
 		tags, _ := getLiteralListPropertyValue(mod, "tags")
 
 		var hasTestsTag bool
@@ -458,7 +459,7 @@
 			}
 		}
 
-		isTest := hasInstrumentationFor || hasTestsTag
+		isTest := hasInstrumentationFor || hasTestsTag || hasTestSuites
 
 		if isTest {
 			switch mod.Type {
@@ -470,13 +471,7 @@
 				mod.Type = "java_test"
 			case "java_library_host":
 				mod.Type = "java_test_host"
-			}
-		}
-
-		// when a cc_binary module has a nonempty test_suites field, modify the type to cc_test
-		if mod.Type == "cc_binary" {
-			hasTestSuites := hasNonEmptyLiteralListProperty(mod, "test_suites")
-			if hasTestSuites {
+			case "cc_binary":
 				mod.Type = "cc_test"
 			}
 		}
diff --git a/bpfix/bpfix/bpfix_test.go b/bpfix/bpfix/bpfix_test.go
index 17b3c24..672e852 100644
--- a/bpfix/bpfix/bpfix_test.go
+++ b/bpfix/bpfix/bpfix_test.go
@@ -1436,6 +1436,38 @@
 				}
 			`,
 		},
+		{
+			name: "android_app with android_test",
+			in: `
+				android_app {
+					name: "foo",
+					srcs: ["srcs"],
+					test_suites: ["test_suite1"],
+				}
+			`,
+			out: `
+				android_test {
+					name: "foo",
+					srcs: ["srcs"],
+					test_suites: ["test_suite1"],
+				}
+			`,
+		},
+		{
+			name: "android_app without test_suites",
+			in: `
+				android_app {
+					name: "foo",
+					srcs: ["srcs"],
+				}
+			`,
+			out: `
+				android_app {
+					name: "foo",
+					srcs: ["srcs"],
+				}
+			`,
+		},
 	}
 	for _, test := range tests {
 		t.Run(test.name, func(t *testing.T) {
diff --git a/cc/config/global.go b/cc/config/global.go
index f5b8f3c..9c71683 100644
--- a/cc/config/global.go
+++ b/cc/config/global.go
@@ -225,10 +225,7 @@
 		"-Wno-misleading-indentation",               // http://b/153746954
 		"-Wno-zero-as-null-pointer-constant",        // http://b/68236239
 		"-Wno-deprecated-anon-enum-enum-conversion", // http://b/153746485
-		"-Wno-deprecated-enum-enum-conversion",      // http://b/153746563
 		"-Wno-string-compare",                       // http://b/153764102
-		"-Wno-enum-enum-conversion",                 // http://b/154138986
-		"-Wno-enum-float-conversion",                // http://b/154255917
 		"-Wno-pessimizing-move",                     // http://b/154270751
 		// New warnings to be fixed after clang-r399163
 		"-Wno-non-c-typedef-for-linkage", // http://b/161304145
diff --git a/cc/pgo.go b/cc/pgo.go
index aa0feae..0632c15 100644
--- a/cc/pgo.go
+++ b/cc/pgo.go
@@ -32,8 +32,8 @@
 	}
 
 	globalPgoProfileProjects = []string{
-		"toolchain/pgo-profiles",
-		"vendor/google_data/pgo_profile",
+		"toolchain/pgo-profiles/pgo",
+		"vendor/google_data/pgo_profile/pgo",
 	}
 )
 
diff --git a/cmd/sbox/sbox.go b/cmd/sbox/sbox.go
index 4fa7486..03ce2d5 100644
--- a/cmd/sbox/sbox.go
+++ b/cmd/sbox/sbox.go
@@ -38,9 +38,11 @@
 )
 
 var (
-	sandboxesRoot string
-	manifestFile  string
-	keepOutDir    bool
+	sandboxesRoot  string
+	outputDir      string
+	manifestFile   string
+	keepOutDir     bool
+	writeIfChanged bool
 )
 
 const (
@@ -51,10 +53,14 @@
 func init() {
 	flag.StringVar(&sandboxesRoot, "sandbox-path", "",
 		"root of temp directory to put the sandbox into")
+	flag.StringVar(&outputDir, "output-dir", "",
+		"directory which will contain all output files and only output files")
 	flag.StringVar(&manifestFile, "manifest", "",
 		"textproto manifest describing the sandboxed command(s)")
 	flag.BoolVar(&keepOutDir, "keep-out-dir", false,
 		"whether to keep the sandbox directory when done")
+	flag.BoolVar(&writeIfChanged, "write-if-changed", false,
+		"only write the output files if they have changed")
 }
 
 func usageViolation(violation string) {
@@ -197,23 +203,19 @@
 // createCommandScript will create and return an exec.Cmd that runs rawCommand.
 //
 // rawCommand is executed via a script in the sandbox.
-// tempDir is the temporary where the script is created.
-// toDirInSandBox is the path containing the script in the sbox environment.
-// toDirInSandBox is the path containing the script in the sbox environment.
-// seed is a unique integer used to distinguish different scripts that might be at location.
+// scriptPath is the temporary path where the script is created.
+// scriptPathInSandbox is the path to the script in the sbox environment.
 //
 // returns an exec.Cmd that can be run from within sbox context if no error, or nil if error.
 // caller must ensure script is cleaned up if function succeeds.
 //
-func createCommandScript(rawCommand string, tempDir, toDirInSandbox string, seed int) (*exec.Cmd, error) {
-	scriptName := fmt.Sprintf("sbox_command.%d.bash", seed)
-	scriptPathAndName := joinPath(tempDir, scriptName)
-	err := os.WriteFile(scriptPathAndName, []byte(rawCommand), 0644)
+func createCommandScript(rawCommand, scriptPath, scriptPathInSandbox string) (*exec.Cmd, error) {
+	err := os.WriteFile(scriptPath, []byte(rawCommand), 0644)
 	if err != nil {
 		return nil, fmt.Errorf("failed to write command %s... to %s",
-			rawCommand[0:40], scriptPathAndName)
+			rawCommand[0:40], scriptPath)
 	}
-	return exec.Command("bash", joinPath(toDirInSandbox, filepath.Base(scriptName))), nil
+	return exec.Command("bash", scriptPathInSandbox), nil
 }
 
 // readManifest reads an sbox manifest from a textproto file.
@@ -241,6 +243,12 @@
 		return "", fmt.Errorf("command is required")
 	}
 
+	// Remove files from the output directory
+	err = clearOutputDirectory(command.CopyAfter, outputDir, writeType(writeIfChanged))
+	if err != nil {
+		return "", err
+	}
+
 	pathToTempDirInSbox := tempDir
 	if command.GetChdir() {
 		pathToTempDirInSbox = "."
@@ -252,7 +260,7 @@
 	}
 
 	// Copy in any files specified by the manifest.
-	err = copyFiles(command.CopyBefore, "", tempDir, false)
+	err = copyFiles(command.CopyBefore, "", tempDir, requireFromExists, alwaysWrite)
 	if err != nil {
 		return "", err
 	}
@@ -277,7 +285,10 @@
 		return "", err
 	}
 
-	cmd, err := createCommandScript(rawCommand, tempDir, pathToTempDirInSbox, commandIndex)
+	scriptName := fmt.Sprintf("sbox_command.%d.bash", commandIndex)
+	scriptPath := joinPath(tempDir, scriptName)
+	scriptPathInSandbox := joinPath(pathToTempDirInSbox, scriptName)
+	cmd, err := createCommandScript(rawCommand, scriptPath, scriptPathInSandbox)
 	if err != nil {
 		return "", err
 	}
@@ -306,7 +317,7 @@
 		// especially useful for linters with baselines that print an error message on failure
 		// with a command to copy the output lint errors to the new baseline.  Use a copy instead of
 		// a move to leave the sandbox intact for manual inspection
-		copyFiles(command.CopyAfter, tempDir, "", true)
+		copyFiles(command.CopyAfter, tempDir, "", allowFromNotExists, writeType(writeIfChanged))
 	}
 
 	// If the command  was executed but failed with an error, print a debugging message before
@@ -315,9 +326,9 @@
 		fmt.Fprintf(os.Stderr,
 			"The failing command was run inside an sbox sandbox in temporary directory\n"+
 				"%s\n"+
-				"The failing command line was:\n"+
+				"The failing command line can be found in\n"+
 				"%s\n",
-			tempDir, rawCommand)
+			tempDir, scriptPath)
 	}
 
 	// Write the command's combined stdout/stderr.
@@ -327,39 +338,16 @@
 		return "", err
 	}
 
-	missingOutputErrors := validateOutputFiles(command.CopyAfter, tempDir)
-
-	if len(missingOutputErrors) > 0 {
-		// find all created files for making a more informative error message
-		createdFiles := findAllFilesUnder(tempDir)
-
-		// build error message
-		errorMessage := "mismatch between declared and actual outputs\n"
-		errorMessage += "in sbox command(" + rawCommand + ")\n\n"
-		errorMessage += "in sandbox " + tempDir + ",\n"
-		errorMessage += fmt.Sprintf("failed to create %v files:\n", len(missingOutputErrors))
-		for _, missingOutputError := range missingOutputErrors {
-			errorMessage += "  " + missingOutputError.Error() + "\n"
-		}
-		if len(createdFiles) < 1 {
-			errorMessage += "created 0 files."
-		} else {
-			errorMessage += fmt.Sprintf("did create %v files:\n", len(createdFiles))
-			creationMessages := createdFiles
-			maxNumCreationLines := 10
-			if len(creationMessages) > maxNumCreationLines {
-				creationMessages = creationMessages[:maxNumCreationLines]
-				creationMessages = append(creationMessages, fmt.Sprintf("...%v more", len(createdFiles)-maxNumCreationLines))
-			}
-			for _, creationMessage := range creationMessages {
-				errorMessage += "  " + creationMessage + "\n"
-			}
-		}
-
-		return "", errors.New(errorMessage)
+	err = validateOutputFiles(command.CopyAfter, tempDir, outputDir, rawCommand)
+	if err != nil {
+		return "", err
 	}
+
 	// the created files match the declared files; now move them
-	err = moveFiles(command.CopyAfter, tempDir, "")
+	err = moveFiles(command.CopyAfter, tempDir, "", writeType(writeIfChanged))
+	if err != nil {
+		return "", err
+	}
 
 	return depFile, nil
 }
@@ -380,8 +368,9 @@
 
 // validateOutputFiles verifies that all files that have a rule to be copied out of the sandbox
 // were created by the command.
-func validateOutputFiles(copies []*sbox_proto.Copy, sandboxDir string) []error {
+func validateOutputFiles(copies []*sbox_proto.Copy, sandboxDir, outputDir, rawCommand string) error {
 	var missingOutputErrors []error
+	var incorrectOutputDirectoryErrors []error
 	for _, copyPair := range copies {
 		fromPath := joinPath(sandboxDir, copyPair.GetFrom())
 		fileInfo, err := os.Stat(fromPath)
@@ -392,17 +381,91 @@
 		if fileInfo.IsDir() {
 			missingOutputErrors = append(missingOutputErrors, fmt.Errorf("%s: not a file", fromPath))
 		}
+
+		toPath := copyPair.GetTo()
+		if rel, err := filepath.Rel(outputDir, toPath); err != nil {
+			return err
+		} else if strings.HasPrefix(rel, "../") {
+			incorrectOutputDirectoryErrors = append(incorrectOutputDirectoryErrors,
+				fmt.Errorf("%s is not under %s", toPath, outputDir))
+		}
 	}
-	return missingOutputErrors
+
+	const maxErrors = 10
+
+	if len(incorrectOutputDirectoryErrors) > 0 {
+		errorMessage := ""
+		more := 0
+		if len(incorrectOutputDirectoryErrors) > maxErrors {
+			more = len(incorrectOutputDirectoryErrors) - maxErrors
+			incorrectOutputDirectoryErrors = incorrectOutputDirectoryErrors[:maxErrors]
+		}
+
+		for _, err := range incorrectOutputDirectoryErrors {
+			errorMessage += err.Error() + "\n"
+		}
+		if more > 0 {
+			errorMessage += fmt.Sprintf("...%v more", more)
+		}
+
+		return errors.New(errorMessage)
+	}
+
+	if len(missingOutputErrors) > 0 {
+		// find all created files for making a more informative error message
+		createdFiles := findAllFilesUnder(sandboxDir)
+
+		// build error message
+		errorMessage := "mismatch between declared and actual outputs\n"
+		errorMessage += "in sbox command(" + rawCommand + ")\n\n"
+		errorMessage += "in sandbox " + sandboxDir + ",\n"
+		errorMessage += fmt.Sprintf("failed to create %v files:\n", len(missingOutputErrors))
+		for _, missingOutputError := range missingOutputErrors {
+			errorMessage += "  " + missingOutputError.Error() + "\n"
+		}
+		if len(createdFiles) < 1 {
+			errorMessage += "created 0 files."
+		} else {
+			errorMessage += fmt.Sprintf("did create %v files:\n", len(createdFiles))
+			creationMessages := createdFiles
+			if len(creationMessages) > maxErrors {
+				creationMessages = creationMessages[:maxErrors]
+				creationMessages = append(creationMessages, fmt.Sprintf("...%v more", len(createdFiles)-maxErrors))
+			}
+			for _, creationMessage := range creationMessages {
+				errorMessage += "  " + creationMessage + "\n"
+			}
+		}
+
+		return errors.New(errorMessage)
+	}
+
+	return nil
 }
 
-// copyFiles copies files in or out of the sandbox.  If allowFromNotExists is true then errors
-// caused by a from path not existing are ignored.
-func copyFiles(copies []*sbox_proto.Copy, fromDir, toDir string, allowFromNotExists bool) error {
+type existsType bool
+
+const (
+	requireFromExists  existsType = false
+	allowFromNotExists            = true
+)
+
+type writeType bool
+
+const (
+	alwaysWrite        writeType = false
+	onlyWriteIfChanged           = true
+)
+
+// copyFiles copies files in or out of the sandbox.  If exists is allowFromNotExists then errors
+// caused by a from path not existing are ignored.  If write is onlyWriteIfChanged then the output
+// file is compared to the input file and not written to if it is the same, avoiding updating
+// the timestamp.
+func copyFiles(copies []*sbox_proto.Copy, fromDir, toDir string, exists existsType, write writeType) error {
 	for _, copyPair := range copies {
 		fromPath := joinPath(fromDir, copyPair.GetFrom())
 		toPath := joinPath(toDir, copyPair.GetTo())
-		err := copyOneFile(fromPath, toPath, copyPair.GetExecutable(), allowFromNotExists)
+		err := copyOneFile(fromPath, toPath, copyPair.GetExecutable(), exists, write)
 		if err != nil {
 			return fmt.Errorf("error copying %q to %q: %w", fromPath, toPath, err)
 		}
@@ -411,8 +474,11 @@
 }
 
 // copyOneFile copies a file and its permissions.  If forceExecutable is true it adds u+x to the
-// permissions.  If allowFromNotExists is true it returns nil if the from path doesn't exist.
-func copyOneFile(from string, to string, forceExecutable, allowFromNotExists bool) error {
+// permissions.  If exists is allowFromNotExists it returns nil if the from path doesn't exist.
+// If write is onlyWriteIfChanged then the output file is compared to the input file and not written to
+// if it is the same, avoiding updating the timestamp.
+func copyOneFile(from string, to string, forceExecutable bool, exists existsType,
+	write writeType) error {
 	err := os.MkdirAll(filepath.Dir(to), 0777)
 	if err != nil {
 		return err
@@ -420,7 +486,7 @@
 
 	stat, err := os.Stat(from)
 	if err != nil {
-		if os.IsNotExist(err) && allowFromNotExists {
+		if os.IsNotExist(err) && exists == allowFromNotExists {
 			return nil
 		}
 		return err
@@ -431,6 +497,10 @@
 		perm = perm | 0100 // u+x
 	}
 
+	if write == onlyWriteIfChanged && filesHaveSameContents(from, to) {
+		return nil
+	}
+
 	in, err := os.Open(from)
 	if err != nil {
 		return err
@@ -504,7 +574,7 @@
 		to := applyPathMappings(rspFile.PathMappings, from)
 
 		// Copy the file into the sandbox.
-		err := copyOneFile(from, joinPath(toDir, to), false, false)
+		err := copyOneFile(from, joinPath(toDir, to), false, requireFromExists, alwaysWrite)
 		if err != nil {
 			return err
 		}
@@ -551,9 +621,10 @@
 
 // moveFiles moves files specified by a set of copy rules.  It uses os.Rename, so it is restricted
 // to moving files where the source and destination are in the same filesystem.  This is OK for
-// sbox because the temporary directory is inside the out directory.  It updates the timestamp
-// of the new file.
-func moveFiles(copies []*sbox_proto.Copy, fromDir, toDir string) error {
+// sbox because the temporary directory is inside the out directory.  If write is onlyWriteIfChanged
+// then the output file is compared to the input file and not written to if it is the same, avoiding
+// updating the timestamp.  Otherwise it always updates the timestamp of the new file.
+func moveFiles(copies []*sbox_proto.Copy, fromDir, toDir string, write writeType) error {
 	for _, copyPair := range copies {
 		fromPath := joinPath(fromDir, copyPair.GetFrom())
 		toPath := joinPath(toDir, copyPair.GetTo())
@@ -562,6 +633,10 @@
 			return err
 		}
 
+		if write == onlyWriteIfChanged && filesHaveSameContents(fromPath, toPath) {
+			continue
+		}
+
 		err = os.Rename(fromPath, toPath)
 		if err != nil {
 			return err
@@ -578,6 +653,37 @@
 	return nil
 }
 
+// clearOutputDirectory removes all files in the output directory if write is alwaysWrite, or
+// any files not listed in copies if write is onlyWriteIfChanged
+func clearOutputDirectory(copies []*sbox_proto.Copy, outputDir string, write writeType) error {
+	if outputDir == "" {
+		return fmt.Errorf("output directory must be set")
+	}
+
+	if write == alwaysWrite {
+		// When writing all the output files remove the whole output directory
+		return os.RemoveAll(outputDir)
+	}
+
+	outputFiles := make(map[string]bool, len(copies))
+	for _, copyPair := range copies {
+		outputFiles[copyPair.GetTo()] = true
+	}
+
+	existingFiles := findAllFilesUnder(outputDir)
+	for _, existingFile := range existingFiles {
+		fullExistingFile := filepath.Join(outputDir, existingFile)
+		if !outputFiles[fullExistingFile] {
+			err := os.Remove(fullExistingFile)
+			if err != nil {
+				return fmt.Errorf("failed to remove obsolete output file %s: %w", fullExistingFile, err)
+			}
+		}
+	}
+
+	return nil
+}
+
 // Rewrite one or more depfiles so that it doesn't include the (randomized) sandbox directory
 // to an output file.
 func rewriteDepFiles(ins []string, out string) error {
@@ -621,6 +727,66 @@
 	return filepath.Join(dir, file)
 }
 
+// filesHaveSameContents compares the contents of two files, returning true if they are the same
+// and returning false if they are different or any errors occur.
+func filesHaveSameContents(a, b string) bool {
+	// Compare the sizes of the two files
+	statA, err := os.Stat(a)
+	if err != nil {
+		return false
+	}
+	statB, err := os.Stat(b)
+	if err != nil {
+		return false
+	}
+
+	if statA.Size() != statB.Size() {
+		return false
+	}
+
+	// Open the two files
+	fileA, err := os.Open(a)
+	if err != nil {
+		return false
+	}
+	defer fileA.Close()
+	fileB, err := os.Open(b)
+	if err != nil {
+		return false
+	}
+	defer fileB.Close()
+
+	// Compare the files 1MB at a time
+	const bufSize = 1 * 1024 * 1024
+	bufA := make([]byte, bufSize)
+	bufB := make([]byte, bufSize)
+
+	remain := statA.Size()
+	for remain > 0 {
+		toRead := int64(bufSize)
+		if toRead > remain {
+			toRead = remain
+		}
+
+		_, err = io.ReadFull(fileA, bufA[:toRead])
+		if err != nil {
+			return false
+		}
+		_, err = io.ReadFull(fileB, bufB[:toRead])
+		if err != nil {
+			return false
+		}
+
+		if !bytes.Equal(bufA[:toRead], bufB[:toRead]) {
+			return false
+		}
+
+		remain -= toRead
+	}
+
+	return true
+}
+
 func makeAbsPathEnv(pathEnv string) (string, error) {
 	pathEnvElements := filepath.SplitList(pathEnv)
 	for i, p := range pathEnvElements {
diff --git a/cmd/soong_build/Android.bp b/cmd/soong_build/Android.bp
index e85163e..72af3e0 100644
--- a/cmd/soong_build/Android.bp
+++ b/cmd/soong_build/Android.bp
@@ -25,6 +25,7 @@
         "golang-protobuf-android",
         "soong",
         "soong-android",
+        "soong-provenance",
         "soong-bp2build",
         "soong-ui-metrics_proto",
     ],
diff --git a/cmd/symbols_map/elf.go b/cmd/symbols_map/elf.go
index b38896a..3c8b1e4 100644
--- a/cmd/symbols_map/elf.go
+++ b/cmd/symbols_map/elf.go
@@ -18,8 +18,10 @@
 	"debug/elf"
 	"encoding/binary"
 	"encoding/hex"
+	"errors"
 	"fmt"
 	"io"
+	"os"
 )
 
 const gnuBuildID = "GNU\x00"
@@ -27,12 +29,33 @@
 // elfIdentifier extracts the elf build ID from an elf file.  If allowMissing is true it returns
 // an empty identifier if the file exists but the build ID note does not.
 func elfIdentifier(filename string, allowMissing bool) (string, error) {
-	f, err := elf.Open(filename)
+	f, err := os.Open(filename)
 	if err != nil {
 		return "", fmt.Errorf("failed to open %s: %w", filename, err)
 	}
 	defer f.Close()
 
+	return elfIdentifierFromReaderAt(f, filename, allowMissing)
+}
+
+// elfIdentifierFromReaderAt extracts the elf build ID from a ReaderAt.  If allowMissing is true it returns
+// an empty identifier if the file exists but the build ID note does not.
+func elfIdentifierFromReaderAt(r io.ReaderAt, filename string, allowMissing bool) (string, error) {
+	f, err := elf.NewFile(r)
+	if err != nil {
+		if allowMissing {
+			if errors.Is(err, io.EOF) {
+				return "", nil
+			}
+			if _, ok := err.(*elf.FormatError); ok {
+				// The file was not an elf file.
+				return "", nil
+			}
+		}
+		return "", fmt.Errorf("failed to parse elf file %s: %w", filename, err)
+	}
+	defer f.Close()
+
 	buildIDNote := f.Section(".note.gnu.build-id")
 	if buildIDNote == nil {
 		if allowMissing {
diff --git a/cmd/symbols_map/elf_test.go b/cmd/symbols_map/elf_test.go
index e616228..b96ea59 100644
--- a/cmd/symbols_map/elf_test.go
+++ b/cmd/symbols_map/elf_test.go
@@ -16,11 +16,46 @@
 
 import (
 	"bytes"
+	"debug/elf"
 	"encoding/binary"
 	"reflect"
 	"testing"
 )
 
+func Test_elfIdentifierFromReaderAt_BadElfFile(t *testing.T) {
+	tests := []struct {
+		name     string
+		contents string
+	}{
+		{
+			name:     "empty",
+			contents: "",
+		},
+		{
+			name:     "text",
+			contents: "#!/bin/bash\necho foobar",
+		},
+		{
+			name:     "empty elf",
+			contents: emptyElfFile(),
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			buf := bytes.NewReader([]byte(tt.contents))
+			_, err := elfIdentifierFromReaderAt(buf, "<>", false)
+			if err == nil {
+				t.Errorf("expected error reading bad elf file without allowMissing")
+			}
+			_, err = elfIdentifierFromReaderAt(buf, "<>", true)
+			if err != nil {
+				t.Errorf("expected no error reading bad elf file with allowMissing, got %q", err.Error())
+			}
+		})
+	}
+}
+
 func Test_readNote(t *testing.T) {
 	note := []byte{
 		0x04, 0x00, 0x00, 0x00,
@@ -43,3 +78,36 @@
 		t.Errorf("incorrect return, want %#v got %#v", expectedDescs, descs)
 	}
 }
+
+// emptyElfFile returns an elf file header with no program headers or sections.
+func emptyElfFile() string {
+	ident := [elf.EI_NIDENT]byte{}
+	identBuf := bytes.NewBuffer(ident[0:0:elf.EI_NIDENT])
+	binary.Write(identBuf, binary.LittleEndian, []byte("\x7fELF"))
+	binary.Write(identBuf, binary.LittleEndian, elf.ELFCLASS64)
+	binary.Write(identBuf, binary.LittleEndian, elf.ELFDATA2LSB)
+	binary.Write(identBuf, binary.LittleEndian, elf.EV_CURRENT)
+	binary.Write(identBuf, binary.LittleEndian, elf.ELFOSABI_LINUX)
+	binary.Write(identBuf, binary.LittleEndian, make([]byte, 8))
+
+	header := elf.Header64{
+		Ident:     ident,
+		Type:      uint16(elf.ET_EXEC),
+		Machine:   uint16(elf.EM_X86_64),
+		Version:   uint32(elf.EV_CURRENT),
+		Entry:     0,
+		Phoff:     uint64(binary.Size(elf.Header64{})),
+		Shoff:     uint64(binary.Size(elf.Header64{})),
+		Flags:     0,
+		Ehsize:    uint16(binary.Size(elf.Header64{})),
+		Phentsize: 0x38,
+		Phnum:     0,
+		Shentsize: 0x40,
+		Shnum:     0,
+		Shstrndx:  0,
+	}
+
+	buf := &bytes.Buffer{}
+	binary.Write(buf, binary.LittleEndian, header)
+	return buf.String()
+}
diff --git a/java/Android.bp b/java/Android.bp
index 4bcae4f..df0d1eb 100644
--- a/java/Android.bp
+++ b/java/Android.bp
@@ -15,6 +15,7 @@
         "soong-dexpreopt",
         "soong-genrule",
         "soong-java-config",
+        "soong-provenance",
         "soong-python",
         "soong-remoteexec",
         "soong-tradefed",
diff --git a/java/app_import.go b/java/app_import.go
index 3e5f972..a1c4d58 100644
--- a/java/app_import.go
+++ b/java/app_import.go
@@ -22,6 +22,7 @@
 	"github.com/google/blueprint/proptools"
 
 	"android/soong/android"
+	"android/soong/provenance"
 )
 
 func init() {
@@ -57,6 +58,8 @@
 	installPath android.InstallPath
 
 	hideApexVariantFromMake bool
+
+	provenanceMetaDataFile android.OutputPath
 }
 
 type AndroidAppImportProperties struct {
@@ -343,6 +346,8 @@
 
 	if apexInfo.IsForPlatform() {
 		a.installPath = ctx.InstallFile(installDir, apkFilename, a.outputFile)
+		artifactPath := android.PathForModuleSrc(ctx, *a.properties.Apk)
+		a.provenanceMetaDataFile = provenance.GenerateArtifactProvenanceMetaData(ctx, artifactPath, a.installPath)
 	}
 
 	// TODO: androidmk converter jni libs
@@ -368,6 +373,10 @@
 	return a.certificate
 }
 
+func (a *AndroidAppImport) ProvenanceMetaDataFile() android.OutputPath {
+	return a.provenanceMetaDataFile
+}
+
 var dpiVariantGroupType reflect.Type
 var archVariantGroupType reflect.Type
 var supportedDpis = []string{"ldpi", "mdpi", "hdpi", "xhdpi", "xxhdpi", "xxxhdpi"}
diff --git a/java/app_import_test.go b/java/app_import_test.go
index efa52c1..8f6c75f 100644
--- a/java/app_import_test.go
+++ b/java/app_import_test.go
@@ -53,6 +53,11 @@
 	if expected != signingFlag {
 		t.Errorf("Incorrect signing flags, expected: %q, got: %q", expected, signingFlag)
 	}
+	rule := variant.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "prebuilts/apk/app.apk", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "foo", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/app/foo/foo.apk", rule.Args["install_path"])
 }
 
 func TestAndroidAppImport_NoDexPreopt(t *testing.T) {
@@ -74,6 +79,12 @@
 		variant.MaybeOutput("dexpreopt/oat/arm64/package.odex").Rule != nil {
 		t.Errorf("dexpreopt shouldn't have run.")
 	}
+
+	rule := variant.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "prebuilts/apk/app.apk", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "foo", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/app/foo/foo.apk", rule.Args["install_path"])
 }
 
 func TestAndroidAppImport_Presigned(t *testing.T) {
@@ -102,6 +113,12 @@
 	if variant.MaybeOutput("zip-aligned/foo.apk").Rule == nil {
 		t.Errorf("can't find aligning rule")
 	}
+
+	rule := variant.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "prebuilts/apk/app.apk", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "foo", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/app/foo/foo.apk", rule.Args["install_path"])
 }
 
 func TestAndroidAppImport_SigningLineage(t *testing.T) {
@@ -137,6 +154,12 @@
 	if expected != signingFlag {
 		t.Errorf("Incorrect signing flags, expected: %q, got: %q", expected, signingFlag)
 	}
+
+	rule := variant.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "prebuilts/apk/app.apk", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "foo", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/app/foo/foo.apk", rule.Args["install_path"])
 }
 
 func TestAndroidAppImport_SigningLineageFilegroup(t *testing.T) {
@@ -163,6 +186,12 @@
 	if expected != signingFlag {
 		t.Errorf("Incorrect signing flags, expected: %q, got: %q", expected, signingFlag)
 	}
+
+	rule := variant.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "prebuilts/apk/app.apk", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "foo", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/app/foo/foo.apk", rule.Args["install_path"])
 }
 
 func TestAndroidAppImport_DefaultDevCert(t *testing.T) {
@@ -192,6 +221,12 @@
 	if expected != signingFlag {
 		t.Errorf("Incorrect signing flags, expected: %q, got: %q", expected, signingFlag)
 	}
+
+	rule := variant.Rule("genProvenanceMetaData")
+	android.AssertStringEquals(t, "Invalid input", "prebuilts/apk/app.apk", rule.Inputs[0].String())
+	android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto", rule.Output.String())
+	android.AssertStringEquals(t, "Invalid args", "foo", rule.Args["module_name"])
+	android.AssertStringEquals(t, "Invalid args", "/system/app/foo/foo.apk", rule.Args["install_path"])
 }
 
 func TestAndroidAppImport_DpiVariants(t *testing.T) {
@@ -214,40 +249,46 @@
 		}
 		`
 	testCases := []struct {
-		name                string
-		aaptPreferredConfig *string
-		aaptPrebuiltDPI     []string
-		expected            string
+		name                                   string
+		aaptPreferredConfig                    *string
+		aaptPrebuiltDPI                        []string
+		expected                               string
+		expectedProvenanceMetaDataArtifactPath string
 	}{
 		{
-			name:                "no preferred",
-			aaptPreferredConfig: nil,
-			aaptPrebuiltDPI:     []string{},
-			expected:            "verify_uses_libraries/apk/app.apk",
+			name:                                   "no preferred",
+			aaptPreferredConfig:                    nil,
+			aaptPrebuiltDPI:                        []string{},
+			expected:                               "verify_uses_libraries/apk/app.apk",
+			expectedProvenanceMetaDataArtifactPath: "prebuilts/apk/app.apk",
 		},
 		{
-			name:                "AAPTPreferredConfig matches",
-			aaptPreferredConfig: proptools.StringPtr("xhdpi"),
-			aaptPrebuiltDPI:     []string{"xxhdpi", "ldpi"},
-			expected:            "verify_uses_libraries/apk/app_xhdpi.apk",
+			name:                                   "AAPTPreferredConfig matches",
+			aaptPreferredConfig:                    proptools.StringPtr("xhdpi"),
+			aaptPrebuiltDPI:                        []string{"xxhdpi", "ldpi"},
+			expected:                               "verify_uses_libraries/apk/app_xhdpi.apk",
+			expectedProvenanceMetaDataArtifactPath: "prebuilts/apk/app_xhdpi.apk",
 		},
 		{
-			name:                "AAPTPrebuiltDPI matches",
-			aaptPreferredConfig: proptools.StringPtr("mdpi"),
-			aaptPrebuiltDPI:     []string{"xxhdpi", "xhdpi"},
-			expected:            "verify_uses_libraries/apk/app_xxhdpi.apk",
+			name:                                   "AAPTPrebuiltDPI matches",
+			aaptPreferredConfig:                    proptools.StringPtr("mdpi"),
+			aaptPrebuiltDPI:                        []string{"xxhdpi", "xhdpi"},
+			expected:                               "verify_uses_libraries/apk/app_xxhdpi.apk",
+			expectedProvenanceMetaDataArtifactPath: "prebuilts/apk/app_xxhdpi.apk",
 		},
 		{
-			name:                "non-first AAPTPrebuiltDPI matches",
-			aaptPreferredConfig: proptools.StringPtr("mdpi"),
-			aaptPrebuiltDPI:     []string{"ldpi", "xhdpi"},
-			expected:            "verify_uses_libraries/apk/app_xhdpi.apk",
+			name:                                   "non-first AAPTPrebuiltDPI matches",
+			aaptPreferredConfig:                    proptools.StringPtr("mdpi"),
+			aaptPrebuiltDPI:                        []string{"ldpi", "xhdpi"},
+			expected:                               "verify_uses_libraries/apk/app_xhdpi.apk",
+			expectedProvenanceMetaDataArtifactPath: "prebuilts/apk/app_xhdpi.apk",
 		},
 		{
-			name:                "no matches",
-			aaptPreferredConfig: proptools.StringPtr("mdpi"),
-			aaptPrebuiltDPI:     []string{"ldpi", "xxxhdpi"},
-			expected:            "verify_uses_libraries/apk/app.apk",
+			name:                                   "no matches",
+			aaptPreferredConfig:                    proptools.StringPtr("mdpi"),
+			aaptPrebuiltDPI:                        []string{"ldpi", "xxxhdpi"},
+			expected:                               "verify_uses_libraries/apk/app.apk",
+			expectedProvenanceMetaDataArtifactPath: "prebuilts/apk/app.apk",
 		},
 	}
 
@@ -270,6 +311,12 @@
 		if !strings.HasSuffix(matches[1], test.expected) {
 			t.Errorf("wrong src apk, expected: %q got: %q", test.expected, matches[1])
 		}
+
+		provenanceMetaDataRule := variant.Rule("genProvenanceMetaData")
+		android.AssertStringEquals(t, "Invalid input", test.expectedProvenanceMetaDataArtifactPath, provenanceMetaDataRule.Inputs[0].String())
+		android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto", provenanceMetaDataRule.Output.String())
+		android.AssertStringEquals(t, "Invalid args", "foo", provenanceMetaDataRule.Args["module_name"])
+		android.AssertStringEquals(t, "Invalid args", "/system/app/foo/foo.apk", provenanceMetaDataRule.Args["install_path"])
 	}
 }
 
@@ -290,16 +337,25 @@
 		`)
 
 	testCases := []struct {
-		name     string
-		expected string
+		name                 string
+		expected             string
+		onDevice             string
+		expectedArtifactPath string
+		expectedMetaDataPath string
 	}{
 		{
-			name:     "foo",
-			expected: "foo.apk",
+			name:                 "foo",
+			expected:             "foo.apk",
+			onDevice:             "/system/app/foo/foo.apk",
+			expectedArtifactPath: "prebuilts/apk/app.apk",
+			expectedMetaDataPath: "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto",
 		},
 		{
-			name:     "bar",
-			expected: "bar_sample.apk",
+			name:                 "bar",
+			expected:             "bar_sample.apk",
+			onDevice:             "/system/app/bar/bar_sample.apk",
+			expectedArtifactPath: "prebuilts/apk/app.apk",
+			expectedMetaDataPath: "out/soong/.intermediates/provenance_metadata/bar/provenance_metadata.textproto",
 		},
 	}
 
@@ -316,15 +372,23 @@
 			t.Errorf("Incorrect LOCAL_INSTALLED_MODULE_STEM value '%s', expected '%s'",
 				actualValues, expectedValues)
 		}
+		rule := variant.Rule("genProvenanceMetaData")
+		android.AssertStringEquals(t, "Invalid input", test.expectedArtifactPath, rule.Inputs[0].String())
+		android.AssertStringEquals(t, "Invalid output", test.expectedMetaDataPath, rule.Output.String())
+		android.AssertStringEquals(t, "Invalid args", test.name, rule.Args["module_name"])
+		android.AssertStringEquals(t, "Invalid args", test.onDevice, rule.Args["install_path"])
 	}
 }
 
 func TestAndroidAppImport_ArchVariants(t *testing.T) {
 	// The test config's target arch is ARM64.
 	testCases := []struct {
-		name     string
-		bp       string
-		expected string
+		name         string
+		bp           string
+		expected     string
+		artifactPath string
+		metaDataPath string
+		installPath  string
 	}{
 		{
 			name: "matching arch",
@@ -343,7 +407,9 @@
 					},
 				}
 			`,
-			expected: "verify_uses_libraries/apk/app_arm64.apk",
+			expected:     "verify_uses_libraries/apk/app_arm64.apk",
+			artifactPath: "prebuilts/apk/app_arm64.apk",
+			installPath:  "/system/app/foo/foo.apk",
 		},
 		{
 			name: "no matching arch",
@@ -362,7 +428,9 @@
 					},
 				}
 			`,
-			expected: "verify_uses_libraries/apk/app.apk",
+			expected:     "verify_uses_libraries/apk/app.apk",
+			artifactPath: "prebuilts/apk/app.apk",
+			installPath:  "/system/app/foo/foo.apk",
 		},
 		{
 			name: "no matching arch without default",
@@ -380,7 +448,9 @@
 					},
 				}
 			`,
-			expected: "",
+			expected:     "",
+			artifactPath: "prebuilts/apk/app_arm.apk",
+			installPath:  "/system/app/foo/foo.apk",
 		},
 	}
 
@@ -393,6 +463,8 @@
 			if variant.Module().Enabled() {
 				t.Error("module should have been disabled, but wasn't")
 			}
+			rule := variant.MaybeRule("genProvenanceMetaData")
+			android.AssertDeepEquals(t, "Provenance metadata is not empty", android.TestingBuildParams{}, rule)
 			continue
 		}
 		jniRuleCommand := variant.Output("jnis-uncompressed/foo.apk").RuleParams.Command
@@ -403,6 +475,11 @@
 		if !strings.HasSuffix(matches[1], test.expected) {
 			t.Errorf("wrong src apk, expected: %q got: %q", test.expected, matches[1])
 		}
+		rule := variant.Rule("genProvenanceMetaData")
+		android.AssertStringEquals(t, "Invalid input", test.artifactPath, rule.Inputs[0].String())
+		android.AssertStringEquals(t, "Invalid output", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto", rule.Output.String())
+		android.AssertStringEquals(t, "Invalid args", "foo", rule.Args["module_name"])
+		android.AssertStringEquals(t, "Invalid args", test.installPath, rule.Args["install_path"])
 	}
 }
 
diff --git a/java/base.go b/java/base.go
index 2f425cd..5802099 100644
--- a/java/base.go
+++ b/java/base.go
@@ -1048,12 +1048,24 @@
 		}
 	}
 
+	// We don't currently run annotation processors in turbine, which means we can't use turbine
+	// generated header jars when an annotation processor that generates API is enabled.  One
+	// exception (handled further below) is when kotlin sources are enabled, in which case kapt
+	//  is used to run all of the annotation processors.
+	disableTurbine := deps.disableTurbine
+
 	// Collect .java files for AIDEGen
 	j.expandIDEInfoCompiledSrcs = append(j.expandIDEInfoCompiledSrcs, uniqueSrcFiles.Strings()...)
 
 	var kotlinJars android.Paths
+	var kotlinHeaderJars android.Paths
 
 	if srcFiles.HasExt(".kt") {
+		// When using kotlin sources kapt is used to generate annotation processor sources,
+		// including for annotation processors that generate API, so we can use turbine for
+		// java sources too.
+		disableTurbine = false
+
 		// user defined kotlin flags.
 		kotlincFlags := j.properties.Kotlincflags
 		CheckKotlincFlags(ctx, kotlincFlags)
@@ -1109,18 +1121,22 @@
 		}
 
 		kotlinJar := android.PathForModuleOut(ctx, "kotlin", jarName)
-		kotlinCompile(ctx, kotlinJar, kotlinSrcFiles, kotlinCommonSrcFiles, srcJars, flags)
+		kotlinHeaderJar := android.PathForModuleOut(ctx, "kotlin_headers", jarName)
+		kotlinCompile(ctx, kotlinJar, kotlinHeaderJar, kotlinSrcFiles, kotlinCommonSrcFiles, srcJars, flags)
 		if ctx.Failed() {
 			return
 		}
 
 		// Make javac rule depend on the kotlinc rule
-		flags.classpath = append(flags.classpath, kotlinJar)
+		flags.classpath = append(classpath{kotlinHeaderJar}, flags.classpath...)
 
 		kotlinJars = append(kotlinJars, kotlinJar)
+		kotlinHeaderJars = append(kotlinHeaderJars, kotlinHeaderJar)
+
 		// Jar kotlin classes into the final jar after javac
 		if BoolDefault(j.properties.Static_kotlin_stdlib, true) {
 			kotlinJars = append(kotlinJars, deps.kotlinStdlib...)
+			kotlinHeaderJars = append(kotlinHeaderJars, deps.kotlinStdlib...)
 		} else {
 			flags.dexClasspath = append(flags.dexClasspath, deps.kotlinStdlib...)
 		}
@@ -1134,7 +1150,7 @@
 
 	enableSharding := false
 	var headerJarFileWithoutDepsOrJarjar android.Path
-	if ctx.Device() && !ctx.Config().IsEnvFalse("TURBINE_ENABLED") && !deps.disableTurbine {
+	if ctx.Device() && !ctx.Config().IsEnvFalse("TURBINE_ENABLED") && !disableTurbine {
 		if j.properties.Javac_shard_size != nil && *(j.properties.Javac_shard_size) > 0 {
 			enableSharding = true
 			// Formerly, there was a check here that prevented annotation processors
@@ -1144,7 +1160,7 @@
 			// with sharding enabled. See: b/77284273.
 		}
 		headerJarFileWithoutDepsOrJarjar, j.headerJarFile =
-			j.compileJavaHeader(ctx, uniqueSrcFiles, srcJars, deps, flags, jarName, kotlinJars)
+			j.compileJavaHeader(ctx, uniqueSrcFiles, srcJars, deps, flags, jarName, kotlinHeaderJars)
 		if ctx.Failed() {
 			return
 		}
diff --git a/java/config/kotlin.go b/java/config/kotlin.go
index a83f87f..fc63f4d 100644
--- a/java/config/kotlin.go
+++ b/java/config/kotlin.go
@@ -34,6 +34,7 @@
 	pctx.SourcePathVariable("KotlinKaptJar", "external/kotlinc/lib/kotlin-annotation-processing.jar")
 	pctx.SourcePathVariable("KotlinAnnotationJar", "external/kotlinc/lib/annotations-13.0.jar")
 	pctx.SourcePathVariable("KotlinStdlibJar", KotlinStdlibJar)
+	pctx.SourcePathVariable("KotlinAbiGenPluginJar", "external/kotlinc/lib/jvm-abi-gen.jar")
 
 	// These flags silence "Illegal reflective access" warnings when running kapt in OpenJDK9+
 	pctx.StaticVariable("KaptSuppressJDK9Warnings", strings.Join([]string{
diff --git a/java/droidstubs.go b/java/droidstubs.go
index e7aeeb8..3b1f7c0 100644
--- a/java/droidstubs.go
+++ b/java/droidstubs.go
@@ -433,6 +433,10 @@
 	}
 }
 
+func metalavaUseRbe(ctx android.ModuleContext) bool {
+	return ctx.Config().UseRBE() && ctx.Config().IsEnvTrue("RBE_METALAVA")
+}
+
 func metalavaCmd(ctx android.ModuleContext, rule *android.RuleBuilder, javaVersion javaVersion, srcs android.Paths,
 	srcJarList android.Path, bootclasspath, classpath classpath, homeDir android.WritablePath) *android.RuleBuilderCommand {
 	rule.Command().Text("rm -rf").Flag(homeDir.String())
@@ -441,7 +445,7 @@
 	cmd := rule.Command()
 	cmd.FlagWithArg("ANDROID_PREFS_ROOT=", homeDir.String())
 
-	if ctx.Config().UseRBE() && ctx.Config().IsEnvTrue("RBE_METALAVA") {
+	if metalavaUseRbe(ctx) {
 		rule.Remoteable(android.RemoteRuleSupports{RBE: true})
 		execStrategy := ctx.Config().GetenvWithDefault("RBE_METALAVA_EXEC_STRATEGY", remoteexec.LocalExecStrategy)
 		labels := map[string]string{"type": "tool", "name": "metalava"}
@@ -665,7 +669,9 @@
 	}
 
 	// TODO(b/183630617): rewrapper doesn't support restat rules
-	// rule.Restat()
+	if !metalavaUseRbe(ctx) {
+		rule.Restat()
+	}
 
 	zipSyncCleanupCmd(rule, srcJarDir)
 
diff --git a/java/hiddenapi_modular.go b/java/hiddenapi_modular.go
index 95ded34..44cdfa5 100644
--- a/java/hiddenapi_modular.go
+++ b/java/hiddenapi_modular.go
@@ -994,10 +994,11 @@
 	rule := android.NewRuleBuilder(pctx, ctx)
 	command := rule.Command().
 		BuiltTool("verify_overlaps").
-		Input(monolithicFilePath)
+		FlagWithInput("--monolithic-flags ", monolithicFilePath)
 
 	for _, subset := range csvSubsets {
 		command.
+			Flag("--module-flags ").
 			Textf("%s:%s", subset.CsvFile, subset.SignaturePatternsFile).
 			Implicit(subset.CsvFile).Implicit(subset.SignaturePatternsFile)
 	}
diff --git a/java/java.go b/java/java.go
index ecbbc32..713fe94 100644
--- a/java/java.go
+++ b/java/java.go
@@ -2060,15 +2060,22 @@
 		protoSrcPartition: android.ProtoSrcLabelPartition,
 	})
 
+	var javacopts []string
+	if m.properties.Javacflags != nil {
+		javacopts = append(javacopts, m.properties.Javacflags...)
+	}
+	epEnabled := m.properties.Errorprone.Enabled
+	//TODO(b/227504307) add configuration that depends on RUN_ERROR_PRONE environment variable
+	if Bool(epEnabled) {
+		javacopts = append(javacopts, m.properties.Errorprone.Javacflags...)
+	}
+
 	commonAttrs := &javaCommonAttributes{
 		Srcs: srcPartitions[javaSrcPartition],
 		Plugins: bazel.MakeLabelListAttribute(
 			android.BazelLabelForModuleDeps(ctx, m.properties.Plugins),
 		),
-	}
-
-	if m.properties.Javacflags != nil {
-		commonAttrs.Javacopts = bazel.MakeStringListAttribute(m.properties.Javacflags)
+		Javacopts: bazel.MakeStringListAttribute(javacopts),
 	}
 
 	depLabels := &javaDependencyLabels{}
diff --git a/java/kotlin.go b/java/kotlin.go
index ce79bae..eff5bb5 100644
--- a/java/kotlin.go
+++ b/java/kotlin.go
@@ -28,17 +28,20 @@
 
 var kotlinc = pctx.AndroidRemoteStaticRule("kotlinc", android.RemoteRuleSupports{Goma: true},
 	blueprint.RuleParams{
-		Command: `rm -rf "$classesDir" "$srcJarDir" "$kotlinBuildFile" "$emptyDir" && ` +
-			`mkdir -p "$classesDir" "$srcJarDir" "$emptyDir" && ` +
+		Command: `rm -rf "$classesDir" "$headerClassesDir" "$srcJarDir" "$kotlinBuildFile" "$emptyDir" && ` +
+			`mkdir -p "$classesDir" "$headerClassesDir" "$srcJarDir" "$emptyDir" && ` +
 			`${config.ZipSyncCmd} -d $srcJarDir -l $srcJarDir/list -f "*.java" $srcJars && ` +
 			`${config.GenKotlinBuildFileCmd} --classpath "$classpath" --name "$name"` +
 			` --out_dir "$classesDir" --srcs "$out.rsp" --srcs "$srcJarDir/list"` +
 			` $commonSrcFilesArg --out "$kotlinBuildFile" && ` +
 			`${config.KotlincCmd} ${config.KotlincGlobalFlags} ` +
-			`${config.KotlincSuppressJDK9Warnings} ${config.JavacHeapFlags} ` +
-			`$kotlincFlags -jvm-target $kotlinJvmTarget -Xbuild-file=$kotlinBuildFile ` +
-			`-kotlin-home $emptyDir && ` +
-			`${config.SoongZipCmd} -jar -o $out -C $classesDir -D $classesDir && ` +
+			` ${config.KotlincSuppressJDK9Warnings} ${config.JavacHeapFlags} ` +
+			` $kotlincFlags -jvm-target $kotlinJvmTarget -Xbuild-file=$kotlinBuildFile ` +
+			` -kotlin-home $emptyDir ` +
+			` -Xplugin=${config.KotlinAbiGenPluginJar} ` +
+			` -P plugin:org.jetbrains.kotlin.jvm.abi:outputDir=$headerClassesDir && ` +
+			`${config.SoongZipCmd} -jar -o $out -C $classesDir -D $classesDir -write_if_changed && ` +
+			`${config.SoongZipCmd} -jar -o $headerJar -C $headerClassesDir -D $headerClassesDir -write_if_changed && ` +
 			`rm -rf "$srcJarDir"`,
 		CommandDeps: []string{
 			"${config.KotlincCmd}",
@@ -49,15 +52,17 @@
 			"${config.KotlinStdlibJar}",
 			"${config.KotlinTrove4jJar}",
 			"${config.KotlinAnnotationJar}",
+			"${config.KotlinAbiGenPluginJar}",
 			"${config.GenKotlinBuildFileCmd}",
 			"${config.SoongZipCmd}",
 			"${config.ZipSyncCmd}",
 		},
 		Rspfile:        "$out.rsp",
 		RspfileContent: `$in`,
+		Restat:         true,
 	},
 	"kotlincFlags", "classpath", "srcJars", "commonSrcFilesArg", "srcJarDir", "classesDir",
-	"kotlinJvmTarget", "kotlinBuildFile", "emptyDir", "name")
+	"headerClassesDir", "headerJar", "kotlinJvmTarget", "kotlinBuildFile", "emptyDir", "name")
 
 func kotlinCommonSrcsList(ctx android.ModuleContext, commonSrcFiles android.Paths) android.OptionalPath {
 	if len(commonSrcFiles) > 0 {
@@ -76,7 +81,7 @@
 }
 
 // kotlinCompile takes .java and .kt sources and srcJars, and compiles the .kt sources into a classes jar in outputFile.
-func kotlinCompile(ctx android.ModuleContext, outputFile android.WritablePath,
+func kotlinCompile(ctx android.ModuleContext, outputFile, headerOutputFile android.WritablePath,
 	srcFiles, commonSrcFiles, srcJars android.Paths,
 	flags javaBuilderFlags) {
 
@@ -97,17 +102,20 @@
 	}
 
 	ctx.Build(pctx, android.BuildParams{
-		Rule:        kotlinc,
-		Description: "kotlinc",
-		Output:      outputFile,
-		Inputs:      srcFiles,
-		Implicits:   deps,
+		Rule:           kotlinc,
+		Description:    "kotlinc",
+		Output:         outputFile,
+		ImplicitOutput: headerOutputFile,
+		Inputs:         srcFiles,
+		Implicits:      deps,
 		Args: map[string]string{
 			"classpath":         flags.kotlincClasspath.FormJavaClassPath(""),
 			"kotlincFlags":      flags.kotlincFlags,
 			"commonSrcFilesArg": commonSrcFilesArg,
 			"srcJars":           strings.Join(srcJars.Strings(), " "),
 			"classesDir":        android.PathForModuleOut(ctx, "kotlinc", "classes").String(),
+			"headerClassesDir":  android.PathForModuleOut(ctx, "kotlinc", "header_classes").String(),
+			"headerJar":         headerOutputFile.String(),
 			"srcJarDir":         android.PathForModuleOut(ctx, "kotlinc", "srcJars").String(),
 			"kotlinBuildFile":   android.PathForModuleOut(ctx, "kotlinc-build.xml").String(),
 			"emptyDir":          android.PathForModuleOut(ctx, "kotlinc", "empty").String(),
diff --git a/java/kotlin_test.go b/java/kotlin_test.go
index d51bc04..f9ff982 100644
--- a/java/kotlin_test.go
+++ b/java/kotlin_test.go
@@ -45,6 +45,10 @@
 	fooKotlinc := ctx.ModuleForTests("foo", "android_common").Rule("kotlinc")
 	fooJavac := ctx.ModuleForTests("foo", "android_common").Rule("javac")
 	fooJar := ctx.ModuleForTests("foo", "android_common").Output("combined/foo.jar")
+	fooHeaderJar := ctx.ModuleForTests("foo", "android_common").Output("turbine-combined/foo.jar")
+
+	fooKotlincClasses := fooKotlinc.Output
+	fooKotlincHeaderClasses := fooKotlinc.ImplicitOutput
 
 	if len(fooKotlinc.Inputs) != 2 || fooKotlinc.Inputs[0].String() != "a.java" ||
 		fooKotlinc.Inputs[1].String() != "b.kt" {
@@ -55,17 +59,21 @@
 		t.Errorf(`foo inputs %v != ["a.java"]`, fooJavac.Inputs)
 	}
 
-	if !strings.Contains(fooJavac.Args["classpath"], fooKotlinc.Output.String()) {
+	if !strings.Contains(fooJavac.Args["classpath"], fooKotlincHeaderClasses.String()) {
 		t.Errorf("foo classpath %v does not contain %q",
-			fooJavac.Args["classpath"], fooKotlinc.Output.String())
+			fooJavac.Args["classpath"], fooKotlincHeaderClasses.String())
 	}
 
-	if !inList(fooKotlinc.Output.String(), fooJar.Inputs.Strings()) {
+	if !inList(fooKotlincClasses.String(), fooJar.Inputs.Strings()) {
 		t.Errorf("foo jar inputs %v does not contain %q",
-			fooJar.Inputs.Strings(), fooKotlinc.Output.String())
+			fooJar.Inputs.Strings(), fooKotlincClasses.String())
 	}
 
-	fooHeaderJar := ctx.ModuleForTests("foo", "android_common").Output("turbine-combined/foo.jar")
+	if !inList(fooKotlincHeaderClasses.String(), fooHeaderJar.Inputs.Strings()) {
+		t.Errorf("foo header jar inputs %v does not contain %q",
+			fooHeaderJar.Inputs.Strings(), fooKotlincHeaderClasses.String())
+	}
+
 	bazHeaderJar := ctx.ModuleForTests("baz", "android_common").Output("turbine-combined/baz.jar")
 	barKotlinc := ctx.ModuleForTests("bar", "android_common").Rule("kotlinc")
 
diff --git a/mk2rbc/expr.go b/mk2rbc/expr.go
index dc16d1d..54bb6d1 100644
--- a/mk2rbc/expr.go
+++ b/mk2rbc/expr.go
@@ -221,11 +221,9 @@
 }
 
 func (xi *interpolateExpr) transform(transformer func(expr starlarkExpr) starlarkExpr) starlarkExpr {
-	argsCopy := make([]starlarkExpr, len(xi.args))
-	for i, arg := range xi.args {
-		argsCopy[i] = arg.transform(transformer)
+	for i := range xi.args {
+		xi.args[i] = xi.args[i].transform(transformer)
 	}
-	xi.args = argsCopy
 	if replacement := transformer(xi); replacement != nil {
 		return replacement
 	} else {
@@ -591,11 +589,9 @@
 	if cx.object != nil {
 		cx.object = cx.object.transform(transformer)
 	}
-	argsCopy := make([]starlarkExpr, len(cx.args))
-	for i, arg := range cx.args {
-		argsCopy[i] = arg.transform(transformer)
+	for i := range cx.args {
+		cx.args[i] = cx.args[i].transform(transformer)
 	}
-	cx.args = argsCopy
 	if replacement := transformer(cx); replacement != nil {
 		return replacement
 	} else {
@@ -769,3 +765,35 @@
 	x, ok := expr.(*stringLiteralExpr)
 	return ok && x.literal == ""
 }
+
+func negateExpr(expr starlarkExpr) starlarkExpr {
+	switch typedExpr := expr.(type) {
+	case *notExpr:
+		return typedExpr.expr
+	case *inExpr:
+		typedExpr.isNot = !typedExpr.isNot
+		return typedExpr
+	case *eqExpr:
+		typedExpr.isEq = !typedExpr.isEq
+		return typedExpr
+	case *binaryOpExpr:
+		switch typedExpr.op {
+		case ">":
+			typedExpr.op = "<="
+			return typedExpr
+		case "<":
+			typedExpr.op = ">="
+			return typedExpr
+		case ">=":
+			typedExpr.op = "<"
+			return typedExpr
+		case "<=":
+			typedExpr.op = ">"
+			return typedExpr
+		default:
+			return &notExpr{expr: expr}
+		}
+	default:
+		return &notExpr{expr: expr}
+	}
+}
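negateExpr pushes negation into the expression it is given: a notExpr is unwrapped, in/== tests flip their sense, comparison operators are inverted, and only the remaining cases get wrapped in a notExpr. A standalone sketch of the same idea over plain strings (the negate helper is illustrative only, not part of the change):

package main

import "fmt"

// negate mirrors the shape of negateExpr above: comparisons are negated in
// place by flipping the operator, and only expressions with no direct
// negation get wrapped in "not (...)".
func negate(lhs, op, rhs string) string {
	flipped := map[string]string{
		">": "<=", "<": ">=", ">=": "<", "<=": ">",
		"==": "!=", "!=": "==", "in": "not in",
	}
	if f, ok := flipped[op]; ok {
		return fmt.Sprintf("%s %s %s", lhs, f, rhs)
	}
	return fmt.Sprintf("not (%s %s %s)", lhs, op, rhs)
}

func main() {
	fmt.Println(negate("x", ">", "3"))     // x <= 3
	fmt.Println(negate("m", "in", "pkgs")) // m not in pkgs
	fmt.Println(negate("f(x)", "~", "y"))  // not (f(x) ~ y)
}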
diff --git a/mk2rbc/mk2rbc.go b/mk2rbc/mk2rbc.go
index 950a1e5..0942c28 100644
--- a/mk2rbc/mk2rbc.go
+++ b/mk2rbc/mk2rbc.go
@@ -32,6 +32,7 @@
 	"os"
 	"path/filepath"
 	"regexp"
+	"sort"
 	"strconv"
 	"strings"
 	"text/scanner"
@@ -86,7 +87,7 @@
 	"filter":                               &simpleCallParser{name: baseName + ".filter", returnType: starlarkTypeList},
 	"filter-out":                           &simpleCallParser{name: baseName + ".filter_out", returnType: starlarkTypeList},
 	"firstword":                            &firstOrLastwordCallParser{isLastWord: false},
-	"foreach":                              &foreachCallPaser{},
+	"foreach":                              &foreachCallParser{},
 	"if":                                   &ifCallParser{},
 	"info":                                 &makeControlFuncParser{name: baseName + ".mkinfo"},
 	"is-board-platform":                    &simpleCallParser{name: baseName + ".board_platform_is", returnType: starlarkTypeBool, addGlobals: true},
@@ -110,6 +111,7 @@
 	"require-artifacts-in-path-relaxed":    &simpleCallParser{name: baseName + ".require_artifacts_in_path_relaxed", returnType: starlarkTypeVoid},
 	// TODO(asmundak): remove it once all calls are removed from configuration makefiles. see b/183161002
 	"shell":    &shellCallParser{},
+	"sort":     &simpleCallParser{name: baseName + ".mksort", returnType: starlarkTypeList},
 	"strip":    &simpleCallParser{name: baseName + ".mkstrip", returnType: starlarkTypeString},
 	"subst":    &substCallParser{fname: "subst"},
 	"warning":  &makeControlFuncParser{name: baseName + ".mkwarning"},
@@ -117,6 +119,17 @@
 	"wildcard": &simpleCallParser{name: baseName + ".expand_wildcard", returnType: starlarkTypeList},
 }
 
+// The same as knownFunctions, but returns a []starlarkNode instead of a starlarkExpr
+var knownNodeFunctions = map[string]interface {
+	parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) []starlarkNode
+}{
+	"eval":                      &evalNodeParser{},
+	"if":                        &ifCallNodeParser{},
+	"inherit-product":           &inheritProductCallParser{loadAlways: true},
+	"inherit-product-if-exists": &inheritProductCallParser{loadAlways: false},
+	"foreach":                   &foreachCallNodeParser{},
+}
+
 // These are functions that we don't implement conversions for, but
 // we allow seeing their definitions in the product config files.
 var ignoredDefines = map[string]bool{
@@ -747,6 +760,16 @@
 func (ctx *parseContext) handleSubConfig(
 	v mkparser.Node, pathExpr starlarkExpr, loadAlways bool, processModule func(inheritedModule) starlarkNode) []starlarkNode {
 
+	// Allow seeing $(sort $(wildcard realPathExpr)) or $(wildcard realPathExpr)
+	// because those are functionally the same as not having the sort/wildcard calls.
+	if ce, ok := pathExpr.(*callExpr); ok && ce.name == "rblf.mksort" && len(ce.args) == 1 {
+		if ce2, ok2 := ce.args[0].(*callExpr); ok2 && ce2.name == "rblf.expand_wildcard" && len(ce2.args) == 1 {
+			pathExpr = ce2.args[0]
+		}
+	} else if ce2, ok2 := pathExpr.(*callExpr); ok2 && ce2.name == "rblf.expand_wildcard" && len(ce2.args) == 1 {
+		pathExpr = ce2.args[0]
+	}
+
 	// In a simple case, the name of a module to inherit/include is known statically.
 	if path, ok := maybeString(pathExpr); ok {
 		// Note that even if this directive loads a module unconditionally, a module may be
@@ -754,6 +777,7 @@
 		moduleShouldExist := loadAlways && ctx.ifNestLevel == 0
 		if strings.Contains(path, "*") {
 			if paths, err := fs.Glob(ctx.script.sourceFS, path); err == nil {
+				sort.Strings(paths)
 				result := make([]starlarkNode, 0)
 				for _, p := range paths {
 					mi := ctx.newDependentModule(p, !moduleShouldExist)
@@ -846,15 +870,19 @@
 	return res
 }
 
-func (ctx *parseContext) handleInheritModule(v mkparser.Node, args *mkparser.MakeString, loadAlways bool) []starlarkNode {
+type inheritProductCallParser struct {
+	loadAlways bool
+}
+
+func (p *inheritProductCallParser) parse(ctx *parseContext, v mkparser.Node, args *mkparser.MakeString) []starlarkNode {
 	args.TrimLeftSpaces()
 	args.TrimRightSpaces()
 	pathExpr := ctx.parseMakeString(v, args)
 	if _, ok := pathExpr.(*badExpr); ok {
 		return []starlarkNode{ctx.newBadNode(v, "Unable to parse argument to inherit")}
 	}
-	return ctx.handleSubConfig(v, pathExpr, loadAlways, func(im inheritedModule) starlarkNode {
-		return &inheritNode{im, loadAlways}
+	return ctx.handleSubConfig(v, pathExpr, p.loadAlways, func(im inheritedModule) starlarkNode {
+		return &inheritNode{im, p.loadAlways}
 	})
 }
 
@@ -873,19 +901,12 @@
 	//   $(error xxx)
 	//   $(call other-custom-functions,...)
 
-	// inherit-product(-if-exists) gets converted to a series of statements,
-	// not just a single expression like parseReference returns. So handle it
-	// separately at the beginning here.
-	if strings.HasPrefix(v.Name.Dump(), "call inherit-product,") {
-		args := v.Name.Clone()
-		args.ReplaceLiteral("call inherit-product,", "")
-		return ctx.handleInheritModule(v, args, true)
+	if name, args, ok := ctx.maybeParseFunctionCall(v, v.Name); ok {
+		if kf, ok := knownNodeFunctions[name]; ok {
+			return kf.parse(ctx, v, args)
+		}
 	}
-	if strings.HasPrefix(v.Name.Dump(), "call inherit-product-if-exists,") {
-		args := v.Name.Clone()
-		args.ReplaceLiteral("call inherit-product-if-exists,", "")
-		return ctx.handleInheritModule(v, args, false)
-	}
+
 	return []starlarkNode{&exprNode{expr: ctx.parseReference(v, v.Name)}}
 }
 
@@ -1030,49 +1051,19 @@
 		otherOperand = xLeft
 	}
 
-	not := func(expr starlarkExpr) starlarkExpr {
-		switch typedExpr := expr.(type) {
-		case *inExpr:
-			typedExpr.isNot = !typedExpr.isNot
-			return typedExpr
-		case *eqExpr:
-			typedExpr.isEq = !typedExpr.isEq
-			return typedExpr
-		case *binaryOpExpr:
-			switch typedExpr.op {
-			case ">":
-				typedExpr.op = "<="
-				return typedExpr
-			case "<":
-				typedExpr.op = ">="
-				return typedExpr
-			case ">=":
-				typedExpr.op = "<"
-				return typedExpr
-			case "<=":
-				typedExpr.op = ">"
-				return typedExpr
-			default:
-				return &notExpr{expr: expr}
-			}
-		default:
-			return &notExpr{expr: expr}
-		}
-	}
-
 	// If we've identified one of the operands as being a string literal, check
 	// for some special cases we can do to simplify the resulting expression.
 	if otherOperand != nil {
 		if stringOperand == "" {
 			if isEq {
-				return not(otherOperand)
+				return negateExpr(otherOperand)
 			} else {
 				return otherOperand
 			}
 		}
 		if stringOperand == "true" && otherOperand.typ() == starlarkTypeBool {
 			if !isEq {
-				return not(otherOperand)
+				return negateExpr(otherOperand)
 			} else {
 				return otherOperand
 			}
@@ -1228,6 +1219,37 @@
 		right: xValue, isEq: !negate}
 }
 
+func (ctx *parseContext) maybeParseFunctionCall(node mkparser.Node, ref *mkparser.MakeString) (name string, args *mkparser.MakeString, ok bool) {
+	ref.TrimLeftSpaces()
+	ref.TrimRightSpaces()
+
+	words := ref.SplitN(" ", 2)
+	if !words[0].Const() {
+		return "", nil, false
+	}
+
+	name = words[0].Dump()
+	args = mkparser.SimpleMakeString("", words[0].Pos())
+	if len(words) >= 2 {
+		args = words[1]
+	}
+	args.TrimLeftSpaces()
+	if name == "call" {
+		words = args.SplitN(",", 2)
+		if words[0].Empty() || !words[0].Const() {
+			return "", nil, false
+		}
+		name = words[0].Dump()
+		if len(words) < 2 {
+			args = &mkparser.MakeString{}
+		} else {
+			args = words[1]
+		}
+	}
+	ok = true
+	return
+}
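maybeParseFunctionCall normalizes both spellings of a make function invocation, "$(name args)" and "$(call name,args)", to the same (name, args) pair so that knownFunctions and knownNodeFunctions can share one lookup. A rough standalone sketch over plain strings (it assumes the reference contains no nested variable references, which the real code handles via MakeString):

package main

import (
	"fmt"
	"strings"
)

// splitCall mirrors the splitting order used by maybeParseFunctionCall:
// first split the name from the arguments on the first space, then, for
// "call", split the real function name from its arguments on the first comma.
func splitCall(ref string) (name, args string) {
	words := strings.SplitN(ref, " ", 2)
	name = words[0]
	if len(words) == 2 {
		args = strings.TrimLeft(words[1], " ")
	}
	if name == "call" {
		callWords := strings.SplitN(args, ",", 2)
		name = callWords[0]
		args = ""
		if len(callWords) == 2 {
			args = strings.TrimLeft(callWords[1], " ")
		}
	}
	return name, args
}

func main() {
	fmt.Println(splitCall("call inherit-product, */font.mk")) // inherit-product */font.mk
	fmt.Println(splitCall("sort b a c"))                      // sort b a c
}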
+
 // parses $(...), returning an expression
 func (ctx *parseContext) parseReference(node mkparser.Node, ref *mkparser.MakeString) starlarkExpr {
 	ref.TrimLeftSpaces()
@@ -1242,7 +1264,7 @@
 
 	// If it is a single word, it can be a simple variable
 	// reference or a function call
-	if len(words) == 1 && !isMakeControlFunc(refDump) && refDump != "shell" {
+	if len(words) == 1 && !isMakeControlFunc(refDump) && refDump != "shell" && refDump != "eval" {
 		if strings.HasPrefix(refDump, soongNsPrefix) {
 			// TODO (asmundak): if we find many, maybe handle them.
 			return ctx.newBadExpr(node, "SOONG_CONFIG_ variables cannot be referenced, use soong_config_get instead: %s", refDump)
@@ -1281,28 +1303,14 @@
 		return ctx.newBadExpr(node, "unknown variable %s", refDump)
 	}
 
-	expr := &callExpr{name: words[0].Dump(), returnType: starlarkTypeUnknown}
-	args := mkparser.SimpleMakeString("", words[0].Pos())
-	if len(words) >= 2 {
-		args = words[1]
-	}
-	args.TrimLeftSpaces()
-	if expr.name == "call" {
-		words = args.SplitN(",", 2)
-		if words[0].Empty() || !words[0].Const() {
-			return ctx.newBadExpr(node, "cannot handle %s", refDump)
-		}
-		expr.name = words[0].Dump()
-		if len(words) < 2 {
-			args = &mkparser.MakeString{}
+	if name, args, ok := ctx.maybeParseFunctionCall(node, ref); ok {
+		if kf, found := knownFunctions[name]; found {
+			return kf.parse(ctx, node, args)
 		} else {
-			args = words[1]
+			return ctx.newBadExpr(node, "cannot handle invoking %s", name)
 		}
-	}
-	if kf, found := knownFunctions[expr.name]; found {
-		return kf.parse(ctx, node, args)
 	} else {
-		return ctx.newBadExpr(node, "cannot handle invoking %s", expr.name)
+		return ctx.newBadExpr(node, "cannot handle %s", refDump)
 	}
 }
 
@@ -1486,9 +1494,46 @@
 	}
 }
 
-type foreachCallPaser struct{}
+type ifCallNodeParser struct{}
 
-func (p *foreachCallPaser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) starlarkExpr {
+func (p *ifCallNodeParser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) []starlarkNode {
+	words := args.Split(",")
+	if len(words) != 2 && len(words) != 3 {
+		return []starlarkNode{ctx.newBadNode(node, "if function should have 2 or 3 arguments, found "+strconv.Itoa(len(words)))}
+	}
+
+	ifn := &ifNode{expr: ctx.parseMakeString(node, words[0])}
+	cases := []*switchCase{
+		{
+			gate:  ifn,
+			nodes: ctx.parseNodeMakeString(node, words[1]),
+		},
+	}
+	if len(words) == 3 {
+		cases = append(cases, &switchCase{
+			gate:  &elseNode{},
+			nodes: ctx.parseNodeMakeString(node, words[2]),
+		})
+	}
+	if len(cases) == 2 {
+		if len(cases[1].nodes) == 0 {
+			// Remove else branch if it has no contents
+			cases = cases[:1]
+		} else if len(cases[0].nodes) == 0 {
+			// If the if branch has no contents but the else does,
+			// move them to the if and negate its condition
+			ifn.expr = negateExpr(ifn.expr)
+			cases[0].nodes = cases[1].nodes
+			cases = cases[:1]
+		}
+	}
+
+	return []starlarkNode{&switchNode{ssCases: cases}}
+}
+
+type foreachCallParser struct{}
+
+func (p *foreachCallParser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) starlarkExpr {
 	words := args.Split(",")
 	if len(words) != 3 {
 		return ctx.newBadExpr(node, "foreach function should have 3 arguments, found "+strconv.Itoa(len(words)))
@@ -1520,6 +1565,71 @@
 	}
 }
 
+func transformNode(node starlarkNode, transformer func(expr starlarkExpr) starlarkExpr) {
+	switch a := node.(type) {
+	case *ifNode:
+		a.expr = a.expr.transform(transformer)
+	case *switchCase:
+		transformNode(a.gate, transformer)
+		for _, n := range a.nodes {
+			transformNode(n, transformer)
+		}
+	case *switchNode:
+		for _, n := range a.ssCases {
+			transformNode(n, transformer)
+		}
+	case *exprNode:
+		a.expr = a.expr.transform(transformer)
+	case *assignmentNode:
+		a.value = a.value.transform(transformer)
+	case *foreachNode:
+		a.list = a.list.transform(transformer)
+		for _, n := range a.actions {
+			transformNode(n, transformer)
+		}
+	}
+}
+
+type foreachCallNodeParser struct{}
+
+func (p *foreachCallNodeParser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) []starlarkNode {
+	words := args.Split(",")
+	if len(words) != 3 {
+		return []starlarkNode{ctx.newBadNode(node, "foreach function should have 3 arguments, found "+strconv.Itoa(len(words)))}
+	}
+	if !words[0].Const() || words[0].Empty() || !identifierFullMatchRegex.MatchString(words[0].Strings[0]) {
+		return []starlarkNode{ctx.newBadNode(node, "first argument to foreach function must be a simple string identifier")}
+	}
+
+	loopVarName := words[0].Strings[0]
+
+	list := ctx.parseMakeString(node, words[1])
+	if list.typ() != starlarkTypeList {
+		list = &callExpr{
+			name:       baseName + ".words",
+			returnType: starlarkTypeList,
+			args:       []starlarkExpr{list},
+		}
+	}
+
+	actions := ctx.parseNodeMakeString(node, words[2])
+	// TODO(colefaust): Replace transforming code with something more elegant
+	for _, action := range actions {
+		transformNode(action, func(expr starlarkExpr) starlarkExpr {
+			if varRefExpr, ok := expr.(*variableRefExpr); ok && varRefExpr.ref.name() == loopVarName {
+				return &identifierExpr{loopVarName}
+			}
+			return nil
+		})
+	}
+
+	return []starlarkNode{&foreachNode{
+		varName: loopVarName,
+		list:    list,
+		actions: actions,
+	}}
+}
+
 type wordCallParser struct{}
 
 func (p *wordCallParser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) starlarkExpr {
@@ -1630,6 +1740,31 @@
 	}
 }
 
+type evalNodeParser struct{}
+
+func (p *evalNodeParser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) []starlarkNode {
+	parser := mkparser.NewParser("Eval expression", strings.NewReader(args.Dump()))
+	nodes, errs := parser.Parse()
+	if errs != nil {
+		return []starlarkNode{ctx.newBadNode(node, "Unable to parse eval statement")}
+	}
+
+	if len(nodes) == 0 {
+		return []starlarkNode{}
+	} else if len(nodes) == 1 {
+		switch n := nodes[0].(type) {
+		case *mkparser.Assignment:
+			if n.Name.Const() {
+				return ctx.handleAssignment(n)
+			}
+		case *mkparser.Comment:
+			return []starlarkNode{&commentNode{strings.TrimSpace("#" + n.Comment)}}
+		}
+	}
+
+	return []starlarkNode{ctx.newBadNode(node, "Eval expression too complex; only assignments and comments are supported")}
+}
+
 func (ctx *parseContext) parseMakeString(node mkparser.Node, mk *mkparser.MakeString) starlarkExpr {
 	if mk.Const() {
 		return &stringLiteralExpr{mk.Dump()}
@@ -1654,6 +1789,16 @@
 	return NewInterpolateExpr(parts)
 }
 
+func (ctx *parseContext) parseNodeMakeString(node mkparser.Node, mk *mkparser.MakeString) []starlarkNode {
+	// Discard any constant values in the make string, as they would be top level
+	// string literals and do nothing.
+	result := make([]starlarkNode, 0, len(mk.Variables))
+	for i := range mk.Variables {
+		result = append(result, ctx.handleVariable(&mk.Variables[i])...)
+	}
+	return result
+}
+
 // Handles the statements whose treatment is the same in all contexts: comment,
 // assignment, variable (which is a macro call in reality) and all constructs that
 // do not handle in any context ('define directive and any unrecognized stuff).
@@ -1698,6 +1843,7 @@
 	if result == nil {
 		result = []starlarkNode{}
 	}
+
 	return result
 }
 
diff --git a/mk2rbc/mk2rbc_test.go b/mk2rbc/mk2rbc_test.go
index 2b447e3..9c2b392 100644
--- a/mk2rbc/mk2rbc_test.go
+++ b/mk2rbc/mk2rbc_test.go
@@ -197,15 +197,31 @@
 		mkname: "path/product.mk",
 		in: `
 $(call inherit-product, */font.mk)
+$(call inherit-product, $(sort $(wildcard */font.mk)))
+$(call inherit-product, $(wildcard */font.mk))
+
+include */font.mk
+include $(sort $(wildcard */font.mk))
+include $(wildcard */font.mk)
 `,
 		expected: `load("//build/make/core:product_config.rbc", "rblf")
-load("//foo:font.star", _font_init = "init")
-load("//bar:font.star", _font1_init = "init")
+load("//bar:font.star", _font_init = "init")
+load("//foo:font.star", _font1_init = "init")
 
 def init(g, handle):
   cfg = rblf.cfg(handle)
-  rblf.inherit(handle, "foo/font", _font_init)
-  rblf.inherit(handle, "bar/font", _font1_init)
+  rblf.inherit(handle, "bar/font", _font_init)
+  rblf.inherit(handle, "foo/font", _font1_init)
+  rblf.inherit(handle, "bar/font", _font_init)
+  rblf.inherit(handle, "foo/font", _font1_init)
+  rblf.inherit(handle, "bar/font", _font_init)
+  rblf.inherit(handle, "foo/font", _font1_init)
+  _font_init(g, handle)
+  _font1_init(g, handle)
+  _font_init(g, handle)
+  _font1_init(g, handle)
+  _font_init(g, handle)
+  _font1_init(g, handle)
 `,
 	},
 	{
@@ -1022,12 +1038,13 @@
 `,
 	},
 	{
-		desc:   "strip function",
+		desc:   "strip/sort functions",
 		mkname: "product.mk",
 		in: `
 ifeq ($(filter hwaddress,$(PRODUCT_PACKAGES)),)
    PRODUCT_PACKAGES := $(strip $(PRODUCT_PACKAGES) hwaddress)
 endif
+MY_VAR := $(sort b a c)
 `,
 		expected: `load("//build/make/core:product_config.rbc", "rblf")
 
@@ -1036,6 +1053,7 @@
   if "hwaddress" not in cfg.get("PRODUCT_PACKAGES", []):
     rblf.setdefault(handle, "PRODUCT_PACKAGES")
     cfg["PRODUCT_PACKAGES"] = (rblf.mkstrip("%s hwaddress" % " ".join(cfg.get("PRODUCT_PACKAGES", [])))).split()
+  g["MY_VAR"] = rblf.mksort("b a c")
 `,
 	},
 	{
@@ -1313,6 +1331,11 @@
 FOREACH_WITH_IF := $(foreach module,\
   $(BOOT_KERNEL_MODULES_LIST),\
   $(if $(filter $(module),foo.ko),,$(error module "$(module)" has an error!)))
+
+# Same as above, but not assigning it to a variable allows it to be converted to statements
+$(foreach module,\
+  $(BOOT_KERNEL_MODULES_LIST),\
+  $(if $(filter $(module),foo.ko),,$(error module "$(module)" has an error!)))
 `,
 		expected: `load("//build/make/core:product_config.rbc", "rblf")
 
@@ -1324,6 +1347,10 @@
   g["BOOT_KERNEL_MODULES_LIST"] += ["bar.ko"]
   g["BOOT_KERNEL_MODULES_FILTER_2"] = ["%%/%s" % m for m in g["BOOT_KERNEL_MODULES_LIST"]]
   g["FOREACH_WITH_IF"] = [("" if rblf.filter(module, "foo.ko") else rblf.mkerror("product.mk", "module \"%s\" has an error!" % module)) for module in g["BOOT_KERNEL_MODULES_LIST"]]
+  # Same as above, but not assigning it to a variable allows it to be converted to statements
+  for module in g["BOOT_KERNEL_MODULES_LIST"]:
+    if not rblf.filter(module, "foo.ko"):
+      rblf.mkerror("product.mk", "module \"%s\" has an error!" % module)
 `,
 	},
 	{
@@ -1474,6 +1501,34 @@
   
 `,
 	},
+	{
+		desc:   "Evals",
+		mkname: "product.mk",
+		in: `
+$(eval)
+$(eval MY_VAR := foo)
+$(eval # This is a test of eval functions)
+$(eval $(TOO_COMPLICATED) := bar)
+$(foreach x,$(MY_LIST_VAR), \
+  $(eval PRODUCT_COPY_FILES += foo/bar/$(x):$(TARGET_COPY_OUT_VENDOR)/etc/$(x)) \
+  $(if $(MY_OTHER_VAR),$(eval PRODUCT_COPY_FILES += $(MY_OTHER_VAR):foo/bar/$(x))) \
+)
+
+`,
+		expected: `load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+  g["MY_VAR"] = "foo"
+  # This is a test of eval functions
+  rblf.mk2rbc_error("product.mk:5", "Eval expression too complex; only assignments and comments are supported")
+  for x in rblf.words(g.get("MY_LIST_VAR", "")):
+    rblf.setdefault(handle, "PRODUCT_COPY_FILES")
+    cfg["PRODUCT_COPY_FILES"] += ("foo/bar/%s:%s/etc/%s" % (x, g.get("TARGET_COPY_OUT_VENDOR", ""), x)).split()
+    if g.get("MY_OTHER_VAR", ""):
+      cfg["PRODUCT_COPY_FILES"] += ("%s:foo/bar/%s" % (g.get("MY_OTHER_VAR", ""), x)).split()
+`,
+	},
 }
 
 var known_variables = []struct {
diff --git a/mk2rbc/node.go b/mk2rbc/node.go
index 9d5af91..c0c4c98 100644
--- a/mk2rbc/node.go
+++ b/mk2rbc/node.go
@@ -294,3 +294,28 @@
 		ssCase.emit(gctx)
 	}
 }
+
+type foreachNode struct {
+	varName string
+	list    starlarkExpr
+	actions []starlarkNode
+}
+
+func (f *foreachNode) emit(gctx *generationContext) {
+	gctx.newLine()
+	gctx.writef("for %s in ", f.varName)
+	f.list.emit(gctx)
+	gctx.write(":")
+	gctx.indentLevel++
+	hasStatements := false
+	for _, a := range f.actions {
+		if _, ok := a.(*commentNode); !ok {
+			hasStatements = true
+		}
+		a.emit(gctx)
+	}
+	if !hasStatements {
+		gctx.emitPass()
+	}
+	gctx.indentLevel--
+}
diff --git a/provenance/Android.bp b/provenance/Android.bp
new file mode 100644
index 0000000..6fd67aa
--- /dev/null
+++ b/provenance/Android.bp
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+bootstrap_go_package {
+    name: "soong-provenance",
+    pkgPath: "android/soong/provenance",
+    srcs: [
+        "provenance_singleton.go",
+    ],
+    deps: [
+        "soong-android",
+    ],
+    testSrcs: [
+        "provenance_singleton_test.go",
+    ],
+    pluginFor: [
+        "soong_build",
+    ],
+}
diff --git a/provenance/provenance_metadata_proto/Android.bp b/provenance/provenance_metadata_proto/Android.bp
new file mode 100644
index 0000000..7fc47a9
--- /dev/null
+++ b/provenance/provenance_metadata_proto/Android.bp
@@ -0,0 +1,34 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+python_library_host {
+    name: "provenance_metadata_proto",
+    version: {
+        py3: {
+            enabled: true,
+        },
+    },
+    srcs: [
+        "provenance_metadata.proto",
+    ],
+    proto: {
+        canonical_path_from_root: false,
+    },
+}
diff --git a/provenance/provenance_metadata_proto/provenance_metadata.proto b/provenance/provenance_metadata_proto/provenance_metadata.proto
new file mode 100644
index 0000000..f42aba7
--- /dev/null
+++ b/provenance/provenance_metadata_proto/provenance_metadata.proto
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+syntax = "proto3";
+
+package provenance_metadata_proto;
+option go_package = "android/soong/provenance/provenance_metadata_proto";
+
+// Provenance metadata of artifacts.
+message ProvenanceMetadata {
+  // Name of the module/target that creates the artifact.
+  // It is either a Soong module name or Bazel target label.
+  string module_name = 1;
+
+  // The path to the prebuilt artifacts, which is relative to the source tree
+  // directory. For example, “prebuilts/runtime/mainline/i18n/apex/com.android.i18n-arm.apex”.
+  string artifact_path = 2;
+
+  // The SHA256 hash of the artifact.
+  string artifact_sha256 = 3;
+
+  // The install path of the artifact in filesystem images.
+  // This is the absolute path of the artifact on the device.
+  string artifact_install_path = 4;
+
+  // Path of the attestation file of a prebuilt artifact, which is relative to
+  // the source tree directory. This is for prebuilt artifacts which have
+  // corresponding attestation files checked in the source tree.
+  string attestation_path = 5;
+}
+
+message ProvenanceMetaDataList {
+  repeated ProvenanceMetadata metadata = 1;
+}
\ No newline at end of file
diff --git a/provenance/provenance_singleton.go b/provenance/provenance_singleton.go
new file mode 100644
index 0000000..ae96e1f
--- /dev/null
+++ b/provenance/provenance_singleton.go
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package provenance
+
+import (
+	"android/soong/android"
+	"github.com/google/blueprint"
+)
+
+var (
+	pctx = android.NewPackageContext("android/soong/provenance")
+	rule = pctx.HostBinToolVariable("gen_provenance_metadata", "gen_provenance_metadata")
+
+	genProvenanceMetaData = pctx.AndroidStaticRule("genProvenanceMetaData",
+		blueprint.RuleParams{
+			Command: `rm -rf "$out" && ` +
+				`${gen_provenance_metadata} --module_name=${module_name} ` +
+				`--artifact_path=$in --install_path=${install_path} --metadata_path=$out`,
+			CommandDeps: []string{"${gen_provenance_metadata}"},
+		}, "module_name", "install_path")
+
+	mergeProvenanceMetaData = pctx.AndroidStaticRule("mergeProvenanceMetaData",
+		blueprint.RuleParams{
+			Command: `rm -rf $out $out.temp && ` +
+				`echo -e "# proto-file: build/soong/provenance/proto/provenance_metadata.proto\n# proto-message: ProvenanceMetaDataList" > $out && ` +
+				`touch $out.temp && cat $out.temp $in | grep -v "^#.*" >> $out && rm -rf $out.temp`,
+		})
+)
+
+type ProvenanceMetadata interface {
+	ProvenanceMetaDataFile() android.OutputPath
+}
+
+func init() {
+	RegisterProvenanceSingleton(android.InitRegistrationContext)
+}
+
+func RegisterProvenanceSingleton(ctx android.RegistrationContext) {
+	ctx.RegisterSingletonType("provenance_metadata_singleton", provenanceInfoSingletonFactory)
+}
+
+var PrepareForTestWithProvenanceSingleton = android.FixtureRegisterWithContext(RegisterProvenanceSingleton)
+
+func provenanceInfoSingletonFactory() android.Singleton {
+	return &provenanceInfoSingleton{}
+}
+
+type provenanceInfoSingleton struct {
+}
+
+func (b *provenanceInfoSingleton) GenerateBuildActions(context android.SingletonContext) {
+	allMetaDataFiles := make([]android.Path, 0)
+	context.VisitAllModulesIf(moduleFilter, func(module android.Module) {
+		if p, ok := module.(ProvenanceMetadata); ok {
+			allMetaDataFiles = append(allMetaDataFiles, p.ProvenanceMetaDataFile())
+		}
+	})
+	mergedMetaDataFile := android.PathForOutput(context, "provenance_metadata.textproto")
+	context.Build(pctx, android.BuildParams{
+		Rule:        mergeProvenanceMetaData,
+		Description: "merge provenance metadata",
+		Inputs:      allMetaDataFiles,
+		Output:      mergedMetaDataFile,
+	})
+
+	context.Build(pctx, android.BuildParams{
+		Rule:        blueprint.Phony,
+		Description: "phony rule of merge provenance metadata",
+		Inputs:      []android.Path{mergedMetaDataFile},
+		Output:      android.PathForPhony(context, "provenance_metadata"),
+	})
+}
+
+func moduleFilter(module android.Module) bool {
+	if !module.Enabled() || module.IsSkipInstall() {
+		return false
+	}
+	if p, ok := module.(ProvenanceMetadata); ok {
+		return p.ProvenanceMetaDataFile().String() != ""
+	}
+	return false
+}
+
+func GenerateArtifactProvenanceMetaData(ctx android.ModuleContext, artifactPath android.Path, installedFile android.InstallPath) android.OutputPath {
+	onDevicePathOfInstalledFile := android.InstallPathToOnDevicePath(ctx, installedFile)
+	artifactMetaDataFile := android.PathForIntermediates(ctx, "provenance_metadata", ctx.ModuleDir(), ctx.ModuleName(), "provenance_metadata.textproto")
+	ctx.Build(pctx, android.BuildParams{
+		Rule:        genProvenanceMetaData,
+		Description: "generate artifact provenance metadata",
+		Inputs:      []android.Path{artifactPath},
+		Output:      artifactMetaDataFile,
+		Args: map[string]string{
+			"module_name":  ctx.ModuleName(),
+			"install_path": onDevicePathOfInstalledFile,
+		}})
+
+	return artifactMetaDataFile
+}
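For reference, expanding the genProvenanceMetaData command template with the values asserted in the app_import tests gives the shape of the generated build command. The sketch below performs that expansion with the standard library; the tool variable is shown bare here, whereas in a real build it expands to the host binary path of gen_provenance_metadata.

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Command template from the genProvenanceMetaData rule above.
	template := `rm -rf "$out" && ` +
		`${gen_provenance_metadata} --module_name=${module_name} ` +
		`--artifact_path=$in --install_path=${install_path} --metadata_path=$out`

	// Values taken from the app_import_test assertions for module "foo".
	expanded := strings.NewReplacer(
		"${gen_provenance_metadata}", "gen_provenance_metadata",
		"${module_name}", "foo",
		"${install_path}", "/system/app/foo/foo.apk",
		"$in", "prebuilts/apk/app.apk",
		"$out", "out/soong/.intermediates/provenance_metadata/foo/provenance_metadata.textproto",
	).Replace(template)

	fmt.Println(expanded)
}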
diff --git a/provenance/provenance_singleton_test.go b/provenance/provenance_singleton_test.go
new file mode 100644
index 0000000..0f1eae2
--- /dev/null
+++ b/provenance/provenance_singleton_test.go
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package provenance
+
+import (
+	"strings"
+	"testing"
+
+	"android/soong/android"
+)
+
+func TestProvenanceSingleton(t *testing.T) {
+	result := android.GroupFixturePreparers(
+		PrepareForTestWithProvenanceSingleton,
+		android.PrepareForTestWithAndroidMk).RunTestWithBp(t, "")
+
+	outputs := result.SingletonForTests("provenance_metadata_singleton").AllOutputs()
+	for _, output := range outputs {
+		testingBuildParam := result.SingletonForTests("provenance_metadata_singleton").Output(output)
+		switch {
+		case strings.Contains(output, "soong/provenance_metadata.textproto"):
+			android.AssertStringEquals(t, "Invalid build rule", "android/soong/provenance.mergeProvenanceMetaData", testingBuildParam.Rule.String())
+			android.AssertIntEquals(t, "Invalid input", len(testingBuildParam.Inputs), 0)
+			android.AssertStringDoesContain(t, "Invalid output path", output, "soong/provenance_metadata.textproto")
+			android.AssertIntEquals(t, "Invalid args", len(testingBuildParam.Args), 0)
+
+		case strings.HasSuffix(output, "provenance_metadata"):
+			android.AssertStringEquals(t, "Invalid build rule", "<builtin>:phony", testingBuildParam.Rule.String())
+			android.AssertStringEquals(t, "Invalid input", testingBuildParam.Inputs[0].String(), "out/soong/provenance_metadata.textproto")
+			android.AssertStringEquals(t, "Invalid output path", output, "provenance_metadata")
+			android.AssertIntEquals(t, "Invalid args", len(testingBuildParam.Args), 0)
+		}
+	}
+}
diff --git a/provenance/tools/Android.bp b/provenance/tools/Android.bp
new file mode 100644
index 0000000..0eddd76
--- /dev/null
+++ b/provenance/tools/Android.bp
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+python_binary_host {
+    name: "gen_provenance_metadata",
+    srcs: [
+        "gen_provenance_metadata.py",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+    libs: [
+        "provenance_metadata_proto",
+        "libprotobuf-python",
+    ],
+}
+
+python_test_host {
+    name: "gen_provenance_metadata_test",
+    main: "gen_provenance_metadata_test.py",
+    srcs: [
+        "gen_provenance_metadata_test.py",
+    ],
+    data: [
+        ":gen_provenance_metadata",
+    ],
+    libs: [
+        "provenance_metadata_proto",
+        "libprotobuf-python",
+    ],
+    test_suites: ["general-tests"],
+}
diff --git a/provenance/tools/gen_provenance_metadata.py b/provenance/tools/gen_provenance_metadata.py
new file mode 100644
index 0000000..b33f911
--- /dev/null
+++ b/provenance/tools/gen_provenance_metadata.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import hashlib
+import sys
+
+import google.protobuf.text_format as text_format
+import provenance_metadata_pb2
+
+def Log(*info):
+  if args.verbose:
+    for i in info:
+      print(i)
+
+def ParseArgs(argv):
+  parser = argparse.ArgumentParser(description='Create provenance metadata for a prebuilt artifact')
+  parser.add_argument('-v', '--verbose', action='store_true', help='Print more information in execution')
+  parser.add_argument('--module_name', help='Module name', required=True)
+  parser.add_argument('--artifact_path', help='Relative path of the prebuilt artifact in source tree', required=True)
+  parser.add_argument('--install_path', help='Absolute path of the artifact in the filesystem images', required=True)
+  parser.add_argument('--metadata_path', help='Path of the provenance metadata file created for the artifact', required=True)
+  return parser.parse_args(argv)
+
+def main(argv):
+  global args
+  args = ParseArgs(argv)
+  Log("Args:", vars(args))
+
+  provenance_metadata = provenance_metadata_pb2.ProvenanceMetadata()
+  provenance_metadata.module_name = args.module_name
+  provenance_metadata.artifact_path = args.artifact_path
+  provenance_metadata.artifact_install_path = args.install_path
+
+  Log("Generating SHA256 hash")
+  h = hashlib.sha256()
+  with open(args.artifact_path, "rb") as artifact_file:
+    h.update(artifact_file.read())
+  provenance_metadata.artifact_sha256 = h.hexdigest()
+
+  text_proto = [
+      "# proto-file: build/soong/provenance/proto/provenance_metadata.proto",
+      "# proto-message: ProvenanceMetaData",
+      "",
+      text_format.MessageToString(provenance_metadata)
+  ]
+  with open(args.metadata_path, "wt") as metadata_file:
+    file_content = "\n".join(text_proto)
+    Log("Writing provenance metadata in textproto:", file_content)
+    metadata_file.write(file_content)
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/provenance/tools/gen_provenance_metadata_test.py b/provenance/tools/gen_provenance_metadata_test.py
new file mode 100644
index 0000000..2fc04bf
--- /dev/null
+++ b/provenance/tools/gen_provenance_metadata_test.py
@@ -0,0 +1,125 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import hashlib
+import logging
+import os
+import subprocess
+import tempfile
+import unittest
+
+import google.protobuf.text_format as text_format
+import provenance_metadata_pb2
+
+logger = logging.getLogger(__name__)
+
+def run(args, verbose=None, **kwargs):
+  """Creates and returns a subprocess.Popen object.
+
+  Args:
+    args: The command represented as a list of strings.
+    verbose: Whether the commands should be shown. Defaults to the global
+        verbosity if unspecified.
+    kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
+        stdin, etc. stdout and stderr will default to subprocess.PIPE and
+        subprocess.STDOUT respectively unless caller specifies any of them.
+        universal_newlines will default to True, as most callers expect
+        string output.
+
+  Returns:
+    A subprocess.Popen object.
+  """
+  if 'stdout' not in kwargs and 'stderr' not in kwargs:
+    kwargs['stdout'] = subprocess.PIPE
+    kwargs['stderr'] = subprocess.STDOUT
+  if 'universal_newlines' not in kwargs:
+    kwargs['universal_newlines'] = True
+  if verbose:
+    logger.info("  Running: \"%s\"", " ".join(args))
+  return subprocess.Popen(args, **kwargs)
+
+
+def run_and_check_output(args, verbose=None, **kwargs):
+  """Runs the given command and returns the output.
+
+  Args:
+    args: The command represented as a list of strings.
+    verbose: Whether the commands should be shown. Defaults to the global
+        verbosity if unspecified.
+    kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
+        stdin, etc. stdout and stderr will default to subprocess.PIPE and
+        subprocess.STDOUT respectively unless caller specifies any of them.
+
+  Returns:
+    The output string.
+
+  Raises:
+    RuntimeError: On non-zero exit from the command.
+  """
+  proc = run(args, verbose=verbose, **kwargs)
+  output, _ = proc.communicate()
+  if output is None:
+    output = ""
+  if verbose:
+    logger.info("%s", output.rstrip())
+  if proc.returncode != 0:
+    raise RuntimeError(
+        "Failed to run command '{}' (exit code {}):\n{}".format(
+            args, proc.returncode, output))
+  return output
+
+def run_host_command(args, verbose=None, **kwargs):
+  host_build_top = os.environ.get("ANDROID_BUILD_TOP")
+  if host_build_top:
+    host_command_dir = os.path.join(host_build_top, "out/host/linux-x86/bin")
+    args[0] = os.path.join(host_command_dir, args[0])
+  return run_and_check_output(args, verbose, **kwargs)
+
+def sha256(s):
+  h = hashlib.sha256()
+  h.update(bytearray(s, 'utf-8'))
+  return h.hexdigest()
+
+class ProvenanceMetaDataToolTest(unittest.TestCase):
+
+  def test_gen_provenance_metadata(self):
+    artifact_content = "test artifact"
+    artifact_file = tempfile.mktemp()
+    with open(artifact_file,"wt") as f:
+      f.write(artifact_content)
+    metadata_file = tempfile.mktemp()
+    cmd = ["gen_provenance_metadata"]
+    cmd.extend(["--module_name", "a"])
+    cmd.extend(["--artifact_path", artifact_file])
+    cmd.extend(["--install_path", "b"])
+    cmd.extend(["--metadata_path", metadata_file])
+    output = run_host_command(cmd)
+    self.assertEqual(output, "")
+
+    with open(metadata_file,"rt") as f:
+      data = f.read()
+      provenance_metadata = provenance_metadata_pb2.ProvenanceMetadata()
+      text_format.Parse(data, provenance_metadata)
+      self.assertEqual(provenance_metadata.module_name, "a")
+      self.assertEqual(provenance_metadata.artifact_path, artifact_file)
+      self.assertEqual(provenance_metadata.artifact_install_path, "b")
+      self.assertEqual(provenance_metadata.artifact_sha256, sha256(artifact_content))
+
+    os.remove(artifact_file)
+    os.remove(metadata_file)
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
\ No newline at end of file
diff --git a/rust/config/allowed_list.go b/rust/config/allowed_list.go
index 30700dd..802e1da 100644
--- a/rust/config/allowed_list.go
+++ b/rust/config/allowed_list.go
@@ -28,6 +28,7 @@
 		"prebuilts/rust",
 		"system/core/debuggerd/rust",
 		"system/core/libstats/pull_rust",
+		"system/core/trusty/libtrusty-rs",
 		"system/extras/profcollectd",
 		"system/extras/simpleperf",
 		"system/hardware/interfaces/keystore2",
diff --git a/scripts/hiddenapi/verify_overlaps.py b/scripts/hiddenapi/verify_overlaps.py
index e5214df..940532b 100755
--- a/scripts/hiddenapi/verify_overlaps.py
+++ b/scripts/hiddenapi/verify_overlaps.py
@@ -23,13 +23,13 @@
 from signature_trie import signature_trie
 
 
-def dict_reader(csvfile):
+def dict_reader(csv_file):
     return csv.DictReader(
-        csvfile, delimiter=",", quotechar="|", fieldnames=["signature"])
+        csv_file, delimiter=",", quotechar="|", fieldnames=["signature"])
 
 
 def read_flag_trie_from_file(file):
-    with open(file, "r") as stream:
+    with open(file, "r", encoding="utf8") as stream:
         return read_flag_trie_from_stream(stream)
 
 
@@ -43,24 +43,24 @@
 
 
 def extract_subset_from_monolithic_flags_as_dict_from_file(
-        monolithicTrie, patternsFile):
+        monolithic_trie, patterns_file):
     """Extract a subset of flags from the dict of monolithic flags.
 
-    :param monolithicFlagsDict: the dict containing all the monolithic flags.
-    :param patternsFile: a file containing a list of signature patterns that
+    :param monolithic_trie: the trie containing all the monolithic flags.
+    :param patterns_file: a file containing a list of signature patterns that
     define the subset.
     :return: the dict from signature to row.
     """
-    with open(patternsFile, "r") as stream:
+    with open(patterns_file, "r", encoding="utf8") as stream:
         return extract_subset_from_monolithic_flags_as_dict_from_stream(
-            monolithicTrie, stream)
+            monolithic_trie, stream)
 
 
 def extract_subset_from_monolithic_flags_as_dict_from_stream(
-        monolithicTrie, stream):
+        monolithic_trie, stream):
     """Extract a subset of flags from the trie of monolithic flags.
 
-    :param monolithicTrie: the trie containing all the monolithic flags.
+    :param monolithic_trie: the trie containing all the monolithic flags.
     :param stream: a stream containing a list of signature patterns that define
     the subset.
     :return: the dict from signature to row.
@@ -68,7 +68,7 @@
     dict_signature_to_row = {}
     for pattern in stream:
         pattern = pattern.rstrip()
-        rows = monolithicTrie.get_matching_rows(pattern)
+        rows = monolithic_trie.get_matching_rows(pattern)
         for row in rows:
             signature = row["signature"]
             dict_signature_to_row[signature] = row
@@ -93,86 +93,90 @@
     return dict_signature_to_row
 
 
-def read_signature_csv_from_file_as_dict(csvFile):
+def read_signature_csv_from_file_as_dict(csv_file):
     """Read the csvFile into a dict.
 
     The first column is assumed to be the signature and used as the
     key.
 
     The whole row is stored as the value.
-    :param csvFile: the csv file to read
+    :param csv_file: the csv file to read
     :return: the dict from signature to row.
     """
-    with open(csvFile, "r") as f:
+    with open(csv_file, "r", encoding="utf8") as f:
         return read_signature_csv_from_stream_as_dict(f)
 
 
-def compare_signature_flags(monolithicFlagsDict, modularFlagsDict):
+def compare_signature_flags(monolithic_flags_dict, modular_flags_dict):
     """Compare the signature flags between the two dicts.
 
-    :param monolithicFlagsDict: the dict containing the subset of the monolithic
-    flags that should be equal to the modular flags.
-    :param modularFlagsDict:the dict containing the flags produced by a single
+    :param monolithic_flags_dict: the dict containing the subset of the
+    monolithic flags that should be equal to the modular flags.
+    :param modular_flags_dict: the dict containing the flags produced by a single
     bootclasspath_fragment module.
     :return: list of mismatches; each mismatch is a tuple where the first item
     is the signature, and the second and third items are lists of the flags from
     the modular dict and the monolithic dict, respectively.
     """
-    mismatchingSignatures = []
+    mismatching_signatures = []
     # Create a sorted set of all the signatures from both the monolithic and
     # modular dicts.
-    allSignatures = sorted(
-        set(chain(monolithicFlagsDict.keys(), modularFlagsDict.keys())))
-    for signature in allSignatures:
-        monolithicRow = monolithicFlagsDict.get(signature, {})
-        monolithicFlags = monolithicRow.get(None, [])
-        if signature in modularFlagsDict:
-            modularRow = modularFlagsDict.get(signature, {})
-            modularFlags = modularRow.get(None, [])
+    all_signatures = sorted(
+        set(chain(monolithic_flags_dict.keys(), modular_flags_dict.keys())))
+    for signature in all_signatures:
+        monolithic_row = monolithic_flags_dict.get(signature, {})
+        monolithic_flags = monolithic_row.get(None, [])
+        if signature in modular_flags_dict:
+            modular_row = modular_flags_dict.get(signature, {})
+            modular_flags = modular_row.get(None, [])
         else:
-            modularFlags = ["blocked"]
-        if monolithicFlags != modularFlags:
-            mismatchingSignatures.append(
-                (signature, modularFlags, monolithicFlags))
-    return mismatchingSignatures
+            modular_flags = ["blocked"]
+        if monolithic_flags != modular_flags:
+            mismatching_signatures.append(
+                (signature, modular_flags, monolithic_flags))
+    return mismatching_signatures
 
 
 def main(argv):
     args_parser = argparse.ArgumentParser(
         description="Verify that sets of hidden API flags are each a subset of "
         "the monolithic flag file.")
-    args_parser.add_argument("monolithicFlags", help="The monolithic flag file")
     args_parser.add_argument(
-        "modularFlags",
-        nargs=argparse.REMAINDER,
-        help="Flags produced by individual bootclasspath_fragment modules")
+        "--monolithic-flags", help="The monolithic flag file")
+    args_parser.add_argument(
+        "--module-flags",
+        action="append",
+        help="A colon separated pair of paths. The first is a path to a "
+        "filtered set of flags, and the second is a path to a set of "
+        "signature patterns that identify the set of classes belonging to "
+        "a single bootclasspath_fragment module, ")
     args = args_parser.parse_args(argv[1:])
 
     # Read in all the flags into the trie
-    monolithicFlagsPath = args.monolithicFlags
-    monolithicTrie = read_flag_trie_from_file(monolithicFlagsPath)
+    monolithic_flags_path = args.monolithic_flags
+    monolithic_trie = read_flag_trie_from_file(monolithic_flags_path)
 
     # For each subset specified on the command line, create dicts for the flags
     # provided by the subset and the corresponding flags from the complete set
     # of flags and compare them.
     failed = False
-    for modularPair in args.modularFlags:
-        parts = modularPair.split(":")
-        modularFlagsPath = parts[0]
-        modularPatternsPath = parts[1]
-        modularFlagsDict = read_signature_csv_from_file_as_dict(
-            modularFlagsPath)
-        monolithicFlagsSubsetDict = \
+    for modular_pair in args.module_flags:
+        parts = modular_pair.split(":")
+        modular_flags_path = parts[0]
+        modular_patterns_path = parts[1]
+        modular_flags_dict = read_signature_csv_from_file_as_dict(
+            modular_flags_path)
+        monolithic_flags_subset_dict = \
             extract_subset_from_monolithic_flags_as_dict_from_file(
-            monolithicTrie, modularPatternsPath)
-        mismatchingSignatures = compare_signature_flags(
-            monolithicFlagsSubsetDict, modularFlagsDict)
-        if mismatchingSignatures:
+                monolithic_trie, modular_patterns_path)
+        mismatching_signatures = compare_signature_flags(
+            monolithic_flags_subset_dict, modular_flags_dict)
+        if mismatching_signatures:
             failed = True
             print("ERROR: Hidden API flags are inconsistent:")
-            print("< " + modularFlagsPath)
-            print("> " + monolithicFlagsPath)
-            for mismatch in mismatchingSignatures:
+            print("< " + modular_flags_path)
+            print("> " + monolithic_flags_path)
+            for mismatch in mismatching_signatures:
                 signature = mismatch[0]
                 print()
                 print("< " + ",".join([signature] + mismatch[1]))
diff --git a/scripts/hiddenapi/verify_overlaps_test.py b/scripts/hiddenapi/verify_overlaps_test.py
index 8cf2959..ead8a4e 100755
--- a/scripts/hiddenapi/verify_overlaps_test.py
+++ b/scripts/hiddenapi/verify_overlaps_test.py
@@ -17,24 +17,26 @@
 import io
 import unittest
 
-from verify_overlaps import *  #pylint: disable=unused-wildcard-import,wildcard-import
+import verify_overlaps as vo
 
 
-#pylint: disable=line-too-long
 class TestDetectOverlaps(unittest.TestCase):
 
-    def read_flag_trie_from_string(self, csvdata):
+    @staticmethod
+    def read_flag_trie_from_string(csvdata):
         with io.StringIO(csvdata) as f:
-            return read_flag_trie_from_stream(f)
+            return vo.read_flag_trie_from_stream(f)
 
-    def read_signature_csv_from_string_as_dict(self, csvdata):
+    @staticmethod
+    def read_signature_csv_from_string_as_dict(csvdata):
         with io.StringIO(csvdata) as f:
-            return read_signature_csv_from_stream_as_dict(f)
+            return vo.read_signature_csv_from_stream_as_dict(f)
 
+    @staticmethod
     def extract_subset_from_monolithic_flags_as_dict_from_string(
-            self, monolithic, patterns):
+            monolithic, patterns):
         with io.StringIO(patterns) as f:
-            return extract_subset_from_monolithic_flags_as_dict_from_stream(
+            return vo.extract_subset_from_monolithic_flags_as_dict_from_stream(
                 monolithic, f)
 
     extractInput = """
@@ -50,14 +52,14 @@
         monolithic = self.read_flag_trie_from_string(
             TestDetectOverlaps.extractInput)
 
-        patterns = 'Ljava/lang/Object;->hashCode()I'
+        patterns = "Ljava/lang/Object;->hashCode()I"
 
         subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
             monolithic, patterns)
         expected = {
-            'Ljava/lang/Object;->hashCode()I': {
-                None: ['public-api', 'system-api', 'test-api'],
-                'signature': 'Ljava/lang/Object;->hashCode()I',
+            "Ljava/lang/Object;->hashCode()I": {
+                None: ["public-api", "system-api", "test-api"],
+                "signature": "Ljava/lang/Object;->hashCode()I",
             },
         }
         self.assertEqual(expected, subset)
@@ -66,18 +68,18 @@
         monolithic = self.read_flag_trie_from_string(
             TestDetectOverlaps.extractInput)
 
-        patterns = 'java/lang/Object'
+        patterns = "java/lang/Object"
 
         subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
             monolithic, patterns)
         expected = {
-            'Ljava/lang/Object;->hashCode()I': {
-                None: ['public-api', 'system-api', 'test-api'],
-                'signature': 'Ljava/lang/Object;->hashCode()I',
+            "Ljava/lang/Object;->hashCode()I": {
+                None: ["public-api", "system-api", "test-api"],
+                "signature": "Ljava/lang/Object;->hashCode()I",
             },
-            'Ljava/lang/Object;->toString()Ljava/lang/String;': {
-                None: ['blocked'],
-                'signature': 'Ljava/lang/Object;->toString()Ljava/lang/String;',
+            "Ljava/lang/Object;->toString()Ljava/lang/String;": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/Object;->toString()Ljava/lang/String;",
             },
         }
         self.assertEqual(expected, subset)
@@ -86,20 +88,20 @@
         monolithic = self.read_flag_trie_from_string(
             TestDetectOverlaps.extractInput)
 
-        patterns = 'java/lang/Character'
+        patterns = "java/lang/Character"
 
         subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
             monolithic, patterns)
         expected = {
-            'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;':
-                {
-                    None: ['blocked'],
-                    'signature':
-                        'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;',
-                },
-            'Ljava/lang/Character;->serialVersionUID:J': {
-                None: ['sdk'],
-                'signature': 'Ljava/lang/Character;->serialVersionUID:J',
+            "Ljava/lang/Character$UnicodeScript;"
+            "->of(I)Ljava/lang/Character$UnicodeScript;": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/Character$UnicodeScript;"
+                             "->of(I)Ljava/lang/Character$UnicodeScript;",
+            },
+            "Ljava/lang/Character;->serialVersionUID:J": {
+                None: ["sdk"],
+                "signature": "Ljava/lang/Character;->serialVersionUID:J",
             },
         }
         self.assertEqual(expected, subset)
@@ -108,17 +110,17 @@
         monolithic = self.read_flag_trie_from_string(
             TestDetectOverlaps.extractInput)
 
-        patterns = 'java/lang/Character$UnicodeScript'
+        patterns = "java/lang/Character$UnicodeScript"
 
         subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
             monolithic, patterns)
         expected = {
-            'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;':
-                {
-                    None: ['blocked'],
-                    'signature':
-                        'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;',
-                },
+            "Ljava/lang/Character$UnicodeScript;"
+            "->of(I)Ljava/lang/Character$UnicodeScript;": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/Character$UnicodeScript;"
+                             "->of(I)Ljava/lang/Character$UnicodeScript;",
+            },
         }
         self.assertEqual(expected, subset)
 
@@ -126,32 +128,32 @@
         monolithic = self.read_flag_trie_from_string(
             TestDetectOverlaps.extractInput)
 
-        patterns = 'java/lang/*'
+        patterns = "java/lang/*"
 
         subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
             monolithic, patterns)
         expected = {
-            'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;':
-                {
-                    None: ['blocked'],
-                    'signature':
-                        'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;',
-                },
-            'Ljava/lang/Character;->serialVersionUID:J': {
-                None: ['sdk'],
-                'signature': 'Ljava/lang/Character;->serialVersionUID:J',
+            "Ljava/lang/Character$UnicodeScript;"
+            "->of(I)Ljava/lang/Character$UnicodeScript;": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/Character$UnicodeScript;"
+                             "->of(I)Ljava/lang/Character$UnicodeScript;",
             },
-            'Ljava/lang/Object;->hashCode()I': {
-                None: ['public-api', 'system-api', 'test-api'],
-                'signature': 'Ljava/lang/Object;->hashCode()I',
+            "Ljava/lang/Character;->serialVersionUID:J": {
+                None: ["sdk"],
+                "signature": "Ljava/lang/Character;->serialVersionUID:J",
             },
-            'Ljava/lang/Object;->toString()Ljava/lang/String;': {
-                None: ['blocked'],
-                'signature': 'Ljava/lang/Object;->toString()Ljava/lang/String;',
+            "Ljava/lang/Object;->hashCode()I": {
+                None: ["public-api", "system-api", "test-api"],
+                "signature": "Ljava/lang/Object;->hashCode()I",
             },
-            'Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V': {
-                None: ['blocked'],
-                'signature': 'Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V',
+            "Ljava/lang/Object;->toString()Ljava/lang/String;": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/Object;->toString()Ljava/lang/String;",
+            },
+            "Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V",
             },
         }
         self.assertEqual(expected, subset)
@@ -160,36 +162,36 @@
         monolithic = self.read_flag_trie_from_string(
             TestDetectOverlaps.extractInput)
 
-        patterns = 'java/**'
+        patterns = "java/**"
 
         subset = self.extract_subset_from_monolithic_flags_as_dict_from_string(
             monolithic, patterns)
         expected = {
-            'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;':
-                {
-                    None: ['blocked'],
-                    'signature':
-                        'Ljava/lang/Character$UnicodeScript;->of(I)Ljava/lang/Character$UnicodeScript;',
-                },
-            'Ljava/lang/Character;->serialVersionUID:J': {
-                None: ['sdk'],
-                'signature': 'Ljava/lang/Character;->serialVersionUID:J',
+            "Ljava/lang/Character$UnicodeScript;"
+            "->of(I)Ljava/lang/Character$UnicodeScript;": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/Character$UnicodeScript;"
+                             "->of(I)Ljava/lang/Character$UnicodeScript;",
             },
-            'Ljava/lang/Object;->hashCode()I': {
-                None: ['public-api', 'system-api', 'test-api'],
-                'signature': 'Ljava/lang/Object;->hashCode()I',
+            "Ljava/lang/Character;->serialVersionUID:J": {
+                None: ["sdk"],
+                "signature": "Ljava/lang/Character;->serialVersionUID:J",
             },
-            'Ljava/lang/Object;->toString()Ljava/lang/String;': {
-                None: ['blocked'],
-                'signature': 'Ljava/lang/Object;->toString()Ljava/lang/String;',
+            "Ljava/lang/Object;->hashCode()I": {
+                None: ["public-api", "system-api", "test-api"],
+                "signature": "Ljava/lang/Object;->hashCode()I",
             },
-            'Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V': {
-                None: ['blocked'],
-                'signature': 'Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V',
+            "Ljava/lang/Object;->toString()Ljava/lang/String;": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/Object;->toString()Ljava/lang/String;",
             },
-            'Ljava/util/zip/ZipFile;-><clinit>()V': {
-                None: ['blocked'],
-                'signature': 'Ljava/util/zip/ZipFile;-><clinit>()V',
+            "Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V": {
+                None: ["blocked"],
+                "signature": "Ljava/lang/ProcessBuilder$Redirect$1;-><init>()V",
+            },
+            "Ljava/util/zip/ZipFile;-><clinit>()V": {
+                None: ["blocked"],
+                "signature": "Ljava/util/zip/ZipFile;-><clinit>()V",
             },
         }
         self.assertEqual(expected, subset)
@@ -200,7 +202,7 @@
 Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api
 Ljava/lang/Object;->hashCode()I,blocked
 """)
-        self.assertTrue('Duplicate signature: Ljava/lang/Object;->hashCode()I'
+        self.assertTrue("Duplicate signature: Ljava/lang/Object;->hashCode()I"
                         in str(context.exception))
 
     def test_read_trie_missing_member(self):
@@ -209,8 +211,8 @@
 Ljava/lang/Object,public-api,system-api,test-api
 """)
         self.assertTrue(
-            'Invalid signature: Ljava/lang/Object, does not identify a specific member'
-            in str(context.exception))
+            "Invalid signature: Ljava/lang/Object, "
+            "does not identify a specific member" in str(context.exception))
 
     def test_match(self):
         monolithic = self.read_signature_csv_from_string_as_dict("""
@@ -219,7 +221,7 @@
         modular = self.read_signature_csv_from_string_as_dict("""
 Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api
 """)
-        mismatches = compare_signature_flags(monolithic, modular)
+        mismatches = vo.compare_signature_flags(monolithic, modular)
         expected = []
         self.assertEqual(expected, mismatches)
 
@@ -230,12 +232,12 @@
         modular = self.read_signature_csv_from_string_as_dict("""
 Ljava/lang/Object;->toString()Ljava/lang/String;,public-api,system-api,test-api
 """)
-        mismatches = compare_signature_flags(monolithic, modular)
+        mismatches = vo.compare_signature_flags(monolithic, modular)
         expected = [
             (
-                'Ljava/lang/Object;->toString()Ljava/lang/String;',
-                ['public-api', 'system-api', 'test-api'],
-                ['public-api'],
+                "Ljava/lang/Object;->toString()Ljava/lang/String;",
+                ["public-api", "system-api", "test-api"],
+                ["public-api"],
             ),
         ]
         self.assertEqual(expected, mismatches)
@@ -247,12 +249,12 @@
         modular = self.read_signature_csv_from_string_as_dict("""
 Ljava/lang/Object;->toString()Ljava/lang/String;,public-api,system-api,test-api
 """)
-        mismatches = compare_signature_flags(monolithic, modular)
+        mismatches = vo.compare_signature_flags(monolithic, modular)
         expected = [
             (
-                'Ljava/lang/Object;->toString()Ljava/lang/String;',
-                ['public-api', 'system-api', 'test-api'],
-                ['blocked'],
+                "Ljava/lang/Object;->toString()Ljava/lang/String;",
+                ["public-api", "system-api", "test-api"],
+                ["blocked"],
             ),
         ]
         self.assertEqual(expected, mismatches)
@@ -264,26 +266,26 @@
         modular = self.read_signature_csv_from_string_as_dict("""
 Ljava/lang/Object;->toString()Ljava/lang/String;,blocked
 """)
-        mismatches = compare_signature_flags(monolithic, modular)
+        mismatches = vo.compare_signature_flags(monolithic, modular)
         expected = [
             (
-                'Ljava/lang/Object;->toString()Ljava/lang/String;',
-                ['blocked'],
-                ['public-api', 'system-api', 'test-api'],
+                "Ljava/lang/Object;->toString()Ljava/lang/String;",
+                ["blocked"],
+                ["public-api", "system-api", "test-api"],
             ),
         ]
         self.assertEqual(expected, mismatches)
 
     def test_match_treat_missing_from_modular_as_blocked(self):
-        monolithic = self.read_signature_csv_from_string_as_dict('')
+        monolithic = self.read_signature_csv_from_string_as_dict("")
         modular = self.read_signature_csv_from_string_as_dict("""
 Ljava/lang/Object;->toString()Ljava/lang/String;,public-api,system-api,test-api
 """)
-        mismatches = compare_signature_flags(monolithic, modular)
+        mismatches = vo.compare_signature_flags(monolithic, modular)
         expected = [
             (
-                'Ljava/lang/Object;->toString()Ljava/lang/String;',
-                ['public-api', 'system-api', 'test-api'],
+                "Ljava/lang/Object;->toString()Ljava/lang/String;",
+                ["public-api", "system-api", "test-api"],
                 [],
             ),
         ]
@@ -294,12 +296,12 @@
 Ljava/lang/Object;->hashCode()I,public-api,system-api,test-api
 """)
         modular = {}
-        mismatches = compare_signature_flags(monolithic, modular)
+        mismatches = vo.compare_signature_flags(monolithic, modular)
         expected = [
             (
-                'Ljava/lang/Object;->hashCode()I',
-                ['blocked'],
-                ['public-api', 'system-api', 'test-api'],
+                "Ljava/lang/Object;->hashCode()I",
+                ["blocked"],
+                ["public-api", "system-api", "test-api"],
             ),
         ]
         self.assertEqual(expected, mismatches)
@@ -309,12 +311,10 @@
 Ljava/lang/Object;->hashCode()I,blocked
 """)
         modular = {}
-        mismatches = compare_signature_flags(monolithic, modular)
+        mismatches = vo.compare_signature_flags(monolithic, modular)
         expected = []
         self.assertEqual(expected, mismatches)
 
 
-#pylint: enable=line-too-long
-
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main(verbosity=2)
diff --git a/sdk/testing.go b/sdk/testing.go
index 294f1a5..062f200 100644
--- a/sdk/testing.go
+++ b/sdk/testing.go
@@ -25,6 +25,8 @@
 	"android/soong/cc"
 	"android/soong/genrule"
 	"android/soong/java"
+
+	"github.com/google/blueprint/proptools"
 )
 
 // Prepare for running an sdk test with an apex.
@@ -81,6 +83,11 @@
 		}
 	}),
 
+	// Add a build number file.
+	android.FixtureModifyProductVariables(func(variables android.FixtureProductVariables) {
+		variables.BuildNumberFile = proptools.StringPtr(BUILD_NUMBER_FILE)
+	}),
+
 	// Make sure that every test provides all the source files.
 	android.PrepareForTestDisallowNonExistentPaths,
 	android.MockFS{
@@ -143,6 +150,8 @@
 	copyRules := &strings.Builder{}
 	otherCopyRules := &strings.Builder{}
 	snapshotDirPrefix := sdk.builderForTests.snapshotDir.String() + "/"
+
+	seenBuildNumberFile := false
 	for _, bp := range buildParams {
 		switch bp.Rule.String() {
 		case android.Cp.String():
@@ -152,8 +161,14 @@
 			src := android.NormalizePathForTesting(bp.Input)
 			// We differentiate between copy rules for the snapshot, and copy rules for the install file.
 			if strings.HasPrefix(output.String(), snapshotDirPrefix) {
-				// Get source relative to build directory.
-				_, _ = fmt.Fprintf(copyRules, "%s -> %s\n", src, dest)
+				// Don't include the build number file in the copy rules as that would break lots of
+				// tests; just verify that it is copied here, as it should appear in every snapshot.
+				if output.Base() == BUILD_NUMBER_FILE {
+					seenBuildNumberFile = true
+				} else {
+					// Get source relative to build directory.
+					_, _ = fmt.Fprintf(copyRules, "%s -> %s\n", src, dest)
+				}
 				info.snapshotContents = append(info.snapshotContents, dest)
 			} else {
 				_, _ = fmt.Fprintf(otherCopyRules, "%s -> %s\n", src, dest)
@@ -189,6 +204,10 @@
 		}
 	}
 
+	if !seenBuildNumberFile {
+		panic(fmt.Sprintf("Every snapshot must include the %s file", BUILD_NUMBER_FILE))
+	}
+
 	info.copyRules = copyRules.String()
 	info.otherCopyRules = otherCopyRules.String()
 
diff --git a/sdk/update.go b/sdk/update.go
index 389e845..5db604b 100644
--- a/sdk/update.go
+++ b/sdk/update.go
@@ -281,6 +281,10 @@
 	return append(variants, newVariant)
 }
 
+// BUILD_NUMBER_FILE is the name of the file in the snapshot zip that will contain the number of
+// the build from which the snapshot was produced.
+const BUILD_NUMBER_FILE = "snapshot-creation-build-number.txt"
+
 // SDK directory structure
 // <sdk_root>/
 //     Android.bp   : definition of a 'sdk' module is here. This is a hand-made one.
@@ -479,6 +483,9 @@
 
 	bp.build(pctx, ctx, nil)
 
+	// Copy the build number file into the snapshot.
+	builder.CopyToSnapshot(ctx.Config().BuildNumberFile(ctx), BUILD_NUMBER_FILE)
+
 	filesToZip := builder.filesToZip
 
 	// zip them all