Merge "Export provenance metadata for prebuilt APKs and APEXes."
diff --git a/android/bazel.go b/android/bazel.go
index e3fb0a6..af5de12 100644
--- a/android/bazel.go
+++ b/android/bazel.go
@@ -573,6 +573,8 @@
 		"prebuilt_car-ui-androidx-core-common",         // b/224773339, genrule dependency creates an .aar, not a .jar
 		"prebuilt_platform-robolectric-4.4-prebuilt",   // aosp/1999250, needs .aar support in Jars
 		"prebuilt_platform-robolectric-4.5.1-prebuilt", // aosp/1999250, needs .aar support in Jars
+
+		"libtombstoned_client_rust_bridge_code", "libtombstoned_client_wrapper", // rust conversions are not supported
 	}
 
 	// Per-module denylist of cc_library modules to only generate the static
diff --git a/android/neverallow.go b/android/neverallow.go
index f87cebb..aa47bca 100644
--- a/android/neverallow.go
+++ b/android/neverallow.go
@@ -57,6 +57,7 @@
 	AddNeverAllowRules(createUncompressDexRules()...)
 	AddNeverAllowRules(createMakefileGoalRules()...)
 	AddNeverAllowRules(createInitFirstStageRules()...)
+	AddNeverAllowRules(createProhibitFrameworkAccessRules()...)
 }
 
 // Add a NeverAllow rule to the set of rules to apply.
@@ -228,6 +229,15 @@
 	}
 }
 
+func createProhibitFrameworkAccessRules() []Rule {
+	return []Rule{
+		NeverAllow().
+			With("libs", "framework").
+			WithoutMatcher("sdk_version", Regexp("(core_.*|^$)")).
+			Because("framework can't be used when building against SDK"),
+	}
+}
+
 func neverallowMutator(ctx BottomUpMutatorContext) {
 	m, ok := ctx.Module().(Module)
 	if !ok {
diff --git a/android/neverallow_test.go b/android/neverallow_test.go
index 8afe9e0..86f1a37 100644
--- a/android/neverallow_test.go
+++ b/android/neverallow_test.go
@@ -327,6 +327,21 @@
 			"Only boot images may be imported as a makefile goal.",
 		},
 	},
+	// Tests for the rule prohibiting the use of framework
+	{
+		name: "prohibit framework",
+		fs: map[string][]byte{
+			"Android.bp": []byte(`
+				java_library {
+					name: "foo",
+					libs: ["framework"],
+					sdk_version: "current",
+				}`),
+		},
+		expectedErrors: []string{
+			"framework can't be used when building against SDK",
+		},
+	},
 }
 
 var prepareForNeverAllowTest = GroupFixturePreparers(
diff --git a/android/rule_builder.go b/android/rule_builder.go
index 098c1fc..11da36c 100644
--- a/android/rule_builder.go
+++ b/android/rule_builder.go
@@ -101,12 +101,7 @@
 }
 
 // Restat marks the rule as a restat rule, which will be passed to ModuleContext.Rule in BuildParams.Restat.
-//
-// Restat is not compatible with Sbox()
 func (r *RuleBuilder) Restat() *RuleBuilder {
-	if r.sbox {
-		panic("Restat() is not compatible with Sbox()")
-	}
 	r.restat = true
 	return r
 }
@@ -141,8 +136,6 @@
 // point to a location where sbox's manifest will be written and must be outside outputDir. sbox
 // will ensure that all outputs have been written, and will discard any output files that were not
 // specified.
-//
-// Sbox is not compatible with Restat()
 func (r *RuleBuilder) Sbox(outputDir WritablePath, manifestPath WritablePath) *RuleBuilder {
 	if r.sbox {
 		panic("Sbox() may not be called more than once")
@@ -150,9 +143,6 @@
 	if len(r.commands) > 0 {
 		panic("Sbox() may not be called after Command()")
 	}
-	if r.restat {
-		panic("Sbox() is not compatible with Restat()")
-	}
 	r.sbox = true
 	r.outDir = outputDir
 	r.sboxManifestPath = manifestPath
@@ -636,11 +626,14 @@
 				ctx: r.ctx,
 			},
 		}
-		sboxCmd.Text("rm -rf").Output(r.outDir)
-		sboxCmd.Text("&&")
 		sboxCmd.builtToolWithoutDeps("sbox").
-			Flag("--sandbox-path").Text(shared.TempDirForOutDir(PathForOutput(r.ctx).String())).
-			Flag("--manifest").Input(r.sboxManifestPath)
+			FlagWithArg("--sandbox-path ", shared.TempDirForOutDir(PathForOutput(r.ctx).String())).
+			FlagWithArg("--output-dir ", r.outDir.String()).
+			FlagWithInput("--manifest ", r.sboxManifestPath)
+
+		if r.restat {
+			sboxCmd.Flag("--write-if-changed")
+		}
 
 		// Replace the command string, and add the sbox tool and manifest textproto to the
 		// dependencies of the final sbox rule.
diff --git a/android/rule_builder_test.go b/android/rule_builder_test.go
index 3766bb0..86647eb 100644
--- a/android/rule_builder_test.go
+++ b/android/rule_builder_test.go
@@ -678,32 +678,32 @@
 	})
 	t.Run("sbox", func(t *testing.T) {
 		outDir := "out/soong/.intermediates/foo_sbox"
-		outFile := filepath.Join(outDir, "gen/foo_sbox")
-		depFile := filepath.Join(outDir, "gen/foo_sbox.d")
+		sboxOutDir := filepath.Join(outDir, "gen")
+		outFile := filepath.Join(sboxOutDir, "foo_sbox")
+		depFile := filepath.Join(sboxOutDir, "foo_sbox.d")
 		rspFile := filepath.Join(outDir, "rsp")
 		rspFile2 := filepath.Join(outDir, "rsp2")
 		manifest := filepath.Join(outDir, "sbox.textproto")
 		sbox := filepath.Join("out", "soong", "host", result.Config.PrebuiltOS(), "bin/sbox")
 		sandboxPath := shared.TempDirForOutDir("out/soong")
 
-		cmd := `rm -rf ` + outDir + `/gen && ` +
-			sbox + ` --sandbox-path ` + sandboxPath + ` --manifest ` + manifest
+		cmd := sbox + ` --sandbox-path ` + sandboxPath + ` --output-dir ` + sboxOutDir + ` --manifest ` + manifest
 		module := result.ModuleForTests("foo_sbox", "")
 		check(t, module.Output("gen/foo_sbox"), module.Output(rspFile2),
 			cmd, outFile, depFile, rspFile, rspFile2, false, []string{manifest}, []string{sbox})
 	})
 	t.Run("sbox_inputs", func(t *testing.T) {
 		outDir := "out/soong/.intermediates/foo_sbox_inputs"
-		outFile := filepath.Join(outDir, "gen/foo_sbox_inputs")
-		depFile := filepath.Join(outDir, "gen/foo_sbox_inputs.d")
+		sboxOutDir := filepath.Join(outDir, "gen")
+		outFile := filepath.Join(sboxOutDir, "foo_sbox_inputs")
+		depFile := filepath.Join(sboxOutDir, "foo_sbox_inputs.d")
 		rspFile := filepath.Join(outDir, "rsp")
 		rspFile2 := filepath.Join(outDir, "rsp2")
 		manifest := filepath.Join(outDir, "sbox.textproto")
 		sbox := filepath.Join("out", "soong", "host", result.Config.PrebuiltOS(), "bin/sbox")
 		sandboxPath := shared.TempDirForOutDir("out/soong")
 
-		cmd := `rm -rf ` + outDir + `/gen && ` +
-			sbox + ` --sandbox-path ` + sandboxPath + ` --manifest ` + manifest
+		cmd := sbox + ` --sandbox-path ` + sandboxPath + ` --output-dir ` + sboxOutDir + ` --manifest ` + manifest
 
 		module := result.ModuleForTests("foo_sbox_inputs", "")
 		check(t, module.Output("gen/foo_sbox_inputs"), module.Output(rspFile2),
diff --git a/bp2build/java_library_conversion_test.go b/bp2build/java_library_conversion_test.go
index 2f6bce2..4b75e3b 100644
--- a/bp2build/java_library_conversion_test.go
+++ b/bp2build/java_library_conversion_test.go
@@ -30,6 +30,7 @@
 }
 
 func runJavaLibraryTestCase(t *testing.T, tc bp2buildTestCase) {
+	t.Helper()
 	runJavaLibraryTestCaseWithRegistrationCtxFunc(t, tc, func(ctx android.RegistrationContext) {})
 }
 
@@ -156,3 +157,65 @@
 		ctx.RegisterModuleType("java_plugin", java.PluginFactory)
 	})
 }
+
+func TestJavaLibraryErrorproneJavacflagsEnabledManually(t *testing.T) {
+	runJavaLibraryTestCase(t, bp2buildTestCase{
+		blueprint: `java_library {
+    name: "java-lib-1",
+    srcs: ["a.java"],
+    javacflags: ["-Xsuper-fast"],
+    errorprone: {
+        enabled: true,
+        javacflags: ["-Xep:SpeedLimit:OFF"],
+    },
+}`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("java_library", "java-lib-1", attrNameToString{
+				"javacopts": `[
+        "-Xsuper-fast",
+        "-Xep:SpeedLimit:OFF",
+    ]`,
+				"srcs": `["a.java"]`,
+			}),
+		},
+	})
+}
+
+func TestJavaLibraryErrorproneJavacflagsErrorproneDisabledByDefault(t *testing.T) {
+	runJavaLibraryTestCase(t, bp2buildTestCase{
+		blueprint: `java_library {
+    name: "java-lib-1",
+    srcs: ["a.java"],
+    javacflags: ["-Xsuper-fast"],
+    errorprone: {
+        javacflags: ["-Xep:SpeedLimit:OFF"],
+    },
+}`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("java_library", "java-lib-1", attrNameToString{
+				"javacopts": `["-Xsuper-fast"]`,
+				"srcs":      `["a.java"]`,
+			}),
+		},
+	})
+}
+
+func TestJavaLibraryErrorproneJavacflagsErrorproneDisabledManually(t *testing.T) {
+	runJavaLibraryTestCase(t, bp2buildTestCase{
+		blueprint: `java_library {
+    name: "java-lib-1",
+    srcs: ["a.java"],
+    javacflags: ["-Xsuper-fast"],
+    errorprone: {
+        enabled: false,
+        javacflags: ["-Xep:SpeedLimit:OFF"],
+    },
+}`,
+		expectedBazelTargets: []string{
+			makeBazelTarget("java_library", "java-lib-1", attrNameToString{
+				"javacopts": `["-Xsuper-fast"]`,
+				"srcs":      `["a.java"]`,
+			}),
+		},
+	})
+}
diff --git a/cc/config/global.go b/cc/config/global.go
index fad675a..0f31931 100644
--- a/cc/config/global.go
+++ b/cc/config/global.go
@@ -225,7 +225,6 @@
 		"-Wno-deprecated-enum-enum-conversion",      // http://b/153746563
 		"-Wno-string-compare",                       // http://b/153764102
 		"-Wno-enum-enum-conversion",                 // http://b/154138986
-		"-Wno-enum-float-conversion",                // http://b/154255917
 		"-Wno-pessimizing-move",                     // http://b/154270751
 		// New warnings to be fixed after clang-r399163
 		"-Wno-non-c-typedef-for-linkage", // http://b/161304145
diff --git a/cmd/sbox/sbox.go b/cmd/sbox/sbox.go
index 4fa7486..418826c 100644
--- a/cmd/sbox/sbox.go
+++ b/cmd/sbox/sbox.go
@@ -38,9 +38,11 @@
 )
 
 var (
-	sandboxesRoot string
-	manifestFile  string
-	keepOutDir    bool
+	sandboxesRoot  string
+	outputDir      string
+	manifestFile   string
+	keepOutDir     bool
+	writeIfChanged bool
 )
 
 const (
@@ -51,10 +53,14 @@
 func init() {
 	flag.StringVar(&sandboxesRoot, "sandbox-path", "",
 		"root of temp directory to put the sandbox into")
+	flag.StringVar(&outputDir, "output-dir", "",
+		"directory which will contain all output files and only output files")
 	flag.StringVar(&manifestFile, "manifest", "",
 		"textproto manifest describing the sandboxed command(s)")
 	flag.BoolVar(&keepOutDir, "keep-out-dir", false,
 		"whether to keep the sandbox directory when done")
+	flag.BoolVar(&writeIfChanged, "write-if-changed", false,
+		"only write the output files if they have changed")
 }
 
 func usageViolation(violation string) {
@@ -241,6 +247,12 @@
 		return "", fmt.Errorf("command is required")
 	}
 
+	// Clear the output directory: remove everything when outputs will always be rewritten,
+	// or only stale files when --write-if-changed is requested.
+	err = clearOutputDirectory(command.CopyAfter, outputDir, writeType(writeIfChanged))
+	if err != nil {
+		return "", err
+	}
+
 	pathToTempDirInSbox := tempDir
 	if command.GetChdir() {
 		pathToTempDirInSbox = "."
@@ -252,7 +264,7 @@
 	}
 
 	// Copy in any files specified by the manifest.
-	err = copyFiles(command.CopyBefore, "", tempDir, false)
+	err = copyFiles(command.CopyBefore, "", tempDir, requireFromExists, alwaysWrite)
 	if err != nil {
 		return "", err
 	}
@@ -306,7 +318,7 @@
 		// especially useful for linters with baselines that print an error message on failure
 		// with a command to copy the output lint errors to the new baseline.  Use a copy instead of
 		// a move to leave the sandbox intact for manual inspection
-		copyFiles(command.CopyAfter, tempDir, "", true)
+		copyFiles(command.CopyAfter, tempDir, "", allowFromNotExists, writeType(writeIfChanged))
 	}
 
 	// If the command  was executed but failed with an error, print a debugging message before
@@ -327,39 +339,16 @@
 		return "", err
 	}
 
-	missingOutputErrors := validateOutputFiles(command.CopyAfter, tempDir)
-
-	if len(missingOutputErrors) > 0 {
-		// find all created files for making a more informative error message
-		createdFiles := findAllFilesUnder(tempDir)
-
-		// build error message
-		errorMessage := "mismatch between declared and actual outputs\n"
-		errorMessage += "in sbox command(" + rawCommand + ")\n\n"
-		errorMessage += "in sandbox " + tempDir + ",\n"
-		errorMessage += fmt.Sprintf("failed to create %v files:\n", len(missingOutputErrors))
-		for _, missingOutputError := range missingOutputErrors {
-			errorMessage += "  " + missingOutputError.Error() + "\n"
-		}
-		if len(createdFiles) < 1 {
-			errorMessage += "created 0 files."
-		} else {
-			errorMessage += fmt.Sprintf("did create %v files:\n", len(createdFiles))
-			creationMessages := createdFiles
-			maxNumCreationLines := 10
-			if len(creationMessages) > maxNumCreationLines {
-				creationMessages = creationMessages[:maxNumCreationLines]
-				creationMessages = append(creationMessages, fmt.Sprintf("...%v more", len(createdFiles)-maxNumCreationLines))
-			}
-			for _, creationMessage := range creationMessages {
-				errorMessage += "  " + creationMessage + "\n"
-			}
-		}
-
-		return "", errors.New(errorMessage)
+	err = validateOutputFiles(command.CopyAfter, tempDir, outputDir, rawCommand)
+	if err != nil {
+		return "", err
 	}
+
 	// the created files match the declared files; now move them
-	err = moveFiles(command.CopyAfter, tempDir, "")
+	err = moveFiles(command.CopyAfter, tempDir, "", writeType(writeIfChanged))
+	if err != nil {
+		return "", err
+	}
 
 	return depFile, nil
 }
@@ -380,8 +369,9 @@
 
 // validateOutputFiles verifies that all files that have a rule to be copied out of the sandbox
 // were created by the command.
-func validateOutputFiles(copies []*sbox_proto.Copy, sandboxDir string) []error {
+func validateOutputFiles(copies []*sbox_proto.Copy, sandboxDir, outputDir, rawCommand string) error {
 	var missingOutputErrors []error
+	var incorrectOutputDirectoryErrors []error
 	for _, copyPair := range copies {
 		fromPath := joinPath(sandboxDir, copyPair.GetFrom())
 		fileInfo, err := os.Stat(fromPath)
@@ -392,17 +382,91 @@
 		if fileInfo.IsDir() {
 			missingOutputErrors = append(missingOutputErrors, fmt.Errorf("%s: not a file", fromPath))
 		}
+
+		toPath := copyPair.GetTo()
+		if rel, err := filepath.Rel(outputDir, toPath); err != nil {
+			return err
+		} else if strings.HasPrefix(rel, "../") {
+			incorrectOutputDirectoryErrors = append(incorrectOutputDirectoryErrors,
+				fmt.Errorf("%s is not under %s", toPath, outputDir))
+		}
 	}
-	return missingOutputErrors
+
+	const maxErrors = 10
+
+	if len(incorrectOutputDirectoryErrors) > 0 {
+		errorMessage := ""
+		more := 0
+		if len(incorrectOutputDirectoryErrors) > maxErrors {
+			more = len(incorrectOutputDirectoryErrors) - maxErrors
+			incorrectOutputDirectoryErrors = incorrectOutputDirectoryErrors[:maxErrors]
+		}
+
+		for _, err := range incorrectOutputDirectoryErrors {
+			errorMessage += err.Error() + "\n"
+		}
+		if more > 0 {
+			errorMessage += fmt.Sprintf("...%v more", more)
+		}
+
+		return errors.New(errorMessage)
+	}
+
+	if len(missingOutputErrors) > 0 {
+		// find all created files for making a more informative error message
+		createdFiles := findAllFilesUnder(sandboxDir)
+
+		// build error message
+		errorMessage := "mismatch between declared and actual outputs\n"
+		errorMessage += "in sbox command(" + rawCommand + ")\n\n"
+		errorMessage += "in sandbox " + sandboxDir + ",\n"
+		errorMessage += fmt.Sprintf("failed to create %v files:\n", len(missingOutputErrors))
+		for _, missingOutputError := range missingOutputErrors {
+			errorMessage += "  " + missingOutputError.Error() + "\n"
+		}
+		if len(createdFiles) < 1 {
+			errorMessage += "created 0 files."
+		} else {
+			errorMessage += fmt.Sprintf("did create %v files:\n", len(createdFiles))
+			creationMessages := createdFiles
+			if len(creationMessages) > maxErrors {
+				creationMessages = creationMessages[:maxErrors]
+				creationMessages = append(creationMessages, fmt.Sprintf("...%v more", len(createdFiles)-maxErrors))
+			}
+			for _, creationMessage := range creationMessages {
+				errorMessage += "  " + creationMessage + "\n"
+			}
+		}
+
+		return errors.New(errorMessage)
+	}
+
+	return nil
 }
 
-// copyFiles copies files in or out of the sandbox.  If allowFromNotExists is true then errors
-// caused by a from path not existing are ignored.
-func copyFiles(copies []*sbox_proto.Copy, fromDir, toDir string, allowFromNotExists bool) error {
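+// existsType selects whether a copy treats a missing source file as an error
+// (requireFromExists) or silently skips it (allowFromNotExists).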
+type existsType bool
+
+const (
+	requireFromExists  existsType = false
+	allowFromNotExists            = true
+)
+
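+// writeType selects whether outputs are always rewritten (alwaysWrite) or only written when
+// their contents change (onlyWriteIfChanged), so unchanged outputs keep their timestamps.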
+type writeType bool
+
+const (
+	alwaysWrite        writeType = false
+	onlyWriteIfChanged           = true
+)
+
+// copyFiles copies files in or out of the sandbox.  If exists is allowFromNotExists then errors
+// caused by a from path not existing are ignored.  If write is onlyWriteIfChanged then the output
+// file is compared to the input file and not written to if it is the same, avoiding updating
+// the timestamp.
+func copyFiles(copies []*sbox_proto.Copy, fromDir, toDir string, exists existsType, write writeType) error {
 	for _, copyPair := range copies {
 		fromPath := joinPath(fromDir, copyPair.GetFrom())
 		toPath := joinPath(toDir, copyPair.GetTo())
-		err := copyOneFile(fromPath, toPath, copyPair.GetExecutable(), allowFromNotExists)
+		err := copyOneFile(fromPath, toPath, copyPair.GetExecutable(), exists, write)
 		if err != nil {
 			return fmt.Errorf("error copying %q to %q: %w", fromPath, toPath, err)
 		}
@@ -411,8 +475,11 @@
 }
 
 // copyOneFile copies a file and its permissions.  If forceExecutable is true it adds u+x to the
-// permissions.  If allowFromNotExists is true it returns nil if the from path doesn't exist.
-func copyOneFile(from string, to string, forceExecutable, allowFromNotExists bool) error {
+// permissions.  If exists is allowFromNotExists it returns nil if the from path doesn't exist.
+// If write is onlyWriteIfChanged then the output file is compared to the input file and not written to
+// if it is the same, avoiding updating the timestamp.
+func copyOneFile(from string, to string, forceExecutable bool, exists existsType,
+	write writeType) error {
 	err := os.MkdirAll(filepath.Dir(to), 0777)
 	if err != nil {
 		return err
@@ -420,7 +487,7 @@
 
 	stat, err := os.Stat(from)
 	if err != nil {
-		if os.IsNotExist(err) && allowFromNotExists {
+		if os.IsNotExist(err) && exists == allowFromNotExists {
 			return nil
 		}
 		return err
@@ -431,6 +498,10 @@
 		perm = perm | 0100 // u+x
 	}
 
+	if write == onlyWriteIfChanged && filesHaveSameContents(from, to) {
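+		// The destination already has identical contents; skip the copy so its timestamp is preserved.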
+		return nil
+	}
+
 	in, err := os.Open(from)
 	if err != nil {
 		return err
@@ -504,7 +575,7 @@
 		to := applyPathMappings(rspFile.PathMappings, from)
 
 		// Copy the file into the sandbox.
-		err := copyOneFile(from, joinPath(toDir, to), false, false)
+		err := copyOneFile(from, joinPath(toDir, to), false, requireFromExists, alwaysWrite)
 		if err != nil {
 			return err
 		}
@@ -551,9 +622,10 @@
 
 // moveFiles moves files specified by a set of copy rules.  It uses os.Rename, so it is restricted
 // to moving files where the source and destination are in the same filesystem.  This is OK for
-// sbox because the temporary directory is inside the out directory.  It updates the timestamp
-// of the new file.
-func moveFiles(copies []*sbox_proto.Copy, fromDir, toDir string) error {
+// sbox because the temporary directory is inside the out directory.  If write is onlyWriteIfChanged
+// then the output file is compared to the input file and not written to if it is the same, avoiding
+// updating the timestamp.  Otherwise it always updates the timestamp of the new file.
+func moveFiles(copies []*sbox_proto.Copy, fromDir, toDir string, write writeType) error {
 	for _, copyPair := range copies {
 		fromPath := joinPath(fromDir, copyPair.GetFrom())
 		toPath := joinPath(toDir, copyPair.GetTo())
@@ -562,6 +634,10 @@
 			return err
 		}
 
+		if write == onlyWriteIfChanged && filesHaveSameContents(fromPath, toPath) {
+			continue
+		}
+
 		err = os.Rename(fromPath, toPath)
 		if err != nil {
 			return err
@@ -578,6 +654,37 @@
 	return nil
 }
 
+// clearOutputDirectory removes all files in the output directory if write is alwaysWrite, or
+// any files not listed in copies if write is onlyWriteIfChanged
+func clearOutputDirectory(copies []*sbox_proto.Copy, outputDir string, write writeType) error {
+	if outputDir == "" {
+		return fmt.Errorf("output directory must be set")
+	}
+
+	if write == alwaysWrite {
+		// When writing all the output files remove the whole output directory
+		return os.RemoveAll(outputDir)
+	}
+
+	outputFiles := make(map[string]bool, len(copies))
+	for _, copyPair := range copies {
+		outputFiles[copyPair.GetTo()] = true
+	}
+
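+	// Remove any file in the output directory that this rule does not declare as an output,
+	// cleaning up stale outputs from previous builds while leaving unchanged outputs in place.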
+	existingFiles := findAllFilesUnder(outputDir)
+	for _, existingFile := range existingFiles {
+		fullExistingFile := filepath.Join(outputDir, existingFile)
+		if !outputFiles[fullExistingFile] {
+			err := os.Remove(fullExistingFile)
+			if err != nil {
+				return fmt.Errorf("failed to remove obsolete output file %s: %w", fullExistingFile, err)
+			}
+		}
+	}
+
+	return nil
+}
+
 // Rewrite one or more depfiles so that it doesn't include the (randomized) sandbox directory
 // to an output file.
 func rewriteDepFiles(ins []string, out string) error {
@@ -621,6 +728,66 @@
 	return filepath.Join(dir, file)
 }
 
+// filesHaveSameContents compares the contents of two files, returning true if they are the same
+// and returning false if they are different or any errors occur.
+func filesHaveSameContents(a, b string) bool {
+	// Compare the sizes of the two files
+	statA, err := os.Stat(a)
+	if err != nil {
+		return false
+	}
+	statB, err := os.Stat(b)
+	if err != nil {
+		return false
+	}
+
+	if statA.Size() != statB.Size() {
+		return false
+	}
+
+	// Open the two files
+	fileA, err := os.Open(a)
+	if err != nil {
+		return false
+	}
+	defer fileA.Close()
+	fileB, err := os.Open(b)
+	if err != nil {
+		return false
+	}
+	defer fileB.Close()
+
+	// Compare the files 1MB at a time
+	const bufSize = 1 * 1024 * 1024
+	bufA := make([]byte, bufSize)
+	bufB := make([]byte, bufSize)
+
+	remain := statA.Size()
+	for remain > 0 {
+		toRead := int64(bufSize)
+		if toRead > remain {
+			toRead = remain
+		}
+
+		_, err = io.ReadFull(fileA, bufA[:toRead])
+		if err != nil {
+			return false
+		}
+		_, err = io.ReadFull(fileB, bufB[:toRead])
+		if err != nil {
+			return false
+		}
+
+		if bytes.Compare(bufA[:toRead], bufB[:toRead]) != 0 {
+			return false
+		}
+
+		remain -= toRead
+	}
+
+	return true
+}
+
 func makeAbsPathEnv(pathEnv string) (string, error) {
 	pathEnvElements := filepath.SplitList(pathEnv)
 	for i, p := range pathEnvElements {
diff --git a/java/base.go b/java/base.go
index 2f425cd..cc55394 100644
--- a/java/base.go
+++ b/java/base.go
@@ -1048,12 +1048,24 @@
 		}
 	}
 
+	// We don't currently run annotation processors in turbine, which means we can't use turbine
+	// generated header jars when an annotation processor that generates API is enabled.  One
+	// exception (handled further below) is when kotlin sources are enabled, in which case turbine
+	// is used to run all of the annotation processors.
+	disableTurbine := deps.disableTurbine
+
 	// Collect .java files for AIDEGen
 	j.expandIDEInfoCompiledSrcs = append(j.expandIDEInfoCompiledSrcs, uniqueSrcFiles.Strings()...)
 
 	var kotlinJars android.Paths
+	var kotlinHeaderJars android.Paths
 
 	if srcFiles.HasExt(".kt") {
+		// When using kotlin sources, turbine is used to generate annotation processor sources,
+		// including for annotation processors that generate API, so we can use turbine for
+		// java sources too.
+		disableTurbine = false
+
 		// user defined kotlin flags.
 		kotlincFlags := j.properties.Kotlincflags
 		CheckKotlincFlags(ctx, kotlincFlags)
@@ -1096,31 +1108,47 @@
 		flags.kotlincClasspath = append(flags.kotlincClasspath, flags.bootClasspath...)
 		flags.kotlincClasspath = append(flags.kotlincClasspath, flags.classpath...)
 
-		if len(flags.processorPath) > 0 {
+		useTurbineApt := len(flags.processorPath) > 0
+		if useTurbineApt {
 			// Use kapt for annotation processing
-			kaptSrcJar := android.PathForModuleOut(ctx, "kapt", "kapt-sources.jar")
-			kaptResJar := android.PathForModuleOut(ctx, "kapt", "kapt-res.jar")
-			kotlinKapt(ctx, kaptSrcJar, kaptResJar, kotlinSrcFiles, kotlinCommonSrcFiles, srcJars, flags)
-			srcJars = append(srcJars, kaptSrcJar)
-			kotlinJars = append(kotlinJars, kaptResJar)
+			kotlinTurbineAptHeaderJar := android.PathForModuleOut(ctx, "turbine-apt", "stubs-header.jar")
+			kotlinTurbineAptSrcJar := android.PathForModuleOut(ctx, "turbine-apt", "anno-sources.jar")
+			kotlinTurbineAptResJar := android.PathForModuleOut(ctx, "turbine-apt", "anno-res.jar")
+			kotlinTurbineApt(ctx, kotlinTurbineAptHeaderJar, kotlinTurbineAptSrcJar, kotlinTurbineAptResJar,
+				kotlinSrcFiles, kotlinCommonSrcFiles, srcJars, flags)
+			srcJars = append(srcJars, kotlinTurbineAptSrcJar)
+			kotlinJars = append(kotlinJars, kotlinTurbineAptResJar)
+			// When annotation processors are enabled we've already created java stubs for the
+			// kotlin files using kapt and compiled them in turbine-apt while running the
+			// annotation processors; reuse that result as the kotlin header jar for the javac
+			// action.  It can't be used as the header jar for downstream modules to compile
+			// against because it doesn't contain the kotlin-specific metadata that kotlinc
+			// needs.
+			flags.classpath = append(classpath{kotlinTurbineAptHeaderJar}, flags.classpath...)
 			// Disable annotation processing in javac, it's already been handled by kapt
 			flags.processorPath = nil
 			flags.processors = nil
 		}
 
 		kotlinJar := android.PathForModuleOut(ctx, "kotlin", jarName)
-		kotlinCompile(ctx, kotlinJar, kotlinSrcFiles, kotlinCommonSrcFiles, srcJars, flags)
+		kotlinHeaderJar := android.PathForModuleOut(ctx, "kotlin_headers", jarName)
+		kotlinCompile(ctx, kotlinJar, kotlinHeaderJar, kotlinSrcFiles, kotlinCommonSrcFiles, srcJars, flags)
 		if ctx.Failed() {
 			return
 		}
 
-		// Make javac rule depend on the kotlinc rule
-		flags.classpath = append(flags.classpath, kotlinJar)
-
 		kotlinJars = append(kotlinJars, kotlinJar)
+		kotlinHeaderJars = append(kotlinHeaderJars, kotlinHeaderJar)
+		if !useTurbineApt {
+			// When annotation processors are not enabled, use the kotlinc gen-jvm-abi plugin
+			// output as the header jar for javac in this module.
+			flags.classpath = append(classpath{kotlinHeaderJar}, flags.classpath...)
+		}
+
 		// Jar kotlin classes into the final jar after javac
 		if BoolDefault(j.properties.Static_kotlin_stdlib, true) {
 			kotlinJars = append(kotlinJars, deps.kotlinStdlib...)
+			kotlinHeaderJars = append(kotlinHeaderJars, deps.kotlinStdlib...)
 		} else {
 			flags.dexClasspath = append(flags.dexClasspath, deps.kotlinStdlib...)
 		}
@@ -1134,7 +1162,7 @@
 
 	enableSharding := false
 	var headerJarFileWithoutDepsOrJarjar android.Path
-	if ctx.Device() && !ctx.Config().IsEnvFalse("TURBINE_ENABLED") && !deps.disableTurbine {
+	if ctx.Device() && !ctx.Config().IsEnvFalse("TURBINE_ENABLED") && !disableTurbine {
 		if j.properties.Javac_shard_size != nil && *(j.properties.Javac_shard_size) > 0 {
 			enableSharding = true
 			// Formerly, there was a check here that prevented annotation processors
@@ -1144,7 +1172,7 @@
 			// with sharding enabled. See: b/77284273.
 		}
 		headerJarFileWithoutDepsOrJarjar, j.headerJarFile =
-			j.compileJavaHeader(ctx, uniqueSrcFiles, srcJars, deps, flags, jarName, kotlinJars)
+			j.compileJavaHeader(ctx, uniqueSrcFiles, srcJars, deps, flags, jarName, kotlinHeaderJars)
 		if ctx.Failed() {
 			return
 		}
diff --git a/java/builder.go b/java/builder.go
index c0fadd4..0f6876e 100644
--- a/java/builder.go
+++ b/java/builder.go
@@ -415,7 +415,7 @@
 }
 
 // TurbineApt produces a rule to run annotation processors using turbine.
-func TurbineApt(ctx android.ModuleContext, outputSrcJar, outputResJar android.WritablePath,
+func TurbineApt(ctx android.ModuleContext, outputJar, outputSrcJar, outputResJar android.WritablePath,
 	srcFiles, srcJars android.Paths, flags javaBuilderFlags) {
 
 	turbineFlags, deps := turbineFlags(ctx, flags)
@@ -426,8 +426,9 @@
 	turbineFlags += " " + flags.processorPath.FormTurbineClassPath("--processorpath ")
 	turbineFlags += " --processors " + strings.Join(flags.processors, " ")
 
-	outputs := android.WritablePaths{outputSrcJar, outputResJar}
-	outputFlags := "--gensrc_output " + outputSrcJar.String() + ".tmp " +
+	outputs := android.WritablePaths{outputJar, outputSrcJar, outputResJar}
+	outputFlags := "--output " + outputJar.String() + ".tmp " +
+		"--gensrc_output " + outputSrcJar.String() + ".tmp " +
 		"--resource_output " + outputResJar.String() + ".tmp"
 
 	rule := turbine
@@ -442,7 +443,9 @@
 	if ctx.Config().UseRBE() && ctx.Config().IsEnvTrue("RBE_TURBINE") {
 		rule = turbineRE
 		args["implicits"] = strings.Join(deps.Strings(), ",")
-		args["rbeOutputs"] = outputSrcJar.String() + ".tmp," + outputResJar.String() + ".tmp"
+		args["rbeOutputs"] = outputJar.String() + ".tmp," +
+			outputSrcJar.String() + ".tmp," +
+			outputResJar.String() + ".tmp"
 	}
 	ctx.Build(pctx, android.BuildParams{
 		Rule:            rule,
diff --git a/java/config/kotlin.go b/java/config/kotlin.go
index a83f87f..fc63f4d 100644
--- a/java/config/kotlin.go
+++ b/java/config/kotlin.go
@@ -34,6 +34,7 @@
 	pctx.SourcePathVariable("KotlinKaptJar", "external/kotlinc/lib/kotlin-annotation-processing.jar")
 	pctx.SourcePathVariable("KotlinAnnotationJar", "external/kotlinc/lib/annotations-13.0.jar")
 	pctx.SourcePathVariable("KotlinStdlibJar", KotlinStdlibJar)
+	pctx.SourcePathVariable("KotlinAbiGenPluginJar", "external/kotlinc/lib/jvm-abi-gen.jar")
 
 	// These flags silence "Illegal reflective access" warnings when running kapt in OpenJDK9+
 	pctx.StaticVariable("KaptSuppressJDK9Warnings", strings.Join([]string{
diff --git a/java/droidstubs.go b/java/droidstubs.go
index e7aeeb8..3b1f7c0 100644
--- a/java/droidstubs.go
+++ b/java/droidstubs.go
@@ -433,6 +433,10 @@
 	}
 }
 
+func metalavaUseRbe(ctx android.ModuleContext) bool {
+	return ctx.Config().UseRBE() && ctx.Config().IsEnvTrue("RBE_METALAVA")
+}
+
 func metalavaCmd(ctx android.ModuleContext, rule *android.RuleBuilder, javaVersion javaVersion, srcs android.Paths,
 	srcJarList android.Path, bootclasspath, classpath classpath, homeDir android.WritablePath) *android.RuleBuilderCommand {
 	rule.Command().Text("rm -rf").Flag(homeDir.String())
@@ -441,7 +445,7 @@
 	cmd := rule.Command()
 	cmd.FlagWithArg("ANDROID_PREFS_ROOT=", homeDir.String())
 
-	if ctx.Config().UseRBE() && ctx.Config().IsEnvTrue("RBE_METALAVA") {
+	if metalavaUseRbe(ctx) {
 		rule.Remoteable(android.RemoteRuleSupports{RBE: true})
 		execStrategy := ctx.Config().GetenvWithDefault("RBE_METALAVA_EXEC_STRATEGY", remoteexec.LocalExecStrategy)
 		labels := map[string]string{"type": "tool", "name": "metalava"}
@@ -665,7 +669,9 @@
 	}
 
 	// TODO(b/183630617): rewrapper doesn't support restat rules
-	// rule.Restat()
+	if !metalavaUseRbe(ctx) {
+		rule.Restat()
+	}
 
 	zipSyncCleanupCmd(rule, srcJarDir)
 
diff --git a/java/java.go b/java/java.go
index ecbbc32..713fe94 100644
--- a/java/java.go
+++ b/java/java.go
@@ -2060,15 +2060,22 @@
 		protoSrcPartition: android.ProtoSrcLabelPartition,
 	})
 
+	var javacopts []string
+	if m.properties.Javacflags != nil {
+		javacopts = append(javacopts, m.properties.Javacflags...)
+	}
+	epEnabled := m.properties.Errorprone.Enabled
+	// TODO(b/227504307) add configuration that depends on RUN_ERROR_PRONE environment variable
+	if Bool(epEnabled) {
+		javacopts = append(javacopts, m.properties.Errorprone.Javacflags...)
+	}
+
 	commonAttrs := &javaCommonAttributes{
 		Srcs: srcPartitions[javaSrcPartition],
 		Plugins: bazel.MakeLabelListAttribute(
 			android.BazelLabelForModuleDeps(ctx, m.properties.Plugins),
 		),
-	}
-
-	if m.properties.Javacflags != nil {
-		commonAttrs.Javacopts = bazel.MakeStringListAttribute(m.properties.Javacflags)
+		Javacopts: bazel.MakeStringListAttribute(javacopts),
 	}
 
 	depLabels := &javaDependencyLabels{}
diff --git a/java/kotlin.go b/java/kotlin.go
index ce79bae..818bbd5 100644
--- a/java/kotlin.go
+++ b/java/kotlin.go
@@ -28,17 +28,20 @@
 
 var kotlinc = pctx.AndroidRemoteStaticRule("kotlinc", android.RemoteRuleSupports{Goma: true},
 	blueprint.RuleParams{
-		Command: `rm -rf "$classesDir" "$srcJarDir" "$kotlinBuildFile" "$emptyDir" && ` +
-			`mkdir -p "$classesDir" "$srcJarDir" "$emptyDir" && ` +
+		Command: `rm -rf "$classesDir" "$headerClassesDir" "$srcJarDir" "$kotlinBuildFile" "$emptyDir" && ` +
+			`mkdir -p "$classesDir" "$headerClassesDir" "$srcJarDir" "$emptyDir" && ` +
 			`${config.ZipSyncCmd} -d $srcJarDir -l $srcJarDir/list -f "*.java" $srcJars && ` +
 			`${config.GenKotlinBuildFileCmd} --classpath "$classpath" --name "$name"` +
 			` --out_dir "$classesDir" --srcs "$out.rsp" --srcs "$srcJarDir/list"` +
 			` $commonSrcFilesArg --out "$kotlinBuildFile" && ` +
 			`${config.KotlincCmd} ${config.KotlincGlobalFlags} ` +
-			`${config.KotlincSuppressJDK9Warnings} ${config.JavacHeapFlags} ` +
-			`$kotlincFlags -jvm-target $kotlinJvmTarget -Xbuild-file=$kotlinBuildFile ` +
-			`-kotlin-home $emptyDir && ` +
-			`${config.SoongZipCmd} -jar -o $out -C $classesDir -D $classesDir && ` +
+			` ${config.KotlincSuppressJDK9Warnings} ${config.JavacHeapFlags} ` +
+			` $kotlincFlags -jvm-target $kotlinJvmTarget -Xbuild-file=$kotlinBuildFile ` +
+			` -kotlin-home $emptyDir ` +
+			` -Xplugin=${config.KotlinAbiGenPluginJar} ` +
+			` -P plugin:org.jetbrains.kotlin.jvm.abi:outputDir=$headerClassesDir && ` +
+			`${config.SoongZipCmd} -jar -o $out -C $classesDir -D $classesDir -write_if_changed && ` +
+			`${config.SoongZipCmd} -jar -o $headerJar -C $headerClassesDir -D $headerClassesDir -write_if_changed && ` +
 			`rm -rf "$srcJarDir"`,
 		CommandDeps: []string{
 			"${config.KotlincCmd}",
@@ -49,15 +52,17 @@
 			"${config.KotlinStdlibJar}",
 			"${config.KotlinTrove4jJar}",
 			"${config.KotlinAnnotationJar}",
+			"${config.KotlinAbiGenPluginJar}",
 			"${config.GenKotlinBuildFileCmd}",
 			"${config.SoongZipCmd}",
 			"${config.ZipSyncCmd}",
 		},
 		Rspfile:        "$out.rsp",
 		RspfileContent: `$in`,
+		Restat:         true,
 	},
 	"kotlincFlags", "classpath", "srcJars", "commonSrcFilesArg", "srcJarDir", "classesDir",
-	"kotlinJvmTarget", "kotlinBuildFile", "emptyDir", "name")
+	"headerClassesDir", "headerJar", "kotlinJvmTarget", "kotlinBuildFile", "emptyDir", "name")
 
 func kotlinCommonSrcsList(ctx android.ModuleContext, commonSrcFiles android.Paths) android.OptionalPath {
 	if len(commonSrcFiles) > 0 {
@@ -76,7 +81,7 @@
 }
 
 // kotlinCompile takes .java and .kt sources and srcJars, and compiles the .kt sources into a classes jar in outputFile.
-func kotlinCompile(ctx android.ModuleContext, outputFile android.WritablePath,
+func kotlinCompile(ctx android.ModuleContext, outputFile, headerOutputFile android.WritablePath,
 	srcFiles, commonSrcFiles, srcJars android.Paths,
 	flags javaBuilderFlags) {
 
@@ -97,17 +102,20 @@
 	}
 
 	ctx.Build(pctx, android.BuildParams{
-		Rule:        kotlinc,
-		Description: "kotlinc",
-		Output:      outputFile,
-		Inputs:      srcFiles,
-		Implicits:   deps,
+		Rule:           kotlinc,
+		Description:    "kotlinc",
+		Output:         outputFile,
+		ImplicitOutput: headerOutputFile,
+		Inputs:         srcFiles,
+		Implicits:      deps,
 		Args: map[string]string{
 			"classpath":         flags.kotlincClasspath.FormJavaClassPath(""),
 			"kotlincFlags":      flags.kotlincFlags,
 			"commonSrcFilesArg": commonSrcFilesArg,
 			"srcJars":           strings.Join(srcJars.Strings(), " "),
 			"classesDir":        android.PathForModuleOut(ctx, "kotlinc", "classes").String(),
+			"headerClassesDir":  android.PathForModuleOut(ctx, "kotlinc", "header_classes").String(),
+			"headerJar":         headerOutputFile.String(),
 			"srcJarDir":         android.PathForModuleOut(ctx, "kotlinc", "srcJars").String(),
 			"kotlinBuildFile":   android.PathForModuleOut(ctx, "kotlinc-build.xml").String(),
 			"emptyDir":          android.PathForModuleOut(ctx, "kotlinc", "empty").String(),
@@ -155,11 +163,11 @@
 	"classpath", "srcJars", "commonSrcFilesArg", "srcJarDir", "kaptDir", "kotlinJvmTarget",
 	"kotlinBuildFile", "name", "classesJarOut")
 
-// kotlinKapt performs Kotlin-compatible annotation processing.  It takes .kt and .java sources and srcjars, and runs
+// kotlinTurbineApt performs Kotlin-compatible annotation processing.  It takes .kt and .java sources and srcjars, and runs
 // annotation processors over all of them, producing a srcjar of generated code in outputFile.  The srcjar should be
 // added as an additional input to kotlinc and javac rules, and the javac rule should have annotation processing
 // disabled.
-func kotlinKapt(ctx android.ModuleContext, srcJarOutputFile, resJarOutputFile android.WritablePath,
+func kotlinTurbineApt(ctx android.ModuleContext, headerJarOutputFile, srcJarOutputFile, resJarOutputFile android.WritablePath,
 	srcFiles, commonSrcFiles, srcJars android.Paths,
 	flags javaBuilderFlags) {
 
@@ -223,7 +231,7 @@
 	// Then run turbine to perform annotation processing on the stubs and any .java srcFiles.
 	javaSrcFiles := srcFiles.FilterByExt(".java")
 	turbineSrcJars := append(android.Paths{kaptStubsJar}, srcJars...)
-	TurbineApt(ctx, srcJarOutputFile, resJarOutputFile, javaSrcFiles, turbineSrcJars, flags)
+	TurbineApt(ctx, headerJarOutputFile, srcJarOutputFile, resJarOutputFile, javaSrcFiles, turbineSrcJars, flags)
 }
 
 // kapt converts a list of key, value pairs into a base64 encoded Java serialization, which is what kapt expects.
diff --git a/java/kotlin_test.go b/java/kotlin_test.go
index d51bc04..f67a965 100644
--- a/java/kotlin_test.go
+++ b/java/kotlin_test.go
@@ -45,6 +45,10 @@
 	fooKotlinc := ctx.ModuleForTests("foo", "android_common").Rule("kotlinc")
 	fooJavac := ctx.ModuleForTests("foo", "android_common").Rule("javac")
 	fooJar := ctx.ModuleForTests("foo", "android_common").Output("combined/foo.jar")
+	fooHeaderJar := ctx.ModuleForTests("foo", "android_common").Output("turbine-combined/foo.jar")
+
+	fooKotlincClasses := fooKotlinc.Output
+	fooKotlincHeaderClasses := fooKotlinc.ImplicitOutput
 
 	if len(fooKotlinc.Inputs) != 2 || fooKotlinc.Inputs[0].String() != "a.java" ||
 		fooKotlinc.Inputs[1].String() != "b.kt" {
@@ -55,17 +59,21 @@
 		t.Errorf(`foo inputs %v != ["a.java"]`, fooJavac.Inputs)
 	}
 
-	if !strings.Contains(fooJavac.Args["classpath"], fooKotlinc.Output.String()) {
+	if !strings.Contains(fooJavac.Args["classpath"], fooKotlincHeaderClasses.String()) {
 		t.Errorf("foo classpath %v does not contain %q",
-			fooJavac.Args["classpath"], fooKotlinc.Output.String())
+			fooJavac.Args["classpath"], fooKotlincHeaderClasses.String())
 	}
 
-	if !inList(fooKotlinc.Output.String(), fooJar.Inputs.Strings()) {
+	if !inList(fooKotlincClasses.String(), fooJar.Inputs.Strings()) {
 		t.Errorf("foo jar inputs %v does not contain %q",
-			fooJar.Inputs.Strings(), fooKotlinc.Output.String())
+			fooJar.Inputs.Strings(), fooKotlincClasses.String())
 	}
 
-	fooHeaderJar := ctx.ModuleForTests("foo", "android_common").Output("turbine-combined/foo.jar")
+	if !inList(fooKotlincHeaderClasses.String(), fooHeaderJar.Inputs.Strings()) {
+		t.Errorf("foo header jar inputs %v does not contain %q",
+			fooHeaderJar.Inputs.Strings(), fooKotlincHeaderClasses.String())
+	}
+
 	bazHeaderJar := ctx.ModuleForTests("baz", "android_common").Output("turbine-combined/baz.jar")
 	barKotlinc := ctx.ModuleForTests("bar", "android_common").Rule("kotlinc")
 
@@ -147,22 +155,34 @@
 			t.Errorf("expected %q in turbine-apt implicits %v", kaptStubs.Output.String(), kotlinc.Implicits.Strings())
 		}
 
+		turbineAptSrcjarOutput := turbineApt.ImplicitOutputs[0]
+
 		// Test that the turbine-apt srcjar is a dependency of kotlinc and javac rules
-		if !inList(turbineApt.Output.String(), kotlinc.Implicits.Strings()) {
-			t.Errorf("expected %q in kotlinc implicits %v", turbineApt.Output.String(), kotlinc.Implicits.Strings())
+		if !inList(turbineAptSrcjarOutput.String(), kotlinc.Implicits.Strings()) {
+			t.Errorf("expected %q in kotlinc implicits %v", turbineAptSrcjarOutput.String(), kotlinc.Implicits.Strings())
 		}
-		if !inList(turbineApt.Output.String(), javac.Implicits.Strings()) {
-			t.Errorf("expected %q in javac implicits %v", turbineApt.Output.String(), javac.Implicits.Strings())
+		if !inList(turbineAptSrcjarOutput.String(), javac.Implicits.Strings()) {
+			t.Errorf("expected %q in javac implicits %v", turbineAptSrcjarOutput.String(), javac.Implicits.Strings())
 		}
 
 		// Test that the turbine-apt srcjar is extracted by the kotlinc and javac rules
-		if kotlinc.Args["srcJars"] != turbineApt.Output.String() {
-			t.Errorf("expected %q in kotlinc srcjars %v", turbineApt.Output.String(), kotlinc.Args["srcJars"])
+		if kotlinc.Args["srcJars"] != turbineAptSrcjarOutput.String() {
+			t.Errorf("expected %q in kotlinc srcjars %v", turbineAptSrcjarOutput.String(), kotlinc.Args["srcJars"])
 		}
-		if javac.Args["srcJars"] != turbineApt.Output.String() {
-			t.Errorf("expected %q in javac srcjars %v", turbineApt.Output.String(), kotlinc.Args["srcJars"])
+		if javac.Args["srcJars"] != turbineAptSrcjarOutput.String() {
+			t.Errorf("expected %q in javac srcjars %v", turbineAptSrcjarOutput.String(), kotlinc.Args["srcJars"])
 		}
 
+		// Test that the turbine-apt header jar is a dependency of the javac rules
+		turbineAptHeaderjarOutput := turbineApt.Output
+		android.AssertStringListContains(t, "javac dependency", javac.Implicits.Strings(), turbineAptHeaderjarOutput.String())
+		android.AssertStringDoesContain(t, "javac classpath", javac.Args["classpath"], turbineAptHeaderjarOutput.String())
+
+		// Test that the kotlinc header jar is a not a dependency of the javac rules
+		kotlincHeaderJarOutput := kotlinc.ImplicitOutput
+		android.AssertStringListDoesNotContain(t, "javac dependency", javac.Implicits.Strings(), kotlincHeaderJarOutput.String())
+		android.AssertStringDoesNotContain(t, "javac classpath", javac.Args["classpath"], kotlincHeaderJarOutput.String())
+
 		// Test that the processors are passed to kapt
 		expectedProcessorPath := "-P plugin:org.jetbrains.kotlin.kapt3:apclasspath=" + bar +
 			" -P plugin:org.jetbrains.kotlin.kapt3:apclasspath=" + baz
diff --git a/mk2rbc/expr.go b/mk2rbc/expr.go
index dc16d1d..54bb6d1 100644
--- a/mk2rbc/expr.go
+++ b/mk2rbc/expr.go
@@ -221,11 +221,9 @@
 }
 
 func (xi *interpolateExpr) transform(transformer func(expr starlarkExpr) starlarkExpr) starlarkExpr {
-	argsCopy := make([]starlarkExpr, len(xi.args))
-	for i, arg := range xi.args {
-		argsCopy[i] = arg.transform(transformer)
+	for i := range xi.args {
+		xi.args[i] = xi.args[i].transform(transformer)
 	}
-	xi.args = argsCopy
 	if replacement := transformer(xi); replacement != nil {
 		return replacement
 	} else {
@@ -591,11 +589,9 @@
 	if cx.object != nil {
 		cx.object = cx.object.transform(transformer)
 	}
-	argsCopy := make([]starlarkExpr, len(cx.args))
-	for i, arg := range cx.args {
-		argsCopy[i] = arg.transform(transformer)
+	for i := range cx.args {
+		cx.args[i] = cx.args[i].transform(transformer)
 	}
-	cx.args = argsCopy
 	if replacement := transformer(cx); replacement != nil {
 		return replacement
 	} else {
@@ -769,3 +765,35 @@
 	x, ok := expr.(*stringLiteralExpr)
 	return ok && x.literal == ""
 }
+
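+// negateExpr returns the logical negation of expr, inverting equality, membership and comparison
+// operators in place where possible and otherwise wrapping the expression in a notExpr.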
+func negateExpr(expr starlarkExpr) starlarkExpr {
+	switch typedExpr := expr.(type) {
+	case *notExpr:
+		return typedExpr.expr
+	case *inExpr:
+		typedExpr.isNot = !typedExpr.isNot
+		return typedExpr
+	case *eqExpr:
+		typedExpr.isEq = !typedExpr.isEq
+		return typedExpr
+	case *binaryOpExpr:
+		switch typedExpr.op {
+		case ">":
+			typedExpr.op = "<="
+			return typedExpr
+		case "<":
+			typedExpr.op = ">="
+			return typedExpr
+		case ">=":
+			typedExpr.op = "<"
+			return typedExpr
+		case "<=":
+			typedExpr.op = ">"
+			return typedExpr
+		default:
+			return &notExpr{expr: expr}
+		}
+	default:
+		return &notExpr{expr: expr}
+	}
+}
diff --git a/mk2rbc/mk2rbc.go b/mk2rbc/mk2rbc.go
index 950a1e5..8807437 100644
--- a/mk2rbc/mk2rbc.go
+++ b/mk2rbc/mk2rbc.go
@@ -86,7 +86,7 @@
 	"filter":                               &simpleCallParser{name: baseName + ".filter", returnType: starlarkTypeList},
 	"filter-out":                           &simpleCallParser{name: baseName + ".filter_out", returnType: starlarkTypeList},
 	"firstword":                            &firstOrLastwordCallParser{isLastWord: false},
-	"foreach":                              &foreachCallPaser{},
+	"foreach":                              &foreachCallParser{},
 	"if":                                   &ifCallParser{},
 	"info":                                 &makeControlFuncParser{name: baseName + ".mkinfo"},
 	"is-board-platform":                    &simpleCallParser{name: baseName + ".board_platform_is", returnType: starlarkTypeBool, addGlobals: true},
@@ -117,6 +117,17 @@
 	"wildcard": &simpleCallParser{name: baseName + ".expand_wildcard", returnType: starlarkTypeList},
 }
 
+// The same as knownFunctions, but returns a []starlarkNode instead of a starlarkExpr
+var knownNodeFunctions = map[string]interface {
+	parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) []starlarkNode
+}{
+	"eval":                      &evalNodeParser{},
+	"if":                        &ifCallNodeParser{},
+	"inherit-product":           &inheritProductCallParser{loadAlways: true},
+	"inherit-product-if-exists": &inheritProductCallParser{loadAlways: false},
+	"foreach":                   &foreachCallNodeParser{},
+}
+
 // These are functions that we don't implement conversions for, but
 // we allow seeing their definitions in the product config files.
 var ignoredDefines = map[string]bool{
@@ -846,15 +857,19 @@
 	return res
 }
 
-func (ctx *parseContext) handleInheritModule(v mkparser.Node, args *mkparser.MakeString, loadAlways bool) []starlarkNode {
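+// inheritProductCallParser converts statement-level inherit-product and inherit-product-if-exists
+// calls into inherit nodes.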
+type inheritProductCallParser struct {
+	loadAlways bool
+}
+
+func (p *inheritProductCallParser) parse(ctx *parseContext, v mkparser.Node, args *mkparser.MakeString) []starlarkNode {
 	args.TrimLeftSpaces()
 	args.TrimRightSpaces()
 	pathExpr := ctx.parseMakeString(v, args)
 	if _, ok := pathExpr.(*badExpr); ok {
 		return []starlarkNode{ctx.newBadNode(v, "Unable to parse argument to inherit")}
 	}
-	return ctx.handleSubConfig(v, pathExpr, loadAlways, func(im inheritedModule) starlarkNode {
-		return &inheritNode{im, loadAlways}
+	return ctx.handleSubConfig(v, pathExpr, p.loadAlways, func(im inheritedModule) starlarkNode {
+		return &inheritNode{im, p.loadAlways}
 	})
 }
 
@@ -873,19 +888,12 @@
 	//   $(error xxx)
 	//   $(call other-custom-functions,...)
 
-	// inherit-product(-if-exists) gets converted to a series of statements,
-	// not just a single expression like parseReference returns. So handle it
-	// separately at the beginning here.
-	if strings.HasPrefix(v.Name.Dump(), "call inherit-product,") {
-		args := v.Name.Clone()
-		args.ReplaceLiteral("call inherit-product,", "")
-		return ctx.handleInheritModule(v, args, true)
+	if name, args, ok := ctx.maybeParseFunctionCall(v, v.Name); ok {
+		if kf, ok := knownNodeFunctions[name]; ok {
+			return kf.parse(ctx, v, args)
+		}
 	}
-	if strings.HasPrefix(v.Name.Dump(), "call inherit-product-if-exists,") {
-		args := v.Name.Clone()
-		args.ReplaceLiteral("call inherit-product-if-exists,", "")
-		return ctx.handleInheritModule(v, args, false)
-	}
+
 	return []starlarkNode{&exprNode{expr: ctx.parseReference(v, v.Name)}}
 }
 
@@ -1030,49 +1038,19 @@
 		otherOperand = xLeft
 	}
 
-	not := func(expr starlarkExpr) starlarkExpr {
-		switch typedExpr := expr.(type) {
-		case *inExpr:
-			typedExpr.isNot = !typedExpr.isNot
-			return typedExpr
-		case *eqExpr:
-			typedExpr.isEq = !typedExpr.isEq
-			return typedExpr
-		case *binaryOpExpr:
-			switch typedExpr.op {
-			case ">":
-				typedExpr.op = "<="
-				return typedExpr
-			case "<":
-				typedExpr.op = ">="
-				return typedExpr
-			case ">=":
-				typedExpr.op = "<"
-				return typedExpr
-			case "<=":
-				typedExpr.op = ">"
-				return typedExpr
-			default:
-				return &notExpr{expr: expr}
-			}
-		default:
-			return &notExpr{expr: expr}
-		}
-	}
-
 	// If we've identified one of the operands as being a string literal, check
 	// for some special cases we can do to simplify the resulting expression.
 	if otherOperand != nil {
 		if stringOperand == "" {
 			if isEq {
-				return not(otherOperand)
+				return negateExpr(otherOperand)
 			} else {
 				return otherOperand
 			}
 		}
 		if stringOperand == "true" && otherOperand.typ() == starlarkTypeBool {
 			if !isEq {
-				return not(otherOperand)
+				return negateExpr(otherOperand)
 			} else {
 				return otherOperand
 			}
@@ -1228,6 +1206,37 @@
 		right: xValue, isEq: !negate}
 }
 
+func (ctx *parseContext) maybeParseFunctionCall(node mkparser.Node, ref *mkparser.MakeString) (name string, args *mkparser.MakeString, ok bool) {
+	ref.TrimLeftSpaces()
+	ref.TrimRightSpaces()
+
+	words := ref.SplitN(" ", 2)
+	if !words[0].Const() {
+		return "", nil, false
+	}
+
+	name = words[0].Dump()
+	args = mkparser.SimpleMakeString("", words[0].Pos())
+	if len(words) >= 2 {
+		args = words[1]
+	}
+	args.TrimLeftSpaces()
+	if name == "call" {
+		words = args.SplitN(",", 2)
+		if words[0].Empty() || !words[0].Const() {
+			return "", nil, false
+		}
+		name = words[0].Dump()
+		if len(words) < 2 {
+			args = &mkparser.MakeString{}
+		} else {
+			args = words[1]
+		}
+	}
+	ok = true
+	return
+}
+
 // parses $(...), returning an expression
 func (ctx *parseContext) parseReference(node mkparser.Node, ref *mkparser.MakeString) starlarkExpr {
 	ref.TrimLeftSpaces()
@@ -1242,7 +1251,7 @@
 
 	// If it is a single word, it can be a simple variable
 	// reference or a function call
-	if len(words) == 1 && !isMakeControlFunc(refDump) && refDump != "shell" {
+	if len(words) == 1 && !isMakeControlFunc(refDump) && refDump != "shell" && refDump != "eval" {
 		if strings.HasPrefix(refDump, soongNsPrefix) {
 			// TODO (asmundak): if we find many, maybe handle them.
 			return ctx.newBadExpr(node, "SOONG_CONFIG_ variables cannot be referenced, use soong_config_get instead: %s", refDump)
@@ -1281,28 +1290,14 @@
 		return ctx.newBadExpr(node, "unknown variable %s", refDump)
 	}
 
-	expr := &callExpr{name: words[0].Dump(), returnType: starlarkTypeUnknown}
-	args := mkparser.SimpleMakeString("", words[0].Pos())
-	if len(words) >= 2 {
-		args = words[1]
-	}
-	args.TrimLeftSpaces()
-	if expr.name == "call" {
-		words = args.SplitN(",", 2)
-		if words[0].Empty() || !words[0].Const() {
-			return ctx.newBadExpr(node, "cannot handle %s", refDump)
-		}
-		expr.name = words[0].Dump()
-		if len(words) < 2 {
-			args = &mkparser.MakeString{}
+	if name, args, ok := ctx.maybeParseFunctionCall(node, ref); ok {
+		if kf, found := knownFunctions[name]; found {
+			return kf.parse(ctx, node, args)
 		} else {
-			args = words[1]
+			return ctx.newBadExpr(node, "cannot handle invoking %s", name)
 		}
-	}
-	if kf, found := knownFunctions[expr.name]; found {
-		return kf.parse(ctx, node, args)
 	} else {
-		return ctx.newBadExpr(node, "cannot handle invoking %s", expr.name)
+		return ctx.newBadExpr(node, "cannot handle %s", refDump)
 	}
 }
 
@@ -1486,9 +1481,46 @@
 	}
 }
 
-type foreachCallPaser struct{}
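+// ifCallNodeParser converts a statement-level $(if cond,then[,else]) into a Starlark if/else,
+// dropping an empty else branch and negating the condition when only the else branch has contents.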
+type ifCallNodeParser struct{}
 
-func (p *foreachCallPaser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) starlarkExpr {
+func (p *ifCallNodeParser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) []starlarkNode {
+	words := args.Split(",")
+	if len(words) != 2 && len(words) != 3 {
+		return []starlarkNode{ctx.newBadNode(node, "if function should have 2 or 3 arguments, found "+strconv.Itoa(len(words)))}
+	}
+
+	ifn := &ifNode{expr: ctx.parseMakeString(node, words[0])}
+	cases := []*switchCase{
+		{
+			gate:  ifn,
+			nodes: ctx.parseNodeMakeString(node, words[1]),
+		},
+	}
+	if len(words) == 3 {
+		cases = append(cases, &switchCase{
+			gate:  &elseNode{},
+			nodes: ctx.parseNodeMakeString(node, words[2]),
+		})
+	}
+	if len(cases) == 2 {
+		if len(cases[1].nodes) == 0 {
+			// Remove else branch if it has no contents
+			cases = cases[:1]
+		} else if len(cases[0].nodes) == 0 {
+			// If the if branch has no contents but the else does,
+			// move them to the if and negate its condition
+			ifn.expr = negateExpr(ifn.expr)
+			cases[0].nodes = cases[1].nodes
+			cases = cases[:1]
+		}
+	}
+
+	return []starlarkNode{&switchNode{ssCases: cases}}
+}
+
+type foreachCallParser struct{}
+
+func (p *foreachCallParser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) starlarkExpr {
 	words := args.Split(",")
 	if len(words) != 3 {
 		return ctx.newBadExpr(node, "foreach function should have 3 arguments, found "+strconv.Itoa(len(words)))
@@ -1520,6 +1552,71 @@
 	}
 }
 
+func transformNode(node starlarkNode, transformer func(expr starlarkExpr) starlarkExpr) {
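+	// Recursively apply transformer to every expression embedded in this node and its children.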
+	switch a := node.(type) {
+	case *ifNode:
+		a.expr = a.expr.transform(transformer)
+	case *switchCase:
+		transformNode(a.gate, transformer)
+		for _, n := range a.nodes {
+			transformNode(n, transformer)
+		}
+	case *switchNode:
+		for _, n := range a.ssCases {
+			transformNode(n, transformer)
+		}
+	case *exprNode:
+		a.expr = a.expr.transform(transformer)
+	case *assignmentNode:
+		a.value = a.value.transform(transformer)
+	case *foreachNode:
+		a.list = a.list.transform(transformer)
+		for _, n := range a.actions {
+			transformNode(n, transformer)
+		}
+	}
+}
+
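+// foreachCallNodeParser converts a statement-level $(foreach var,list,text) into a Starlark for
+// loop, rewriting references to the loop variable inside the body.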
+type foreachCallNodeParser struct{}
+
+func (p *foreachCallNodeParser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) []starlarkNode {
+	words := args.Split(",")
+	if len(words) != 3 {
+		return []starlarkNode{ctx.newBadNode(node, "foreach function should have 3 arguments, found "+strconv.Itoa(len(words)))}
+	}
+	if !words[0].Const() || words[0].Empty() || !identifierFullMatchRegex.MatchString(words[0].Strings[0]) {
+		return []starlarkNode{ctx.newBadNode(node, "first argument to foreach function must be a simple string identifier")}
+	}
+
+	loopVarName := words[0].Strings[0]
+
+	list := ctx.parseMakeString(node, words[1])
+	if list.typ() != starlarkTypeList {
+		list = &callExpr{
+			name:       baseName + ".words",
+			returnType: starlarkTypeList,
+			args:       []starlarkExpr{list},
+		}
+	}
+
+	actions := ctx.parseNodeMakeString(node, words[2])
+	// TODO(colefaust): Replace transforming code with something more elegant
+	for _, action := range actions {
+		transformNode(action, func(expr starlarkExpr) starlarkExpr {
+			if varRefExpr, ok := expr.(*variableRefExpr); ok && varRefExpr.ref.name() == loopVarName {
+				return &identifierExpr{loopVarName}
+			}
+			return nil
+		})
+	}
+
+	return []starlarkNode{&foreachNode{
+		varName: loopVarName,
+		list:    list,
+		actions: actions,
+	}}
+}
+
 type wordCallParser struct{}
 
 func (p *wordCallParser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) starlarkExpr {
@@ -1630,6 +1727,31 @@
 	}
 }
 
+type evalNodeParser struct{}
+
+func (p *evalNodeParser) parse(ctx *parseContext, node mkparser.Node, args *mkparser.MakeString) []starlarkNode {
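+	// Re-parse the eval argument as makefile source; only a single simple assignment or a
+	// comment is supported.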
+	parser := mkparser.NewParser("Eval expression", strings.NewReader(args.Dump()))
+	nodes, errs := parser.Parse()
+	if errs != nil {
+		return []starlarkNode{ctx.newBadNode(node, "Unable to parse eval statement")}
+	}
+
+	if len(nodes) == 0 {
+		return []starlarkNode{}
+	} else if len(nodes) == 1 {
+		switch n := nodes[0].(type) {
+		case *mkparser.Assignment:
+			if n.Name.Const() {
+				return ctx.handleAssignment(n)
+			}
+		case *mkparser.Comment:
+			return []starlarkNode{&commentNode{strings.TrimSpace("#" + n.Comment)}}
+		}
+	}
+
+	return []starlarkNode{ctx.newBadNode(node, "Eval expression too complex; only assignments and comments are supported")}
+}
+
 func (ctx *parseContext) parseMakeString(node mkparser.Node, mk *mkparser.MakeString) starlarkExpr {
 	if mk.Const() {
 		return &stringLiteralExpr{mk.Dump()}
@@ -1654,6 +1776,16 @@
 	return NewInterpolateExpr(parts)
 }
 
+func (ctx *parseContext) parseNodeMakeString(node mkparser.Node, mk *mkparser.MakeString) []starlarkNode {
+	// Discard any constant values in the make string, as they would be top level
+	// string literals and do nothing.
+	result := make([]starlarkNode, 0, len(mk.Variables))
+	for i := range mk.Variables {
+		result = append(result, ctx.handleVariable(&mk.Variables[i])...)
+	}
+	return result
+}
+
 // Handles the statements whose treatment is the same in all contexts: comment,
 // assignment, variable (which is a macro call in reality) and all constructs that
 // do not handle in any context ('define directive and any unrecognized stuff).
@@ -1698,6 +1830,7 @@
 	if result == nil {
 		result = []starlarkNode{}
 	}
+
 	return result
 }
 
diff --git a/mk2rbc/mk2rbc_test.go b/mk2rbc/mk2rbc_test.go
index 2b447e3..3698813 100644
--- a/mk2rbc/mk2rbc_test.go
+++ b/mk2rbc/mk2rbc_test.go
@@ -1313,6 +1313,11 @@
 FOREACH_WITH_IF := $(foreach module,\
   $(BOOT_KERNEL_MODULES_LIST),\
   $(if $(filter $(module),foo.ko),,$(error module "$(module)" has an error!)))
+
+# Same as above, but not assigning it to a variable allows it to be converted to statements
+$(foreach module,\
+  $(BOOT_KERNEL_MODULES_LIST),\
+  $(if $(filter $(module),foo.ko),,$(error module "$(module)" has an error!)))
 `,
 		expected: `load("//build/make/core:product_config.rbc", "rblf")
 
@@ -1324,6 +1329,10 @@
   g["BOOT_KERNEL_MODULES_LIST"] += ["bar.ko"]
   g["BOOT_KERNEL_MODULES_FILTER_2"] = ["%%/%s" % m for m in g["BOOT_KERNEL_MODULES_LIST"]]
   g["FOREACH_WITH_IF"] = [("" if rblf.filter(module, "foo.ko") else rblf.mkerror("product.mk", "module \"%s\" has an error!" % module)) for module in g["BOOT_KERNEL_MODULES_LIST"]]
+  # Same as above, but not assigning it to a variable allows it to be converted to statements
+  for module in g["BOOT_KERNEL_MODULES_LIST"]:
+    if not rblf.filter(module, "foo.ko"):
+      rblf.mkerror("product.mk", "module \"%s\" has an error!" % module)
 `,
 	},
 	{
@@ -1474,6 +1483,34 @@
   
 `,
 	},
+	{
+		desc:   "Evals",
+		mkname: "product.mk",
+		in: `
+$(eval)
+$(eval MY_VAR := foo)
+$(eval # This is a test of eval functions)
+$(eval $(TOO_COMPLICATED) := bar)
+$(foreach x,$(MY_LIST_VAR), \
+  $(eval PRODUCT_COPY_FILES += foo/bar/$(x):$(TARGET_COPY_OUT_VENDOR)/etc/$(x)) \
+  $(if $(MY_OTHER_VAR),$(eval PRODUCT_COPY_FILES += $(MY_OTHER_VAR):foo/bar/$(x))) \
+)
+
+`,
+		expected: `load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+  g["MY_VAR"] = "foo"
+  # This is a test of eval functions
+  rblf.mk2rbc_error("product.mk:5", "Eval expression too complex; only assignments and comments are supported")
+  for x in rblf.words(g.get("MY_LIST_VAR", "")):
+    rblf.setdefault(handle, "PRODUCT_COPY_FILES")
+    cfg["PRODUCT_COPY_FILES"] += ("foo/bar/%s:%s/etc/%s" % (x, g.get("TARGET_COPY_OUT_VENDOR", ""), x)).split()
+    if g.get("MY_OTHER_VAR", ""):
+      cfg["PRODUCT_COPY_FILES"] += ("%s:foo/bar/%s" % (g.get("MY_OTHER_VAR", ""), x)).split()
+`,
+	},
 }
 
 var known_variables = []struct {
diff --git a/mk2rbc/node.go b/mk2rbc/node.go
index 9d5af91..c0c4c98 100644
--- a/mk2rbc/node.go
+++ b/mk2rbc/node.go
@@ -294,3 +294,28 @@
 		ssCase.emit(gctx)
 	}
 }
+
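+// foreachNode represents a Starlark for loop: "for varName in list:" with actions as the body.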
+type foreachNode struct {
+	varName string
+	list    starlarkExpr
+	actions []starlarkNode
+}
+
+func (f *foreachNode) emit(gctx *generationContext) {
+	gctx.newLine()
+	gctx.writef("for %s in ", f.varName)
+	f.list.emit(gctx)
+	gctx.write(":")
+	gctx.indentLevel++
+	hasStatements := false
+	for _, a := range f.actions {
+		if _, ok := a.(*commentNode); !ok {
+			hasStatements = true
+		}
+		a.emit(gctx)
+	}
+	if !hasStatements {
+		gctx.emitPass()
+	}
+	gctx.indentLevel--
+}
diff --git a/rust/config/allowed_list.go b/rust/config/allowed_list.go
index 30700dd..802e1da 100644
--- a/rust/config/allowed_list.go
+++ b/rust/config/allowed_list.go
@@ -28,6 +28,7 @@
 		"prebuilts/rust",
 		"system/core/debuggerd/rust",
 		"system/core/libstats/pull_rust",
+		"system/core/trusty/libtrusty-rs",
 		"system/extras/profcollectd",
 		"system/extras/simpleperf",
 		"system/hardware/interfaces/keystore2",