Merge changes from topic "am-258f4bedd45345c3b5a7f530457db827" into android15-tests-dev am: b2113b08ef

Original change: https://android-review.googlesource.com/c/platform/build/+/3364093

Change-Id: If0df0a48e1bccae93d48843438af6a83924491ca
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/Changes.md b/Changes.md
index fc15e60..9f2449c 100644
--- a/Changes.md
+++ b/Changes.md
@@ -43,14 +43,9 @@
 The path set when running builds now makes the `python` executable point to python 3,
 whereas on previous versions it pointed to python 2. If you still have python 2 scripts,
 you can change the shebang line to use `python2` explicitly. This only applies for
-scripts run directly from makefiles, or from soong genrules. This behavior can be
-temporarily overridden by setting the `BUILD_BROKEN_PYTHON_IS_PYTHON2` environment
-variable to `true`. It's only an environment variable and not a product config variable
-because product config sometimes calls python code.
+scripts run directly from makefiles, or from soong genrules.
 
-In addition, `python_*` soong modules no longer allow python 2. This can be temporarily
-overridden by setting the `BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES` product configuration
-variable to `true`.
+In addition, `python_*` soong modules no longer allow python 2.
 
 Python 2 is slated for complete removal in V.
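For example, a legacy script that still needs Python 2 can pin its interpreter explicitly in the shebang (a minimal sketch; the script itself is hypothetical):

```python
#!/usr/bin/env python2
# Hypothetical legacy script: the explicit python2 shebang keeps it on Python 2
# even though `python` on the build PATH now resolves to Python 3.
print("still running under Python 2")
```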
 
diff --git a/backported_fixes/Android.bp b/backported_fixes/Android.bp
new file mode 100644
index 0000000..a20f3fc
--- /dev/null
+++ b/backported_fixes/Android.bp
@@ -0,0 +1,115 @@
+// Copyright 2024 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+    default_team: "trendy_team_android_media_reliability",
+}
+
+genrule {
+    name: "applied_backported_fixes",
+    tools: ["applied_backported_fixes_main"],
+    srcs: [":applied_backported_fix_binpbs"],
+    out: ["applied_backported_fixes.prop"],
+    cmd: "$(location applied_backported_fixes_main)" +
+        " -p $(location applied_backported_fixes.prop)" +
+        " $(in)",
+}
+
+java_library {
+    name: "backported_fixes_proto",
+    srcs: [
+        "backported_fixes.proto",
+    ],
+    host_supported: true,
+}
+
+java_library {
+    name: "backported_fixes_common",
+    srcs: ["src/java/com/android/build/backportedfixes/common/*.java"],
+    static_libs: [
+        "backported_fixes_proto",
+        "guava",
+    ],
+    host_supported: true,
+}
+
+java_test_host {
+    name: "backported_fixes_common_test",
+    srcs: ["tests/java/com/android/build/backportedfixes/common/*.java"],
+    static_libs: [
+        "backported_fixes_common",
+        "backported_fixes_proto",
+        "junit",
+        "truth",
+        "truth-liteproto-extension",
+        "truth-proto-extension",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    test_suites: ["general-tests"],
+}
+
+java_library {
+    name: "applied_backported_fixes_lib",
+    srcs: ["src/java/com/android/build/backportedfixes/*.java"],
+    static_libs: [
+        "backported_fixes_common",
+        "backported_fixes_proto",
+        "jcommander",
+        "guava",
+    ],
+    host_supported: true,
+}
+
+java_binary_host {
+    name: "applied_backported_fixes_main",
+    main_class: "com.android.build.backportedfixes.Main",
+    static_libs: [
+        "applied_backported_fixes_lib",
+    ],
+}
+
+java_test_host {
+    name: "applied_backported_fixes_test",
+    srcs: ["tests/java/com/android/build/backportedfixes/*.java"],
+    static_libs: [
+        "applied_backported_fixes_lib",
+        "backported_fixes_proto",
+        "junit",
+        "truth",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    test_suites: ["general-tests"],
+}
+
+gensrcs {
+    name: "applied_backported_fix_binpbs",
+    tools: ["aprotoc"],
+    srcs: [
+        "applied_fixes/*.txtpb",
+    ],
+    tool_files: [
+        "backported_fixes.proto",
+    ],
+    output_extension: "binpb",
+    cmd: "$(location aprotoc)  " +
+        " --encode=com.android.build.backportedfixes.BackportedFix" +
+        "  $(location backported_fixes.proto)" +
+        " < $(in)" +
+        " > $(out); echo $(out)",
+}
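The `gensrcs` rule above encodes each `*.txtpb` text proto into a binary proto by piping it through `aprotoc --encode`. A rough standalone equivalent, assuming a stock `protoc` on the PATH and illustrative file paths:

```python
import subprocess

def encode_fix(txtpb_path: str, binpb_path: str,
               proto_path: str = "backported_fixes.proto") -> None:
    """Encodes a BackportedFix text proto into its binary form, like the gensrcs cmd."""
    with open(txtpb_path, "rb") as src, open(binpb_path, "wb") as dst:
        subprocess.run(
            ["protoc",
             "--encode=com.android.build.backportedfixes.BackportedFix",
             proto_path],
            stdin=src, stdout=dst, check=True)
```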
diff --git a/backported_fixes/OWNERS b/backported_fixes/OWNERS
new file mode 100644
index 0000000..ac176bf
--- /dev/null
+++ b/backported_fixes/OWNERS
@@ -0,0 +1,3 @@
+essick@google.com
+nchalko@google.com
+portmannc@google.com
diff --git a/backported_fixes/applied_fixes/ki350037023.txtpb b/backported_fixes/applied_fixes/ki350037023.txtpb
new file mode 100644
index 0000000..456a7ae
--- /dev/null
+++ b/backported_fixes/applied_fixes/ki350037023.txtpb
@@ -0,0 +1,19 @@
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# proto-file: ../backported_fixes.proto
+# proto-message: BackportedFix
+
+known_issue: 350037023
+alias: 1
diff --git a/backported_fixes/backported_fixes.proto b/backported_fixes/backported_fixes.proto
new file mode 100644
index 0000000..91618ee
--- /dev/null
+++ b/backported_fixes/backported_fixes.proto
@@ -0,0 +1,37 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto2";
+
+package com.android.build.backportedfixes;
+
+option java_multiple_files = true;
+
+// A list of backported fixes.
+message BackportedFixes {
+  repeated BackportedFix fixes = 1;
+}
+
+// A known issue approved for reporting via Build.getBackportedFixStatus.
+message BackportedFix {
+
+  // The issue id from the public bug tracker
+  // https://issuetracker.google.com/issues/{known_issue}
+  optional int64 known_issue = 1;
+  // The alias for the known issue.
+  // 1 - 1023 are valid aliases
+  // Must be unique across all backported fixes.
+  optional int32 alias = 2;
+}
+
diff --git a/backported_fixes/src/java/com/android/build/backportedfixes/Main.java b/backported_fixes/src/java/com/android/build/backportedfixes/Main.java
new file mode 100644
index 0000000..79148cc
--- /dev/null
+++ b/backported_fixes/src/java/com/android/build/backportedfixes/Main.java
@@ -0,0 +1,79 @@
+
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.build.backportedfixes;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+import com.android.build.backportedfixes.common.ClosableCollection;
+import com.android.build.backportedfixes.common.Parser;
+
+import com.beust.jcommander.JCommander;
+import com.beust.jcommander.Parameter;
+import com.beust.jcommander.converters.FileConverter;
+import com.google.common.io.Files;
+
+import java.io.File;
+import java.io.PrintWriter;
+import java.io.Writer;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+public final class Main {
+    @Parameter(description = "BackportedFix proto binary files", converter = FileConverter.class,
+            required = true)
+    List<File> fixFiles;
+    @Parameter(description = "The file to write the property value to.",
+            names = {"--property_file", "-p"}, converter = FileConverter.class, required = true)
+    File propertyFile;
+
+    public static void main(String... argv) throws Exception {
+        Main main = new Main();
+        JCommander.newBuilder().addObject(main).build().parse(argv);
+        main.run();
+    }
+
+    Main() {
+    }
+
+    private void run() throws Exception {
+        try (var fixStreams = ClosableCollection.wrap(Parser.getFileInputStreams(fixFiles));
+             var out = Files.newWriter(propertyFile, UTF_8)) {
+            var fixes = Parser.parseBackportedFixes(fixStreams.getCollection());
+            writeFixesAsAliasBitSet(fixes, out);
+        }
+    }
+
+    static void writeFixesAsAliasBitSet(BackportedFixes fixes, Writer out) {
+        PrintWriter printWriter = new PrintWriter(out);
+        printWriter.println("# The following backported fixes have been applied");
+        for (var f : fixes.getFixesList()) {
+            printWriter.printf("# https://issuetracker.google.com/issues/%d with alias %d",
+                    f.getKnownIssue(), f.getAlias());
+            printWriter.println();
+        }
+        var bsArray = Parser.getBitSetArray(
+                fixes.getFixesList().stream().mapToInt(BackportedFix::getAlias).toArray());
+        String bsString = Arrays.stream(bsArray).mapToObj(Long::toString).collect(
+                Collectors.joining(","));
+        printWriter.printf("ro.build.backported_fixes.alias_bitset.long_list=%s", bsString);
+        printWriter.println();
+        if (printWriter.checkError()) {
+            throw new RuntimeException("There was an error writing to " + out.toString());
+        }
+    }
+}
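`writeFixesAsAliasBitSet` above packs each fix's alias into a `BitSet` and writes the backing 64-bit words as a comma-separated property value. A minimal Python sketch of the same encoding (the function name is illustrative):

```python
def alias_bitset_longs(aliases: list[int]) -> list[int]:
    """Packs alias bit positions into 64-bit words, mirroring BitSet.toLongArray()."""
    if not aliases:
        return []
    words = [0] * (max(aliases) // 64 + 1)
    for alias in aliases:
        words[alias // 64] |= 1 << (alias % 64)
    return words

# Aliases 1, 65 and 67 set bits in two words -> [2, 10], which is emitted as
# ro.build.backported_fixes.alias_bitset.long_list=2,10
print(alias_bitset_longs([1, 65, 67]))
```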
diff --git a/backported_fixes/src/java/com/android/build/backportedfixes/common/ClosableCollection.java b/backported_fixes/src/java/com/android/build/backportedfixes/common/ClosableCollection.java
new file mode 100644
index 0000000..75b6730
--- /dev/null
+++ b/backported_fixes/src/java/com/android/build/backportedfixes/common/ClosableCollection.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.build.backportedfixes.common;
+
+import com.google.common.collect.ImmutableList;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+/** An AutoCloseable holder for a collection of AutoCloseables. */
+public final class ClosableCollection<T extends AutoCloseable, C extends Collection<T>> implements
+        AutoCloseable {
+    C source;
+
+    /** Makes the collection AutoCloseable. */
+    public static <T extends AutoCloseable, C extends Collection<T>> ClosableCollection<T, C> wrap(
+            C source) {
+        return new ClosableCollection<>(source);
+    }
+
+    private ClosableCollection(C source) {
+        this.source = source;
+    }
+
+    /** Get the source collection. */
+    public C getCollection() {
+        return source;
+    }
+
+    /**
+     * Closes each item in the collection.
+     *
+     * @throws Exception if any close throws an exception; a new exception is thrown with
+     *                   all the exceptions thrown while closing the items added as
+     *                   suppressed exceptions.
+     */
+    @Override
+    public void close() throws Exception {
+        var failures = new ArrayList<Exception>();
+        for (T t : source) {
+            try {
+                t.close();
+            } catch (Exception e) {
+                failures.add(e);
+            }
+        }
+        if (!failures.isEmpty()) {
+            Exception e = new Exception(
+                    "%d of %d failed while closing".formatted(failures.size(), source.size()));
+            failures.forEach(e::addSuppressed);
+            throw e;
+        }
+    }
+}
diff --git a/backported_fixes/src/java/com/android/build/backportedfixes/common/Parser.java b/backported_fixes/src/java/com/android/build/backportedfixes/common/Parser.java
new file mode 100644
index 0000000..6b08b8f
--- /dev/null
+++ b/backported_fixes/src/java/com/android/build/backportedfixes/common/Parser.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.build.backportedfixes.common;
+
+import com.android.build.backportedfixes.BackportedFix;
+import com.android.build.backportedfixes.BackportedFixes;
+
+import com.google.common.collect.ImmutableList;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.BitSet;
+import java.util.List;
+
+
+/** Static utilities for working with {@link BackportedFixes}. */
+public final class Parser {
+
+    /** Creates a list of FileInputStreams for a list of files. */
+    public static ImmutableList<FileInputStream> getFileInputStreams(List<File> fixFiles) throws
+            FileNotFoundException {
+        var streams = ImmutableList.<FileInputStream>builder();
+        for (var f : fixFiles) {
+            streams.add(new FileInputStream(f));
+        }
+        return streams.build();
+    }
+
+    /** Converts a list of backported fix aliases into a long array representing a {@link BitSet} */
+    public static long[] getBitSetArray(int[] aliases) {
+        BitSet bs = new BitSet();
+        for (int a : aliases) {
+            bs.set(a);
+        }
+        return bs.toLongArray();
+    }
+
+    /**
+     * Creates a {@link BackportedFixes} from a list of {@link BackportedFix} binary proto streams.
+     */
+    public static BackportedFixes parseBackportedFixes(List<? extends InputStream> fixStreams)
+            throws
+            IOException {
+        var fixes = BackportedFixes.newBuilder();
+        for (var s : fixStreams) {
+            BackportedFix fix = BackportedFix.parseFrom(s);
+            fixes.addFixes(fix);
+            s.close();
+        }
+        return fixes.build();
+    }
+
+    private Parser() {
+    }
+}
diff --git a/backported_fixes/tests/java/com/android/build/backportedfixes/MainTest.java b/backported_fixes/tests/java/com/android/build/backportedfixes/MainTest.java
new file mode 100644
index 0000000..84061e1
--- /dev/null
+++ b/backported_fixes/tests/java/com/android/build/backportedfixes/MainTest.java
@@ -0,0 +1,64 @@
+
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.build.backportedfixes;
+
+import com.google.common.truth.Truth;
+
+import org.junit.Test;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+/** Tests for {@link Main}. */
+public class MainTest {
+
+
+    @Test
+    public void writeFixesAsAliasBitSet_default() {
+        BackportedFixes fixes = BackportedFixes.newBuilder().build();
+        var result = new StringWriter();
+
+        Main.writeFixesAsAliasBitSet(fixes, new PrintWriter(result));
+
+        Truth.assertThat(result.toString())
+                .isEqualTo("""
+                        # The following backported fixes have been applied
+                        ro.build.backported_fixes.alias_bitset.long_list=
+                        """);
+    }
+
+    @Test
+    public void writeFixesAsAliasBitSet_some() {
+        BackportedFixes fixes = BackportedFixes.newBuilder()
+                .addFixes(BackportedFix.newBuilder().setKnownIssue(1234L).setAlias(1))
+                .addFixes(BackportedFix.newBuilder().setKnownIssue(3L).setAlias(65))
+                .addFixes(BackportedFix.newBuilder().setKnownIssue(4L).setAlias(67))
+                .build();
+        var result = new StringWriter();
+
+        Main.writeFixesAsAliasBitSet(fixes, new PrintWriter(result));
+
+        Truth.assertThat(result.toString())
+                .isEqualTo("""
+                        # The following backported fixes have been applied
+                        # https://issuetracker.google.com/issues/1234 with alias 1
+                        # https://issuetracker.google.com/issues/3 with alias 65
+                        # https://issuetracker.google.com/issues/4 with alias 67
+                        ro.build.backported_fixes.alias_bitset.long_list=2,10
+                        """);
+    }
+}
diff --git a/backported_fixes/tests/java/com/android/build/backportedfixes/common/CloseableCollectionTest.java b/backported_fixes/tests/java/com/android/build/backportedfixes/common/CloseableCollectionTest.java
new file mode 100644
index 0000000..d3d84a8
--- /dev/null
+++ b/backported_fixes/tests/java/com/android/build/backportedfixes/common/CloseableCollectionTest.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.build.backportedfixes.common;
+
+import com.google.common.collect.ImmutableSet;
+import com.google.common.truth.Correspondence;
+import com.google.common.truth.Truth;
+
+import org.junit.Test;
+
+/** Tests for {@link ClosableCollection}. */
+public class CloseableCollectionTest {
+
+    private static class FakeCloseable implements AutoCloseable {
+        private final boolean throwOnClose;
+        private final String name;
+
+
+        private boolean isClosed = false;
+
+        private FakeCloseable(String name, boolean throwOnClose) {
+            this.name = name;
+            this.throwOnClose = throwOnClose;
+
+        }
+
+        private static FakeCloseable named(String name) {
+            return new FakeCloseable(name, false);
+        }
+
+        private static FakeCloseable failing(String name) {
+            return new FakeCloseable(name, true);
+        }
+
+        public boolean isClosed() {
+            return isClosed;
+        }
+
+        @Override
+        public void close() throws Exception {
+            if (throwOnClose) {
+                throw new Exception(name + " close failed");
+            }
+            isClosed = true;
+        }
+    }
+
+
+    @Test
+    public void bothClosed() throws Exception {
+        var c = ImmutableSet.of(FakeCloseable.named("foo"), FakeCloseable.named("bar"));
+        try (var cc = ClosableCollection.wrap(c);) {
+            Truth.assertThat(cc.getCollection()).isSameInstanceAs(c);
+        }
+        Truth.assertThat(c)
+                .comparingElementsUsing(
+                        Correspondence.transforming(FakeCloseable::isClosed, "is closed"))
+                .containsExactly(true, true);
+    }
+
+    @Test
+    public void bothFailed() {
+        var c = ImmutableSet.of(FakeCloseable.failing("foo"), FakeCloseable.failing("bar"));
+
+        try {
+            try (var cc = ClosableCollection.wrap(c);) {
+                Truth.assertThat(cc.getCollection()).isSameInstanceAs(c);
+            }
+        } catch (Exception e) {
+            Truth.assertThat(e).hasMessageThat().isEqualTo("2 of 2 failed while closing");
+            Truth.assertThat(e.getSuppressed())
+                    .asList()
+                    .comparingElementsUsing(
+                            Correspondence.transforming(Exception::getMessage, "has a message of "))
+                    .containsExactly("foo close failed", "bar close failed");
+        }
+    }
+}
diff --git a/backported_fixes/tests/java/com/android/build/backportedfixes/common/ParserTest.java b/backported_fixes/tests/java/com/android/build/backportedfixes/common/ParserTest.java
new file mode 100644
index 0000000..444e694
--- /dev/null
+++ b/backported_fixes/tests/java/com/android/build/backportedfixes/common/ParserTest.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.build.backportedfixes.common;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
+
+import com.android.build.backportedfixes.BackportedFix;
+import com.android.build.backportedfixes.BackportedFixes;
+
+import com.google.common.collect.ImmutableList;
+
+import org.junit.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.nio.file.Files;
+
+/** Tests for {@link Parser}.*/
+public class ParserTest {
+
+    @Test
+    public void getFileInputStreams() throws IOException {
+        var results = Parser.getFileInputStreams(
+                ImmutableList.of(Files.createTempFile("test", null).toFile()));
+        assertThat(results).isNotEmpty();
+    }
+
+
+    @Test
+    public void getBitSetArray_empty() {
+        var results = Parser.getBitSetArray(new int[]{});
+        assertThat(results).isEmpty();
+    }
+
+    @Test
+    public void getBitSetArray_2_3_64() {
+        var results = Parser.getBitSetArray(new int[]{2,3,64});
+        assertThat(results).asList().containsExactly(12L,1L).inOrder();
+    }
+
+    @Test
+    public void parseBackportedFixes_empty() throws IOException {
+        var result = Parser.parseBackportedFixes(ImmutableList.of());
+        assertThat(result).isEqualTo(BackportedFixes.getDefaultInstance());
+    }
+
+    @Test
+    public void parseBackportedFixes_oneBlank() throws IOException {
+        var result = Parser.parseBackportedFixes(
+                ImmutableList.of(inputStream(BackportedFix.getDefaultInstance())));
+
+        assertThat(result).isEqualTo(
+                BackportedFixes.newBuilder()
+                        .addFixes(BackportedFix.getDefaultInstance())
+                        .build());
+    }
+
+    @Test
+    public void parseBackportedFixes_two() throws IOException {
+        BackportedFix ki123 = BackportedFix.newBuilder()
+                .setKnownIssue(123)
+                .setAlias(1)
+                .build();
+        BackportedFix ki456 = BackportedFix.newBuilder()
+                .setKnownIssue(456)
+                .setAlias(2)
+                .build();
+        var result = Parser.parseBackportedFixes(
+                ImmutableList.of(inputStream(ki123), inputStream(ki456)));
+        assertThat(result).isEqualTo(
+                BackportedFixes.newBuilder()
+                        .addFixes(ki123)
+                        .addFixes(ki456)
+                        .build());
+    }
+
+    private static ByteArrayInputStream inputStream(BackportedFix f) {
+        return new ByteArrayInputStream(f.toByteArray());
+    }
+}
diff --git a/ci/Android.bp b/ci/Android.bp
index 104f517..3f28be4 100644
--- a/ci/Android.bp
+++ b/ci/Android.bp
@@ -25,7 +25,7 @@
         "build_test_suites_test.py",
     ],
     libs: [
-        "build_test_suites",
+        "build_test_suites_lib",
         "pyfakefs",
         "ci_test_lib",
     ],
@@ -56,7 +56,7 @@
         "build_test_suites_local_test.py",
     ],
     libs: [
-        "build_test_suites",
+        "build_test_suites_lib",
         "pyfakefs",
         "ci_test_lib",
     ],
@@ -71,11 +71,60 @@
     },
 }
 
-python_library_host {
+python_test_host {
+    name: "optimized_targets_test",
+    main: "optimized_targets_test.py",
+    pkg_path: "testdata",
+    srcs: [
+        "optimized_targets_test.py",
+    ],
+    libs: [
+        "build_test_suites_lib",
+        "pyfakefs",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    data: [
+        ":py3-cmd",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+}
+
+python_binary_host {
     name: "build_test_suites",
     srcs: [
         "build_test_suites.py",
         "optimized_targets.py",
+        "test_mapping_module_retriever.py",
+        "build_context.py",
+        "test_discovery_agent.py",
+        "metrics_agent.py",
+        "buildbot.py",
+    ],
+    main: "build_test_suites.py",
+    libs: [
+        "soong-metrics-proto-py",
+    ],
+}
+
+python_library_host {
+    name: "build_test_suites_lib",
+    srcs: [
+        "build_test_suites.py",
+        "optimized_targets.py",
+        "test_mapping_module_retriever.py",
+        "build_context.py",
+        "test_discovery_agent.py",
+        "metrics_agent.py",
+        "buildbot.py",
+    ],
+    libs: [
+        "soong-metrics-proto-py",
     ],
 }
 
diff --git a/ci/build_context.py b/ci/build_context.py
new file mode 100644
index 0000000..c7a1def
--- /dev/null
+++ b/ci/build_context.py
@@ -0,0 +1,67 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Container class for build context with utility functions."""
+
+import re
+
+
+class BuildContext:
+
+  def __init__(self, build_context_dict: dict[str, any]):
+    self.enabled_build_features = set()
+    for opt in build_context_dict.get('enabledBuildFeatures', []):
+      self.enabled_build_features.add(opt.get('name'))
+    self.test_infos = set()
+    for test_info_dict in build_context_dict.get('testContext', dict()).get(
+        'testInfos', []
+    ):
+      self.test_infos.add(self.TestInfo(test_info_dict))
+
+  def build_target_used(self, target: str) -> bool:
+    return any(test.build_target_used(target) for test in self.test_infos)
+
+  class TestInfo:
+
+    _DOWNLOAD_OPTS = {
+        'test-config-only-zip',
+        'test-zip-file-filter',
+        'extra-host-shared-lib-zip',
+        'sandbox-tests-zips',
+        'additional-files-filter',
+        'cts-package-name',
+    }
+
+    def __init__(self, test_info_dict: dict[str, any]):
+      self.is_test_mapping = False
+      self.test_mapping_test_groups = set()
+      self.file_download_options = set()
+      self.name = test_info_dict.get('name')
+      self.command = test_info_dict.get('command')
+      self.extra_options = test_info_dict.get('extraOptions')
+      for opt in test_info_dict.get('extraOptions', []):
+        key = opt.get('key')
+        if key == 'test-mapping-test-group':
+          self.is_test_mapping = True
+          self.test_mapping_test_groups.update(opt.get('values', set()))
+
+        if key in self._DOWNLOAD_OPTS:
+          self.file_download_options.update(opt.get('values', set()))
+
+    def build_target_used(self, target: str) -> bool:
+      # For all of a target's outputs, check whether any of the regexes used by
+      # tests to download artifacts would match it. If any of them do, this
+      # target is necessary.
+      regex = r'\b(%s)\b' % re.escape(target)
+      return any(re.search(regex, opt) for opt in self.file_download_options)
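A short usage sketch of the matching above (the test data is hypothetical): a target counts as used when its name appears as a whole word in any of the file-download option values collected from the test infos.

```python
from build_context import BuildContext

ctx = BuildContext({
    'enabledBuildFeatures': [{'name': 'optimized_build'}],
    'testContext': {'testInfos': [{
        'name': 'example_test',
        'command': '/tf/command',
        'extraOptions': [{'key': 'test-zip-file-filter',
                          'values': ['device-tests.zip']}],
    }]},
})

assert ctx.build_target_used('device-tests')     # whole-word match in 'device-tests.zip'
assert not ctx.build_target_used('other-suite')  # no download option mentions it
```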
diff --git a/ci/build_device_and_tests b/ci/build_device_and_tests
new file mode 100755
index 0000000..9d11268
--- /dev/null
+++ b/ci/build_device_and_tests
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+#
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+build/soong/soong_ui.bash --make-mode build_test_suites || exit $?
+$(build/soong/soong_ui.bash --dumpvar-mode HOST_OUT)/bin/build_test_suites "$@" || exit $?
diff --git a/ci/build_metadata b/ci/build_metadata
new file mode 100755
index 0000000..cd011c8
--- /dev/null
+++ b/ci/build_metadata
@@ -0,0 +1,28 @@
+#!/bin/bash
+
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -ex
+
+export TARGET_PRODUCT=aosp_arm64
+export TARGET_RELEASE=trunk_staging
+export TARGET_BUILD_VARIANT=eng
+
+TARGETS=(
+    all_teams
+    release_config_metadata
+)
+
+build/soong/bin/m dist ${TARGETS[@]}
diff --git a/ci/build_test_suites b/ci/build_test_suites
index 5aaf2f4..9d11268 100755
--- a/ci/build_test_suites
+++ b/ci/build_test_suites
@@ -1,4 +1,4 @@
-#!prebuilts/build-tools/linux-x86/bin/py3-cmd -B
+#!/usr/bin/env bash
 #
 # Copyright 2024, The Android Open Source Project
 #
@@ -14,7 +14,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import build_test_suites
-import sys
-
-build_test_suites.main(sys.argv[1:])
+build/soong/soong_ui.bash --make-mode build_test_suites || exit $?
+$(build/soong/soong_ui.bash --dumpvar-mode HOST_OUT)/bin/build_test_suites "$@" || exit $?
diff --git a/ci/build_test_suites.py b/ci/build_test_suites.py
index 6e1f88c..4d3b546 100644
--- a/ci/build_test_suites.py
+++ b/ci/build_test_suites.py
@@ -20,14 +20,19 @@
 import logging
 import os
 import pathlib
+import re
 import subprocess
 import sys
 from typing import Callable
+from build_context import BuildContext
 import optimized_targets
+import metrics_agent
+import test_discovery_agent
 
 
 REQUIRED_ENV_VARS = frozenset(['TARGET_PRODUCT', 'TARGET_RELEASE', 'TOP'])
 SOONG_UI_EXE_REL_PATH = 'build/soong/soong_ui.bash'
+LOG_PATH = 'logs/build_test_suites.log'
 
 
 class Error(Exception):
@@ -53,7 +58,7 @@
 
   def __init__(
       self,
-      build_context: dict[str, any],
+      build_context: BuildContext,
       args: argparse.Namespace,
       target_optimizations: dict[str, optimized_targets.OptimizedBuildTarget],
   ):
@@ -63,12 +68,36 @@
 
   def create_build_plan(self):
 
-    if 'optimized_build' not in self.build_context['enabled_build_features']:
+    if 'optimized_build' not in self.build_context.enabled_build_features:
       return BuildPlan(set(self.args.extra_targets), set())
 
     build_targets = set()
-    packaging_functions = set()
+    packaging_commands_getters = []
+    test_discovery_zip_regexes = set()
+    optimization_rationale = ''
+    try:
+      # Do not use these regexes for now, only run this to collect data on what
+      # would be optimized.
+      test_discovery_zip_regexes = self._get_test_discovery_zip_regexes()
+      logging.info(f'Discovered test discovery regexes: {test_discovery_zip_regexes}')
+    except test_discovery_agent.TestDiscoveryError as e:
+      optimization_rationale = e.message
+      logging.warning(f'Unable to perform test discovery: {optimization_rationale}')
     for target in self.args.extra_targets:
+      if optimization_rationale:
+        get_metrics_agent().report_unoptimized_target(target, optimization_rationale)
+      else:
+        regex = r'\b(%s)\b' % re.escape(target)
+        if any(re.search(regex, opt) for opt in test_discovery_zip_regexes):
+          get_metrics_agent().report_unoptimized_target(target, 'Test artifact used.')
+        else:
+          get_metrics_agent().report_optimized_target(target)
+
+      if self._unused_target_exclusion_enabled(
+          target
+      ) and not self.build_context.build_target_used(target):
+        continue
+
       target_optimizer_getter = self.target_optimizations.get(target, None)
       if not target_optimizer_getter:
         build_targets.add(target)
@@ -78,15 +107,51 @@
           target, self.build_context, self.args
       )
       build_targets.update(target_optimizer.get_build_targets())
-      packaging_functions.add(target_optimizer.package_outputs)
+      packaging_commands_getters.append(
+          target_optimizer.get_package_outputs_commands
+      )
 
-    return BuildPlan(build_targets, packaging_functions)
+    return BuildPlan(build_targets, packaging_commands_getters)
 
+  def _unused_target_exclusion_enabled(self, target: str) -> bool:
+    return (
+        f'{target}_unused_exclusion'
+        in self.build_context.enabled_build_features
+    )
+
+  def _get_test_discovery_zip_regexes(self) -> set[str]:
+    build_target_regexes = set()
+    for test_info in self.build_context.test_infos:
+      tf_command = self._build_tf_command(test_info)
+      discovery_agent = test_discovery_agent.TestDiscoveryAgent(tradefed_args=tf_command)
+      for regex in discovery_agent.discover_test_zip_regexes():
+        build_target_regexes.add(regex)
+    return build_target_regexes
+
+
+  def _build_tf_command(self, test_info) -> list[str]:
+    command = [test_info.command]
+    for extra_option in test_info.extra_options:
+      if not extra_option.get('key'):
+        continue
+      arg_key = '--' + extra_option.get('key')
+      if arg_key == '--build-id':
+        command.append(arg_key)
+        command.append(os.environ.get('BUILD_NUMBER'))
+        continue
+      if extra_option.get('values'):
+        for value in extra_option.get('values'):
+          command.append(arg_key)
+          command.append(value)
+      else:
+        command.append(arg_key)
+
+    return command
 
 @dataclass(frozen=True)
 class BuildPlan:
   build_targets: set[str]
-  packaging_functions: set[Callable[..., None]]
+  packaging_commands_getters: list[Callable[[], list[list[str]]]]
 
 
 def build_test_suites(argv: list[str]) -> int:
@@ -98,19 +163,27 @@
   Returns:
     The exit code of the build.
   """
-  args = parse_args(argv)
-  check_required_env()
-  build_context = load_build_context()
-  build_planner = BuildPlanner(
-      build_context, args, optimized_targets.OPTIMIZED_BUILD_TARGETS
-  )
-  build_plan = build_planner.create_build_plan()
+  get_metrics_agent().analysis_start()
+  try:
+    args = parse_args(argv)
+    check_required_env()
+    build_context = BuildContext(load_build_context())
+    build_planner = BuildPlanner(
+        build_context, args, optimized_targets.OPTIMIZED_BUILD_TARGETS
+    )
+    build_plan = build_planner.create_build_plan()
+  except:
+    raise
+  finally:
+    get_metrics_agent().analysis_end()
 
   try:
     execute_build_plan(build_plan)
   except BuildFailureError as e:
     logging.error('Build command failed! Check build_log for details.')
     return e.return_code
+  finally:
+    get_metrics_agent().end_reporting()
 
   return 0
 
@@ -154,7 +227,7 @@
 
 
 def empty_build_context():
-  return {'enabled_build_features': []}
+  return {'enabledBuildFeatures': []}
 
 
 def execute_build_plan(build_plan: BuildPlan):
@@ -168,8 +241,15 @@
   except subprocess.CalledProcessError as e:
     raise BuildFailureError(e.returncode) from e
 
-  for packaging_function in build_plan.packaging_functions:
-    packaging_function()
+  get_metrics_agent().packaging_start()
+  try:
+    for packaging_commands_getter in build_plan.packaging_commands_getters:
+      for packaging_command in packaging_commands_getter():
+        run_command(packaging_command)
+  except subprocess.CalledProcessError as e:
+    raise BuildFailureError(e.returncode) from e
+  finally:
+    get_metrics_agent().packaging_end()
 
 
 def get_top() -> pathlib.Path:
@@ -180,5 +260,21 @@
   subprocess.run(args=args, check=True, stdout=stdout)
 
 
+def get_metrics_agent():
+  return metrics_agent.MetricsAgent.instance()
+
+
 def main(argv):
+  dist_dir = os.environ.get('DIST_DIR')
+  if dist_dir:
+    log_file = pathlib.Path(dist_dir) / LOG_PATH
+    logging.basicConfig(
+        level=logging.DEBUG,
+        format='%(asctime)s %(levelname)s %(message)s',
+        filename=log_file,
+    )
   sys.exit(build_test_suites(argv))
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
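Reduced to its decision logic, the unused-target exclusion in `create_build_plan` above skips a target only when that target's exclusion feature flag is enabled and no test downloads its outputs. A hedged sketch (hypothetical helper name, simplified types):

```python
def should_build(target: str, build_context) -> bool:
    """True unless the target's exclusion flag is on and no test uses its outputs."""
    exclusion_enabled = (
        f'{target}_unused_exclusion' in build_context.enabled_build_features
    )
    return not exclusion_enabled or build_context.build_target_used(target)
```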
diff --git a/ci/build_test_suites_test.py b/ci/build_test_suites_test.py
index a9ff3fb..26f4316 100644
--- a/ci/build_test_suites_test.py
+++ b/ci/build_test_suites_test.py
@@ -15,6 +15,7 @@
 """Tests for build_test_suites.py"""
 
 import argparse
+import functools
 from importlib import resources
 import json
 import multiprocessing
@@ -31,10 +32,13 @@
 from typing import Callable
 import unittest
 from unittest import mock
+from build_context import BuildContext
 import build_test_suites
 import ci_test_lib
 import optimized_targets
 from pyfakefs import fake_filesystem_unittest
+import metrics_agent
+import test_discovery_agent
 
 
 class BuildTestSuitesTest(fake_filesystem_unittest.TestCase):
@@ -50,6 +54,10 @@
     self.addCleanup(subprocess_run_patcher.stop)
     self.mock_subprocess_run = subprocess_run_patcher.start()
 
+    metrics_agent_finalize_patcher = mock.patch('metrics_agent.MetricsAgent.end_reporting')
+    self.addCleanup(metrics_agent_finalize_patcher.stop)
+    self.mock_metrics_agent_end = metrics_agent_finalize_patcher.start()
+
     self._setup_working_build_env()
 
   def test_missing_target_release_env_var_raises(self):
@@ -238,14 +246,27 @@
 
   class TestOptimizedBuildTarget(optimized_targets.OptimizedBuildTarget):
 
-    def __init__(self, output_targets):
+    def __init__(
+        self, target, build_context, args, output_targets, packaging_commands
+    ):
+      super().__init__(target, build_context, args)
       self.output_targets = output_targets
+      self.packaging_commands = packaging_commands
 
-    def get_build_targets(self):
+    def get_build_targets_impl(self):
       return self.output_targets
 
-    def package_outputs(self):
-      return f'packaging {" ".join(self.output_targets)}'
+    def get_package_outputs_commands_impl(self):
+      return self.packaging_commands
+
+    def get_enabled_flag(self):
+      return f'{self.target}_enabled'
+
+  def setUp(self):
+    test_discovery_agent_patcher = mock.patch('test_discovery_agent.TestDiscoveryAgent.discover_test_zip_regexes')
+    self.addCleanup(test_discovery_agent_patcher.stop)
+    self.mock_test_discovery_agent_end = test_discovery_agent_patcher.start()
+
 
   def test_build_optimization_off_builds_everything(self):
     build_targets = {'target_1', 'target_2'}
@@ -267,14 +288,15 @@
 
     build_plan = build_planner.create_build_plan()
 
-    self.assertEqual(len(build_plan.packaging_functions), 0)
+    for packaging_command in self.run_packaging_commands(build_plan):
+      self.assertEqual(len(packaging_command), 0)
 
   def test_build_optimization_on_optimizes_target(self):
     build_targets = {'target_1', 'target_2'}
     build_planner = self.create_build_planner(
         build_targets=build_targets,
         build_context=self.create_build_context(
-            enabled_build_features={self.get_target_flag('target_1')}
+            enabled_build_features=[{'name': self.get_target_flag('target_1')}]
         ),
     )
 
@@ -285,20 +307,19 @@
 
   def test_build_optimization_on_packages_target(self):
     build_targets = {'target_1', 'target_2'}
+    optimized_target_name = self.get_optimized_target_name('target_1')
+    packaging_commands = [[f'packaging {optimized_target_name}']]
     build_planner = self.create_build_planner(
         build_targets=build_targets,
         build_context=self.create_build_context(
-            enabled_build_features={self.get_target_flag('target_1')}
+            enabled_build_features=[{'name': self.get_target_flag('target_1')}]
         ),
+        packaging_commands=packaging_commands,
     )
 
     build_plan = build_planner.create_build_plan()
 
-    optimized_target_name = self.get_optimized_target_name('target_1')
-    self.assertIn(
-        f'packaging {optimized_target_name}',
-        self.run_packaging_functions(build_plan),
-    )
+    self.assertIn(packaging_commands, self.run_packaging_commands(build_plan))
 
   def test_individual_build_optimization_off_doesnt_optimize(self):
     build_targets = {'target_1', 'target_2'}
@@ -312,26 +333,94 @@
 
   def test_individual_build_optimization_off_doesnt_package(self):
     build_targets = {'target_1', 'target_2'}
+    packaging_commands = [['packaging command']]
     build_planner = self.create_build_planner(
         build_targets=build_targets,
+        packaging_commands=packaging_commands,
     )
 
     build_plan = build_planner.create_build_plan()
 
-    expected_packaging_function_outputs = {None, None}
-    self.assertSetEqual(
-        expected_packaging_function_outputs,
-        self.run_packaging_functions(build_plan),
+    for packaging_command in self.run_packaging_commands(build_plan):
+      self.assertEqual(len(packaging_command), 0)
+
+  def test_target_output_used_target_built(self):
+    build_target = 'test_target'
+    build_planner = self.create_build_planner(
+        build_targets={build_target},
+        build_context=self.create_build_context(
+            test_context=self.get_test_context(build_target),
+            enabled_build_features=[{'name': 'test_target_unused_exclusion'}],
+        ),
     )
 
+    build_plan = build_planner.create_build_plan()
+
+    self.assertSetEqual(build_plan.build_targets, {build_target})
+
+  def test_target_regex_used_target_built(self):
+    build_target = 'test_target'
+    test_context = self.get_test_context(build_target)
+    test_context['testInfos'][0]['extraOptions'] = [{
+        'key': 'additional-files-filter',
+        'values': [rf'.*{build_target}.*\.zip'],
+    }]
+    build_planner = self.create_build_planner(
+        build_targets={build_target},
+        build_context=self.create_build_context(
+            test_context=test_context,
+            enabled_build_features=[{'name': 'test_target_unused_exclusion'}],
+        ),
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertSetEqual(build_plan.build_targets, {build_target})
+
+  def test_target_output_not_used_target_not_built(self):
+    build_target = 'test_target'
+    test_context = self.get_test_context(build_target)
+    test_context['testInfos'][0]['extraOptions'] = []
+    build_planner = self.create_build_planner(
+        build_targets={build_target},
+        build_context=self.create_build_context(
+            test_context=test_context,
+            enabled_build_features=[{'name': 'test_target_unused_exclusion'}],
+        ),
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertSetEqual(build_plan.build_targets, set())
+
+  def test_target_regex_matching_not_too_broad(self):
+    build_target = 'test_target'
+    test_context = self.get_test_context(build_target)
+    test_context['testInfos'][0]['extraOptions'] = [{
+        'key': 'additional-files-filter',
+        'values': [rf'.*a{build_target}.*\.zip'],
+    }]
+    build_planner = self.create_build_planner(
+        build_targets={build_target},
+        build_context=self.create_build_context(
+            test_context=test_context,
+            enabled_build_features=[{'name': 'test_target_unused_exclusion'}],
+        ),
+    )
+
+    build_plan = build_planner.create_build_plan()
+
+    self.assertSetEqual(build_plan.build_targets, set())
+
   def create_build_planner(
       self,
       build_targets: set[str],
-      build_context: dict[str, any] = None,
+      build_context: BuildContext = None,
       args: argparse.Namespace = None,
       target_optimizations: dict[
           str, optimized_targets.OptimizedBuildTarget
       ] = None,
+      packaging_commands: list[list[str]] = [],
   ) -> build_test_suites.BuildPlanner:
     if not build_context:
       build_context = self.create_build_context()
@@ -339,7 +428,9 @@
       args = self.create_args(extra_build_targets=build_targets)
     if not target_optimizations:
       target_optimizations = self.create_target_optimizations(
-          build_context, build_targets
+          build_context,
+          build_targets,
+          packaging_commands,
       )
     return build_test_suites.BuildPlanner(
         build_context, args, target_optimizations
@@ -348,15 +439,17 @@
   def create_build_context(
       self,
       optimized_build_enabled: bool = True,
-      enabled_build_features: set[str] = set(),
+      enabled_build_features: list[dict[str, str]] = [],
       test_context: dict[str, any] = {},
-  ) -> dict[str, any]:
-    build_context = {}
-    build_context['enabled_build_features'] = enabled_build_features
+  ) -> BuildContext:
+    build_context_dict = {}
+    build_context_dict['enabledBuildFeatures'] = enabled_build_features
     if optimized_build_enabled:
-      build_context['enabled_build_features'].add('optimized_build')
-    build_context['test_context'] = test_context
-    return build_context
+      build_context_dict['enabledBuildFeatures'].append(
+          {'name': 'optimized_build'}
+      )
+    build_context_dict['testContext'] = test_context
+    return BuildContext(build_context_dict)
 
   def create_args(
       self, extra_build_targets: set[str] = set()
@@ -366,19 +459,17 @@
     return parser.parse_args(extra_build_targets)
 
   def create_target_optimizations(
-      self, build_context: dict[str, any], build_targets: set[str]
+      self,
+      build_context: BuildContext,
+      build_targets: set[str],
+      packaging_commands: list[list[str]] = [],
   ):
     target_optimizations = dict()
     for target in build_targets:
-      target_optimizations[target] = (
-          lambda target, build_context, args: optimized_targets.get_target_optimizer(
-              target,
-              self.get_target_flag(target),
-              build_context,
-              self.TestOptimizedBuildTarget(
-                  {self.get_optimized_target_name(target)}
-              ),
-          )
+      target_optimizations[target] = functools.partial(
+          self.TestOptimizedBuildTarget,
+          output_targets={self.get_optimized_target_name(target)},
+          packaging_commands=packaging_commands,
       )
 
     return target_optimizations
@@ -389,14 +480,30 @@
   def get_optimized_target_name(self, target: str):
     return f'{target}_optimized'
 
-  def run_packaging_functions(
-      self, build_plan: build_test_suites.BuildPlan
-  ) -> set[str]:
-    output = set()
-    for packaging_function in build_plan.packaging_functions:
-      output.add(packaging_function())
+  def get_test_context(self, target: str):
+    return {
+        'testInfos': [
+            {
+                'name': 'atp_test',
+                'target': 'test_target',
+                'branch': 'branch',
+                'extraOptions': [{
+                    'key': 'additional-files-filter',
+                    'values': [f'{target}.zip'],
+                }],
+                'command': '/tf/command',
+                'extraBuildTargets': [
+                    'extra_build_target',
+                ],
+            },
+        ],
+    }
 
-    return output
+  def run_packaging_commands(self, build_plan: build_test_suites.BuildPlan):
+    return [
+        packaging_command_getter()
+        for packaging_command_getter in build_plan.packaging_commands_getters
+    ]
 
 
 def wait_until(
diff --git a/ci/buildbot.py b/ci/buildbot.py
new file mode 100644
index 0000000..97097be
--- /dev/null
+++ b/ci/buildbot.py
@@ -0,0 +1,43 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for interacting with buildbot, with a simulation in a local environment"""
+
+import os
+import sys
+
+# Check that the script is running from the root of the tree. Prevents subtle
+# errors later, and CI always runs from the root of the tree.
+if not os.path.exists("build/make/ci/buildbot.py"):
+    raise Exception("CI script must be run from the root of the tree instead of: "
+                    + os.getcwd())
+
+# Check that we are using the hermetic interpreter
+if "prebuilts/build-tools/" not in sys.executable:
+    raise Exception("CI script must be run using the hermetic interpreter from "
+                    + "prebuilts/build-tools instead of: " + sys.executable)
+
+
+def OutDir():
+    "Get the out directory. Will create it if needed."
+    result = os.environ.get("OUT_DIR", "out")
+    os.makedirs(result, exist_ok=True)
+    return result
+
+def DistDir():
+    "Get the dist directory. Will create it if needed."
+    result = os.environ.get("DIST_DIR", os.path.join(OutDir(), "dist"))
+    os.makedirs(result, exist_ok=True)
+    return result
+
diff --git a/ci/dump_product_config b/ci/dump_product_config
new file mode 100755
index 0000000..77b51dd
--- /dev/null
+++ b/ci/dump_product_config
@@ -0,0 +1,353 @@
+#!prebuilts/build-tools/linux-x86/bin/py3-cmd -B
+
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Script to collect all of the make variables from all product config combos.
+
+This script must be run from the root of the source tree.
+
+See GetArgs() below or run dump_product_config for more information.
+"""
+
+import argparse
+import asyncio
+import contextlib
+import csv
+import dataclasses
+import json
+import multiprocessing
+import os
+import subprocess
+import sys
+import time
+from typing import List, Dict, Tuple, Optional
+
+import buildbot
+
+# We have some BIG variables
+csv.field_size_limit(sys.maxsize)
+
+
+class DataclassJSONEncoder(json.JSONEncoder):
+    """JSONEncoder for our custom types."""
+    def default(self, o):
+        if dataclasses.is_dataclass(o):
+            return dataclasses.asdict(o)
+        return super().default(o)
+
+
+def GetProducts():
+    """Get the all of the available TARGET_PRODUCT values."""
+    try:
+        stdout = subprocess.check_output(["build/soong/bin/list_products"], text=True)
+    except subprocess.CalledProcessError:
+        sys.exit(1)
+    return [s.strip() for s in stdout.splitlines() if s.strip()]
+
+
+def GetReleases(product):
+    """For a given product, get the release configs available to it."""
+    if True:
+        # Hard code the list
+        mainline_products = [
+            "module_arm",
+            "module_x86",
+            "module_arm64",
+            "module_riscv64",
+            "module_x86_64",
+            "module_arm64only",
+            "module_x86_64only",
+        ]
+        if product in mainline_products:
+            return ["trunk_staging", "trunk", "mainline"]
+        else:
+            return ["trunk_staging", "trunk", "next"]
+    else:
+        # Get it from the build system
+        try:
+            stdout = subprocess.check_output(["build/soong/bin/list_releases", product], text=True)
+        except subprocess.CalledProcessError:
+            sys.exit(1)
+        return [s.strip() for s in stdout.splitlines() if s.strip()]
+
+
+def GenerateAllLunchTargets():
+    """Generate the full list of lunch targets."""
+    for product in GetProducts():
+        for release in GetReleases(product):
+            for variant in ["user", "userdebug", "eng"]:
+                yield (product, release, variant)
+
+
+async def ParallelExec(parallelism, tasks):
+    '''
+    ParallelExec takes a parallelism count and an iterator of tasks to run.
+    It runs all of the tasks, with at most `parallelism` of them running at
+    any given time. The tasks must be async functions that accept one argument,
+    an integer id of the worker that they are running on.
+    '''
+    tasks = iter(tasks)
+
+    overall_start = time.monotonic()
+    # lists so they can be modified from the inner function
+    total_duration = [0]
+    count = [0]
+    async def dispatch(worker):
+        while True:
+            try:
+                task = next(tasks)
+                item_start = time.monotonic()
+                await task(worker)
+                now = time.monotonic()
+                item_duration = now - item_start
+                count[0] += 1
+                total_duration[0] += item_duration
+                sys.stderr.write(f"Timing: Items processed: {count[0]}, Wall time: {now-overall_start:0.1f} sec, Throughput: {(now-overall_start)/count[0]:0.3f} sec per item, Average duration: {total_duration[0]/count[0]:0.1f} sec\n")
+            except StopIteration:
+                return
+
+    await asyncio.gather(*[dispatch(worker) for worker in range(parallelism)])
+
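+# Illustrative usage of ParallelExec (a sketch, not part of this script's flow):
+# the tasks are async callables that receive the integer worker id they run on.
+#
+#   def make_task(item):
+#       async def task(worker):
+#           print(f"worker {worker} handling {item}")
+#       return task
+#
+#   asyncio.run(ParallelExec(4, (make_task(i) for i in range(10))))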
+
+async def DumpProductConfigs(out, generator, out_dir):
+    """Collects all of the product config data and store it in file."""
+    # Write the outer json list by hand so we can stream it
+    out.write("[")
+    try:
+        first_result = [True] # a list so it can be modified from the inner function
+        def run(lunch):
+            async def curried(worker):
+                sys.stderr.write(f"running: {'-'.join(lunch)}\n")
+                result = await DumpOneProductConfig(lunch, os.path.join(out_dir, f"lunchable_{worker}"))
+                if first_result[0]:
+                    out.write("\n")
+                    first_result[0] = False
+                else:
+                    out.write(",\n")
+                result.dumpToFile(out)
+                sys.stderr.write(f"finished: {'-'.join(lunch)}\n")
+            return curried
+
+        await ParallelExec(multiprocessing.cpu_count(), (run(lunch) for lunch in generator))
+    finally:
+        # Close the json regardless of how we exit
+        out.write("\n]\n")
+
+
+@dataclasses.dataclass(frozen=True)
+class Variable:
+    """A variable name, value and where it was set."""
+    name: str
+    value: str
+    location: str
+
+
+@dataclasses.dataclass(frozen=True)
+class ProductResult:
+    product: str
+    release: str
+    variant: str
+    board_includes: List[str]
+    product_includes: Dict[str, List[str]]
+    product_graph: List[Tuple[str, str]]
+    board_vars: List[Variable]
+    product_vars: List[Variable]
+
+    def dumpToFile(self, f):
+        json.dump(self, f, sort_keys=True, indent=2, cls=DataclassJSONEncoder)
+
+
+@dataclasses.dataclass(frozen=True)
+class ProductError:
+    product: str
+    release: str
+    variant: str
+    error: str
+
+    def dumpToFile(self, f):
+        json.dump(self, f, sort_keys=True, indent=2, cls=DataclassJSONEncoder)
+
+
+def NormalizeInheritGraph(lists):
+    """Flatten the inheritance graph to a simple list for easier querying."""
+    result = set()
+    for item in lists:
+        for i in range(len(item)):
+            result.add((item[i+1] if i < len(item)-1 else "", item[i]))
+    return sorted(list(result))
+
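+# Illustrative only: each item is a stack of product makefiles from the dump, e.g.
+#   NormalizeInheritGraph([["leaf.mk", "base.mk"]])
+# returns [("", "base.mk"), ("base.mk", "leaf.mk")] -- each entry paired with the
+# next one in its stack, with "" marking the end of the stack.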
+
+def ParseDump(lunch, filename) -> ProductResult:
+    """Parses the csv and returns a tuple of the data."""
+    def diff(initial, final):
+        return [after for after in final.values() if
+                initial.get(after.name, Variable(after.name, "", "<unset>")).value != after.value]
+    product_initial = {}
+    product_final = {}
+    board_initial = {}
+    board_final = {}
+    inherit_product = [] # The stack of inherit-product calls
+    product_includes = {} # Other files included by each of the properly imported files
+    board_includes = [] # Files included by boardconfig
+    with open(filename) as f:
+        phase = ""
+        for line in csv.reader(f):
+            if line[0] == "phase":
+                phase = line[1]
+            elif line[0] == "val":
+                # TODO: We should skip these somewhere else.
+                if line[3].startswith("_ALL_RELEASE_FLAGS"):
+                    continue
+                if line[3].startswith("PRODUCTS."):
+                    continue
+                if phase == "PRODUCTS":
+                    if line[2] == "initial":
+                        product_initial[line[3]] = Variable(line[3], line[4], line[5])
+                if phase == "PRODUCT-EXPAND":
+                    if line[2] == "final":
+                        product_final[line[3]] = Variable(line[3], line[4], line[5])
+                if phase == "BOARD":
+                    if line[2] == "initial":
+                        board_initial[line[3]] = Variable(line[3], line[4], line[5])
+                    if line[2] == "final":
+                        board_final[line[3]] = Variable(line[3], line[4], line[5])
+            elif line[0] == "imported":
+                imports = [s.strip() for s in line[1].split()]
+                if imports:
+                    inherit_product.append(imports)
+                    inc = [s.strip() for s in line[2].split()]
+                    for f in inc:
+                        product_includes.setdefault(imports[0], []).append(f)
+            elif line[0] == "board_config_files":
+                board_includes += [s.strip() for s in line[1].split()]
+    return ProductResult(
+        product = lunch[0],
+        release = lunch[1],
+        variant = lunch[2],
+        product_vars = diff(product_initial, product_final),
+        board_vars = diff(board_initial, board_final),
+        product_graph = NormalizeInheritGraph(inherit_product),
+        product_includes = product_includes,
+        board_includes = board_includes
+    )
+
+
+async def DumpOneProductConfig(lunch, out_dir) -> ProductResult | ProductError:
+    """Print a single config's lunch info to stdout."""
+    product, release, variant = lunch
+
+    dumpconfig_file = os.path.join(out_dir, f"{product}-{release}-{variant}.csv")
+
+    # Run get_build_var to bootstrap soong_ui for this target
+    env = dict(os.environ)
+    env["TARGET_PRODUCT"] = product
+    env["TARGET_RELEASE"] = release
+    env["TARGET_BUILD_VARIANT"] = variant
+    env["OUT_DIR"] = out_dir
+    process = await asyncio.create_subprocess_exec(
+        "build/soong/bin/get_build_var",
+        "TARGET_PRODUCT",
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+        env=env
+    )
+    stdout, _ = await process.communicate()
+    stdout = stdout.decode()
+
+    if process.returncode != 0:
+        return ProductError(
+            product = product,
+            release = release,
+            variant = variant,
+            error = stdout
+        )
+    else:
+        # Run kati to extract the data
+        process = await asyncio.create_subprocess_exec(
+            "prebuilts/build-tools/linux-x86/bin/ckati",
+            "-f",
+            "build/make/core/dumpconfig.mk",
+            f"TARGET_PRODUCT={product}",
+            f"TARGET_RELEASE={release}",
+            f"TARGET_BUILD_VARIANT={variant}",
+            f"DUMPCONFIG_FILE={dumpconfig_file}",
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+            env=env
+        )
+        stdout, _ = await process.communicate()
+        if process.returncode != 0:
+            stdout = stdout.decode()
+            return ProductError(
+                product = product,
+                release = release,
+                variant = variant,
+                error = stdout
+            )
+        else:
+            # Parse and record the output
+            return ParseDump(lunch, dumpconfig_file)
+
+
+def GetArgs():
+    """Parse command line arguments."""
+    parser = argparse.ArgumentParser(
+            description="Collect all of the make variables from product config.",
+            epilog="NOTE: This script must be run from the root of the source tree.")
+    parser.add_argument("--lunch", nargs="*")
+    parser.add_argument("--dist", action="store_true")
+
+    return parser.parse_args()
+
+
+async def main():
+    args = GetArgs()
+
+    out_dir = buildbot.OutDir()
+
+    if args.dist:
+        cm = open(os.path.join(buildbot.DistDir(), "all_product_config.json"), "w")
+    else:
+        cm = contextlib.nullcontext(sys.stdout)
+
+
+    with cm as out:
+        if args.lunch:
+            lunches = [lunch.split("-") for lunch in args.lunch]
+            fail = False
+            for i in range(len(lunches)):
+                if len(lunches[i]) != 3:
+                    sys.stderr.write(f"Malformed lunch targets: {args.lunch[i]}\n")
+                    fail = True
+            if fail:
+                sys.exit(1)
+            if len(lunches) == 1:
+                result = await DumpOneProductConfig(lunches[0], out_dir)
+                result.dumpToFile(out)
+                out.write("\n")
+            else:
+                await DumpProductConfigs(out, lunches, out_dir)
+        else:
+            # All configs mode. This will exec single config mode in parallel
+            # for each lunch combo. Write output to $DIST_DIR.
+            await DumpProductConfigs(out, GenerateAllLunchTargets(), out_dir)
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
+
+
+# vim: set syntax=python ts=4 sw=4 sts=4:
+
diff --git a/ci/metrics_agent.py b/ci/metrics_agent.py
new file mode 100644
index 0000000..bc2479e
--- /dev/null
+++ b/ci/metrics_agent.py
@@ -0,0 +1,116 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""MetricsAgent is a singleton class that collects metrics for optimized build."""
+
+from enum import Enum
+import time
+import metrics_pb2
+import os
+import logging
+
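+# Illustrative usage sketch (assumes DIST_DIR is set and logs/soong_metrics was
+# already written by the soong build command):
+#
+#   agent = MetricsAgent.instance()
+#   agent.analysis_start()
+#   # ... decide which targets can be optimized ...
+#   agent.analysis_end()
+#   agent.report_optimized_target('general-tests')
+#   agent.end_reporting()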
+
+class MetricsAgent:
+  _SOONG_METRICS_PATH = 'logs/soong_metrics'
+  _DIST_DIR = 'DIST_DIR'
+  _instance = None
+
+  def __init__(self):
+    raise RuntimeError(
+        'MetricsAgent cannot be instantiated directly, use instance() instead'
+    )
+
+  @classmethod
+  def instance(cls):
+    if not cls._instance:
+      cls._instance = cls.__new__(cls)
+      cls._instance._proto = metrics_pb2.OptimizedBuildMetrics()
+      cls._instance._init_proto()
+      cls._instance._target_results = dict()
+
+    return cls._instance
+
+  def _init_proto(self):
+    self._proto.analysis_perf.name = 'Optimized build analysis time.'
+    self._proto.packaging_perf.name = 'Optimized build total packaging time.'
+
+  def analysis_start(self):
+    self._proto.analysis_perf.start_time = time.time_ns()
+
+  def analysis_end(self):
+    self._proto.analysis_perf.real_time = (
+        time.time_ns() - self._proto.analysis_perf.start_time
+    )
+
+  def packaging_start(self):
+    self._proto.packaging_perf.start_time = time.time_ns()
+
+  def packaging_end(self):
+    self._proto.packaging_perf.real_time = (
+        time.time_ns() - self._proto.packaging_perf.start_time
+    )
+
+  def report_optimized_target(self, name: str):
+    target_result = metrics_pb2.OptimizedBuildMetrics.TargetOptimizationResult()
+    target_result.name = name
+    target_result.optimized = True
+    self._target_results[name] = target_result
+
+  def report_unoptimized_target(self, name: str, optimization_rationale: str):
+    target_result = metrics_pb2.OptimizedBuildMetrics.TargetOptimizationResult()
+    target_result.name = name
+    target_result.optimization_rationale = optimization_rationale
+    target_result.optimized = False
+    self._target_results[name] = target_result
+
+  def target_packaging_start(self, name: str):
+    target_result = self._target_results.get(name)
+    target_result.packaging_perf.start_time = time.time_ns()
+    self._target_results[name] = target_result
+
+  def target_packaging_end(self, name: str):
+    target_result = self._target_results.get(name)
+    target_result.packaging_perf.real_time = (
+        time.time_ns() - target_result.packaging_perf.start_time
+    )
+
+  def add_target_artifact(
+      self,
+      target_name: str,
+      artifact_name: str,
+      size: int,
+      included_modules: set[str],
+  ):
+    target_result = self._target_results.get(target_name)
+    artifact = (
+        metrics_pb2.OptimizedBuildMetrics.TargetOptimizationResult.OutputArtifact()
+    )
+    artifact.name = artifact_name
+    artifact.size = size
+    for module in included_modules:
+      artifact.included_modules.append(module)
+    target_result.output_artifacts.append(artifact)
+
+  def end_reporting(self):
+    for target_result in self._target_results.values():
+      self._proto.target_result.append(target_result)
+    soong_metrics_proto = metrics_pb2.MetricsBase()
+    # Read in existing metrics that should have been written out by the soong
+    # build command so that we don't overwrite them.
+    with open(os.path.join(os.environ[self._DIST_DIR], self._SOONG_METRICS_PATH), 'rb') as f:
+      soong_metrics_proto.ParseFromString(f.read())
+    soong_metrics_proto.optimized_build_metrics.CopyFrom(self._proto)
+    logging.info(soong_metrics_proto)
+    with open(os.path.join(os.environ[self._DIST_DIR], self._SOONG_METRICS_PATH), 'wb') as f:
+      f.write(soong_metrics_proto.SerializeToString())
diff --git a/ci/optimized_targets.py b/ci/optimized_targets.py
index 224c8c0..688bdd8 100644
--- a/ci/optimized_targets.py
+++ b/ci/optimized_targets.py
@@ -14,6 +14,16 @@
 # limitations under the License.
 
 from abc import ABC
+import argparse
+import functools
+import json
+import logging
+import os
+import pathlib
+import subprocess
+
+from build_context import BuildContext
+import test_mapping_module_retriever
 
 
 class OptimizedBuildTarget(ABC):
@@ -24,15 +34,132 @@
   build.
   """
 
-  def __init__(self, build_context, args):
+  _SOONG_UI_BASH_PATH = 'build/soong/soong_ui.bash'
+  _PREBUILT_SOONG_ZIP_PATH = 'prebuilts/build-tools/linux-x86/bin/soong_zip'
+
+  def __init__(
+      self,
+      target: str,
+      build_context: BuildContext,
+      args: argparse.Namespace,
+  ):
+    self.target = target
     self.build_context = build_context
     self.args = args
 
-  def get_build_targets(self):
-    pass
+  def get_build_targets(self) -> set[str]:
+    features = self.build_context.enabled_build_features
+    if self.get_enabled_flag() in features:
+      self.modules_to_build = self.get_build_targets_impl()
+      return self.modules_to_build
 
-  def package_outputs(self):
-    pass
+    self.modules_to_build = {self.target}
+    return {self.target}
+
+  def get_package_outputs_commands(self) -> list[list[str]]:
+    features = self.build_context.enabled_build_features
+    if self.get_enabled_flag() in features:
+      return self.get_package_outputs_commands_impl()
+
+    return []
+
+  def get_package_outputs_commands_impl(self) -> list[list[str]]:
+    raise NotImplementedError(
+        'get_package_outputs_commands_impl not implemented in'
+        f' {type(self).__name__}'
+    )
+
+  def get_enabled_flag(self):
+    raise NotImplementedError(
+        f'get_enabled_flag not implemented in {type(self).__name__}'
+    )
+
+  def get_build_targets_impl(self) -> set[str]:
+    raise NotImplementedError(
+        f'get_build_targets_impl not implemented in {type(self).__name__}'
+    )
+
+  def _generate_zip_options_for_items(
+      self,
+      prefix: str = '',
+      relative_root: str = '',
+      list_files: list[str] | None = None,
+      files: list[str] | None = None,
+      directories: list[str] | None = None,
+  ) -> list[str]:
+    if not list_files and not files and not directories:
+      raise RuntimeError(
+          f'No items specified to be added to zip! Prefix: {prefix}, Relative'
+          f' root: {relative_root}'
+      )
+    command_segment = []
+    # These are all soong_zip options so consult soong_zip --help for specifics.
+    if prefix:
+      command_segment.append('-P')
+      command_segment.append(prefix)
+    if relative_root:
+      command_segment.append('-C')
+      command_segment.append(relative_root)
+    if list_files:
+      for list_file in list_files:
+        command_segment.append('-l')
+        command_segment.append(list_file)
+    if files:
+      for file in files:
+        command_segment.append('-f')
+        command_segment.append(file)
+    if directories:
+      for directory in directories:
+        command_segment.append('-D')
+        command_segment.append(directory)
+
+    return command_segment
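+  # Illustrative example for _generate_zip_options_for_items above: a call such as
+  #   self._generate_zip_options_for_items(prefix='host', relative_root='/abs/host',
+  #                                        files=['/abs/host/foo.config'])
+  # returns ['-P', 'host', '-C', '/abs/host', '-f', '/abs/host/foo.config'].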
+
+  def _query_soong_vars(
+      self, src_top: pathlib.Path, soong_vars: list[str]
+  ) -> dict[str, str]:
+    process_result = subprocess.run(
+        args=[
+            f'{src_top / self._SOONG_UI_BASH_PATH}',
+            '--dumpvars-mode',
+            f'--abs-vars={" ".join(soong_vars)}',
+        ],
+        env=os.environ,
+        check=False,
+        capture_output=True,
+        text=True,
+    )
+    if process_result.returncode != 0:
+      logging.error('soong dumpvars command failed! stderr:')
+      logging.error(process_result.stderr)
+      raise RuntimeError('Soong dumpvars failed! See log for stderr.')
+
+    if not process_result.stdout:
+      raise RuntimeError(
+          f'Necessary soong variables {soong_vars} not found.'
+      )
+
+    try:
+      return {
+          line.split('=', 1)[0]: line.split('=', 1)[1].strip("'")
+          for line in process_result.stdout.strip().split('\n')
+      }
+    except IndexError as e:
+      raise RuntimeError(
+          'Error parsing soong dumpvars output! See output here:'
+          f' {process_result.stdout}',
+          e,
+      )
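+  # Illustrative example for _query_soong_vars above: given dumpvars output like
+  #   PRODUCT_OUT='/abs/out/target/product/foo'
+  #   HOST_OUT='/abs/out/host/linux-x86'
+  # the returned dict maps 'PRODUCT_OUT' to '/abs/out/target/product/foo', etc.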
+
+  def _base_zip_command(
+      self, src_top: pathlib.Path, dist_dir: pathlib.Path, name: str
+  ) -> list[str]:
+    return [
+        f'{src_top / self._PREBUILT_SOONG_ZIP_PATH }',
+        '-d',
+        '-o',
+        f'{dist_dir / name}',
+    ]
 
 
 class NullOptimizer(OptimizedBuildTarget):
@@ -48,22 +175,308 @@
   def get_build_targets(self):
     return {self.target}
 
-  def package_outputs(self):
-    pass
+  def get_package_outputs_commands(self):
+    return []
 
 
-def get_target_optimizer(target, enabled_flag, build_context, optimizer):
-  if enabled_flag in build_context['enabled_build_features']:
-    return optimizer
+class ChangeInfo:
 
-  return NullOptimizer(target)
+  def __init__(self, change_info_file_path):
+    try:
+      with open(change_info_file_path) as change_info_file:
+        change_info_contents = json.load(change_info_file)
+    except json.decoder.JSONDecodeError:
+      logging.error(f'Failed to load CHANGE_INFO: {change_info_file_path}')
+      raise
+
+    self._change_info_contents = change_info_contents
+
+  def find_changed_files(self) -> set[str]:
+    changed_files = set()
+
+    for change in self._change_info_contents['changes']:
+      project_path = change.get('projectPath') + '/'
+
+      for revision in change.get('revisions'):
+        for file_info in revision.get('fileInfos'):
+          changed_files.add(project_path + file_info.get('path'))
+
+    return changed_files
 
 
-# To be written as:
-#    'target': lambda target, build_context, args: get_target_optimizer(
-#        target,
-#        'target_enabled_flag',
-#        build_context,
-#        TargetOptimizer(build_context, args),
-#    )
-OPTIMIZED_BUILD_TARGETS = dict()
+class GeneralTestsOptimizer(OptimizedBuildTarget):
+  """general-tests optimizer
+
+  This optimizer reads in the list of changed files from the file located in
+  env[CHANGE_INFO] and uses this list alongside the normal TEST MAPPING logic to
+  determine what test mapping modules will run for the given changes. It then
+  builds those modules and packages them in the same way general-tests.zip is
+  normally built.
+  """
+
+  # List of modules that are built alongside general-tests as dependencies.
+  _REQUIRED_MODULES = frozenset([
+      'cts-tradefed',
+      'vts-tradefed',
+      'compatibility-host-util',
+      'general-tests-shared-libs',
+  ])
+
+  def get_build_targets_impl(self) -> set[str]:
+    change_info_file_path = os.environ.get('CHANGE_INFO')
+    if not change_info_file_path:
+      logging.info(
+          'No CHANGE_INFO env var found, general-tests optimization disabled.'
+      )
+      return {'general-tests'}
+
+    test_infos = self.build_context.test_infos
+    test_mapping_test_groups = set()
+    for test_info in test_infos:
+      is_test_mapping = test_info.is_test_mapping
+      current_test_mapping_test_groups = test_info.test_mapping_test_groups
+      uses_general_tests = test_info.build_target_used('general-tests')
+
+      if uses_general_tests and not is_test_mapping:
+        logging.info(
+            'Test uses general-tests.zip but is not test-mapping, general-tests'
+            ' optimization disabled.'
+        )
+        return {'general-tests'}
+
+      if is_test_mapping:
+        test_mapping_test_groups.update(current_test_mapping_test_groups)
+
+    change_info = ChangeInfo(change_info_file_path)
+    changed_files = change_info.find_changed_files()
+
+    test_mappings = test_mapping_module_retriever.GetTestMappings(
+        changed_files, set()
+    )
+
+    modules_to_build = set(self._REQUIRED_MODULES)
+
+    modules_to_build.update(
+        test_mapping_module_retriever.FindAffectedModules(
+            test_mappings, changed_files, test_mapping_test_groups
+        )
+    )
+
+    return modules_to_build
+
+  def get_package_outputs_commands_impl(self):
+    src_top = pathlib.Path(os.environ.get('TOP', os.getcwd()))
+    dist_dir = pathlib.Path(os.environ.get('DIST_DIR'))
+
+    soong_vars = self._query_soong_vars(
+        src_top,
+        [
+            'HOST_OUT_TESTCASES',
+            'TARGET_OUT_TESTCASES',
+            'PRODUCT_OUT',
+            'SOONG_HOST_OUT',
+            'HOST_OUT',
+        ],
+    )
+    host_out_testcases = pathlib.Path(soong_vars.get('HOST_OUT_TESTCASES'))
+    target_out_testcases = pathlib.Path(soong_vars.get('TARGET_OUT_TESTCASES'))
+    product_out = pathlib.Path(soong_vars.get('PRODUCT_OUT'))
+    soong_host_out = pathlib.Path(soong_vars.get('SOONG_HOST_OUT'))
+    host_out = pathlib.Path(soong_vars.get('HOST_OUT'))
+
+    host_paths = []
+    target_paths = []
+    host_config_files = []
+    target_config_files = []
+    for module in self.modules_to_build:
+      # The required modules are handled separately, no need to package.
+      if module in self._REQUIRED_MODULES:
+        continue
+
+      host_path = host_out_testcases / module
+      if os.path.exists(host_path):
+        host_paths.append(host_path)
+        self._collect_config_files(src_top, host_path, host_config_files)
+
+      target_path = target_out_testcases / module
+      if os.path.exists(target_path):
+        target_paths.append(target_path)
+        self._collect_config_files(src_top, target_path, target_config_files)
+
+      if not os.path.exists(host_path) and not os.path.exists(target_path):
+        logging.info(f'No host or target build outputs found for {module}.')
+
+    zip_commands = []
+
+    zip_commands.extend(
+        self._get_zip_test_configs_zips_commands(
+            src_top,
+            dist_dir,
+            host_out,
+            product_out,
+            host_config_files,
+            target_config_files,
+        )
+    )
+
+    zip_command = self._base_zip_command(src_top, dist_dir, 'general-tests.zip')
+
+    # Add host testcases.
+    if host_paths:
+      zip_command.extend(
+          self._generate_zip_options_for_items(
+              prefix='host',
+              relative_root=f'{src_top / soong_host_out}',
+              directories=host_paths,
+          )
+      )
+
+    # Add target testcases.
+    if target_paths:
+      zip_command.extend(
+          self._generate_zip_options_for_items(
+              prefix='target',
+              relative_root=f'{src_top / product_out}',
+              directories=target_paths,
+          )
+      )
+
+    # TODO(lucafarsi): Push this logic into a general-tests-minimal build command
+    # Add necessary tools. These are also hardcoded in general-tests.mk.
+    framework_path = soong_host_out / 'framework'
+
+    zip_command.extend(
+        self._generate_zip_options_for_items(
+            prefix='host/tools',
+            relative_root=str(framework_path),
+            files=[
+                f"{framework_path / 'cts-tradefed.jar'}",
+                f"{framework_path / 'compatibility-host-util.jar'}",
+                f"{framework_path / 'vts-tradefed.jar'}",
+            ],
+        )
+    )
+
+    zip_commands.append(zip_command)
+    return zip_commands
+
+  def _collect_config_files(
+      self,
+      src_top: pathlib.Path,
+      root_dir: pathlib.Path,
+      config_files: list[str],
+  ):
+    for root, dirs, files in os.walk(src_top / root_dir):
+      for file in files:
+        if file.endswith('.config'):
+          config_files.append(root_dir / file)
+
+  def _get_zip_test_configs_zips_commands(
+      self,
+      src_top: pathlib.Path,
+      dist_dir: pathlib.Path,
+      host_out: pathlib.Path,
+      product_out: pathlib.Path,
+      host_config_files: list[str],
+      target_config_files: list[str],
+  ) -> list[list[str]]:
+    """Generate general-tests_configs.zip and general-tests_list.zip.
+
+    general-tests_configs.zip contains all of the .config files that were
+    built and general-tests_list.zip contains a text file which lists
+    all of the .config files that are in general-tests_configs.zip.
+
+    general-tests_configs.zip is organized as follows:
+    /
+      host/
+        testcases/
+          test_1.config
+          test_2.config
+          ...
+      target/
+        testcases/
+          test_1.config
+          test_2.config
+          ...
+
+    The process is: write the paths of all host config files into one file and
+    the paths of all target config files into another. The paths of all config
+    files are also written into a third file that is used to build
+    general-tests_list.zip.
+
+    Args:
+      src_top: root of the source tree.
+      dist_dir: dist directory.
+      host_out: host out directory.
+      product_out: product out directory.
+      host_config_files: list of all host config files.
+      target_config_files: list of all target config files.
+
+    Returns:
+      The commands to generate general-tests_configs.zip and
+      general-tests_list.zip
+    """
+    with open(
+        f"{host_out / 'host_general-tests_list'}", 'w'
+    ) as host_list_file, open(
+        f"{product_out / 'target_general-tests_list'}", 'w'
+    ) as target_list_file, open(
+        f"{host_out / 'general-tests_list'}", 'w'
+    ) as list_file:
+
+      for config_file in host_config_files:
+        host_list_file.write(f'{config_file}' + '\n')
+        list_file.write('host/' + os.path.relpath(config_file, host_out) + '\n')
+
+      for config_file in target_config_files:
+        target_list_file.write(f'{config_file}' + '\n')
+        list_file.write(
+            'target/' + os.path.relpath(config_file, product_out) + '\n'
+        )
+
+    zip_commands = []
+
+    tests_config_zip_command = self._base_zip_command(
+        src_top, dist_dir, 'general-tests_configs.zip'
+    )
+    tests_config_zip_command.extend(
+        self._generate_zip_options_for_items(
+            prefix='host',
+            relative_root=str(host_out),
+            list_files=[f"{host_out / 'host_general-tests_list'}"],
+        )
+    )
+
+    tests_config_zip_command.extend(
+        self._generate_zip_options_for_items(
+            prefix='target',
+            relative_root=str(product_out),
+            list_files=[f"{product_out / 'target_general-tests_list'}"],
+        ),
+    )
+
+    zip_commands.append(tests_config_zip_command)
+
+    tests_list_zip_command = self._base_zip_command(
+        src_top, dist_dir, 'general-tests_list.zip'
+    )
+    tests_list_zip_command.extend(
+        self._generate_zip_options_for_items(
+            relative_root=str(host_out),
+            files=[f"{host_out / 'general-tests_list'}"],
+        )
+    )
+    zip_commands.append(tests_list_zip_command)
+
+    return zip_commands
+
+  def get_enabled_flag(self):
+    return 'general_tests_optimized'
+
+  @classmethod
+  def get_optimized_targets(cls) -> dict[str, OptimizedBuildTarget]:
+    return {'general-tests': functools.partial(cls)}
+
+
+OPTIMIZED_BUILD_TARGETS = {}
+OPTIMIZED_BUILD_TARGETS.update(GeneralTestsOptimizer.get_optimized_targets())
diff --git a/ci/optimized_targets_test.py b/ci/optimized_targets_test.py
new file mode 100644
index 0000000..0b0c0ec
--- /dev/null
+++ b/ci/optimized_targets_test.py
@@ -0,0 +1,350 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for optimized_targets.py"""
+
+import json
+import logging
+import os
+import pathlib
+import re
+import subprocess
+import textwrap
+import unittest
+from unittest import mock
+from build_context import BuildContext
+import optimized_targets
+from pyfakefs import fake_filesystem_unittest
+
+
+class GeneralTestsOptimizerTest(fake_filesystem_unittest.TestCase):
+
+  def setUp(self):
+    self.setUpPyfakefs()
+
+    os_environ_patcher = mock.patch.dict('os.environ', {})
+    self.addCleanup(os_environ_patcher.stop)
+    self.mock_os_environ = os_environ_patcher.start()
+
+    self._setup_working_build_env()
+    self._write_change_info_file()
+    test_mapping_dir = pathlib.Path('/project/path/file/path')
+    test_mapping_dir.mkdir(parents=True)
+    self._write_test_mapping_file()
+
+  def _setup_working_build_env(self):
+    self.change_info_file = pathlib.Path('/tmp/change_info')
+    self._write_soong_ui_file()
+    self._host_out_testcases = pathlib.Path('/tmp/top/host_out_testcases')
+    self._host_out_testcases.mkdir(parents=True)
+    self._target_out_testcases = pathlib.Path('/tmp/top/target_out_testcases')
+    self._target_out_testcases.mkdir(parents=True)
+    self._product_out = pathlib.Path('/tmp/top/product_out')
+    self._product_out.mkdir(parents=True)
+    self._soong_host_out = pathlib.Path('/tmp/top/soong_host_out')
+    self._soong_host_out.mkdir(parents=True)
+    self._host_out = pathlib.Path('/tmp/top/host_out')
+    self._host_out.mkdir(parents=True)
+
+    self._dist_dir = pathlib.Path('/tmp/top/out/dist')
+    self._dist_dir.mkdir(parents=True)
+
+    self.mock_os_environ.update({
+        'CHANGE_INFO': str(self.change_info_file),
+        'TOP': '/tmp/top',
+        'DIST_DIR': '/tmp/top/out/dist',
+    })
+
+  def _write_soong_ui_file(self):
+    soong_path = pathlib.Path('/tmp/top/build/soong')
+    soong_path.mkdir(parents=True)
+    with open(os.path.join(soong_path, 'soong_ui.bash'), 'w') as f:
+      f.write("""
+              #!/bin/bash
+              echo HOST_OUT_TESTCASES='/tmp/top/host_out_testcases'
+              echo TARGET_OUT_TESTCASES='/tmp/top/target_out_testcases'
+              echo PRODUCT_OUT='/tmp/top/product_out'
+              echo SOONG_HOST_OUT='/tmp/top/soong_host_out'
+              echo HOST_OUT='/tmp/top/host_out'
+              """)
+    os.chmod(os.path.join(soong_path, 'soong_ui.bash'), 0o666)
+
+  def _write_change_info_file(self):
+    change_info_contents = {
+        'changes': [{
+            'projectPath': '/project/path',
+            'revisions': [{
+                'fileInfos': [{
+                    'path': 'file/path/file_name',
+                }],
+            }],
+        }]
+    }
+
+    with open(self.change_info_file, 'w') as f:
+      json.dump(change_info_contents, f)
+
+  def _write_test_mapping_file(self):
+    test_mapping_contents = {
+        'test-mapping-group': [
+            {
+                'name': 'test_mapping_module',
+            },
+        ],
+    }
+
+    with open('/project/path/file/path/TEST_MAPPING', 'w') as f:
+      json.dump(test_mapping_contents, f)
+
+  def test_general_tests_optimized(self):
+    optimizer = self._create_general_tests_optimizer()
+
+    build_targets = optimizer.get_build_targets()
+
+    expected_build_targets = set(
+        optimized_targets.GeneralTestsOptimizer._REQUIRED_MODULES
+    )
+    expected_build_targets.add('test_mapping_module')
+
+    self.assertSetEqual(build_targets, expected_build_targets)
+
+  def test_no_change_info_no_optimization(self):
+    del os.environ['CHANGE_INFO']
+
+    optimizer = self._create_general_tests_optimizer()
+
+    build_targets = optimizer.get_build_targets()
+
+    self.assertSetEqual(build_targets, {'general-tests'})
+
+  def test_mapping_groups_unused_module_not_built(self):
+    test_context = self._create_test_context()
+    test_context['testInfos'][0]['extraOptions'] = [
+        {
+            'key': 'additional-files-filter',
+            'values': ['general-tests.zip'],
+        },
+        {
+            'key': 'test-mapping-test-group',
+            'values': ['unused-test-mapping-group'],
+        },
+    ]
+    optimizer = self._create_general_tests_optimizer(
+        build_context=self._create_build_context(test_context=test_context)
+    )
+
+    build_targets = optimizer.get_build_targets()
+
+    expected_build_targets = set(
+        optimized_targets.GeneralTestsOptimizer._REQUIRED_MODULES
+    )
+    self.assertSetEqual(build_targets, expected_build_targets)
+
+  def test_general_tests_used_by_non_test_mapping_test_no_optimization(self):
+    test_context = self._create_test_context()
+    test_context['testInfos'][0]['extraOptions'] = [{
+        'key': 'additional-files-filter',
+        'values': ['general-tests.zip'],
+    }]
+    optimizer = self._create_general_tests_optimizer(
+        build_context=self._create_build_context(test_context=test_context)
+    )
+
+    build_targets = optimizer.get_build_targets()
+
+    self.assertSetEqual(build_targets, {'general-tests'})
+
+  def test_malformed_change_info_raises(self):
+    with open(self.change_info_file, 'w') as f:
+      f.write('not change info')
+
+    optimizer = self._create_general_tests_optimizer()
+
+    with self.assertRaises(json.decoder.JSONDecodeError):
+      build_targets = optimizer.get_build_targets()
+
+  def test_malformed_test_mapping_raises(self):
+    with open('/project/path/file/path/TEST_MAPPING', 'w') as f:
+      f.write('not test mapping')
+
+    optimizer = self._create_general_tests_optimizer()
+
+    with self.assertRaises(json.decoder.JSONDecodeError):
+      build_targets = optimizer.get_build_targets()
+
+  @mock.patch('subprocess.run')
+  def test_packaging_outputs_success(self, subprocess_run):
+    subprocess_run.return_value = self._get_soong_vars_output()
+    optimizer = self._create_general_tests_optimizer()
+    self._set_up_build_outputs(['test_mapping_module'])
+
+    targets = optimizer.get_build_targets()
+    package_commands = optimizer.get_package_outputs_commands()
+
+    self._verify_soong_zip_commands(package_commands, ['test_mapping_module'])
+
+  @mock.patch('subprocess.run')
+  def test_get_soong_dumpvars_fails_raises(self, subprocess_run):
+    subprocess_run.return_value = self._get_soong_vars_output(return_code=-1)
+    optimizer = self._create_general_tests_optimizer()
+    self._set_up_build_outputs(['test_mapping_module'])
+
+    targets = optimizer.get_build_targets()
+
+    with self.assertRaisesRegex(RuntimeError, 'Soong dumpvars failed!'):
+      package_commands = optimizer.get_package_outputs_commands()
+
+  @mock.patch('subprocess.run')
+  def test_get_soong_dumpvars_bad_output_raises(self, subprocess_run):
+    subprocess_run.return_value = self._get_soong_vars_output(
+        stdout='This output is bad'
+    )
+    optimizer = self._create_general_tests_optimizer()
+    self._set_up_build_outputs(['test_mapping_module'])
+
+    targets = optimizer.get_build_targets()
+
+    with self.assertRaisesRegex(
+        RuntimeError, 'Error parsing soong dumpvars output'
+    ):
+      package_commands = optimizer.get_package_outputs_commands()
+
+  def _create_general_tests_optimizer(self, build_context: BuildContext = None):
+    if not build_context:
+      build_context = self._create_build_context()
+    return optimized_targets.GeneralTestsOptimizer(
+        'general-tests', build_context, None
+    )
+
+  def _create_build_context(
+      self,
+      general_tests_optimized: bool = True,
+      test_context: dict[str, any] = None,
+  ) -> BuildContext:
+    if not test_context:
+      test_context = self._create_test_context()
+    build_context_dict = {}
+    build_context_dict['enabledBuildFeatures'] = [{'name': 'optimized_build'}]
+    if general_tests_optimized:
+      build_context_dict['enabledBuildFeatures'].append(
+          {'name': 'general_tests_optimized'}
+      )
+    build_context_dict['testContext'] = test_context
+    return BuildContext(build_context_dict)
+
+  def _create_test_context(self):
+    return {
+        'testInfos': [
+            {
+                'name': 'atp_test',
+                'target': 'test_target',
+                'branch': 'branch',
+                'extraOptions': [
+                    {
+                        'key': 'additional-files-filter',
+                        'values': ['general-tests.zip'],
+                    },
+                    {
+                        'key': 'test-mapping-test-group',
+                        'values': ['test-mapping-group'],
+                    },
+                ],
+                'command': '/tf/command',
+                'extraBuildTargets': [
+                    'extra_build_target',
+                ],
+            },
+        ],
+    }
+
+  def _get_soong_vars_output(
+      self, return_code: int = 0, stdout: str = ''
+  ) -> subprocess.CompletedProcess:
+    return_value = subprocess.CompletedProcess(args=[], returncode=return_code)
+    if not stdout:
+      stdout = textwrap.dedent(f"""\
+                               HOST_OUT_TESTCASES='{self._host_out_testcases}'
+                               TARGET_OUT_TESTCASES='{self._target_out_testcases}'
+                               PRODUCT_OUT='{self._product_out}'
+                               SOONG_HOST_OUT='{self._soong_host_out}'
+                               HOST_OUT='{self._host_out}'""")
+
+    return_value.stdout = stdout
+    return return_value
+
+  def _set_up_build_outputs(self, targets: list[str]):
+    for target in targets:
+      host_dir = self._host_out_testcases / target
+      host_dir.mkdir()
+      (host_dir / f'{target}.config').touch()
+      (host_dir / f'test_file').touch()
+
+      target_dir = self._target_out_testcases / target
+      target_dir.mkdir()
+      (target_dir / f'{target}.config').touch()
+      (target_dir / f'test_file').touch()
+
+  def _verify_soong_zip_commands(self, commands: list[str], targets: list[str]):
+    """Verify the structure of the zip commands.
+
+    Zip commands have to start with the soong_zip binary path, followed by a
+    couple of options and the name of the zip being created. Depending on which
+    zip is being created, check that a few essential items are included in it.
+
+    Args:
+      commands: list of command lists
+      targets: list of targets expected to be in general-tests.zip
+    """
+    for command in commands:
+      self.assertEqual(
+          '/tmp/top/prebuilts/build-tools/linux-x86/bin/soong_zip',
+          command[0],
+      )
+      self.assertEqual('-d', command[1])
+      self.assertEqual('-o', command[2])
+      match (command[3]):
+        case '/tmp/top/out/dist/general-tests_configs.zip':
+          self.assertIn(f'{self._host_out}/host_general-tests_list', command)
+          self.assertIn(
+              f'{self._product_out}/target_general-tests_list', command
+          )
+          return
+        case '/tmp/top/out/dist/general-tests_list.zip':
+          self.assertIn('-f', command)
+          self.assertIn(f'{self._host_out}/general-tests_list', command)
+          return
+        case '/tmp/top/out/dist/general-tests.zip':
+          for target in targets:
+            self.assertIn(f'{self._host_out_testcases}/{target}', command)
+            self.assertIn(f'{self._target_out_testcases}/{target}', command)
+          self.assertIn(
+              f'{self._soong_host_out}/framework/cts-tradefed.jar', command
+          )
+          self.assertIn(
+              f'{self._soong_host_out}/framework/compatibility-host-util.jar',
+              command,
+          )
+          self.assertIn(
+              f'{self._soong_host_out}/framework/vts-tradefed.jar', command
+          )
+          return
+        case _:
+          self.fail(f'malformed command: {command}')
+
+
+if __name__ == '__main__':
+  # Setup logging to be silent so unit tests can pass through TF.
+  logging.disable(logging.ERROR)
+  unittest.main()
diff --git a/ci/test_discovery_agent.py b/ci/test_discovery_agent.py
new file mode 100644
index 0000000..008ee47
--- /dev/null
+++ b/ci/test_discovery_agent.py
@@ -0,0 +1,120 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Test discovery agent that uses TradeFed to discover test artifacts."""
+import glob
+import json
+import logging
+import os
+import subprocess
+
+
+class TestDiscoveryAgent:
+  """Test discovery agent."""
+
+  _TRADEFED_PREBUILT_JAR_RELATIVE_PATH = (
+      "vendor/google_tradefederation/prebuilts/filegroups/google-tradefed/"
+  )
+
+  _TRADEFED_NO_POSSIBLE_TEST_DISCOVERY_KEY = "NoPossibleTestDiscovery"
+
+  _TRADEFED_TEST_ZIP_REGEXES_LIST_KEY = "TestZipRegexes"
+
+  _TRADEFED_DISCOVERY_OUTPUT_FILE_NAME = "test_discovery_agent.txt"
+
+  def __init__(
+      self,
+      tradefed_args: list[str],
+      test_mapping_zip_path: str = "",
+      tradefed_jar_relevant_files_path: str = _TRADEFED_PREBUILT_JAR_RELATIVE_PATH,
+  ):
+    self.tradefed_args = tradefed_args
+    self.test_mapping_zip_path = test_mapping_zip_path
+    self.tradefed_jar_relevant_files_path = tradefed_jar_relevant_files_path
+
+  def discover_test_zip_regexes(self) -> list[str]:
+    """Discover test zip regexes from TradeFed.
+
+    Returns:
+      A list of test zip regexes that TF is going to try to pull files from.
+    """
+    test_discovery_output_file_name = os.path.join(
+        os.environ.get('TOP'), 'out', self._TRADEFED_DISCOVERY_OUTPUT_FILE_NAME
+    )
+    with open(
+        test_discovery_output_file_name, mode="w+t"
+    ) as test_discovery_output_file:
+      java_args = []
+      java_args.append("prebuilts/jdk/jdk21/linux-x86/bin/java")
+      java_args.append("-cp")
+      java_args.append(
+          self.create_classpath(self.tradefed_jar_relevant_files_path)
+      )
+      java_args.append(
+          "com.android.tradefed.observatory.TestZipDiscoveryExecutor"
+      )
+      java_args.extend(self.tradefed_args)
+      env = os.environ.copy()
+      env.update({"DISCOVERY_OUTPUT_FILE": test_discovery_output_file.name})
+      logging.info(f"Calling test discovery with args: {java_args}")
+      try:
+        result = subprocess.run(args=java_args, env=env, text=True, check=True)
+        logging.info(f"Test zip discovery output: {result.stdout}")
+      except subprocess.CalledProcessError as e:
+        raise TestDiscoveryError(
+            f"Failed to run test discovery, strout: {e.stdout}, strerr:"
+            f" {e.stderr}, returncode: {e.returncode}"
+        )
+      data = json.loads(test_discovery_output_file.read())
+      logging.info(f"Test discovery result file content: {data}")
+      if (
+          self._TRADEFED_NO_POSSIBLE_TEST_DISCOVERY_KEY in data
+          and data[self._TRADEFED_NO_POSSIBLE_TEST_DISCOVERY_KEY]
+      ):
+        raise TestDiscoveryError("No possible test discovery")
+      if (
+          data[self._TRADEFED_TEST_ZIP_REGEXES_LIST_KEY] is None
+          or data[self._TRADEFED_TEST_ZIP_REGEXES_LIST_KEY] == []
+      ):
+        raise TestDiscoveryError("No test zip regexes returned")
+      return data[self._TRADEFED_TEST_ZIP_REGEXES_LIST_KEY]
+
+  def discover_test_modules(self) -> list[str]:
+    """Discover test modules from TradeFed.
+
+    Returns:
+      A list of test modules that TradeFed is going to execute based on the
+      TradeFed test args.
+    """
+    return []
+
+  def create_classpath(self, directory):
+    """Creates a classpath string from all .jar files in the given directory.
+
+    Args:
+      directory: The directory to search for .jar files.
+
+    Returns:
+      A string representing the classpath, with jar files separated by the
+      OS-specific path separator (e.g., ':' on Linux/macOS, ';' on Windows).
+    """
+    jar_files = glob.glob(os.path.join(directory, "*.jar"))
+    return os.pathsep.join(jar_files)
+
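+# Illustrative only: if the prebuilt directory contains a.jar and b.jar,
+# create_classpath returns something like "<dir>/a.jar:<dir>/b.jar" on Linux,
+# since os.pathsep is ':' there.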
+
+class TestDiscoveryError(Exception):
+  """A TestDiscoveryErrorclass."""
+
+  def __init__(self, message):
+    super().__init__(message)
+    self.message = message
diff --git a/ci/test_mapping_module_retriever.py b/ci/test_mapping_module_retriever.py
index d2c13c0..c93cdd5 100644
--- a/ci/test_mapping_module_retriever.py
+++ b/ci/test_mapping_module_retriever.py
@@ -17,11 +17,13 @@
 modules are needed to build for the given list of changed files.
 TODO(lucafarsi): Deduplicate from artifact_helper.py
 """
+# TODO(lucafarsi): Share this logic with the original logic in
+# test_mapping_test_retriever.py
 
-from typing import Any, Dict, Set, Text
 import json
 import os
 import re
+from typing import Any
 
 # Regex to extra test name from the path of test config file.
 TEST_NAME_REGEX = r'(?:^|.*/)([^/]+)\.config'
@@ -39,7 +41,7 @@
 _COMMENTS_RE = re.compile(r'(\"(?:[^\"\\]|\\.)*\"|(?=//))(?://.*)?')
 
 
-def FilterComments(test_mapping_file: Text) -> Text:
+def FilterComments(test_mapping_file: str) -> str:
   """Remove comments in TEST_MAPPING file to valid format.
 
   Only '//' is regarded as comments.
@@ -52,8 +54,8 @@
   """
   return re.sub(_COMMENTS_RE, r'\1', test_mapping_file)
 
-def GetTestMappings(paths: Set[Text],
-                    checked_paths: Set[Text]) -> Dict[Text, Dict[Text, Any]]:
+def GetTestMappings(paths: set[str],
+                    checked_paths: set[str]) -> dict[str, dict[str, Any]]:
   """Get the affected TEST_MAPPING files.
 
   TEST_MAPPING files in source code are packaged into a build artifact
@@ -123,3 +125,68 @@
       pass
 
   return test_mappings
+
+
+def FindAffectedModules(
+    test_mappings: dict[str, Any],
+    changed_files: set[str],
+    test_mapping_test_groups: set[str],
+) -> set[str]:
+  """Find affected test modules.
+
+  Find the affected set of test modules that would run in a test mapping run
+  based on the given test mappings, changed files, and test mapping test
+  groups.
+
+  Args:
+    test_mappings: A dict of test mappings returned by GetTestMappings in the
+      following format:
+      {
+        'test_mapping_file_path': {
+          'group_name': [
+            {'name': 'module_name'},
+          ],
+        },
+      }
+    changed_files: A set of files changed for the given run.
+    test_mapping_test_groups: A set of test mapping test groups that are being
+      considered for the given run.
+
+  Returns:
+    A set of test module names which would run for a test mapping test run
+    with the given parameters.
+  """
+
+  modules = set()
+
+  for test_mapping in test_mappings.values():
+    for group_name, group in test_mapping.items():
+      # If a module is not in any of the test mapping groups being tested skip
+      # it.
+      if group_name not in test_mapping_test_groups:
+        continue
+
+      for entry in group:
+        module_name = entry.get('name')
+
+        if not module_name:
+          continue
+
+        file_patterns = entry.get('file_patterns')
+        if not file_patterns:
+          modules.add(module_name)
+          continue
+
+        if MatchesFilePatterns(file_patterns, changed_files):
+          modules.add(module_name)
+
+  return modules
+
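+# Illustrative example (hypothetical data): given
+#   test_mappings = {'foo/TEST_MAPPING': {'presubmit': [{'name': 'FooTests'}]}}
+# a call like
+#   FindAffectedModules(test_mappings, {'foo/Bar.java'}, {'presubmit'})
+# returns {'FooTests'}, since entries without file_patterns always match their group.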
+def MatchesFilePatterns(
+    file_patterns: list[str], changed_files: set[str]
+) -> bool:
+  """Checks if any of the changed files match any of the file patterns.
+
+  Args:
+    file_patterns: A list of file patterns to match against.
+    changed_files: A set of files to check against the file patterns.
+
+  Returns:
+    True if any of the changed files match any of the file patterns.
+  """
+  return any(re.search(pattern, "|".join(changed_files)) for pattern in file_patterns)
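+# Illustrative only:
+#   MatchesFilePatterns([r'.*\.java'], {'a/Foo.java', 'b/README.md'})  # True
+#   MatchesFilePatterns([r'.*\.cpp'], {'a/Foo.java'})                  # False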
diff --git a/cogsetup.sh b/cogsetup.sh
deleted file mode 100644
index ef1485d..0000000
--- a/cogsetup.sh
+++ /dev/null
@@ -1,71 +0,0 @@
-#
-# Copyright (C) 2023 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This file is executed by build/envsetup.sh, and can use anything
-# defined in envsetup.sh.
-function _create_out_symlink_for_cog() {
-  if [[ "${OUT_DIR}" == "" ]]; then
-    OUT_DIR="out"
-  fi
-
-  # getoutdir ensures paths are absolute. envsetup could be called from a
-  # directory other than the root of the source tree
-  local outdir=$(getoutdir)
-  if [[ -L "${outdir}" ]]; then
-    return
-  fi
-  if [ -d "${outdir}" ]; then
-    echo -e "\tOutput directory ${outdir} cannot be present in a Cog workspace."
-    echo -e "\tDelete \"${outdir}\" or create a symlink from \"${outdir}\" to a directory outside your workspace."
-    return 1
-  fi
-
-  DEFAULT_OUTPUT_DIR="${HOME}/.cog/android-build-out"
-  mkdir -p ${DEFAULT_OUTPUT_DIR}
-  ln -s ${DEFAULT_OUTPUT_DIR} ${outdir}
-}
-
-# This function sets up the build environment to be appropriate for Cog.
-function _setup_cog_env() {
-  _create_out_symlink_for_cog
-  if [ "$?" -eq "1" ]; then
-    echo -e "\e[0;33mWARNING:\e[00m Cog environment setup failed!"
-    return 1
-  fi
-
-  export ANDROID_BUILD_ENVIRONMENT_CONFIG="googler-cog"
-
-  # Running repo command within Cog workspaces is not supported, so override
-  # it with this function. If the user is running repo within a Cog workspace,
-  # we'll fail with an error, otherwise, we run the original repo command with
-  # the given args.
-  if ! ORIG_REPO_PATH=`which repo`; then
-    return 0
-  fi
-  function repo {
-    if [[ "${PWD}" == /google/cog/* ]]; then
-      echo "\e[01;31mERROR:\e[0mrepo command is disallowed within Cog workspaces."
-      return 1
-    fi
-    ${ORIG_REPO_PATH} "$@"
-  }
-}
-
-if [[ "${PWD}" != /google/cog/* ]]; then
-  echo -e "\e[01;31mERROR:\e[0m This script must be run from a Cog workspace."
-fi
-
-_setup_cog_env
diff --git a/core/Makefile b/core/Makefile
index 7d7b9e7..92dd86d 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -192,6 +192,34 @@
 unique_product_copy_files_pairs :=
 unique_product_copy_files_destinations :=
 
+
+# Returns a list of EXTRA_INSTALL_ZIPS trios whose primary file is contained within $(1)
+# The trios will contain the primary installed file : the directory to unzip the zip to : the zip
+define relevant-extra-install-zips
+$(strip $(foreach p,$(EXTRA_INSTALL_ZIPS), \
+  $(if $(filter $(call word-colon,1,$(p)),$(1)), \
+    $(p))))
+endef
+
+# Writes a text file that contains all of the files that will be inside a partition.
+# All the file paths will be relative to the partition's staging directory.
+# It will also take into account files inside zips listed in EXTRA_INSTALL_ZIPS.
+#
+# Arguments:
+#   $(1): Output file
+#   $(2): The partition's staging directory
+#   $(3): Files to include in the partition
+define write-partition-file-list
+$(1): PRIVATE_FILES := $(subst $(2)/,,$(filter $(2)/%,$(3)))
+$(1): PRIVATE_EXTRA_INSTALL_ZIPS := $(call relevant-extra-install-zips,$(filter $(2)/%,$(3)))
+$(1): $$(HOST_OUT_EXECUTABLES)/extra_install_zips_file_list $(foreach p,$(call relevant-extra-install-zips,$(filter $(2)/%,$(3))),$(call word-colon,3,$(p)))
+	@echo Writing $$@
+	rm -f $$@
+	echo -n > $$@
+	$$(foreach f,$$(PRIVATE_FILES),echo "$$(f)" >> $$@$$(newline))
+	$$(HOST_OUT_EXECUTABLES)/extra_install_zips_file_list $(2) $$(PRIVATE_EXTRA_INSTALL_ZIPS) >> $$@
+endef
+
 # -----------------------------------------------------------------
 # Returns the max allowed size for an image suitable for hash verification
 # (e.g., boot.img, recovery.img, etc).
@@ -692,7 +720,7 @@
 
 BOARD_KERNEL_MODULE_DIRS += top
 
-# Default to not generating modules.dep for kernel modules on system
+# Default to not generating modules.load for kernel modules on system
 # side. We should only load these modules if they are depended by vendor
 # side modules.
 ifeq ($(BOARD_SYSTEM_KERNEL_MODULES_LOAD),)
@@ -717,7 +745,7 @@
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-ramdisk-charger-load,$(kmd))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-kernel-ramdisk-charger-load,$(kmd))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,ODM,$(if $(filter true,$(BOARD_USES_ODM_DLKMIMAGE)),$(TARGET_OUT_ODM_DLKM),$(TARGET_OUT_ODM)),odm,modules.load,,$(kmd))) \
-  $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,SYSTEM,$(if $(filter true,$(BOARD_USES_SYSTEM_DLKMIMAGE)),$(TARGET_OUT_SYSTEM_DLKM),$(TARGET_OUT_SYSTEM)),system,modules.load,,$(kmd))) \
+  $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,SYSTEM,$(if $(filter true,$(BOARD_USES_SYSTEM_DLKMIMAGE)),$(TARGET_OUT_SYSTEM_DLKM),$(TARGET_OUT)),system,modules.load,,$(kmd))) \
   $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
     $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-recovery-as-boot-load,$(kmd))),\
     $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,GENERIC_RAMDISK,$(TARGET_RAMDISK_OUT),,modules.load,$(GENERIC_RAMDISK_STRIPPED_MODULE_STAGING_DIR),$(kmd)))))
@@ -1268,8 +1296,11 @@
 endif
 
 
+ramdisk_intermediates :=$= $(call intermediates-dir-for,PACKAGING,ramdisk)
+$(eval $(call write-partition-file-list,$(ramdisk_intermediates)/file_list.txt,$(TARGET_RAMDISK_OUT),$(INTERNAL_RAMDISK_FILES)))
+
+# The value of RAMDISK_NODE_LIST is defined in system/core/rootdir/Android.bp.
 # This file contains /dev nodes description added to the generic ramdisk
-RAMDISK_NODE_LIST := $(PRODUCT_OUT)/ramdisk_node_list
 
 # We just build this directly to the install location.
 INSTALLED_RAMDISK_TARGET := $(BUILT_RAMDISK_TARGET)
@@ -1623,6 +1654,8 @@
     $(ALL_DEFAULT_INSTALLED_MODULES))
 
 INTERNAL_VENDOR_RAMDISK_TARGET := $(call intermediates-dir-for,PACKAGING,vendor_boot)/vendor_ramdisk.cpio$(RAMDISK_EXT)
+vendor_ramdisk_intermediates :=$= $(call intermediates-dir-for,PACKAGING,vendor_ramdisk)
+$(eval $(call write-partition-file-list,$(vendor_ramdisk_intermediates)/file_list.txt,$(TARGET_VENDOR_RAMDISK_OUT),$(INTERNAL_VENDOR_RAMDISK_FILES)))
 
 # Exclude recovery files in the default vendor ramdisk if including a standalone
 # recovery ramdisk in vendor_boot.
@@ -1677,12 +1710,13 @@
   INTERNAL_VENDOR_BOOTIMAGE_ARGS += --vendor_cmdline "$(INTERNAL_KERNEL_CMDLINE)"
 endif
 
-ifdef INTERNAL_BOOTCONFIG
+ifneq (, $(INTERNAL_BOOTCONFIG)$(INTERNAL_BOOTCONFIG_FILE))
   INTERNAL_VENDOR_BOOTCONFIG_TARGET := $(PRODUCT_OUT)/vendor-bootconfig.img
   $(INTERNAL_VENDOR_BOOTCONFIG_TARGET):
 	rm -f $@
 	$(foreach param,$(INTERNAL_BOOTCONFIG), \
 	 printf "%s\n" $(param) >> $@;)
+	cat $(INTERNAL_BOOTCONFIG_FILE) >> $@
   INTERNAL_VENDOR_BOOTIMAGE_ARGS += --vendor_bootconfig $(INTERNAL_VENDOR_BOOTCONFIG_TARGET)
 endif
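A hedged sketch of the board-side input that feeds this rule; the values are illustrative, and BOARD_BOOTCONFIG / BOARD_BOOTCONFIG_FILE are forwarded to INTERNAL_BOOTCONFIG / INTERNAL_BOOTCONFIG_FILE by core/board_config.mk later in this change:

# Illustrative BoardConfig.mk values only.
BOARD_BOOTCONFIG := androidboot.example.flag=1
BOARD_BOOTCONFIG_FILE := device/vendorx/boardy/bootconfig.txt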
 
@@ -1965,7 +1999,7 @@
 installed_system_dlkm_notice_xml_gz := $(TARGET_OUT_SYSTEM_DLKM)/etc/NOTICE.xml.gz
 
 ALL_INSTALLED_NOTICE_FILES := \
-  $(installed_notice_html_or_xml_gz) \
+  $(if $(USE_SOONG_DEFINED_SYSTEM_IMAGE),,$(installed_notice_html_or_xml_gz)) \
   $(installed_vendor_notice_xml_gz) \
   $(installed_product_notice_xml_gz) \
   $(installed_system_ext_notice_xml_gz) \
@@ -2052,7 +2086,9 @@
 
 endif # PRODUCT_NOTICE_SPLIT
 
+ifneq ($(USE_SOONG_DEFINED_SYSTEM_IMAGE),true)
 ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_or_xml_gz)
+endif
 
 need_vendor_notice:=false
 ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
@@ -3417,8 +3453,10 @@
 # system image
 
 INSTALLED_FILES_OUTSIDE_IMAGES := $(filter-out $(TARGET_OUT)/%, $(INSTALLED_FILES_OUTSIDE_IMAGES))
+ifdef BUILDING_SYSTEM_IMAGE
 INTERNAL_SYSTEMIMAGE_FILES := $(sort $(filter $(TARGET_OUT)/%, \
     $(ALL_DEFAULT_INSTALLED_MODULES)))
+endif
 
 # Create symlink /system/vendor to /vendor if necessary.
 ifdef BOARD_USES_VENDORIMAGE
@@ -3468,31 +3506,6 @@
 
 FULL_SYSTEMIMAGE_DEPS += $(INTERNAL_ROOT_FILES) $(INSTALLED_FILES_FILE_ROOT)
 
-# Returns a list of EXTRA_INSTALL_ZIPS trios whose primary file is contained within $(1)
-# The trios will contain the primary installed file : the directory to unzip the zip to : the zip
-define relevant-extra-install-zips
-$(strip $(foreach p,$(EXTRA_INSTALL_ZIPS), \
-  $(if $(filter $(call word-colon,1,$(p)),$(1)), \
-    $(p))))
-endef
-
-# Writes a text file that contains all of the files that will be inside a partition.
-# All the file paths will be relative to the partition's staging directory.
-# It will also take into account files inside zips listed in EXTRA_INSTALL_ZIPS.
-#
-# Arguments:
-#   $(1): Output file
-#   $(2): The partition's staging directory
-#   $(3): Files to include in the partition
-define write-partition-file-list
-$(1): $$(HOST_OUT_EXECUTABLES)/extra_install_zips_file_list $(foreach p,$(call relevant-extra-install-zips,$(filter $(2)/%,$(3))),$(call word-colon,3,$(p)))
-	@echo Writing $$@
-	rm -f $$@
-	echo -n > $$@
-	$$(foreach f,$(subst $(2)/,,$(filter $(2)/%,$(3))),echo "$$(f)" >> $$@$$(newline))
-	$$(HOST_OUT_EXECUTABLES)/extra_install_zips_file_list $(2) $(call relevant-extra-install-zips,$(filter $(2)/%,$(3))) >> $$@
-endef
-
 # -----------------------------------------------------------------
 ifdef BUILDING_SYSTEM_IMAGE
 
@@ -3510,6 +3523,8 @@
 		--output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT)"
 	$(HOST_OUT_EXECUTABLES)/conv_linker_config append --source $@ --output $@ --key requireLibs \
 		--value "$(foreach lib,$(LLNDK_MOVED_TO_APEX_LIBRARIES), $(lib).so)"
+	$(HOST_OUT_EXECUTABLES)/conv_linker_config append --source $@ --output $@ --key provideLibs \
+		--value "$(foreach lib,$(PRODUCT_EXTRA_STUB_LIBRARIES), $(lib).so)"
 
 $(call declare-1p-target,$(SYSTEM_LINKER_CONFIG),)
 $(call declare-license-deps,$(SYSTEM_LINKER_CONFIG),$(INTERNAL_SYSTEMIMAGE_FILES) $(SYSTEM_LINKER_CONFIG_SOURCE))
@@ -3562,14 +3577,24 @@
 file_list_diff := $(HOST_OUT_EXECUTABLES)/file_list_diff$(HOST_EXECUTABLE_SUFFIX)
 system_file_diff_timestamp := $(systemimage_intermediates)/file_diff.timestamp
 
+# The build configuration for the REL version may have additional files to allow.
+# In that case, use allowlist_next in addition to the base allowlist.
+system_file_diff_allowlist_next :=
+ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+system_file_diff_allowlist_next := $(ALL_MODULES.system_image_diff_allowlist_next.INSTALLED)
+$(system_file_diff_timestamp): PRIVATE_ALLOWLIST_NEXT := $(system_file_diff_allowlist_next)
+endif
 $(system_file_diff_timestamp): \
 	    $(systemimage_intermediates)/file_list.txt \
 	    $(ALL_MODULES.$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE).FILESYSTEM_FILELIST) \
 	    $(ALL_MODULES.system_image_diff_allowlist.INSTALLED) \
+	    $(system_file_diff_allowlist_next) \
 	    $(file_list_diff)
 	$(file_list_diff) $(systemimage_intermediates)/file_list.txt \
 	  $(ALL_MODULES.$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE).FILESYSTEM_FILELIST) \
-	  $(ALL_MODULES.system_image_diff_allowlist.INSTALLED) $(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE)
+	  $(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE) \
+	  --allowlists $(ALL_MODULES.system_image_diff_allowlist.INSTALLED) \
+	  $(PRIVATE_ALLOWLIST_NEXT)
 	touch $@
 
 $(BUILT_SYSTEMIMAGE): $(system_file_diff_timestamp)
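A hedged sketch of how the extra REL-only allowlist might be staged on the product side; installing these modules through PRODUCT_PACKAGES is an assumption, while the module names come from the ALL_MODULES lookups above:

# Assumption: the allowlist modules are pulled in by the product makefile.
PRODUCT_PACKAGES += system_image_diff_allowlist system_image_diff_allowlist_next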
@@ -3587,10 +3612,10 @@
 ifeq ($(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE),)
 $(error PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE must be set if USE_SOONG_DEFINED_SYSTEM_IMAGE is true)
 endif
-soong_defined_system_image := $(call intermediates-dir-for,ETC,$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE))/$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE)
-$(BUILT_SYSTEMIMAGE): $(INSTALLED_FILES_FILE) $(systemimage_intermediates)/file_list.txt $(soong_defined_system_image)
-$(eval $(call copy-one-file, $(soong_defined_system_image), $(BUILT_SYSTEMIMAGE)))
-soong_defined_system_image :=
+SOONG_DEFINED_SYSTEM_IMAGE_PATH := $(call intermediates-dir-for,ETC,$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE))/$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE)
+SOONG_DEFINED_SYSTEM_IMAGE_BASE := $(dir $(ALL_MODULES.$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE).FILESYSTEM_FILELIST))
+$(BUILT_SYSTEMIMAGE): $(INSTALLED_FILES_FILE) $(systemimage_intermediates)/file_list.txt $(SOONG_DEFINED_SYSTEM_IMAGE_PATH)
+$(eval $(call copy-one-file, $(SOONG_DEFINED_SYSTEM_IMAGE_PATH), $(BUILT_SYSTEMIMAGE)))
 else
 $(BUILT_SYSTEMIMAGE): $(FULL_SYSTEMIMAGE_DEPS) $(INSTALLED_FILES_FILE) $(systemimage_intermediates)/file_list.txt
 	$(call build-systemimage-target,$@)
@@ -3675,10 +3700,10 @@
 # -----------------------------------------------------------------
 # data partition image
 INSTALLED_FILES_OUTSIDE_IMAGES := $(filter-out $(TARGET_OUT_DATA)/%, $(INSTALLED_FILES_OUTSIDE_IMAGES))
+ifdef BUILDING_USERDATA_IMAGE
 INTERNAL_USERDATAIMAGE_FILES := \
     $(filter $(TARGET_OUT_DATA)/%,$(ALL_DEFAULT_INSTALLED_MODULES))
 
-ifdef BUILDING_USERDATA_IMAGE
 userdataimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,userdata)
 BUILT_USERDATAIMAGE_TARGET := $(PRODUCT_OUT)/userdata.img
@@ -3995,6 +4020,21 @@
     $(filter $(TARGET_OUT_PRODUCT)/%,\
       $(ALL_DEFAULT_INSTALLED_MODULES))
 
+# Install product/etc/linker.config.pb with PRODUCT_PRODUCT_LINKER_CONFIG_FRAGMENTS
+product_linker_config_file := $(TARGET_OUT_PRODUCT)/etc/linker.config.pb
+$(product_linker_config_file): private_linker_config_fragments := $(PRODUCT_PRODUCT_LINKER_CONFIG_FRAGMENTS)
+$(product_linker_config_file): $(INTERNAL_PRODUCTIMAGE_FILES) | $(HOST_OUT_EXECUTABLES)/conv_linker_config
+	@echo Creating linker config: $@
+	@mkdir -p $(dir $@)
+	@rm -f $@
+	$(HOST_OUT_EXECUTABLES)/conv_linker_config proto \
+		--source $(call normalize-path-list,$(private_linker_config_fragments)) \
+		--output $@
+$(call declare-1p-target,$(product_linker_config_file),)
+INTERNAL_PRODUCTIMAGE_FILES += $(product_linker_config_file)
+ALL_DEFAULT_INSTALLED_MODULES += $(product_linker_config_file)
+
+
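A hedged sketch of the product-side input for the rule above; the path is illustrative, and the JSON fragment format is an assumption based on other conv_linker_config sources:

# Illustrative only: contribute a fragment to product/etc/linker.config.pb.
PRODUCT_PRODUCT_LINKER_CONFIG_FRAGMENTS += device/vendorx/boardy/product.linker.config.json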
 INSTALLED_FILES_FILE_PRODUCT := $(PRODUCT_OUT)/installed-files-product.txt
 INSTALLED_FILES_JSON_PRODUCT := $(INSTALLED_FILES_FILE_PRODUCT:.txt=.json)
 $(INSTALLED_FILES_FILE_PRODUCT): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_PRODUCT)
@@ -5136,6 +5176,7 @@
   $(TARGET_OUT)/apex/% \
   $(TARGET_OUT_SYSTEM_EXT)/apex/% \
   $(TARGET_OUT_VENDOR)/apex/% \
+  $(TARGET_OUT_ODM)/apex/% \
   $(TARGET_OUT_PRODUCT)/apex/% \
 
 apex_files := $(sort $(filter $(apex_dirs), $(INTERNAL_ALLIMAGES_FILES)))
@@ -5188,6 +5229,7 @@
   $(TARGET_OUT_PRODUCT)/apex/% \
   $(TARGET_OUT_SYSTEM_EXT)/apex/% \
   $(TARGET_OUT_VENDOR)/apex/% \
+  $(TARGET_OUT_ODM)/apex/% \
 
 apex_files := $(sort $(filter $(apex_dirs), $(INTERNAL_ALLIMAGES_FILES)))
 
@@ -5206,6 +5248,7 @@
 	   --system_ext_path $(TARGET_OUT_SYSTEM_EXT) \
 	   --product_path $(TARGET_OUT_PRODUCT) \
 	   --vendor_path $(TARGET_OUT_VENDOR) \
+	   --odm_path $(TARGET_OUT_ODM) \
 	   --apex_path $(APEX_OUT)
 
 apex_files :=
@@ -5262,7 +5305,7 @@
 	  --dirmap /system_ext:$(TARGET_OUT_SYSTEM_EXT) \
 	  --dirmap /product:$(TARGET_OUT_PRODUCT) \
 	  --dirmap /apex:$(APEX_OUT) \
-	  $(VINTF_FRAMEWORK_MANIFEST_FROZEN_DIR) > $@ 2>&1 ) || ( cat $@ && exit 1 )
+	  system/libhidl/vintfdata/frozen > $@ 2>&1 ) || ( cat $@ && exit 1 )
 
 $(call declare-1p-target,$(vintffm_log))
 
@@ -6130,6 +6173,9 @@
 $(BUILT_TARGET_FILES_DIR): zip_root := $(intermediates)/$(name)
 $(BUILT_TARGET_FILES_DIR): intermediates := $(intermediates)
 
+ifneq ($(SOONG_DEFINED_SYSTEM_IMAGE_PATH),)
+  $(BUILT_TARGET_FILES_DIR): $(SOONG_DEFINED_SYSTEM_IMAGE_PATH)
+endif
 
 # $(1): Directory to copy
 # $(2): Location to copy it to
@@ -6344,6 +6390,10 @@
   endif
 endif
 
+ifdef BUILDING_VENDOR_KERNEL_BOOT_IMAGE
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDOR_KERNEL_RAMDISK_FILES)
+endif
+
 ifdef BUILDING_RECOVERY_IMAGE
   # TODO(b/30414428): Can't depend on INTERNAL_RECOVERYIMAGE_FILES alone like other
   # BUILT_TARGET_FILES_PACKAGE dependencies because currently there're cp/rsync/rm
@@ -6458,8 +6508,11 @@
 	    $(INSTALLED_RAMDISK_TARGET) \
 	    $(INSTALLED_DTBIMAGE_TARGET) \
 	    $(INSTALLED_2NDBOOTLOADER_TARGET) \
+	    $(INSTALLED_VENDOR_KERNEL_RAMDISK_TARGET) \
 	    $(BUILT_RAMDISK_16K_TARGET) \
 	    $(BUILT_KERNEL_16K_TARGET) \
+	    $(BUILT_BOOTIMAGE_16K_TARGET) \
+	    $(INSTALLED_DTBOIMAGE_16KB_TARGET) \
 	    $(BOARD_PREBUILT_DTBOIMAGE) \
 	    $(BOARD_PREBUILT_RECOVERY_DTBOIMAGE) \
 	    $(BOARD_RECOVERY_ACPIO) \
@@ -6613,8 +6666,13 @@
 endif # INSTALLED_VENDOR_BOOTIMAGE_TARGET
 ifdef BUILDING_SYSTEM_IMAGE
 	@# Contents of the system image
+ifneq ($(SOONG_DEFINED_SYSTEM_IMAGE_PATH),)
+	$(hide) $(call package_files-copy-root, \
+	    $(SOONG_DEFINED_SYSTEM_IMAGE_BASE)/root/system,$(zip_root)/SYSTEM)
+else
 	$(hide) $(call package_files-copy-root, \
 	    $(SYSTEMIMAGE_SOURCE_DIR),$(zip_root)/SYSTEM)
+endif
 else ifdef INSTALLED_BUILD_PROP_TARGET
 	@# Copy the system build.prop even if not building a system image
 	@# because add_img_to_target_files may need it to build other partition
@@ -6808,14 +6866,22 @@
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(INSTALLED_DTBOIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
 endif # BOARD_PREBUILT_DTBOIMAGE
-ifdef BUILT_KERNEL_16K_TARGET
+ifdef BOARD_KERNEL_PATH_16K
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(BUILT_KERNEL_16K_TARGET) $(zip_root)/PREBUILT_IMAGES/
-endif # BUILT_KERNEL_16K_TARGET
-ifdef BUILT_RAMDISK_16K_TARGET
+endif # BOARD_KERNEL_PATH_16K
+ifdef BOARD_KERNEL_MODULES_16K
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(BUILT_RAMDISK_16K_TARGET) $(zip_root)/PREBUILT_IMAGES/
-endif # BUILT_RAMDISK_16K_TARGET
+endif # BOARD_KERNEL_MODULES_16K
+ifdef BUILT_BOOTIMAGE_16K_TARGET
+	$(hide) mkdir -p $(zip_root)/IMAGES
+	$(hide) cp $(BUILT_BOOTIMAGE_16K_TARGET) $(zip_root)/IMAGES/
+endif # BUILT_BOOTIMAGE_16K_TARGET
+ifdef INSTALLED_DTBOIMAGE_16KB_TARGET
+	$(hide) mkdir -p $(zip_root)/IMAGES
+	$(hide) cp $(INSTALLED_DTBOIMAGE_16KB_TARGET) $(zip_root)/IMAGES/
+endif # INSTALLED_DTBOIMAGE_16KB_TARGET
 ifeq ($(BOARD_USES_PVMFWIMAGE),true)
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(INSTALLED_PVMFWIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
@@ -6883,6 +6949,33 @@
 	$(hide) echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/INIT_BOOT/pagesize
 endif # BOARD_KERNEL_PAGESIZE
 endif # BUILDING_INIT_BOOT_IMAGE
+ifdef BOARD_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_EROFS_COMPRESS_HINTS) $(zip_root)/META/erofs_default_compress_hints.txt
+endif
+ifdef BOARD_SYSTEMIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_SYSTEMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/system_erofs_compress_hints.txt
+endif
+ifdef BOARD_SYSTEM_EXTIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_SYSTEM_EXTIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/system_ext_erofs_compress_hints.txt
+endif
+ifdef BOARD_PRODUCTIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_PRODUCTIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/product_erofs_compress_hints.txt
+endif
+ifdef BOARD_VENDORIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_VENDORIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/vendor_erofs_compress_hints.txt
+endif
+ifdef BOARD_ODMIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_ODMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/odm_erofs_compress_hints.txt
+endif
+ifdef BOARD_VENDOR_DLKMIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_VENDOR_DLKMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/vendor_dlkm_erofs_compress_hints.txt
+endif
+ifdef BOARD_ODM_DLKMIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_ODM_DLKMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/odm_dlkm_erofs_compress_hints.txt
+endif
+ifdef BOARD_SYSTEM_DLKMIMAGE_EROFS_COMPRESS_HINTS
+	$(hide) cp $(BOARD_SYSTEM_DLKMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/system_dlkm_erofs_compress_hints.txt
+endif
 ifneq ($(INSTALLED_VENDOR_BOOTIMAGE_TARGET),)
 	$(call fs_config,$(zip_root)/VENDOR_BOOT/RAMDISK,) > $(zip_root)/META/vendor_boot_filesystem_config.txt
 endif
@@ -7859,13 +7952,11 @@
 $(call dist-for-goals,haiku-presubmit,$(SOONG_PRESUBMIT_FUZZ_PACKAGING_ARCH_MODULES))
 
 # -----------------------------------------------------------------
-# Extract platform fonts used in Layoutlib
+# Extract additional data files used in Layoutlib
 include $(BUILD_SYSTEM)/layoutlib_data.mk
 
 # -----------------------------------------------------------------
-# Desktop pack image hook.
-ifneq (,$(strip $(PACK_DESKTOP_FILESYSTEM_IMAGES)))
-PACK_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_image.bin
+# Desktop pack common variables.
 PACK_IMAGE_SCRIPT := $(HOST_OUT_EXECUTABLES)/pack_image
 IMAGES := $(INSTALLED_BOOTIMAGE_TARGET) \
 	$(INSTALLED_SUPERIMAGE_TARGET) \
@@ -7874,6 +7965,11 @@
 	$(INSTALLED_VBMETAIMAGE_TARGET) \
 	$(INSTALLED_USERDATAIMAGE_TARGET)
 
+# -----------------------------------------------------------------
+# Desktop pack image hook.
+ifneq (,$(strip $(PACK_DESKTOP_FILESYSTEM_IMAGES)))
+PACK_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_image.bin
+
 $(PACK_IMAGE_TARGET): $(IMAGES) $(PACK_IMAGE_SCRIPT)
 	$(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) --noarchive
 
@@ -7882,11 +7978,86 @@
 $(PACKED_IMAGE_ARCHIVE_TARGET): $(PACK_IMAGE_TARGET) | $(GZIP)
 	$(GZIP) -fk $(PACK_IMAGE_TARGET)
 
-droidcore-unbundled: $(PACKED_IMAGE_ARCHIVE_TARGET)
+$(call dist-for-goals,dist_files,$(PACKED_IMAGE_ARCHIVE_TARGET))
+
+.PHONY: pack-image
+pack-image: $(PACK_IMAGE_TARGET)
 
 endif # PACK_DESKTOP_FILESYSTEM_IMAGES
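A hedged sketch of enabling the hook above; PACK_DESKTOP_FILESYSTEM_IMAGES only needs to be non-empty, and setting it from a product makefile rather than the environment is an assumption:

# Illustrative only: enable the desktop filesystem-image pack hook; the image
# can then be built via the pack-image phony goal defined above.
PACK_DESKTOP_FILESYSTEM_IMAGES := true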
 
 # -----------------------------------------------------------------
+# Desktop pack recovery image hook.
+ifeq ($(BOARD_USES_DESKTOP_RECOVERY_IMAGE),true)
+PACK_RECOVERY_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_recovery_image.bin
+PACK_RECOVERY_IMAGE_ARGS := --noarchive --recovery
+
+ifneq (,$(strip $(PACK_RECOVERY_IMAGE_EXPERIMENTAL)))
+PACK_RECOVERY_IMAGE_ARGS += --experimental
+endif # PACK_RECOVERY_IMAGE_EXPERIMENTAL
+
+$(PACK_RECOVERY_IMAGE_TARGET): $(IMAGES) $(PACK_IMAGE_SCRIPT)
+	$(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) $(PACK_RECOVERY_IMAGE_ARGS)
+
+PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET := $(PACK_RECOVERY_IMAGE_TARGET).gz
+
+$(PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET): $(PACK_RECOVERY_IMAGE_TARGET) | $(GZIP)
+	$(GZIP) -fk $(PACK_RECOVERY_IMAGE_TARGET)
+
+$(call dist-for-goals,dist_files,$(PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET))
+
+.PHONY: pack-recovery-image
+pack-recovery-image: $(PACK_RECOVERY_IMAGE_TARGET)
+
+endif # BOARD_USES_DESKTOP_RECOVERY_IMAGE
+
+# -----------------------------------------------------------------
+# Desktop pack update image hook.
+ifeq ($(BOARD_USES_DESKTOP_UPDATE_IMAGE),true)
+PACK_UPDATE_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_update_image.bin
+PACK_UPDATE_IMAGE_ARGS := --noarchive --update
+
+ifneq (,$(strip $(PACK_UPDATE_IMAGE_EXPERIMENTAL)))
+PACK_UPDATE_IMAGE_ARGS += --experimental
+endif # PACK_UPDATE_IMAGE_EXPERIMENTAL
+
+$(PACK_UPDATE_IMAGE_TARGET): $(IMAGES) $(PACK_IMAGE_SCRIPT)
+	$(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) $(PACK_UPDATE_IMAGE_ARGS)
+
+PACKED_UPDATE_IMAGE_ARCHIVE_TARGET := $(PACK_UPDATE_IMAGE_TARGET).gz
+
+$(PACKED_UPDATE_IMAGE_ARCHIVE_TARGET): $(PACK_UPDATE_IMAGE_TARGET) | $(GZIP)
+	$(GZIP) -fk $(PACK_UPDATE_IMAGE_TARGET)
+
+$(call dist-for-goals,dist_files,$(PACKED_UPDATE_IMAGE_ARCHIVE_TARGET))
+
+.PHONY: pack-update-image
+pack-update-image: $(PACK_UPDATE_IMAGE_TARGET)
+
+endif # BOARD_USES_DESKTOP_UPDATE_IMAGE
+
+PACK_MIGRATION_IMAGE_SCRIPT := $(HOST_OUT_EXECUTABLES)/pack_migration_image
+
+# -----------------------------------------------------------------
+# Desktop pack migration image hook.
+ifeq ($(ANDROID_DESKTOP_MIGRATION_IMAGE),true)
+PACK_MIGRATION_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_migration_image.bin
+
+$(PACK_MIGRATION_IMAGE_TARGET): $(IMAGES) $(PACK_MIGRATION_IMAGE_SCRIPT)
+	$(PACK_MIGRATION_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) --noarchive
+
+PACKED_MIGRATION_IMAGE_ARCHIVE_TARGET := $(PACK_MIGRATION_IMAGE_TARGET).gz
+
+$(PACKED_MIGRATION_IMAGE_ARCHIVE_TARGET): $(PACK_MIGRATION_IMAGE_TARGET) | $(GZIP)
+	$(GZIP) -fk $(PACK_MIGRATION_IMAGE_TARGET)
+
+$(call dist-for-goals,dist_files,$(PACKED_MIGRATION_IMAGE_ARCHIVE_TARGET))
+
+.PHONY: pack-migration-image
+pack-migration-image: $(PACK_MIGRATION_IMAGE_TARGET)
+
+endif # ANDROID_DESKTOP_MIGRATION_IMAGE
+
+# -----------------------------------------------------------------
 # OS Licensing
 
 include $(BUILD_SYSTEM)/os_licensing.mk
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index f2ff286..769a6f7 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -28,20 +28,54 @@
 
 $(call add_soong_config_var,ANDROID,BOARD_USES_ODMIMAGE)
 $(call soong_config_set_bool,ANDROID,BOARD_USES_RECOVERY_AS_BOOT,$(BOARD_USES_RECOVERY_AS_BOOT))
+$(call soong_config_set_bool,ANDROID,BOARD_MOVE_GSI_AVB_KEYS_TO_VENDOR_BOOT,$(BOARD_MOVE_GSI_AVB_KEYS_TO_VENDOR_BOOT))
 $(call add_soong_config_var,ANDROID,CHECK_DEV_TYPE_VIOLATIONS)
+$(call soong_config_set_bool,ANDROID,HAS_BOARD_SYSTEM_EXT_PREBUILT_DIR,$(if $(BOARD_SYSTEM_EXT_PREBUILT_DIR),true,false))
+$(call soong_config_set_bool,ANDROID,HAS_BOARD_PRODUCT_PREBUILT_DIR,$(if $(BOARD_PRODUCT_PREBUILT_DIR),true,false))
 $(call add_soong_config_var,ANDROID,PLATFORM_SEPOLICY_VERSION)
 $(call add_soong_config_var,ANDROID,PLATFORM_SEPOLICY_COMPAT_VERSIONS)
 $(call add_soong_config_var,ANDROID,PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT)
+$(call soong_config_set_bool,ANDROID,RELEASE_BOARD_API_LEVEL_FROZEN,$(RELEASE_BOARD_API_LEVEL_FROZEN))
 $(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_DRMSERVER)
 $(call add_soong_config_var,ANDROID,TARGET_ENABLE_MEDIADRM_64)
 $(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_MEDIASERVER)
+$(call add_soong_config_var,ANDROID,BOARD_GENFS_LABELS_VERSION)
+
+$(call add_soong_config_var,ANDROID,ADDITIONAL_M4DEFS,$(if $(BOARD_SEPOLICY_M4DEFS),$(addprefix -D,$(BOARD_SEPOLICY_M4DEFS))))
+
+# For bootable/recovery
+RECOVERY_API_VERSION := 3
+RECOVERY_FSTAB_VERSION := 2
+$(call soong_config_set, recovery, recovery_api_version, $(RECOVERY_API_VERSION))
+$(call soong_config_set, recovery, recovery_fstab_version, $(RECOVERY_FSTAB_VERSION))
+
+# For Sanitizers
+$(call soong_config_set_bool,ANDROID,ASAN_ENABLED,$(if $(filter address,$(SANITIZE_TARGET)),true,false))
+$(call soong_config_set_bool,ANDROID,HWASAN_ENABLED,$(if $(filter hwaddress,$(SANITIZE_TARGET)),true,false))
+$(call soong_config_set_bool,ANDROID,SANITIZE_TARGET_SYSTEM_ENABLED,$(if $(filter true,$(SANITIZE_TARGET_SYSTEM)),true,false))
+
+# For init.environ.rc
+$(call soong_config_set_bool,ANDROID,GCOV_COVERAGE,$(NATIVE_COVERAGE))
+$(call soong_config_set_bool,ANDROID,CLANG_COVERAGE,$(CLANG_COVERAGE))
+$(call soong_config_set,ANDROID,SCUDO_ALLOCATION_RING_BUFFER_SIZE,$(PRODUCT_SCUDO_ALLOCATION_RING_BUFFER_SIZE))
+
+$(call soong_config_set_bool,ANDROID,EMMA_INSTRUMENT,$(if $(filter true,$(EMMA_INSTRUMENT)),true,false))
 
 # PRODUCT_PRECOMPILED_SEPOLICY defaults to true. Explicitly check if it's "false" or not.
 $(call soong_config_set_bool,ANDROID,PRODUCT_PRECOMPILED_SEPOLICY,$(if $(filter false,$(PRODUCT_PRECOMPILED_SEPOLICY)),false,true))
 
+# For art modules
+$(call soong_config_set_bool,art_module,host_prefer_32_bit,$(if $(filter true,$(HOST_PREFER_32_BIT)),true,false))
 ifdef ART_DEBUG_OPT_FLAG
 $(call soong_config_set,art_module,art_debug_opt_flag,$(ART_DEBUG_OPT_FLAG))
 endif
+# The default value of ART_BUILD_HOST_DEBUG is true
+$(call soong_config_set_bool,art_module,art_build_host_debug,$(if $(filter false,$(ART_BUILD_HOST_DEBUG)),false,true))
+
+# For chre
+$(call soong_config_set_bool,chre,chre_daemon_lama_enabled,$(if $(filter true,$(CHRE_DAEMON_LPMA_ENABLED)),true,false))
+$(call soong_config_set_bool,chre,chre_dedicated_transport_channel_enabled,$(if $(filter true,$(CHRE_DEDICATED_TRANSPORT_CHANNEL_ENABLED)),true,false))
+$(call soong_config_set_bool,chre,chre_log_atom_extension_enabled,$(if $(filter true,$(CHRE_LOG_ATOM_EXTENSION_ENABLED)),true,false))
 
 ifdef TARGET_BOARD_AUTO
   $(call add_soong_config_var_value, ANDROID, target_board_auto, $(TARGET_BOARD_AUTO))
@@ -56,11 +90,6 @@
 endif
 endif
 
-# TODO(b/308187800): some internal modules set `prefer` to true on the prebuilt apex module,
-# and set that to false when `ANDROID.module_build_from_source` is true.
-# Set this soong config variable to true for now, and cleanup `prefer` as part of b/308187800
-$(call add_soong_config_var_value,ANDROID,module_build_from_source,true)
-
 # Enable SystemUI optimizations by default unless explicitly set.
 SYSTEMUI_OPTIMIZE_JAVA ?= true
 $(call add_soong_config_var,ANDROID,SYSTEMUI_OPTIMIZE_JAVA)
@@ -79,6 +108,10 @@
 $(call add_soong_config_var_value,ANDROID,avf_microdroid_guest_gki_version,$(PRODUCT_AVF_MICRODROID_GUEST_GKI_VERSION))
 endif
 
+ifdef TARGET_BOOTS_16K
+$(call soong_config_set_bool,ANDROID,target_boots_16k,$(filter true,$(TARGET_BOOTS_16K)))
+endif
+
 ifdef PRODUCT_MEMCG_V2_FORCE_ENABLED
 $(call add_soong_config_var_value,ANDROID,memcg_v2_force_enabled,$(PRODUCT_MEMCG_V2_FORCE_ENABLED))
 endif
@@ -90,6 +123,7 @@
 $(call add_soong_config_var_value,ANDROID,release_avf_allow_preinstalled_apps,$(RELEASE_AVF_ALLOW_PREINSTALLED_APPS))
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_device_assignment,$(RELEASE_AVF_ENABLE_DEVICE_ASSIGNMENT))
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_dice_changes,$(RELEASE_AVF_ENABLE_DICE_CHANGES))
+$(call add_soong_config_var_value,ANDROID,release_avf_enable_early_vm,$(RELEASE_AVF_ENABLE_EARLY_VM))
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_llpvm_changes,$(RELEASE_AVF_ENABLE_LLPVM_CHANGES))
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_multi_tenant_microdroid_vm,$(RELEASE_AVF_ENABLE_MULTI_TENANT_MICRODROID_VM))
 $(call add_soong_config_var_value,ANDROID,release_avf_enable_network,$(RELEASE_AVF_ENABLE_NETWORK))
@@ -162,3 +196,69 @@
 # Enable Profiling module. Also used by platform_bootclasspath.
 $(call soong_config_set,ANDROID,release_package_profiling_module,$(RELEASE_PACKAGE_PROFILING_MODULE))
 $(call soong_config_set,bootclasspath,release_package_profiling_module,$(RELEASE_PACKAGE_PROFILING_MODULE))
+
+# Add perf-setup build flag to soong
+# Note: BOARD_PERFSETUP_SCRIPT location must be under platform_testing/scripts/perf-setup/.
+ifdef BOARD_PERFSETUP_SCRIPT
+  $(call soong_config_set,perf,board_perfsetup_script,$(notdir $(BOARD_PERFSETUP_SCRIPT)))
+endif
+
+# Add target_use_pan_display flag for hardware/libhardware:gralloc.default
+$(call soong_config_set_bool,gralloc,target_use_pan_display,$(if $(filter true,$(TARGET_USE_PAN_DISPLAY)),true,false))
+
+# Add use_camera_v4l2_hal flag for hardware/libhardware/modules/camera/3_4:camera.v4l2
+$(call soong_config_set_bool,camera,use_camera_v4l2_hal,$(if $(filter true,$(USE_CAMERA_V4L2_HAL)),true,false))
+
+# Add audioserver_multilib flag for hardware/interfaces/soundtrigger/2.0/default:android.hardware.soundtrigger@2.0-impl
+ifneq ($(strip $(AUDIOSERVER_MULTILIB)),)
+  $(call soong_config_set,soundtrigger,audioserver_multilib,$(AUDIOSERVER_MULTILIB))
+endif
+
+# Add sim_count, disable_rild_oem_hook, and use_aosp_rild flags for RIL-related modules
+$(call soong_config_set,ril,sim_count,$(SIM_COUNT))
+ifneq ($(DISABLE_RILD_OEM_HOOK), false)
+  $(call soong_config_set_bool,ril,disable_rild_oem_hook,true)
+endif
+ifneq ($(ENABLE_VENDOR_RIL_SERVICE), true)
+  $(call soong_config_set_bool,ril,use_aosp_rild,true)
+endif
+
+# Export target_board_platform to soong for hardware/google/graphics/common/libmemtrack:memtrack.$(TARGET_BOARD_PLATFORM)
+$(call soong_config_set,ANDROID,target_board_platform,$(TARGET_BOARD_PLATFORM))
+
+# Export board_uses_scaler_m2m1shot and board_uses_align_restriction to soong for hardware/google/graphics/common/libscaler:libexynosscaler
+$(call soong_config_set_bool,google_graphics,board_uses_scaler_m2m1shot,$(if $(filter true,$(BOARD_USES_SCALER_M2M1SHOT)),true,false))
+$(call soong_config_set_bool,google_graphics,board_uses_align_restriction,$(if $(filter true,$(BOARD_USES_ALIGN_RESTRICTION)),true,false))
+
+# Export related variables to soong for hardware/google/graphics/common/libacryl:libacryl
+ifdef BOARD_LIBACRYL_DEFAULT_COMPOSITOR
+  $(call soong_config_set,acryl,libacryl_default_compositor,$(BOARD_LIBACRYL_DEFAULT_COMPOSITOR))
+endif
+ifdef BOARD_LIBACRYL_DEFAULT_SCALER
+  $(call soong_config_set,acryl,libacryl_default_scaler,$(BOARD_LIBACRYL_DEFAULT_SCALER))
+endif
+ifdef BOARD_LIBACRYL_DEFAULT_BLTER
+  $(call soong_config_set,acryl,libacryl_default_blter,$(BOARD_LIBACRYL_DEFAULT_BLTER))
+endif
+ifdef BOARD_LIBACRYL_G2D_HDR_PLUGIN
+  #BOARD_LIBACRYL_G2D_HDR_PLUGIN is set in each board config
+  $(call soong_config_set_bool,acryl,libacryl_use_g2d_hdr_plugin,true)
+endif
+
+# Export related variables to soong for hardware/google/graphics/common/BoardConfigCFlags.mk
+$(call soong_config_set_bool,google_graphics,hwc_no_support_skip_validate,$(if $(filter true,$(HWC_NO_SUPPORT_SKIP_VALIDATE)),true,false))
+$(call soong_config_set_bool,google_graphics,hwc_support_color_transform,$(if $(filter true,$(HWC_SUPPORT_COLOR_TRANSFORM)),true,false))
+$(call soong_config_set_bool,google_graphics,hwc_support_render_intent,$(if $(filter true,$(HWC_SUPPORT_RENDER_INTENT)),true,false))
+$(call soong_config_set_bool,google_graphics,board_uses_virtual_display,$(if $(filter true,$(BOARD_USES_VIRTUAL_DISPLAY)),true,false))
+$(call soong_config_set_bool,google_graphics,board_uses_dt,$(if $(filter true,$(BOARD_USES_DT)),true,false))
+$(call soong_config_set_bool,google_graphics,board_uses_decon_64bit_address,$(if $(filter true,$(BOARD_USES_DECON_64BIT_ADDRESS)),true,false))
+$(call soong_config_set_bool,google_graphics,board_uses_hdrui_gles_conversion,$(if $(filter true,$(BOARD_USES_HDRUI_GLES_CONVERSION)),true,false))
+$(call soong_config_set_bool,google_graphics,uses_idisplay_intf_sec,$(if $(filter true,$(USES_IDISPLAY_INTF_SEC)),true,false))
+
+# Variables for fs_config
+$(call soong_config_set_bool,fs_config,vendor,$(if $(BOARD_USES_VENDORIMAGE)$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),true,false))
+$(call soong_config_set_bool,fs_config,oem,$(if $(BOARD_USES_OEMIMAGE)$(BOARD_OEMIMAGE_FILE_SYSTEM_TYPE),true,false))
+$(call soong_config_set_bool,fs_config,odm,$(if $(BOARD_USES_ODMIMAGE)$(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE),true,false))
+$(call soong_config_set_bool,fs_config,vendor_dlkm,$(if $(BOARD_USES_VENDOR_DLKMIMAGE)$(BOARD_VENDOR_DLKMIMAGE_FILE_SYSTEM_TYPE),true,false))
+$(call soong_config_set_bool,fs_config,odm_dlkm,$(if $(BOARD_USES_ODM_DLKMIMAGE)$(BOARD_ODM_DLKMIMAGE_FILE_SYSTEM_TYPE),true,false))
+$(call soong_config_set_bool,fs_config,system_dlkm,$(if $(BOARD_USES_SYSTEM_DLKMIMAGE)$(BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE),true,false))
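For readers new to this file, a minimal sketch of the idiom repeated throughout this block; the namespace, Soong variable, and Make flag below are illustrative, not part of this change:

# Coerce an arbitrarily-set Make flag into a strict true/false Soong config bool.
$(call soong_config_set_bool,example_namespace,example_feature,$(if $(filter true,$(EXAMPLE_FEATURE_FLAG)),true,false))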
diff --git a/core/base_rules.mk b/core/base_rules.mk
index ca553f6..5363e0f 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -340,7 +340,7 @@
 
 ifneq (,$(LOCAL_SOONG_INSTALLED_MODULE))
   ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
-    $(call pretty-error, LOCAL_SOONG_INSTALLED_MODULE can only be used from $(SOONG_ANDROID_MK))
+    $(call pretty-error, LOCAL_MODULE_MAKEFILE can only be used from $(SOONG_ANDROID_MK))
   endif
   # Use the install path requested by Soong.
   LOCAL_INSTALLED_MODULE := $(LOCAL_SOONG_INSTALLED_MODULE)
@@ -776,6 +776,8 @@
   $(eval my_compat_dist_$(suite) := $(patsubst %:$(LOCAL_INSTALLED_MODULE),$(LOCAL_INSTALLED_MODULE):$(LOCAL_INSTALLED_MODULE),\
     $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
       $(LOCAL_BUILT_MODULE):$(dir)/$(my_installed_module_stem)))) \
+  $(eval my_compat_module_arch_dir_$(suite).$(my_register_name) :=) \
+  $(foreach dir,$(call compatibility_suite_dirs,$(suite),$(arch_dir)),$(eval my_compat_module_arch_dir_$(suite).$(my_register_name) += $(dir))) \
   $(eval my_compat_dist_config_$(suite) := ))
 
 ifneq (,$(LOCAL_SOONG_CLASSES_JAR))
diff --git a/core/binary.mk b/core/binary.mk
index 0bc9469..ea862be 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -174,7 +174,7 @@
 endif
 endif
 
-my_ndk_sysroot_include :=
+my_ndk_sysroot :=
 my_ndk_sysroot_lib :=
 my_api_level := 10000
 
@@ -205,15 +205,11 @@
     my_api_level := $(my_ndk_api)
   endif
 
-  my_ndk_source_root := \
-      $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources
   my_built_ndk := $(SOONG_OUT_DIR)/ndk
   my_ndk_triple := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_TRIPLE)
-  my_ndk_sysroot_include := \
-      $(my_built_ndk)/sysroot/usr/include \
-      $(my_built_ndk)/sysroot/usr/include/$(my_ndk_triple) \
+  my_ndk_sysroot := $(my_built_ndk)/sysroot
 
-  my_ndk_sysroot_lib := $(my_built_ndk)/sysroot/usr/lib/$(my_ndk_triple)/$(my_ndk_api)
+  my_ndk_sysroot_lib := $(my_ndk_sysroot)/usr/lib/$(my_ndk_triple)/$(my_ndk_api)
 
   # The bionic linker now has support for packed relocations and gnu style
   # hashes (which are much faster!), but shipping to older devices requires
@@ -239,16 +235,18 @@
   endif
 
   ifeq (system,$(LOCAL_NDK_STL_VARIANT))
+    my_ndk_source_root := \
+        $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources
     my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/system/include
     my_system_shared_libraries += libstdc++
   else ifneq (,$(filter c++_%, $(LOCAL_NDK_STL_VARIANT)))
-    my_ndk_stl_include_path := \
-      $(my_ndk_source_root)/cxx-stl/llvm-libc++/include
-    my_ndk_stl_include_path += \
-      $(my_ndk_source_root)/cxx-stl/llvm-libc++abi/include
+    my_llvm_dir := $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)
+    my_libcxx_arch_dir := $(my_llvm_dir)/android_libc++/ndk/$($(LOCAL_2ND_ARCH_VAR_PREFIX)PREBUILT_LIBCXX_ARCH_DIR)
 
-    my_libcxx_libdir := \
-      $(my_ndk_source_root)/cxx-stl/llvm-libc++/libs/$(my_cpu_variant)
+    # Include the target-specific __config_site file followed by the generic libc++ headers.
+    my_ndk_stl_include_path := $(my_libcxx_arch_dir)/include/c++/v1
+    my_ndk_stl_include_path += $(my_llvm_dir)/include/c++/v1
+    my_libcxx_libdir := $(my_libcxx_arch_dir)/lib
 
     ifeq (c++_static,$(LOCAL_NDK_STL_VARIANT))
       my_ndk_stl_static_lib := \
@@ -258,7 +256,7 @@
       my_ndk_stl_shared_lib_fullpath := $(my_libcxx_libdir)/libc++_shared.so
     endif
 
-    my_ndk_stl_static_lib += $(my_libcxx_libdir)/libunwind.a
+    my_ndk_stl_static_lib += $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBUNWIND)
     my_ldlibs += -ldl
   else # LOCAL_NDK_STL_VARIANT must be none
     # Do nothing.
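A hedged sketch of an Android.mk module that exercises this prebuilt libc++ path; the module and source names are illustrative:

include $(CLEAR_VARS)
# Illustrative module; LOCAL_SDK_VERSION is required alongside LOCAL_NDK_STL_VARIANT.
LOCAL_MODULE := libexample_ndk
LOCAL_SDK_VERSION := current
# Selects the prebuilt libc++ headers and libraries wired up above.
LOCAL_NDK_STL_VARIANT := c++_shared
LOCAL_SRC_FILES := example.cpp
include $(BUILD_SHARED_LIBRARY)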
@@ -330,18 +328,20 @@
   ifneq ($(LOCAL_IN_VENDOR),)
     # Vendor modules have LOCAL_IN_VENDOR
     my_cflags += -D__ANDROID_VENDOR__
-
-    ifeq ($(BOARD_API_LEVEL),)
-      # TODO(b/314036847): This is a fallback for UDC targets.
-      # This must be a build failure when UDC is no longer built from this source tree.
-      my_cflags += -D__ANDROID_VENDOR_API__=$(PLATFORM_SDK_VERSION)
-    else
-      my_cflags += -D__ANDROID_VENDOR_API__=$(BOARD_API_LEVEL)
-    endif
   else ifneq ($(LOCAL_IN_PRODUCT),)
     # Product modules have LOCAL_IN_PRODUCT
     my_cflags += -D__ANDROID_PRODUCT__
   endif
+
+  # Define __ANDROID_VENDOR_API__ for both product and vendor variants because
+  # they both use the same LLNDK libraries.
+  ifeq ($(BOARD_API_LEVEL),)
+    # TODO(b/314036847): This is a fallback for UDC targets.
+    # This should become a build failure once UDC is no longer built from this source tree.
+    my_cflags += -D__ANDROID_VENDOR_API__=$(PLATFORM_SDK_VERSION)
+  else
+    my_cflags += -D__ANDROID_VENDOR_API__=$(BOARD_API_LEVEL)
+  endif
 endif
 
 ifndef LOCAL_IS_HOST_MODULE
@@ -1626,19 +1626,6 @@
 ###########################################################
 ifndef LOCAL_IS_HOST_MODULE
 
-ifeq ($(call module-in-vendor-or-product),true)
-  my_target_global_c_includes :=
-  my_target_global_c_system_includes := $(TARGET_OUT_HEADERS)
-else ifdef LOCAL_SDK_VERSION
-  my_target_global_c_includes :=
-  my_target_global_c_system_includes := $(my_ndk_stl_include_path) $(my_ndk_sysroot_include)
-else
-  my_target_global_c_includes := $(SRC_HEADERS) \
-    $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES)
-  my_target_global_c_system_includes := $(SRC_SYSTEM_HEADERS) \
-    $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_SYSTEM_INCLUDES)
-endif
-
 my_target_global_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CFLAGS)
 my_target_global_conlyflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CONLYFLAGS) $(my_c_std_conlyflags)
 my_target_global_cppflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CPPFLAGS) $(my_cpp_std_cppflags)
@@ -1654,6 +1641,22 @@
   my_target_global_ldflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_LDFLAGS)
 endif # my_use_clang_lld
 
+ifeq ($(call module-in-vendor-or-product),true)
+  my_target_global_c_includes :=
+  my_target_global_c_system_includes := $(TARGET_OUT_HEADERS)
+  my_target_global_cflags += -nostdlibinc
+else ifdef LOCAL_SDK_VERSION
+  my_target_global_c_includes :=
+  my_target_global_c_system_includes := $(my_ndk_stl_include_path)
+  my_target_global_cflags += --sysroot $(my_ndk_sysroot)
+else
+  my_target_global_c_includes := $(SRC_HEADERS) \
+    $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES)
+  my_target_global_c_system_includes := $(SRC_SYSTEM_HEADERS) \
+    $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_SYSTEM_INCLUDES)
+  my_target_global_cflags += -nostdlibinc
+endif
+
 my_target_triple := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)TRIPLE)
 ifndef LOCAL_IS_HOST_MODULE
   my_target_triple_flag := -target $(my_target_triple)$(my_api_level)
diff --git a/core/board_config.mk b/core/board_config.mk
index d3f0493..ea0d022 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -27,6 +27,7 @@
 _board_strip_readonly_list += BOARD_KERNEL_CMDLINE
 _board_strip_readonly_list += BOARD_BOOT_HEADER_VERSION
 _board_strip_readonly_list += BOARD_BOOTCONFIG
+_board_strip_readonly_list += BOARD_BOOTCONFIG_FILE
 _board_strip_readonly_list += BOARD_KERNEL_BASE
 _board_strip_readonly_list += BOARD_USES_GENERIC_AUDIO
 _board_strip_readonly_list += BOARD_USES_RECOVERY_AS_BOOT
@@ -237,6 +238,7 @@
   .KATI_READONLY := TARGET_DEVICE_DIR
 endif
 
+$(call dump-phase-start,BOARD,,,, build/make/core/board_config.mk)
 ifndef RBC_PRODUCT_CONFIG
 include $(board_config_mk)
 else
@@ -261,6 +263,7 @@
 
   include $(OUT_DIR)/rbc/rbc_board_config_results.mk
 endif
+$(call dump-phase-end, build/make/core/board_config.mk)
 
 ifneq (,$(and $(TARGET_ARCH),$(TARGET_ARCH_SUITE)))
   $(error $(board_config_mk) erroneously sets both TARGET_ARCH and TARGET_ARCH_SUITE)
@@ -309,9 +312,10 @@
 .KATI_READONLY := $(_board_strip_readonly_list)
 
 INTERNAL_KERNEL_CMDLINE := $(BOARD_KERNEL_CMDLINE)
-ifneq (,$(BOARD_BOOTCONFIG))
+ifneq (,$(BOARD_BOOTCONFIG)$(BOARD_BOOTCONFIG_FILE))
   INTERNAL_KERNEL_CMDLINE += bootconfig
   INTERNAL_BOOTCONFIG := $(BOARD_BOOTCONFIG)
+  INTERNAL_BOOTCONFIG_FILE := $(BOARD_BOOTCONFIG_FILE)
 endif
 
 ifneq ($(filter %64,$(TARGET_ARCH)),)
@@ -920,6 +924,18 @@
 endif
 .KATI_READONLY := BOARD_USES_PVMFWIMAGE
 
+BOARD_USES_DESKTOP_RECOVERY_IMAGE :=
+ifeq ($(PRODUCT_BUILD_DESKTOP_RECOVERY_IMAGE),true)
+  BOARD_USES_DESKTOP_RECOVERY_IMAGE := true
+endif
+.KATI_READONLY := BOARD_USES_DESKTOP_RECOVERY_IMAGE
+
+BOARD_USES_DESKTOP_UPDATE_IMAGE :=
+ifeq ($(PRODUCT_BUILD_DESKTOP_UPDATE_IMAGE),true)
+  BOARD_USES_DESKTOP_UPDATE_IMAGE := true
+endif
+.KATI_READONLY := BOARD_USES_DESKTOP_UPDATE_IMAGE
+
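A hedged sketch of the product-side switches that drive these new board flags; the values are illustrative:

# Illustrative only: opt a desktop product into the recovery and update images.
PRODUCT_BUILD_DESKTOP_RECOVERY_IMAGE := true
PRODUCT_BUILD_DESKTOP_UPDATE_IMAGE := true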
 ###########################################
 # Ensure consistency among TARGET_RECOVERY_UPDATER_LIBS, AB_OTA_UPDATER, and PRODUCT_OTA_FORCE_NON_AB_PACKAGE.
 TARGET_RECOVERY_UPDATER_LIBS ?=
diff --git a/core/clang/TARGET_arm.mk b/core/clang/TARGET_arm.mk
index f18747a..126482f 100644
--- a/core/clang/TARGET_arm.mk
+++ b/core/clang/TARGET_arm.mk
@@ -4,7 +4,10 @@
 
 $(clang_2nd_arch_prefix)TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-arm-android.a
 $(clang_2nd_arch_prefix)TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-arm-android.a
+$(clang_2nd_arch_prefix)TARGET_LIBUNWIND := $(LLVM_RTLIB_PATH)/arm/libunwind.a
 
 # Address sanitizer clang config
 $(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan
 $(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan
+
+$(clang_2nd_arch_prefix)PREBUILT_LIBCXX_ARCH_DIR := arm
diff --git a/core/clang/TARGET_arm64.mk b/core/clang/TARGET_arm64.mk
index 42bed0a..e7ab6cb 100644
--- a/core/clang/TARGET_arm64.mk
+++ b/core/clang/TARGET_arm64.mk
@@ -4,7 +4,10 @@
 
 TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-aarch64-android.a
 TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-aarch64-android.a
+TARGET_LIBUNWIND := $(LLVM_RTLIB_PATH)/aarch64/libunwind.a
 
 # Address sanitizer clang config
 ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
 ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan64
+
+PREBUILT_LIBCXX_ARCH_DIR := aarch64
diff --git a/core/clang/TARGET_riscv64.mk b/core/clang/TARGET_riscv64.mk
index cfb5c7d..58c9c7b 100644
--- a/core/clang/TARGET_riscv64.mk
+++ b/core/clang/TARGET_riscv64.mk
@@ -4,7 +4,10 @@
 
 TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-riscv64-android.a
 TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-riscv64-android.a
+TARGET_LIBUNWIND := $(LLVM_RTLIB_PATH)/riscv64/libunwind.a
 
 # Address sanitizer clang config
 ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
 ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan64
+
+PREBUILT_LIBCXX_ARCH_DIR := riscv64
diff --git a/core/clang/TARGET_x86.mk b/core/clang/TARGET_x86.mk
index 5491a05..1a08c79 100644
--- a/core/clang/TARGET_x86.mk
+++ b/core/clang/TARGET_x86.mk
@@ -4,7 +4,10 @@
 
 $(clang_2nd_arch_prefix)TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-i686-android.a
 $(clang_2nd_arch_prefix)TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-i686-android.a
+$(clang_2nd_arch_prefix)TARGET_LIBUNWIND := $(LLVM_RTLIB_PATH)/i386/libunwind.a
 
 # Address sanitizer clang config
 $(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan
 $(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan
+
+$(clang_2nd_arch_prefix)PREBUILT_LIBCXX_ARCH_DIR := i386
diff --git a/core/clang/TARGET_x86_64.mk b/core/clang/TARGET_x86_64.mk
index 167db72..f39b41e 100644
--- a/core/clang/TARGET_x86_64.mk
+++ b/core/clang/TARGET_x86_64.mk
@@ -4,7 +4,10 @@
 
 TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-x86_64-android.a
 TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-x86_64-android.a
+TARGET_LIBUNWIND := $(LLVM_RTLIB_PATH)/x86_64/libunwind.a
 
 # Address sanitizer clang config
 ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
 ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan64
+
+PREBUILT_LIBCXX_ARCH_DIR := x86_64
diff --git a/core/combo/arch/arm64/armv9-2a.mk b/core/combo/arch/arm64/armv9-2a.mk
new file mode 100644
index 0000000..69ffde0
--- /dev/null
+++ b/core/combo/arch/arm64/armv9-2a.mk
@@ -0,0 +1,18 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This .mk file is required to support building for the ARMv9.2-A arch variant.
+# The file just needs to be present; it does not need to contain anything.
diff --git a/core/combo/arch/x86/alderlake.mk b/core/combo/arch/x86/alderlake.mk
new file mode 100644
index 0000000..a7ae6ed
--- /dev/null
+++ b/core/combo/arch/x86/alderlake.mk
@@ -0,0 +1,6 @@
+# Configuration for Linux on x86.
+# Generate binaries for processors
+# that have the AVX2 feature flag.
+#
+
+ARCH_X86_HAVE_SSE4_1 := true
diff --git a/core/combo/arch/x86_64/alderlake.mk b/core/combo/arch/x86_64/alderlake.mk
new file mode 100644
index 0000000..a7ae6ed
--- /dev/null
+++ b/core/combo/arch/x86_64/alderlake.mk
@@ -0,0 +1,6 @@
+# Configuration for Linux on x86.
+# Generate binaries for processors
+# that have the AVX2 feature flag.
+#
+
+ARCH_X86_HAVE_SSE4_1 := true
diff --git a/core/config.mk b/core/config.mk
index 82b63cf..b546ea1 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -173,6 +173,7 @@
 $(KATI_obsolete_var BUILDING_PVMFW_IMAGE,BUILDING_PVMFW_IMAGE is no longer used)
 $(KATI_obsolete_var BOARD_BUILD_SYSTEM_ROOT_IMAGE)
 $(KATI_obsolete_var FS_GET_STATS)
+$(KATI_obsolete_var BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES)
 
 # Used to force goals to build.  Only use for conditionally defined goals.
 .PHONY: FORCE
@@ -363,8 +364,7 @@
 # configs, generally for cross-cutting features.
 
 # Build broken variables that should be treated as booleans
-_build_broken_bool_vars := \
-  BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES \
+_build_broken_bool_vars :=
 
 # Build broken variables that should be treated as lists
 _build_broken_list_vars := \
@@ -432,13 +432,6 @@
 endif
 .KATI_READONLY := TARGET_MAX_PAGE_SIZE_SUPPORTED
 
-ifdef PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE
-  TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := $(PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE)
-else
-  TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := false
-endif
-.KATI_READONLY := TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE
-
 # Boolean variable determining if AOSP relies on bionic's PAGE_SIZE macro.
 ifdef PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO
   TARGET_NO_BIONIC_PAGE_SIZE_MACRO := $(PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO)
@@ -811,6 +804,24 @@
   BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED ?= true
 endif
 
+ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),36),)
+  ifneq ($(NEED_AIDL_NDK_PLATFORM_BACKEND),)
+    $(error Must not set NEED_AIDL_NDK_PLATFORM_BACKEND, but it is set to: $(NEED_AIDL_NDK_PLATFORM_BACKEND). Support will be removed.)
+  endif
+endif
+
+ifdef PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE
+  TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := $(PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE)
+else ifeq (true,$(TARGET_BUILD_UNBUNDLED))
+  # unbundled builds may not have updated build sources
+  TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := false
+else ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),36),)
+  TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := true
+else
+  TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := false
+endif
+.KATI_READONLY := TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE
+
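A worked example of the precedence above, with illustrative values:

# A bundled device shipping at API level 36 gets the check by default:
#   PRODUCT_SHIPPING_API_LEVEL := 36   =>  TARGET_CHECK_PREBUILT_MAX_PAGE_SIZE := true
# unless the product opts out explicitly (the ifdef branch wins):
#   PRODUCT_CHECK_PREBUILT_MAX_PAGE_SIZE := false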
 # Set BOARD_SYSTEMSDK_VERSIONS to the latest SystemSDK version starting from P-launching
 # devices if unset.
 ifndef BOARD_SYSTEMSDK_VERSIONS
@@ -833,12 +844,6 @@
 .KATI_READONLY := BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES
 
 ifdef PRODUCT_SHIPPING_API_LEVEL
-  board_api_level := $(firstword $(BOARD_API_LEVEL) $(BOARD_SHIPPING_API_LEVEL))
-  ifneq (,$(board_api_level))
-    min_systemsdk_version := $(call math_min,$(board_api_level),$(PRODUCT_SHIPPING_API_LEVEL))
-  else
-    min_systemsdk_version := $(PRODUCT_SHIPPING_API_LEVEL)
-  endif
   ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),29),)
     ifneq ($(BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE),)
       $(error When PRODUCT_SHIPPING_API_LEVEL >= 29, BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE cannot be set)
@@ -889,6 +894,11 @@
     PLATFORM_SEPOLICY_COMPAT_VERSIONS \
     PLATFORM_SEPOLICY_VERSION \
 
+BOARD_GENFS_LABELS_VERSION ?= $(BOARD_API_LEVEL)
+ifeq ($(call math_gt,$(BOARD_API_LEVEL),$(BOARD_GENFS_LABELS_VERSION)),true)
+  $(error BOARD_GENFS_LABELS_VERSION ($(BOARD_GENFS_LABELS_VERSION)) must be greater than or equal to BOARD_API_LEVEL ($(BOARD_API_LEVEL)))
+endif
+
 ifeq ($(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS),true)
   ifneq ($(PRODUCT_USE_DYNAMIC_PARTITIONS),true)
     $(error PRODUCT_USE_DYNAMIC_PARTITIONS must be true when PRODUCT_RETROFIT_DYNAMIC_PARTITIONS \
@@ -1251,14 +1261,43 @@
 # in the source tree.
 dont_bother_goals := out product-graph
 
+ifeq ($(TARGET_SYSTEM_PROP),)
+TARGET_SYSTEM_PROP := $(wildcard $(TARGET_DEVICE_DIR)/system.prop)
+endif
+
+ifeq ($(TARGET_SYSTEM_EXT_PROP),)
+TARGET_SYSTEM_EXT_PROP := $(wildcard $(TARGET_DEVICE_DIR)/system_ext.prop)
+endif
+
+ifeq ($(TARGET_PRODUCT_PROP),)
+TARGET_PRODUCT_PROP := $(wildcard $(TARGET_DEVICE_DIR)/product.prop)
+endif
+
+ifeq ($(TARGET_ODM_PROP),)
+TARGET_ODM_PROP := $(wildcard $(TARGET_DEVICE_DIR)/odm.prop)
+endif
+
+.KATI_READONLY := \
+    TARGET_SYSTEM_PROP \
+    TARGET_SYSTEM_EXT_PROP \
+    TARGET_PRODUCT_PROP \
+    TARGET_ODM_PROP \
+
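A hedged sketch of the board-side override; the wildcard defaults above apply only when these variables are left unset, and the paths are illustrative:

# Illustrative BoardConfig.mk values only.
TARGET_SYSTEM_PROP := device/vendorx/boardy/system.prop device/vendorx/common/system.prop
TARGET_PRODUCT_PROP := device/vendorx/boardy/product.prop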
 include $(BUILD_SYSTEM)/sysprop_config.mk
 
 # Make ANDROID Soong config variables visible to Android.mk files, for
 # consistency with those defined in BoardConfig.mk files.
 include $(BUILD_SYSTEM)/android_soong_config_vars.mk
 
-SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT).variables
-SOONG_EXTRA_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT).extra.variables
+# EMMA_INSTRUMENT is set to true when coverage is enabled. Creates a suffix to
+# differeciate the coverage version of ninja files. This will save 5 minutes of
+# build time used to regenerate ninja.
+ifeq (true,$(EMMA_INSTRUMENT))
+COVERAGE_SUFFIX := .coverage
+endif
+
+SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).variables
+SOONG_EXTRA_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).extra.variables
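A worked example of the resulting paths, with an illustrative product name:

# With EMMA_INSTRUMENT=true and TARGET_PRODUCT=aosp_example, the paths above become:
#   $(SOONG_OUT_DIR)/soong.aosp_example.coverage.variables
#   $(SOONG_OUT_DIR)/soong.aosp_example.coverage.extra.variables
# Without coverage, COVERAGE_SUFFIX stays empty and the original names are kept.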
 
 ifeq ($(CALLED_FROM_SETUP),true)
 include $(BUILD_SYSTEM)/ninja_config.mk
@@ -1268,10 +1307,6 @@
 SOONG_VARIABLES :=
 SOONG_EXTRA_VARIABLES :=
 
--include external/ltp/android/ltp_package_list.mk
-DEFAULT_DATA_OUT_MODULES := ltp $(ltp_packages)
-.KATI_READONLY := DEFAULT_DATA_OUT_MODULES
-
 include $(BUILD_SYSTEM)/dumpvar.mk
 
 ifdef BOARD_VNDK_VERSION
diff --git a/core/definitions.mk b/core/definitions.mk
index b30b159..7a32464 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2605,7 +2605,17 @@
         @$(call emit-line,$(wordlist 108501,109000,$(1)),$(2))
         @$(call emit-line,$(wordlist 109001,109500,$(1)),$(2))
         @$(call emit-line,$(wordlist 109501,110000,$(1)),$(2))
-        @$(if $(wordlist 110001,110002,$(1)),$(error dump-words-to-file: Too many words ($(words $(1)))))
+        @$(call emit-line,$(wordlist 110001,110500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 110501,111000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 111001,111500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 111501,112000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 112001,112500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 112501,113000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 113001,113500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 113501,114000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 114001,114500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 114501,115000,$(1)),$(2))
+        @$(if $(wordlist 115001,115002,$(1)),$(error dump-words-to-file: Too many words ($(words $(1)))))
 endef
 # Return jar arguments to compress files in a given directory
 # $(1): directory
@@ -3612,6 +3622,7 @@
     $$(foreach f,$$(my_compat_dist_$(suite)),$$(call word-colon,2,$$(f))) \
     $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call word-colon,2,$$(f))) \
     $$(my_compat_dist_test_data_$(suite))) \
+  $(eval COMPATIBILITY.$(suite).ARCH_DIRS.$(my_register_name) := $(my_compat_module_arch_dir_$(suite).$(my_register_name))) \
   $(eval COMPATIBILITY.$(suite).API_MAP_FILES += $$(my_compat_api_map_$(suite))) \
   $(eval COMPATIBILITY.$(suite).SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES += $(LOCAL_SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES)) \
   $(eval ALL_COMPATIBILITY_DIST_FILES += $$(my_compat_dist_$(suite))) \
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 26b8b17..88e0cc7 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -13,47 +13,6 @@
 install-on-system-other = $(filter-out $(PRODUCT_DEXPREOPT_SPEED_APPS) $(PRODUCT_SYSTEM_SERVER_APPS),$(basename $(notdir $(filter $(foreach f,$(SYSTEM_OTHER_ODEX_FILTER),$(TARGET_OUT)/$(f)),$(1)))))
 endif
 
-# We want to install the profile even if we are not using preopt since it is required to generate
-# the image on the device.
-ALL_DEFAULT_INSTALLED_MODULES += $(call copy-many-files,$(DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED),$(PRODUCT_OUT))
-
-# Install boot images. Note that there can be multiple.
-my_boot_image_arch := TARGET_ARCH
-my_boot_image_out := $(PRODUCT_OUT)
-my_boot_image_syms := $(TARGET_OUT_UNSTRIPPED)
-DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE := \
-  $(foreach my_boot_image_name,$(DEXPREOPT_IMAGE_NAMES),$(strip \
-    $(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk) \
-    $(my_boot_image_module)))
-ifdef TARGET_2ND_ARCH
-  my_boot_image_arch := TARGET_2ND_ARCH
-  2ND_DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE := \
-    $(foreach my_boot_image_name,$(DEXPREOPT_IMAGE_NAMES),$(strip \
-      $(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk) \
-      $(my_boot_image_module)))
-endif
-# Install boot images for testing on host. We exclude framework image as it is not part of art manifest.
-my_boot_image_arch := HOST_ARCH
-my_boot_image_out := $(HOST_OUT)
-my_boot_image_syms := $(HOST_OUT)/symbols
-HOST_BOOT_IMAGE_MODULE := \
-  $(foreach my_boot_image_name,art_host,$(strip \
-    $(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk) \
-    $(my_boot_image_module)))
-HOST_BOOT_IMAGE := $(call module-installed-files,$(HOST_BOOT_IMAGE_MODULE))
-ifdef HOST_2ND_ARCH
-  my_boot_image_arch := HOST_2ND_ARCH
-  2ND_HOST_BOOT_IMAGE_MODULE := \
-    $(foreach my_boot_image_name,art_host,$(strip \
-      $(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk) \
-      $(my_boot_image_module)))
-  2ND_HOST_BOOT_IMAGE := $(call module-installed-files,$(2ND_HOST_BOOT_IMAGE_MODULE))
-endif
-my_boot_image_arch :=
-my_boot_image_out :=
-my_boot_image_syms :=
-my_boot_image_module :=
-
 # Build the boot.zip which contains the boot jars and their compilation output
 # We can do this only if preopt is enabled and if the product uses libart config (which sets the
 # default properties for preopting).
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index d51de33..f1e9fb5 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -1,4 +1,4 @@
-DEX_PREOPT_CONFIG := $(SOONG_OUT_DIR)/dexpreopt.config
+DEX_PREOPT_CONFIG := $(SOONG_OUT_DIR)/dexpreopt${COVERAGE_SUFFIX}.config
 
 ENABLE_PREOPT := true
 ENABLE_PREOPT_BOOT_IMAGES := true
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
deleted file mode 100644
index a2c9942..0000000
--- a/core/dex_preopt_libart.mk
+++ /dev/null
@@ -1,109 +0,0 @@
-####################################
-# ART boot image installation
-# Input variables:
-#   my_boot_image_name: the boot image to install
-#   my_boot_image_arch: the architecture to install (e.g. TARGET_ARCH, not expanded)
-#   my_boot_image_out:  the install directory (e.g. $(PRODUCT_OUT))
-#   my_boot_image_syms: the symbols director (e.g. $(TARGET_OUT_UNSTRIPPED))
-#
-# Output variables:
-#   my_boot_image_module: the created module name. Empty if no module is created.
-#
-# Install the boot images compiled by Soong.
-# Create a module named dexpreopt_bootjar.$(my_boot_image_name)_$($(my_boot_image_arch))
-# that installs all of boot image files.
-# If there is no file to install for $(my_boot_image_name), for example when
-# building an unbundled build, then no module is created.
-#
-####################################
-
-# Takes a list of src:dest install pairs and returns a new list with a path
-# prefixed to each dest value.
-# $(1): list of src:dest install pairs
-# $(2): path to prefix to each dest value
-define prefix-copy-many-files-dest
-$(foreach v,$(1),$(call word-colon,1,$(v)):$(2)$(call word-colon,2,$(v)))
-endef
-
-# Converts an architecture-specific vdex path into a location that can be shared
-# between architectures.
-define vdex-shared-install-path
-$(dir $(patsubst %/,%,$(dir $(1))))$(notdir $(1))
-endef
-
-# Takes a list of src:dest install pairs of vdex files and returns a new list
-# where each dest has been rewritten to the shared location for vdex files.
-define vdex-copy-many-files-shared-dest
-$(foreach v,$(1),$(call word-colon,1,$(v)):$(call vdex-shared-install-path,$(call word-colon,2,$(v))))
-endef
-
-# Creates a rule to symlink an architecture specific vdex file to the shared
-# location for that vdex file.
-define symlink-vdex-file
-$(strip \
-  $(call symlink-file,\
-    $(call vdex-shared-install-path,$(1)),\
-    ../$(notdir $(1)),\
-    $(1))\
-  $(1))
-endef
-
-# Takes a list of src:dest install pairs of vdex files and creates rules to
-# symlink each dest to the shared location for that vdex file.
-define symlink-vdex-files
-$(foreach v,$(1),$(call symlink-vdex-file,$(call word-colon,2,$(v))))
-endef
-
-my_boot_image_module :=
-
-my_suffix := $(my_boot_image_name)_$($(my_boot_image_arch))
-my_copy_pairs := $(call prefix-copy-many-files-dest,$(DEXPREOPT_IMAGE_BUILT_INSTALLED_$(my_suffix)),$(my_boot_image_out))
-my_vdex_copy_pairs := $(call prefix-copy-many-files-dest,$(DEXPREOPT_IMAGE_VDEX_BUILT_INSTALLED_$(my_suffix)),$(my_boot_image_out))
-my_vdex_copy_shared_pairs := $(call vdex-copy-many-files-shared-dest,$(my_vdex_copy_pairs))
-ifeq (,$(filter %_2ND_ARCH,$(my_boot_image_arch)))
-  # Only install the vdex to the shared location for the primary architecture.
-  my_copy_pairs += $(my_vdex_copy_shared_pairs)
-endif
-
-my_unstripped_copy_pairs := $(call prefix-copy-many-files-dest,$(DEXPREOPT_IMAGE_UNSTRIPPED_BUILT_INSTALLED_$(my_suffix)),$(my_boot_image_syms))
-
-# Generate the boot image module only if there is any file to install.
-ifneq (,$(strip $(my_copy_pairs)))
-  my_first_pair := $(firstword $(my_copy_pairs))
-  my_rest_pairs := $(wordlist 2,$(words $(my_copy_pairs)),$(my_copy_pairs))
-
-  my_first_src := $(call word-colon,1,$(my_first_pair))
-  my_first_dest := $(call word-colon,2,$(my_first_pair))
-
-  my_installed := $(call copy-many-files,$(my_copy_pairs))
-  my_unstripped_installed := $(call copy-many-files,$(my_unstripped_copy_pairs))
-
-  my_symlinks := $(call symlink-vdex-files,$(my_vdex_copy_pairs))
-
-  # We don't have a LOCAL_PATH for the auto-generated modules, so let it be the $(BUILD_SYSTEM).
-  LOCAL_PATH := $(BUILD_SYSTEM)
-  # Hack to let these pseudo-modules wrapped around Soong modules use LOCAL_SOONG_INSTALLED_MODULE.
-  LOCAL_MODULE_MAKEFILE := $(SOONG_ANDROID_MK)
-
-  include $(CLEAR_VARS)
-  LOCAL_MODULE := dexpreopt_bootjar.$(my_suffix)
-  LOCAL_PREBUILT_MODULE_FILE := $(my_first_src)
-  LOCAL_MODULE_PATH := $(dir $(my_first_dest))
-  LOCAL_MODULE_STEM := $(notdir $(my_first_dest))
-  LOCAL_SOONG_INSTALL_PAIRS := $(my_copy_pairs)
-  LOCAL_SOONG_INSTALL_SYMLINKS := $(my_symlinks)
-  LOCAL_SOONG_INSTALLED_MODULE := $(my_first_dest)
-  LOCAL_SOONG_LICENSE_METADATA := $(DEXPREOPT_IMAGE_LICENSE_METADATA_$(my_suffix))
-  ifneq (,$(strip $(filter HOST_%,$(my_boot_image_arch))))
-    LOCAL_IS_HOST_MODULE := true
-  endif
-  LOCAL_MODULE_CLASS := ETC
-  include $(BUILD_PREBUILT)
-  $(LOCAL_BUILT_MODULE): | $(my_unstripped_installed)
-  # Installing boot.art causes all boot image bits to be installed.
-  # Keep this old behavior in case anyone still needs it.
-  $(LOCAL_INSTALLED_MODULE): $(wordlist 2,$(words $(my_installed)),$(my_installed)) $(my_symlinks)
-  $(my_all_targets): $(my_installed) $(my_symlinks)
-
-  my_boot_image_module := $(LOCAL_MODULE)
-endif  # my_copy_pairs != empty
diff --git a/core/dumpconfig.mk b/core/dumpconfig.mk
index 640fe10..eb4c822 100644
--- a/core/dumpconfig.mk
+++ b/core/dumpconfig.mk
@@ -56,7 +56,7 @@
 
 # Escape quotation marks for CSV, and wraps in quotation marks.
 define escape-for-csv
-"$(subst ","",$1)"
+"$(subst ","",$(subst $(newline), ,$1))"
 endef
 
 # Args:
@@ -68,7 +68,7 @@
 # Args:
 #   $(1): include stack
 define dump-import-done
-$(eval $(file >> $(DUMPCONFIG_FILE),imported,$(strip $(1))))
+$(eval $(file >> $(DUMPCONFIG_FILE),imported,$(strip $(1)),$(filter-out $(1),$(MAKEFILE_LIST))))
 endef
 
 # Args:
diff --git a/core/envsetup.mk b/core/envsetup.mk
index c063f60..f82e861 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -417,6 +417,7 @@
 HOST_OUT_NATIVE_TESTS := $(HOST_OUT)/nativetest64
 HOST_OUT_COVERAGE := $(HOST_OUT)/coverage
 HOST_OUT_TESTCASES := $(HOST_OUT)/testcases
+HOST_OUT_ETC := $(HOST_OUT)/etc
 .KATI_READONLY := \
   HOST_OUT_EXECUTABLES \
   HOST_OUT_SHARED_LIBRARIES \
@@ -425,7 +426,8 @@
   HOST_OUT_SDK_ADDON \
   HOST_OUT_NATIVE_TESTS \
   HOST_OUT_COVERAGE \
-  HOST_OUT_TESTCASES
+  HOST_OUT_TESTCASES \
+  HOST_OUT_ETC
 
 HOST_CROSS_OUT_EXECUTABLES := $(HOST_CROSS_OUT)/bin
 HOST_CROSS_OUT_SHARED_LIBRARIES := $(HOST_CROSS_OUT)/lib
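Illustrative sketch, not part of the patch: with HOST_OUT_ETC now defined and marked read-only, a host-side Make module could install a config file under $(HOST_OUT)/etc as below; the module and file names are hypothetical.

include $(CLEAR_VARS)
# Module and file names below are hypothetical.
LOCAL_MODULE := example_host_config
LOCAL_MODULE_CLASS := ETC
LOCAL_IS_HOST_MODULE := true
LOCAL_MODULE_PATH := $(HOST_OUT_ETC)
LOCAL_SRC_FILES := example.conf
include $(BUILD_PREBUILT)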
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index 5491247..4959edd 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -38,8 +38,9 @@
       $(error LOCAL_SDK_VERSION must be defined with LOCAL_NDK_STL_VARIANT, \
           LOCAL_PACKAGE_NAME=$(LOCAL_PACKAGE_NAME))
     endif
+    my_libcxx_arch := $($(LOCAL_2ND_ARCH_VAR_PREFIX)PREBUILT_LIBCXX_ARCH_DIR)
     my_jni_shared_libraries += \
-        $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources/cxx-stl/llvm-libc++/libs/$(TARGET_$(my_2nd_arch_prefix)CPU_ABI)/libc++_shared.so
+        $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/android_libc++/ndk/$(my_libcxx_arch)/lib/libc++_shared.so
   endif
 
   # Set the abi directory used by the local JNI shared libraries.
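Illustrative sketch, not part of the patch: the NDK libc++_shared.so is now taken from the per-architecture clang prebuilts rather than the historical NDK tree. With $(my_libcxx_arch) set to arm64 the new source expands roughly as follows (LLVM_PREBUILTS_BASE is typically prebuilts/clang/host, and the clang release shown is a placeholder).

#   $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/android_libc++/ndk/arm64/lib/libc++_shared.so
#   e.g. prebuilts/clang/host/linux-x86/clang-rXXXXXX/android_libc++/ndk/arm64/lib/libc++_shared.so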
diff --git a/core/java_common.mk b/core/java_common.mk
index a21f062..f574b76 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -32,7 +32,7 @@
     else ifneq (,$(LOCAL_SDK_VERSION)$(TARGET_BUILD_USE_PREBUILT_SDKS))
       # TODO(ccross): allow 1.9 for current and unbundled once we have SDK system modules
       LOCAL_JAVA_LANGUAGE_VERSION := 1.8
-    else ifeq ($(EXPERIMENTAL_TARGET_JAVA_VERSION_21),true)
+    else ifeq ($(RELEASE_TARGET_JAVA_21),true)
       LOCAL_JAVA_LANGUAGE_VERSION := 21
     else
       LOCAL_JAVA_LANGUAGE_VERSION := 17
diff --git a/core/java_prebuilt_internal.mk b/core/java_prebuilt_internal.mk
index 46393ac..4b6eea7 100644
--- a/core/java_prebuilt_internal.mk
+++ b/core/java_prebuilt_internal.mk
@@ -172,6 +172,12 @@
 endif
 endif
 
+# transitive-res-packages is only populated for Soong modules for now, but needs
+# to exist so that other Make modules can depend on it.  Create an empty file.
+my_transitive_res_packages := $(intermediates.COMMON)/transitive-res-packages
+$(my_transitive_res_packages):
+	touch $@
+
 my_res_package := $(intermediates.COMMON)/package-res.apk
 
 # We needed only very few PRIVATE variables and aapt2.mk input variables. Reset the unnecessary ones.
diff --git a/core/layoutlib_data.mk b/core/layoutlib_data.mk
index e45f7ef..dabcfb2 100644
--- a/core/layoutlib_data.mk
+++ b/core/layoutlib_data.mk
@@ -31,8 +31,18 @@
 	$(hide) mkdir -p $(dir $@)
 	$(hide) cp -vf $< $@
 
-# List of all data files - font files, font configuration files, key character map files
-LAYOUTLIB_FILES := $(fonts_device) $(font_config) $(keyboards)
+HYPHEN_TEMP := $(call intermediates-dir-for,PACKAGING,hyphen,HOST,COMMON)
+
+# The hyphenation pattern files needed to support text hyphenation
+hyphen := $(filter $(TARGET_OUT)/usr/hyphen-data/%.hyb, $(INTERNAL_SYSTEMIMAGE_FILES))
+hyphen := $(addprefix $(HYPHEN_TEMP)/, $(notdir $(hyphen)))
+
+$(hyphen): $(HYPHEN_TEMP)/%: $(TARGET_OUT)/usr/hyphen-data/%
+	$(hide) mkdir -p $(dir $@)
+	$(hide) cp -vf $< $@
+
+# List of all data files - font files, font configuration files, key character map files, hyphenation pattern files
+LAYOUTLIB_FILES := $(fonts_device) $(font_config) $(keyboards) $(hyphen)
 
 .PHONY: layoutlib layoutlib-tests
 layoutlib layoutlib-tests: $(LAYOUTLIB_FILES)
@@ -40,6 +50,7 @@
 $(call dist-for-goals, layoutlib, $(foreach m,$(fonts_device), $(m):layoutlib_native/fonts/$(notdir $(m))))
 $(call dist-for-goals, layoutlib, $(foreach m,$(font_config), $(m):layoutlib_native/fonts/$(notdir $(m))))
 $(call dist-for-goals, layoutlib, $(foreach m,$(keyboards), $(m):layoutlib_native/keyboards/$(notdir $(m))))
+$(call dist-for-goals, layoutlib, $(foreach m,$(hyphen), $(m):layoutlib_native/hyphen-data/$(notdir $(m))))
 
 FONT_TEMP :=
 font_config :=
@@ -66,11 +77,19 @@
 # Resource files from frameworks/base/core/res/res
 LAYOUTLIB_RES := $(call intermediates-dir-for,PACKAGING,layoutlib-res,HOST,COMMON)
 LAYOUTLIB_RES_FILES := $(shell find frameworks/base/core/res/res -type f -not -path 'frameworks/base/core/res/res/values-m[nc]c*' | sort)
-$(LAYOUTLIB_RES)/layoutlib-res.zip: $(SOONG_ZIP) $(HOST_OUT_EXECUTABLES)/aapt2 $(LAYOUTLIB_RES_FILES)
+EMULATED_OVERLAYS_FILES := $(shell find frameworks/base/packages/overlays/*/res/ | sort)
+DEVICE_OVERLAYS_FILES := $(shell find device/generic/goldfish/phone/overlay/frameworks/base/packages/overlays/*/AndroidOverlay/res/ | sort)
+$(LAYOUTLIB_RES)/layoutlib-res.zip: $(SOONG_ZIP) $(HOST_OUT_EXECUTABLES)/aapt2 $(LAYOUTLIB_RES_FILES) $(EMULATED_OVERLAYS_FILES) $(DEVICE_OVERLAYS_FILES)
 	rm -rf $@
-	echo $(LAYOUTLIB_RES_FILES) > $(LAYOUTLIB_RES)/filelist.txt
-	$(SOONG_ZIP) -C frameworks/base/core/res -l $(LAYOUTLIB_RES)/filelist.txt -o $(LAYOUTLIB_RES)/temp.zip
-	rm -rf $(LAYOUTLIB_RES)/data && unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp.zip
+	echo $(LAYOUTLIB_RES_FILES) > $(LAYOUTLIB_RES)/filelist_res.txt
+	$(SOONG_ZIP) -C frameworks/base/core/res -l $(LAYOUTLIB_RES)/filelist_res.txt -o $(LAYOUTLIB_RES)/temp_res.zip
+	echo $(EMULATED_OVERLAYS_FILES) > $(LAYOUTLIB_RES)/filelist_emulated_overlays.txt
+	$(SOONG_ZIP) -C frameworks/base/packages -l $(LAYOUTLIB_RES)/filelist_emulated_overlays.txt -o $(LAYOUTLIB_RES)/temp_emulated_overlays.zip
+	echo $(DEVICE_OVERLAYS_FILES) > $(LAYOUTLIB_RES)/filelist_device_overlays.txt
+	$(SOONG_ZIP) -C device/generic/goldfish/phone/overlay/frameworks/base/packages -l $(LAYOUTLIB_RES)/filelist_device_overlays.txt -o $(LAYOUTLIB_RES)/temp_device_overlays.zip
+	rm -rf $(LAYOUTLIB_RES)/data && unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp_res.zip
+	unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp_emulated_overlays.zip
+	unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp_device_overlays.zip
 	rm -rf $(LAYOUTLIB_RES)/compiled && mkdir $(LAYOUTLIB_RES)/compiled && $(HOST_OUT_EXECUTABLES)/aapt2 compile $(LAYOUTLIB_RES)/data/res/**/*.9.png -o $(LAYOUTLIB_RES)/compiled
 	printf '<?xml version="1.0" encoding="utf-8"?>\n<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="com.google.android.layoutlib" />' > $(LAYOUTLIB_RES)/AndroidManifest.xml
 	$(HOST_OUT_EXECUTABLES)/aapt2 link -R $(LAYOUTLIB_RES)/compiled/* -o $(LAYOUTLIB_RES)/compiled.apk --manifest $(LAYOUTLIB_RES)/AndroidManifest.xml
@@ -78,7 +97,7 @@
 	for f in $(LAYOUTLIB_RES)/compiled_apk/res/*; do mv "$$f" "$${f/-v4/}";done
 	for f in $(LAYOUTLIB_RES)/compiled_apk/res/**/*.9.png; do mv "$$f" "$${f/.9.png/.compiled.9.png}";done
 	cp -r $(LAYOUTLIB_RES)/compiled_apk/res $(LAYOUTLIB_RES)/data
-	$(SOONG_ZIP) -C $(LAYOUTLIB_RES)/data -D $(LAYOUTLIB_RES)/data/res -o $@
+	$(SOONG_ZIP) -C $(LAYOUTLIB_RES)/data -D $(LAYOUTLIB_RES)/data/ -o $@
 
 $(call dist-for-goals,layoutlib,$(LAYOUTLIB_RES)/layoutlib-res.zip:layoutlib_native/res.zip)
 
@@ -87,6 +106,7 @@
 _layoutlib_font_config_files := $(sort $(wildcard frameworks/base/data/fonts/*.xml))
 _layoutlib_fonts_files := $(filter $(TARGET_OUT)/fonts/%.ttf $(TARGET_OUT)/fonts/%.ttc $(TARGET_OUT)/fonts/%.otf, $(INTERNAL_SYSTEMIMAGE_FILES))
 _layoutlib_keyboard_files := $(sort $(wildcard frameworks/base/data/keyboards/*.kcm))
+_layoutlib_hyphen_files := $(filter $(TARGET_OUT)/usr/hyphen-data/%.hyb, $(INTERNAL_SYSTEMIMAGE_FILES))
 
 # Find out files disted with layoutlib in Soong.
 ### Filter out static libraries for Windows and files already handled in make.
@@ -116,6 +136,13 @@
 	  echo data/keyboards/$(notdir $f),frameworks/base/data/keyboards,prebuilt_etc,,,,,$f,,, >> $@; \
 	)
 
+	$(foreach f,$(_layoutlib_hyphen_files), \
+	  $(eval _module_name := $(ALL_INSTALLED_FILES.$f)) \
+	  $(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH)))) \
+	  $(eval _soong_module_type := $(strip $(sort $(ALL_MODULES.$(_module_name).SOONG_MODULE_TYPE)))) \
+	  echo data/hyphen-data/$(notdir $f),$(_module_path),$(_soong_module_type),,,,,$f,,, >> $@; \
+	)
+
 	$(foreach f,$(_layoutlib_files_disted_by_soong), \
 	  $(eval _prebuilt_module_file := $(call word-colon,1,$f)) \
 	  $(eval _dist_file := $(call word-colon,2,$f)) \
@@ -132,16 +159,26 @@
 	  echo $(_path),,,,,,Y,$f,,, >> $@; \
 	)
 
+	$(foreach f,$(EMULATED_OVERLAYS_FILES), \
+	  $(eval _path := $(subst frameworks/base/packages,data,$f)) \
+	  echo $(_path),,,,,,Y,$f,,, >> $@; \
+	)
+
+	$(foreach f,$(DEVICE_OVERLAYS_FILES), \
+	  $(eval _path := $(subst device/generic/goldfish/phone/overlay/frameworks/base/packages,data,$f)) \
+	  echo $(_path),,,,,,Y,$f,,, >> $@; \
+	)
+
 .PHONY: layoutlib-sbom
 layoutlib-sbom: $(LAYOUTLIB_SBOM)/layoutlib.spdx.json
-$(LAYOUTLIB_SBOM)/layoutlib.spdx.json: $(PRODUCT_OUT)/always_dirty_file.txt $(GEN_SBOM) $(LAYOUTLIB_SBOM)/sbom-metadata.csv $(_layoutlib_font_config_files) $(_layoutlib_fonts_files) $(LAYOUTLIB_BUILD_PROP)/layoutlib-build.prop $(_layoutlib_keyboard_files) $(LAYOUTLIB_RES_FILES)
+$(LAYOUTLIB_SBOM)/layoutlib.spdx.json: $(PRODUCT_OUT)/always_dirty_file.txt $(GEN_SBOM) $(LAYOUTLIB_SBOM)/sbom-metadata.csv $(_layoutlib_font_config_files) $(_layoutlib_fonts_files) $(LAYOUTLIB_BUILD_PROP)/layoutlib-build.prop $(_layoutlib_keyboard_files) $(_layoutlib_hyphen_files) $(LAYOUTLIB_RES_FILES) $(EMULATED_OVERLAYS_FILES) $(DEVICE_OVERLAYS_FILES)
 	rm -rf $@
 	$(GEN_SBOM) --output_file $@ --metadata $(LAYOUTLIB_SBOM)/sbom-metadata.csv --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --module_name "layoutlib" --json
 
 $(call dist-for-goals,layoutlib,$(LAYOUTLIB_SBOM)/layoutlib.spdx.json:layoutlib_native/sbom/layoutlib.spdx.json)
 
 # Generate SBOM of framework_res.jar that is created in release_layoutlib.sh.
-# The generated SBOM contains placeholders for release_layotlib.sh to substitute, and the placeholders include:
+# The generated SBOM contains placeholders for release_layoutlib.sh to substitute, and the placeholders include:
 # document name, document namespace, document creation info, organization and SHA1 value of framework_res.jar.
 GEN_SBOM_FRAMEWORK_RES := $(HOST_OUT_EXECUTABLES)/generate-sbom-framework_res
 .PHONY: layoutlib-framework_res-sbom
diff --git a/core/main.mk b/core/main.mk
index 8d73793..624df49 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -31,8 +31,7 @@
 .KATI_READONLY := $(foreach n,$(SOONG_CONFIG_NAMESPACES),SOONG_CONFIG_$(n))
 .KATI_READONLY := $(foreach n,$(SOONG_CONFIG_NAMESPACES),$(foreach k,$(SOONG_CONFIG_$(n)),SOONG_CONFIG_$(n)_$(k)))
 
-include $(SOONG_MAKEVARS_MK)
-
+include $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk
 YACC :=$= $(BISON) -d
 
 include $(BUILD_SYSTEM)/clang/config.mk
@@ -84,6 +83,8 @@
 -include test/cts-root/tools/build/config.mk
 # WVTS-specific config.
 -include test/wvts/tools/build/config.mk
+# DTS-specific config.
+-include test/dts/tools/build/config.mk
 
 
 # Clean rules
@@ -276,17 +277,36 @@
 # Include all of the makefiles in the system
 #
 
-subdir_makefiles := $(SOONG_OUT_DIR)/installs-$(TARGET_PRODUCT).mk $(SOONG_ANDROID_MK)
+subdir_makefiles := \
+    $(SOONG_OUT_DIR)/installs-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk \
+    $(SOONG_ANDROID_MK) \
+    build/make/target/board/android-info.mk
+
 # Android.mk files are only used on Linux builds, Mac only supports Android.bp
 ifeq ($(HOST_OS),linux)
-  subdir_makefiles += $(file <$(OUT_DIR)/.module_paths/Android.mk.list)
+  ifeq ($(PRODUCT_IGNORE_ALL_ANDROIDMK),true)
+    allowed_androidmk_files :=
+    ifdef PRODUCT_ANDROIDMK_ALLOWLIST_FILE
+      -include $(PRODUCT_ANDROIDMK_ALLOWLIST_FILE)
+    endif
+    allowed_androidmk_files += $(PRODUCT_ALLOWED_ANDROIDMK_FILES)
+    subdir_makefiles += $(filter $(allowed_androidmk_files),$(file <$(OUT_DIR)/.module_paths/Android.mk.list))
+    allowed_androidmk_files :=
+  else
+    subdir_makefiles += $(file <$(OUT_DIR)/.module_paths/Android.mk.list)
+  endif
 endif
-subdir_makefiles += $(SOONG_OUT_DIR)/late-$(TARGET_PRODUCT).mk
+
+subdir_makefiles += $(SOONG_OUT_DIR)/late-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk
+
 subdir_makefiles_total := $(words int $(subdir_makefiles) post finish)
 .KATI_READONLY := subdir_makefiles_total
 
 $(foreach mk,$(subdir_makefiles),$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] including $(mk) ...)$(eval include $(mk)))
 
+# Build bootloader.img/radio.img, and unpack the partitions.
+-include vendor/google/build/tasks/tools/update_bootloader_radio_image.mk
+
 # For an unbundled image, we can skip blueprint_tools because unbundled image
 # aims to remove a large number framework projects from the manifest, the
 # sources or dependencies for these tools may be missing from the tree.
@@ -295,6 +315,9 @@
 checkbuild: blueprint_tests
 endif
 
+# Create necessary directories and symlinks in the root filesystem
+include system/core/rootdir/create_root_structure.mk
+
 endif # dont_bother
 
 ifndef subdir_makefiles_total
@@ -679,17 +702,20 @@
 # Scan all modules in general-tests, device-tests and other selected suites and
 # flatten the shared library dependencies.
 define update-host-shared-libs-deps-for-suites
-$(foreach suite,general-tests device-tests vts tvts art-host-tests host-unit-tests,\
+$(foreach suite,general-tests device-tests vts tvts art-host-tests host-unit-tests camera-hal-tests,\
   $(foreach m,$(COMPATIBILITY.$(suite).MODULES),\
     $(eval my_deps := $(call get-all-shared-libs-deps,$(m)))\
     $(foreach dep,$(my_deps),\
       $(foreach f,$(ALL_MODULES.$(dep).HOST_SHARED_LIBRARY_FILES),\
-        $(if $(filter $(suite),device-tests general-tests art-host-tests host-unit-tests),\
+        $(if $(filter $(suite),device-tests general-tests art-host-tests host-unit-tests camera-hal-tests),\
           $(eval my_testcases := $(HOST_OUT_TESTCASES)),\
           $(eval my_testcases := $$(COMPATIBILITY_TESTCASES_OUT_$(suite))))\
         $(eval target := $(my_testcases)/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\
-        $(eval link_target := ../../../$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\
-        $(eval symlink := $(my_testcases)/$(m)/shared_libs/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\
+        $(eval prefix := ../../..)
+        $(if $(strip $(patsubst %x86,,$(COMPATIBILITY.$(suite).ARCH_DIRS.$(m)))), \
+          $(if $(strip $(patsubst %x86_64,,$(COMPATIBILITY.$(suite).ARCH_DIRS.$(m)))),$(eval prefix := ../..),),) \
+        $(eval link_target := $(prefix)/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\
+        $(eval symlink := $(COMPATIBILITY.$(suite).ARCH_DIRS.$(m))/shared_libs/$(notdir $(f)))\
         $(eval COMPATIBILITY.$(suite).SYMLINKS := \
           $$(COMPATIBILITY.$(suite).SYMLINKS) $(f):$(link_target):$(symlink))\
         $(if $(strip $(ALL_TARGETS.$(target).META_LIC)),,$(call declare-copy-target-license-metadata,$(target),$(f)))\
@@ -970,8 +996,8 @@
 # Returns modules included automatically as a result of certain BoardConfig
 # variables being set.
 define auto-included-modules
-  llndk_in_system \
-  $(if $(DEVICE_MANIFEST_FILE),vendor_manifest.xml) \
+  $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),com.android.vndk.v$(vndk_ver)) \
+  llndk.libraries.txt \
   $(if $(DEVICE_MANIFEST_SKUS),$(foreach sku, $(DEVICE_MANIFEST_SKUS),vendor_manifest_$(sku).xml)) \
   $(if $(ODM_MANIFEST_FILES),odm_manifest.xml) \
   $(if $(ODM_MANIFEST_SKUS),$(foreach sku, $(ODM_MANIFEST_SKUS),odm_manifest_$(sku).xml)) \
@@ -1373,6 +1399,7 @@
     $(INSTALLED_RAMDISK_TARGET) \
     $(INSTALLED_BOOTIMAGE_TARGET) \
     $(INSTALLED_INIT_BOOT_IMAGE_TARGET) \
+    $(INSTALLED_DTBOIMAGE_TARGET) \
     $(INSTALLED_RADIOIMAGE_TARGET) \
     $(INSTALLED_DEBUG_RAMDISK_TARGET) \
     $(INSTALLED_DEBUG_BOOTIMAGE_TARGET) \
@@ -1855,80 +1882,18 @@
 filter_out_files += $(PRODUCT_OUT)/recovery/%
 endif
 
+# userdata.img
+ifndef BUILDING_USERDATA_IMAGE
+filter_out_files += $(PRODUCT_OUT)/data/%
+endif
+
 installed_files := $(sort $(filter-out $(filter_out_files),$(filter $(PRODUCT_OUT)/%,$(modules_to_install))))
 else
 installed_files := $(apps_only_installed_files)
 endif  # TARGET_BUILD_APPS
 
-# sbom-metadata.csv contains all raw data collected in Make for generating SBOM in generate-sbom.py.
-# There are multiple columns and each identifies the source of an installed file for a specific case.
-# The columns and their uses are described as below:
-#   installed_file: the file path on device, e.g. /product/app/Browser2/Browser2.apk
-#   module_path: the path of the module that generates the installed file, e.g. packages/apps/Browser2
-#   soong_module_type: Soong module type, e.g. android_app, cc_binary
-#   is_prebuilt_make_module: Y, if the installed file is from a prebuilt Make module, see prebuilt_internal.mk
-#   product_copy_files: the installed file is from variable PRODUCT_COPY_FILES, e.g. device/google/cuttlefish/shared/config/init.product.rc:product/etc/init/init.rc
-#   kernel_module_copy_files: the installed file is from variable KERNEL_MODULE_COPY_FILES, similar to product_copy_files
-#   is_platform_generated: this is an aggregated value including some small cases instead of adding more columns. It is set to Y if any case is Y
-#       is_build_prop: build.prop in each partition, see sysprop.mk.
-#       is_notice_file: NOTICE.xml.gz in each partition, see Makefile.
-#       is_dexpreopt_image_profile: see the usage of DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED in Soong and Make
-#       is_product_system_other_avbkey: see INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET
-#       is_system_other_odex_marker: see INSTALLED_SYSTEM_OTHER_ODEX_MARKER
-#       is_event_log_tags_file: see variable event_log_tags_file in Makefile
-#       is_kernel_modules_blocklist: modules.blocklist created for _dlkm partitions, see macro build-image-kernel-modules-dir in Makefile.
-#       is_fsverity_build_manifest_apk: BuildManifest<part>.apk files for system and system_ext partition, see ALL_FSVERITY_BUILD_MANIFEST_APK in Makefile.
-#       is_linker_config: see SYSTEM_LINKER_CONFIG and vendor_linker_config_file in Makefile.
-#   build_output_path: the path of the built file, used to calculate checksum
-#   static_libraries/whole_static_libraries: list of module name of the static libraries the file links against, e.g. libclang_rt.builtins or libclang_rt.builtins_32
-#       Info of all static libraries of all installed files are collected in variable _all_static_libs that is used to list all the static library files in sbom-metadata.csv.
-#       See the second foreach loop in the rule of sbom-metadata.csv for the detailed info of static libraries collected in _all_static_libs.
-#   is_static_lib: whether the file is a static library
-
 metadata_list := $(OUT_DIR)/.module_paths/METADATA.list
 metadata_files := $(subst $(newline),$(space),$(file <$(metadata_list)))
-$(PRODUCT_OUT)/sbom-metadata.csv:
-	rm -f $@
-	echo 'installed_file,module_path,soong_module_type,is_prebuilt_make_module,product_copy_files,kernel_module_copy_files,is_platform_generated,build_output_path,static_libraries,whole_static_libraries,is_static_lib' >> $@
-	$(eval _all_static_libs :=)
-	$(foreach f,$(installed_files),\
-	  $(eval _module_name := $(ALL_INSTALLED_FILES.$f)) \
-	  $(eval _path_on_device := $(patsubst $(PRODUCT_OUT)/%,%,$f)) \
-	  $(eval _build_output_path := $(PRODUCT_OUT)/$(_path_on_device)) \
-	  $(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH)))) \
-	  $(eval _soong_module_type := $(strip $(sort $(ALL_MODULES.$(_module_name).SOONG_MODULE_TYPE)))) \
-	  $(eval _is_prebuilt_make_module := $(ALL_MODULES.$(_module_name).IS_PREBUILT_MAKE_MODULE)) \
-	  $(eval _product_copy_files := $(sort $(filter %:$(_path_on_device),$(product_copy_files_without_owner)))) \
-	  $(eval _kernel_module_copy_files := $(sort $(filter %$(_path_on_device),$(KERNEL_MODULE_COPY_FILES)))) \
-	  $(eval _is_build_prop := $(call is-build-prop,$f)) \
-	  $(eval _is_notice_file := $(call is-notice-file,$f)) \
-	  $(eval _is_dexpreopt_image_profile := $(if $(filter %:/$(_path_on_device),$(DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED)),Y)) \
-	  $(eval _is_product_system_other_avbkey := $(if $(findstring $f,$(INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET)),Y)) \
-	  $(eval _is_event_log_tags_file := $(if $(findstring $f,$(event_log_tags_file)),Y)) \
-	  $(eval _is_system_other_odex_marker := $(if $(findstring $f,$(INSTALLED_SYSTEM_OTHER_ODEX_MARKER)),Y)) \
-	  $(eval _is_kernel_modules_blocklist := $(if $(findstring $f,$(ALL_KERNEL_MODULES_BLOCKLIST)),Y)) \
-	  $(eval _is_fsverity_build_manifest_apk := $(if $(findstring $f,$(ALL_FSVERITY_BUILD_MANIFEST_APK)),Y)) \
-	  $(eval _is_linker_config := $(if $(findstring $f,$(SYSTEM_LINKER_CONFIG) $(vendor_linker_config_file)),Y)) \
-	  $(eval _is_partition_compat_symlink := $(if $(findstring $f,$(PARTITION_COMPAT_SYMLINKS)),Y)) \
-	  $(eval _is_flags_file := $(if $(findstring $f, $(ALL_FLAGS_FILES)),Y)) \
-	  $(eval _is_rootdir_symlink := $(if $(findstring $f, $(ALL_ROOTDIR_SYMLINKS)),Y)) \
-	  $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)$(_is_flags_file)$(_is_rootdir_symlink)) \
-	  $(eval _static_libs := $(ALL_INSTALLED_FILES.$f.STATIC_LIBRARIES)) \
-	  $(eval _whole_static_libs := $(ALL_INSTALLED_FILES.$f.WHOLE_STATIC_LIBRARIES)) \
-	  $(foreach l,$(_static_libs),$(eval _all_static_libs += $l:$(strip $(sort $(ALL_MODULES.$l.PATH))):$(strip $(sort $(ALL_MODULES.$l.SOONG_MODULE_TYPE))):$(ALL_STATIC_LIBRARIES.$l.BUILT_FILE))) \
-	  $(foreach l,$(_whole_static_libs),$(eval _all_static_libs += $l:$(strip $(sort $(ALL_MODULES.$l.PATH))):$(strip $(sort $(ALL_MODULES.$l.SOONG_MODULE_TYPE))):$(ALL_STATIC_LIBRARIES.$l.BUILT_FILE))) \
-	  echo '/$(_path_on_device),$(_module_path),$(_soong_module_type),$(_is_prebuilt_make_module),$(_product_copy_files),$(_kernel_module_copy_files),$(_is_platform_generated),$(_build_output_path),$(_static_libs),$(_whole_static_libs),' >> $@; \
-	)
-	$(foreach l,$(sort $(_all_static_libs)), \
-	  $(eval _lib_stem := $(call word-colon,1,$l)) \
-	  $(eval _module_path := $(call word-colon,2,$l)) \
-	  $(eval _soong_module_type := $(call word-colon,3,$l)) \
-	  $(eval _built_file := $(call word-colon,4,$l)) \
-	  $(eval _static_libs := $(ALL_STATIC_LIBRARIES.$l.STATIC_LIBRARIES)) \
-	  $(eval _whole_static_libs := $(ALL_STATIC_LIBRARIES.$l.WHOLE_STATIC_LIBRARIES)) \
-	  $(eval _is_static_lib := Y) \
-	  echo '$(_lib_stem).a,$(_module_path),$(_soong_module_type),,,,,$(_built_file),$(_static_libs),$(_whole_static_libs),$(_is_static_lib)' >> $@; \
-	)
 
 # Create metadata for compliance support in Soong
 .PHONY: make-compliance-metadata
@@ -1950,7 +1915,6 @@
 	  $(eval _kernel_module_copy_files := $(sort $(filter %$(_path_on_device),$(KERNEL_MODULE_COPY_FILES)))) \
 	  $(eval _is_build_prop := $(call is-build-prop,$f)) \
 	  $(eval _is_notice_file := $(call is-notice-file,$f)) \
-	  $(eval _is_dexpreopt_image_profile := $(if $(filter %:/$(_path_on_device),$(DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED)),Y)) \
 	  $(eval _is_product_system_other_avbkey := $(if $(findstring $f,$(INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET)),Y)) \
 	  $(eval _is_event_log_tags_file := $(if $(findstring $f,$(event_log_tags_file)),Y)) \
 	  $(eval _is_system_other_odex_marker := $(if $(findstring $f,$(INSTALLED_SYSTEM_OTHER_ODEX_MARKER)),Y)) \
@@ -1960,7 +1924,7 @@
 	  $(eval _is_partition_compat_symlink := $(if $(findstring $f,$(PARTITION_COMPAT_SYMLINKS)),Y)) \
 	  $(eval _is_flags_file := $(if $(findstring $f, $(ALL_FLAGS_FILES)),Y)) \
 	  $(eval _is_rootdir_symlink := $(if $(findstring $f, $(ALL_ROOTDIR_SYMLINKS)),Y)) \
-	  $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)$(_is_flags_file)$(_is_rootdir_symlink)) \
+	  $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)$(_is_flags_file)$(_is_rootdir_symlink)) \
 	  $(eval _static_libs := $(if $(_is_soong_module),,$(ALL_INSTALLED_FILES.$f.STATIC_LIBRARIES))) \
 	  $(eval _whole_static_libs := $(if $(_is_soong_module),,$(ALL_INSTALLED_FILES.$f.WHOLE_STATIC_LIBRARIES))) \
 	  $(eval _license_text := $(if $(filter $(_build_output_path),$(ALL_NON_MODULES)),$(ALL_NON_MODULES.$(_build_output_path).NOTICES))) \
@@ -1988,22 +1952,13 @@
 $(SOONG_OUT_DIR)/compliance-metadata/$(TARGET_PRODUCT)/installed_files.stamp: $(installed_files)
 	touch $@
 
-# (TODO: b/272358583 find another way of always rebuilding sbom.spdx)
 # Remove the always_dirty_file.txt whenever the makefile is evaluated
 $(shell rm -f $(PRODUCT_OUT)/always_dirty_file.txt)
 $(PRODUCT_OUT)/always_dirty_file.txt:
 	touch $@
 
 .PHONY: sbom
-ifeq ($(TARGET_BUILD_APPS),)
-sbom: $(PRODUCT_OUT)/sbom.spdx.json
-$(PRODUCT_OUT)/sbom.spdx.json: $(PRODUCT_OUT)/sbom.spdx
-$(PRODUCT_OUT)/sbom.spdx: $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM) $(installed_files) $(metadata_list) $(metadata_files) $(PRODUCT_OUT)/always_dirty_file.txt
-	rm -rf $@
-	$(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --json
-
-$(call dist-for-goals,droid,$(PRODUCT_OUT)/sbom.spdx.json:sbom/sbom.spdx.json)
-else
+ifneq ($(TARGET_BUILD_APPS),)
 # Create build rules for generating SBOMs of unbundled APKs and APEXs
 # $1: sbom file
 # $2: sbom fragment file
diff --git a/core/os_licensing.mk b/core/os_licensing.mk
index 1e1b7df..d15a3d0 100644
--- a/core/os_licensing.mk
+++ b/core/os_licensing.mk
@@ -17,13 +17,17 @@
 
 $(eval $(call text-notice-rule,$(target_notice_file_txt),"System image",$(system_notice_file_message),$(SYSTEM_NOTICE_DEPS),$(SYSTEM_NOTICE_DEPS)))
 
+ifneq ($(USE_SOONG_DEFINED_SYSTEM_IMAGE),true)
 $(installed_notice_html_or_xml_gz): $(target_notice_file_xml_gz)
 	$(copy-file-to-target)
 endif
+endif
 
 $(call declare-1p-target,$(target_notice_file_xml_gz))
+ifneq ($(USE_SOONG_DEFINED_SYSTEM_IMAGE),true)
 $(call declare-1p-target,$(installed_notice_html_or_xml_gz))
 endif
+endif
 
 .PHONY: vendorlicense
 vendorlicense: $(call corresponding-license-metadata, $(VENDOR_NOTICE_DEPS)) reportmissinglicenses
diff --git a/core/packaging/flags.mk b/core/packaging/flags.mk
index a77956b..ccb502c 100644
--- a/core/packaging/flags.mk
+++ b/core/packaging/flags.mk
@@ -18,7 +18,7 @@
 #
 
 # TODO: Should we do all of the images in $(IMAGES_TO_BUILD)?
-_FLAG_PARTITIONS := product system system_ext vendor
+_FLAG_PARTITIONS := product system vendor
 
 
 # -----------------------------------------------------------------
@@ -28,7 +28,6 @@
 # $(1): built aconfig flags file (out)
 # $(2): installed aconfig flags file (out)
 # $(3): the partition (in)
-# $(4): input aconfig files for the partition (in)
 define generate-partition-aconfig-flag-file
 $(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
 $(eval $(strip $(1)): PRIVATE_IN := $(strip $(4)))
@@ -36,12 +35,14 @@
 	mkdir -p $$(dir $$(PRIVATE_OUT))
 	$$(if $$(PRIVATE_IN), \
 		$$(ACONFIG) dump --dedup --format protobuf --out $$(PRIVATE_OUT) \
-			--filter container:$$(strip $(3)) $$(addprefix --cache ,$$(PRIVATE_IN)), \
+			--filter container:$(strip $(3)) \
+			$$(addprefix --cache ,$$(PRIVATE_IN)), \
 		echo -n > $$(PRIVATE_OUT) \
 	)
 $(call copy-one-file, $(1), $(2))
 endef
 
+
 # Create a summary file of build flags for each partition
 # $(1): built aconfig flags file (out)
 # $(2): installed aconfig flags file (out)
@@ -59,16 +60,22 @@
 $(call copy-one-file, $(1), $(2))
 endef
 
-
 $(foreach partition, $(_FLAG_PARTITIONS), \
 	$(eval aconfig_flag_summaries_protobuf.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig_flags.pb) \
 	$(eval $(call generate-partition-aconfig-flag-file, \
-				$(TARGET_OUT_FLAGS)/$(partition)/aconfig_flags.pb, \
-				$(aconfig_flag_summaries_protobuf.$(partition)), \
-				$(partition), \
-				$(sort $(foreach m,$(call register-names-for-partition, $(partition)), \
+			$(TARGET_OUT_FLAGS)/$(partition)/aconfig_flags.pb, \
+			$(aconfig_flag_summaries_protobuf.$(partition)), \
+			$(partition), \
+			$(sort \
+				$(foreach m, $(call register-names-for-partition, $(partition)), \
 					$(ALL_MODULES.$(m).ACONFIG_FILES) \
-				)), \
+				) \
+				$(if $(filter system, $(partition)), \
+					$(foreach m, $(call register-names-for-partition, system_ext), \
+						$(ALL_MODULES.$(m).ACONFIG_FILES) \
+					) \
+				) \
+			) \
 	)) \
 )
 
@@ -90,42 +97,54 @@
 # $(1): built aconfig flags storage package map file (out)
 # $(2): built aconfig flags storage flag map file (out)
 # $(3): built aconfig flags storage flag val file (out)
-# $(4): installed aconfig flags storage package map file (out)
-# $(5): installed aconfig flags storage flag map file (out)
-# $(6): installed aconfig flags storage flag value file (out)
-# $(7): input aconfig files for the partition (in)
-# $(8): partition name
+# $(4): built aconfig flags storage flag info file (out)
+# $(5): installed aconfig flags storage package map file (out)
+# $(6): installed aconfig flags storage flag map file (out)
+# $(7): installed aconfig flags storage flag value file (out)
+# $(8): installed aconfig flags storage flag info file (out)
+# $(9): input aconfig files for the partition (in)
+# $(10): partition name
 define generate-partition-aconfig-storage-file
 $(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
-$(eval $(strip $(1)): PRIVATE_IN := $(strip $(7)))
-$(strip $(1)): $(ACONFIG) $(strip $(7))
+$(eval $(strip $(1)): PRIVATE_IN := $(strip $(9)))
+$(strip $(1)): $(ACONFIG) $(strip $(9))
 	mkdir -p $$(dir $$(PRIVATE_OUT))
 	$$(if $$(PRIVATE_IN), \
-		$$(ACONFIG) create-storage --container $(8) --file package_map --out $$(PRIVATE_OUT) \
+		$$(ACONFIG) create-storage --container $(10) --file package_map --out $$(PRIVATE_OUT) \
 			$$(addprefix --cache ,$$(PRIVATE_IN)), \
 	)
 	touch $$(PRIVATE_OUT)
 $(eval $(strip $(2)): PRIVATE_OUT := $(strip $(2)))
-$(eval $(strip $(2)): PRIVATE_IN := $(strip $(7)))
-$(strip $(2)): $(ACONFIG) $(strip $(7))
+$(eval $(strip $(2)): PRIVATE_IN := $(strip $(9)))
+$(strip $(2)): $(ACONFIG) $(strip $(9))
 	mkdir -p $$(dir $$(PRIVATE_OUT))
 	$$(if $$(PRIVATE_IN), \
-		$$(ACONFIG) create-storage --container $(8) --file flag_map --out $$(PRIVATE_OUT) \
+		$$(ACONFIG) create-storage --container $(10) --file flag_map --out $$(PRIVATE_OUT) \
 			$$(addprefix --cache ,$$(PRIVATE_IN)), \
 	)
 	touch $$(PRIVATE_OUT)
 $(eval $(strip $(3)): PRIVATE_OUT := $(strip $(3)))
-$(eval $(strip $(3)): PRIVATE_IN := $(strip $(7)))
-$(strip $(3)): $(ACONFIG) $(strip $(7))
+$(eval $(strip $(3)): PRIVATE_IN := $(strip $(9)))
+$(strip $(3)): $(ACONFIG) $(strip $(9))
 	mkdir -p $$(dir $$(PRIVATE_OUT))
 	$$(if $$(PRIVATE_IN), \
-		$$(ACONFIG) create-storage --container $(8) --file flag_val --out $$(PRIVATE_OUT) \
+		$$(ACONFIG) create-storage --container $(10) --file flag_val --out $$(PRIVATE_OUT) \
 		$$(addprefix --cache ,$$(PRIVATE_IN)), \
 	)
 	touch $$(PRIVATE_OUT)
-$(call copy-one-file, $(strip $(1)), $(4))
-$(call copy-one-file, $(strip $(2)), $(5))
-$(call copy-one-file, $(strip $(3)), $(6))
+$(eval $(strip $(4)): PRIVATE_OUT := $(strip $(4)))
+$(eval $(strip $(4)): PRIVATE_IN := $(strip $(9)))
+$(strip $(4)): $(ACONFIG) $(strip $(9))
+	mkdir -p $$(dir $$(PRIVATE_OUT))
+	$$(if $$(PRIVATE_IN), \
+		$$(ACONFIG) create-storage --container $(10) --file flag_info --out $$(PRIVATE_OUT) \
+		$$(addprefix --cache ,$$(PRIVATE_IN)), \
+	)
+	touch $$(PRIVATE_OUT)
+$(call copy-one-file, $(strip $(1)), $(5))
+$(call copy-one-file, $(strip $(2)), $(6))
+$(call copy-one-file, $(strip $(3)), $(7))
+$(call copy-one-file, $(strip $(4)), $(8))
 endef
 
 ifeq ($(RELEASE_CREATE_ACONFIG_STORAGE_FILE),true)
@@ -133,13 +152,16 @@
 	$(eval aconfig_storage_package_map.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig/package.map) \
 	$(eval aconfig_storage_flag_map.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig/flag.map) \
 	$(eval aconfig_storage_flag_val.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig/flag.val) \
+	$(eval aconfig_storage_flag_info.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig/flag.info) \
 	$(eval $(call generate-partition-aconfig-storage-file, \
 				$(TARGET_OUT_FLAGS)/$(partition)/package.map, \
 				$(TARGET_OUT_FLAGS)/$(partition)/flag.map, \
 				$(TARGET_OUT_FLAGS)/$(partition)/flag.val, \
+				$(TARGET_OUT_FLAGS)/$(partition)/flag.info, \
 				$(aconfig_storage_package_map.$(partition)), \
 				$(aconfig_storage_flag_map.$(partition)), \
 				$(aconfig_storage_flag_val.$(partition)), \
+				$(aconfig_storage_flag_info.$(partition)), \
 				$(aconfig_flag_summaries_protobuf.$(partition)), \
 				$(partition), \
 	)) \
@@ -155,6 +177,7 @@
 			$(aconfig_storage_package_map.$(partition)) \
 			$(aconfig_storage_flag_map.$(partition)) \
 			$(aconfig_storage_flag_val.$(partition)) \
+			$(aconfig_storage_flag_info.$(partition)) \
 		))
 
 ALL_DEFAULT_INSTALLED_MODULES += $(required_flags_files)
@@ -174,5 +197,5 @@
 	$(eval aconfig_storage_package_map.$(partition):=) \
 	$(eval aconfig_storage_flag_map.$(partition):=) \
 	$(eval aconfig_storage_flag_val.$(partition):=) \
+	$(eval aconfig_storage_flag_info.$(partition):=) \
 )
-
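For reference, derived from the macros above rather than new behavior: with RELEASE_CREATE_ACONFIG_STORAGE_FILE set to true, each flag partition (product, system and vendor) now installs four aconfig storage files.

#   $(PRODUCT_OUT)/<partition>/etc/aconfig/package.map
#   $(PRODUCT_OUT)/<partition>/etc/aconfig/flag.map
#   $(PRODUCT_OUT)/<partition>/etc/aconfig/flag.val
#   $(PRODUCT_OUT)/<partition>/etc/aconfig/flag.info   (new in this change)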
diff --git a/core/product.mk b/core/product.mk
index 832d094..1b336b0 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -26,6 +26,7 @@
 _product_single_value_vars += PRODUCT_MODEL
 _product_single_value_vars += PRODUCT_NAME_FOR_ATTESTATION
 _product_single_value_vars += PRODUCT_MODEL_FOR_ATTESTATION
+_product_single_value_vars += PRODUCT_BASE_OS
 
 # Defines the ELF segment alignment for binaries (executables and shared libraries).
 # The ELF segment alignment has to be a PAGE_SIZE multiple. For example, if
@@ -283,6 +284,9 @@
 # Whether APEX should be compressed or not
 _product_single_value_vars += PRODUCT_COMPRESSED_APEX
 
+# Default fs type for APEX payload image (apex_payload.img)
+_product_single_value_vars += PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE
+
 # VNDK version of product partition. It can be 'current' if the product
 # partitions uses PLATFORM_VNDK_VERSION.
 _product_single_value_vars += PRODUCT_PRODUCT_VNDK_VERSION
@@ -365,6 +369,8 @@
 _product_single_value_vars += PRODUCT_BUILD_VBMETA_IMAGE
 _product_single_value_vars += PRODUCT_BUILD_SUPER_EMPTY_IMAGE
 _product_single_value_vars += PRODUCT_BUILD_PVMFW_IMAGE
+_product_single_value_vars += PRODUCT_BUILD_DESKTOP_RECOVERY_IMAGE
+_product_single_value_vars += PRODUCT_BUILD_DESKTOP_UPDATE_IMAGE
 
 # List of boot jars delivered via updatable APEXes, following the same format as
 # PRODUCT_BOOT_JARS.
@@ -389,20 +395,6 @@
 # If set, Java module in product partition cannot use hidden APIs.
 _product_single_value_vars += PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE
 
-# If set, only java_sdk_library can be used at inter-partition dependency.
-# Note: Build error if BOARD_VNDK_VERSION is not set while
-#       PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY is true, because
-#       PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY has no meaning if
-#       BOARD_VNDK_VERSION is not set.
-# Note: When PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE is not set, there are
-#       no restrictions at dependency between system and product partition.
-_product_single_value_vars += PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY
-
-# Allowlist for PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY option.
-# Listed modules are allowed at inter-partition dependency even if it isn't
-# a java_sdk_library module.
-_product_list_vars += PRODUCT_INTER_PARTITION_JAVA_LIBRARY_ALLOWLIST
-
 # Install a copy of the debug policy to the system_ext partition, and allow
 # init-second-stage to load debug policy from system_ext.
 # This option is only meant to be set by compliance GSI targets.
@@ -435,8 +427,9 @@
 # If true, the cgroup v2 hierarchy will be split into apps/system subtrees
 _product_single_value_vars += PRODUCT_CGROUP_V2_SYS_APP_ISOLATION_ENABLED
 
-# List of .json files to be merged/compiled into vendor/etc/linker.config.pb
+# List of .json files to be merged/compiled into vendor/etc/linker.config.pb and product/etc/linker.config.pb
 _product_list_vars += PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS
+_product_list_vars += PRODUCT_PRODUCT_LINKER_CONFIG_FRAGMENTS
 
 # Whether to use userfaultfd GC.
 # Possible values are:
@@ -498,6 +491,17 @@
 # If set, build would generate system image from Soong-defined module.
 _product_single_value_vars += PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE
 
+# List of stub libraries specific to the product that are already present in the system image and
+# should be included in the system_linker_config.
+_product_list_vars += PRODUCT_EXTRA_STUB_LIBRARIES
+
+# If set to true, all Android.mk files will be ignored.
+_product_single_value_vars += PRODUCT_IGNORE_ALL_ANDROIDMK
+# When PRODUCT_IGNORE_ALL_ANDROIDMK is set to true, this variable will be used to allow some Android.mk files.
+_product_list_vars += PRODUCT_ALLOWED_ANDROIDMK_FILES
+# When PRODUCT_IGNORE_ALL_ANDROIDMK is set to true, this is the path of a file that contains a list of allowed Android.mk files.
+_product_single_value_vars += PRODUCT_ANDROIDMK_ALLOWLIST_FILE
+
 .KATI_READONLY := _product_single_value_vars _product_list_vars
 _product_var_list :=$= $(_product_single_value_vars) $(_product_list_vars)
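Illustrative sketch, not part of the patch: a product that opts out of Android.mk parsing might configure the new variables as below (paths are hypothetical); per the matching core/main.mk change, the allowlist file is -included and can append further entries to allowed_androidmk_files.

PRODUCT_IGNORE_ALL_ANDROIDMK := true
PRODUCT_ALLOWED_ANDROIDMK_FILES := vendor/acme/camera/Android.mk
PRODUCT_ANDROIDMK_ALLOWLIST_FILE := vendor/acme/build/allowed_androidmk.mk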
 
diff --git a/core/product_config.mk b/core/product_config.mk
index cc2fea9..692e375 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -311,6 +311,14 @@
 
 TARGET_DEVICE := $(PRODUCT_DEVICE)
 
+# Allow overriding PLATFORM_BASE_OS when PRODUCT_BASE_OS is defined
+ifdef PRODUCT_BASE_OS
+  PLATFORM_BASE_OS := $(PRODUCT_BASE_OS)
+else
+  PLATFORM_BASE_OS := $(PLATFORM_BASE_OS_ENV_INPUT)
+endif
+.KATI_READONLY := PLATFORM_BASE_OS
+
 # TODO: also keep track of things like "port", "land" in product files.
 
 # Figure out which resource configuration options to use for this
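Illustrative sketch, not part of the patch: a product can now pin the base OS fingerprint from product config instead of the PLATFORM_BASE_OS environment variable (this value normally feeds ro.build.version.base_os); the fingerprint below is made up.

PRODUCT_BASE_OS := google/example/example:15/AP4A.000000.000/00000000:user/release-keys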
@@ -416,10 +424,12 @@
   endif
 endif
 
-$(foreach pair,$(PRODUCT_APEX_BOOT_JARS), \
-  $(eval jar := $(call word-colon,2,$(pair))) \
-  $(if $(findstring $(jar), $(PRODUCT_BOOT_JARS)), \
-    $(error A jar in PRODUCT_APEX_BOOT_JARS must not be in PRODUCT_BOOT_JARS, but $(jar) is)))
+$(foreach apexpair,$(PRODUCT_APEX_BOOT_JARS), \
+  $(foreach platformpair,$(PRODUCT_BOOT_JARS), \
+    $(eval apexjar := $(call word-colon,2,$(apexpair))) \
+    $(eval platformjar := $(call word-colon,2,$(platformpair))) \
+    $(if $(filter $(apexjar), $(platformjar)), \
+      $(error A jar in PRODUCT_APEX_BOOT_JARS must not be in PRODUCT_BOOT_JARS, but $(apexjar) is))))
 
 ENFORCE_SYSTEM_CERTIFICATE := $(PRODUCT_ENFORCE_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT)
 ENFORCE_SYSTEM_CERTIFICATE_ALLOW_LIST := $(PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_ALLOW_LIST)
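Illustrative sketch, not part of the patch: both boot-jar lists hold <owner>:<jar> pairs, and the rewritten check compares only the jar component of each pair. A hypothetical configuration like the one below would now trigger the error because framework-foo appears in both lists.

PRODUCT_BOOT_JARS += platform:framework-foo
PRODUCT_APEX_BOOT_JARS += com.android.foo:framework-foo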
@@ -524,6 +534,17 @@
   PRODUCT_COMPRESSED_APEX := $(OVERRIDE_PRODUCT_COMPRESSED_APEX)
 endif
 
+ifdef OVERRIDE_PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE
+  PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE := $(OVERRIDE_PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE)
+else ifeq ($(PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE),)
+  # Use ext4 as a default payload fs type
+  PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE := ext4
+endif
+ifeq ($(filter ext4 erofs,$(PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE)),)
+  $(error PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE should be either erofs or ext4,\
+    not $(PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE).)
+endif
+
 $(KATI_obsolete_var OVERRIDE_PRODUCT_EXTRA_VNDK_VERSIONS \
     ,Use PRODUCT_EXTRA_VNDK_VERSIONS instead)
 
@@ -594,7 +615,12 @@
     # Vendors with GRF must define BOARD_SHIPPING_API_LEVEL for the vendor API level.
     # In this case, the VSR API level is the minimum of the PRODUCT_SHIPPING_API_LEVEL
     # and RELEASE_BOARD_API_LEVEL
-    VSR_VENDOR_API_LEVEL := $(call math_min,$(VSR_VENDOR_API_LEVEL),$(RELEASE_BOARD_API_LEVEL))
+    board_api_level := $(RELEASE_BOARD_API_LEVEL)
+    ifdef BOARD_API_LEVEL_PROP_OVERRIDE
+      board_api_level := $(BOARD_API_LEVEL_PROP_OVERRIDE)
+    endif
+    VSR_VENDOR_API_LEVEL := $(call math_min,$(VSR_VENDOR_API_LEVEL),$(board_api_level))
+    board_api_level :=
   endif
 endif
 .KATI_READONLY := VSR_VENDOR_API_LEVEL
diff --git a/core/product_config.rbc b/core/product_config.rbc
index 59e2c95..20344f4 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -382,6 +382,11 @@
     _soong_config_namespace(g, nsname)
     g[_soong_config_namespaces_key][nsname][var]=_mkstrip(value)
 
+def _soong_config_set_bool(g, nsname, var, value):
+    """Assigns the value to the variable in the namespace, and marks it as a boolean."""
+    _soong_config_set(g, nsname, var, _filter("true", value))
+    g["SOONG_CONFIG_TYPE_%s_%s" % (nsname, var)] = "bool"
+
 def _soong_config_append(g, nsname, var, value):
     """Appends to the value of the variable in the namespace."""
     _soong_config_namespace(g, nsname)
@@ -861,6 +866,7 @@
     soong_config_namespace = _soong_config_namespace,
     soong_config_append = _soong_config_append,
     soong_config_set = _soong_config_set,
+    soong_config_set_bool = _soong_config_set_bool,
     soong_config_get = _soong_config_get,
     abspath = _abspath,
     add_product_dex_preopt_module_config = _add_product_dex_preopt_module_config,
diff --git a/core/proguard.flags b/core/proguard.flags
index aa406b9..5148e56 100644
--- a/core/proguard.flags
+++ b/core/proguard.flags
@@ -38,6 +38,17 @@
   @com.android.internal.annotations.KeepForWeakReference <fields>;
 }
 
+# Needed to ensure callback field references are kept in their respective
+# owning classes when the downstream callback registrars only store weak refs.
+-if @com.android.internal.annotations.WeaklyReferencedCallback class *
+-keepclassmembers,allowaccessmodification class * {
+  <1> *;
+}
+-if class * extends @com.android.internal.annotations.WeaklyReferencedCallback **
+-keepclassmembers,allowaccessmodification class * {
+  <1> *;
+}
+
 # Understand the common @Keep annotation from various Android packages:
 #  * android.support.annotation
 #  * androidx.annotation
diff --git a/core/project_definitions.mk b/core/project_definitions.mk
index 5728b67..184b03e 100644
--- a/core/project_definitions.mk
+++ b/core/project_definitions.mk
@@ -22,3 +22,6 @@
 # Include definitions for prebuilt SDK, if present.
 #
 -include prebuilts/sdk/current/definitions.mk
+
+# SDV-specific config.
+-include system/software_defined_vehicle/platform/config.mk
diff --git a/core/ravenwood_test_config_template.xml b/core/ravenwood_test_config_template.xml
index 16a22c0..9e9dd76 100644
--- a/core/ravenwood_test_config_template.xml
+++ b/core/ravenwood_test_config_template.xml
@@ -18,11 +18,11 @@
     <option name="test-suite-tag" value="ravenwood" />
     <option name="test-suite-tag" value="ravenwood-tests" />
 
-    <option name="java-folder" value="prebuilts/jdk/jdk17/linux-x86/" />
+    <option name="java-folder" value="prebuilts/jdk/jdk21/linux-x86/" />
     <option name="use-ravenwood-resources" value="true" />
     <option name="exclude-paths" value="java" />
-    <option name="socket-timeout" value="10000" />
     <option name="null-device" value="true" />
+    <option name="do-not-swallow-runner-errors" value="true" />
 
     {EXTRA_CONFIGS}
 
diff --git a/core/release_config.mk b/core/release_config.mk
index 2898868..fe2170e 100644
--- a/core/release_config.mk
+++ b/core/release_config.mk
@@ -131,6 +131,9 @@
         _args += --guard=false
     endif
     _args += --allow-missing=true
+    ifneq (,$(TARGET_PRODUCT))
+        _args += --product $(TARGET_PRODUCT)
+    endif
     _flags_dir:=$(OUT_DIR)/soong/release-config
     _flags_file:=$(_flags_dir)/release_config-$(TARGET_PRODUCT)-$(TARGET_RELEASE).vars
     # release-config generates $(_flags_varmk)
diff --git a/core/robolectric_test_config_template.xml b/core/robolectric_test_config_template.xml
index 56d2312..1956b6e 100644
--- a/core/robolectric_test_config_template.xml
+++ b/core/robolectric_test_config_template.xml
@@ -18,10 +18,17 @@
     <option name="test-suite-tag" value="robolectric" />
     <option name="test-suite-tag" value="robolectric-tests" />
 
-    <option name="java-folder" value="prebuilts/jdk/jdk17/linux-x86/" />
+    <option name="java-folder" value="prebuilts/jdk/jdk21/linux-x86/" />
     <option name="exclude-paths" value="java" />
     <option name="use-robolectric-resources" value="true" />
 
+    <!-- attempt to always show Tradefed errors -->
+    <option name="do-not-swallow-runner-errors" value="true" />
+
+    <!-- prevent Tradefed from hanging indefinitely in CI -->
+    <option name="socket-timeout" value="600000" />
+    <option name="test-case-timeout" value="2m" />
+
     {EXTRA_CONFIGS}
 
     <test class="com.android.tradefed.testtype.IsolatedHostTest" >
@@ -33,5 +40,15 @@
         <option name="java-flags" value="--add-opens=java.base/jdk.internal.util.random=ALL-UNNAMED"/>
         <!-- b/251387255 -->
         <option name="java-flags" value="--add-opens=java.base/java.io=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-opens=java.base/java.net=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-opens=java.base/java.nio=ALL-UNNAMED"/> <!-- required for ShadowVMRuntime -->
+        <option name="java-flags" value="--add-opens=java.base/java.security=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-opens=java.base/java.text=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-opens=java.base/java.util=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-opens=java.base/jdk.internal.access=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-opens=java.desktop/java.awt.font=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-opens=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-opens=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-opens=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED"/>
     </test>
 </configuration>
diff --git a/core/soong_android_app_set.mk b/core/soong_android_app_set.mk
index ec3d8c8..d97980d 100644
--- a/core/soong_android_app_set.mk
+++ b/core/soong_android_app_set.mk
@@ -9,10 +9,6 @@
 LOCAL_BUILT_MODULE_STEM := package.apk
 LOCAL_INSTALLED_MODULE_STEM := $(notdir $(LOCAL_PREBUILT_MODULE_FILE))
 
-# Use the Soong output as the checkbuild target instead of LOCAL_BUILT_MODULE
-# to avoid checkbuilds making an extra copy of every module.
-LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE)
-
 #######################################
 include $(BUILD_SYSTEM)/base_rules.mk
 #######################################
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index 3aa244c..df1cf2d 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -29,16 +29,6 @@
 full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar
 
 
-# Use the Soong output as the checkbuild target instead of LOCAL_BUILT_MODULE
-# to avoid checkbuilds making an extra copy of every module.
-LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_CLASSES_JAR)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_HEADER_JAR)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_FULL_MANIFEST_FILE)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_DEXPREOPT_CONFIG)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_DEX_JAR)
-
 #######################################
 include $(BUILD_SYSTEM)/base_rules.mk
 #######################################
diff --git a/core/soong_cc_rust_prebuilt.mk b/core/soong_cc_rust_prebuilt.mk
index a1c6478..da60832 100644
--- a/core/soong_cc_rust_prebuilt.mk
+++ b/core/soong_cc_rust_prebuilt.mk
@@ -38,10 +38,6 @@
   endif
 endif
 
-# Use the Soong output as the checkbuild target instead of LOCAL_BUILT_MODULE
-# to avoid checkbuilds making an extra copy of every module.
-LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE)
-
 my_check_same_vndk_variants :=
 same_vndk_variants_stamp :=
 ifeq ($(LOCAL_CHECK_SAME_VNDK_VARIANTS),true)
@@ -61,7 +57,7 @@
   # Note that because `checkbuild` doesn't check LOCAL_BUILT_MODULE for soong-built modules adding
   # the timestamp to LOCAL_BUILT_MODULE isn't enough. It is skipped when the vendor variant
   # isn't used at all and it may break in the downstream trees.
-  LOCAL_ADDITIONAL_CHECKED_MODULE := $(same_vndk_variants_stamp)
+  LOCAL_ADDITIONAL_CHECKED_MODULE += $(same_vndk_variants_stamp)
 endif
 
 #######################################
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 12b4135..32ca660 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -1,5 +1,5 @@
-SOONG_MAKEVARS_MK := $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT).mk
-SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android-$(TARGET_PRODUCT).mk
+SOONG_MAKEVARS_MK := $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk
+SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk
 
 include $(BUILD_SYSTEM)/art_config.mk
 include $(BUILD_SYSTEM)/dex_preopt_config.mk
@@ -26,7 +26,7 @@
 $(shell mkdir -p $(dir $(SOONG_VARIABLES)))
 $(call json_start)
 
-$(call add_json_str,  Make_suffix, -$(TARGET_PRODUCT))
+$(call add_json_str,  Make_suffix, -$(TARGET_PRODUCT)$(COVERAGE_SUFFIX))
 
 $(call add_json_str,  BuildId,                           $(BUILD_ID))
 $(call add_json_str,  BuildFingerprintFile,              build_fingerprint.txt)
@@ -150,9 +150,9 @@
 $(call add_json_str,  BtConfigIncludeDir,                $(BOARD_BLUETOOTH_BDROID_BUILDCFG_INCLUDE_DIR))
 $(call add_json_list, DeviceKernelHeaders,               $(TARGET_DEVICE_KERNEL_HEADERS) $(TARGET_BOARD_KERNEL_HEADERS) $(TARGET_PRODUCT_KERNEL_HEADERS))
 $(call add_json_str,  VendorApiLevel,                    $(BOARD_API_LEVEL))
+$(call add_json_str,  VendorApiLevelPropOverride,        $(BOARD_API_LEVEL_PROP_OVERRIDE))
 $(call add_json_list, ExtraVndkVersions,                 $(PRODUCT_EXTRA_VNDK_VERSIONS))
 $(call add_json_list, DeviceSystemSdkVersions,           $(BOARD_SYSTEMSDK_VERSIONS))
-$(call add_json_str,  RecoverySnapshotVersion,           $(RECOVERY_SNAPSHOT_VERSION))
 $(call add_json_list, Platform_systemsdk_versions,       $(PLATFORM_SYSTEMSDK_VERSIONS))
 $(call add_json_bool, Malloc_low_memory,                 $(findstring true,$(MALLOC_SVELTE) $(MALLOC_LOW_MEMORY)))
 $(call add_json_bool, Malloc_zero_contents,              $(call invert_bool,$(filter false,$(MALLOC_ZERO_CONTENTS))))
@@ -167,8 +167,6 @@
 $(call add_json_list, BootJars,                          $(PRODUCT_BOOT_JARS))
 $(call add_json_list, ApexBootJars,                      $(filter-out $(APEX_BOOT_JARS_EXCLUDED), $(PRODUCT_APEX_BOOT_JARS)))
 
-$(call add_json_bool, VndkSnapshotBuildArtifacts,        $(VNDK_SNAPSHOT_BUILD_ARTIFACTS))
-
 $(call add_json_map,  BuildFlags)
 $(foreach flag,$(_ALL_RELEASE_FLAGS),\
   $(call add_json_str,$(flag),$(_ALL_RELEASE_FLAGS.$(flag).VALUE)))
@@ -178,24 +176,6 @@
   $(call add_json_str,$(flag),$(_ALL_RELEASE_FLAGS.$(flag).TYPE)))
 $(call end_json_map)
 
-$(call add_json_bool, DirectedVendorSnapshot,            $(DIRECTED_VENDOR_SNAPSHOT))
-$(call add_json_map,  VendorSnapshotModules)
-$(foreach module,$(VENDOR_SNAPSHOT_MODULES),\
-  $(call add_json_bool,$(module),true))
-$(call end_json_map)
-
-$(call add_json_bool, DirectedRecoverySnapshot,          $(DIRECTED_RECOVERY_SNAPSHOT))
-$(call add_json_map,  RecoverySnapshotModules)
-$(foreach module,$(RECOVERY_SNAPSHOT_MODULES),\
-  $(call add_json_bool,$(module),true))
-$(call end_json_map)
-
-$(call add_json_list, VendorSnapshotDirsIncluded,        $(VENDOR_SNAPSHOT_DIRS_INCLUDED))
-$(call add_json_list, VendorSnapshotDirsExcluded,        $(VENDOR_SNAPSHOT_DIRS_EXCLUDED))
-$(call add_json_list, RecoverySnapshotDirsIncluded,      $(RECOVERY_SNAPSHOT_DIRS_INCLUDED))
-$(call add_json_list, RecoverySnapshotDirsExcluded,      $(RECOVERY_SNAPSHOT_DIRS_EXCLUDED))
-$(call add_json_bool, HostFakeSnapshotEnabled,           $(HOST_FAKE_SNAPSHOT_ENABLE))
-
 $(call add_json_bool, MultitreeUpdateMeta,               $(filter true,$(TARGET_MULTITREE_UPDATE_META)))
 
 $(call add_json_bool, Treble_linker_namespaces,          $(filter true,$(PRODUCT_TREBLE_LINKER_NAMESPACES)))
@@ -203,9 +183,16 @@
 
 $(call add_json_bool, Uml,                               $(filter true,$(TARGET_USER_MODE_LINUX)))
 $(call add_json_str,  VendorPath,                        $(TARGET_COPY_OUT_VENDOR))
+$(call add_json_str,  VendorDlkmPath,                    $(TARGET_COPY_OUT_VENDOR_DLKM))
+$(call add_json_bool, BuildingVendorImage,               $(BUILDING_VENDOR_IMAGE))
 $(call add_json_str,  OdmPath,                           $(TARGET_COPY_OUT_ODM))
+$(call add_json_bool, BuildingOdmImage,                  $(BUILDING_ODM_IMAGE))
+$(call add_json_str,  OdmDlkmPath,                       $(TARGET_COPY_OUT_ODM_DLKM))
 $(call add_json_str,  ProductPath,                       $(TARGET_COPY_OUT_PRODUCT))
+$(call add_json_bool, BuildingProductImage,              $(BUILDING_PRODUCT_IMAGE))
 $(call add_json_str,  SystemExtPath,                     $(TARGET_COPY_OUT_SYSTEM_EXT))
+$(call add_json_str,  SystemDlkmPath,                    $(TARGET_COPY_OUT_SYSTEM_DLKM))
+$(call add_json_str,  OemPath,                           $(TARGET_COPY_OUT_OEM))
 $(call add_json_bool, MinimizeJavaDebugInfo,             $(filter true,$(PRODUCT_MINIMIZE_JAVA_DEBUG_INFO)))
 
 $(call add_json_bool, UseGoma,                           $(filter-out false,$(USE_GOMA)))
@@ -227,6 +214,7 @@
 $(call add_json_str,  BoardSepolicyVers,                 $(BOARD_SEPOLICY_VERS))
 $(call add_json_str,  SystemExtSepolicyPrebuiltApiDir,   $(BOARD_SYSTEM_EXT_PREBUILT_DIR))
 $(call add_json_str,  ProductSepolicyPrebuiltApiDir,     $(BOARD_PRODUCT_PREBUILT_DIR))
+$(call add_json_str,  BoardPlatform,                     $(TARGET_BOARD_PLATFORM))
 
 $(call add_json_str,  PlatformSepolicyVersion,           $(PLATFORM_SEPOLICY_VERSION))
 $(call add_json_list, PlatformSepolicyCompatVersions,    $(PLATFORM_SEPOLICY_COMPAT_VERSIONS))
@@ -256,6 +244,12 @@
 
 $(call add_json_list, TargetFSConfigGen,                 $(TARGET_FS_CONFIG_GEN))
 
+# USE_SOONG_DEFINED_SYSTEM_IMAGE controls whether the system image specified by
+# PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE is actually used, but PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE is
+# still passed regardless, so that installed files can be compared between make and soong.
+$(call add_json_bool, UseSoongSystemImage,               $(filter true,$(USE_SOONG_DEFINED_SYSTEM_IMAGE)))
+$(call add_json_str,  ProductSoongDefinedSystemImage,    $(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE))
+
 $(call add_json_map, VendorVars)
 $(foreach namespace,$(sort $(SOONG_CONFIG_NAMESPACES)),\
   $(call add_json_map, $(namespace))\
@@ -279,14 +273,8 @@
 $(call add_json_bool, EnforceProductPartitionInterface,  $(filter true,$(PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE)))
 $(call add_json_str,  DeviceCurrentApiLevelForVendorModules,  $(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES))
 
-$(call add_json_bool, EnforceInterPartitionJavaSdkLibrary, $(filter true,$(PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY)))
-$(call add_json_list, InterPartitionJavaLibraryAllowList, $(PRODUCT_INTER_PARTITION_JAVA_LIBRARY_ALLOWLIST))
-
 $(call add_json_bool, CompressedApex, $(filter true,$(PRODUCT_COMPRESSED_APEX)))
-
-ifndef APEX_BUILD_FOR_PRE_S_DEVICES
-$(call add_json_bool, TrimmedApex, $(filter true,$(PRODUCT_TRIMMED_APEX)))
-endif
+$(call add_json_str, DefaultApexPayloadType, $(PRODUCT_DEFAULT_APEX_PAYLOAD_TYPE))
 
 $(call add_json_bool, BoardUsesRecoveryAsBoot, $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
 
@@ -306,7 +294,6 @@
 $(call add_json_bool, GenruleSandboxing,                   $(if $(GENRULE_SANDBOXING),$(filter true,$(GENRULE_SANDBOXING)),$(if $(filter true,$(BUILD_BROKEN_GENRULE_SANDBOXING)),,true)))
 $(call add_json_bool, BuildBrokenEnforceSyspropOwner,      $(filter true,$(BUILD_BROKEN_ENFORCE_SYSPROP_OWNER)))
 $(call add_json_bool, BuildBrokenTrebleSyspropNeverallow,  $(filter true,$(BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW)))
-$(call add_json_bool, BuildBrokenUsesSoongPython2Modules,  $(filter true,$(BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES)))
 $(call add_json_bool, BuildBrokenVendorPropertyNamespace,  $(filter true,$(BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE)))
 $(call add_json_bool, BuildBrokenIncorrectPartitionImages, $(filter true,$(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES)))
 $(call add_json_list, BuildBrokenInputDirModules,          $(BUILD_BROKEN_INPUT_DIR_MODULES))
@@ -334,6 +321,8 @@
 
 $(call add_json_str,  ProductManufacturer, $(PRODUCT_MANUFACTURER))
 $(call add_json_str,  ProductBrand,        $(PRODUCT_BRAND))
+$(call add_json_str,  ProductDevice,       $(PRODUCT_DEVICE))
+$(call add_json_str,  ProductModel,        $(PRODUCT_MODEL))
 
 $(call add_json_str, ReleaseVersion,    $(_RELEASE_VERSION))
 $(call add_json_list, ReleaseAconfigValueSets,    $(RELEASE_ACONFIG_VALUE_SETS))
@@ -361,6 +350,166 @@
 
 $(call add_json_list, OemProperties, $(PRODUCT_OEM_PROPERTIES))
 
+$(call add_json_list, SystemPropFiles, $(TARGET_SYSTEM_PROP))
+$(call add_json_list, SystemExtPropFiles, $(TARGET_SYSTEM_EXT_PROP))
+$(call add_json_list, ProductPropFiles, $(TARGET_PRODUCT_PROP))
+$(call add_json_list, OdmPropFiles, $(TARGET_ODM_PROP))
+$(call add_json_list, VendorPropFiles, $(TARGET_VENDOR_PROP))
+
+$(call add_json_str, ExtraAllowedDepsTxt, $(EXTRA_ALLOWED_DEPS_TXT))
+
+# Do not set ArtTargetIncludeDebugBuild to any value if PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD is not set,
+# so that the behavior matches runtime_libart.mk.
+ifneq ($(PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD),)
+$(call add_json_bool, ArtTargetIncludeDebugBuild, $(PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD))
+endif
+
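+# For illustration (values hypothetical): with
+#   OVERRIDE_ENABLE_UFFD_GC := true
+#   PRODUCT_ENABLE_UFFD_GC := false
+# the $(firstword ...) below picks "true"; when neither variable is set it
+# falls back to "default".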
+_config_enable_uffd_gc := \
+  $(firstword $(OVERRIDE_ENABLE_UFFD_GC) $(PRODUCT_ENABLE_UFFD_GC) default)
+$(call add_json_str, EnableUffdGc, $(_config_enable_uffd_gc))
+_config_enable_uffd_gc :=
+
+$(call add_json_list, DeviceFrameworkCompatibilityMatrixFile, $(DEVICE_FRAMEWORK_COMPATIBILITY_MATRIX_FILE))
+$(call add_json_list, DeviceProductCompatibilityMatrixFile, $(DEVICE_PRODUCT_COMPATIBILITY_MATRIX_FILE))
+$(call add_json_list, BoardAvbSystemAddHashtreeFooterArgs, $(BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS))
+$(call add_json_bool, BoardAvbEnable, $(filter true,$(BOARD_AVB_ENABLE)))
+
+$(call add_json_str, AdbKeys, $(PRODUCT_ADB_KEYS))
+
+$(call add_json_map, PartitionVarsForSoongMigrationOnlyDoNotUse)
+  $(call add_json_str,  ProductDirectory,    $(dir $(INTERNAL_PRODUCT)))
+
+  $(call add_json_map,PartitionQualifiedVariables)
+  $(foreach image_type,SYSTEM VENDOR CACHE USERDATA PRODUCT SYSTEM_EXT OEM ODM VENDOR_DLKM ODM_DLKM SYSTEM_DLKM, \
+    $(call add_json_map,$(call to-lower,$(image_type))) \
+    $(call add_json_bool, BuildingImage, $(filter true,$(BUILDING_$(image_type)_IMAGE))) \
+    $(call add_json_str, BoardErofsCompressor, $(BOARD_$(image_type)IMAGE_EROFS_COMPRESSOR)) \
+    $(call add_json_str, BoardErofsCompressHints, $(BOARD_$(image_type)IMAGE_EROFS_COMPRESS_HINTS)) \
+    $(call add_json_str, BoardErofsPclusterSize, $(BOARD_$(image_type)IMAGE_EROFS_PCLUSTER_SIZE)) \
+    $(call add_json_str, BoardExtfsInodeCount, $(BOARD_$(image_type)IMAGE_EXTFS_INODE_COUNT)) \
+    $(call add_json_str, BoardExtfsRsvPct, $(BOARD_$(image_type)IMAGE_EXTFS_RSV_PCT)) \
+    $(call add_json_str, BoardF2fsSloadCompressFlags, $(BOARD_$(image_type)IMAGE_F2FS_SLOAD_COMPRESS_FLAGS)) \
+    $(call add_json_str, BoardFileSystemCompress, $(BOARD_$(image_type)IMAGE_FILE_SYSTEM_COMPRESS)) \
+    $(call add_json_str, BoardFileSystemType, $(BOARD_$(image_type)IMAGE_FILE_SYSTEM_TYPE)) \
+    $(call add_json_str, BoardJournalSize, $(BOARD_$(image_type)IMAGE_JOURNAL_SIZE)) \
+    $(call add_json_str, BoardPartitionReservedSize, $(BOARD_$(image_type)IMAGE_PARTITION_RESERVED_SIZE)) \
+    $(call add_json_str, BoardPartitionSize, $(BOARD_$(image_type)IMAGE_PARTITION_SIZE)) \
+    $(call add_json_str, BoardSquashfsBlockSize, $(BOARD_$(image_type)IMAGE_SQUASHFS_BLOCK_SIZE)) \
+    $(call add_json_str, BoardSquashfsCompressor, $(BOARD_$(image_type)IMAGE_SQUASHFS_COMPRESSOR)) \
+    $(call add_json_str, BoardSquashfsCompressorOpt, $(BOARD_$(image_type)IMAGE_SQUASHFS_COMPRESSOR_OPT)) \
+    $(call add_json_str, BoardSquashfsDisable4kAlign, $(BOARD_$(image_type)IMAGE_SQUASHFS_DISABLE_4K_ALIGN)) \
+    $(call add_json_str, BoardAvbKeyPath, $(BOARD_AVB_$(image_type)_KEY_PATH)) \
+    $(call add_json_str, BoardAvbAlgorithm, $(BOARD_AVB_$(image_type)_ALGORITHM)) \
+    $(call add_json_str, BoardAvbRollbackIndex, $(BOARD_AVB_$(image_type)_ROLLBACK_INDEX)) \
+    $(call add_json_str, BoardAvbRollbackIndexLocation, $(BOARD_AVB_$(image_type)_ROLLBACK_INDEX_LOCATION)) \
+    $(call add_json_str, ProductBaseFsPath, $(PRODUCT_$(image_type)_BASE_FS_PATH)) \
+    $(call add_json_str, ProductHeadroom, $(PRODUCT_$(image_type)_HEADROOM)) \
+    $(call add_json_str, ProductVerityPartition, $(PRODUCT_$(image_type)_VERITY_PARTITION)) \
+    $(call end_json_map) \
+  )
+  $(call end_json_map)
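+  # For illustration, the loop above picks up per-partition board variables
+  # such as (values hypothetical):
+  #   BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
+  #   BOARD_VENDORIMAGE_PARTITION_SIZE := 536870912
+  #   BOARD_AVB_VENDOR_KEY_PATH := device/acme/board/avb_vendor.pem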
+
+  $(call add_json_bool, TargetUserimagesUseExt2, $(filter true,$(TARGET_USERIMAGES_USE_EXT2)))
+  $(call add_json_bool, TargetUserimagesUseExt3, $(filter true,$(TARGET_USERIMAGES_USE_EXT3)))
+  $(call add_json_bool, TargetUserimagesUseExt4, $(filter true,$(TARGET_USERIMAGES_USE_EXT4)))
+
+  $(call add_json_bool, TargetUserimagesSparseExtDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED)))
+  $(call add_json_bool, TargetUserimagesSparseErofsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_EROFS_DISABLED)))
+  $(call add_json_bool, TargetUserimagesSparseSquashfsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED)))
+  $(call add_json_bool, TargetUserimagesSparseF2fsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_F2FS_DISABLED)))
+
+  $(call add_json_str, BoardErofsCompressor, $(BOARD_EROFS_COMPRESSOR))
+  $(call add_json_str, BoardErofsCompressorHints, $(BOARD_EROFS_COMPRESS_HINTS))
+  $(call add_json_str, BoardErofsPclusterSize, $(BOARD_EROFS_PCLUSTER_SIZE))
+  $(call add_json_str, BoardErofsShareDupBlocks, $(BOARD_EROFS_SHARE_DUP_BLOCKS))
+  $(call add_json_str, BoardErofsUseLegacyCompression, $(BOARD_EROFS_USE_LEGACY_COMPRESSION))
+  $(call add_json_str, BoardExt4ShareDupBlocks, $(BOARD_EXT4_SHARE_DUP_BLOCKS))
+  $(call add_json_str, BoardFlashLogicalBlockSize, $(BOARD_FLASH_LOGICAL_BLOCK_SIZE))
+  $(call add_json_str, BoardFlashEraseBlockSize, $(BOARD_FLASH_ERASE_BLOCK_SIZE))
+  $(call add_json_bool, BuildingVbmetaImage, $(BUILDING_VBMETA_IMAGE))
+
+  # boot image stuff
+  $(call add_json_bool, BuildingRamdiskImage, $(filter true,$(BUILDING_RAMDISK_IMAGE)))
+  $(call add_json_bool, ProductBuildBootImage, $(filter true,$(PRODUCT_BUILD_BOOT_IMAGE)))
+  $(call add_json_str, ProductBuildVendorBootImage, $(PRODUCT_BUILD_VENDOR_BOOT_IMAGE))
+  $(call add_json_bool, ProductBuildInitBootImage, $(filter true,$(PRODUCT_BUILD_INIT_BOOT_IMAGE)))
+  $(call add_json_bool, BoardUsesRecoveryAsBoot, $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
+  $(call add_json_str, BoardPrebuiltBootimage, $(BOARD_PREBUILT_BOOT_IMAGE))
+  $(call add_json_str, BoardPrebuiltInitBootimage, $(BOARD_PREBUILT_INIT_BOOT_IMAGE))
+  $(call add_json_str, BoardBootimagePartitionSize, $(BOARD_BOOTIMAGE_PARTITION_SIZE))
+  $(call add_json_str, BoardInitBootimagePartitionSize, $(BOARD_INIT_BOOT_IMAGE_PARTITION_SIZE))
+  $(call add_json_str, BoardBootHeaderVersion, $(BOARD_BOOT_HEADER_VERSION))
+  $(call add_json_str, TargetKernelPath, $(TARGET_KERNEL_PATH))
+  $(call add_json_bool, BoardUsesGenericKernelImage, $(BOARD_USES_GENERIC_KERNEL_IMAGE))
+
+  # Avb (android verified boot) stuff
+  $(call add_json_bool, BoardAvbEnable, $(filter true,$(BOARD_AVB_ENABLE)))
+  $(call add_json_str, BoardAvbAlgorithm, $(BOARD_AVB_ALGORITHM))
+  $(call add_json_str, BoardAvbKeyPath, $(BOARD_AVB_KEY_PATH))
+  $(call add_json_str, BoardAvbRollbackIndex, $(BOARD_AVB_ROLLBACK_INDEX))
+  $(call add_json_map, ChainedVbmetaPartitions)
+  $(foreach partition,system vendor $(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS),\
+    $(call add_json_map, $(partition)) \
+      $(call add_json_list,Partitions,$(BOARD_AVB_VBMETA_$(call to-upper,$(partition)))) \
+      $(call add_json_str,Key,$(BOARD_AVB_VBMETA_$(call to-upper,$(partition))_KEY_PATH)) \
+      $(call add_json_str,Algorithm,$(BOARD_AVB_VBMETA_$(call to-upper,$(partition))_ALGORITHM)) \
+      $(call add_json_str,RollbackIndex,$(BOARD_AVB_VBMETA_$(call to-upper,$(partition))_ROLLBACK_INDEX)) \
+      $(call add_json_str,RollbackIndexLocation,$(BOARD_AVB_VBMETA_$(call to-upper,$(partition))_ROLLBACK_INDEX_LOCATION)) \
+    $(call end_json_map))
+  $(call end_json_map)
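+  # For illustration, a board chaining vbmeta_system might set (key path and
+  # algorithm hypothetical):
+  #   BOARD_AVB_VBMETA_SYSTEM := system system_ext product
+  #   BOARD_AVB_VBMETA_SYSTEM_KEY_PATH := device/acme/board/avb_vbmeta_system.pem
+  #   BOARD_AVB_VBMETA_SYSTEM_ALGORITHM := SHA256_RSA4096
+  #   BOARD_AVB_VBMETA_SYSTEM_ROLLBACK_INDEX_LOCATION := 1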
+
+  $(call add_json_bool, ProductUseDynamicPartitionSize, $(filter true,$(PRODUCT_USE_DYNAMIC_PARTITION_SIZE)))
+  $(call add_json_bool, CopyImagesForTargetFilesZip, $(filter true,$(COPY_IMAGES_FOR_TARGET_FILES_ZIP)))
+
+  $(call add_json_list, ProductPackages, $(PRODUCT_PACKAGES))
+  $(call add_json_list, ProductPackagesDebug, $(PRODUCT_PACKAGES_DEBUG))
+
+  # Used to generate /vendor/linker.config.pb
+  $(call add_json_list, VendorLinkerConfigSrcs, $(PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS))
+  $(call add_json_list, ProductLinkerConfigSrcs, $(PRODUCT_PRODUCT_LINKER_CONFIG_FRAGMENTS))
+
+  # Used to generate _dlkm partitions
+  $(call add_json_bool, BuildingSystemDlkmImage,               $(BUILDING_SYSTEM_DLKM_IMAGE))
+  $(call add_json_list, SystemKernelModules, $(BOARD_SYSTEM_KERNEL_MODULES))
+  $(call add_json_str, SystemKernelBlocklistFile, $(BOARD_SYSTEM_KERNEL_MODULES_BLOCKLIST_FILE))
+  $(call add_json_list, SystemKernelLoadModules, $(BOARD_SYSTEM_KERNEL_MODULES_LOAD))
+  $(call add_json_bool, BuildingVendorDlkmImage,               $(BUILDING_VENDOR_DLKM_IMAGE))
+  $(call add_json_list, VendorKernelModules, $(BOARD_VENDOR_KERNEL_MODULES))
+  $(call add_json_str, VendorKernelBlocklistFile, $(BOARD_VENDOR_KERNEL_MODULES_BLOCKLIST_FILE))
+  $(call add_json_bool, BuildingOdmDlkmImage,               $(BUILDING_ODM_DLKM_IMAGE))
+  $(call add_json_list, OdmKernelModules, $(BOARD_ODM_KERNEL_MODULES))
+  $(call add_json_str, OdmKernelBlocklistFile, $(BOARD_ODM_KERNEL_MODULES_BLOCKLIST_FILE))
+
+  # Used to generate /vendor/build.prop
+  $(call add_json_list, BoardInfoFiles, $(if $(TARGET_BOARD_INFO_FILES),$(TARGET_BOARD_INFO_FILES),$(firstword $(TARGET_BOARD_INFO_FILE) $(wildcard $(TARGET_DEVICE_DIR)/board-info.txt))))
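+  # For illustration: TARGET_BOARD_INFO_FILES wins if set; otherwise the build
+  # falls back to TARGET_BOARD_INFO_FILE, then to $(TARGET_DEVICE_DIR)/board-info.txt
+  # if that file exists.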
+  $(call add_json_str, BootLoaderBoardName, $(TARGET_BOOTLOADER_BOARD_NAME))
+
+  $(call add_json_list, ProductCopyFiles, $(PRODUCT_COPY_FILES))
+
+$(call end_json_map)
+
+# For converting vintf_data
+$(call add_json_list, DeviceMatrixFile, $(DEVICE_MATRIX_FILE))
+$(call add_json_list, ProductManifestFiles, $(PRODUCT_MANIFEST_FILES))
+$(call add_json_list, SystemManifestFile, $(DEVICE_FRAMEWORK_MANIFEST_FILE))
+SYSTEM_EXT_HWSERVICE_FILES :=
+ifeq ($(PRODUCT_HIDL_ENABLED),true)
+  ifneq ($(filter hwservicemanager,$(PRODUCT_PACKAGES)),)
+    SYSTEM_EXT_HWSERVICE_FILES += system/hwservicemanager/hwservicemanager_no_max.xml
+  else
+    $(error If PRODUCT_HIDL_ENABLED is set, hwservicemanager must be added to PRODUCT_PACKAGES explicitly)
+  endif
+else
+  ifneq ($(filter hwservicemanager,$(PRODUCT_PACKAGES)),)
+    SYSTEM_EXT_HWSERVICE_FILES += system/hwservicemanager/hwservicemanager.xml
+  else ifneq ($(filter hwservicemanager,$(PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34)),)
+    SYSTEM_EXT_HWSERVICE_FILES += system/hwservicemanager/hwservicemanager.xml
+  endif
+endif
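+# For illustration, a product that still ships HIDL HALs would opt in with
+# something like:
+#   PRODUCT_HIDL_ENABLED := true
+#   PRODUCT_PACKAGES += hwservicemanager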
+$(call add_json_list, SystemExtManifestFiles, $(SYSTEM_EXT_MANIFEST_FILES) $(SYSTEM_EXT_HWSERVICE_FILES))
+$(call add_json_list, DeviceManifestFiles, $(DEVICE_MANIFEST_FILE))
+$(call add_json_list, OdmManifestFiles, $(ODM_MANIFEST_FILES))
+
 $(call json_end)
 
 $(file >$(SOONG_VARIABLES).tmp,$(json_contents))
diff --git a/core/soong_extra_config.mk b/core/soong_extra_config.mk
index e4432d2..2ff83a1 100644
--- a/core/soong_extra_config.mk
+++ b/core/soong_extra_config.mk
@@ -43,16 +43,12 @@
 $(call add_json_list, PRODUCT_PRODUCT_PROPERTIES,        $(call collapse-prop-pairs,PRODUCT_PRODUCT_PROPERTIES))
 $(call add_json_list, PRODUCT_ODM_PROPERTIES,            $(call collapse-prop-pairs,PRODUCT_ODM_PROPERTIES))
 $(call add_json_list, PRODUCT_PROPERTY_OVERRIDES,        $(call collapse-prop-pairs,PRODUCT_PROPERTY_OVERRIDES))
+$(call add_json_list, PRODUCT_DEFAULT_PROPERTY_OVERRIDES,        $(call collapse-prop-pairs,PRODUCT_DEFAULT_PROPERTY_OVERRIDES))
 
 $(call add_json_str, BootloaderBoardName, $(TARGET_BOOTLOADER_BOARD_NAME))
 
 $(call add_json_bool, SdkBuild, $(filter sdk sdk_addon,$(MAKECMDGOALS)))
 
-_config_enable_uffd_gc := \
-  $(firstword $(OVERRIDE_ENABLE_UFFD_GC) $(PRODUCT_ENABLE_UFFD_GC) default)
-$(call add_json_str, EnableUffdGc, $(_config_enable_uffd_gc))
-_config_enable_uffd_gc :=
-
 $(call add_json_str, SystemServerCompilerFilter, $(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER))
 
 $(call add_json_bool, Product16KDeveloperOption, $(filter true,$(PRODUCT_16K_DEVELOPER_OPTION)))
@@ -93,6 +89,12 @@
 
 $(call add_json_list, BuildVersionTags, $(BUILD_VERSION_TAGS))
 
+$(call add_json_bool, ProductNotDebuggableInUserdebug, $(PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG))
+
+$(call add_json_bool, UsesProductImage, $(filter true,$(BOARD_USES_PRODUCTIMAGE)))
+
+$(call add_json_bool, TargetBoots16K, $(filter true,$(TARGET_BOOTS_16K)))
+
 $(call json_end)
 
 $(shell mkdir -p $(dir $(SOONG_EXTRA_VARIABLES)))
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index 7f85231..8c3882f 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -21,19 +21,6 @@
 full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar
 common_javalib.jar := $(intermediates.COMMON)/javalib.jar
 
-ifdef LOCAL_SOONG_AAR
-  LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_AAR)
-endif
-
-# Use the Soong output as the checkbuild target instead of LOCAL_BUILT_MODULE
-# to avoid checkbuilds making an extra copy of every module.
-LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_HEADER_JAR)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_FULL_MANIFEST_FILE)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_DEXPREOPT_CONFIG)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE)
-LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_DEX_JAR)
-
 #######################################
 include $(BUILD_SYSTEM)/base_rules.mk
 #######################################
@@ -115,16 +102,14 @@
     boot_jars := $(foreach pair,$(PRODUCT_BOOT_JARS), $(call word-colon,2,$(pair)))
     ifneq ($(filter $(LOCAL_MODULE),$(boot_jars)),) # is_boot_jar
       ifeq (true,$(WITH_DEXPREOPT))
-        # $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE) contains modules that installs
-        # all of bootjars' dexpreopt files (.art, .oat, .vdex, ...)
+        # The dex_bootjars singleton installs all of the boot jars' dexpreopt files (.art, .oat, .vdex, ...)
+        # for both the primary and secondary arches.
         # Add them to the required list so they are installed alongside this module.
-        ALL_MODULES.$(my_register_name).REQUIRED_FROM_TARGET += \
-          $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE) \
-          $(2ND_DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE)
+        ALL_MODULES.$(my_register_name).REQUIRED_FROM_TARGET += dex_bootjars
         # Copy $(LOCAL_BUILT_MODULE) and its dependencies when installing boot.art
         # so that dependencies of $(LOCAL_BUILT_MODULE) (which may include
         # jacoco-report-classes.jar) are copied for every build.
-        $(foreach m,$(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE) $(2ND_DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE), \
+        $(foreach m,dex_bootjars, \
           $(eval $(call add-dependency,$(firstword $(call module-installed-files,$(m))),$(LOCAL_BUILT_MODULE))) \
         )
       endif
diff --git a/core/sysprop.mk b/core/sysprop.mk
index 47d8a41..255b699 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -33,34 +33,26 @@
     echo "# from generate-common-build-props" >> $(2);\
     echo "# These properties identify this partition image." >> $(2);\
     echo "####################################" >> $(2);\
-    $(if $(filter system,$(1)),\
-        echo "ro.product.$(1).brand=$(PRODUCT_SYSTEM_BRAND)" >> $(2);\
-        echo "ro.product.$(1).device=$(PRODUCT_SYSTEM_DEVICE)" >> $(2);\
-        echo "ro.product.$(1).manufacturer=$(PRODUCT_SYSTEM_MANUFACTURER)" >> $(2);\
-        echo "ro.product.$(1).model=$(PRODUCT_SYSTEM_MODEL)" >> $(2);\
-        echo "ro.product.$(1).name=$(PRODUCT_SYSTEM_NAME)" >> $(2);\
-      ,\
-        echo "ro.product.$(1).brand=$(PRODUCT_BRAND)" >> $(2);\
-        echo "ro.product.$(1).device=$(TARGET_DEVICE)" >> $(2);\
-        echo "ro.product.$(1).manufacturer=$(PRODUCT_MANUFACTURER)" >> $(2);\
-        echo "ro.product.$(1).model=$(PRODUCT_MODEL)" >> $(2);\
-        echo "ro.product.$(1).name=$(TARGET_PRODUCT)" >> $(2);\
-        if [ -n "$(strip $(PRODUCT_MODEL_FOR_ATTESTATION))" ]; then \
-            echo "ro.product.model_for_attestation=$(PRODUCT_MODEL_FOR_ATTESTATION)" >> $(2);\
-        fi; \
-        if [ -n "$(strip $(PRODUCT_BRAND_FOR_ATTESTATION))" ]; then \
-            echo "ro.product.brand_for_attestation=$(PRODUCT_BRAND_FOR_ATTESTATION)" >> $(2);\
-        fi; \
-        if [ -n "$(strip $(PRODUCT_NAME_FOR_ATTESTATION))" ]; then \
-            echo "ro.product.name_for_attestation=$(PRODUCT_NAME_FOR_ATTESTATION)" >> $(2);\
-        fi; \
-        if [ -n "$(strip $(PRODUCT_DEVICE_FOR_ATTESTATION))" ]; then \
-            echo "ro.product.device_for_attestation=$(PRODUCT_DEVICE_FOR_ATTESTATION)" >> $(2);\
-        fi; \
-        if [ -n "$(strip $(PRODUCT_MANUFACTURER_FOR_ATTESTATION))" ]; then \
-            echo "ro.product.manufacturer_for_attestation=$(PRODUCT_MANUFACTURER_FOR_ATTESTATION)" >> $(2);\
-        fi; \
-    )\
+    echo "ro.product.$(1).brand=$(PRODUCT_BRAND)" >> $(2);\
+    echo "ro.product.$(1).device=$(TARGET_DEVICE)" >> $(2);\
+    echo "ro.product.$(1).manufacturer=$(PRODUCT_MANUFACTURER)" >> $(2);\
+    echo "ro.product.$(1).model=$(PRODUCT_MODEL)" >> $(2);\
+    echo "ro.product.$(1).name=$(TARGET_PRODUCT)" >> $(2);\
+    if [ -n "$(strip $(PRODUCT_MODEL_FOR_ATTESTATION))" ]; then \
+        echo "ro.product.model_for_attestation=$(PRODUCT_MODEL_FOR_ATTESTATION)" >> $(2);\
+    fi; \
+    if [ -n "$(strip $(PRODUCT_BRAND_FOR_ATTESTATION))" ]; then \
+        echo "ro.product.brand_for_attestation=$(PRODUCT_BRAND_FOR_ATTESTATION)" >> $(2);\
+    fi; \
+    if [ -n "$(strip $(PRODUCT_NAME_FOR_ATTESTATION))" ]; then \
+        echo "ro.product.name_for_attestation=$(PRODUCT_NAME_FOR_ATTESTATION)" >> $(2);\
+    fi; \
+    if [ -n "$(strip $(PRODUCT_DEVICE_FOR_ATTESTATION))" ]; then \
+        echo "ro.product.device_for_attestation=$(PRODUCT_DEVICE_FOR_ATTESTATION)" >> $(2);\
+    fi; \
+    if [ -n "$(strip $(PRODUCT_MANUFACTURER_FOR_ATTESTATION))" ]; then \
+        echo "ro.product.manufacturer_for_attestation=$(PRODUCT_MANUFACTURER_FOR_ATTESTATION)" >> $(2);\
+    fi; \
     $(if $(filter true,$(ZYGOTE_FORCE_64)),\
         $(if $(filter vendor,$(1)),\
             echo "ro.$(1).product.cpu.abilist=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\
@@ -226,50 +218,11 @@
 # -----------------------------------------------------------------
 # system/build.prop
 #
-# Note: parts of this file that can't be generated by the build-properties
-# macro are manually created as separate files and then fed into the macro
-
-buildinfo_prop := $(call intermediates-dir-for,ETC,buildinfo.prop)/buildinfo.prop
-
-ifdef TARGET_SYSTEM_PROP
-system_prop_file := $(TARGET_SYSTEM_PROP)
-else
-system_prop_file := $(wildcard $(TARGET_DEVICE_DIR)/system.prop)
-endif
-
-_prop_files_ := \
-  $(buildinfo_prop) \
-  $(system_prop_file)
-
-# Order matters here. When there are duplicates, the last one wins.
-# TODO(b/117892318): don't allow duplicates so that the ordering doesn't matter
-_prop_vars_ := \
-    ADDITIONAL_SYSTEM_PROPERTIES \
-    PRODUCT_SYSTEM_PROPERTIES
-
-# TODO(b/117892318): deprecate this
-_prop_vars_ += \
-    PRODUCT_SYSTEM_DEFAULT_PROPERTIES
-
-ifndef property_overrides_split_enabled
-_prop_vars_ += \
-    ADDITIONAL_VENDOR_PROPERTIES \
-    PRODUCT_VENDOR_PROPERTIES
-endif
+# system/build.prop is built by Soong. See system-build.prop module in
+# build/soong/Android.bp.
 
 INSTALLED_BUILD_PROP_TARGET := $(TARGET_OUT)/build.prop
 
-$(eval $(call build-properties,\
-    system,\
-    $(INSTALLED_BUILD_PROP_TARGET),\
-    $(_prop_files_),\
-    $(_prop_vars_),\
-    $(PRODUCT_SYSTEM_PROPERTY_BLACKLIST),\
-    $(empty),\
-    $(empty)))
-
-$(eval $(call declare-1p-target,$(INSTALLED_BUILD_PROP_TARGET)))
-
 # -----------------------------------------------------------------
 # vendor/build.prop
 #
@@ -313,171 +266,47 @@
 # -----------------------------------------------------------------
 # product/etc/build.prop
 #
-
-_prop_files_ := $(if $(TARGET_PRODUCT_PROP),\
-    $(TARGET_PRODUCT_PROP),\
-    $(wildcard $(TARGET_DEVICE_DIR)/product.prop))
-
-# Order matters here. When there are duplicates, the last one wins.
-# TODO(b/117892318): don't allow duplicates so that the ordering doesn't matter
-_prop_vars_ := \
-    ADDITIONAL_PRODUCT_PROPERTIES \
-    PRODUCT_PRODUCT_PROPERTIES
+# product/etc/build.prop is built by Soong. See product-build.prop module in
+# build/soong/Android.bp.
 
 INSTALLED_PRODUCT_BUILD_PROP_TARGET := $(TARGET_OUT_PRODUCT)/etc/build.prop
 
-ifdef PRODUCT_OEM_PROPERTIES
-import_oem_prop := $(call intermediates-dir-for,ETC,import_oem_prop)/oem.prop
-
-$(import_oem_prop):
-	$(hide) echo "####################################" >> $@; \
-	        echo "# PRODUCT_OEM_PROPERTIES" >> $@; \
-	        echo "####################################" >> $@;
-	$(hide) $(foreach prop,$(PRODUCT_OEM_PROPERTIES), \
-	    echo "import /oem/oem.prop $(prop)" >> $@;)
-
-_footers_ := $(import_oem_prop)
-else
-_footers_ :=
-endif
-
-# Skip common /product properties generation if device released before R and
-# has no product partition. This is the first part of the check.
-ifeq ($(call math_lt,$(if $(PRODUCT_SHIPPING_API_LEVEL),$(PRODUCT_SHIPPING_API_LEVEL),30),30), true)
-  _skip_common_properties := true
-endif
-
-# The second part of the check - always generate common properties for the
-# devices with product partition regardless of shipping level.
-ifneq ($(BOARD_USES_PRODUCTIMAGE),)
-  _skip_common_properties :=
-endif
-
-$(eval $(call build-properties,\
-    product,\
-    $(INSTALLED_PRODUCT_BUILD_PROP_TARGET),\
-    $(_prop_files_),\
-    $(_prop_vars_),\
-    $(empty),\
-    $(_footers_),\
-    $(_skip_common_properties)))
-
-$(eval $(call declare-1p-target,$(INSTALLED_PRODUCT_BUILD_PROP_TARGET)))
-
-_skip_common_properties :=
-
 # ----------------------------------------------------------------
 # odm/etc/build.prop
 #
-_prop_files_ := $(if $(TARGET_ODM_PROP),\
-    $(TARGET_ODM_PROP),\
-    $(wildcard $(TARGET_DEVICE_DIR)/odm.prop))
-
-# Order matters here. When there are duplicates, the last one wins.
-# TODO(b/117892318): don't allow duplicates so that the ordering doesn't matter
-_prop_vars_ := \
-    ADDITIONAL_ODM_PROPERTIES \
-    PRODUCT_ODM_PROPERTIES
+# odm/etc/build.prop is built by Soong. See odm-build.prop module in
+# build/soong/Android.bp.
 
 INSTALLED_ODM_BUILD_PROP_TARGET := $(TARGET_OUT_ODM)/etc/build.prop
-$(eval $(call build-properties,\
-    odm,\
-    $(INSTALLED_ODM_BUILD_PROP_TARGET),\
-    $(_prop_files_),\
-    $(_prop_vars_),\
-    $(empty),\
-    $(empty),\
-    $(empty)))
-
-$(eval $(call declare-1p-target,$(INSTALLED_ODM_BUILD_PROP_TARGET)))
 
 # ----------------------------------------------------------------
 # vendor_dlkm/etc/build.prop
-#
+# odm_dlkm/etc/build.prop
+# system_dlkm/build.prop
+# These are built by Soong. See build/soong/Android.bp
 
 INSTALLED_VENDOR_DLKM_BUILD_PROP_TARGET := $(TARGET_OUT_VENDOR_DLKM)/etc/build.prop
-$(eval $(call build-properties,\
-    vendor_dlkm,\
-    $(INSTALLED_VENDOR_DLKM_BUILD_PROP_TARGET),\
-    $(empty),\
-    $(empty),\
-    $(empty),\
-    $(empty),\
-    $(empty)))
-
-$(eval $(call declare-1p-target,$(INSTALLED_VENDOR_DLKM_BUILD_PROP_TARGET)))
-
-# ----------------------------------------------------------------
-# odm_dlkm/etc/build.prop
-#
-
 INSTALLED_ODM_DLKM_BUILD_PROP_TARGET := $(TARGET_OUT_ODM_DLKM)/etc/build.prop
-$(eval $(call build-properties,\
-    odm_dlkm,\
-    $(INSTALLED_ODM_DLKM_BUILD_PROP_TARGET),\
-    $(empty),\
-    $(empty),\
-    $(empty),\
-    $(empty),\
-    $(empty)))
-
-$(eval $(call declare-1p-target,$(INSTALLED_ODM_DLKM_BUILD_PROP_TARGET)))
-
-# ----------------------------------------------------------------
-# system_dlkm/build.prop
-#
-
 INSTALLED_SYSTEM_DLKM_BUILD_PROP_TARGET := $(TARGET_OUT_SYSTEM_DLKM)/etc/build.prop
-$(eval $(call build-properties,\
-    system_dlkm,\
-    $(INSTALLED_SYSTEM_DLKM_BUILD_PROP_TARGET),\
-    $(empty),\
-    $(empty),\
-    $(empty),\
-    $(empty),\
-    $(empty)))
-
-$(eval $(call declare-1p-target,$(INSTALLED_SYSTEM_DLKM_BUILD_PROP_TARGET)))
+ALL_DEFAULT_INSTALLED_MODULES += \
+  $(INSTALLED_VENDOR_DLKM_BUILD_PROP_TARGET) \
+  $(INSTALLED_ODM_DLKM_BUILD_PROP_TARGET) \
+  $(INSTALLED_SYSTEM_DLKM_BUILD_PROP_TARGET) \
 
 # -----------------------------------------------------------------
 # system_ext/etc/build.prop
 #
-_prop_files_ := $(if $(TARGET_SYSTEM_EXT_PROP),\
-    $(TARGET_SYSTEM_EXT_PROP),\
-    $(wildcard $(TARGET_DEVICE_DIR)/system_ext.prop))
-
-# Order matters here. When there are duplicates, the last one wins.
-# TODO(b/117892318): don't allow duplicates so that the ordering doesn't matter
-_prop_vars_ := PRODUCT_SYSTEM_EXT_PROPERTIES
+# system_ext/etc/build.prop is built by Soong. See system-build.prop module in
+# build/soong/Android.bp.
 
 INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET := $(TARGET_OUT_SYSTEM_EXT)/etc/build.prop
-$(eval $(call build-properties,\
-    system_ext,\
-    $(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET),\
-    $(_prop_files_),\
-    $(_prop_vars_),\
-    $(empty),\
-    $(empty),\
-    $(empty)))
-
-$(eval $(call declare-1p-target,$(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET)))
-
-# ----------------------------------------------------------------
-# ramdisk/boot/etc/build.prop
-#
 
 RAMDISK_BUILD_PROP_REL_PATH := system/etc/ramdisk/build.prop
+ifeq (true,$(BOARD_USES_RECOVERY_AS_BOOT))
+INSTALLED_RAMDISK_BUILD_PROP_TARGET := $(TARGET_RECOVERY_ROOT_OUT)/first_stage_ramdisk/$(RAMDISK_BUILD_PROP_REL_PATH)
+else
 INSTALLED_RAMDISK_BUILD_PROP_TARGET := $(TARGET_RAMDISK_OUT)/$(RAMDISK_BUILD_PROP_REL_PATH)
-$(eval $(call build-properties,\
-    bootimage,\
-    $(INSTALLED_RAMDISK_BUILD_PROP_TARGET),\
-    $(empty),\
-    $(empty),\
-    $(empty),\
-    $(empty),\
-    $(empty)))
-
-$(eval $(call declare-1p-target,$(INSTALLED_RAMDISK_BUILD_PROP_TARGET)))
+endif
 
 ALL_INSTALLED_BUILD_PROP_FILES := \
   $(INSTALLED_BUILD_PROP_TARGET) \
diff --git a/core/sysprop_config.mk b/core/sysprop_config.mk
index f9b9d1c..1991503 100644
--- a/core/sysprop_config.mk
+++ b/core/sysprop_config.mk
@@ -15,63 +15,9 @@
 )
 _additional_prop_var_names :=
 
-#
-# -----------------------------------------------------------------
-# Add the product-defined properties to the build properties.
-ifneq ($(BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED), true)
-  ADDITIONAL_SYSTEM_PROPERTIES += $(PRODUCT_PROPERTY_OVERRIDES)
-else
-  ifndef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
-    ADDITIONAL_SYSTEM_PROPERTIES += $(PRODUCT_PROPERTY_OVERRIDES)
-  endif
-endif
-
-ADDITIONAL_SYSTEM_PROPERTIES += ro.treble.enabled=${PRODUCT_FULL_TREBLE}
-
-# Set ro.llndk.api_level to show the maximum vendor API level that the LLNDK in
-# the system partition supports.
-ifdef RELEASE_BOARD_API_LEVEL
-ADDITIONAL_SYSTEM_PROPERTIES += ro.llndk.api_level=$(RELEASE_BOARD_API_LEVEL)
-endif
-
-# Sets ro.actionable_compatible_property.enabled to know on runtime whether the
-# allowed list of actionable compatible properties is enabled or not.
-ADDITIONAL_SYSTEM_PROPERTIES += ro.actionable_compatible_property.enabled=true
-
-# Add the system server compiler filter if they are specified for the product.
-ifneq (,$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER))
-ADDITIONAL_PRODUCT_PROPERTIES += dalvik.vm.systemservercompilerfilter=$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER)
-endif
-
-# Add the 16K developer option if it is defined for the product.
-ifeq ($(PRODUCT_16K_DEVELOPER_OPTION),true)
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.16k_page.enabled=true
-else
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.16k_page.enabled=false
-endif
-
-ifeq ($(TARGET_BOOTS_16K),true)
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.page_size=16384
-else
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.page_size=4096
-endif
-
-# Enable core platform API violation warnings on userdebug and eng builds.
-ifneq ($(TARGET_BUILD_VARIANT),user)
-ADDITIONAL_SYSTEM_PROPERTIES += persist.debug.dalvik.vm.core_platform_api_policy=just-warn
-endif
-
-# Define ro.sanitize.<name> properties for all global sanitizers.
-ADDITIONAL_SYSTEM_PROPERTIES += $(foreach s,$(SANITIZE_TARGET),ro.sanitize.$(s)=true)
-
-# Sets the default value of ro.postinstall.fstab.prefix to /system.
-# Device board config should override the value to /product when needed by:
-#
-#     PRODUCT_PRODUCT_PROPERTIES += ro.postinstall.fstab.prefix=/product
-#
-# It then uses ${ro.postinstall.fstab.prefix}/etc/fstab.postinstall to
-# mount system_other partition.
-ADDITIONAL_SYSTEM_PROPERTIES += ro.postinstall.fstab.prefix=/system
+$(KATI_obsolete_var ADDITIONAL_SYSTEM_PROPERTIES,Use build/soong/scripts/gen_build_prop.py instead)
+$(KATI_obsolete_var ADDITIONAL_ODM_PROPERTIES,Use build/soong/scripts/gen_build_prop.py instead)
+$(KATI_obsolete_var ADDITIONAL_PRODUCT_PROPERTIES,Use build/soong/scripts/gen_build_prop.py instead)
 
 # Add cpu properties for bionic and ART.
 ADDITIONAL_VENDOR_PROPERTIES += ro.bionic.arch=$(TARGET_ARCH)
@@ -145,8 +91,12 @@
 # Build system set BOARD_API_LEVEL to show the api level of the vendor API surface.
 # This must not be altered outside of build system.
 ifdef BOARD_API_LEVEL
-ADDITIONAL_VENDOR_PROPERTIES += \
-    ro.board.api_level=$(BOARD_API_LEVEL)
+  ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.board.api_level?=$(BOARD_API_LEVEL)
+  ifdef BOARD_API_LEVEL_PROP_OVERRIDE
+    ADDITIONAL_VENDOR_PROPERTIES += \
+      ro.board.api_level=$(BOARD_API_LEVEL_PROP_OVERRIDE)
+  endif
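+  # For illustration (values hypothetical): with BOARD_API_LEVEL := 202404 and
+  # BOARD_API_LEVEL_PROP_OVERRIDE := 202504, the vendor build.prop gets the
+  # optional "ro.board.api_level?=202404" plus the overriding
+  # "ro.board.api_level=202504"; the "?=" entry only takes effect when no
+  # unconditional assignment is present.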
 endif
 # RELEASE_BOARD_API_LEVEL_FROZEN is true when the vendor API surface is frozen.
 ifdef RELEASE_BOARD_API_LEVEL_FROZEN
@@ -184,118 +134,16 @@
     ro.build.ab_update=$(AB_OTA_UPDATER)
 endif
 
-ADDITIONAL_PRODUCT_PROPERTIES += ro.build.characteristics=$(TARGET_AAPT_CHARACTERISTICS)
-
 ifeq ($(AB_OTA_UPDATER),true)
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS)))
 ADDITIONAL_VENDOR_PROPERTIES += ro.vendor.build.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS)))
 endif
 
-# Set this property for VTS to skip large page size tests on unsupported devices.
-ADDITIONAL_PRODUCT_PROPERTIES += \
-    ro.product.cpu.pagesize.max=$(TARGET_MAX_PAGE_SIZE_SUPPORTED)
-
-ifeq ($(PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO),true)
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.no_bionic_page_size_macro=true
-endif
-
 user_variant := $(filter user userdebug,$(TARGET_BUILD_VARIANT))
-enable_target_debugging := true
-enable_dalvik_lock_contention_logging := true
-ifneq (,$(user_variant))
-  # Target is secure in user builds.
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.secure=1
-  ADDITIONAL_SYSTEM_PROPERTIES += security.perf_harden=1
-
-  ifeq ($(user_variant),user)
-    ADDITIONAL_SYSTEM_PROPERTIES += ro.adb.secure=1
-  endif
-
-  ifneq ($(user_variant),userdebug)
-    # Disable debugging in plain user builds.
-    enable_target_debugging :=
-    enable_dalvik_lock_contention_logging :=
-  else
-    # Disable debugging in userdebug builds if PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG
-    # is set.
-    ifneq (,$(strip $(PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG)))
-      enable_target_debugging :=
-    endif
-  endif
-
-  # Disallow mock locations by default for user builds
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.allow.mock.location=0
-
-else # !user_variant
-  # Turn on checkjni for non-user builds.
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.kernel.android.checkjni=1
-  # Set device insecure for non-user builds.
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.secure=0
-  # Allow mock locations by default for non user builds
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.allow.mock.location=1
-endif # !user_variant
-
-ifeq (true,$(strip $(enable_dalvik_lock_contention_logging)))
-  # Enable Dalvik lock contention logging.
-  ADDITIONAL_SYSTEM_PROPERTIES += dalvik.vm.lockprof.threshold=500
-endif # !enable_dalvik_lock_contention_logging
-
-ifeq (true,$(strip $(enable_target_debugging)))
-  # Target is more debuggable and adbd is on by default
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.debuggable=1
-else # !enable_target_debugging
-  # Target is less debuggable and adbd is off by default
-  ADDITIONAL_SYSTEM_PROPERTIES += ro.debuggable=0
-endif # !enable_target_debugging
-
-enable_target_debugging:=
-enable_dalvik_lock_contention_logging:=
-
-ifneq ($(filter sdk sdk_addon,$(MAKECMDGOALS)),)
-_is_sdk_build := true
-endif
-
-ifeq ($(TARGET_BUILD_VARIANT),eng)
-ifneq ($(filter ro.setupwizard.mode=ENABLED, $(call collapse-pairs, $(ADDITIONAL_SYSTEM_PROPERTIES))),)
-  # Don't require the setup wizard on eng builds
-  ADDITIONAL_SYSTEM_PROPERTIES := $(filter-out ro.setupwizard.mode=%,\
-          $(call collapse-pairs, $(ADDITIONAL_SYSTEM_PROPERTIES))) \
-          ro.setupwizard.mode=OPTIONAL
-endif
-ifndef _is_sdk_build
-  # To speedup startup of non-preopted builds, don't verify or compile the boot image.
-  ADDITIONAL_SYSTEM_PROPERTIES += dalvik.vm.image-dex2oat-filter=extract
-endif
-# b/323566535
-ADDITIONAL_SYSTEM_PROPERTIES += init.svc_debug.no_fatal.zygote=true
-endif
-
-ifdef _is_sdk_build
-ADDITIONAL_SYSTEM_PROPERTIES += xmpp.auto-presence=true
-ADDITIONAL_SYSTEM_PROPERTIES += ro.config.nocheckin=yes
-endif
-
-_is_sdk_build :=
-
-ADDITIONAL_SYSTEM_PROPERTIES += net.bt.name=Android
-
-# This property is set by flashing debug boot image, so default to false.
-ADDITIONAL_SYSTEM_PROPERTIES += ro.force.debuggable=0
 
 config_enable_uffd_gc := \
   $(firstword $(OVERRIDE_ENABLE_UFFD_GC) $(PRODUCT_ENABLE_UFFD_GC) default)
 
-# This is a temporary system property that controls the ART module. The plan is
-# to remove it by Aug 2025, at which time Mainline updates of the ART module
-# will ignore it as well.
-# If the value is "default", it will be mangled by post_process_props.py.
-ADDITIONAL_PRODUCT_PROPERTIES += ro.dalvik.vm.enable_uffd_gc=$(config_enable_uffd_gc)
-
-ADDITIONAL_SYSTEM_PROPERTIES := $(strip $(ADDITIONAL_SYSTEM_PROPERTIES))
-ADDITIONAL_PRODUCT_PROPERTIES := $(strip $(ADDITIONAL_PRODUCT_PROPERTIES))
 ADDITIONAL_VENDOR_PROPERTIES := $(strip $(ADDITIONAL_VENDOR_PROPERTIES))
 
 .KATI_READONLY += \
-    ADDITIONAL_SYSTEM_PROPERTIES \
-    ADDITIONAL_PRODUCT_PROPERTIES \
     ADDITIONAL_VENDOR_PROPERTIES
diff --git a/core/tasks/art-host-tests.mk b/core/tasks/art-host-tests.mk
index c95f6e7..eb54fae 100644
--- a/core/tasks/art-host-tests.mk
+++ b/core/tasks/art-host-tests.mk
@@ -47,21 +47,16 @@
 	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
 	  echo $$shared_lib >> $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list; \
 	done
-	grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true
 	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host.list \
-	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target.list \
 	  -P host/testcases -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list \
 	  -sha256
 	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list > $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list || true
-	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list > $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list || true
 	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_configs_zip) \
-	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list \
-	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list
+	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list
 	grep $(HOST_OUT) $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list > $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list || true
 	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_host_shared_libs_zip) \
 	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list
 	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
-	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
 	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_list_zip) -C $(PRIVATE_INTERMEDIATES_DIR) -f $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
 
 art-host-tests: $(art_host_tests_zip)
diff --git a/core/tasks/autorepro.mk b/core/tasks/autorepro.mk
new file mode 100644
index 0000000..2f81f9b
--- /dev/null
+++ b/core/tasks/autorepro.mk
@@ -0,0 +1,39 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+ifneq ($(wildcard test/sts/README-autorepro.md),)
+test_suite_name := autorepro
+test_suite_tradefed := sts-tradefed
+test_suite_readme := test/sts/README-autorepro.md
+autorepro_zip := $(HOST_OUT)/$(test_suite_name)/autorepro.zip
+
+include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
+
+autorepro_plugin_skel := $(call intermediates-dir-for,ETC,autorepro-plugin-skel.zip)/autorepro-plugin-skel.zip
+
+$(autorepro_zip): AUTOREPRO_ZIP := $(compatibility_zip)
+$(autorepro_zip): AUTOREPRO_PLUGIN_SKEL := $(autorepro_plugin_skel)
+$(autorepro_zip): $(MERGE_ZIPS) $(ZIP2ZIP) $(compatibility_zip) $(autorepro_plugin_skel)
+	rm -f $@ $(AUTOREPRO_ZIP)_filtered
+	$(ZIP2ZIP) -i $(AUTOREPRO_ZIP) -o $(AUTOREPRO_ZIP)_filtered \
+		-x android-autorepro/tools/sts-tradefed-tests.jar \
+		'android-autorepro/tools/*:autorepro/src/main/resources/sts-tradefed-tools/'
+	$(MERGE_ZIPS) $@ $(AUTOREPRO_ZIP)_filtered $(AUTOREPRO_PLUGIN_SKEL)
+	rm -f $(AUTOREPRO_ZIP)_filtered
+
+.PHONY: autorepro
+autorepro: $(autorepro_zip)
+$(call dist-for-goals, autorepro, $(autorepro_zip))
+
+endif
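+# Usage sketch: "m autorepro" builds the zip above; adding the "dist" goal
+# copies it into DIST_DIR via dist-for-goals.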
diff --git a/target/product/gsi/Android.mk b/core/tasks/check-abi-dump-list.mk
similarity index 71%
rename from target/product/gsi/Android.mk
rename to core/tasks/check-abi-dump-list.mk
index 36897fe..81d549e 100644
--- a/target/product/gsi/Android.mk
+++ b/core/tasks/check-abi-dump-list.mk
@@ -1,4 +1,16 @@
-LOCAL_PATH:= $(call my-dir)
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 
 #####################################################################
 # Check the generated list against the latest list stored in the
@@ -109,60 +121,3 @@
 	$(if $(added_vndk_abi_dumps)$(added_platform_abi_dumps),exit 1)
 	$(hide) mkdir -p $(dir $@)
 	$(hide) touch $@
-
-#####################################################################
-# VNDK package and snapshot.
-
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := vndk_apex_snapshot_package
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_REQUIRED_MODULES := $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),com.android.vndk.v$(vndk_ver))
-include $(BUILD_PHONY_PACKAGE)
-
-#####################################################################
-# Define Phony module to install LLNDK modules which are installed in
-# the system image
-include $(CLEAR_VARS)
-LOCAL_MODULE := llndk_in_system
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-
-# Filter LLNDK libs moved to APEX to avoid pulling them into /system/LIB
-LOCAL_REQUIRED_MODULES := \
-    $(filter-out $(LLNDK_MOVED_TO_APEX_LIBRARIES),$(LLNDK_LIBRARIES)) \
-    llndk.libraries.txt
-
-
-include $(BUILD_PHONY_PACKAGE)
-
-#####################################################################
-# init.gsi.rc, GSI-specific init script.
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := init.gsi.rc
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_SRC_FILES := $(LOCAL_MODULE)
-LOCAL_MODULE_CLASS := ETC
-LOCAL_SYSTEM_EXT_MODULE := true
-LOCAL_MODULE_RELATIVE_PATH := init
-
-include $(BUILD_PREBUILT)
-
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := init.vndk-nodef.rc
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_SRC_FILES := $(LOCAL_MODULE)
-LOCAL_MODULE_CLASS := ETC
-LOCAL_SYSTEM_EXT_MODULE := true
-LOCAL_MODULE_RELATIVE_PATH := gsi
-
-include $(BUILD_PREBUILT)
diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk
index 5850c4e..6164c2e 100644
--- a/core/tasks/device-tests.mk
+++ b/core/tasks/device-tests.mk
@@ -14,6 +14,7 @@
 
 
 .PHONY: device-tests
+.PHONY: device-tests-host-shared-libs
 
 device-tests-zip := $(PRODUCT_OUT)/device-tests.zip
 # Create an artifact to include a list of test config files in device-tests.
@@ -23,37 +24,45 @@
 my_host_shared_lib_for_device_tests := $(call copy-many-files,$(COMPATIBILITY.device-tests.HOST_SHARED_LIBRARY.FILES))
 device_tests_host_shared_libs_zip := $(PRODUCT_OUT)/device-tests_host-shared-libs.zip
 
-$(device-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(device-tests-list-zip) $(device-tests-configs-zip) $(device_tests_host_shared_libs_zip)
+$(device-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(device-tests-list-zip) $(device-tests-configs-zip)
 $(device-tests-zip) : PRIVATE_device_tests_list := $(PRODUCT_OUT)/device-tests_list
 $(device-tests-zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_device_tests)
-$(device-tests-zip) : PRIVATE_device_host_shared_libs_zip := $(device_tests_host_shared_libs_zip)
 $(device-tests-zip) : $(COMPATIBILITY.device-tests.FILES) $(COMPATIBILITY.device-tests.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES) $(my_host_shared_lib_for_device_tests) $(SOONG_ZIP)
-	rm -f $@-shared-libs.list
 	echo $(sort $(COMPATIBILITY.device-tests.FILES) $(COMPATIBILITY.device-tests.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES)) | tr " " "\n" > $@.list
 	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
 	grep -e .*\\.config$$ $@-host.list > $@-host-test-configs.list || true
 	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
 	  echo $$shared_lib >> $@-host.list; \
-	  echo $$shared_lib >> $@-shared-libs.list; \
 	done
-	grep $(HOST_OUT_TESTCASES) $@-shared-libs.list > $@-host-shared-libs.list || true
 	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
 	grep -e .*\\.config$$ $@-target.list > $@-target-test-configs.list || true
 	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list -sha256
 	$(hide) $(SOONG_ZIP) -d -o $(device-tests-configs-zip) \
 	  -P host -C $(HOST_OUT) -l $@-host-test-configs.list \
 	  -P target -C $(PRODUCT_OUT) -l $@-target-test-configs.list
-	$(SOONG_ZIP) -d -o $(PRIVATE_device_host_shared_libs_zip) \
-	  -P host -C $(HOST_OUT) -l $@-host-shared-libs.list
 	rm -f $(PRIVATE_device_tests_list)
 	$(hide) grep -e .*\\.config$$ $@-host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_device_tests_list)
 	$(hide) grep -e .*\\.config$$ $@-target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_device_tests_list)
 	$(hide) $(SOONG_ZIP) -d -o $(device-tests-list-zip) -C $(dir $@) -f $(PRIVATE_device_tests_list)
 	rm -f $@.list $@-host.list $@-target.list $@-host-test-configs.list $@-target-test-configs.list \
-	  $@-shared-libs.list $@-host-shared-libs.list $(PRIVATE_device_tests_list)
+		$(PRIVATE_device_tests_list)
+
+$(device_tests_host_shared_libs_zip) : PRIVATE_device_host_shared_libs_zip := $(device_tests_host_shared_libs_zip)
+$(device_tests_host_shared_libs_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_device_tests)
+$(device_tests_host_shared_libs_zip) : $(my_host_shared_lib_for_device_tests) $(SOONG_ZIP)
+	rm -f $@-shared-libs.list
+	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
+	  echo $$shared_lib >> $@-shared-libs.list; \
+	done
+	grep $(HOST_OUT_TESTCASES) $@-shared-libs.list > $@-host-shared-libs.list || true
+	$(SOONG_ZIP) -d -o $(PRIVATE_device_host_shared_libs_zip) \
+	  -P host -C $(HOST_OUT) -l $@-host-shared-libs.list
 
 device-tests: $(device-tests-zip)
+device-tests-host-shared-libs: $(device_tests_host_shared_libs_zip)
+
 $(call dist-for-goals, device-tests, $(device-tests-zip) $(device-tests-list-zip) $(device-tests-configs-zip) $(device_tests_host_shared_libs_zip))
+$(call dist-for-goals, device-tests-host-shared-libs, $(device_tests_host_shared_libs_zip))
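+# Usage sketch: "m device-tests-host-shared-libs dist" produces just the
+# host-shared-libs zip without building the full device-tests zip.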
 
 $(call declare-1p-container,$(device-tests-zip),)
 $(call declare-container-license-deps,$(device-tests-zip),$(COMPATIBILITY.device-tests.FILES) $(my_host_shared_lib_for_device_tests),$(PRODUCT_OUT)/:/)
diff --git a/core/tasks/dts.mk b/core/tasks/dts.mk
new file mode 100644
index 0000000..8f09082
--- /dev/null
+++ b/core/tasks/dts.mk
@@ -0,0 +1,28 @@
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Desktop test suite
+ifneq ($(wildcard test/dts/tools/dts-tradefed/README),)
+test_suite_name := dts
+test_suite_tradefed := dts-tradefed
+test_suite_readme := test/dts/tools/dts-tradefed/README
+test_suite_tools := $(HOST_OUT_JAVA_LIBRARIES)/ats_console_deploy.jar \
+  $(HOST_OUT_JAVA_LIBRARIES)/ats_olc_server_local_mode_deploy.jar
+
+include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
+
+.PHONY: dts
+dts: $(compatibility_zip) $(compatibility_tests_list_zip)
+$(call dist-for-goals, dts, $(compatibility_zip) $(compatibility_tests_list_zip))
+endif
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index d6fc072..1901ed5 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -27,21 +27,9 @@
 # Create an artifact to include all test config files in general-tests.
 general_tests_configs_zip := $(PRODUCT_OUT)/general-tests_configs.zip
 
-# Copy kernel test modules to testcases directories
-include $(BUILD_SYSTEM)/tasks/tools/vts-kernel-tests.mk
-ltp_copy_pairs := \
-  $(call target-native-copy-pairs,$(kernel_ltp_modules),$(kernel_ltp_host_out))
-copy_ltp_tests := $(call copy-many-files,$(ltp_copy_pairs))
-
-# PHONY target to be used to build and test `vts_ltp_tests` without building full vts
-.PHONY: vts_kernel_ltp_tests
-vts_kernel_ltp_tests: $(copy_ltp_tests)
-
 general_tests_shared_libs_zip := $(PRODUCT_OUT)/general-tests_host-shared-libs.zip
 
 $(general_tests_zip) : $(general_tests_shared_libs_zip)
-$(general_tests_zip) : $(copy_ltp_tests)
-$(general_tests_zip) : PRIVATE_KERNEL_LTP_HOST_OUT := $(kernel_ltp_host_out)
 $(general_tests_zip) : PRIVATE_general_tests_list_zip := $(general_tests_list_zip)
 $(general_tests_zip) : .KATI_IMPLICIT_OUTPUTS := $(general_tests_list_zip) $(general_tests_configs_zip)
 $(general_tests_zip) : PRIVATE_TOOLS := $(general_tests_tools)
@@ -52,7 +40,6 @@
 	rm -f $@ $(PRIVATE_general_tests_list_zip)
 	mkdir -p $(PRIVATE_INTERMEDIATES_DIR) $(PRIVATE_INTERMEDIATES_DIR)/tools
 	echo $(sort $(COMPATIBILITY.general-tests.FILES) $(COMPATIBILITY.general-tests.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES)) | tr " " "\n" > $(PRIVATE_INTERMEDIATES_DIR)/list
-	find $(PRIVATE_KERNEL_LTP_HOST_OUT) >> $(PRIVATE_INTERMEDIATES_DIR)/list
 	grep $(HOST_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/host.list || true
 	grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true
 	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list > $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list || true
diff --git a/core/tasks/meta-lic.mk b/core/tasks/meta-lic.mk
index 85357eb..620b1e2 100644
--- a/core/tasks/meta-lic.mk
+++ b/core/tasks/meta-lic.mk
@@ -83,6 +83,40 @@
 $(eval $(call declare-1p-copy-files,device/google/coral,audio_policy_configuration.xml))
 $(eval $(call declare-1p-copy-files,device/google/coral,display_19260504575090817.xml))
 
+# Moved here from device/google/cuttlefish/Android.mk
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,.idc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,default-permissions.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,libnfc-nci.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.postinstall,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,ueventd.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,hals.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,device_state_configuration.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,p2p_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,wpa_supplicant_overlay.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,wpa_supplicant.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,init.cutf_cvm.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.cf.f2fs.hctr2,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.cf.f2fs.cts,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.cf.ext4.hctr2,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.cf.ext4.cts,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,init.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,audio_policy.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
+
+$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish/shared/config,pci.ids,SPDX-license-identifier-BSD-3-Clause,notice,device/google/cuttlefish/shared/config/LICENSE_BSD,))
+
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,privapp-permissions-cuttlefish.xml))
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,media_profiles_V1_0.xml))
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,media_codecs_performance.xml))
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,cuttlefish_excluded_hardware.xml))
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,media_codecs.xml))
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,media_codecs_google_video.xml))
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,car_audio_configuration.xml))
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,audio_policy_configuration.xml))
+$(eval $(call declare-1p-copy-files,device/google/cuttlefish,preinstalled-packages-product-car-cuttlefish.xml))
+$(eval $(call declare-1p-copy-files,hardware/google/camera/devices,.json))
+
 # Moved here from device/google/gs101/Android.mk
 $(eval $(call declare-copy-files-license-metadata,device/google/gs101,default-permissions.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
 $(eval $(call declare-copy-files-license-metadata,device/google/gs101,libnfc-nci.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,))
@@ -191,3 +225,6 @@
 
 # Moved here from hardware/libhardware_legacy/Android.mk
 $(eval $(call declare-1p-copy-files,hardware/libhardware_legacy,))
+
+# Moved here from system/core/rootdir/Android.mk
+$(eval $(call declare-1p-copy-files,system/core/rootdir,))
diff --git a/core/tasks/mke2fs-dist.mk b/core/tasks/mke2fs-dist.mk
new file mode 100644
index 0000000..3540c1f
--- /dev/null
+++ b/core/tasks/mke2fs-dist.mk
@@ -0,0 +1,22 @@
+# Copyright (C) 2024 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# TODO: Once Soong's recovery partition variation can be made selectable
+#       and the meta_lic file duplication issue is resolved, move this to the
+#       dist section of the corresponding module's Android.bp.
+my_dist_files := $(HOST_OUT_EXECUTABLES)/mke2fs
+my_dist_files += $(HOST_OUT_EXECUTABLES)/make_f2fs
+my_dist_files += $(HOST_OUT_EXECUTABLES)/make_f2fs_casefold
+$(call dist-for-goals,dist_files sdk,$(my_dist_files))
+my_dist_files :=
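
A quick way to exercise the new dist entries (a sketch, assuming a lunched environment and the default DIST_DIR of out/dist):

    # Sketch: build the dist goals so dist-for-goals copies the host tools.
    m dist dist_files
    # The three binaries should land in DIST_DIR (out/dist unless overridden).
    ls out/dist/mke2fs out/dist/make_f2fs out/dist/make_f2fs_casefold
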
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index 7593668..0ca27d8 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -13,7 +13,7 @@
 $(if $(strip $(2)),'$(COMMA)$(strip $(1)): "$(strip $(2))"')
 endef
 
-SOONG_MODULE_INFO := $(SOONG_OUT_DIR)/module-info-$(TARGET_PRODUCT).json
+SOONG_MODULE_INFO := $(SOONG_OUT_DIR)/module-info-$(TARGET_PRODUCT)${COVERAGE_SUFFIX}.json
 
 $(MODULE_INFO_JSON): PRIVATE_SOONG_MODULE_INFO := $(SOONG_MODULE_INFO)
 $(MODULE_INFO_JSON): PRIVATE_MERGE_JSON_OBJECTS := $(HOST_OUT_EXECUTABLES)/merge_module_info_json
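
With the coverage suffix folded into the file name, the per-product Soong module database can be inspected directly; a sketch (the jq query, the adbd module, and the aosp_arm64 product name are illustrative assumptions):

    # List where a module gets installed, per the Soong-generated module info.
    product=aosp_arm64
    jq '."adbd".installed' "out/soong/module-info-${product}${COVERAGE_SUFFIX:-}.json"
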
diff --git a/core/tasks/prebuilt_tradefed.mk b/core/tasks/prebuilt_tradefed.mk
new file mode 100644
index 0000000..96c57d5
--- /dev/null
+++ b/core/tasks/prebuilt_tradefed.mk
@@ -0,0 +1,22 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+ifeq (,$(wildcard tools/tradefederation/core))
+.PHONY: tradefed-core
+tradefed-core: tradefed atest_tradefed.sh
+.PHONY: tradefed-all
+tradefed-all: tradefed atest_tradefed.sh
+
+$(call dist-for-goals, tradefed, $(HOST_OUT)/etc/tradefed.zip)
+endif
diff --git a/core/tasks/recovery_snapshot.mk b/core/tasks/recovery_snapshot.mk
deleted file mode 100644
index 525273b..0000000
--- a/core/tasks/recovery_snapshot.mk
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-current_makefile := $(lastword $(MAKEFILE_LIST))
-
-# RECOVERY_SNAPSHOT_VERSION must be set to 'current' in order to generate a recovery snapshot.
-ifeq ($(RECOVERY_SNAPSHOT_VERSION),current)
-
-.PHONY: recovery-snapshot
-recovery-snapshot: $(SOONG_RECOVERY_SNAPSHOT_ZIP)
-
-$(call dist-for-goals, recovery-snapshot, $(SOONG_RECOVERY_SNAPSHOT_ZIP))
-
-else # RECOVERY_SNAPSHOT_VERSION is NOT set to 'current'
-
-.PHONY: recovery-snapshot
-recovery-snapshot: PRIVATE_MAKEFILE := $(current_makefile)
-recovery-snapshot:
-	$(call echo-error,$(PRIVATE_MAKEFILE),\
-		"CANNOT generate Recovery snapshot. RECOVERY_SNAPSHOT_VERSION must be set to 'current'.")
-	exit 1
-
-endif # RECOVERY_SNAPSHOT_VERSION
diff --git a/core/tasks/sts-lite.mk b/core/tasks/sts-lite.mk
deleted file mode 100644
index 65c65c3..0000000
--- a/core/tasks/sts-lite.mk
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-ifneq ($(wildcard test/sts/README-sts-sdk.md),)
-test_suite_name := sts-lite
-test_suite_tradefed := sts-tradefed
-test_suite_readme := test/sts/README-sts-sdk.md
-sts_sdk_zip := $(HOST_OUT)/$(test_suite_name)/sts-sdk.zip
-
-include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
-
-sts_sdk_samples := $(call intermediates-dir-for,ETC,sts-sdk-samples.zip)/sts-sdk-samples.zip
-
-$(sts_sdk_zip): STS_LITE_ZIP := $(compatibility_zip)
-$(sts_sdk_zip): STS_SDK_SAMPLES := $(sts_sdk_samples)
-$(sts_sdk_zip): $(MERGE_ZIPS) $(ZIP2ZIP) $(compatibility_zip) $(sts_sdk_samples)
-	rm -f $@ $(STS_LITE_ZIP)_filtered
-	$(ZIP2ZIP) -i $(STS_LITE_ZIP) -o $(STS_LITE_ZIP)_filtered \
-		-x android-sts-lite/tools/sts-tradefed-tests.jar \
-		'android-sts-lite/tools/*:sts-test/libs/' \
-		'android-sts-lite/testcases/*:sts-test/utils/' \
-		'android-sts-lite/jdk/**/*:sts-test/jdk/'
-	$(MERGE_ZIPS) $@ $(STS_LITE_ZIP)_filtered $(STS_SDK_SAMPLES)
-	rm -f $(STS_LITE_ZIP)_filtered
-
-.PHONY: sts-sdk
-sts-sdk: $(sts_sdk_zip)
-$(call dist-for-goals, sts-sdk, $(sts_sdk_zip))
-
-endif
diff --git a/core/tasks/tools/vts-kernel-tests.mk b/core/tasks/tools/vts-kernel-tests.mk
deleted file mode 100644
index e727dc1..0000000
--- a/core/tasks/tools/vts-kernel-tests.mk
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
--include external/ltp/android/ltp_package_list.mk
-
-include $(BUILD_SYSTEM)/tasks/tools/vts_package_utils.mk
-
-# Copy kernel test modules to testcases directories
-kernel_ltp_host_out := $(HOST_OUT_TESTCASES)/vts_kernel_ltp_tests
-kernel_ltp_vts_out := $(HOST_OUT)/$(test_suite_name)/android-$(test_suite_name)/testcases/vts_kernel_ltp_tests
-kernel_ltp_modules := \
-    ltp \
-    $(ltp_packages)
diff --git a/core/tasks/tools/vts_package_utils.mk b/core/tasks/tools/vts_package_utils.mk
deleted file mode 100644
index 1a819f2..0000000
--- a/core/tasks/tools/vts_package_utils.mk
+++ /dev/null
@@ -1,34 +0,0 @@
-#
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# $(1): List of target native files to copy.
-# $(2): Copy destination directory.
-# Evaluates to a list of ":"-separated pairs src:dst.
-define target-native-copy-pairs
-$(foreach m,$(1),\
-  $(eval _built_files := $(strip $(ALL_MODULES.$(m).BUILT_INSTALLED)\
-  $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).BUILT_INSTALLED)))\
-  $(foreach i, $(sort $(_built_files)),\
-    $(eval bui_ins := $(subst :,$(space),$(i)))\
-    $(eval ins := $(word 2,$(bui_ins)))\
-    $(if $(filter $(TARGET_OUT_ROOT)/%,$(ins)),\
-      $(eval bui := $(word 1,$(bui_ins)))\
-      $(eval my_copy_dest := $(patsubst data/%,DATA/%,\
-                               $(patsubst system/%,DATA/%,\
-                                   $(patsubst $(PRODUCT_OUT)/%,%,$(ins)))))\
-      $(call declare-copy-target-license-metadata,$(2)/$(my_copy_dest),$(bui))\
-      $(bui):$(2)/$(my_copy_dest))))
-endef
diff --git a/core/tasks/tradefed-tests-list.mk b/core/tasks/tradefed-tests-list.mk
index 61bf136..47c360d 100644
--- a/core/tasks/tradefed-tests-list.mk
+++ b/core/tasks/tradefed-tests-list.mk
@@ -15,6 +15,11 @@
 # List all TradeFed tests from COMPATIBILITY.tradefed_tests_dir
 .PHONY: tradefed-tests-list
 
+COMPATIBILITY.tradefed_tests_dir := \
+  $(COMPATIBILITY.tradefed_tests_dir) \
+  tools/tradefederation/core/res/config \
+  tools/tradefederation/core/javatests/res/config
+
 tradefed_tests :=
 $(foreach dir, $(COMPATIBILITY.tradefed_tests_dir), \
   $(eval tradefed_tests += $(shell find $(dir) -type f -name "*.xml")))
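
The make loop above is roughly the following shell pipeline; a sketch showing only the two newly appended TradeFed config directories:

    # Collect every TradeFed XML config under the appended directories.
    find tools/tradefederation/core/res/config \
         tools/tradefederation/core/javatests/res/config \
         -type f -name '*.xml' | sort
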
diff --git a/core/tasks/vendor_snapshot.mk b/core/tasks/vendor_snapshot.mk
deleted file mode 100644
index 83c1379..0000000
--- a/core/tasks/vendor_snapshot.mk
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-current_makefile := $(lastword $(MAKEFILE_LIST))
-
-# BOARD_VNDK_VERSION must be set to 'current' in order to generate a vendor snapshot.
-ifeq ($(BOARD_VNDK_VERSION),current)
-
-.PHONY: vendor-snapshot
-vendor-snapshot: $(SOONG_VENDOR_SNAPSHOT_ZIP)
-
-$(call dist-for-goals, vendor-snapshot, $(SOONG_VENDOR_SNAPSHOT_ZIP))
-
-.PHONY: vendor-fake-snapshot
-vendor-fake-snapshot: $(SOONG_VENDOR_FAKE_SNAPSHOT_ZIP)
-
-$(call dist-for-goals, vendor-fake-snapshot, $(SOONG_VENDOR_FAKE_SNAPSHOT_ZIP):fake/$(notdir $(SOONG_VENDOR_FAKE_SNAPSHOT_ZIP)))
-
-else # BOARD_VNDK_VERSION is NOT set to 'current'
-
-.PHONY: vendor-snapshot
-vendor-snapshot: PRIVATE_MAKEFILE := $(current_makefile)
-vendor-snapshot:
-	$(call echo-error,$(PRIVATE_MAKEFILE),\
-		"CANNOT generate Vendor snapshot. BOARD_VNDK_VERSION must be set to 'current'.")
-	exit 1
-
-.PHONY: vendor-fake-snapshot
-vendor-fake-snapshot: PRIVATE_MAKEFILE := $(current_makefile)
-vendor-fake-snapshot:
-	$(call echo-error,$(PRIVATE_MAKEFILE),\
-		"CANNOT generate Vendor snapshot. BOARD_VNDK_VERSION must be set to 'current'.")
-	exit 1
-
-endif # BOARD_VNDK_VERSION
diff --git a/core/tasks/vts-core-tests.mk b/core/tasks/vts-core-tests.mk
index 1eeb078..11bb932 100644
--- a/core/tasks/vts-core-tests.mk
+++ b/core/tasks/vts-core-tests.mk
@@ -16,15 +16,6 @@
 test_suite_tradefed := vts-tradefed
 test_suite_readme := test/vts/tools/vts-core-tradefed/README
 
-include $(BUILD_SYSTEM)/tasks/tools/vts-kernel-tests.mk
-
-ltp_copy_pairs := \
-  $(call target-native-copy-pairs,$(kernel_ltp_modules),$(kernel_ltp_vts_out))
-
-copy_ltp_tests := $(call copy-many-files,$(ltp_copy_pairs))
-
-test_suite_extra_deps := $(copy_ltp_tests)
-
 include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
 
 .PHONY: vts
diff --git a/core/version_util.mk b/core/version_util.mk
index eb568be..0e34634 100644
--- a/core/version_util.mk
+++ b/core/version_util.mk
@@ -183,14 +183,17 @@
 endif
 .KATI_READONLY := PLATFORM_SECURITY_PATCH_TIMESTAMP
 
-ifndef PLATFORM_BASE_OS
-  # Used to indicate the base os applied to the device.
-  # Can be an arbitrary string, but must be a single word.
-  #
-  # If there is no $PLATFORM_BASE_OS set, keep it empty.
-  PLATFORM_BASE_OS :=
-endif
-.KATI_READONLY := PLATFORM_BASE_OS
+# PLATFORM_BASE_OS is used to indicate the base os applied
+# to the device. Can be an arbitrary string, but must be a
+# single word.
+#
+# If there is no $PLATFORM_BASE_OS set, keep it empty.
+#
+# PLATFORM_BASE_OS can either be set via an environment
+# variable, or set via the PRODUCT_BASE_OS product variable.
+PLATFORM_BASE_OS_ENV_INPUT := $(PLATFORM_BASE_OS)
+.KATI_READONLY := PLATFORM_BASE_OS_ENV_INPUT
+PLATFORM_BASE_OS :=
 
 ifndef BUILD_ID
   # Used to signify special builds.  E.g., branches and/or releases,
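
A usage sketch for the two input paths described in the comment above (the value shown is hypothetical; on the device the setting surfaces as the ro.build.version.base_os property):

    # Option 1: pass the base OS string through the environment for this build.
    export PLATFORM_BASE_OS="OPM1.171019.011"   # hypothetical value
    m droid
    # Option 2: set PRODUCT_BASE_OS in the product makefile instead.
    # After flashing, the value is visible as a system property:
    adb shell getprop ro.build.version.base_os
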
diff --git a/envsetup.sh b/envsetup.sh
index 06dadd3..554a220 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -362,7 +362,6 @@
       packages/modules/adb/adb.bash
       system/core/fastboot/fastboot.bash
       tools/asuite/asuite.sh
-      prebuilts/bazel/common/bazel-complete.bash
     )
     # Completion can be disabled selectively to allow users to use non-standard completion.
     # e.g.
@@ -442,6 +441,7 @@
 function lunch()
 {
     local answer
+    setup_cog_env_if_needed
 
     if [[ $# -gt 1 ]]; then
         echo "usage: lunch [target]" >&2
@@ -1079,10 +1079,7 @@
         done
     done
 
-    if [[ "${PWD}" == /google/cog/* ]]; then
-        f="build/make/cogsetup.sh"
-        echo "including $f"; . "$T/$f"
-    fi
+    setup_cog_env_if_needed
 }
 
 function showcommands() {
diff --git a/shell_utils.sh b/shell_utils.sh
index 86f3f49..9053c42 100644
--- a/shell_utils.sh
+++ b/shell_utils.sh
@@ -63,6 +63,70 @@
 }
 fi
 
+# This function sets up the build environment to be appropriate for Cog.
+function setup_cog_env_if_needed() {
+  local top=$(gettop)
+
+  # return early if not in a cog workspace
+  if [[ ! "$top" =~ ^/google/cog ]]; then
+    return 0
+  fi
+
+  setup_cog_symlink
+
+  export ANDROID_BUILD_ENVIRONMENT_CONFIG="googler-cog"
+
+  # Running the repo command within Cog workspaces is not supported, so override
+  # it with this function. If the user is running repo within a Cog workspace,
+  # we fail with an error; otherwise, we run the original repo command with
+  # the given args.
+  if ! ORIG_REPO_PATH=`which repo`; then
+    return 0
+  fi
+  function repo {
+    if [[ "${PWD}" == /google/cog/* ]]; then
+      echo -e "\e[01;31mERROR:\e[0mrepo command is disallowed within Cog workspaces."
+      kill -INT $$ # exits the script without exiting the user's shell
+    fi
+    ${ORIG_REPO_PATH} "$@"
+  }
+}
+
+# Creates a symlink for the out/ dir when inside a Cog workspace.
+function setup_cog_symlink() {
+  local out_dir=$(getoutdir)
+  local top=$(gettop)
+
+  # return early if out dir is already a symlink
+  if [[ -L "$out_dir" ]]; then
+    return 0
+  fi
+
+  # return early if out dir is not in the workspace
+  if [[ ! "$out_dir" =~ ^$top/ ]]; then
+    return 0
+  fi
+
+  local link_destination="${HOME}/.cog/android-build-out"
+
+  # remove existing out/ dir if it exists
+  if [[ -d "$out_dir" ]]; then
+    echo "Detected existing out/ directory in the Cog workspace which is not supported. Repairing workspace by removing it and creating the symlink to ~/.cog/android-build-out"
+    if ! rm -rf "$out_dir"; then
+      echo "Failed to remove existing out/ directory: $out_dir" >&2
+      kill -INT $$ # exits the script without exiting the user's shell
+    fi
+  fi
+
+  # create symlink
+  echo "Creating symlink: $out_dir -> $link_destination"
+  mkdir -p "${link_destination}"
+  if ! ln -s "$link_destination" "$out_dir"; then
+    echo "Failed to create cog symlink: $out_dir -> $link_destination" >&2
+    kill -INT $$ # exits the script without exiting the user's shell
+  fi
+}
+
 function getoutdir
 {
     local top=$(gettop)
@@ -114,11 +178,11 @@
         echo -n "${color_failed}#### failed to build some targets "
     fi
     if [ $hours -gt 0 ] ; then
-        printf "(%02g:%02g:%02g (hh:mm:ss))" $hours $mins $secs
+        printf "(%02d:%02d:%02d (hh:mm:ss))" $hours $mins $secs
     elif [ $mins -gt 0 ] ; then
-        printf "(%02g:%02g (mm:ss))" $mins $secs
+        printf "(%02d:%02d (mm:ss))" $mins $secs
     elif [ $secs -gt 0 ] ; then
-        printf "(%s seconds)" $secs
+        printf "(%d seconds)" $secs
     fi
     echo " ####${color_reset}"
     echo
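
For the printf change above, a quick side-by-side of the old and new format strings (the fields are integer counts, so %d is the natural conversion; %g treats its argument as a floating point value):

    hours=1; mins=2; secs=3
    printf "(%02g:%02g:%02g (hh:mm:ss))\n" $hours $mins $secs   # old format
    printf "(%02d:%02d:%02d (hh:mm:ss))\n" $hours $mins $secs   # new format
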
diff --git a/target/board/Android.mk b/target/board/android-info.mk
similarity index 70%
rename from target/board/Android.mk
rename to target/board/android-info.mk
index decc345..36be002 100644
--- a/target/board/Android.mk
+++ b/target/board/android-info.mk
@@ -51,30 +51,6 @@
 
 # Copy compatibility metadata to the device.
 
-# Device Manifest
-ifdef DEVICE_MANIFEST_FILE
-# $(DEVICE_MANIFEST_FILE) can be a list of files
-include $(CLEAR_VARS)
-LOCAL_MODULE        := vendor_manifest.xml
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 legacy_not_a_contribution
-LOCAL_LICENSE_CONDITIONS := by_exception_only not_allowed notice
-LOCAL_MODULE_STEM   := manifest.xml
-LOCAL_MODULE_CLASS  := ETC
-LOCAL_MODULE_PATH   := $(TARGET_OUT_VENDOR)/etc/vintf
-
-GEN := $(local-generated-sources-dir)/manifest.xml
-$(GEN): PRIVATE_DEVICE_MANIFEST_FILE := $(DEVICE_MANIFEST_FILE)
-$(GEN): $(DEVICE_MANIFEST_FILE) $(HOST_OUT_EXECUTABLES)/assemble_vintf
-	BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) \
-	PRODUCT_ENFORCE_VINTF_MANIFEST=$(PRODUCT_ENFORCE_VINTF_MANIFEST) \
-	PRODUCT_SHIPPING_API_LEVEL=$(PRODUCT_SHIPPING_API_LEVEL) \
-	$(HOST_OUT_EXECUTABLES)/assemble_vintf -o $@ \
-		-i $(call normalize-path-list,$(PRIVATE_DEVICE_MANIFEST_FILE))
-
-LOCAL_PREBUILT_MODULE_FILE := $(GEN)
-include $(BUILD_PREBUILT)
-endif
-
 # DEVICE_MANIFEST_SKUS: a list of SKUS where DEVICE_MANIFEST_<sku>_FILES is defined.
 ifdef DEVICE_MANIFEST_SKUS
 
@@ -99,7 +75,6 @@
 $$(GEN): $$(my_fragment_files) $$(HOST_OUT_EXECUTABLES)/assemble_vintf
 	BOARD_SEPOLICY_VERS=$$(BOARD_SEPOLICY_VERS) \
 	PRODUCT_ENFORCE_VINTF_MANIFEST=$$(PRODUCT_ENFORCE_VINTF_MANIFEST) \
-	PRODUCT_SHIPPING_API_LEVEL=$$(PRODUCT_SHIPPING_API_LEVEL) \
 	$$(HOST_OUT_EXECUTABLES)/assemble_vintf -o $$@ \
 		-i $$(call normalize-path-list,$$(PRIVATE_SRC_FILES))
 
@@ -114,30 +89,6 @@
 
 endif # DEVICE_MANIFEST_SKUS
 
-# ODM manifest
-ifdef ODM_MANIFEST_FILES
-# ODM_MANIFEST_FILES is a list of files that is combined and installed as the default ODM manifest.
-include $(CLEAR_VARS)
-LOCAL_MODULE := odm_manifest.xml
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 legacy_not_a_contribution
-LOCAL_LICENSE_CONDITIONS := by_exception_only not_allowed notice
-LOCAL_MODULE_STEM := manifest.xml
-LOCAL_MODULE_CLASS := ETC
-LOCAL_MODULE_RELATIVE_PATH := vintf
-LOCAL_ODM_MODULE := true
-
-GEN := $(local-generated-sources-dir)/manifest.xml
-$(GEN): PRIVATE_SRC_FILES := $(ODM_MANIFEST_FILES)
-$(GEN): $(ODM_MANIFEST_FILES) $(HOST_OUT_EXECUTABLES)/assemble_vintf
-	# Set VINTF_IGNORE_TARGET_FCM_VERSION to true because it should only be in device manifest.
-	VINTF_IGNORE_TARGET_FCM_VERSION=true \
-	$(HOST_OUT_EXECUTABLES)/assemble_vintf -o $@ \
-		-i $(call normalize-path-list,$(PRIVATE_SRC_FILES))
-
-LOCAL_PREBUILT_MODULE_FILE := $(GEN)
-include $(BUILD_PREBUILT)
-endif # ODM_MANIFEST_FILES
-
 # ODM_MANIFEST_SKUS: a list of SKUS where ODM_MANIFEST_<sku>_FILES are defined.
 ifdef ODM_MANIFEST_SKUS
 
diff --git a/target/product/base_product.mk b/target/product/base_product.mk
index 0ac220b..acfc653 100644
--- a/target/product/base_product.mk
+++ b/target/product/base_product.mk
@@ -25,3 +25,8 @@
     product_compatibility_matrix.xml \
     product_manifest.xml \
     selinux_policy_product \
+    product-build.prop \
+
+# Packages included only for eng or userdebug builds, previously debug tagged
+PRODUCT_PACKAGES_DEBUG += \
+    adb_keys \
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 9a6746f..3a7256e 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -18,6 +18,7 @@
 PRODUCT_PACKAGES += \
     abx \
     aconfigd \
+    aconfigd-system \
     adbd_system_api \
     aflags \
     am \
@@ -96,6 +97,7 @@
     enhanced-confirmation.xml \
     ExtShared \
     flags_health_check \
+    framework-connectivity-b \
     framework-graphics \
     framework-location \
     framework-minus-apex \
@@ -211,6 +213,7 @@
     libwilhelm \
     linker \
     llkd \
+    llndk_libs \
     lmkd \
     LocalTransport \
     locksettings \
@@ -274,11 +277,11 @@
     Shell \
     shell_and_utilities_system \
     sm \
-    snapshotctl \
     snapuserd \
     storaged \
     surfaceflinger \
     svc \
+    system-build.prop \
     task_profiles.json \
     tc \
     telecom \
@@ -286,6 +289,7 @@
     tombstoned \
     traced \
     traced_probes \
+    tradeinmode \
     tune2fs \
     uiautomator \
     uinput \
@@ -344,6 +348,16 @@
         com.android.webview.bootstrap
 endif
 
+ifneq (,$(RELEASE_RANGING_STACK))
+    PRODUCT_PACKAGES += \
+        com.android.ranging
+endif
+
+ifeq ($(RELEASE_MEMORY_MANAGEMENT_DAEMON),true)
+  PRODUCT_PACKAGES += \
+        mm_daemon
+endif
+
 # VINTF data for system image
 PRODUCT_PACKAGES += \
     system_manifest.xml \
@@ -402,7 +416,6 @@
     BugReport \
     adb \
     adevice \
-    art-tools \
     atest \
     bcc \
     bit \
@@ -419,6 +432,7 @@
     lpdump \
     mke2fs \
     mkfs.erofs \
+    pbtombstone \
     resize2fs \
     sgdisk \
     sqlite3 \
@@ -433,6 +447,21 @@
     tz_version_host \
     tz_version_host_tzdata_apex \
 
+# For art-tools, if the dependencies have changed, please sync them to art/Android.bp as well.
+PRODUCT_HOST_PACKAGES += \
+    ahat \
+    dexdump \
+    hprof-conv
+# A subset of the tools is disabled when HOST_PREFER_32_BIT is defined, because make reports
+# that they are not supported on the host (b/129323791). This is likely due to art_apex
+# disabling host APEX builds when HOST_PREFER_32_BIT is set (b/120617876).
+ifneq ($(HOST_PREFER_32_BIT),true)
+PRODUCT_HOST_PACKAGES += \
+    dexlist \
+    oatdump
+endif
+
+
 PRODUCT_PACKAGES += init.usb.rc init.usb.configfs.rc
 
 PRODUCT_PACKAGES += etc_hosts
@@ -450,7 +479,6 @@
 
 # Packages included only for eng or userdebug builds, previously debug tagged
 PRODUCT_PACKAGES_DEBUG := \
-    adb_keys \
     adevice_fingerprint \
     arping \
     dmuserd \
@@ -472,6 +500,7 @@
     record_binder \
     servicedispatcher \
     showmap \
+    snapshotctl \
     sqlite3 \
     ss \
     start_with_lockagent \
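
To confirm which of the conditionally added packages end up in a given product, the get_build_var helper from envsetup.sh can be queried after lunching; a sketch (the grep pattern is illustrative):

    # Show whether the flag-gated packages were pulled into this product.
    get_build_var PRODUCT_PACKAGES | tr ' ' '\n' | \
        grep -E '^(mm_daemon|com\.android\.ranging|tradeinmode|snapshotctl)$'
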
diff --git a/target/product/base_system_ext.mk b/target/product/base_system_ext.mk
index 92ca227..febe537 100644
--- a/target/product/base_system_ext.mk
+++ b/target/product/base_system_ext.mk
@@ -24,6 +24,7 @@
     SatelliteClient \
     selinux_policy_system_ext \
     system_ext_manifest.xml \
+    system_ext-build.prop \
 
 # Base modules when shipping api level is less than or equal to 34
 PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34 += \
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index 1854f97..16fc7fd 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -17,7 +17,6 @@
 # Base modules and settings for recovery.
 PRODUCT_PACKAGES += \
     adbd.recovery \
-    android.hardware.health@2.0-impl-default.recovery \
     build_flag_vendor \
     cgroups.recovery.json \
     charger.recovery \
@@ -72,7 +71,15 @@
     passwd_odm \
     passwd_vendor \
     selinux_policy_nonsystem \
+    selinux_policy_vendor \
+    selinux_policy_odm \
     shell_and_utilities_vendor \
+    odm-build.prop \
+
+# libhealthloop BPF filter. This is in base_vendor.mk because libhealthloop must
+# be a static library and because the Android build system ignores 'required'
+# sections for static libraries.
+PRODUCT_PACKAGES += filterPowerSupplyEvents.o
 
 # Base modules when shipping api level is less than or equal to 34
 PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34 += \
@@ -99,9 +106,16 @@
 # VINTF data for vendor image
 PRODUCT_PACKAGES += \
     vendor_compatibility_matrix.xml \
+    vendor_manifest.xml \
 
 # Base modules and settings for the debug ramdisk, which is then packed
 # into a boot-debug.img and a vendor_boot-debug.img.
 PRODUCT_PACKAGES += \
     adb_debug.prop \
     userdebug_plat_sepolicy.cil
+
+# On eng or userdebug builds, build in perf-setup-sh by default.
+ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
+PRODUCT_PACKAGES += \
+    perf-setup-sh
+endif
diff --git a/target/product/build_variables.mk b/target/product/build_variables.mk
index 5fe5333..7661e06 100644
--- a/target/product/build_variables.mk
+++ b/target/product/build_variables.mk
@@ -17,5 +17,11 @@
 # This file contains the trunk-stable flags that should be exported to all
 # Android targets.
 
+# Control libbinder client caching
+$(call soong_config_set, libbinder, release_libbinder_client_cache, $(RELEASE_LIBBINDER_CLIENT_CACHE))
+
 # Use the configured release of sqlite
 $(call soong_config_set, libsqlite3, release_package_libsqlite3, $(RELEASE_PACKAGE_LIBSQLITE3))
+
+# Use the configured MessageQueue implementation
+$(call soong_config_set, messagequeue, release_package_messagequeue_implementation, $(RELEASE_PACKAGE_MESSAGEQUEUE_IMPLEMENTATION))
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 1a3f2cf..83d9215 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -51,6 +51,7 @@
     framework-minus-apex \
     framework-graphics \
     framework-location \
+    framework-connectivity-b \
     ext \
     telephony-common \
     voip-common \
@@ -76,6 +77,7 @@
     com.android.mediaprovider:framework-mediaprovider \
     com.android.mediaprovider:framework-pdf \
     com.android.mediaprovider:framework-pdf-v \
+    com.android.mediaprovider:framework-photopicker \
     com.android.ondevicepersonalization:framework-ondevicepersonalization \
     com.android.os.statsd:framework-statsd \
     com.android.permission:framework-permission \
@@ -113,6 +115,12 @@
 
 endif
 
+ifneq (,$(RELEASE_RANGING_STACK))
+    PRODUCT_APEX_BOOT_JARS += \
+        com.android.uwb:framework-ranging \
+    $(call soong_config_set,bootclasspath,release_ranging_stack,true)
+endif
+
 # List of system_server classpath jars delivered via apex.
 # Keep the list sorted by module names and then library names.
 # Note: For modules available in Q, DO NOT add new entries here.
@@ -168,6 +176,11 @@
 
 endif
 
+ifneq (,$(RELEASE_RANGING_STACK))
+    PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS += \
+        com.android.uwb:service-ranging
+endif
+
 # Overrides the (apex, jar) pairs above when determining the on-device location. The format is:
 # <old_apex>:<old_jar>:<new_apex>:<new_jar>
 PRODUCT_CONFIGURED_JAR_LOCATION_OVERRIDES := \
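
Entries in PRODUCT_APEX_BOOT_JARS and the override list above use the colon-separated forms documented in the comments; a small sketch splitting one pair (the entry value is taken from this diff, the variable names are illustrative):

    entry="com.android.uwb:framework-ranging"
    apex="${entry%%:*}"   # -> com.android.uwb
    jar="${entry#*:}"     # -> framework-ranging
    echo "apex=$apex jar=$jar"
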
diff --git a/target/product/generic/Android.bp b/target/product/generic/Android.bp
new file mode 100644
index 0000000..5451042
--- /dev/null
+++ b/target/product/generic/Android.bp
@@ -0,0 +1,887 @@
+generic_rootdirs = [
+    "acct",
+    "apex",
+    "bootstrap-apex",
+    "config",
+    "data",
+    "data_mirror",
+    "debug_ramdisk",
+    "dev",
+    "linkerconfig",
+    "metadata",
+    "mnt",
+    "odm",
+    "odm_dlkm",
+    "oem",
+    "postinstall",
+    "proc",
+    "second_stage_resources",
+    "storage",
+    "sys",
+    "system",
+    "system_dlkm",
+    "tmp",
+    "vendor",
+    "vendor_dlkm",
+]
+
+android_rootdirs = [
+    "system_ext",
+    "product",
+]
+
+generic_symlinks = [
+    {
+        target: "/system/bin/init",
+        name: "init",
+    },
+    {
+        target: "/system/etc",
+        name: "etc",
+    },
+    {
+        target: "/system/bin",
+        name: "bin",
+    },
+    {
+        target: "/vendor",
+        name: "system/vendor",
+    },
+    {
+        target: "/system_dlkm/lib/modules",
+        name: "system/lib/modules",
+    },
+    {
+        target: "/data/user_de/0/com.android.shell/files/bugreports",
+        name: "bugreports",
+    },
+    {
+        target: "/sys/kernel/debug",
+        name: "d",
+    },
+    {
+        target: "/storage/self/primary",
+        name: "sdcard",
+    },
+    {
+        target: "/product/etc/security/adb_keys",
+        name: "adb_keys",
+    },
+    // For Treble Generic System Image (GSI), a system-as-root GSI needs to work on devices both
+    // with and without an /odm partition. These symlinks are for devices without an /odm
+    // partition. On devices with an /odm partition, mounting odm.img on /odm hides these symlinks.
+    {
+        target: "/vendor/odm/app",
+        name: "odm/app",
+    },
+    {
+        target: "/vendor/odm/bin",
+        name: "odm/bin",
+    },
+    {
+        target: "/vendor/odm/etc",
+        name: "odm/etc",
+    },
+    {
+        target: "/vendor/odm/firmware",
+        name: "odm/firmware",
+    },
+    {
+        target: "/vendor/odm/framework",
+        name: "odm/framework",
+    },
+    {
+        target: "/vendor/odm/lib",
+        name: "odm/lib",
+    },
+    {
+        target: "/vendor/odm/lib64",
+        name: "odm/lib64",
+    },
+    {
+        target: "/vendor/odm/overlay",
+        name: "odm/overlay",
+    },
+    {
+        target: "/vendor/odm/priv-app",
+        name: "odm/priv-app",
+    },
+    {
+        target: "/vendor/odm/usr",
+        name: "odm/usr",
+    },
+]
+
+android_symlinks = [
+    {
+        target: "/product",
+        name: "system/product",
+    },
+    {
+        target: "/system_ext",
+        name: "system/system_ext",
+    },
+    {
+        target: "/data/cache",
+        name: "cache",
+    },
+]
+
+filegroup {
+    name: "generic_system_sign_key",
+    srcs: [":avb_testkey_rsa4096"],
+}
+
+phony {
+    name: "generic_system_fonts",
+    required: [
+        "AndroidClock.ttf",
+        "CarroisGothicSC-Regular.ttf",
+        "ComingSoon.ttf",
+        "CutiveMono.ttf",
+        "DancingScript-Regular.ttf",
+        "DroidSansMono.ttf",
+        "NotoColorEmoji.ttf",
+        "NotoColorEmojiFlags.ttf",
+        "NotoNaskhArabic-Bold.ttf",
+        "NotoNaskhArabic-Regular.ttf",
+        "NotoNaskhArabicUI-Bold.ttf",
+        "NotoNaskhArabicUI-Regular.ttf",
+        "NotoSansAdlam-VF.ttf",
+        "NotoSansAhom-Regular.otf",
+        "NotoSansAnatolianHieroglyphs-Regular.otf",
+        "NotoSansArmenian-VF.ttf",
+        "NotoSansAvestan-Regular.ttf",
+        "NotoSansBalinese-Regular.ttf",
+        "NotoSansBamum-Regular.ttf",
+        "NotoSansBassaVah-Regular.otf",
+        "NotoSansBatak-Regular.ttf",
+        "NotoSansBengali-VF.ttf",
+        "NotoSansBengaliUI-VF.ttf",
+        "NotoSansBhaiksuki-Regular.otf",
+        "NotoSansBrahmi-Regular.ttf",
+        "NotoSansBuginese-Regular.ttf",
+        "NotoSansBuhid-Regular.ttf",
+        "NotoSansCJK-Regular.ttc",
+        "NotoSansCanadianAboriginal-Regular.ttf",
+        "NotoSansCarian-Regular.ttf",
+        "NotoSansChakma-Regular.otf",
+        "NotoSansCham-Bold.ttf",
+        "NotoSansCham-Regular.ttf",
+        "NotoSansCherokee-Regular.ttf",
+        "NotoSansCoptic-Regular.ttf",
+        "NotoSansCuneiform-Regular.ttf",
+        "NotoSansCypriot-Regular.ttf",
+        "NotoSansDeseret-Regular.ttf",
+        "NotoSansDevanagari-VF.ttf",
+        "NotoSansDevanagariUI-VF.ttf",
+        "NotoSansEgyptianHieroglyphs-Regular.ttf",
+        "NotoSansElbasan-Regular.otf",
+        "NotoSansEthiopic-VF.ttf",
+        "NotoSansGeorgian-VF.ttf",
+        "NotoSansGlagolitic-Regular.ttf",
+        "NotoSansGothic-Regular.ttf",
+        "NotoSansGrantha-Regular.ttf",
+        "NotoSansGujarati-Bold.ttf",
+        "NotoSansGujarati-Regular.ttf",
+        "NotoSansGujaratiUI-Bold.ttf",
+        "NotoSansGujaratiUI-Regular.ttf",
+        "NotoSansGunjalaGondi-Regular.otf",
+        "NotoSansGurmukhi-VF.ttf",
+        "NotoSansGurmukhiUI-VF.ttf",
+        "NotoSansHanifiRohingya-Regular.otf",
+        "NotoSansHanunoo-Regular.ttf",
+        "NotoSansHatran-Regular.otf",
+        "NotoSansHebrew-Bold.ttf",
+        "NotoSansHebrew-Regular.ttf",
+        "NotoSansImperialAramaic-Regular.ttf",
+        "NotoSansInscriptionalPahlavi-Regular.ttf",
+        "NotoSansInscriptionalParthian-Regular.ttf",
+        "NotoSansJavanese-Regular.otf",
+        "NotoSansKaithi-Regular.ttf",
+        "NotoSansKannada-VF.ttf",
+        "NotoSansKannadaUI-VF.ttf",
+        "NotoSansKayahLi-Regular.ttf",
+        "NotoSansKharoshthi-Regular.ttf",
+        "NotoSansKhmer-VF.ttf",
+        "NotoSansKhmerUI-Bold.ttf",
+        "NotoSansKhmerUI-Regular.ttf",
+        "NotoSansKhojki-Regular.otf",
+        "NotoSansLao-Bold.ttf",
+        "NotoSansLao-Regular.ttf",
+        "NotoSansLaoUI-Bold.ttf",
+        "NotoSansLaoUI-Regular.ttf",
+        "NotoSansLepcha-Regular.ttf",
+        "NotoSansLimbu-Regular.ttf",
+        "NotoSansLinearA-Regular.otf",
+        "NotoSansLinearB-Regular.ttf",
+        "NotoSansLisu-Regular.ttf",
+        "NotoSansLycian-Regular.ttf",
+        "NotoSansLydian-Regular.ttf",
+        "NotoSansMalayalam-VF.ttf",
+        "NotoSansMalayalamUI-VF.ttf",
+        "NotoSansMandaic-Regular.ttf",
+        "NotoSansManichaean-Regular.otf",
+        "NotoSansMarchen-Regular.otf",
+        "NotoSansMasaramGondi-Regular.otf",
+        "NotoSansMedefaidrin-VF.ttf",
+        "NotoSansMeeteiMayek-Regular.ttf",
+        "NotoSansMeroitic-Regular.otf",
+        "NotoSansMiao-Regular.otf",
+        "NotoSansModi-Regular.ttf",
+        "NotoSansMongolian-Regular.ttf",
+        "NotoSansMro-Regular.otf",
+        "NotoSansMultani-Regular.otf",
+        "NotoSansMyanmar-Bold.otf",
+        "NotoSansMyanmar-Medium.otf",
+        "NotoSansMyanmar-Regular.otf",
+        "NotoSansMyanmarUI-Bold.otf",
+        "NotoSansMyanmarUI-Medium.otf",
+        "NotoSansMyanmarUI-Regular.otf",
+        "NotoSansNKo-Regular.ttf",
+        "NotoSansNabataean-Regular.otf",
+        "NotoSansNewTaiLue-Regular.ttf",
+        "NotoSansNewa-Regular.otf",
+        "NotoSansOgham-Regular.ttf",
+        "NotoSansOlChiki-Regular.ttf",
+        "NotoSansOldItalic-Regular.ttf",
+        "NotoSansOldNorthArabian-Regular.otf",
+        "NotoSansOldPermic-Regular.otf",
+        "NotoSansOldPersian-Regular.ttf",
+        "NotoSansOldSouthArabian-Regular.ttf",
+        "NotoSansOldTurkic-Regular.ttf",
+        "NotoSansOriya-Bold.ttf",
+        "NotoSansOriya-Regular.ttf",
+        "NotoSansOriyaUI-Bold.ttf",
+        "NotoSansOriyaUI-Regular.ttf",
+        "NotoSansOsage-Regular.ttf",
+        "NotoSansOsmanya-Regular.ttf",
+        "NotoSansPahawhHmong-Regular.otf",
+        "NotoSansPalmyrene-Regular.otf",
+        "NotoSansPauCinHau-Regular.otf",
+        "NotoSansPhagsPa-Regular.ttf",
+        "NotoSansPhoenician-Regular.ttf",
+        "NotoSansRejang-Regular.ttf",
+        "NotoSansRunic-Regular.ttf",
+        "NotoSansSamaritan-Regular.ttf",
+        "NotoSansSaurashtra-Regular.ttf",
+        "NotoSansSharada-Regular.otf",
+        "NotoSansShavian-Regular.ttf",
+        "NotoSansSinhala-VF.ttf",
+        "NotoSansSinhalaUI-VF.ttf",
+        "NotoSansSoraSompeng-Regular.otf",
+        "NotoSansSoyombo-VF.ttf",
+        "NotoSansSundanese-Regular.ttf",
+        "NotoSansSylotiNagri-Regular.ttf",
+        "NotoSansSymbols-Regular-Subsetted.ttf",
+        "NotoSansSymbols-Regular-Subsetted2.ttf",
+        "NotoSansSyriacEastern-Regular.ttf",
+        "NotoSansSyriacEstrangela-Regular.ttf",
+        "NotoSansSyriacWestern-Regular.ttf",
+        "NotoSansTagalog-Regular.ttf",
+        "NotoSansTagbanwa-Regular.ttf",
+        "NotoSansTaiLe-Regular.ttf",
+        "NotoSansTaiTham-Regular.ttf",
+        "NotoSansTaiViet-Regular.ttf",
+        "NotoSansTakri-VF.ttf",
+        "NotoSansTamil-VF.ttf",
+        "NotoSansTamilUI-VF.ttf",
+        "NotoSansTelugu-VF.ttf",
+        "NotoSansTeluguUI-VF.ttf",
+        "NotoSansThaana-Bold.ttf",
+        "NotoSansThaana-Regular.ttf",
+        "NotoSansThai-Bold.ttf",
+        "NotoSansThai-Regular.ttf",
+        "NotoSansThaiUI-Bold.ttf",
+        "NotoSansThaiUI-Regular.ttf",
+        "NotoSansTifinagh-Regular.otf",
+        "NotoSansUgaritic-Regular.ttf",
+        "NotoSansVai-Regular.ttf",
+        "NotoSansWancho-Regular.otf",
+        "NotoSansWarangCiti-Regular.otf",
+        "NotoSansYi-Regular.ttf",
+        "NotoSerif-Bold.ttf",
+        "NotoSerif-BoldItalic.ttf",
+        "NotoSerif-Italic.ttf",
+        "NotoSerif-Regular.ttf",
+        "NotoSerifArmenian-VF.ttf",
+        "NotoSerifBengali-VF.ttf",
+        "NotoSerifCJK-Regular.ttc",
+        "NotoSerifDevanagari-VF.ttf",
+        "NotoSerifDogra-Regular.ttf",
+        "NotoSerifEthiopic-VF.ttf",
+        "NotoSerifGeorgian-VF.ttf",
+        "NotoSerifGujarati-VF.ttf",
+        "NotoSerifGurmukhi-VF.ttf",
+        "NotoSerifHebrew-Bold.ttf",
+        "NotoSerifHebrew-Regular.ttf",
+        "NotoSerifHentaigana.ttf",
+        "NotoSerifKannada-VF.ttf",
+        "NotoSerifKhmer-Bold.otf",
+        "NotoSerifKhmer-Regular.otf",
+        "NotoSerifLao-Bold.ttf",
+        "NotoSerifLao-Regular.ttf",
+        "NotoSerifMalayalam-VF.ttf",
+        "NotoSerifMyanmar-Bold.otf",
+        "NotoSerifMyanmar-Regular.otf",
+        "NotoSerifNyiakengPuachueHmong-VF.ttf",
+        "NotoSerifSinhala-VF.ttf",
+        "NotoSerifTamil-VF.ttf",
+        "NotoSerifTelugu-VF.ttf",
+        "NotoSerifThai-Bold.ttf",
+        "NotoSerifThai-Regular.ttf",
+        "NotoSerifTibetan-VF.ttf",
+        "NotoSerifYezidi-VF.ttf",
+        "Roboto-Regular.ttf",
+        "RobotoFlex-Regular.ttf",
+        "RobotoStatic-Regular.ttf",
+        "SourceSansPro-Bold.ttf",
+        "SourceSansPro-BoldItalic.ttf",
+        "SourceSansPro-Italic.ttf",
+        "SourceSansPro-Regular.ttf",
+        "SourceSansPro-SemiBold.ttf",
+        "SourceSansPro-SemiBoldItalic.ttf",
+        "font_fallback.xml",
+        "fonts.xml",
+    ],
+}
+
+android_filesystem_defaults {
+    name: "system_image_defaults",
+    partition_name: "system",
+    base_dir: "system",
+    dirs: generic_rootdirs,
+    symlinks: generic_symlinks,
+    file_contexts: ":plat_file_contexts",
+    linker_config: {
+        gen_linker_config: true,
+        linker_config_srcs: [":system_linker_config_json_file"],
+    },
+    fsverity: {
+        inputs: [
+            "etc/boot-image.prof",
+            "etc/classpaths/*.pb",
+            "etc/dirty-image-objects",
+            "etc/preloaded-classes",
+            "framework/*",
+            "framework/*/*", // framework/{arch}
+            "framework/oat/*/*", // framework/oat/{arch}
+        ],
+        libs: [":framework-res{.export-package.apk}"],
+    },
+    build_logtags: true,
+    gen_aconfig_flags_pb: true,
+
+    compile_multilib: "both",
+
+    use_avb: true,
+    avb_private_key: ":generic_system_sign_key",
+    avb_algorithm: "SHA256_RSA4096",
+    avb_hash_algorithm: "sha256",
+
+    deps: [
+        "abx",
+        "aconfigd",
+        "aconfigd-system",
+        "aflags",
+        "am",
+        "android.software.credentials.prebuilt.xml", // generic_system
+        "android.software.webview.prebuilt.xml", // media_system
+        "android.software.window_magnification.prebuilt.xml", // handheld_system
+        "android.system.suspend-service",
+        "apexd",
+        "appops",
+        "approved-ogki-builds.xml", // base_system
+        "appwidget",
+        "atrace",
+        "audioserver",
+        "bcc",
+        "blank_screen",
+        "blkid",
+        "bmgr",
+        "bootanimation",
+        "bootstat",
+        "bpfloader",
+        "bu",
+        "bugreport",
+        "bugreportz",
+        "cameraserver",
+        "cgroups.json",
+        "cmd",
+        "content",
+        "cppreopts.sh", // generic_system
+        "credstore",
+        "debuggerd",
+        "device_config",
+        "dirty-image-objects",
+        "dmctl",
+        "dmesgd",
+        "dnsmasq",
+        "dpm",
+        "dump.erofs",
+        "dumpstate",
+        "dumpsys",
+        "e2fsck",
+        "enhanced-confirmation.xml", // base_system
+        "etc_hosts",
+        "flags_health_check",
+        "framework-audio_effects.xml", // for handheld // handheld_system
+        "framework-sysconfig.xml",
+        "fs_config_dirs_system",
+        "fs_config_files_system",
+        "fsck.erofs",
+        "fsck.f2fs", // for media_system
+        "fsck_msdos",
+        "fsverity-release-cert-der",
+        "gatekeeperd",
+        "gpu_counter_producer",
+        "gpuservice",
+        "group_system",
+        "gsi_tool",
+        "gsid",
+        "heapprofd",
+        "hid",
+        "hiddenapi-package-whitelist.xml", // from runtime_libart
+        "idc_data",
+        "idmap2",
+        "idmap2d",
+        "ime",
+        "incident",
+        "incident-helper-cmd",
+        "incident_helper",
+        "incidentd",
+        "init.environ.rc-soong",
+        "init.usb.configfs.rc",
+        "init.usb.rc",
+        "init.zygote32.rc",
+        "init.zygote64.rc",
+        "init.zygote64_32.rc",
+        "initial-package-stopped-states.xml",
+        "input",
+        "installd",
+        "ip", // base_system
+        "iptables",
+        "kcmdlinectrl",
+        "kernel-lifetimes.xml", // base_system
+        "keychars_data",
+        "keylayout_data",
+        "keystore2",
+        "ld.mc",
+        "llkd", // base_system
+        "lmkd", // base_system
+        "locksettings", // base_system
+        "logcat", // base_system
+        "logd", // base_system
+        "logpersist.start",
+        "lpdump", // base_system
+        "lshal", // base_system
+        "make_f2fs", // media_system
+        "mdnsd", // base_system
+        "media_profiles_V1_0.dtd", // base_system
+        "mediacodec.policy", // base_system
+        "mediaextractor", // base_system
+        "mediametrics", // base_system
+        "misctrl", // from base_system
+        "mke2fs", // base_system
+        "mkfs.erofs", // base_system
+        "monkey", // base_system
+        "mtectrl", // base_system
+        "ndc", // base_system
+        "netd", // base_system
+        "netutils-wrapper-1.0", // full_base
+        "notice_xml_system",
+        "odsign", // base_system
+        "otapreopt_script", // generic_system
+        "package-shareduid-allowlist.xml", // base_system
+        "passwd_system", // base_system
+        "perfetto", // base_system
+        "ping", // base_system
+        "ping6", // base_system
+        "pintool", // base_system
+        "platform.xml", // base_system
+        "pm", // base_system
+        "preinstalled-packages-asl-files.xml", // base_system
+        "preinstalled-packages-platform-generic-system.xml", // generic_system
+        "preinstalled-packages-platform-handheld-system.xml", // handheld_system
+        "preinstalled-packages-platform.xml", // base_system
+        "preinstalled-packages-strict-signature.xml", // base_system
+        "preloaded-classes", // ok
+        "printflags", // base_system
+        "privapp-permissions-platform.xml", // base_system
+        "prng_seeder", // base_system
+        "public.libraries.android.txt",
+        "recovery-persist", // base_system
+        "recovery-refresh", // generic_system
+        "requestsync", // media_system
+        "resize2fs", // base_system
+        "rss_hwm_reset", // base_system
+        "run-as", // base_system
+        "schedtest", // base_system
+        "screencap", // base_system
+        "screenrecord", // handheld_system
+        "sdcard", // base_system
+        "secdiscard", // base_system
+        "sensorservice", // base_system
+        "service", // base_system
+        "servicemanager", // base_system
+        "settings", // base_system
+        "sfdo", // base_system
+        "sgdisk", // base_system
+        "sm", // base_system
+        "snapshotctl", // base_system
+        "snapuserd", // base_system
+        "storaged", // base_system
+        "surfaceflinger", // base_system
+        "svc", // base_system
+        "system_manifest.xml", // base_system
+        "task_profiles.json", // base_system
+        "tc", // base_system
+        "telecom", // base_system
+        "tombstoned", // base_system
+        "traced", // base_system
+        "traced_probes", // base_system
+        "tradeinmode", // base_system
+        "tune2fs", // base_system
+        "uiautomator", // base_system
+        "uinput", // base_system
+        "uncrypt", // base_system
+        "update_engine", // generic_system
+        "update_engine_sideload", // recovery
+        "update_verifier", // generic_system
+        "usbd", // base_system
+        "vdc", // base_system
+        "virtual_camera", // handheld_system // release_package_virtual_camera
+        "vold", // base_system
+        "vr", // handheld_system
+        "watchdogd", // base_system
+        "wifi.rc", // base_system
+        "wificond", // base_system
+        "wm", // base_system
+    ] + select(release_flag("RELEASE_PLATFORM_VERSION_CODENAME"), {
+        "REL": [],
+        default: [
+            "android.software.preview_sdk.prebuilt.xml", // media_system
+        ],
+    }) + select(soong_config_variable("ANDROID", "release_package_profiling_module"), {
+        "true": [
+            "trace_redactor", // base_system (RELEASE_PACKAGE_PROFILING_MODULE)
+        ],
+        default: [],
+    }) + select(release_flag("RELEASE_MEMORY_MANAGEMENT_DAEMON"), {
+        true: [
+            "mm_daemon", // base_system (RELEASE_MEMORY_MANAGEMENT_DAEMON)
+        ],
+        default: [],
+    }) + select(product_variable("debuggable"), {
+        true: [
+            "adevice_fingerprint",
+            "arping",
+            "avbctl",
+            "bootctl",
+            "dmuserd",
+            "evemu-record",
+            "idlcli",
+            "init-debug.rc",
+            "iotop",
+            "iperf3",
+            "iw",
+            "layertracegenerator",
+            "logtagd.rc",
+            "ot-cli-ftd",
+            "ot-ctl",
+            "procrank",
+            "profcollectctl",
+            "profcollectd",
+            "record_binder",
+            "sanitizer-status",
+            "servicedispatcher",
+            "showmap",
+            "sqlite3",
+            "ss",
+            "start_with_lockagent",
+            "strace",
+            "su",
+            "tinycap",
+            "tinyhostless",
+            "tinymix",
+            "tinypcminfo",
+            "tinyplay", // host
+            "tracepath",
+            "tracepath6",
+            "traceroute6",
+            "unwind_info",
+            "unwind_reg_info",
+            "unwind_symbols",
+            "update_engine_client",
+        ],
+        default: [],
+    }),
+    multilib: {
+        common: {
+            deps: [
+                "BackupRestoreConfirmation", // base_system
+                "BasicDreams", // handheld_system
+                "BlockedNumberProvider", // handheld_system
+                "BluetoothMidiService", // handheld_system
+                "BookmarkProvider", // handheld_system
+                "BuiltInPrintService", // handheld_system
+                "CalendarProvider", // handheld_system
+                "CallLogBackup", // telephony_system
+                "CameraExtensionsProxy", // handheld_system
+                "CaptivePortalLogin", // handheld_system
+                "CarrierDefaultApp", // telephony_system
+                "CellBroadcastLegacyApp", // telephony_system
+                "CertInstaller", // handheld_system
+                "CompanionDeviceManager", // media_system
+                "ContactsProvider", // base_system
+                "CredentialManager", // handheld_system
+                "DeviceAsWebcam", // handheld_system
+                "DeviceDiagnostics", // handheld_system - internal
+                "DocumentsUI", // handheld_system
+                "DownloadProvider", // base_system
+                "DownloadProviderUi", // handheld_system
+                "DynamicSystemInstallationService", // base_system
+                "E2eeContactKeysProvider", // base_system
+                "EasterEgg", // handheld_system
+                "ExtShared", // base_system
+                "ExternalStorageProvider", // handheld_system
+                "FusedLocation", // handheld_system
+                "HTMLViewer", // media_system
+                "InputDevices", // handheld_system
+                "IntentResolver", // base_system
+                "KeyChain", // handheld_system
+                "LiveWallpapersPicker", // generic_system, full_base
+                "LocalTransport", // base_system
+                "ManagedProvisioning", // handheld_system
+                "MediaProviderLegacy", // base_system
+                "MmsService", // handheld_system
+                "MtpService", // handheld_system
+                "MusicFX", // handheld_system
+                "NetworkStack", // base_system
+                "ONS", // telephony_system
+                "PacProcessor", // handheld_system
+                "PackageInstaller", // base_system
+                "PartnerBookmarksProvider", // generic_system
+                "PrintRecommendationService", // handheld_system
+                "PrintSpooler", // handheld_system
+                "ProxyHandler", // handheld_system
+                "SecureElement", // handheld_system
+                "SettingsProvider", // base_system
+                "SharedStorageBackup", // handheld_system
+                "Shell", // base_system
+                "SimAppDialog", // handheld_system
+                "SoundPicker", // not installed by anyone
+                "StatementService", // media_system
+                "Stk", // generic_system
+                "Tag", // generic_system
+                "TeleService", // handheld_system
+                "Telecom", // handheld_system
+                "TelephonyProvider", // handheld_system
+                "Traceur", // handheld_system
+                "UserDictionaryProvider", // handheld_system
+                "VpnDialogs", // handheld_system
+                "WallpaperBackup", // base_system
+                "adbd_system_api", // base_system
+                "android.hidl.base-V1.0-java", // base_system
+                "android.hidl.manager-V1.0-java", // base_system
+                "android.test.base", // from runtime_libart
+                "android.test.mock", // base_system
+                "android.test.runner", // base_system
+                "aosp_mainline_modules", // ok
+                "build_flag_system", // base_system
+                "charger_res_images", // generic_system
+                "com.android.apex.cts.shim.v1_prebuilt", // ok
+                "com.android.cellbroadcast", // telephony_system
+                "com.android.future.usb.accessory", // media_system
+                "com.android.location.provider", // base_system
+                "com.android.media.remotedisplay", // media_system
+                "com.android.media.remotedisplay.xml", // media_system
+                "com.android.mediadrm.signer", // media_system
+                "com.android.nfc_extras", // ok
+                "com.android.nfcservices", // base_system (RELEASE_PACKAGE_NFC_STACK != NfcNci)
+                "com.android.runtime", // ok
+                "dex_bootjars",
+                "ext", // from runtime_libart
+                "framework-graphics", // base_system
+                "framework-location", // base_system
+                "framework-minus-apex-install-dependencies", // base_system
+                "framework-connectivity-b", // base_system
+                "framework_compatibility_matrix.device.xml",
+                "generic_system_fonts", // ok
+                "hwservicemanager_compat_symlink_module", // base_system
+                "hyph-data",
+                "ims-common", // base_system
+                "init_system", // base_system
+                "javax.obex", // base_system
+                "llndk.libraries.txt", //ok
+                "org.apache.http.legacy", // base_system
+                "perfetto-extras", // system
+                "sanitizer.libraries.txt", // base_system
+                "selinux_policy_system_soong", // ok
+                "services", // base_system
+                "shell_and_utilities_system", // ok
+                "system-build.prop",
+                "system_compatibility_matrix.xml", //base_system
+                "telephony-common", // libs from TeleService
+                "voip-common", // base_system
+            ] + select(soong_config_variable("ANDROID", "release_crashrecovery_module"), {
+                "true": [
+                    "com.android.crashrecovery", // base_system (RELEASE_CRASHRECOVERY_MODULE)
+                ],
+                default: [],
+            }) + select(soong_config_variable("ANDROID", "release_package_profiling_module"), {
+                "true": [
+                    "com.android.profiling", // base_system (RELEASE_PACKAGE_PROFILING_MODULE)
+                ],
+                default: [],
+            }) + select(release_flag("RELEASE_AVATAR_PICKER_APP"), {
+                true: [
+                    "AvatarPicker", // generic_system (RELEASE_AVATAR_PICKER_APP)
+                ],
+                default: [],
+            }),
+        },
+        prefer32: {
+            deps: [
+                "drmserver", // media_system
+                "mediaserver", // base_system
+            ],
+        },
+        lib64: {
+            deps: [
+                "android.system.virtualizationcommon-ndk",
+                "android.system.virtualizationservice-ndk",
+                "libgsi",
+                "servicemanager",
+            ],
+        },
+        both: {
+            deps: [
+                "android.hardware.biometrics.fingerprint@2.1", // generic_system
+                "android.hardware.radio.config@1.0", // generic_system
+                "android.hardware.radio.deprecated@1.0", // generic_system
+                "android.hardware.radio@1.0", // generic_system
+                "android.hardware.radio@1.1", // generic_system
+                "android.hardware.radio@1.2", // generic_system
+                "android.hardware.radio@1.3", // generic_system
+                "android.hardware.radio@1.4", // generic_system
+                "android.hardware.secure_element@1.0", // generic_system
+                "app_process", // base_system
+                "boringssl_self_test", // base_system
+                "heapprofd_client", // base_system
+                "libEGL", // base_system
+                "libEGL_angle", // base_system
+                "libETC1", // base_system
+                "libFFTEm", // base_system
+                "libGLESv1_CM", // base_system
+                "libGLESv1_CM_angle", // base_system
+                "libGLESv2", // base_system
+                "libGLESv2_angle", // base_system
+                "libGLESv3", // base_system
+                "libOpenMAXAL", // base_system
+                "libOpenSLES", // base_system
+                "libaaudio", // base_system
+                "libalarm_jni", // base_system
+                "libamidi", // base_system
+                "libandroid",
+                "libandroid_runtime",
+                "libandroid_servers",
+                "libandroidfw",
+                "libartpalette-system",
+                "libaudio-resampler", // generic-system
+                "libaudioeffect_jni",
+                "libaudiohal", // generic-system
+                "libaudiopolicyengineconfigurable", // generic-system
+                "libbinder",
+                "libbinder_ndk",
+                "libbinder_rpc_unstable",
+                "libcamera2ndk",
+                "libcgrouprc", // llndk library
+                "libclang_rt.asan",
+                "libcompiler_rt",
+                "libcutils", // used by many libs
+                "libdmabufheap", // used by many libs
+                "libdrm", // used by many libs // generic_system
+                "libdrmframework", // base_system
+                "libdrmframework_jni", // base_system
+                "libfdtrack", // base_system
+                "libfilterfw", // base_system
+                "libfilterpack_imageproc", // media_system
+                "libfwdlockengine", // generic_system
+                "libgatekeeper", // base_system
+                "libgui", // base_system
+                "libhardware", // base_system
+                "libhardware_legacy", // base_system
+                "libhidltransport", // generic_system
+                "libhwbinder", // generic_system
+                "libinput", // base_system
+                "libinputflinger", // base_system
+                "libiprouteutil", // base_system
+                "libjnigraphics", // base_system
+                "libjpeg", // base_system
+                "liblog", // base_system
+                "liblogwrap", // generic_system
+                "liblz4", // generic_system
+                "libmedia", // base_system
+                "libmedia_jni", // base_system
+                "libmediandk", // base_system
+                "libminui", // generic_system
+                "libmtp", // base_system
+                "libnetd_client", // base_system
+                "libnetlink", // base_system
+                "libnetutils", // base_system
+                "libneuralnetworks_packageinfo", // base_system
+                "libnl", // generic_system
+                "libpdfium", // base_system
+                "libpolicy-subsystem", // generic_system
+                "libpower", // base_system
+                "libpowermanager", // base_system
+                "libprotobuf-cpp-full", // generic_system
+                "libradio_metadata", // base_system
+                "librs_jni", // handheld_system
+                "librtp_jni", // base_system
+                "libsensorservice", // base_system
+                "libsfplugin_ccodec", // base_system
+                "libskia", // base_system
+                "libsonic", // base_system
+                "libsonivox", // base_system
+                "libsoundpool", // base_system
+                "libspeexresampler", // base_system
+                "libsqlite", // base_system
+                "libstagefright", // base_system
+                "libstagefright_foundation", // base_system
+                "libstagefright_omx", // base_system
+                "libstdc++", // base_system
+                "libsysutils", // base_system
+                "libui", // base_system
+                "libusbhost", // base_system
+                "libutils", // base_system
+                "libvendorsupport", // llndk library
+                "libvintf_jni", // base_system
+                "libvulkan", // base_system
+                "libwebviewchromium_loader", // media_system
+                "libwebviewchromium_plat_support", // media_system
+                "libwilhelm", // base_system
+                "linker", // base_system
+            ] + select(soong_config_variable("ANDROID", "TARGET_DYNAMIC_64_32_DRMSERVER"), {
+                "true": ["drmserver"],
+                default: [],
+            }) + select(soong_config_variable("ANDROID", "TARGET_DYNAMIC_64_32_MEDIASERVER"), {
+                "true": ["mediaserver"],
+                default: [],
+            }),
+        },
+    },
+}
+
+android_system_image {
+    name: "aosp_shared_system_image",
+    defaults: ["system_image_defaults"],
+    dirs: android_rootdirs,
+    symlinks: android_symlinks,
+    type: "erofs",
+    erofs: {
+        compressor: "lz4hc,9",
+        compress_hints: "erofs_compress_hints.txt",
+    },
+}
diff --git a/target/product/generic/OWNERS b/target/product/generic/OWNERS
new file mode 100644
index 0000000..6d1446f
--- /dev/null
+++ b/target/product/generic/OWNERS
@@ -0,0 +1,6 @@
+# Bug component: 1322713
+inseob@google.com
+jeongik@google.com
+jiyong@google.com
+justinyun@google.com
+kiyoungkim@google.com
diff --git a/target/product/generic/erofs_compress_hints.txt b/target/product/generic/erofs_compress_hints.txt
new file mode 100644
index 0000000..8b2a711
--- /dev/null
+++ b/target/product/generic/erofs_compress_hints.txt
@@ -0,0 +1 @@
+0 .*\.apex$
\ No newline at end of file
diff --git a/target/product/generic_ramdisk.mk b/target/product/generic_ramdisk.mk
index ebac62f..5ecb55f 100644
--- a/target/product/generic_ramdisk.mk
+++ b/target/product/generic_ramdisk.mk
@@ -23,6 +23,7 @@
 PRODUCT_PACKAGES += \
     init_first_stage \
     snapuserd_ramdisk \
+    ramdisk-build.prop \
 
 # Debug ramdisk
 PRODUCT_PACKAGES += \
@@ -35,8 +36,6 @@
 _my_paths := \
     $(TARGET_COPY_OUT_RAMDISK)/ \
     $(TARGET_COPY_OUT_DEBUG_RAMDISK)/ \
-    system/usr/share/zoneinfo/tz_version \
-    system/usr/share/zoneinfo/tzdata \
     $(TARGET_COPY_OUT_RECOVERY)/root/first_stage_ramdisk/system \
 
 
diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk
index 0a09eb1..b9a623d 100644
--- a/target/product/generic_system.mk
+++ b/target/product/generic_system.mk
@@ -152,4 +152,5 @@
 $(call require-artifacts-in-path, $(_my_paths), $(_my_allowed_list))
 
 # Product config map to toggle between sources and prebuilts of required mainline modules
+PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard build/release/gms_mainline/required/release_config_map.textproto)
 PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard vendor/google_shared/build/release/gms_mainline/required/release_config_map.textproto)
diff --git a/target/product/go_defaults.mk b/target/product/go_defaults.mk
index c928530..ccc4f36 100644
--- a/target/product/go_defaults.mk
+++ b/target/product/go_defaults.mk
@@ -18,6 +18,7 @@
 $(call inherit-product, build/make/target/product/go_defaults_common.mk)
 
 # Product config map to toggle between sources and prebuilts of required mainline modules
+PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard build/release/gms_mainline_go/required/release_config_map.textproto)
 PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard vendor/google_shared/build/release/gms_mainline_go/required/release_config_map.textproto)
 
 # Add the system properties.
diff --git a/target/product/go_defaults_common.mk b/target/product/go_defaults_common.mk
index 5218f29..0fcf16b 100644
--- a/target/product/go_defaults_common.mk
+++ b/target/product/go_defaults_common.mk
@@ -24,11 +24,6 @@
 # Speed profile services and wifi-service to reduce RAM and storage.
 PRODUCT_SYSTEM_SERVER_COMPILER_FILTER := speed-profile
 
-# Use a profile based boot image for this device. Note that this is currently a
-# generic profile and not Android Go optimized.
-PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE := true
-PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION := frameworks/base/config/boot-image-profile.txt
-
 # Do not generate libartd.
 PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD := false
 
@@ -37,9 +32,9 @@
 # leave less information available via JDWP.
 PRODUCT_MINIMIZE_JAVA_DEBUG_INFO := true
 
-# Disable Scudo outside of eng builds to save RAM.
+# Use the low memory allocator outside of eng builds to save RSS.
 ifneq (,$(filter eng, $(TARGET_BUILD_VARIANT)))
-  PRODUCT_DISABLE_SCUDO := true
+  MALLOC_LOW_MEMORY := true
 endif
 
 # Add the system properties.
diff --git a/target/product/gsi/Android.bp b/target/product/gsi/Android.bp
index 45ba143..9e8946d 100644
--- a/target/product/gsi/Android.bp
+++ b/target/product/gsi/Android.bp
@@ -46,3 +46,164 @@
     installed_location: "etc/init/config",
     symlink_target: "/system/system_ext/etc/init/config",
 }
+
+// init.gsi.rc, GSI-specific init script.
+prebuilt_etc {
+    name: "init.gsi.rc",
+    src: "init.gsi.rc",
+    system_ext_specific: true,
+    relative_install_path: "init",
+}
+
+prebuilt_etc {
+    name: "init.vndk-nodef.rc",
+    src: "init.vndk-nodef.rc",
+    system_ext_specific: true,
+    relative_install_path: "gsi",
+}
+
+gsi_symlinks = [
+    {
+        target: "/system/system_ext",
+        name: "system_ext",
+    },
+    {
+        target: "/system/product",
+        name: "product",
+    },
+    {
+        target: "/odm/odm_dlkm/etc",
+        name: "odm_dlkm/etc",
+    },
+    {
+        target: "/vendor/vendor_dlkm/etc",
+        name: "vendor_dlkm/etc",
+    },
+]
+
+android_system_image {
+    name: "android_gsi",
+    defaults: ["system_image_defaults"],
+    symlinks: gsi_symlinks,
+    dirs: ["cache"],
+    deps: [
+        ///////////////////////////////////////////
+        // gsi_system_ext
+        ///////////////////////////////////////////
+
+        // handheld packages
+        "Launcher3QuickStep",
+        "Provision",
+        "Settings",
+        "StorageManager",
+        "SystemUI",
+
+        // telephony packages
+        "CarrierConfig",
+
+        // Install a copy of the debug policy to the system_ext partition, and allow
+        // init-second-stage to load debug policy from system_ext.
+        // This option is only meant to be set by compliance GSI targets.
+        "system_ext_userdebug_plat_sepolicy.cil",
+
+        ///////////////////////////////////////////
+        // base_system_ext
+        ///////////////////////////////////////////
+        "build_flag_system_ext",
+        "fs_config_dirs_system_ext",
+        "fs_config_files_system_ext",
+        "group_system_ext",
+        "passwd_system_ext",
+        "SatelliteClient",
+        "selinux_policy_system_ext",
+        "system_ext_manifest.xml",
+        "system_ext-build.prop",
+        // Base modules when shipping api level is less than or equal to 34
+        "hwservicemanager",
+        "android.hidl.allocator@1.0-service",
+
+        ///////////////////////////////////////////
+        // window_extensions_base
+        ///////////////////////////////////////////
+        "androidx.window.extensions",
+        "androidx.window.sidecar",
+
+        ///////////////////////////////////////////
+        // gsi_release
+        ///////////////////////////////////////////
+        "gsi_skip_mount.cfg",
+        "init.gsi.rc",
+        "init.vndk-nodef.rc",
+        // Overlay the GSI specific setting for framework and SystemUI
+        "gsi_overlay_framework",
+        "gsi_overlay_systemui",
+
+        ///////////////////////////////////////////
+        // VNDK
+        ///////////////////////////////////////////
+        "com.android.vndk.v30",
+        "com.android.vndk.v31",
+        "com.android.vndk.v32",
+        "com.android.vndk.v33",
+        "com.android.vndk.v34",
+
+        ///////////////////////////////////////////
+        // AVF
+        ///////////////////////////////////////////
+        "com.android.compos",
+        "features_com.android.virt.xml",
+
+        ///////////////////////////////////////////
+        // gsi_product
+        ///////////////////////////////////////////
+        "Browser2",
+        "Camera2",
+        "Dialer",
+        "LatinIME",
+        "apns-full-conf.xml",
+
+        ///////////////////////////////////////////
+        // media_product
+        ///////////////////////////////////////////
+        "webview",
+
+        ///////////////////////////////////////////
+        // base_product
+        ///////////////////////////////////////////
+
+        // Base modules and settings for the product partition.
+        "build_flag_product",
+        "fs_config_dirs_product",
+        "fs_config_files_product",
+        "group_product",
+        "ModuleMetadata",
+        "passwd_product",
+        "product_compatibility_matrix.xml",
+        "product_manifest.xml",
+        "selinux_policy_product",
+        "product-build.prop",
+
+        // AUDIO
+        "frameworks_sounds",
+
+        ///////////////////////////////////////////
+        // base_system
+        ///////////////////////////////////////////
+        "charger",
+    ] + select(product_variable("debuggable"), {
+        // Packages included only for eng or userdebug builds, previously debug tagged
+        true: ["adb_keys"],
+        default: [],
+    }),
+    multilib: {
+        both: {
+            // PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34
+            deps: ["android.hidl.memory@1.0-impl"],
+        },
+    },
+    enabled: select(soong_config_variable("ANDROID", "PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT"), {
+        "true": true,
+        default: false,
+    }),
+    type: "ext4",
+}
diff --git a/target/product/large_screen_common.mk b/target/product/large_screen_common.mk
new file mode 100644
index 0000000..3eb9ff0
--- /dev/null
+++ b/target/product/large_screen_common.mk
@@ -0,0 +1,21 @@
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Window Extensions
+$(call inherit-product, $(SRC_TARGET_DIR)/product/window_extensions.mk)
+
+# Enable Settings 2-pane optimization for large-screen
+PRODUCT_SYSTEM_PROPERTIES += \
+    persist.settings.large_screen_opt.enabled=true
diff --git a/target/product/media_system_ext.mk b/target/product/media_system_ext.mk
index 30dd2e2..e79a7eb 100644
--- a/target/product/media_system_ext.mk
+++ b/target/product/media_system_ext.mk
@@ -20,9 +20,5 @@
 # base_system_ext.mk.
 $(call inherit-product, $(SRC_TARGET_DIR)/product/base_system_ext.mk)
 
-# /system_ext packages
-PRODUCT_PACKAGES += \
-    vndk_apex_snapshot_package \
-
 # Window Extensions
 $(call inherit-product, $(SRC_TARGET_DIR)/product/window_extensions_base.mk)
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index 58234a8..9e8afa8 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -181,3 +181,5 @@
 
 # Copy preopted files from system_b on first boot.
 PRODUCT_SYSTEM_PROPERTIES += ro.cp_system_other_odex=1
+PRODUCT_PACKAGES += \
+  cppreopts.sh
diff --git a/target/product/security/Android.bp b/target/product/security/Android.bp
index 0d7b35e..ffbec06 100644
--- a/target/product/security/Android.bp
+++ b/target/product/security/Android.bp
@@ -37,3 +37,8 @@
     relative_install_path: "security",
     filename: "otacerts.zip",
 }
+
+adb_keys {
+    name: "adb_keys",
+    product_specific: true,
+}
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
deleted file mode 100644
index 91b272c..0000000
--- a/target/product/security/Android.mk
+++ /dev/null
@@ -1,17 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-#######################################
-# adb key, if configured via PRODUCT_ADB_KEYS
-ifdef PRODUCT_ADB_KEYS
-  ifneq ($(filter eng userdebug,$(TARGET_BUILD_VARIANT)),)
-    include $(CLEAR_VARS)
-    LOCAL_MODULE := adb_keys
-    LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-    LOCAL_LICENSE_CONDITIONS := notice
-    LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-    LOCAL_MODULE_CLASS := ETC
-    LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT)
-    LOCAL_PREBUILT_MODULE_FILE := $(PRODUCT_ADB_KEYS)
-    include $(BUILD_PREBUILT)
-  endif
-endif
diff --git a/target/product/userspace_reboot.mk b/target/product/userspace_reboot.mk
index f235d14..51feb07 100644
--- a/target/product/userspace_reboot.mk
+++ b/target/product/userspace_reboot.mk
@@ -14,6 +14,4 @@
 # limitations under the License.
 #
 
-# Inherit this when the target supports userspace reboot
-
-PRODUCT_VENDOR_PROPERTIES := init.userspace_reboot.is_supported=true
+# DEPRECATED! Do not inherit this.
diff --git a/target/product/virtual_ab_ota/compression.mk b/target/product/virtual_ab_ota/compression.mk
index dc1ee3e..e77c36f 100644
--- a/target/product/virtual_ab_ota/compression.mk
+++ b/target/product/virtual_ab_ota/compression.mk
@@ -18,9 +18,12 @@
 
 PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.enabled=true
 PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.userspace.snapshots.enabled=true
-PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.io_uring.enabled=true
 PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.batch_writes=true
 
+# Optional assignment. On low-memory devices, disabling io_uring can relieve CPU and memory
+# pressure during an OTA.
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.io_uring.enabled?=true
+
 # Enabling this property, will improve OTA install time
 # but will use an additional CPU core
 # PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.threads=true
diff --git a/target/product/virtual_ab_ota/vabc_features.mk b/target/product/virtual_ab_ota/vabc_features.mk
index e2745a1..d092699 100644
--- a/target/product/virtual_ab_ota/vabc_features.mk
+++ b/target/product/virtual_ab_ota/vabc_features.mk
@@ -31,14 +31,15 @@
 
 PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.enabled=true
 PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.userspace.snapshots.enabled=true
-PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.io_uring.enabled=true
-PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.xor.enabled=true
 PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.batch_writes=true
+
+# Optional assignments; low-memory devices may benefit from overriding these.
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.io_uring.enabled?=true
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.xor.enabled?=true
+
 # Low memory device configurations. If memory usage and cpu utilization is
 # a bottleneck during OTA, the below configurations can be added to a
-# device's .mk file improve performance for low mem devices. Disabling
-# ro.virtual_ab.compression.xor.enabled and ro.virtual_ab.io_uring.enabled
-# is also recommended
+# device's .mk file to improve performance for low mem devices.
 #
 # PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.read_ahead_size=16
 # PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.o_direct.enabled=true
diff --git a/teams/Android.bp b/teams/Android.bp
index a9699d2..6307f5c 100644
--- a/teams/Android.bp
+++ b/teams/Android.bp
@@ -13,6 +13,9 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+// DON'T ADD NEW RULES HERE. For more details refer to
+// go/new-android-ownership-model
+
 package {
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
@@ -67,13 +70,6 @@
 }
 
 team {
-    name: "trendy_team_wear_wear_cloud_platform",
-
-    // go/trendy/manage/engineers/5917762526281728
-    trendy_team_id: "5917762526281728",
-}
-
-team {
     name: "trendy_team_pixel_system_software",
 
     // go/trendy/manage/engineers/4856005120622592
@@ -515,13 +511,6 @@
 }
 
 team {
-    name: "trendy_team_wear_wear_notifications_alerts_attention_management",
-
-    // go/trendy/manage/engineers/6267643681996800
-    trendy_team_id: "6267643681996800",
-}
-
-team {
     name: "trendy_team_fwk_nfc",
 
     // go/trendy/manage/engineers/5962312512864256
@@ -529,13 +518,6 @@
 }
 
 team {
-    name: "trendy_team_wear_personalization_developer_surfaces",
-
-    // go/trendy/manage/engineers/4819890988810240
-    trendy_team_id: "4819890988810240",
-}
-
-team {
     name: "trendy_team_srajkumar_team",
 
     // go/trendy/manage/engineers/5170053894012928
@@ -690,13 +672,6 @@
 }
 
 team {
-    name: "trendy_team_test_eng_android_wear",
-
-    // go/trendy/manage/engineers/4979150422933504
-    trendy_team_id: "4979150422933504",
-}
-
-team {
     name: "trendy_team_mesch_team",
 
     // go/trendy/manage/engineers/5205465899368448
@@ -718,13 +693,6 @@
 }
 
 team {
-    name: "trendy_team_wear_wear_developer_devx",
-
-    // go/trendy/manage/engineers/4894890764697600
-    trendy_team_id: "4894890764697600",
-}
-
-team {
     name: "trendy_team_android_rust",
 
     // go/trendy/manage/engineers/4844600586305536
@@ -928,13 +896,6 @@
 }
 
 team {
-    name: "trendy_team_wear_wallet_on_wear",
-
-    // go/trendy/manage/engineers/5724960437731328
-    trendy_team_id: "5724960437731328",
-}
-
-team {
     name: "trendy_team_glanceables",
 
     // go/trendy/manage/engineers/4658222004600832
@@ -1068,13 +1029,6 @@
 }
 
 team {
-    name: "trendy_team_wear_3xp",
-
-    // go/trendy/manage/engineers/5692317612539904
-    trendy_team_id: "5692317612539904",
-}
-
-team {
     name: "trendy_team_clockwork",
 
     // go/trendy/manage/engineers/4908781678755840
@@ -1208,13 +1162,6 @@
 }
 
 team {
-    name: "trendy_team_wear_software_nti",
-
-    // go/trendy/manage/engineers/5164973558759424
-    trendy_team_id: "5164973558759424",
-}
-
-team {
     name: "trendy_team_machine_learning",
 
     // go/trendy/manage/engineers/5276568318246912
@@ -1306,13 +1253,6 @@
 }
 
 team {
-    name: "trendy_team_wear_wear_power_emulator",
-
-    // go/trendy/manage/engineers/5160338936725504
-    trendy_team_id: "5160338936725504",
-}
-
-team {
     name: "trendy_team_deprecated_framework_svetoslavganov",
 
     // go/trendy/manage/engineers/6404117492531200
@@ -1327,13 +1267,6 @@
 }
 
 team {
-    name: "trendy_team_wear_opus",
-
-    // go/trendy/manage/engineers/5098351636676608
-    trendy_team_id: "5098351636676608",
-}
-
-team {
     name: "trendy_team_text_to_speech",
 
     // go/trendy/manage/engineers/6368933120442368
@@ -1439,13 +1372,6 @@
 }
 
 team {
-    name: "trendy_team_wear_developer_foundation",
-
-    // go/trendy/manage/engineers/5239127108648960
-    trendy_team_id: "5239127108648960",
-}
-
-team {
     name: "trendy_team_tpm_tvc",
 
     // go/trendy/manage/engineers/5390683333230592
@@ -1453,13 +1379,6 @@
 }
 
 team {
-    name: "trendy_team_wear_wear_ux",
-
-    // go/trendy/manage/engineers/5782097411080192
-    trendy_team_id: "5782097411080192",
-}
-
-team {
     name: "trendy_team_lse_desktop_os_experience",
 
     // go/trendy/manage/engineers/5125234900434944
@@ -1670,13 +1589,6 @@
 }
 
 team {
-    name: "trendy_team_wear_wear_assistant",
-
-    // go/trendy/manage/engineers/5848075306172416
-    trendy_team_id: "5848075306172416",
-}
-
-team {
     name: "trendy_team_android_power_and_comms_infra",
 
     // go/trendy/manage/engineers/5325547653332992
@@ -4414,8 +4326,74 @@
 }
 
 team {
+    name: "trendy_team_android_media_solutions_playback",
+
+    // go/trendy/manage/engineers/6742515252559872
+    trendy_team_id: "6742515252559872",
+}
+
+team {
     name: "trendy_team_android_telemetry_client_infra",
 
     // go/trendy/manage/engineers/5403245077430272
     trendy_team_id: "5403245077430272",
 }
+
+team {
+    name: "trendy_team_pte_sysui",
+
+    // go/trendy/manage/engineers/5185897463382016
+    trendy_team_id: "5185897463382016",
+}
+
+team {
+    name: "trendy_team_pixel_troubleshooting_app",
+
+    // go/trendy/manage/engineers/5097003746426880
+    trendy_team_id: "5097003746426880",
+}
+
+team {
+    name: "trendy_team_desktop_firmware",
+
+    // go/trendy/manage/engineers/5787938454863872
+    trendy_team_id: "5787938454863872",
+}
+
+team {
+    name: "trendy_team_art_cloud",
+
+    // go/trendy/manage/engineers/5121440647577600
+    trendy_team_id: "5121440647577600",
+}
+
+team {
+    name: "trendy_team_ravenwood",
+
+    // go/trendy/manage/engineers/6027181500497920
+    trendy_team_id: "6027181500497920",
+}
+
+team {
+    name: "trendy_team_automotive_cast",
+
+    // go/trendy/manage/engineers/5293683026264064
+    trendy_team_id: "5293683026264064",
+}
+
+team {
+    name: "trendy_team_wear_standalone_kids",
+
+    // go/trendy/manage/engineers/6303298703949824
+    trendy_team_id: "6303298703949824",
+}
+
+team {
+    name: "trendy_team_desktop_stats",
+
+    // go/trendy/manage/engineers/5440764114206720
+    trendy_team_id: "5440764114206720",
+}
+
+// DON'T ADD NEW RULES HERE. For more details refer to
+// go/new-android-ownership-model
diff --git a/tools/aconfig/OWNERS b/tools/aconfig/OWNERS
index 9a76279..c92fc7c 100644
--- a/tools/aconfig/OWNERS
+++ b/tools/aconfig/OWNERS
@@ -1,7 +1,8 @@
-amhk@google.com
 dzshen@google.com
-jham@google.com
-joeo@google.com
 opg@google.com
 tedbauer@google.com
 zhidou@google.com
+
+amhk@google.com  #{LAST_RESORT_SUGGESTION}
+jham@google.com  #{LAST_RESORT_SUGGESTION}
+joeo@google.com  #{LAST_RESORT_SUGGESTION}
diff --git a/tools/aconfig/TEST_MAPPING b/tools/aconfig/TEST_MAPPING
index 15e4187..043a956 100644
--- a/tools/aconfig/TEST_MAPPING
+++ b/tools/aconfig/TEST_MAPPING
@@ -106,8 +106,12 @@
   ],
   "postsubmit": [
     {
-      // aconfig_storage read api java integration tests
-      "name": "aconfig_storage_read_api.test.java"
+      // aconfig_storage read functional test
+      "name": "aconfig_storage_read_functional"
+    },
+    {
+      // aconfig_storage read unit test
+      "name": "aconfig_storage_read_unit"
     }
   ]
 }
diff --git a/tools/aconfig/aconfig/Android.bp b/tools/aconfig/aconfig/Android.bp
index 68521af..5e3eb12 100644
--- a/tools/aconfig/aconfig/Android.bp
+++ b/tools/aconfig/aconfig/Android.bp
@@ -68,6 +68,14 @@
     ],
 }
 
+aconfig_values {
+    name: "aconfig.test.flag.second_values",
+    package: "com.android.aconfig.test",
+    srcs: [
+        "tests/third.values",
+    ],
+}
+
 aconfig_value_set {
     name: "aconfig.test.flag.value_set",
     values: [
@@ -234,6 +242,7 @@
     name: "libaconfig_test_rust_library",
     crate_name: "aconfig_test_rust_library",
     aconfig_declarations: "aconfig.test.flags",
+    host_supported: true,
 }
 
 rust_test {
diff --git a/tools/aconfig/aconfig/src/codegen/cpp.rs b/tools/aconfig/aconfig/src/codegen/cpp.rs
index e743b2f..7a9c382 100644
--- a/tools/aconfig/aconfig/src/codegen/cpp.rs
+++ b/tools/aconfig/aconfig/src/codegen/cpp.rs
@@ -45,6 +45,8 @@
     let header = package.replace('.', "_");
     let package_macro = header.to_uppercase();
     let cpp_namespace = package.replace('.', "::");
+    ensure!(class_elements.len() > 0);
+    let container = class_elements[0].container.clone();
     ensure!(codegen::is_valid_name_ident(&header));
     let context = Context {
         header: &header,
@@ -56,6 +58,7 @@
         readwrite_count,
         is_test_mode: codegen_mode == CodegenMode::Test,
         class_elements,
+        container,
         allow_instrumentation,
     };
 
@@ -100,6 +103,7 @@
     pub readwrite_count: i32,
     pub is_test_mode: bool,
     pub class_elements: Vec<ClassElement>,
+    pub container: String,
     pub allow_instrumentation: bool,
 }
 
@@ -279,39 +283,23 @@
     virtual ~flag_provider_interface() = default;
 
     virtual bool disabled_ro() = 0;
-
-    virtual void disabled_ro(bool val) = 0;
-
     virtual bool disabled_rw() = 0;
-
-    virtual void disabled_rw(bool val) = 0;
-
     virtual bool disabled_rw_exported() = 0;
-
-    virtual void disabled_rw_exported(bool val) = 0;
-
     virtual bool disabled_rw_in_other_namespace() = 0;
-
-    virtual void disabled_rw_in_other_namespace(bool val) = 0;
-
     virtual bool enabled_fixed_ro() = 0;
-
-    virtual void enabled_fixed_ro(bool val) = 0;
-
     virtual bool enabled_fixed_ro_exported() = 0;
-
-    virtual void enabled_fixed_ro_exported(bool val) = 0;
-
     virtual bool enabled_ro() = 0;
-
-    virtual void enabled_ro(bool val) = 0;
-
     virtual bool enabled_ro_exported() = 0;
-
-    virtual void enabled_ro_exported(bool val) = 0;
-
     virtual bool enabled_rw() = 0;
 
+    virtual void disabled_ro(bool val) = 0;
+    virtual void disabled_rw(bool val) = 0;
+    virtual void disabled_rw_exported(bool val) = 0;
+    virtual void disabled_rw_in_other_namespace(bool val) = 0;
+    virtual void enabled_fixed_ro(bool val) = 0;
+    virtual void enabled_fixed_ro_exported(bool val) = 0;
+    virtual void enabled_ro(bool val) = 0;
+    virtual void enabled_ro_exported(bool val) = 0;
     virtual void enabled_rw(bool val) = 0;
 
     virtual void reset_flags() {}
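The cpp.rs change above makes the C++ generator carry the flag container in its template Context: it first checks that at least one flag element was parsed, then takes the container from the first element, mirroring what the Java generator already does. A minimal sketch of that pattern, using anyhow's ensure! and hypothetical stand-in types rather than the real template structs:

use anyhow::{ensure, Result};

// Hypothetical stand-ins for the real codegen types.
struct ClassElement {
    container: String,
}

struct Context {
    container: String,
    class_elements: Vec<ClassElement>,
}

fn build_context(class_elements: Vec<ClassElement>) -> Result<Context> {
    // Refuse to generate code when no flags were parsed at all.
    ensure!(!class_elements.is_empty(), "no flag elements to generate from");
    // All flags in one package share a container, so the first element is enough.
    let container = class_elements[0].container.clone();
    Ok(Context { container, class_elements })
}

fn main() -> Result<()> {
    let ctx = build_context(vec![ClassElement { container: "system".to_string() }])?;
    assert_eq!(ctx.container, "system");
    Ok(())
}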
diff --git a/tools/aconfig/aconfig/src/codegen/java.rs b/tools/aconfig/aconfig/src/codegen/java.rs
index ec22ebc..bfdf1a7 100644
--- a/tools/aconfig/aconfig/src/codegen/java.rs
+++ b/tools/aconfig/aconfig/src/codegen/java.rs
@@ -1,18 +1,18 @@
 /*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
+* Copyright (C) 2023 The Android Open Source Project
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 
 use anyhow::Result;
 use serde::Serialize;
@@ -46,7 +46,6 @@
     let runtime_lookup_required =
         flag_elements.iter().any(|elem| elem.is_read_write) || library_exported;
     let container = (flag_elements.first().expect("zero template flags").container).to_string();
-
     let context = Context {
         flag_elements,
         namespace_flags,
@@ -138,6 +137,7 @@
     pub default_value: bool,
     pub device_config_namespace: String,
     pub device_config_flag: String,
+    pub flag_name: String,
     pub flag_name_constant_suffix: String,
     pub flag_offset: u16,
     pub is_read_write: bool,
@@ -157,6 +157,7 @@
         default_value: pf.state() == ProtoFlagState::ENABLED,
         device_config_namespace: pf.namespace().to_string(),
         device_config_flag,
+        flag_name: pf.name().to_string(),
         flag_name_constant_suffix: pf.name().to_ascii_uppercase(),
         flag_offset: *flag_offsets.get(pf.name()).expect("didnt find package offset :("),
         is_read_write: pf.permission() == ProtoFlagPermission::READ_WRITE,
@@ -500,7 +501,7 @@
             modified_parsed_flags.into_iter(),
             mode,
             flag_ids,
-            false,
+            true,
         )
         .unwrap();
         let expect_flags_content = EXPECTED_FLAG_COMMON_CONTENT.to_string()
@@ -508,23 +509,43 @@
             private static FeatureFlags FEATURE_FLAGS = new FeatureFlagsImpl();
         }"#;
 
-        let expect_featureflagsimpl_content = r#"
+        let expected_featureflagsmpl_content = r#"
         package com.android.aconfig.test;
         // TODO(b/303773055): Remove the annotation after access issue is resolved.
         import android.compat.annotation.UnsupportedAppUsage;
+        import android.os.Binder;
         import android.provider.DeviceConfig;
         import android.provider.DeviceConfig.Properties;
+        import android.aconfig.storage.StorageInternalReader;
+        import java.nio.file.Files;
+        import java.nio.file.Paths;
+
         /** @hide */
         public final class FeatureFlagsImpl implements FeatureFlags {
-            private static boolean aconfig_test_is_cached = false;
-            private static boolean other_namespace_is_cached = false;
+            private static final boolean isReadFromNew = Files.exists(Paths.get("/metadata/aconfig/boot/enable_only_new_storage"));
+            private static volatile boolean isCached = false;
+            private static volatile boolean aconfig_test_is_cached = false;
+            private static volatile boolean other_namespace_is_cached = false;
             private static boolean disabledRw = false;
             private static boolean disabledRwExported = false;
             private static boolean disabledRwInOtherNamespace = false;
             private static boolean enabledRw = true;
-
-
+            private void init() {
+                StorageInternalReader reader = null;
+                boolean foundPackage = true;
+                try {
+                    reader = new StorageInternalReader("system", "com.android.aconfig.test");
+                } catch (Exception e) {
+                    foundPackage = false;
+                }
+                disabledRw = foundPackage ? reader.getBooleanFlagValue(1) : false;
+                disabledRwExported = foundPackage ? reader.getBooleanFlagValue(2) : false;
+                enabledRw = foundPackage ? reader.getBooleanFlagValue(8) : true;
+                disabledRwInOtherNamespace = foundPackage ? reader.getBooleanFlagValue(3) : false;
+                isCached = true;
+            }
             private void load_overrides_aconfig_test() {
+                final long ident = Binder.clearCallingIdentity();
                 try {
                     Properties properties = DeviceConfig.getProperties("aconfig_test");
                     disabledRw =
@@ -542,11 +563,16 @@
                         + "flag declaration.",
                         e
                     );
+                } catch (SecurityException e) {
+                    // for isolated process case, skip loading flag value from the storage, use the default
+                } finally {
+                    Binder.restoreCallingIdentity(ident);
                 }
                 aconfig_test_is_cached = true;
             }
 
             private void load_overrides_other_namespace() {
+                final long ident = Binder.clearCallingIdentity();
                 try {
                     Properties properties = DeviceConfig.getProperties("other_namespace");
                     disabledRwInOtherNamespace =
@@ -560,6 +586,10 @@
                         + "flag declaration.",
                         e
                     );
+                } catch (SecurityException e) {
+                    // for isolated process case, skip loading flag value from the storage, use the default
+                } finally {
+                    Binder.restoreCallingIdentity(ident);
                 }
                 other_namespace_is_cached = true;
             }
@@ -574,8 +604,14 @@
             @com.android.aconfig.annotations.AconfigFlagAccessor
             @UnsupportedAppUsage
             public boolean disabledRw() {
-                if (!aconfig_test_is_cached) {
-                    load_overrides_aconfig_test();
+                if (isReadFromNew) {
+                    if (!isCached) {
+                        init();
+                    }
+                } else {
+                    if (!aconfig_test_is_cached) {
+                        load_overrides_aconfig_test();
+                    }
                 }
                 return disabledRw;
             }
@@ -583,8 +619,14 @@
             @com.android.aconfig.annotations.AconfigFlagAccessor
             @UnsupportedAppUsage
             public boolean disabledRwExported() {
-                if (!aconfig_test_is_cached) {
-                    load_overrides_aconfig_test();
+                if (isReadFromNew) {
+                    if (!isCached) {
+                        init();
+                    }
+                } else {
+                    if (!aconfig_test_is_cached) {
+                        load_overrides_aconfig_test();
+                    }
                 }
                 return disabledRwExported;
             }
@@ -592,8 +634,14 @@
             @com.android.aconfig.annotations.AconfigFlagAccessor
             @UnsupportedAppUsage
             public boolean disabledRwInOtherNamespace() {
-                if (!other_namespace_is_cached) {
-                    load_overrides_other_namespace();
+                if (isReadFromNew) {
+                    if (!isCached) {
+                        init();
+                    }
+                } else {
+                    if (!other_namespace_is_cached) {
+                        load_overrides_other_namespace();
+                    }
                 }
                 return disabledRwInOtherNamespace;
             }
@@ -625,16 +673,23 @@
             @com.android.aconfig.annotations.AconfigFlagAccessor
             @UnsupportedAppUsage
             public boolean enabledRw() {
-                if (!aconfig_test_is_cached) {
-                    load_overrides_aconfig_test();
+                if (isReadFromNew) {
+                    if (!isCached) {
+                        init();
+                    }
+                } else {
+                    if (!aconfig_test_is_cached) {
+                        load_overrides_aconfig_test();
+                    }
                 }
                 return enabledRw;
             }
         }
         "#;
+
         let mut file_set = HashMap::from([
             ("com/android/aconfig/test/Flags.java", expect_flags_content.as_str()),
-            ("com/android/aconfig/test/FeatureFlagsImpl.java", expect_featureflagsimpl_content),
+            ("com/android/aconfig/test/FeatureFlagsImpl.java", expected_featureflagsmpl_content),
             ("com/android/aconfig/test/FeatureFlags.java", EXPECTED_FEATUREFLAGS_COMMON_CONTENT),
             (
                 "com/android/aconfig/test/CustomFeatureFlags.java",
@@ -677,7 +732,7 @@
             modified_parsed_flags.into_iter(),
             mode,
             flag_ids,
-            false,
+            true,
         )
         .unwrap();
 
@@ -716,17 +771,18 @@
 
         let expect_feature_flags_impl_content = r#"
         package com.android.aconfig.test;
+        import android.os.Binder;
         import android.provider.DeviceConfig;
         import android.provider.DeviceConfig.Properties;
         /** @hide */
         public final class FeatureFlagsImpl implements FeatureFlags {
-            private static boolean aconfig_test_is_cached = false;
+            private static volatile boolean aconfig_test_is_cached = false;
             private static boolean disabledRwExported = false;
             private static boolean enabledFixedRoExported = false;
             private static boolean enabledRoExported = false;
 
-
             private void load_overrides_aconfig_test() {
+                final long ident = Binder.clearCallingIdentity();
                 try {
                     Properties properties = DeviceConfig.getProperties("aconfig_test");
                     disabledRwExported =
@@ -744,27 +800,31 @@
                         + "flag declaration.",
                         e
                     );
+                } catch (SecurityException e) {
+                    // for isolated process case, skip loading flag value from the storage, use the default
+                } finally {
+                    Binder.restoreCallingIdentity(ident);
                 }
                 aconfig_test_is_cached = true;
             }
             @Override
             public boolean disabledRwExported() {
                 if (!aconfig_test_is_cached) {
-                    load_overrides_aconfig_test();
+                        load_overrides_aconfig_test();
                 }
                 return disabledRwExported;
             }
             @Override
             public boolean enabledFixedRoExported() {
                 if (!aconfig_test_is_cached) {
-                    load_overrides_aconfig_test();
+                        load_overrides_aconfig_test();
                 }
                 return enabledFixedRoExported;
             }
             @Override
             public boolean enabledRoExported() {
                 if (!aconfig_test_is_cached) {
-                    load_overrides_aconfig_test();
+                        load_overrides_aconfig_test();
                 }
                 return enabledRoExported;
             }
@@ -870,7 +930,7 @@
             modified_parsed_flags.into_iter(),
             mode,
             flag_ids,
-            false,
+            true,
         )
         .unwrap();
 
@@ -991,7 +1051,7 @@
             modified_parsed_flags.into_iter(),
             mode,
             flag_ids,
-            false,
+            true,
         )
         .unwrap();
         let expect_featureflags_content = r#"
diff --git a/tools/aconfig/aconfig/src/codegen/rust.rs b/tools/aconfig/aconfig/src/codegen/rust.rs
index 45488b0..82a6ebc 100644
--- a/tools/aconfig/aconfig/src/codegen/rust.rs
+++ b/tools/aconfig/aconfig/src/codegen/rust.rs
@@ -113,41 +113,35 @@
 use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context};
 use std::path::Path;
 use std::io::Write;
+use std::sync::LazyLock;
 use log::{log, LevelFilter, Level};
 
-static STORAGE_MIGRATION_MARKER_FILE: &str =
-    "/metadata/aconfig_test_missions/mission_1";
-static MIGRATION_LOG_TAG: &str = "AconfigTestMission1";
-
 /// flag provider
 pub struct FlagProvider;
 
-lazy_static::lazy_static! {
     /// flag value cache for disabled_rw
-    static ref CACHED_disabled_rw: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_disabled_rw: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.disabled_rw",
-        "false") == "true";
+        "false") == "true");
 
     /// flag value cache for disabled_rw_exported
-    static ref CACHED_disabled_rw_exported: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_disabled_rw_exported: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.disabled_rw_exported",
-        "false") == "true";
+        "false") == "true");
 
     /// flag value cache for disabled_rw_in_other_namespace
-    static ref CACHED_disabled_rw_in_other_namespace: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_disabled_rw_in_other_namespace: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.other_namespace",
         "com.android.aconfig.test.disabled_rw_in_other_namespace",
-        "false") == "true";
+        "false") == "true");
 
     /// flag value cache for enabled_rw
-    static ref CACHED_enabled_rw: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_enabled_rw: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.enabled_rw",
-        "true") == "true";
-
-}
+        "true") == "true");
 
 impl FlagProvider {
     /// query flag disabled_ro
@@ -259,223 +253,182 @@
 use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context};
 use std::path::Path;
 use std::io::Write;
+use std::sync::LazyLock;
 use log::{log, LevelFilter, Level};
 
-static STORAGE_MIGRATION_MARKER_FILE: &str =
-    "/metadata/aconfig_test_missions/mission_1";
-static MIGRATION_LOG_TAG: &str = "AconfigTestMission1";
-
 /// flag provider
 pub struct FlagProvider;
 
-lazy_static::lazy_static! {
+static PACKAGE_OFFSET: LazyLock<Result<Option<u32>, AconfigStorageError>> = LazyLock::new(|| unsafe {
+    get_mapped_storage_file("system", StorageFileType::PackageMap)
+    .and_then(|package_map| get_package_read_context(&package_map, "com.android.aconfig.test"))
+    .map(|context| context.map(|c| c.boolean_start_index))
+});
 
-    static ref PACKAGE_OFFSET: Result<Option<u32>, AconfigStorageError> = unsafe {
-        get_mapped_storage_file("system", StorageFileType::PackageMap)
-        .and_then(|package_map| get_package_read_context(&package_map, "com.android.aconfig.test"))
-        .map(|context| context.map(|c| c.boolean_start_index))
-    };
+static FLAG_VAL_MAP: LazyLock<Result<Mmap, AconfigStorageError>> = LazyLock::new(|| unsafe {
+    get_mapped_storage_file("system", StorageFileType::FlagVal)
+});
 
-    static ref FLAG_VAL_MAP: Result<Mmap, AconfigStorageError> = unsafe {
-        get_mapped_storage_file("system", StorageFileType::FlagVal)
-    };
-    /// flag value cache for disabled_rw
+/// flag value cache for disabled_rw
+static CACHED_disabled_rw: LazyLock<bool> = LazyLock::new(|| {
+    // This will be called multiple times. Subsequent calls after the first are noops.
+    logger::init(
+        logger::Config::default()
+            .with_tag_on_device("aconfig_rust_codegen")
+            .with_max_level(LevelFilter::Info));
 
-    static ref CACHED_disabled_rw: bool = {
-        let result = flags_rust::GetServerConfigurableFlag(
-            "aconfig_flags.aconfig_test",
-            "com.android.aconfig.test.disabled_rw",
-            "false") == "true";
+    let flag_value_result = FLAG_VAL_MAP
+        .as_ref()
+        .map_err(|err| format!("failed to get flag val map: {err}"))
+        .and_then(|flag_val_map| {
+            PACKAGE_OFFSET
+               .as_ref()
+               .map_err(|err| format!("failed to get package read offset: {err}"))
+               .and_then(|package_offset| {
+                   match package_offset {
+                       Some(offset) => {
+                           get_boolean_flag_value(&flag_val_map, offset + 1)
+                               .map_err(|err| format!("failed to get flag: {err}"))
+                       },
+                       None => {
+                           log!(Level::Error, "no context found for package com.android.aconfig.test");
+                           Err(format!("failed to flag package com.android.aconfig.test"))
+                       }
+                    }
+                })
+            });
 
-        if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-            // This will be called multiple times. Subsequent calls after the first are noops.
-            logger::init(
-                logger::Config::default()
-                    .with_tag_on_device(MIGRATION_LOG_TAG)
-                    .with_max_level(LevelFilter::Info));
+    match flag_value_result {
+        Ok(flag_value) => {
+            return flag_value;
+        },
+        Err(err) => {
+            log!(Level::Error, "aconfig_rust_codegen: error: {err}");
+            return false;
+        }
+    }
+});
 
-            let aconfig_storage_result = FLAG_VAL_MAP
-                .as_ref()
-                .map_err(|err| format!("failed to get flag val map: {err}"))
-                .and_then(|flag_val_map| {
-                    PACKAGE_OFFSET
-                        .as_ref()
-                        .map_err(|err| format!("failed to get package read offset: {err}"))
-                        .and_then(|package_offset| {
-                            match package_offset {
-                                Some(offset) => {
-                                    get_boolean_flag_value(&flag_val_map, offset + 1)
-                                        .map_err(|err| format!("failed to get flag: {err}"))
-                                },
-                                None => Err("no context found for package 'com.android.aconfig.test'".to_string())
+/// flag value cache for disabled_rw_exported
+static CACHED_disabled_rw_exported: LazyLock<bool> = LazyLock::new(|| {
+        // This will be called multiple times. Subsequent calls after the first are noops.
+        logger::init(
+            logger::Config::default()
+                .with_tag_on_device("aconfig_rust_codegen")
+                .with_max_level(LevelFilter::Info));
+
+        let flag_value_result = FLAG_VAL_MAP
+            .as_ref()
+            .map_err(|err| format!("failed to get flag val map: {err}"))
+            .and_then(|flag_val_map| {
+                PACKAGE_OFFSET
+                    .as_ref()
+                    .map_err(|err| format!("failed to get package read offset: {err}"))
+                    .and_then(|package_offset| {
+                        match package_offset {
+                            Some(offset) => {
+                                get_boolean_flag_value(&flag_val_map, offset + 2)
+                                    .map_err(|err| format!("failed to get flag: {err}"))
+                            },
+                            None => {
+                                log!(Level::Error, "no context found for package com.android.aconfig.test");
+                                Err(format!("failed to flag package com.android.aconfig.test"))
                             }
-                        })
-                    });
+                        }
+                    })
+                });
 
-            match aconfig_storage_result {
-                Ok(storage_result) if storage_result == result => {
-                    log!(Level::Info, "AconfigTestMission1: success! flag 'disabled_rw' contained correct value. Legacy storage was {result}, new storage was {storage_result}");
-                },
-                Ok(storage_result) => {
-                    log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw'. Legacy storage was {result}, new storage was {storage_result}");
-                },
-                Err(err) => {
-                    log!(Level::Error, "AconfigTestMission1: error: {err}")
-                }
+        match flag_value_result {
+            Ok(flag_value) => {
+                 return flag_value;
+            },
+            Err(err) => {
+                log!(Level::Error, "aconfig_rust_codegen: error: {err}");
+                return false;
             }
         }
+});
 
-        result
-        };
+/// flag value cache for disabled_rw_in_other_namespace
+static CACHED_disabled_rw_in_other_namespace: LazyLock<bool> = LazyLock::new(|| {
+        // This will be called multiple times. Subsequent calls after the first are noops.
+        logger::init(
+            logger::Config::default()
+                .with_tag_on_device("aconfig_rust_codegen")
+                .with_max_level(LevelFilter::Info));
 
-    /// flag value cache for disabled_rw_exported
-
-    static ref CACHED_disabled_rw_exported: bool = {
-        let result = flags_rust::GetServerConfigurableFlag(
-            "aconfig_flags.aconfig_test",
-            "com.android.aconfig.test.disabled_rw_exported",
-            "false") == "true";
-
-        if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-            // This will be called multiple times. Subsequent calls after the first are noops.
-            logger::init(
-                logger::Config::default()
-                    .with_tag_on_device(MIGRATION_LOG_TAG)
-                    .with_max_level(LevelFilter::Info));
-
-            let aconfig_storage_result = FLAG_VAL_MAP
-                .as_ref()
-                .map_err(|err| format!("failed to get flag val map: {err}"))
-                .and_then(|flag_val_map| {
-                    PACKAGE_OFFSET
-                        .as_ref()
-                        .map_err(|err| format!("failed to get package read offset: {err}"))
-                        .and_then(|package_offset| {
-                            match package_offset {
-                                Some(offset) => {
-                                    get_boolean_flag_value(&flag_val_map, offset + 2)
-                                        .map_err(|err| format!("failed to get flag: {err}"))
-                                },
-                                None => Err("no context found for package 'com.android.aconfig.test'".to_string())
+        let flag_value_result = FLAG_VAL_MAP
+            .as_ref()
+            .map_err(|err| format!("failed to get flag val map: {err}"))
+            .and_then(|flag_val_map| {
+                PACKAGE_OFFSET
+                    .as_ref()
+                    .map_err(|err| format!("failed to get package read offset: {err}"))
+                    .and_then(|package_offset| {
+                        match package_offset {
+                            Some(offset) => {
+                                get_boolean_flag_value(&flag_val_map, offset + 3)
+                                    .map_err(|err| format!("failed to get flag: {err}"))
+                            },
+                            None => {
+                                log!(Level::Error, "no context found for package com.android.aconfig.test");
+                                Err(format!("failed to flag package com.android.aconfig.test"))
                             }
-                        })
-                    });
+                        }
+                    })
+                });
 
-            match aconfig_storage_result {
-                Ok(storage_result) if storage_result == result => {
-                    log!(Level::Info, "AconfigTestMission1: success! flag 'disabled_rw_exported' contained correct value. Legacy storage was {result}, new storage was {storage_result}");
-                },
-                Ok(storage_result) => {
-                    log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw_exported'. Legacy storage was {result}, new storage was {storage_result}");
-                },
-                Err(err) => {
-                    log!(Level::Error, "AconfigTestMission1: error: {err}")
-                }
+        match flag_value_result {
+            Ok(flag_value) => {
+                 return flag_value;
+            },
+            Err(err) => {
+                log!(Level::Error, "aconfig_rust_codegen: error: {err}");
+                return false;
             }
         }
+});
 
-        result
-        };
 
-    /// flag value cache for disabled_rw_in_other_namespace
+/// flag value cache for enabled_rw
+static CACHED_enabled_rw: LazyLock<bool> = LazyLock::new(|| {
+        // This will be called multiple times. Subsequent calls after the first are noops.
+        logger::init(
+            logger::Config::default()
+                .with_tag_on_device("aconfig_rust_codegen")
+                .with_max_level(LevelFilter::Info));
 
-    static ref CACHED_disabled_rw_in_other_namespace: bool = {
-        let result = flags_rust::GetServerConfigurableFlag(
-            "aconfig_flags.other_namespace",
-            "com.android.aconfig.test.disabled_rw_in_other_namespace",
-            "false") == "true";
-
-        if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-            // This will be called multiple times. Subsequent calls after the first are noops.
-            logger::init(
-                logger::Config::default()
-                    .with_tag_on_device(MIGRATION_LOG_TAG)
-                    .with_max_level(LevelFilter::Info));
-
-            let aconfig_storage_result = FLAG_VAL_MAP
-                .as_ref()
-                .map_err(|err| format!("failed to get flag val map: {err}"))
-                .and_then(|flag_val_map| {
-                    PACKAGE_OFFSET
-                        .as_ref()
-                        .map_err(|err| format!("failed to get package read offset: {err}"))
-                        .and_then(|package_offset| {
-                            match package_offset {
-                                Some(offset) => {
-                                    get_boolean_flag_value(&flag_val_map, offset + 3)
-                                        .map_err(|err| format!("failed to get flag: {err}"))
-                                },
-                                None => Err("no context found for package 'com.android.aconfig.test'".to_string())
+        let flag_value_result = FLAG_VAL_MAP
+            .as_ref()
+            .map_err(|err| format!("failed to get flag val map: {err}"))
+            .and_then(|flag_val_map| {
+                PACKAGE_OFFSET
+                    .as_ref()
+                    .map_err(|err| format!("failed to get package read offset: {err}"))
+                    .and_then(|package_offset| {
+                        match package_offset {
+                            Some(offset) => {
+                                get_boolean_flag_value(&flag_val_map, offset + 8)
+                                    .map_err(|err| format!("failed to get flag: {err}"))
+                            },
+                            None => {
+                                log!(Level::Error, "no context found for package com.android.aconfig.test");
+                                Err(format!("failed to flag package com.android.aconfig.test"))
                             }
-                        })
-                    });
+                        }
+                    })
+                });
 
-            match aconfig_storage_result {
-                Ok(storage_result) if storage_result == result => {
-                    log!(Level::Info, "AconfigTestMission1: success! flag 'disabled_rw_in_other_namespace' contained correct value. Legacy storage was {result}, new storage was {storage_result}");
-                },
-                Ok(storage_result) => {
-                    log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw_in_other_namespace'. Legacy storage was {result}, new storage was {storage_result}");
-                },
-                Err(err) => {
-                    log!(Level::Error, "AconfigTestMission1: error: {err}")
-                }
+        match flag_value_result {
+            Ok(flag_value) => {
+                 return flag_value;
+            },
+            Err(err) => {
+                log!(Level::Error, "aconfig_rust_codegen: error: {err}");
+                return true;
             }
         }
-
-        result
-        };
-
-    /// flag value cache for enabled_rw
-
-    static ref CACHED_enabled_rw: bool = {
-        let result = flags_rust::GetServerConfigurableFlag(
-            "aconfig_flags.aconfig_test",
-            "com.android.aconfig.test.enabled_rw",
-            "true") == "true";
-
-        if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-            // This will be called multiple times. Subsequent calls after the first are noops.
-            logger::init(
-                logger::Config::default()
-                    .with_tag_on_device(MIGRATION_LOG_TAG)
-                    .with_max_level(LevelFilter::Info));
-
-            let aconfig_storage_result = FLAG_VAL_MAP
-                .as_ref()
-                .map_err(|err| format!("failed to get flag val map: {err}"))
-                .and_then(|flag_val_map| {
-                    PACKAGE_OFFSET
-                        .as_ref()
-                        .map_err(|err| format!("failed to get package read offset: {err}"))
-                        .and_then(|package_offset| {
-                            match package_offset {
-                                Some(offset) => {
-                                    get_boolean_flag_value(&flag_val_map, offset + 8)
-                                        .map_err(|err| format!("failed to get flag: {err}"))
-                                },
-                                None => Err("no context found for package 'com.android.aconfig.test'".to_string())
-                            }
-                        })
-                    });
-
-            match aconfig_storage_result {
-                Ok(storage_result) if storage_result == result => {
-                    log!(Level::Info, "AconfigTestMission1: success! flag 'enabled_rw' contained correct value. Legacy storage was {result}, new storage was {storage_result}");
-                },
-                Ok(storage_result) => {
-                    log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'enabled_rw'. Legacy storage was {result}, new storage was {storage_result}");
-                },
-                Err(err) => {
-                    log!(Level::Error, "AconfigTestMission1: error: {err}")
-                }
-            }
-        }
-
-        result
-        };
-
-}
+});
 
 impl FlagProvider {
 
@@ -535,66 +488,7 @@
 /// query flag disabled_ro
 #[inline(always)]
 pub fn disabled_ro() -> bool {
-
-
-    let result = false;
-    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-        return result;
-    }
-
-    // This will be called multiple times. Subsequent calls after the first
-    // are noops.
-    logger::init(
-        logger::Config::default()
-            .with_tag_on_device(MIGRATION_LOG_TAG)
-            .with_max_level(LevelFilter::Info),
-    );
-
-    unsafe {
-        let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) {
-            Ok(file) => file,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}");
-                return result;
-            }
-        };
-
-        let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") {
-            Ok(Some(context)) => context,
-            Ok(None) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': did not get context");
-                return result;
-            },
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}");
-                return result;
-            }
-        };
-        let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) {
-            Ok(val_map) => val_map,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}");
-                return result;
-            }
-        };
-        let value = match get_boolean_flag_value(&flag_val_map, 0 + package_read_context.boolean_start_index) {
-            Ok(val) => val,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}");
-                return result;
-            }
-        };
-
-        if result != value {
-            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'disabled_ro'. Legacy storage was {result}, new storage was {value}");
-        } else {
-            let default_value = false;
-            log!(Level::Info, "AconfigTestMission1: success! flag 'disabled_ro' contained correct value. Legacy storage was {default_value}, new storage was {value}");
-        }
-    }
-
-    result
-
+    false
 }
 
 /// query flag disabled_rw
@@ -618,261 +512,25 @@
 /// query flag enabled_fixed_ro
 #[inline(always)]
 pub fn enabled_fixed_ro() -> bool {
-
-
-    let result = true;
-    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-        return result;
-    }
-
-    // This will be called multiple times. Subsequent calls after the first
-    // are noops.
-    logger::init(
-        logger::Config::default()
-            .with_tag_on_device(MIGRATION_LOG_TAG)
-            .with_max_level(LevelFilter::Info),
-    );
-
-    unsafe {
-        let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) {
-            Ok(file) => file,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}");
-                return result;
-            }
-        };
-
-        let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") {
-            Ok(Some(context)) => context,
-            Ok(None) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': did not get context");
-                return result;
-            },
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}");
-                return result;
-            }
-        };
-        let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) {
-            Ok(val_map) => val_map,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}");
-                return result;
-            }
-        };
-        let value = match get_boolean_flag_value(&flag_val_map, 4 + package_read_context.boolean_start_index) {
-            Ok(val) => val,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}");
-                return result;
-            }
-        };
-
-        if result != value {
-            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_fixed_ro'. Legacy storage was {result}, new storage was {value}");
-        } else {
-            let default_value = true;
-            log!(Level::Info, "AconfigTestMission1: success! flag 'enabled_fixed_ro' contained correct value. Legacy storage was {default_value}, new storage was {value}");
-        }
-    }
-
-    result
-
+    true
 }
 
 /// query flag enabled_fixed_ro_exported
 #[inline(always)]
 pub fn enabled_fixed_ro_exported() -> bool {
-
-
-    let result = true;
-    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-        return result;
-    }
-
-    // This will be called multiple times. Subsequent calls after the first
-    // are noops.
-    logger::init(
-        logger::Config::default()
-            .with_tag_on_device(MIGRATION_LOG_TAG)
-            .with_max_level(LevelFilter::Info),
-    );
-
-    unsafe {
-        let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) {
-            Ok(file) => file,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}");
-                return result;
-            }
-        };
-
-        let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") {
-            Ok(Some(context)) => context,
-            Ok(None) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': did not get context");
-                return result;
-            },
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}");
-                return result;
-            }
-        };
-        let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) {
-            Ok(val_map) => val_map,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}");
-                return result;
-            }
-        };
-        let value = match get_boolean_flag_value(&flag_val_map, 5 + package_read_context.boolean_start_index) {
-            Ok(val) => val,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}");
-                return result;
-            }
-        };
-
-        if result != value {
-            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_fixed_ro_exported'. Legacy storage was {result}, new storage was {value}");
-        } else {
-            let default_value = true;
-            log!(Level::Info, "AconfigTestMission1: success! flag 'enabled_fixed_ro_exported' contained correct value. Legacy storage was {default_value}, new storage was {value}");
-        }
-    }
-
-    result
-
+    true
 }
 
 /// query flag enabled_ro
 #[inline(always)]
 pub fn enabled_ro() -> bool {
-
-
-    let result = true;
-    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-        return result;
-    }
-
-    // This will be called multiple times. Subsequent calls after the first
-    // are noops.
-    logger::init(
-        logger::Config::default()
-            .with_tag_on_device(MIGRATION_LOG_TAG)
-            .with_max_level(LevelFilter::Info),
-    );
-
-    unsafe {
-        let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) {
-            Ok(file) => file,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}");
-                return result;
-            }
-        };
-
-        let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") {
-            Ok(Some(context)) => context,
-            Ok(None) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': did not get context");
-                return result;
-            },
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}");
-                return result;
-            }
-        };
-        let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) {
-            Ok(val_map) => val_map,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}");
-                return result;
-            }
-        };
-        let value = match get_boolean_flag_value(&flag_val_map, 6 + package_read_context.boolean_start_index) {
-            Ok(val) => val,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}");
-                return result;
-            }
-        };
-
-        if result != value {
-            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_ro'. Legacy storage was {result}, new storage was {value}");
-        } else {
-            let default_value = true;
-            log!(Level::Info, "AconfigTestMission1: success! flag 'enabled_ro' contained correct value. Legacy storage was {default_value}, new storage was {value}");
-        }
-    }
-
-    result
-
+    true
 }
 
 /// query flag enabled_ro_exported
 #[inline(always)]
 pub fn enabled_ro_exported() -> bool {
-
-
-    let result = true;
-    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() {
-        return result;
-    }
-
-    // This will be called multiple times. Subsequent calls after the first
-    // are noops.
-    logger::init(
-        logger::Config::default()
-            .with_tag_on_device(MIGRATION_LOG_TAG)
-            .with_max_level(LevelFilter::Info),
-    );
-
-    unsafe {
-        let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) {
-            Ok(file) => file,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}");
-                return result;
-            }
-        };
-
-        let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") {
-            Ok(Some(context)) => context,
-            Ok(None) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': did not get context");
-                return result;
-            },
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}");
-                return result;
-            }
-        };
-        let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) {
-            Ok(val_map) => val_map,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}");
-                return result;
-            }
-        };
-        let value = match get_boolean_flag_value(&flag_val_map, 7 + package_read_context.boolean_start_index) {
-            Ok(val) => val,
-            Err(err) => {
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}");
-                return result;
-            }
-        };
-
-        if result != value {
-            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_ro_exported'. Legacy storage was {result}, new storage was {value}");
-        } else {
-            let default_value = true;
-            log!(Level::Info, "AconfigTestMission1: success! flag 'enabled_ro_exported' contained correct value. Legacy storage was {default_value}, new storage was {value}");
-        }
-    }
-
-    result
-
+    true
 }
 
 /// query flag enabled_rw
@@ -1144,35 +802,29 @@
 use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context};
 use std::path::Path;
 use std::io::Write;
+use std::sync::LazyLock;
 use log::{log, LevelFilter, Level};
 
-static STORAGE_MIGRATION_MARKER_FILE: &str =
-    "/metadata/aconfig_test_missions/mission_1";
-static MIGRATION_LOG_TAG: &str = "AconfigTestMission1";
-
 /// flag provider
 pub struct FlagProvider;
 
-lazy_static::lazy_static! {
     /// flag value cache for disabled_rw_exported
-    static ref CACHED_disabled_rw_exported: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_disabled_rw_exported: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.disabled_rw_exported",
-        "false") == "true";
+        "false") == "true");
 
     /// flag value cache for enabled_fixed_ro_exported
-    static ref CACHED_enabled_fixed_ro_exported: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_enabled_fixed_ro_exported: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.enabled_fixed_ro_exported",
-        "false") == "true";
+        "false") == "true");
 
     /// flag value cache for enabled_ro_exported
-    static ref CACHED_enabled_ro_exported: bool = flags_rust::GetServerConfigurableFlag(
+    static CACHED_enabled_ro_exported: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
         "aconfig_flags.aconfig_test",
         "com.android.aconfig.test.enabled_ro_exported",
-        "false") == "true";
-
-}
+        "false") == "true");
 
 impl FlagProvider {
     /// query flag disabled_rw_exported
@@ -1218,12 +870,9 @@
 use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context};
 use std::path::Path;
 use std::io::Write;
+use std::sync::LazyLock;
 use log::{log, LevelFilter, Level};
 
-static STORAGE_MIGRATION_MARKER_FILE: &str =
-    "/metadata/aconfig_test_missions/mission_1";
-static MIGRATION_LOG_TAG: &str = "AconfigTestMission1";
-
 /// flag provider
 pub struct FlagProvider;
 
diff --git a/tools/aconfig/aconfig/src/commands.rs b/tools/aconfig/aconfig/src/commands.rs
index 59f0662..81119f3 100644
--- a/tools/aconfig/aconfig/src/commands.rs
+++ b/tools/aconfig/aconfig/src/commands.rs
@@ -18,6 +18,7 @@
 use itertools::Itertools;
 use protobuf::Message;
 use std::collections::HashMap;
+use std::hash::Hasher;
 use std::io::Read;
 use std::path::PathBuf;
 
@@ -31,6 +32,7 @@
     ParsedFlagExt, ProtoFlagMetadata, ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag,
     ProtoParsedFlags, ProtoTracepoint,
 };
+use aconfig_storage_file::sip_hasher13::SipHasher13;
 use aconfig_storage_file::StorageFileType;
 
 pub struct Input {
@@ -67,6 +69,7 @@
     declarations: Vec<Input>,
     values: Vec<Input>,
     default_permission: ProtoFlagPermission,
+    allow_read_write: bool,
 ) -> Result<Vec<u8>> {
     let mut parsed_flags = ProtoParsedFlags::new();
 
@@ -77,8 +80,18 @@
             .read_to_string(&mut contents)
             .with_context(|| format!("failed to read {}", input.source))?;
 
-        let flag_declarations = aconfig_protos::flag_declarations::try_from_text_proto(&contents)
-            .with_context(|| input.error_context())?;
+        let mut flag_declarations =
+            aconfig_protos::flag_declarations::try_from_text_proto(&contents)
+                .with_context(|| input.error_context())?;
+
+        // system_ext flags should be treated as system flags, because /system_ext
+        // and /system are combined into one container.
+        // TODO: remove this logic when we start enforcing that system_ext cannot be set as
+        // container in aconfig declaration files.
+        if flag_declarations.container() == "system_ext" {
+            flag_declarations.set_container(String::from("system"));
+        }
+
         ensure!(
             package == flag_declarations.package(),
             "failed to parse {}: expected package {}, got {}",
@@ -183,6 +196,16 @@
         }
     }
 
+    if !allow_read_write {
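+        // Fail on the first flag that still has READ_WRITE permission when read-write flags are not allowed.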
+        if let Some(pf) = parsed_flags
+            .parsed_flag
+            .iter()
+            .find(|pf| pf.permission() == ProtoFlagPermission::READ_WRITE)
+        {
+            bail!("flag {} has permission READ_WRITE, but allow_read_write is false", pf.name());
+        }
+    }
+
     // Create a sorted parsed_flags
     aconfig_protos::parsed_flags::sort_parsed_flags(&mut parsed_flags);
     aconfig_protos::parsed_flags::verify_fields(&parsed_flags)?;
@@ -268,10 +291,11 @@
     caches: Vec<Input>,
     container: &str,
     file: &StorageFileType,
+    version: u32,
 ) -> Result<Vec<u8>> {
     let parsed_flags_vec: Vec<ProtoParsedFlags> =
         caches.into_iter().map(|mut input| input.try_parse_flags()).collect::<Result<Vec<_>>>()?;
-    generate_storage_file(container, parsed_flags_vec.iter(), file)
+    generate_storage_file(container, parsed_flags_vec.iter(), file, version)
 }
 
 pub fn create_device_config_defaults(mut input: Input) -> Result<Vec<u8>> {
@@ -410,12 +434,89 @@
     Ok(flag_ids)
 }
 
+// TODO: b/316357686 - Use fingerprint in codegen to protect hardcoded offset reads.
+/// Creates a fingerprint of the flag names (which requires sorting the vector).
+/// Fingerprint is used by both codegen and storage files.
+#[allow(dead_code)]
+pub fn compute_flags_fingerprint(flag_names: &mut Vec<String>) -> u64 {
+    flag_names.sort();
+
+    let mut hasher = SipHasher13::new();
+    for flag in flag_names {
+        hasher.write(flag.as_bytes());
+    }
+    hasher.finish()
+}
+
+// TODO: b/316357686 - Use fingerprint in codegen to protect hardcoded offset reads.
+/// Converts ProtoParsedFlags into a vector of strings containing all of the flag
+/// names. Helper fn for creating fingerprint for codegen files. Flags must all
+/// belong to the same package.
+#[allow(dead_code)]
+fn extract_flag_names(flags: ProtoParsedFlags) -> Result<Vec<String>> {
+    let separated_flags: Vec<ProtoParsedFlag> = flags.parsed_flag.into_iter().collect::<Vec<_>>();
+
+    // All flags must belong to the same package as the fingerprint is per-package.
+    let Some(_package) = find_unique_package(&separated_flags) else {
+        bail!("No parsed flags, or the parsed flags use different packages.");
+    };
+
+    Ok(separated_flags.into_iter().map(|flag| flag.name.unwrap()).collect::<Vec<_>>())
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
     use aconfig_protos::ProtoFlagPurpose;
 
     #[test]
+    fn test_offset_fingerprint() {
+        let parsed_flags = crate::test::parse_test_flags();
+        let expected_fingerprint: u64 = 5801144784618221668;
+
+        let mut extracted_flags = extract_flag_names(parsed_flags).unwrap();
+        let hash_result = compute_flags_fingerprint(&mut extracted_flags);
+
+        assert_eq!(hash_result, expected_fingerprint);
+    }
+
+    #[test]
+    fn test_offset_fingerprint_matches_from_package() {
+        let parsed_flags: ProtoParsedFlags = crate::test::parse_test_flags();
+
+        // All test flags are in the same package, so fingerprint from all of them.
+        let mut extracted_flags = extract_flag_names(parsed_flags.clone()).unwrap();
+        let result_from_parsed_flags = compute_flags_fingerprint(&mut extracted_flags);
+
+        let mut flag_names_vec = parsed_flags
+            .parsed_flag
+            .clone()
+            .into_iter()
+            .map(|flag| flag.name.unwrap())
+            .map(String::from)
+            .collect::<Vec<_>>();
+        let result_from_names = compute_flags_fingerprint(&mut flag_names_vec);
+
+        // Assert the same hash is generated for each case.
+        assert_eq!(result_from_parsed_flags, result_from_names);
+    }
+
+    #[test]
+    fn test_offset_fingerprint_different_packages_does_not_match() {
+        // Parse flags from two packages.
+        let parsed_flags: ProtoParsedFlags = crate::test::parse_test_flags();
+        let second_parsed_flags = crate::test::parse_second_package_flags();
+
+        let mut extracted_flags = extract_flag_names(parsed_flags).unwrap();
+        let result_from_parsed_flags = compute_flags_fingerprint(&mut extracted_flags);
+        let mut second_extracted_flags = extract_flag_names(second_parsed_flags).unwrap();
+        let second_result = compute_flags_fingerprint(&mut second_extracted_flags);
+
+        // Different flags should have a different fingerprint.
+        assert_ne!(result_from_parsed_flags, second_result);
+    }
+
+    #[test]
     fn test_parse_flags() {
         let parsed_flags = crate::test::parse_test_flags(); // calls parse_flags
         aconfig_protos::parsed_flags::verify_fields(&parsed_flags).unwrap();
@@ -486,6 +587,7 @@
             declaration,
             value,
             ProtoFlagPermission::READ_ONLY,
+            true,
         )
         .unwrap();
         let parsed_flags =
@@ -519,6 +621,7 @@
             declaration,
             value,
             ProtoFlagPermission::READ_WRITE,
+            true,
         )
         .unwrap_err();
         assert_eq!(
@@ -550,6 +653,7 @@
             declaration,
             value,
             ProtoFlagPermission::READ_WRITE,
+            true,
         )
         .unwrap_err();
         assert_eq!(
@@ -557,6 +661,121 @@
             "failed to parse memory: expected container argument.container, got declaration.container"
         );
     }
+    #[test]
+    fn test_parse_flags_no_allow_read_write_default_error() {
+        let first_flag = r#"
+        package: "com.first"
+        container: "com.first.container"
+        flag {
+            name: "first"
+            namespace: "first_ns"
+            description: "This is the description of the first flag."
+            bug: "123"
+        }
+        "#;
+        let declaration =
+            vec![Input { source: "memory".to_string(), reader: Box::new(first_flag.as_bytes()) }];
+
+        let error = crate::commands::parse_flags(
+            "com.first",
+            Some("com.first.container"),
+            declaration,
+            vec![],
+            ProtoFlagPermission::READ_WRITE,
+            false,
+        )
+        .unwrap_err();
+        assert_eq!(
+            format!("{:?}", error),
+            "flag first has permission READ_WRITE, but allow_read_write is false"
+        );
+    }
+
+    #[test]
+    fn test_parse_flags_no_allow_read_write_value_error() {
+        let first_flag = r#"
+        package: "com.first"
+        container: "com.first.container"
+        flag {
+            name: "first"
+            namespace: "first_ns"
+            description: "This is the description of the first flag."
+            bug: "123"
+        }
+        "#;
+        let declaration =
+            vec![Input { source: "memory".to_string(), reader: Box::new(first_flag.as_bytes()) }];
+
+        let first_flag_value = r#"
+        flag_value {
+            package: "com.first"
+            name: "first"
+            state: DISABLED
+            permission: READ_WRITE
+        }
+        "#;
+        let value = vec![Input {
+            source: "memory".to_string(),
+            reader: Box::new(first_flag_value.as_bytes()),
+        }];
+        let error = crate::commands::parse_flags(
+            "com.first",
+            Some("com.first.container"),
+            declaration,
+            value,
+            ProtoFlagPermission::READ_ONLY,
+            false,
+        )
+        .unwrap_err();
+        assert_eq!(
+            format!("{:?}", error),
+            "flag first has permission READ_WRITE, but allow_read_write is false"
+        );
+    }
+
+    #[test]
+    fn test_parse_flags_no_allow_read_write_success() {
+        let first_flag = r#"
+        package: "com.first"
+        container: "com.first.container"
+        flag {
+            name: "first"
+            namespace: "first_ns"
+            description: "This is the description of the first flag."
+            bug: "123"
+        }
+        "#;
+        let declaration =
+            vec![Input { source: "memory".to_string(), reader: Box::new(first_flag.as_bytes()) }];
+
+        let first_flag_value = r#"
+        flag_value {
+            package: "com.first"
+            name: "first"
+            state: DISABLED
+            permission: READ_ONLY
+        }
+        "#;
+        let value = vec![Input {
+            source: "memory".to_string(),
+            reader: Box::new(first_flag_value.as_bytes()),
+        }];
+        let flags_bytes = crate::commands::parse_flags(
+            "com.first",
+            Some("com.first.container"),
+            declaration,
+            value,
+            ProtoFlagPermission::READ_ONLY,
+            false,
+        )
+        .unwrap();
+        let parsed_flags =
+            aconfig_protos::parsed_flags::try_from_binary_proto(&flags_bytes).unwrap();
+        assert_eq!(1, parsed_flags.parsed_flag.len());
+        let parsed_flag = parsed_flags.parsed_flag.first().unwrap();
+        assert_eq!(ProtoFlagState::DISABLED, parsed_flag.state());
+        assert_eq!(ProtoFlagPermission::READ_ONLY, parsed_flag.permission());
+    }
 
     #[test]
     fn test_parse_flags_override_fixed_read_only() {
@@ -592,6 +811,7 @@
             declaration,
             value,
             ProtoFlagPermission::READ_WRITE,
+            true,
         )
         .unwrap_err();
         assert_eq!(
@@ -626,6 +846,7 @@
             declaration,
             value,
             ProtoFlagPermission::READ_ONLY,
+            true,
         )
         .unwrap();
         let parsed_flags =
diff --git a/tools/aconfig/aconfig/src/main.rs b/tools/aconfig/aconfig/src/main.rs
index 1fb64f9..c390288 100644
--- a/tools/aconfig/aconfig/src/main.rs
+++ b/tools/aconfig/aconfig/src/main.rs
@@ -16,6 +16,8 @@
 
 //! `aconfig` is a build time tool to manage build time configurations, such as feature flags.
 
+use aconfig_storage_file::DEFAULT_FILE_VERSION;
+use aconfig_storage_file::MAX_SUPPORTED_FILE_VERSION;
 use anyhow::{anyhow, bail, Context, Result};
 use clap::{builder::ArgAction, builder::EnumValueParser, Arg, ArgMatches, Command};
 use core::any::Any;
@@ -49,8 +51,7 @@
         .subcommand(
             Command::new("create-cache")
                 .arg(Arg::new("package").long("package").required(true))
-                // TODO(b/312769710): Make this argument required.
-                .arg(Arg::new("container").long("container"))
+                .arg(Arg::new("container").long("container").required(true))
                 .arg(Arg::new("declarations").long("declarations").action(ArgAction::Append))
                 .arg(Arg::new("values").long("values").action(ArgAction::Append))
                 .arg(
@@ -61,6 +62,12 @@
                             &commands::DEFAULT_FLAG_PERMISSION,
                         )),
                 )
+                .arg(
+                    Arg::new("allow-read-write")
+                        .long("allow-read-write")
+                        .value_parser(clap::value_parser!(bool))
+                        .default_value("true"),
+                )
                 .arg(Arg::new("cache").long("cache").required(true)),
         )
         .subcommand(
@@ -159,7 +166,13 @@
                         .value_parser(|s: &str| StorageFileType::try_from(s)),
                 )
                 .arg(Arg::new("cache").long("cache").action(ArgAction::Append).required(true))
-                .arg(Arg::new("out").long("out").required(true)),
+                .arg(Arg::new("out").long("out").required(true))
+                .arg(
+                    Arg::new("version")
+                        .long("version")
+                        .required(false)
+                        .value_parser(|s: &str| s.parse::<u32>()),
+                ),
         )
 }
 
@@ -235,12 +248,15 @@
                 sub_matches,
                 "default-permission",
             )?;
+            let allow_read_write = get_optional_arg::<bool>(sub_matches, "allow-read-write")
+                .expect("failed to parse allow-read-write");
             let output = commands::parse_flags(
                 package,
                 container,
                 declarations,
                 values,
                 *default_permission,
+                *allow_read_write,
             )
             .context("failed to create cache")?;
             let path = get_required_arg::<String>(sub_matches, "cache")?;
@@ -309,12 +325,18 @@
             write_output_to_file_or_stdout(path, &output)?;
         }
         Some(("create-storage", sub_matches)) => {
+            let version =
+                get_optional_arg::<u32>(sub_matches, "version").unwrap_or(&DEFAULT_FILE_VERSION);
+            if *version > MAX_SUPPORTED_FILE_VERSION {
+                bail!("Invalid version selected ({})", version);
+            }
             let file = get_required_arg::<StorageFileType>(sub_matches, "file")
                 .context("Invalid storage file selection")?;
             let cache = open_zero_or_more_files(sub_matches, "cache")?;
             let container = get_required_arg::<String>(sub_matches, "container")?;
             let path = get_required_arg::<String>(sub_matches, "out")?;
-            let output = commands::create_storage(cache, container, file)
+
+            let output = commands::create_storage(cache, container, file, *version)
                 .context("failed to create storage files")?;
             write_output_to_file_or_stdout(path, &output)?;
         }
diff --git a/tools/aconfig/aconfig/src/storage/flag_info.rs b/tools/aconfig/aconfig/src/storage/flag_info.rs
new file mode 100644
index 0000000..0b5a67b
--- /dev/null
+++ b/tools/aconfig/aconfig/src/storage/flag_info.rs
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::commands::assign_flag_ids;
+use crate::storage::FlagPackage;
+use aconfig_protos::ProtoFlagPermission;
+use aconfig_storage_file::{FlagInfoHeader, FlagInfoList, FlagInfoNode, StorageFileType};
+use anyhow::{anyhow, Result};
+
+fn new_header(container: &str, num_flags: u32, version: u32) -> FlagInfoHeader {
+    FlagInfoHeader {
+        version,
+        container: String::from(container),
+        file_type: StorageFileType::FlagInfo as u8,
+        file_size: 0,
+        num_flags,
+        boolean_flag_offset: 0,
+    }
+}
+
+pub fn create_flag_info(
+    container: &str,
+    packages: &[FlagPackage],
+    version: u32,
+) -> Result<FlagInfoList> {
+    // create list
+    let num_flags = packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum();
+
+    let mut is_flag_rw = vec![false; num_flags as usize];
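+    // Mark each flag read-write or read-only, indexed by the package's boolean_start_index plus its assigned flag id.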
+    for pkg in packages.iter() {
+        let start_index = pkg.boolean_start_index as usize;
+        let flag_ids = assign_flag_ids(pkg.package_name, pkg.boolean_flags.iter().copied())?;
+        for pf in pkg.boolean_flags.iter() {
+            let fid = flag_ids
+                .get(pf.name())
+                .ok_or(anyhow!(format!("missing flag id for {}", pf.name())))?;
+            is_flag_rw[start_index + (*fid as usize)] =
+                pf.permission() == ProtoFlagPermission::READ_WRITE;
+        }
+    }
+
+    let mut list = FlagInfoList {
+        header: new_header(container, num_flags, version),
+        nodes: is_flag_rw.iter().map(|&rw| FlagInfoNode::create(rw)).collect(),
+    };
+
+    // initialize all header fields
+    list.header.boolean_flag_offset = list.header.into_bytes().len() as u32;
+    let bytes_per_node = FlagInfoNode::create(false).into_bytes().len() as u32;
+    list.header.file_size = list.header.boolean_flag_offset + num_flags * bytes_per_node;
+
+    Ok(list)
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::storage::{group_flags_by_package, tests::parse_all_test_flags};
+    use aconfig_storage_file::DEFAULT_FILE_VERSION;
+
+    pub fn create_test_flag_info_list_from_source() -> Result<FlagInfoList> {
+        let caches = parse_all_test_flags();
+        let packages = group_flags_by_package(caches.iter(), DEFAULT_FILE_VERSION);
+        create_flag_info("mockup", &packages, DEFAULT_FILE_VERSION)
+    }
+
+    #[test]
+    // this test point locks down the flag info creation and each field
+    fn test_list_contents() {
+        let flag_info_list = create_test_flag_info_list_from_source();
+        assert!(flag_info_list.is_ok());
+        let expected_flag_info_list =
+            aconfig_storage_file::test_utils::create_test_flag_info_list(DEFAULT_FILE_VERSION);
+        assert_eq!(flag_info_list.unwrap(), expected_flag_info_list);
+    }
+}
diff --git a/tools/aconfig/aconfig/src/storage/flag_table.rs b/tools/aconfig/aconfig/src/storage/flag_table.rs
index a971211..ae5a16c 100644
--- a/tools/aconfig/aconfig/src/storage/flag_table.rs
+++ b/tools/aconfig/aconfig/src/storage/flag_table.rs
@@ -19,13 +19,12 @@
 use aconfig_protos::ProtoFlagPermission;
 use aconfig_storage_file::{
     get_table_size, FlagTable, FlagTableHeader, FlagTableNode, StorageFileType, StoredFlagType,
-    FILE_VERSION,
 };
 use anyhow::{anyhow, Result};
 
-fn new_header(container: &str, num_flags: u32) -> FlagTableHeader {
+fn new_header(container: &str, num_flags: u32, version: u32) -> FlagTableHeader {
     FlagTableHeader {
-        version: FILE_VERSION,
+        version,
         container: String::from(container),
         file_type: StorageFileType::FlagMap as u8,
         file_size: 0,
@@ -86,12 +85,16 @@
     }
 }
 
-pub fn create_flag_table(container: &str, packages: &[FlagPackage]) -> Result<FlagTable> {
+pub fn create_flag_table(
+    container: &str,
+    packages: &[FlagPackage],
+    version: u32,
+) -> Result<FlagTable> {
     // create table
     let num_flags = packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum();
     let num_buckets = get_table_size(num_flags)?;
 
-    let mut header = new_header(container, num_flags);
+    let mut header = new_header(container, num_flags, version);
     let mut buckets = vec![None; num_buckets as usize];
     let mut node_wrappers = packages
         .iter()
@@ -138,13 +141,15 @@
 
 #[cfg(test)]
 mod tests {
+    use aconfig_storage_file::DEFAULT_FILE_VERSION;
+
     use super::*;
     use crate::storage::{group_flags_by_package, tests::parse_all_test_flags};
 
     fn create_test_flag_table_from_source() -> Result<FlagTable> {
         let caches = parse_all_test_flags();
-        let packages = group_flags_by_package(caches.iter());
-        create_flag_table("mockup", &packages)
+        let packages = group_flags_by_package(caches.iter(), DEFAULT_FILE_VERSION);
+        create_flag_table("mockup", &packages, DEFAULT_FILE_VERSION)
     }
 
     #[test]
@@ -152,7 +157,8 @@
     fn test_table_contents() {
         let flag_table = create_test_flag_table_from_source();
         assert!(flag_table.is_ok());
-        let expected_flag_table = aconfig_storage_file::test_utils::create_test_flag_table();
+        let expected_flag_table =
+            aconfig_storage_file::test_utils::create_test_flag_table(DEFAULT_FILE_VERSION);
         assert_eq!(flag_table.unwrap(), expected_flag_table);
     }
 }
diff --git a/tools/aconfig/aconfig/src/storage/flag_value.rs b/tools/aconfig/aconfig/src/storage/flag_value.rs
index c15ba54..065b7e3 100644
--- a/tools/aconfig/aconfig/src/storage/flag_value.rs
+++ b/tools/aconfig/aconfig/src/storage/flag_value.rs
@@ -17,12 +17,12 @@
 use crate::commands::assign_flag_ids;
 use crate::storage::FlagPackage;
 use aconfig_protos::ProtoFlagState;
-use aconfig_storage_file::{FlagValueHeader, FlagValueList, StorageFileType, FILE_VERSION};
+use aconfig_storage_file::{FlagValueHeader, FlagValueList, StorageFileType};
 use anyhow::{anyhow, Result};
 
-fn new_header(container: &str, num_flags: u32) -> FlagValueHeader {
+fn new_header(container: &str, num_flags: u32, version: u32) -> FlagValueHeader {
     FlagValueHeader {
-        version: FILE_VERSION,
+        version,
         container: String::from(container),
         file_type: StorageFileType::FlagVal as u8,
         file_size: 0,
@@ -31,12 +31,16 @@
     }
 }
 
-pub fn create_flag_value(container: &str, packages: &[FlagPackage]) -> Result<FlagValueList> {
+pub fn create_flag_value(
+    container: &str,
+    packages: &[FlagPackage],
+    version: u32,
+) -> Result<FlagValueList> {
     // create list
     let num_flags = packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum();
 
     let mut list = FlagValueList {
-        header: new_header(container, num_flags),
+        header: new_header(container, num_flags, version),
         booleans: vec![false; num_flags as usize],
     };
 
@@ -61,13 +65,15 @@
 
 #[cfg(test)]
 mod tests {
+    use aconfig_storage_file::DEFAULT_FILE_VERSION;
+
     use super::*;
     use crate::storage::{group_flags_by_package, tests::parse_all_test_flags};
 
     pub fn create_test_flag_value_list_from_source() -> Result<FlagValueList> {
         let caches = parse_all_test_flags();
-        let packages = group_flags_by_package(caches.iter());
-        create_flag_value("mockup", &packages)
+        let packages = group_flags_by_package(caches.iter(), DEFAULT_FILE_VERSION);
+        create_flag_value("mockup", &packages, DEFAULT_FILE_VERSION)
     }
 
     #[test]
@@ -76,7 +82,7 @@
         let flag_value_list = create_test_flag_value_list_from_source();
         assert!(flag_value_list.is_ok());
         let expected_flag_value_list =
-            aconfig_storage_file::test_utils::create_test_flag_value_list();
+            aconfig_storage_file::test_utils::create_test_flag_value_list(DEFAULT_FILE_VERSION);
         assert_eq!(flag_value_list.unwrap(), expected_flag_value_list);
     }
 }
diff --git a/tools/aconfig/aconfig/src/storage/mod.rs b/tools/aconfig/aconfig/src/storage/mod.rs
index 73339f2..4bc72f7 100644
--- a/tools/aconfig/aconfig/src/storage/mod.rs
+++ b/tools/aconfig/aconfig/src/storage/mod.rs
@@ -14,15 +14,17 @@
  * limitations under the License.
  */
 
+pub mod flag_info;
 pub mod flag_table;
 pub mod flag_value;
 pub mod package_table;
 
-use anyhow::{anyhow, Result};
+use anyhow::Result;
 use std::collections::{HashMap, HashSet};
 
+use crate::commands::compute_flags_fingerprint;
 use crate::storage::{
-    flag_table::create_flag_table, flag_value::create_flag_value,
+    flag_info::create_flag_info, flag_table::create_flag_table, flag_value::create_flag_value,
     package_table::create_package_table,
 };
 use aconfig_protos::{ProtoParsedFlag, ProtoParsedFlags};
@@ -31,6 +33,7 @@
 pub struct FlagPackage<'a> {
     pub package_name: &'a str,
     pub package_id: u32,
+    pub fingerprint: u64,
     pub flag_names: HashSet<&'a str>,
     pub boolean_flags: Vec<&'a ProtoParsedFlag>,
     // The index of the first boolean flag in this aconfig package among all boolean
@@ -43,6 +46,7 @@
         FlagPackage {
             package_name,
             package_id,
+            fingerprint: 0,
             flag_names: HashSet::new(),
             boolean_flags: vec![],
             boolean_start_index: 0,
@@ -56,7 +60,7 @@
     }
 }
 
-pub fn group_flags_by_package<'a, I>(parsed_flags_vec_iter: I) -> Vec<FlagPackage<'a>>
+pub fn group_flags_by_package<'a, I>(parsed_flags_vec_iter: I, version: u32) -> Vec<FlagPackage<'a>>
 where
     I: Iterator<Item = &'a ProtoParsedFlags>,
 {
@@ -73,11 +77,18 @@
         }
     }
 
-    // cacluate boolean flag start index for each package
+    // Calculate boolean flag start index for each package
     let mut boolean_start_index = 0;
     for p in packages.iter_mut() {
         p.boolean_start_index = boolean_start_index;
         boolean_start_index += p.boolean_flags.len() as u32;
+
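+        // Per-package fingerprints are only computed for storage file format version 2 and above.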
+        if version >= 2 {
+            let mut flag_names_vec =
+                p.flag_names.clone().into_iter().map(String::from).collect::<Vec<_>>();
+            let fingerprint = compute_flags_fingerprint(&mut flag_names_vec);
+            p.fingerprint = fingerprint;
+        }
     }
 
     packages
@@ -87,31 +98,37 @@
     container: &str,
     parsed_flags_vec_iter: I,
     file: &StorageFileType,
+    version: u32,
 ) -> Result<Vec<u8>>
 where
     I: Iterator<Item = &'a ProtoParsedFlags>,
 {
-    let packages = group_flags_by_package(parsed_flags_vec_iter);
+    let packages = group_flags_by_package(parsed_flags_vec_iter, version);
 
     match file {
         StorageFileType::PackageMap => {
-            let package_table = create_package_table(container, &packages)?;
+            let package_table = create_package_table(container, &packages, version)?;
             Ok(package_table.into_bytes())
         }
         StorageFileType::FlagMap => {
-            let flag_table = create_flag_table(container, &packages)?;
+            let flag_table = create_flag_table(container, &packages, version)?;
             Ok(flag_table.into_bytes())
         }
         StorageFileType::FlagVal => {
-            let flag_value = create_flag_value(container, &packages)?;
+            let flag_value = create_flag_value(container, &packages, version)?;
             Ok(flag_value.into_bytes())
         }
-        _ => Err(anyhow!("aconfig does not support the creation of this storage file type")),
+        StorageFileType::FlagInfo => {
+            let flag_info = create_flag_info(container, &packages, version)?;
+            Ok(flag_info.into_bytes())
+        }
     }
 }
 
 #[cfg(test)]
 mod tests {
+    use aconfig_storage_file::DEFAULT_FILE_VERSION;
+
     use super::*;
     use crate::Input;
 
@@ -154,6 +171,7 @@
                         reader: Box::new(value_content),
                     }],
                     crate::commands::DEFAULT_FLAG_PERMISSION,
+                    true,
                 )
                 .unwrap();
                 aconfig_protos::parsed_flags::try_from_binary_proto(&bytes).unwrap()
@@ -164,7 +182,7 @@
     #[test]
     fn test_flag_package() {
         let caches = parse_all_test_flags();
-        let packages = group_flags_by_package(caches.iter());
+        let packages = group_flags_by_package(caches.iter(), DEFAULT_FILE_VERSION);
 
         for pkg in packages.iter() {
             let pkg_name = pkg.package_name;
@@ -184,6 +202,7 @@
         assert!(packages[0].flag_names.contains("disabled_rw"));
         assert!(packages[0].flag_names.contains("enabled_ro"));
         assert_eq!(packages[0].boolean_start_index, 0);
+        assert_eq!(packages[0].fingerprint, 0);
 
         assert_eq!(packages[1].package_name, "com.android.aconfig.storage.test_2");
         assert_eq!(packages[1].package_id, 1);
@@ -192,6 +211,7 @@
         assert!(packages[1].flag_names.contains("disabled_rw"));
         assert!(packages[1].flag_names.contains("enabled_fixed_ro"));
         assert_eq!(packages[1].boolean_start_index, 3);
+        assert_eq!(packages[1].fingerprint, 0);
 
         assert_eq!(packages[2].package_name, "com.android.aconfig.storage.test_4");
         assert_eq!(packages[2].package_id, 2);
@@ -199,5 +219,49 @@
         assert!(packages[2].flag_names.contains("enabled_rw"));
         assert!(packages[2].flag_names.contains("enabled_fixed_ro"));
         assert_eq!(packages[2].boolean_start_index, 6);
+        assert_eq!(packages[2].fingerprint, 0);
+    }
+
+    #[test]
+    fn test_flag_package_with_fingerprint() {
+        let caches = parse_all_test_flags();
+        let packages = group_flags_by_package(caches.iter(), 2);
+
+        for pkg in packages.iter() {
+            let pkg_name = pkg.package_name;
+            assert_eq!(pkg.flag_names.len(), pkg.boolean_flags.len());
+            for pf in pkg.boolean_flags.iter() {
+                assert!(pkg.flag_names.contains(pf.name()));
+                assert_eq!(pf.package(), pkg_name);
+            }
+        }
+
+        assert_eq!(packages.len(), 3);
+
+        assert_eq!(packages[0].package_name, "com.android.aconfig.storage.test_1");
+        assert_eq!(packages[0].package_id, 0);
+        assert_eq!(packages[0].flag_names.len(), 3);
+        assert!(packages[0].flag_names.contains("enabled_rw"));
+        assert!(packages[0].flag_names.contains("disabled_rw"));
+        assert!(packages[0].flag_names.contains("enabled_ro"));
+        assert_eq!(packages[0].boolean_start_index, 0);
+        assert_eq!(packages[0].fingerprint, 15248948510590158086u64);
+
+        assert_eq!(packages[1].package_name, "com.android.aconfig.storage.test_2");
+        assert_eq!(packages[1].package_id, 1);
+        assert_eq!(packages[1].flag_names.len(), 3);
+        assert!(packages[1].flag_names.contains("enabled_ro"));
+        assert!(packages[1].flag_names.contains("disabled_rw"));
+        assert!(packages[1].flag_names.contains("enabled_fixed_ro"));
+        assert_eq!(packages[1].boolean_start_index, 3);
+        assert_eq!(packages[1].fingerprint, 4431940502274857964u64);
+
+        assert_eq!(packages[2].package_name, "com.android.aconfig.storage.test_4");
+        assert_eq!(packages[2].package_id, 2);
+        assert_eq!(packages[2].flag_names.len(), 2);
+        assert!(packages[2].flag_names.contains("enabled_rw"));
+        assert!(packages[2].flag_names.contains("enabled_fixed_ro"));
+        assert_eq!(packages[2].boolean_start_index, 6);
+        assert_eq!(packages[2].fingerprint, 16233229917711622375u64);
     }
 }
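
For orientation, the sketch below illustrates the per-package fingerprint scheme that the tests above lock down: a hash over a package's sorted flag names. It is a hypothetical, stand-alone example rather than part of this change; fingerprint_flag_names is an invented name, and std's DefaultHasher stands in for aconfig_storage_file's SipHasher13, so its output will not match the expected fingerprint values asserted in the tests.

use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;

// Hash the sorted flag names of one package into a single u64.
fn fingerprint_flag_names(mut flag_names: Vec<String>) -> u64 {
    // Sort first so the result is independent of declaration order.
    flag_names.sort();
    let mut hasher = DefaultHasher::new();
    for name in &flag_names {
        hasher.write(name.as_bytes());
    }
    hasher.finish()
}

fn main() {
    let a = fingerprint_flag_names(vec!["enabled_ro".into(), "disabled_rw".into()]);
    let b = fingerprint_flag_names(vec!["disabled_rw".into(), "enabled_ro".into()]);
    assert_eq!(a, b); // same flags in any order produce the same fingerprint
    println!("fingerprint: {a}");
}
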
diff --git a/tools/aconfig/aconfig/src/storage/package_table.rs b/tools/aconfig/aconfig/src/storage/package_table.rs
index c53602f..53daa7f 100644
--- a/tools/aconfig/aconfig/src/storage/package_table.rs
+++ b/tools/aconfig/aconfig/src/storage/package_table.rs
@@ -18,14 +18,13 @@
 
 use aconfig_storage_file::{
     get_table_size, PackageTable, PackageTableHeader, PackageTableNode, StorageFileType,
-    FILE_VERSION,
 };
 
 use crate::storage::FlagPackage;
 
-fn new_header(container: &str, num_packages: u32) -> PackageTableHeader {
+fn new_header(container: &str, num_packages: u32, version: u32) -> PackageTableHeader {
     PackageTableHeader {
-        version: FILE_VERSION,
+        version,
         container: String::from(container),
         file_type: StorageFileType::PackageMap as u8,
         file_size: 0,
@@ -48,6 +47,7 @@
         let node = PackageTableNode {
             package_name: String::from(package.package_name),
             package_id: package.package_id,
+            fingerprint: package.fingerprint,
             boolean_start_index: package.boolean_start_index,
             next_offset: None,
         };
@@ -56,20 +56,26 @@
     }
 }
 
-pub fn create_package_table(container: &str, packages: &[FlagPackage]) -> Result<PackageTable> {
+pub fn create_package_table(
+    container: &str,
+    packages: &[FlagPackage],
+    version: u32,
+) -> Result<PackageTable> {
     // create table
     let num_packages = packages.len() as u32;
     let num_buckets = get_table_size(num_packages)?;
-    let mut header = new_header(container, num_packages);
+    let mut header = new_header(container, num_packages, version);
     let mut buckets = vec![None; num_buckets as usize];
-    let mut node_wrappers: Vec<_> =
-        packages.iter().map(|pkg| PackageTableNodeWrapper::new(pkg, num_buckets)).collect();
+    let mut node_wrappers: Vec<_> = packages
+        .iter()
+        .map(|pkg: &FlagPackage<'_>| PackageTableNodeWrapper::new(pkg, num_buckets))
+        .collect();
 
     // initialize all header fields
     header.bucket_offset = header.into_bytes().len() as u32;
     header.node_offset = header.bucket_offset + num_buckets * 4;
     header.file_size = header.node_offset
-        + node_wrappers.iter().map(|x| x.node.into_bytes().len()).sum::<usize>() as u32;
+        + node_wrappers.iter().map(|x| x.node.into_bytes(version).len()).sum::<usize>() as u32;
 
     // sort node_wrappers by bucket index for efficiency
     node_wrappers.sort_by(|a, b| a.bucket_index.cmp(&b.bucket_index));
@@ -87,7 +93,7 @@
         if buckets[node_bucket_idx as usize].is_none() {
             buckets[node_bucket_idx as usize] = Some(offset);
         }
-        offset += node_wrappers[i].node.into_bytes().len() as u32;
+        offset += node_wrappers[i].node.into_bytes(version).len() as u32;
 
         if let Some(index) = next_node_bucket_idx {
             if index == node_bucket_idx {
@@ -106,21 +112,59 @@
 
 #[cfg(test)]
 mod tests {
+    use aconfig_storage_file::{DEFAULT_FILE_VERSION, MAX_SUPPORTED_FILE_VERSION};
+
     use super::*;
     use crate::storage::{group_flags_by_package, tests::parse_all_test_flags};
 
-    pub fn create_test_package_table_from_source() -> Result<PackageTable> {
+    pub fn create_test_package_table_from_source(version: u32) -> Result<PackageTable> {
         let caches = parse_all_test_flags();
-        let packages = group_flags_by_package(caches.iter());
-        create_package_table("mockup", &packages)
+        let packages = group_flags_by_package(caches.iter(), version);
+        create_package_table("mockup", &packages, version)
     }
 
     #[test]
     // this test point locks down the table creation and each field
-    fn test_table_contents() {
-        let package_table = create_test_package_table_from_source();
-        assert!(package_table.is_ok());
-        let expected_package_table = aconfig_storage_file::test_utils::create_test_package_table();
-        assert_eq!(package_table.unwrap(), expected_package_table);
+    fn test_table_contents_default_version() {
+        let package_table_result = create_test_package_table_from_source(DEFAULT_FILE_VERSION);
+        assert!(package_table_result.is_ok());
+        let package_table = package_table_result.unwrap();
+
+        let expected_package_table =
+            aconfig_storage_file::test_utils::create_test_package_table(DEFAULT_FILE_VERSION);
+
+        assert_eq!(package_table.header, expected_package_table.header);
+        assert_eq!(package_table.buckets, expected_package_table.buckets);
+        for (node, expected_node) in
+            package_table.nodes.iter().zip(expected_package_table.nodes.iter())
+        {
+            assert_eq!(node.package_name, expected_node.package_name);
+            assert_eq!(node.package_id, expected_node.package_id);
+            assert_eq!(node.boolean_start_index, expected_node.boolean_start_index);
+            assert_eq!(node.next_offset, expected_node.next_offset);
+        }
+    }
+
+    #[test]
+    // this test point locks down the table creation and each field
+    fn test_table_contents_max_version() {
+        let package_table_result =
+            create_test_package_table_from_source(MAX_SUPPORTED_FILE_VERSION);
+        assert!(package_table_result.is_ok());
+        let package_table = package_table_result.unwrap();
+
+        let expected_package_table =
+            aconfig_storage_file::test_utils::create_test_package_table(MAX_SUPPORTED_FILE_VERSION);
+
+        assert_eq!(package_table.header, expected_package_table.header);
+        assert_eq!(package_table.buckets, expected_package_table.buckets);
+        for (node, expected_node) in
+            package_table.nodes.iter().zip(expected_package_table.nodes.iter())
+        {
+            assert_eq!(node.package_name, expected_node.package_name);
+            assert_eq!(node.package_id, expected_node.package_id);
+            assert_eq!(node.boolean_start_index, expected_node.boolean_start_index);
+            assert_eq!(node.next_offset, expected_node.next_offset);
+        }
     }
 }
diff --git a/tools/aconfig/aconfig/src/test.rs b/tools/aconfig/aconfig/src/test.rs
index 7409cda..10da252 100644
--- a/tools/aconfig/aconfig/src/test.rs
+++ b/tools/aconfig/aconfig/src/test.rs
@@ -266,6 +266,7 @@
                 reader: Box::new(include_bytes!("../tests/read_only_test.values").as_slice()),
             }],
             crate::commands::DEFAULT_FLAG_PERMISSION,
+            true,
         )
         .unwrap();
         aconfig_protos::parsed_flags::try_from_binary_proto(&bytes).unwrap()
@@ -290,6 +291,26 @@
                 },
             ],
             crate::commands::DEFAULT_FLAG_PERMISSION,
+            true,
+        )
+        .unwrap();
+        aconfig_protos::parsed_flags::try_from_binary_proto(&bytes).unwrap()
+    }
+
+    pub fn parse_second_package_flags() -> ProtoParsedFlags {
+        let bytes = crate::commands::parse_flags(
+            "com.android.aconfig.second_test",
+            Some("system"),
+            vec![Input {
+                source: "tests/test_second_package.aconfig".to_string(),
+                reader: Box::new(include_bytes!("../tests/test_second_package.aconfig").as_slice()),
+            }],
+            vec![Input {
+                source: "tests/third.values".to_string(),
+                reader: Box::new(include_bytes!("../tests/third.values").as_slice()),
+            }],
+            crate::commands::DEFAULT_FLAG_PERMISSION,
+            true,
         )
         .unwrap();
         aconfig_protos::parsed_flags::try_from_binary_proto(&bytes).unwrap()
diff --git a/tools/aconfig/aconfig/templates/FeatureFlags.java.template b/tools/aconfig/aconfig/templates/FeatureFlags.java.template
index 38c8f13..d2799b2 100644
--- a/tools/aconfig/aconfig/templates/FeatureFlags.java.template
+++ b/tools/aconfig/aconfig/templates/FeatureFlags.java.template
@@ -19,4 +19,4 @@
 {{ -endif }}
     boolean {item.method_name}();
 {{ -endfor }}
-}
\ No newline at end of file
+}
diff --git a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
index cd2e3db..cb52150 100644
--- a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
+++ b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template
@@ -1,76 +1,70 @@
 package {package_name};
 {{ -if not is_test_mode }}
+{{ -if allow_instrumentation }}
 {{ if not library_exported- }}
 // TODO(b/303773055): Remove the annotation after access issue is resolved.
 import android.compat.annotation.UnsupportedAppUsage;
 {{ -endif }}
 
 {{ -if runtime_lookup_required }}
+import android.os.Binder;
 import android.provider.DeviceConfig;
 import android.provider.DeviceConfig.Properties;
 
-
-{{ -if allow_instrumentation }}
+{{ -if not library_exported }}
 import android.aconfig.storage.StorageInternalReader;
-import android.util.Log;
-
-import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Paths;
 {{ -endif }}
 
 {{ -endif }}
 /** @hide */
 public final class FeatureFlagsImpl implements FeatureFlags \{
 {{ -if runtime_lookup_required }}
+{{ -if not library_exported }}
+    private static final boolean isReadFromNew = Files.exists(Paths.get("/metadata/aconfig/boot/enable_only_new_storage"));
+    private static volatile boolean isCached = false;
+{{ -endif }}
 {{ -for namespace_with_flags in namespace_flags }}
-    private static boolean {namespace_with_flags.namespace}_is_cached = false;
+    private static volatile boolean {namespace_with_flags.namespace}_is_cached = false;
 {{ -endfor- }}
 
 {{ for flag in flag_elements }}
-{{- if flag.is_read_write }}
+{{ -if flag.is_read_write }}
     private static boolean {flag.method_name} = {flag.default_value};
 {{ -endif }}
 {{ -endfor }}
-{{ -if allow_instrumentation }}
-    StorageInternalReader reader;
-    boolean readFromNewStorage;
 
-    private final static String TAG = "AconfigJavaCodegen";
-
-    public FeatureFlagsImpl() \{
-        File file = new File("/metadata/aconfig_test_missions/mission_1");
-        if (file.exists()) \{
-            readFromNewStorage = true;
+{{ if not library_exported }}
+    private void init() \{
+        StorageInternalReader reader = null;
+        boolean foundPackage = true;
+        try \{
             reader = new StorageInternalReader("{container}", "{package_name}");
+        } catch (Exception e) \{
+            foundPackage = false;
         }
+        {{ for namespace_with_flags in namespace_flags }}
+        {{ -for flag in namespace_with_flags.flags }}
+        {{ if flag.is_read_write }}
+            {flag.method_name} = foundPackage ? reader.getBooleanFlagValue({flag.flag_offset}) : {flag.default_value};
+        {{ endif }}
+        {{ -endfor }}
+        {{ -endfor }}
+        isCached = true;
     }
-{{ -endif }}
+{{ endif }}
+
+
 {{ for namespace_with_flags in namespace_flags }}
     private void load_overrides_{namespace_with_flags.namespace}() \{
+        final long ident = Binder.clearCallingIdentity();
         try \{
-{{ -if allow_instrumentation }}
-            boolean val;
-{{ -endif }}
             Properties properties = DeviceConfig.getProperties("{namespace_with_flags.namespace}");
 {{ -for flag in namespace_with_flags.flags }}
 {{ -if flag.is_read_write }}
             {flag.method_name} =
                 properties.getBoolean(Flags.FLAG_{flag.flag_name_constant_suffix}, {flag.default_value});
-{{ -if allow_instrumentation }}
-            if (readFromNewStorage) \{
-                try \{
-                    val = reader.getBooleanFlagValue({flag.flag_offset});
-                    if (val == {flag.method_name}) \{
-                        Log.i(TAG, "success: {flag.method_name} value matches");
-                    } else \{
-                        Log.i(TAG, String.format(
-                            "error: {flag.method_name} value mismatch, new storage value is %s, old storage value is %s",
-                            val, {flag.method_name}));
-                    }
-                } catch (Exception e) \{
-                    Log.e(TAG,"error: failed to read flag value of {flag.method_name}");
-                }
-            }
-{{ -endif }}
 {{ -endif }}
 {{ -endfor }}
         } catch (NullPointerException e) \{
@@ -82,10 +76,97 @@
                 + "flag declaration.",
                 e
             );
+        } catch (SecurityException e) \{
+            // for isolated process case, skip loading flag value from the storage, use the default
+        } finally \{
+            Binder.restoreCallingIdentity(ident);
         }
         {namespace_with_flags.namespace}_is_cached = true;
     }
 {{ endfor- }}
+
+{{ -endif }}{#- end of runtime_lookup_required #}
+{{ -for flag in flag_elements }}
+    @Override
+{{ -if not library_exported }}
+    @com.android.aconfig.annotations.AconfigFlagAccessor
+    @UnsupportedAppUsage
+{{ -endif }}
+    public boolean {flag.method_name}() \{
+{{ -if not library_exported }}
+{{ -if flag.is_read_write }}
+        if (isReadFromNew) \{
+            if (!isCached) \{
+                init();
+            }
+        } else \{
+            if (!{flag.device_config_namespace}_is_cached) \{
+                load_overrides_{flag.device_config_namespace}();
+            }
+        }
+        return {flag.method_name};
+{{ -else }}
+        return {flag.default_value};
+{{ -endif }}
+{{ else }}
+        if (!{flag.device_config_namespace}_is_cached) \{
+            load_overrides_{flag.device_config_namespace}();
+        }
+        return {flag.method_name};
+{{ -endif }}
+    }
+{{ endfor }}
+}
+
+{{ else }} {#- else for allow_instrumentation is not enabled #}
+{{ if not library_exported- }}
+// TODO(b/303773055): Remove the annotation after access issue is resolved.
+import android.compat.annotation.UnsupportedAppUsage;
+{{ -endif }}
+
+{{ -if runtime_lookup_required }}
+import android.os.Binder;
+import android.provider.DeviceConfig;
+import android.provider.DeviceConfig.Properties;
+{{ -endif }}
+/** @hide */
+public final class FeatureFlagsImpl implements FeatureFlags \{
+{{ -if runtime_lookup_required }}
+{{ -for namespace_with_flags in namespace_flags }}
+    private static volatile boolean {namespace_with_flags.namespace}_is_cached = false;
+{{ -endfor- }}
+
+{{ for flag in flag_elements }}
+{{- if flag.is_read_write }}
+    private static boolean {flag.method_name} = {flag.default_value};
+{{ -endif }}
+{{ -endfor }}
+{{ for namespace_with_flags in namespace_flags }}
+    private void load_overrides_{namespace_with_flags.namespace}() \{
+        final long ident = Binder.clearCallingIdentity();
+        try \{
+            Properties properties = DeviceConfig.getProperties("{namespace_with_flags.namespace}");
+{{ -for flag in namespace_with_flags.flags }}
+{{ -if flag.is_read_write }}
+            {flag.method_name} =
+                properties.getBoolean(Flags.FLAG_{flag.flag_name_constant_suffix}, {flag.default_value});
+{{ -endif }}
+{{ -endfor }}
+        } catch (NullPointerException e) \{
+            throw new RuntimeException(
+                "Cannot read value from namespace {namespace_with_flags.namespace} "
+                + "from DeviceConfig. It could be that the code using flag "
+                + "executed before SettingsProvider initialization. Please use "
+                + "fixed read-only flag by adding is_fixed_read_only: true in "
+                + "flag declaration.",
+                e
+            );
+        } finally \{
+            Binder.restoreCallingIdentity(ident);
+        }
+        {namespace_with_flags.namespace}_is_cached = true;
+    }
+{{ endfor- }}
 {{ -endif }}{#- end of runtime_lookup_required #}
 {{ -for flag in flag_elements }}
     @Override
@@ -105,8 +186,8 @@
     }
 {{ endfor }}
 }
-{{ else }}
-{#- Generate only stub if in test mode #}
+{{ endif }} {#- endif for allow_instrumentation #}
+{{ else }} {#- Generate only stub if in test mode #}
 /** @hide */
 public final class FeatureFlagsImpl implements FeatureFlags \{
 {{ for flag in flag_elements }}
diff --git a/tools/aconfig/aconfig/templates/cpp_exported_header.template b/tools/aconfig/aconfig/templates/cpp_exported_header.template
index 0f7853e..4643c97 100644
--- a/tools/aconfig/aconfig/templates/cpp_exported_header.template
+++ b/tools/aconfig/aconfig/templates/cpp_exported_header.template
@@ -27,12 +27,13 @@
     {{ -for item in class_elements}}
     virtual bool {item.flag_name}() = 0;
 
-    {{ -if is_test_mode }}
-    virtual void {item.flag_name}(bool val) = 0;
-    {{ -endif }}
     {{ -endfor }}
 
     {{ -if is_test_mode }}
+    {{ -for item in class_elements}}
+    virtual void {item.flag_name}(bool val) = 0;
+    {{ -endfor }}
+
     virtual void reset_flags() \{}
     {{ -endif }}
 };
diff --git a/tools/aconfig/aconfig/templates/cpp_source_file.template b/tools/aconfig/aconfig/templates/cpp_source_file.template
index 38dda7d..9be59e0 100644
--- a/tools/aconfig/aconfig/templates/cpp_source_file.template
+++ b/tools/aconfig/aconfig/templates/cpp_source_file.template
@@ -1,13 +1,13 @@
 #include "{header}.h"
 
 {{ if allow_instrumentation }}
-#include <sys/stat.h>
+{{ if readwrite- }}
+#include <unistd.h>
 #include "aconfig_storage/aconfig_storage_read_api.hpp"
 #include <android/log.h>
-
-#define ALOGI(msg, ...)                                                        \
-  __android_log_print(ANDROID_LOG_INFO, "AconfigTestMission1", (msg), __VA_ARGS__)
-
+#define LOG_TAG "aconfig_cpp_codegen"
+#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
+{{ -endif }}
 {{ endif }}
 
 {{ if readwrite- }}
@@ -66,15 +66,87 @@
     class flag_provider : public flag_provider_interface \{
     public:
 
-        {{ -for item in class_elements }}
+        {{ if allow_instrumentation- }}
+        {{ if readwrite- }}
+        flag_provider()
+            {{ if readwrite- }}
+            : cache_({readwrite_count}, -1)
+            , boolean_start_index_()
+            {{ -else- }}
+            : boolean_start_index_()
+            {{ -endif }}
+            , flag_value_file_(nullptr)
+            , package_exists_in_storage_(true) \{
 
+            auto package_map_file = aconfig_storage::get_mapped_file(
+                 "{container}",
+                 aconfig_storage::StorageFileType::package_map);
+            if (!package_map_file.ok()) \{
+                ALOGE("error: failed to get package map file: %s", package_map_file.error().c_str());
+                package_exists_in_storage_ = false;
+                return;
+            }
+
+            auto context = aconfig_storage::get_package_read_context(
+                **package_map_file, "{package}");
+            if (!context.ok()) \{
+                ALOGE("error: failed to get package read context: %s", context.error().c_str());
+                package_exists_in_storage_ = false;
+                return;
+            }
+
+            if (!(context->package_exists)) \{
+                package_exists_in_storage_ = false;
+                return;
+            }
+
+            // cache package boolean flag start index
+            boolean_start_index_ = context->boolean_start_index;
+
+            // unmap package map file and free memory
+            delete *package_map_file;
+
+            auto flag_value_file = aconfig_storage::get_mapped_file(
+                "{container}",
+                aconfig_storage::StorageFileType::flag_val);
+            if (!flag_value_file.ok()) \{
+                ALOGE("error: failed to get flag value file: %s", flag_value_file.error().c_str());
+                package_exists_in_storage_ = false;
+                return;
+            }
+
+            // cache flag value file
+            flag_value_file_ = std::unique_ptr<aconfig_storage::MappedStorageFile>(
+                *flag_value_file);
+
+        }
+        {{ -endif }}
+        {{ -endif }}
+
+        {{ -for item in class_elements }}
         virtual bool {item.flag_name}() override \{
             {{ -if item.readwrite }}
             if (cache_[{item.readwrite_idx}] == -1) \{
+            {{ if allow_instrumentation- }}
+                if (!package_exists_in_storage_) \{
+                    return {item.default_value};
+                }
+
+                auto value = aconfig_storage::get_boolean_flag_value(
+                    *flag_value_file_,
+                    boolean_start_index_ + {item.flag_offset});
+
+                if (!value.ok()) \{
+                    ALOGE("error: failed to read flag value: %s", value.error().c_str());
+                }
+
+                cache_[{item.readwrite_idx}] = *value;
+            {{ -else- }}
                 cache_[{item.readwrite_idx}] = server_configurable_flags::GetServerConfigurableFlag(
                     "aconfig_flags.{item.device_config_namespace}",
                     "{item.device_config_flag}",
                     "{item.default_value}") == "true";
+            {{ -endif }}
             }
             return cache_[{item.readwrite_idx}];
             {{ -else }}
@@ -86,12 +158,20 @@
             {{ -endif }}
         }
         {{ -endfor }}
+
     {{ if readwrite- }}
     private:
         std::vector<int8_t> cache_ = std::vector<int8_t>({readwrite_count}, -1);
-    {{ -endif }}
-    };
+    {{ if allow_instrumentation- }}
+        uint32_t boolean_start_index_;
 
+        std::unique_ptr<aconfig_storage::MappedStorageFile> flag_value_file_;
+
+        bool package_exists_in_storage_;
+    {{ -endif }}
+    {{ -endif }}
+
+    };
 
 {{ -endif }}
 
@@ -107,62 +187,6 @@
     {{ -if item.readwrite }}
     return {cpp_namespace}::{item.flag_name}();
     {{ -else }}
-    {{ if allow_instrumentation }}
-    auto result =
-        {{ if item.is_fixed_read_only }}
-	    {package_macro}_{item.flag_macro}
-	{{ else }}
-	    {item.default_value}
-	{{ endif }};
-
-    struct stat buffer;
-    if (stat("/metadata/aconfig_test_missions/mission_1", &buffer) != 0) \{
-        return result;
-    }
-
-    auto package_map_file = aconfig_storage::get_mapped_file(
-        "{item.container}",
-        aconfig_storage::StorageFileType::package_map);
-    if (!package_map_file.ok()) \{
-        ALOGI("error: failed to get package map file: %s", package_map_file.error().c_str());
-        return result;
-    }
-
-    auto package_read_context = aconfig_storage::get_package_read_context(
-        **package_map_file, "{package}");
-    if (!package_read_context.ok()) \{
-        ALOGI("error: failed to get package read context: %s", package_map_file.error().c_str());
-        return result;
-    }
-
-    delete *package_map_file;
-
-    auto flag_val_map = aconfig_storage::get_mapped_file(
-        "{item.container}",
-        aconfig_storage::StorageFileType::flag_val);
-    if (!flag_val_map.ok()) \{
-        ALOGI("error: failed to get flag val map: %s", package_map_file.error().c_str());
-        return result;
-    }
-
-    auto value = aconfig_storage::get_boolean_flag_value(
-        **flag_val_map,
-        package_read_context->boolean_start_index + {item.flag_offset});
-    if (!value.ok()) \{
-        ALOGI("error: failed to get flag val: %s", package_map_file.error().c_str());
-        return result;
-    }
-
-    delete *flag_val_map;
-
-    if (*value != result) \{
-        ALOGI("error: new storage value '%d' does not match current value '%d'", *value, result);
-    } else \{
-        ALOGI("success: new storage value was '%d, legacy storage was '%d'", *value, result);
-    }
-
-    return result;
-    {{ else }}
     {{ -if item.is_fixed_read_only }}
     return {package_macro}_{item.flag_macro};
     {{ -else }}
@@ -170,7 +194,6 @@
     {{ -endif }}
     {{ -endif }}
     {{ -endif }}
-    {{ -endif }}
 }
 
 {{ -if is_test_mode }}
@@ -185,5 +208,3 @@
      {cpp_namespace}::reset_flags();
 }
 {{ -endif }}
-
-
diff --git a/tools/aconfig/aconfig/templates/rust.template b/tools/aconfig/aconfig/templates/rust.template
index cfd9d6a..e9e1032 100644
--- a/tools/aconfig/aconfig/templates/rust.template
+++ b/tools/aconfig/aconfig/templates/rust.template
@@ -2,88 +2,77 @@
 use aconfig_storage_read_api::\{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context};
 use std::path::Path;
 use std::io::Write;
+use std::sync::LazyLock;
 use log::\{log, LevelFilter, Level};
 
-static STORAGE_MIGRATION_MARKER_FILE: &str =
-    "/metadata/aconfig_test_missions/mission_1";
-static MIGRATION_LOG_TAG: &str = "AconfigTestMission1";
-
 /// flag provider
 pub struct FlagProvider;
 
 {{ if has_readwrite- }}
-lazy_static::lazy_static! \{
-    {{ if allow_instrumentation }}
-    static ref PACKAGE_OFFSET: Result<Option<u32>, AconfigStorageError> = unsafe \{
-        get_mapped_storage_file("{container}", StorageFileType::PackageMap)
-        .and_then(|package_map| get_package_read_context(&package_map, "{package}"))
-        .map(|context| context.map(|c| c.boolean_start_index))
-    };
+{{ if allow_instrumentation }}
+static PACKAGE_OFFSET: LazyLock<Result<Option<u32>, AconfigStorageError>> = LazyLock::new(|| unsafe \{
+    get_mapped_storage_file("{container}", StorageFileType::PackageMap)
+    .and_then(|package_map| get_package_read_context(&package_map, "{package}"))
+    .map(|context| context.map(|c| c.boolean_start_index))
+});
 
-    static ref FLAG_VAL_MAP: Result<Mmap, AconfigStorageError> = unsafe \{
-        get_mapped_storage_file("{container}", StorageFileType::FlagVal)
-    };
-    {{ -endif }}
-
+static FLAG_VAL_MAP: LazyLock<Result<Mmap, AconfigStorageError>> = LazyLock::new(|| unsafe \{
+    get_mapped_storage_file("{container}", StorageFileType::FlagVal)
+});
+{{ -endif }}
 {{ -for flag in template_flags }}
-    {{ -if flag.readwrite }}
-    /// flag value cache for {flag.name}
-    {{ if allow_instrumentation }}
-    static ref CACHED_{flag.name}: bool = \{
-        let result = flags_rust::GetServerConfigurableFlag(
-            "aconfig_flags.{flag.device_config_namespace}",
-            "{flag.device_config_flag}",
-            "{flag.default_value}") == "true";
 
-        if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() \{
-            // This will be called multiple times. Subsequent calls after the first are noops.
-            logger::init(
-                logger::Config::default()
-                    .with_tag_on_device(MIGRATION_LOG_TAG)
-                    .with_max_level(LevelFilter::Info));
+{{ -if flag.readwrite }}
+/// flag value cache for {flag.name}
+{{ if allow_instrumentation }}
+static CACHED_{flag.name}: LazyLock<bool> = LazyLock::new(|| \{
 
-            let aconfig_storage_result = FLAG_VAL_MAP
+    // This will be called multiple times. Subsequent calls after the first are noops.
+    logger::init(
+        logger::Config::default()
+            .with_tag_on_device("aconfig_rust_codegen")
+            .with_max_level(LevelFilter::Info));
+
+    let flag_value_result = FLAG_VAL_MAP
+        .as_ref()
+        .map_err(|err| format!("failed to get flag val map: \{err}"))
+        .and_then(|flag_val_map| \{
+            PACKAGE_OFFSET
                 .as_ref()
-                .map_err(|err| format!("failed to get flag val map: \{err}"))
-                .and_then(|flag_val_map| \{
-                    PACKAGE_OFFSET
-                        .as_ref()
-                        .map_err(|err| format!("failed to get package read offset: \{err}"))
-                        .and_then(|package_offset| \{
-                            match package_offset \{
-                                Some(offset) => \{
-                                    get_boolean_flag_value(&flag_val_map, offset + {flag.flag_offset})
-                                        .map_err(|err| format!("failed to get flag: \{err}"))
-                                },
-                                None => Err("no context found for package '{package}'".to_string())
-                            }
-                        })
-                    });
+                .map_err(|err| format!("failed to get package read offset: \{err}"))
+                .and_then(|package_offset| \{
+                    match package_offset \{
+                        Some(offset) => \{
+                            get_boolean_flag_value(&flag_val_map, offset + {flag.flag_offset})
+                                .map_err(|err| format!("failed to get flag: \{err}"))
+                        },
+                        None => \{
+                            log!(Level::Error, "no context found for package {package}");
+                            Err(format!("failed to flag package {package}"))
+                        }
+                    }
+                })
+            });
 
-            match aconfig_storage_result \{
-                Ok(storage_result) if storage_result == result => \{
-                    log!(Level::Info, "AconfigTestMission1: success! flag '{flag.name}' contained correct value. Legacy storage was \{result}, new storage was \{storage_result}");
-                },
-                Ok(storage_result) => \{
-                    log!(Level::Error, "AconfigTestMission1: error: mismatch for flag '{flag.name}'. Legacy storage was \{result}, new storage was \{storage_result}");
-                },
-                Err(err) => \{
-                    log!(Level::Error, "AconfigTestMission1: error: \{err}")
-                }
-            }
+    match flag_value_result \{
+        Ok(flag_value) => \{
+            return flag_value;
+        },
+        Err(err) => \{
+            log!(Level::Error, "aconfig_rust_codegen: error: \{err}");
+            return {flag.default_value};
         }
+    }
 
-        result
-        };
-    {{ else }}
-    static ref CACHED_{flag.name}: bool = flags_rust::GetServerConfigurableFlag(
-        "aconfig_flags.{flag.device_config_namespace}",
-        "{flag.device_config_flag}",
-        "{flag.default_value}") == "true";
-    {{ endif }}
-    {{ -endif }}
+});
+{{ else }}
+static CACHED_{flag.name}: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag(
+    "aconfig_flags.{flag.device_config_namespace}",
+    "{flag.device_config_flag}",
+    "{flag.default_value}") == "true");
+{{ endif }}
+{{ -endif }}
 {{ -endfor }}
-}
 {{ -endif }}
 
 impl FlagProvider \{
@@ -107,73 +96,11 @@
 {{ for flag in template_flags }}
 /// query flag {flag.name}
 #[inline(always)]
-{{ -if flag.readwrite }}
 pub fn {flag.name}() -> bool \{
+{{ -if flag.readwrite }}
     PROVIDER.{flag.name}()
 {{ -else }}
-pub fn {flag.name}() -> bool \{
-    {{ if not allow_instrumentation }}
     {flag.default_value}
-    {{ else }}
-
-    let result = {flag.default_value};
-    if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() \{
-        return result;
-    }
-
-    // This will be called multiple times. Subsequent calls after the first
-    // are noops.
-    logger::init(
-        logger::Config::default()
-            .with_tag_on_device(MIGRATION_LOG_TAG)
-            .with_max_level(LevelFilter::Info),
-    );
-
-    unsafe \{
-        let package_map = match get_mapped_storage_file("{flag.container}", StorageFileType::PackageMap) \{
-            Ok(file) => file,
-            Err(err) => \{
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}");
-                return result;
-            }
-        };
-
-        let package_read_context = match get_package_read_context(&package_map, "{package}") \{
-            Ok(Some(context)) => context,
-            Ok(None) => \{
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': did not get context");
-                return result;
-            },
-            Err(err) => \{
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}");
-                return result;
-            }
-        };
-        let flag_val_map = match get_mapped_storage_file("{flag.container}", StorageFileType::FlagVal) \{
-            Ok(val_map) => val_map,
-            Err(err) => \{
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}");
-                return result;
-            }
-        };
-        let value = match get_boolean_flag_value(&flag_val_map, {flag.flag_offset} + package_read_context.boolean_start_index) \{
-            Ok(val) => val,
-            Err(err) => \{
-                log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}");
-                return result;
-            }
-        };
-
-        if result != value \{
-            log!(Level::Error, "AconfigTestMission1: error: flag mismatch for '{flag.name}'. Legacy storage was \{result}, new storage was \{value}");
-        } else \{
-            let default_value = {flag.default_value};
-            log!(Level::Info, "AconfigTestMission1: success! flag '{flag.name}' contained correct value. Legacy storage was \{default_value}, new storage was \{value}");
-        }
-    }
-
-    result
-    {{ endif }}
 {{ -endif }}
 }
 {{ endfor }}
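
The regenerated rust.template above swaps lazy_static for std::sync::LazyLock and drops the migration-marker comparison logging in favor of reading straight from the new storage, falling back to the default value on error. A minimal std-only sketch of that caching shape, with `read_flag_from_storage` as a hypothetical stand-in for the PACKAGE_OFFSET / FLAG_VAL_MAP lookup in the generated code:

```rust
use std::sync::LazyLock;

// Hypothetical stand-in for the storage lookup the generated code performs;
// the real template consults PACKAGE_OFFSET and FLAG_VAL_MAP instead.
fn read_flag_from_storage() -> Result<bool, String> {
    Ok(true)
}

// Shape of the generated CACHED_<flag> statics: the closure runs once, on
// first dereference, and falls back to the flag's default value on error.
static CACHED_MY_FLAG: LazyLock<bool> = LazyLock::new(|| {
    match read_flag_from_storage() {
        Ok(value) => value,
        Err(err) => {
            eprintln!("failed to read flag: {err}"); // the template logs via log!/logger
            false // stands in for {flag.default_value}
        }
    }
});

fn main() {
    // Later reads reuse the cached value; the closure does not run again.
    println!("my_flag = {}", *CACHED_MY_FLAG);
}
```
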
diff --git a/tools/aconfig/aconfig/tests/test_second_package.aconfig b/tools/aconfig/aconfig/tests/test_second_package.aconfig
new file mode 100644
index 0000000..188bc96
--- /dev/null
+++ b/tools/aconfig/aconfig/tests/test_second_package.aconfig
@@ -0,0 +1,10 @@
+package: "com.android.aconfig.second_test"
+container: "system"
+
+flag {
+    name: "testing_flag"
+    namespace: "another_namespace"
+    description: "This is a flag for testing."
+    bug: "123"
+}
+
diff --git a/tools/aconfig/aconfig/tests/third.values b/tools/aconfig/aconfig/tests/third.values
new file mode 100644
index 0000000..675832a
--- /dev/null
+++ b/tools/aconfig/aconfig/tests/third.values
@@ -0,0 +1,6 @@
+flag_value {
+    package: "com.android.aconfig.second_test"
+    name: "testing_flag"
+    state: DISABLED
+    permission: READ_WRITE
+}
diff --git a/tools/aconfig/aconfig_device_paths/Android.bp b/tools/aconfig/aconfig_device_paths/Android.bp
index 2d943de..bdf96ed 100644
--- a/tools/aconfig/aconfig_device_paths/Android.bp
+++ b/tools/aconfig/aconfig_device_paths/Android.bp
@@ -39,8 +39,8 @@
 
 genrule {
     name: "libaconfig_java_device_paths_src",
-    srcs: ["src/DevicePathsTemplate.java"],
-    out: ["DevicePaths.java"],
+    srcs: ["src/DeviceProtosTemplate.java"],
+    out: ["DeviceProtos.java"],
     tool_files: ["partition_aconfig_flags_paths.txt"],
     cmd: "sed -e '/TEMPLATE/{r$(location partition_aconfig_flags_paths.txt)' -e 'd}' $(in) > $(out)",
 }
@@ -48,5 +48,28 @@
 java_library {
     name: "aconfig_device_paths_java",
     srcs: [":libaconfig_java_device_paths_src"],
-    sdk_version: "core_current",
+    static_libs: [
+        "libaconfig_java_proto_nano",
+    ],
+    sdk_version: "core_platform",
+    apex_available: [
+        "//apex_available:platform",
+    ],
+}
+
+genrule {
+    name: "libaconfig_java_host_device_paths_src",
+    srcs: ["src/HostDeviceProtosTemplate.java"],
+    out: ["HostDeviceProtos.java"],
+    tool_files: [
+        "partition_aconfig_flags_paths.txt",
+        "mainline_aconfig_flags_paths.txt",
+    ],
+    cmd: "sed -e '/TEMPLATE/{r$(location partition_aconfig_flags_paths.txt)' -e 'd}' $(in) > $(out).tmp && " +
+    "sed -e '/MAINLINE_T/{r$(location mainline_aconfig_flags_paths.txt)' -e 'd}' $(out).tmp > $(out)",
+}
+
+java_library_host {
+    name: "aconfig_host_device_paths_java",
+    srcs: [":libaconfig_java_host_device_paths_src"],
 }
diff --git a/tools/aconfig/aconfig_device_paths/mainline_aconfig_flags_paths.txt b/tools/aconfig/aconfig_device_paths/mainline_aconfig_flags_paths.txt
new file mode 100644
index 0000000..af73a84
--- /dev/null
+++ b/tools/aconfig/aconfig_device_paths/mainline_aconfig_flags_paths.txt
@@ -0,0 +1,20 @@
+"/apex/com.android.adservices/etc/aconfig_flags.pb",
+"/apex/com.android.appsearch/etc/aconfig_flags.pb",
+"/apex/com.android.art/etc/aconfig_flags.pb",
+"/apex/com.android.btservices/etc/aconfig_flags.pb",
+"/apex/com.android.cellbroadcast/etc/aconfig_flags.pb",
+"/apex/com.android.configinfrastructure/etc/aconfig_flags.pb",
+"/apex/com.android.conscrypt/etc/aconfig_flags.pb",
+"/apex/com.android.devicelock/etc/aconfig_flags.pb",
+"/apex/com.android.healthfitness/etc/aconfig_flags.pb",
+"/apex/com.android.ipsec/etc/aconfig_flags.pb",
+"/apex/com.android.media/etc/aconfig_flags.pb",
+"/apex/com.android.mediaprovider/etc/aconfig_flags.pb",
+"/apex/com.android.ondevicepersonalization/etc/aconfig_flags.pb",
+"/apex/com.android.os.statsd/etc/aconfig_flags.pb",
+"/apex/com.android.permission/etc/aconfig_flags.pb",
+"/apex/com.android.profiling/etc/aconfig_flags.pb",
+"/apex/com.android.tethering/etc/aconfig_flags.pb",
+"/apex/com.android.uwb/etc/aconfig_flags.pb",
+"/apex/com.android.virt/etc/aconfig_flags.pb",
+"/apex/com.android.wifi/etc/aconfig_flags.pb",
diff --git a/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt b/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt
index 140cd21..e997e3d 100644
--- a/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt
+++ b/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt
@@ -1,4 +1,3 @@
 "/system/etc/aconfig_flags.pb",
-"/system_ext/etc/aconfig_flags.pb",
 "/product/etc/aconfig_flags.pb",
 "/vendor/etc/aconfig_flags.pb",
diff --git a/tools/aconfig/aconfig_device_paths/src/DevicePathsTemplate.java b/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java
similarity index 62%
rename from tools/aconfig/aconfig_device_paths/src/DevicePathsTemplate.java
rename to tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java
index 16355a3..4d41199 100644
--- a/tools/aconfig/aconfig_device_paths/src/DevicePathsTemplate.java
+++ b/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java
@@ -15,7 +15,12 @@
  */
 package android.aconfig;
 
+import android.aconfig.nano.Aconfig.parsed_flag;
+import android.aconfig.nano.Aconfig.parsed_flags;
+
 import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -23,14 +28,38 @@
 /**
  * @hide
  */
-public class DevicePaths {
-    static final String[] PATHS = {
+public class DeviceProtos {
+    public static final String[] PATHS = {
         TEMPLATE
     };
 
     private static final String APEX_DIR = "/apex";
     private static final String APEX_ACONFIG_PATH_SUFFIX = "/etc/aconfig_flags.pb";
 
+    /**
+     * Returns a list of all on-device aconfig protos.
+     *
+     * May throw an exception if the protos can't be read at the call site. For
+     * example, some of the protos live under /apex, which is mounted
+     * somewhat late in the boot process.
+     *
+     * @throws IOException if we can't read one of the protos yet
+     * @return a list of all on-device aconfig protos
+     */
+    public static List<parsed_flag> loadAndParseFlagProtos() throws IOException {
+        ArrayList<parsed_flag> result = new ArrayList<>();
+
+        for (String path : parsedFlagsProtoPaths()) {
+            try (FileInputStream inputStream = new FileInputStream(path)) {
+                parsed_flags parsedFlags = parsed_flags.parseFrom(inputStream.readAllBytes());
+                for (parsed_flag flag : parsedFlags.parsedFlag) {
+                    result.add(flag);
+                }
+            }
+        }
+
+        return result;
+    }
 
     /**
      * Returns the list of all on-device aconfig protos paths.
diff --git a/tools/aconfig/aconfig_device_paths/src/HostDeviceProtosTemplate.java b/tools/aconfig/aconfig_device_paths/src/HostDeviceProtosTemplate.java
new file mode 100644
index 0000000..e7d0a76
--- /dev/null
+++ b/tools/aconfig/aconfig_device_paths/src/HostDeviceProtosTemplate.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.aconfig;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * A host lib that can read all aconfig proto file paths on a given device.
+ * This lib is only available on device with root access (userdebug/eng).
+ */
+public class HostDeviceProtos {
+    /**
+     * An interface that executes ADB command and return the result.
+     */
+    public static interface AdbCommandExecutor {
+        /** Executes the ADB command. */
+        String executeAdbCommand(String command);
+    }
+
+    static final String[] PATHS = {
+        TEMPLATE
+    };
+
+    static final String[] MAINLINE_PATHS = {
+        MAINLINE_T
+    };
+
+    private static final String APEX_DIR = "/apex";
+    private static final String RECURSIVELY_LIST_APEX_DIR_COMMAND =
+        "shell su 0 find /apex | grep aconfig_flags";
+    private static final String APEX_ACONFIG_PATH_SUFFIX = "/etc/aconfig_flags.pb";
+
+
+    /**
+     * Returns the list of all on-device aconfig proto paths from host side.
+     */
+    public static List<String> parsedFlagsProtoPaths(AdbCommandExecutor adbCommandExecutor) {
+        ArrayList<String> paths = new ArrayList<>(Arrays.asList(PATHS));
+
+        String adbCommandOutput = adbCommandExecutor.executeAdbCommand(
+            RECURSIVELY_LIST_APEX_DIR_COMMAND);
+
+        if (adbCommandOutput == null || adbCommandOutput.isEmpty()) {
+            paths.addAll(Arrays.asList(MAINLINE_PATHS));
+            return paths;
+        }
+
+        Set<String> allFiles = new HashSet<>(Arrays.asList(adbCommandOutput.split("\n")));
+
+        Set<String> subdirs = allFiles.stream().map(file -> {
+            String[] filePaths = file.split("/");
+            // The first element is "", the second element is "apex".
+            return filePaths.length > 2 ? filePaths[2] : "";
+        }).collect(Collectors.toSet());
+
+        for (String prefix : subdirs) {
+            // For each mainline module there are two directories, one <modulepackage>/
+            // and one <modulepackage>@<versioncode>/. Read only the former.
+            if (prefix.contains("@")) {
+                continue;
+            }
+
+            String protoPath = APEX_DIR + "/" + prefix + APEX_ACONFIG_PATH_SUFFIX;
+            if (allFiles.contains(protoPath)) {
+                paths.add(protoPath);
+            }
+        }
+        return paths;
+    }
+}
diff --git a/tools/aconfig/aconfig_device_paths/src/lib.rs b/tools/aconfig/aconfig_device_paths/src/lib.rs
index 9ab9cea..8871b4f 100644
--- a/tools/aconfig/aconfig_device_paths/src/lib.rs
+++ b/tools/aconfig/aconfig_device_paths/src/lib.rs
@@ -62,13 +62,12 @@
 
     #[test]
     fn test_read_partition_paths() {
-        assert_eq!(read_partition_paths().len(), 4);
+        assert_eq!(read_partition_paths().len(), 3);
 
         assert_eq!(
             read_partition_paths(),
             vec![
                 PathBuf::from("/system/etc/aconfig_flags.pb"),
-                PathBuf::from("/system_ext/etc/aconfig_flags.pb"),
                 PathBuf::from("/product/etc/aconfig_flags.pb"),
                 PathBuf::from("/vendor/etc/aconfig_flags.pb")
             ]
diff --git a/tools/aconfig/aconfig_flags/Android.bp b/tools/aconfig/aconfig_flags/Android.bp
new file mode 100644
index 0000000..4c1fd4e
--- /dev/null
+++ b/tools/aconfig/aconfig_flags/Android.bp
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+rust_library {
+    name: "libaconfig_flags",
+    crate_name: "aconfig_flags",
+    srcs: [
+        "src/lib.rs",
+    ],
+    rustlibs: [
+        "libaconfig_flags_rust",
+    ],
+    host_supported: true,
+}
+
+aconfig_declarations {
+    name: "aconfig_flags",
+    package: "com.android.aconfig.flags",
+    container: "system",
+    srcs: ["flags.aconfig"],
+}
+
+rust_aconfig_library {
+    name: "libaconfig_flags_rust",
+    crate_name: "aconfig_flags_rust",
+    aconfig_declarations: "aconfig_flags",
+    host_supported: true,
+}
+
+cc_aconfig_library {
+    name: "libaconfig_flags_cc",
+    aconfig_declarations: "aconfig_flags",
+}
+
+java_aconfig_library {
+    name: "aconfig_flags_java",
+    aconfig_declarations: "aconfig_flags",
+}
diff --git a/tools/aconfig/aconfig_flags/Cargo.toml b/tools/aconfig/aconfig_flags/Cargo.toml
new file mode 100644
index 0000000..6eb9f14
--- /dev/null
+++ b/tools/aconfig/aconfig_flags/Cargo.toml
@@ -0,0 +1,10 @@
+[package]
+name = "aconfig_flags"
+version = "0.1.0"
+edition = "2021"
+
+[features]
+default = ["cargo"]
+cargo = []
+
+[dependencies]
\ No newline at end of file
diff --git a/tools/aconfig/aconfig_flags/flags.aconfig b/tools/aconfig/aconfig_flags/flags.aconfig
new file mode 100644
index 0000000..b66d282
--- /dev/null
+++ b/tools/aconfig/aconfig_flags/flags.aconfig
@@ -0,0 +1,24 @@
+package: "com.android.aconfig.flags"
+container: "system"
+
+flag {
+  name: "enable_only_new_storage"
+  namespace: "core_experiments_team_internal"
+  bug: "312235596"
+  description: "When enabled, aconfig flags are read from the new aconfig storage only."
+}
+
+flag {
+  name: "enable_aconfigd_from_mainline"
+  namespace: "core_experiments_team_internal"
+  bug: "369808805"
+  description: "When enabled, launch aconfigd from config infra module."
+}
+
+flag {
+  name: "enable_system_aconfigd_rust"
+  namespace: "core_experiments_team_internal"
+  bug: "378079539"
+  description: "When enabled, the aconfigd cc_binary target becomes a no-op, and the rust_binary aconfigd-system target starts up."
+  is_fixed_read_only: true
+}
diff --git a/tools/aconfig/aconfig_flags/src/lib.rs b/tools/aconfig/aconfig_flags/src/lib.rs
new file mode 100644
index 0000000..b413c62
--- /dev/null
+++ b/tools/aconfig/aconfig_flags/src/lib.rs
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//! `aconfig_flags` is a crate for reading aconfig flags from Rust
+// When building with the Android tool-chain
+//
+//   - the flag functions will read from aconfig_flags_inner
+//   - the feature "cargo" will be disabled
+//
+// When building with cargo
+//
+//   - the flag functions will all return some trivial value, like true
+//   - the feature "cargo" will be enabled
+//
+// This module hides these differences from the rest of aconfig.
+
+/// Module used when building with the Android tool-chain
+#[cfg(not(feature = "cargo"))]
+pub mod auto_generated {
+    /// Returns the value for the enable_only_new_storage flag.
+    pub fn enable_only_new_storage() -> bool {
+        aconfig_flags_rust::enable_only_new_storage()
+    }
+
+    /// Returns the value for the enable_aconfigd_from_mainline flag.
+    pub fn enable_aconfigd_from_mainline() -> bool {
+        aconfig_flags_rust::enable_aconfigd_from_mainline()
+    }
+
+    /// Returns the value for the enable_system_aconfigd_rust flag.
+    pub fn enable_system_aconfigd_rust() -> bool {
+        aconfig_flags_rust::enable_system_aconfigd_rust()
+    }
+}
+
+/// Module used when building with cargo
+#[cfg(feature = "cargo")]
+pub mod auto_generated {
+    /// Returns a placeholder value for the enable_only_new_storage flag.
+    pub fn enable_only_new_storage() -> bool {
+        // Used only to enable typechecking and testing with cargo
+        true
+    }
+
+    /// Returns a placeholder value for the enable_aconfigd_from_mainline flag.
+    pub fn enable_aconfigd_from_mainline() -> bool {
+        // Used only to enable typechecking and testing with cargo
+        true
+    }
+
+    /// Returns a placeholder value for the enable_system_aconfigd_rust flag.
+    pub fn enable_system_aconfigd_rust() -> bool {
+        // Used only to enable typechecking and testing with cargo
+        true
+    }
+}
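
The `auto_generated` module above is what callers use to gate behavior on these flags; the regenerated Java template earlier in this change checks the `/metadata/aconfig/boot/enable_only_new_storage` marker for the same decision. A usage sketch, assuming a dependency on this `aconfig_flags` crate (Soong `rustlibs: ["libaconfig_flags"]`, or the Cargo package where the "cargo" feature makes every accessor return a trivial `true`):

```rust
// Assumes the `aconfig_flags` crate defined above is available as a dependency.
fn choose_storage_backend() -> &'static str {
    if aconfig_flags::auto_generated::enable_only_new_storage() {
        // Read flag values exclusively from the new aconfig storage files.
        "new aconfig storage"
    } else {
        // Fall back to the legacy DeviceConfig-backed path.
        "legacy storage"
    }
}

fn main() {
    println!("reading flags from: {}", choose_storage_backend());
}
```
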
diff --git a/tools/aconfig/aconfig_storage_file/Android.bp b/tools/aconfig/aconfig_storage_file/Android.bp
index 3859194..e875c7b 100644
--- a/tools/aconfig/aconfig_storage_file/Android.bp
+++ b/tools/aconfig/aconfig_storage_file/Android.bp
@@ -14,6 +14,7 @@
         "libclap",
         "libcxx",
         "libaconfig_storage_protos",
+        "libserde",
     ],
 }
 
@@ -36,7 +37,10 @@
     name: "aconfig-storage",
     defaults: ["aconfig_storage_file.defaults"],
     srcs: ["src/main.rs"],
-    rustlibs: ["libaconfig_storage_file"],
+    rustlibs: [
+        "libaconfig_storage_file",
+        "libserde_json",
+    ],
 }
 
 rust_test_host {
@@ -138,11 +142,28 @@
     double_loadable: true,
 }
 
-// storage file parse api java cc_library
+// storage file parse api java library
 java_library {
     name: "aconfig_storage_file_java",
     srcs: [
         "srcs/**/*.java",
     ],
     sdk_version: "core_current",
-}
\ No newline at end of file
+    min_sdk_version: "29",
+    host_supported: true,
+    apex_available: [
+        "//apex_available:platform",
+        "//apex_available:anyapex",
+    ],
+}
+
+// storage file parse api java library for core library
+java_library {
+    name: "aconfig_storage_file_java_none",
+    srcs: [
+        "srcs/**/*.java",
+    ],
+    sdk_version: "none",
+    system_modules: "core-all-system-modules",
+    host_supported: true,
+}
diff --git a/tools/aconfig/aconfig_storage_file/Cargo.toml b/tools/aconfig/aconfig_storage_file/Cargo.toml
index 192dfad..a405578 100644
--- a/tools/aconfig/aconfig_storage_file/Cargo.toml
+++ b/tools/aconfig/aconfig_storage_file/Cargo.toml
@@ -14,6 +14,8 @@
 thiserror = "1.0.56"
 clap = { version = "4.1.8", features = ["derive"] }
 cxx = "1.0"
+serde = { version = "1.0.152", features = ["derive"] }
+serde_json = "1.0.93"
 
 [[bin]]
 name = "aconfig-storage"
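The new libserde/libserde_json dependencies back the Serialize/Deserialize derives added to the storage structs below (FlagInfoList, FlagTable, FlagValueList, and friends), and the aconfig-storage CLI now links serde_json, presumably to print parsed files as JSON. A minimal sketch of that pattern with a hypothetical struct, assuming serde (with the derive feature) and serde_json from the Cargo.toml above:

```rust
use serde::{Deserialize, Serialize};

// Illustrative stand-in for the real storage structs that now derive
// Serialize/Deserialize (e.g. FlagValueHeader with version and container).
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct ExampleHeader {
    version: u32,
    container: String,
}

fn main() -> Result<(), serde_json::Error> {
    let header = ExampleHeader { version: 1, container: "system".to_string() };
    // Render the struct as JSON text.
    let json = serde_json::to_string_pretty(&header)?;
    println!("{json}");
    // Round-trip back into the struct.
    let parsed: ExampleHeader = serde_json::from_str(&json)?;
    assert_eq!(parsed, header);
    Ok(())
}
```
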
diff --git a/tools/aconfig/aconfig_storage_file/src/flag_info.rs b/tools/aconfig/aconfig_storage_file/src/flag_info.rs
index beac38d..cf16834 100644
--- a/tools/aconfig/aconfig_storage_file/src/flag_info.rs
+++ b/tools/aconfig/aconfig_storage_file/src/flag_info.rs
@@ -20,10 +20,11 @@
 use crate::{read_str_from_bytes, read_u32_from_bytes, read_u8_from_bytes};
 use crate::{AconfigStorageError, StorageFileType};
 use anyhow::anyhow;
+use serde::{Deserialize, Serialize};
 use std::fmt;
 
 /// Flag info header struct
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct FlagInfoHeader {
     pub version: u32,
     pub container: String,
@@ -89,7 +90,7 @@
 }
 
 /// bit field for flag info
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
 pub enum FlagInfoBit {
     HasServerOverride = 1 << 0,
     IsReadWrite = 1 << 1,
@@ -97,7 +98,7 @@
 }
 
 /// Flag info node struct
-#[derive(PartialEq, Clone)]
+#[derive(PartialEq, Clone, Serialize, Deserialize)]
 pub struct FlagInfoNode {
     pub attributes: u8,
 }
@@ -138,7 +139,7 @@
 }
 
 /// Flag info list struct
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct FlagInfoList {
     pub header: FlagInfoHeader,
     pub nodes: Vec<FlagInfoNode>,
@@ -193,12 +194,15 @@
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::test_utils::create_test_flag_info_list;
+    use crate::{
+        test_utils::create_test_flag_info_list, DEFAULT_FILE_VERSION, MAX_SUPPORTED_FILE_VERSION,
+    };
 
-    #[test]
     // this test point locks down the value list serialization
-    fn test_serialization() {
-        let flag_info_list = create_test_flag_info_list();
+    // TODO: b/376108268 - Use parameterized tests.
+    #[test]
+    fn test_serialization_default() {
+        let flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION);
 
         let header: &FlagInfoHeader = &flag_info_list.header;
         let reinterpreted_header = FlagInfoHeader::from_bytes(&header.into_bytes());
@@ -219,20 +223,42 @@
     }
 
     #[test]
-    // this test point locks down that version number should be at the top of serialized
-    // bytes
-    fn test_version_number() {
-        let flag_info_list = create_test_flag_info_list();
-        let bytes = &flag_info_list.into_bytes();
-        let mut head = 0;
-        let version = read_u32_from_bytes(bytes, &mut head).unwrap();
-        assert_eq!(version, 1);
+    fn test_serialization_max() {
+        let flag_info_list = create_test_flag_info_list(MAX_SUPPORTED_FILE_VERSION);
+
+        let header: &FlagInfoHeader = &flag_info_list.header;
+        let reinterpreted_header = FlagInfoHeader::from_bytes(&header.into_bytes());
+        assert!(reinterpreted_header.is_ok());
+        assert_eq!(header, &reinterpreted_header.unwrap());
+
+        let nodes: &Vec<FlagInfoNode> = &flag_info_list.nodes;
+        for node in nodes.iter() {
+            let reinterpreted_node = FlagInfoNode::from_bytes(&node.into_bytes()).unwrap();
+            assert_eq!(node, &reinterpreted_node);
+        }
+
+        let flag_info_bytes = flag_info_list.into_bytes();
+        let reinterpreted_info_list = FlagInfoList::from_bytes(&flag_info_bytes);
+        assert!(reinterpreted_info_list.is_ok());
+        assert_eq!(&flag_info_list, &reinterpreted_info_list.unwrap());
+        assert_eq!(flag_info_bytes.len() as u32, header.file_size);
     }
 
+    // this test point locks down that version number should be at the top of serialized
+    // bytes
     #[test]
+    fn test_version_number() {
+        let flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION);
+        let bytes = &flag_info_list.into_bytes();
+        let mut head = 0;
+        let version_from_file = read_u32_from_bytes(bytes, &mut head).unwrap();
+        assert_eq!(version_from_file, DEFAULT_FILE_VERSION);
+    }
+
     // this test point locks down file type check
+    #[test]
     fn test_file_type_check() {
-        let mut flag_info_list = create_test_flag_info_list();
+        let mut flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION);
         flag_info_list.header.file_type = 123u8;
         let error = FlagInfoList::from_bytes(&flag_info_list.into_bytes()).unwrap_err();
         assert_eq!(
diff --git a/tools/aconfig/aconfig_storage_file/src/flag_table.rs b/tools/aconfig/aconfig_storage_file/src/flag_table.rs
index 64b90ea..6fbee02 100644
--- a/tools/aconfig/aconfig_storage_file/src/flag_table.rs
+++ b/tools/aconfig/aconfig_storage_file/src/flag_table.rs
@@ -23,10 +23,11 @@
 };
 use crate::{AconfigStorageError, StorageFileType, StoredFlagType};
 use anyhow::anyhow;
+use serde::{Deserialize, Serialize};
 use std::fmt;
 
 /// Flag table header struct
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct FlagTableHeader {
     pub version: u32,
     pub container: String,
@@ -95,7 +96,7 @@
 }
 
 /// Flag table node struct
-#[derive(PartialEq, Clone)]
+#[derive(PartialEq, Clone, Serialize, Deserialize)]
 pub struct FlagTableNode {
     pub package_id: u32,
     pub flag_name: String,
@@ -150,11 +151,11 @@
     /// Calculate node bucket index
     pub fn find_bucket_index(package_id: u32, flag_name: &str, num_buckets: u32) -> u32 {
         let full_flag_name = package_id.to_string() + "/" + flag_name;
-        get_bucket_index(&full_flag_name, num_buckets)
+        get_bucket_index(full_flag_name.as_bytes(), num_buckets)
     }
 }
 
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct FlagTable {
     pub header: FlagTableHeader,
     pub buckets: Vec<Option<u32>>,
@@ -219,12 +220,15 @@
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::test_utils::create_test_flag_table;
+    use crate::{
+        test_utils::create_test_flag_table, DEFAULT_FILE_VERSION, MAX_SUPPORTED_FILE_VERSION,
+    };
 
-    #[test]
     // this test point locks down the table serialization
-    fn test_serialization() {
-        let flag_table = create_test_flag_table();
+    // TODO: b/376108268 - Use parameterized tests.
+    #[test]
+    fn test_serialization_default() {
+        let flag_table = create_test_flag_table(DEFAULT_FILE_VERSION);
 
         let header: &FlagTableHeader = &flag_table.header;
         let reinterpreted_header = FlagTableHeader::from_bytes(&header.into_bytes());
@@ -245,20 +249,42 @@
     }
 
     #[test]
-    // this test point locks down that version number should be at the top of serialized
-    // bytes
-    fn test_version_number() {
-        let flag_table = create_test_flag_table();
-        let bytes = &flag_table.into_bytes();
-        let mut head = 0;
-        let version = read_u32_from_bytes(bytes, &mut head).unwrap();
-        assert_eq!(version, 1);
+    fn test_serialization_max() {
+        let flag_table = create_test_flag_table(MAX_SUPPORTED_FILE_VERSION);
+
+        let header: &FlagTableHeader = &flag_table.header;
+        let reinterpreted_header = FlagTableHeader::from_bytes(&header.into_bytes());
+        assert!(reinterpreted_header.is_ok());
+        assert_eq!(header, &reinterpreted_header.unwrap());
+
+        let nodes: &Vec<FlagTableNode> = &flag_table.nodes;
+        for node in nodes.iter() {
+            let reinterpreted_node = FlagTableNode::from_bytes(&node.into_bytes()).unwrap();
+            assert_eq!(node, &reinterpreted_node);
+        }
+
+        let flag_table_bytes = flag_table.into_bytes();
+        let reinterpreted_table = FlagTable::from_bytes(&flag_table_bytes);
+        assert!(reinterpreted_table.is_ok());
+        assert_eq!(&flag_table, &reinterpreted_table.unwrap());
+        assert_eq!(flag_table_bytes.len() as u32, header.file_size);
     }
 
+    // this test point locks down that version number should be at the top of serialized
+    // bytes
     #[test]
+    fn test_version_number() {
+        let flag_table = create_test_flag_table(DEFAULT_FILE_VERSION);
+        let bytes = &flag_table.into_bytes();
+        let mut head = 0;
+        let version_from_file = read_u32_from_bytes(bytes, &mut head).unwrap();
+        assert_eq!(version_from_file, DEFAULT_FILE_VERSION);
+    }
+
     // this test point locks down file type check
+    #[test]
     fn test_file_type_check() {
-        let mut flag_table = create_test_flag_table();
+        let mut flag_table = create_test_flag_table(DEFAULT_FILE_VERSION);
         flag_table.header.file_type = 123u8;
         let error = FlagTable::from_bytes(&flag_table.into_bytes()).unwrap_err();
         assert_eq!(
diff --git a/tools/aconfig/aconfig_storage_file/src/flag_value.rs b/tools/aconfig/aconfig_storage_file/src/flag_value.rs
index 506924b..9a14bec 100644
--- a/tools/aconfig/aconfig_storage_file/src/flag_value.rs
+++ b/tools/aconfig/aconfig_storage_file/src/flag_value.rs
@@ -20,10 +20,11 @@
 use crate::{read_str_from_bytes, read_u32_from_bytes, read_u8_from_bytes};
 use crate::{AconfigStorageError, StorageFileType};
 use anyhow::anyhow;
+use serde::{Deserialize, Serialize};
 use std::fmt;
 
 /// Flag value header struct
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct FlagValueHeader {
     pub version: u32,
     pub container: String,
@@ -89,7 +90,7 @@
 }
 
 /// Flag value list struct
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct FlagValueList {
     pub header: FlagValueHeader,
     pub booleans: Vec<bool>,
@@ -131,12 +132,32 @@
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::test_utils::create_test_flag_value_list;
+    use crate::{
+        test_utils::create_test_flag_value_list, DEFAULT_FILE_VERSION, MAX_SUPPORTED_FILE_VERSION,
+    };
 
     #[test]
     // this test point locks down the value list serialization
-    fn test_serialization() {
-        let flag_value_list = create_test_flag_value_list();
+    // TODO: b/376108268 - Use parameterized tests.
+    fn test_serialization_default() {
+        let flag_value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION);
+
+        let header: &FlagValueHeader = &flag_value_list.header;
+        let reinterpreted_header = FlagValueHeader::from_bytes(&header.into_bytes());
+        assert!(reinterpreted_header.is_ok());
+        assert_eq!(header, &reinterpreted_header.unwrap());
+
+        let flag_value_bytes = flag_value_list.into_bytes();
+        let reinterpreted_value_list = FlagValueList::from_bytes(&flag_value_bytes);
+        assert!(reinterpreted_value_list.is_ok());
+        assert_eq!(&flag_value_list, &reinterpreted_value_list.unwrap());
+        assert_eq!(flag_value_bytes.len() as u32, header.file_size);
+    }
+
+    #[test]
+    // this test point locks down the value list serialization
+    fn test_serialization_max() {
+        let flag_value_list = create_test_flag_value_list(MAX_SUPPORTED_FILE_VERSION);
 
         let header: &FlagValueHeader = &flag_value_list.header;
         let reinterpreted_header = FlagValueHeader::from_bytes(&header.into_bytes());
@@ -154,17 +175,17 @@
     // this test point locks down that version number should be at the top of serialized
     // bytes
     fn test_version_number() {
-        let flag_value_list = create_test_flag_value_list();
+        let flag_value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION);
         let bytes = &flag_value_list.into_bytes();
         let mut head = 0;
-        let version = read_u32_from_bytes(bytes, &mut head).unwrap();
-        assert_eq!(version, 1);
+        let version_from_file = read_u32_from_bytes(bytes, &mut head).unwrap();
+        assert_eq!(version_from_file, DEFAULT_FILE_VERSION);
     }
 
     #[test]
     // this test point locks down file type check
     fn test_file_type_check() {
-        let mut flag_value_list = create_test_flag_value_list();
+        let mut flag_value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION);
         flag_value_list.header.file_type = 123u8;
         let error = FlagValueList::from_bytes(&flag_value_list.into_bytes()).unwrap_err();
         assert_eq!(
diff --git a/tools/aconfig/aconfig_storage_file/src/lib.rs b/tools/aconfig/aconfig_storage_file/src/lib.rs
index 26e9c1a..e991320 100644
--- a/tools/aconfig/aconfig_storage_file/src/lib.rs
+++ b/tools/aconfig/aconfig_storage_file/src/lib.rs
@@ -37,26 +37,33 @@
 pub mod flag_value;
 pub mod package_table;
 pub mod protos;
+pub mod sip_hasher13;
 pub mod test_utils;
 
 use anyhow::anyhow;
+use serde::{Deserialize, Serialize};
 use std::cmp::Ordering;
-use std::collections::hash_map::DefaultHasher;
 use std::fs::File;
-use std::hash::{Hash, Hasher};
+use std::hash::Hasher;
 use std::io::Read;
 
 pub use crate::flag_info::{FlagInfoBit, FlagInfoHeader, FlagInfoList, FlagInfoNode};
 pub use crate::flag_table::{FlagTable, FlagTableHeader, FlagTableNode};
 pub use crate::flag_value::{FlagValueHeader, FlagValueList};
 pub use crate::package_table::{PackageTable, PackageTableHeader, PackageTableNode};
+pub use crate::sip_hasher13::SipHasher13;
 
 use crate::AconfigStorageError::{
     BytesParseFail, HashTableSizeLimit, InvalidFlagValueType, InvalidStoredFlagType,
 };
 
-/// Storage file version
-pub const FILE_VERSION: u32 = 1;
+/// The maximum storage file version that this library can safely read and write;
+/// this version may still be experimental.
+pub const MAX_SUPPORTED_FILE_VERSION: u32 = 2;
+
+/// The newest fully-released version. Unless otherwise specified, this is the
+/// version we will write.
+pub const DEFAULT_FILE_VERSION: u32 = 1;
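+
+// A minimal illustrative sketch (hypothetical helper, not part of the existing API): how a
+// reader could gate parsing on the constants above. `BytesParseFail` is reused here only
+// because it is the error variant already imported in this file.
+pub fn check_version_supported(version: u32) -> Result<(), AconfigStorageError> {
+    if version > MAX_SUPPORTED_FILE_VERSION {
+        return Err(BytesParseFail(anyhow!(
+            "file version {} is newer than the supported maximum {}",
+            version,
+            MAX_SUPPORTED_FILE_VERSION
+        )));
+    }
+    Ok(())
+}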
 
 /// Good hash table prime number
 pub(crate) const HASH_PRIMES: [u32; 29] = [
@@ -106,7 +113,7 @@
 
 /// Flag type enum as stored by storage file
 /// ONLY APPEND, NEVER REMOVE FOR BACKWARD COMPATIBILITY. THE MAX IS U16.
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
 pub enum StoredFlagType {
     ReadWriteBoolean = 0,
     ReadOnlyBoolean = 1,
@@ -211,10 +218,12 @@
 }
 
 /// Get the corresponding bucket index given the key and number of buckets
-pub(crate) fn get_bucket_index<T: Hash>(val: &T, num_buckets: u32) -> u32 {
-    let mut s = DefaultHasher::new();
-    val.hash(&mut s);
-    (s.finish() % num_buckets as u64) as u32
+pub(crate) fn get_bucket_index(val: &[u8], num_buckets: u32) -> u32 {
+    let mut s = SipHasher13::new();
+    s.write(val);
+    // Append the same 0xff terminator that `impl Hash for str` writes after the string
+    // bytes, so hashing a byte slice here matches hashing the equivalent `&str` through
+    // the `Hash` trait (locked down by the tests in sip_hasher13.rs).
+    s.write_u8(0xff);
+    (s.finish() % num_buckets as u64) as u32
 }
 
 /// Read and parse bytes as u8
@@ -240,6 +249,11 @@
     Ok(val)
 }
 
+/// Read and parse the first 4 bytes of buf as u32.
+pub fn read_u32_from_start_of_bytes(buf: &[u8]) -> Result<u32, AconfigStorageError> {
+    read_u32_from_bytes(buf, &mut 0)
+}
+
 /// Read and parse bytes as u32
 pub fn read_u32_from_bytes(buf: &[u8], head: &mut usize) -> Result<u32, AconfigStorageError> {
     let val =
@@ -250,6 +264,16 @@
     Ok(val)
 }
 
+/// Read and parse bytes as u64
+pub fn read_u64_from_bytes(buf: &[u8], head: &mut usize) -> Result<u64, AconfigStorageError> {
+    let val =
+        u64::from_le_bytes(buf[*head..*head + 8].try_into().map_err(|errmsg| {
+            BytesParseFail(anyhow!("fail to parse u64 from bytes: {}", errmsg))
+        })?);
+    *head += 8;
+    Ok(val)
+}
+
 /// Read and parse bytes as string
 pub(crate) fn read_str_from_bytes(
     buf: &[u8],
@@ -512,10 +536,15 @@
     // this test point locks down the flag list api
     fn test_list_flag() {
         let package_table =
-            write_bytes_to_temp_file(&create_test_package_table().into_bytes()).unwrap();
-        let flag_table = write_bytes_to_temp_file(&create_test_flag_table().into_bytes()).unwrap();
-        let flag_value_list =
-            write_bytes_to_temp_file(&create_test_flag_value_list().into_bytes()).unwrap();
+            write_bytes_to_temp_file(&create_test_package_table(DEFAULT_FILE_VERSION).into_bytes())
+                .unwrap();
+        let flag_table =
+            write_bytes_to_temp_file(&create_test_flag_table(DEFAULT_FILE_VERSION).into_bytes())
+                .unwrap();
+        let flag_value_list = write_bytes_to_temp_file(
+            &create_test_flag_value_list(DEFAULT_FILE_VERSION).into_bytes(),
+        )
+        .unwrap();
 
         let package_table_path = package_table.path().display().to_string();
         let flag_table_path = flag_table.path().display().to_string();
@@ -580,12 +609,19 @@
     // this test point locks down the flag list with info api
     fn test_list_flag_with_info() {
         let package_table =
-            write_bytes_to_temp_file(&create_test_package_table().into_bytes()).unwrap();
-        let flag_table = write_bytes_to_temp_file(&create_test_flag_table().into_bytes()).unwrap();
-        let flag_value_list =
-            write_bytes_to_temp_file(&create_test_flag_value_list().into_bytes()).unwrap();
-        let flag_info_list =
-            write_bytes_to_temp_file(&create_test_flag_info_list().into_bytes()).unwrap();
+            write_bytes_to_temp_file(&create_test_package_table(DEFAULT_FILE_VERSION).into_bytes())
+                .unwrap();
+        let flag_table =
+            write_bytes_to_temp_file(&create_test_flag_table(DEFAULT_FILE_VERSION).into_bytes())
+                .unwrap();
+        let flag_value_list = write_bytes_to_temp_file(
+            &create_test_flag_value_list(DEFAULT_FILE_VERSION).into_bytes(),
+        )
+        .unwrap();
+        let flag_info_list = write_bytes_to_temp_file(
+            &create_test_flag_info_list(DEFAULT_FILE_VERSION).into_bytes(),
+        )
+        .unwrap();
 
         let package_table_path = package_table.path().display().to_string();
         let flag_table_path = flag_table.path().display().to_string();
diff --git a/tools/aconfig/aconfig_storage_file/src/main.rs b/tools/aconfig/aconfig_storage_file/src/main.rs
index 8b9e38d..a9cfd19 100644
--- a/tools/aconfig/aconfig_storage_file/src/main.rs
+++ b/tools/aconfig/aconfig_storage_file/src/main.rs
@@ -20,9 +20,29 @@
     list_flags, list_flags_with_info, read_file_to_bytes, AconfigStorageError, FlagInfoList,
     FlagTable, FlagValueList, PackageTable, StorageFileType,
 };
-
 use clap::{builder::ArgAction, Arg, Command};
+use serde::Serialize;
+use serde_json;
+use std::fmt;
+use std::fs;
+use std::fs::File;
+use std::io::Write;
 
+/**
+ * Usage Examples
+ *
+ * Print file:
+ * $ aconfig-storage print --file=path/to/flag.map --type=flag_map
+ *
+ * List flags:
+ * $ aconfig-storage list --flag-map=path/to/flag.map \
+ * --flag-val=path/to/flag.val --package-map=path/to/package.map
+ *
+ * Write binary file for testing:
+ * $ aconfig-storage print --file=path/to/flag.map --type=flag_map --format=json > flag_map.json
+ * $ vim flag_map.json // Manually make updates
+ * $ aconfig-storage write-bytes --input-file=flag_map.json --output-file=path/to/flag.map --type=flag_map
+ */
 fn cli() -> Command {
     Command::new("aconfig-storage")
         .subcommand_required(true)
@@ -34,7 +54,8 @@
                         .long("type")
                         .required(true)
                         .value_parser(|s: &str| StorageFileType::try_from(s)),
-                ),
+                )
+                .arg(Arg::new("format").long("format").required(false).action(ArgAction::Set)),
         )
         .subcommand(
             Command::new("list")
@@ -50,41 +71,75 @@
                     Arg::new("flag-info").long("flag-info").required(false).action(ArgAction::Set),
                 ),
         )
+        .subcommand(
+            Command::new("write-bytes")
+                // Where to write the output bytes. Using the StorageFileType file names (e.g. flag.map) is recommended.
+                .arg(
+                    Arg::new("output-file")
+                        .long("output-file")
+                        .required(true)
+                        .action(ArgAction::Set),
+                )
+                // The input file must be JSON (as produced by `print --format=json`).
+                .arg(
+                    Arg::new("input-file").long("input-file").required(true).action(ArgAction::Set),
+                )
+                .arg(
+                    Arg::new("type")
+                        .long("type")
+                        .required(true)
+                        .value_parser(|s: &str| StorageFileType::try_from(s)),
+                ),
+        )
 }
 
 fn print_storage_file(
     file_path: &str,
     file_type: &StorageFileType,
+    as_json: bool,
 ) -> Result<(), AconfigStorageError> {
     let bytes = read_file_to_bytes(file_path)?;
     match file_type {
         StorageFileType::PackageMap => {
             let package_table = PackageTable::from_bytes(&bytes)?;
-            println!("{:?}", package_table);
+            println!("{}", to_print_format(package_table, as_json));
         }
         StorageFileType::FlagMap => {
             let flag_table = FlagTable::from_bytes(&bytes)?;
-            println!("{:?}", flag_table);
+            println!("{}", to_print_format(flag_table, as_json));
         }
         StorageFileType::FlagVal => {
             let flag_value = FlagValueList::from_bytes(&bytes)?;
-            println!("{:?}", flag_value);
+            println!("{}", to_print_format(flag_value, as_json));
         }
         StorageFileType::FlagInfo => {
             let flag_info = FlagInfoList::from_bytes(&bytes)?;
-            println!("{:?}", flag_info);
+            println!("{}", to_print_format(flag_info, as_json));
         }
     }
     Ok(())
 }
 
+fn to_print_format<T>(file_contents: T, as_json: bool) -> String
+where
+    T: Serialize + fmt::Debug,
+{
+    if as_json {
+        serde_json::to_string(&file_contents).unwrap()
+    } else {
+        format!("{:?}", file_contents)
+    }
+}
+
 fn main() -> Result<(), AconfigStorageError> {
     let matches = cli().get_matches();
     match matches.subcommand() {
         Some(("print", sub_matches)) => {
             let file_path = sub_matches.get_one::<String>("file").unwrap();
             let file_type = sub_matches.get_one::<StorageFileType>("type").unwrap();
-            print_storage_file(file_path, file_type)?
+            let format = sub_matches.get_one::<String>("format");
+            let as_json = format.map(String::as_str) == Some("json");
+            print_storage_file(file_path, file_type, as_json)?
         }
         Some(("list", sub_matches)) => {
             let package_map = sub_matches.get_one::<String>("package-map").unwrap();
@@ -96,10 +151,10 @@
                     let flags = list_flags_with_info(package_map, flag_map, flag_val, info_file)?;
                     for flag in flags.iter() {
                         println!(
-                            "{} {} {} {:?} IsReadWrite: {}, HasServerOverride: {}, HasLocalOverride: {}",
-                            flag.package_name, flag.flag_name, flag.flag_value, flag.value_type,
-                            flag.is_readwrite, flag.has_server_override, flag.has_local_override,
-                        );
+                          "{} {} {} {:?} IsReadWrite: {}, HasServerOverride: {}, HasLocalOverride: {}",
+                          flag.package_name, flag.flag_name, flag.flag_value, flag.value_type,
+                          flag.is_readwrite, flag.has_server_override, flag.has_local_override,
+                      );
                     }
                 }
                 None => {
@@ -113,6 +168,40 @@
                 }
             }
         }
+        // Converts a JSON representation of the file into the raw bytes used on-device.
+        // Intended to make it easy to generate or update these files for testing.
+        Some(("write-bytes", sub_matches)) => {
+            let input_file_path = sub_matches.get_one::<String>("input-file").unwrap();
+            let input_json = fs::read_to_string(input_file_path).unwrap();
+
+            let file_type = sub_matches.get_one::<StorageFileType>("type").unwrap();
+            let output_bytes: Vec<u8> = match file_type {
+                StorageFileType::FlagVal => {
+                    let list: FlagValueList = serde_json::from_str(&input_json).unwrap();
+                    list.into_bytes()
+                }
+                StorageFileType::FlagInfo => {
+                    let list: FlagInfoList = serde_json::from_str(&input_json).unwrap();
+                    list.into_bytes()
+                }
+                StorageFileType::FlagMap => {
+                    let table: FlagTable = serde_json::from_str(&input_json).unwrap();
+                    table.into_bytes()
+                }
+                StorageFileType::PackageMap => {
+                    let table: PackageTable = serde_json::from_str(&input_json).unwrap();
+                    table.into_bytes()
+                }
+            };
+
+            let output_file_path = sub_matches.get_one::<String>("output-file").unwrap();
+            let mut file =
+                File::create(output_file_path).expect("failed to create output file");
+            file.write_all(&output_bytes).expect("failed to write output bytes");
+        }
         _ => unreachable!(),
     }
     Ok(())
diff --git a/tools/aconfig/aconfig_storage_file/src/package_table.rs b/tools/aconfig/aconfig_storage_file/src/package_table.rs
index b734972..21357c7 100644
--- a/tools/aconfig/aconfig_storage_file/src/package_table.rs
+++ b/tools/aconfig/aconfig_storage_file/src/package_table.rs
@@ -17,13 +17,17 @@
 //! package table module defines the package table file format and methods for serialization
 //! and deserialization
 
-use crate::{get_bucket_index, read_str_from_bytes, read_u32_from_bytes, read_u8_from_bytes};
+use crate::{
+    get_bucket_index, read_str_from_bytes, read_u32_from_bytes, read_u64_from_bytes,
+    read_u8_from_bytes,
+};
 use crate::{AconfigStorageError, StorageFileType};
 use anyhow::anyhow;
+use serde::{Deserialize, Serialize};
 use std::fmt;
 
 /// Package table header struct
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct PackageTableHeader {
     pub version: u32,
     pub container: String,
@@ -92,10 +96,11 @@
 }
 
 /// Package table node struct
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct PackageTableNode {
     pub package_name: String,
     pub package_id: u32,
+    pub fingerprint: u64,
     // The index of the first boolean flag in this aconfig package among all boolean
     // flags in this container.
     pub boolean_start_index: u32,
@@ -107,8 +112,12 @@
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         writeln!(
             f,
-            "Package: {}, Id: {}, Boolean flag start index: {}, Next: {:?}",
-            self.package_name, self.package_id, self.boolean_start_index, self.next_offset
+            "Package: {}, Id: {}, Fingerprint: {}, Boolean flag start index: {}, Next: {:?}",
+            self.package_name,
+            self.package_id,
+            self.fingerprint,
+            self.boolean_start_index,
+            self.next_offset
         )?;
         Ok(())
     }
@@ -116,7 +125,16 @@
 
 impl PackageTableNode {
     /// Serialize to bytes
-    pub fn into_bytes(&self) -> Vec<u8> {
+    pub fn into_bytes(&self, version: u32) -> Vec<u8> {
+        match version {
+            1 => Self::into_bytes_v1(self),
+            2 => Self::into_bytes_v2(self),
+            // TODO(b/316357686): into_bytes should return a Result.
+            _ => Self::into_bytes_v2(self),
+        }
+    }
+
+    fn into_bytes_v1(&self) -> Vec<u8> {
         let mut result = Vec::new();
         let name_bytes = self.package_name.as_bytes();
         result.extend_from_slice(&(name_bytes.len() as u32).to_le_bytes());
@@ -127,18 +145,60 @@
         result
     }
 
-    /// Deserialize from bytes
-    pub fn from_bytes(bytes: &[u8]) -> Result<Self, AconfigStorageError> {
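+    /// v2 node layout (all integers little-endian), matching the writes below:
+    /// u32 package_name length, package_name bytes, u32 package_id, u64 fingerprint,
+    /// u32 boolean_start_index, u32 next_offset (0 encodes "no next node").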
+    fn into_bytes_v2(&self) -> Vec<u8> {
+        let mut result = Vec::new();
+        let name_bytes = self.package_name.as_bytes();
+        result.extend_from_slice(&(name_bytes.len() as u32).to_le_bytes());
+        result.extend_from_slice(name_bytes);
+        result.extend_from_slice(&self.package_id.to_le_bytes());
+        result.extend_from_slice(&self.fingerprint.to_le_bytes());
+        result.extend_from_slice(&self.boolean_start_index.to_le_bytes());
+        result.extend_from_slice(&self.next_offset.unwrap_or(0).to_le_bytes());
+        result
+    }
+
+    /// Deserialize from bytes based on file version.
+    pub fn from_bytes(bytes: &[u8], version: u32) -> Result<Self, AconfigStorageError> {
+        match version {
+            1 => Self::from_bytes_v1(bytes),
+            2 => Self::from_bytes_v2(bytes),
+            _ => {
+                return Err(AconfigStorageError::BytesParseFail(anyhow!(
+                    "Binary file is an unsupported version: {}",
+                    version
+                )))
+            }
+        }
+    }
+
+    fn from_bytes_v1(bytes: &[u8]) -> Result<Self, AconfigStorageError> {
         let mut head = 0;
-        let node = Self {
-            package_name: read_str_from_bytes(bytes, &mut head)?,
-            package_id: read_u32_from_bytes(bytes, &mut head)?,
-            boolean_start_index: read_u32_from_bytes(bytes, &mut head)?,
-            next_offset: match read_u32_from_bytes(bytes, &mut head)? {
-                0 => None,
-                val => Some(val),
-            },
+        let package_name = read_str_from_bytes(bytes, &mut head)?;
+        let package_id = read_u32_from_bytes(bytes, &mut head)?;
+        // v1 does not have fingerprint, so just set to 0.
+        let fingerprint: u64 = 0;
+        let boolean_start_index = read_u32_from_bytes(bytes, &mut head)?;
+        let next_offset = match read_u32_from_bytes(bytes, &mut head)? {
+            0 => None,
+            val => Some(val),
         };
+
+        let node = Self { package_name, package_id, fingerprint, boolean_start_index, next_offset };
+        Ok(node)
+    }
+
+    fn from_bytes_v2(bytes: &[u8]) -> Result<Self, AconfigStorageError> {
+        let mut head = 0;
+        let package_name = read_str_from_bytes(bytes, &mut head)?;
+        let package_id = read_u32_from_bytes(bytes, &mut head)?;
+        let fingerprint = read_u64_from_bytes(bytes, &mut head)?;
+        let boolean_start_index = read_u32_from_bytes(bytes, &mut head)?;
+        let next_offset = match read_u32_from_bytes(bytes, &mut head)? {
+            0 => None,
+            val => Some(val),
+        };
+
+        let node = Self { package_name, package_id, fingerprint, boolean_start_index, next_offset };
         Ok(node)
     }
 
@@ -146,12 +206,12 @@
     /// construction side (aconfig binary) and consumption side (flag read lib)
     /// use the same method of hashing
     pub fn find_bucket_index(package: &str, num_buckets: u32) -> u32 {
-        get_bucket_index(&package, num_buckets)
+        get_bucket_index(package.as_bytes(), num_buckets)
     }
 }
 
 /// Package table struct
-#[derive(PartialEq)]
+#[derive(PartialEq, Serialize, Deserialize)]
 pub struct PackageTable {
     pub header: PackageTableHeader,
     pub buckets: Vec<Option<u32>>,
@@ -179,7 +239,11 @@
         [
             self.header.into_bytes(),
             self.buckets.iter().map(|v| v.unwrap_or(0).to_le_bytes()).collect::<Vec<_>>().concat(),
-            self.nodes.iter().map(|v| v.into_bytes()).collect::<Vec<_>>().concat(),
+            self.nodes
+                .iter()
+                .map(|v| v.into_bytes(self.header.version))
+                .collect::<Vec<_>>()
+                .concat(),
         ]
         .concat()
     }
@@ -198,8 +262,8 @@
             .collect();
         let nodes = (0..num_packages)
             .map(|_| {
-                let node = PackageTableNode::from_bytes(&bytes[head..])?;
-                head += node.into_bytes().len();
+                let node = PackageTableNode::from_bytes(&bytes[head..], header.version)?;
+                head += node.into_bytes(header.version).len();
                 Ok(node)
             })
             .collect::<Result<Vec<_>, AconfigStorageError>>()
@@ -219,11 +283,13 @@
 mod tests {
     use super::*;
     use crate::test_utils::create_test_package_table;
+    use crate::{read_u32_from_start_of_bytes, DEFAULT_FILE_VERSION, MAX_SUPPORTED_FILE_VERSION};
 
     #[test]
     // this test point locks down the table serialization
-    fn test_serialization() {
-        let package_table = create_test_package_table();
+    // TODO: b/376108268 - Use parameterized tests.
+    fn test_serialization_default() {
+        let package_table = create_test_package_table(DEFAULT_FILE_VERSION);
         let header: &PackageTableHeader = &package_table.header;
         let reinterpreted_header = PackageTableHeader::from_bytes(&header.into_bytes());
         assert!(reinterpreted_header.is_ok());
@@ -231,7 +297,32 @@
 
         let nodes: &Vec<PackageTableNode> = &package_table.nodes;
         for node in nodes.iter() {
-            let reinterpreted_node = PackageTableNode::from_bytes(&node.into_bytes()).unwrap();
+            let reinterpreted_node =
+                PackageTableNode::from_bytes(&node.into_bytes(header.version), header.version)
+                    .unwrap();
+            assert_eq!(node, &reinterpreted_node);
+        }
+
+        let package_table_bytes = package_table.into_bytes();
+        let reinterpreted_table = PackageTable::from_bytes(&package_table_bytes);
+        assert!(reinterpreted_table.is_ok());
+        assert_eq!(&package_table, &reinterpreted_table.unwrap());
+        assert_eq!(package_table_bytes.len() as u32, header.file_size);
+    }
+
+    #[test]
+    fn test_serialization_max() {
+        let package_table = create_test_package_table(MAX_SUPPORTED_FILE_VERSION);
+        let header: &PackageTableHeader = &package_table.header;
+        let reinterpreted_header = PackageTableHeader::from_bytes(&header.into_bytes());
+        assert!(reinterpreted_header.is_ok());
+        assert_eq!(header, &reinterpreted_header.unwrap());
+
+        let nodes: &Vec<PackageTableNode> = &package_table.nodes;
+        for node in nodes.iter() {
+            let reinterpreted_node =
+                PackageTableNode::from_bytes(&node.into_bytes(header.version), header.version)
+                    .unwrap();
             assert_eq!(node, &reinterpreted_node);
         }
 
@@ -246,17 +337,36 @@
     // this test point locks down that version number should be at the top of serialized
     // bytes
     fn test_version_number() {
-        let package_table = create_test_package_table();
+        let package_table = create_test_package_table(DEFAULT_FILE_VERSION);
         let bytes = &package_table.into_bytes();
-        let mut head = 0;
-        let version = read_u32_from_bytes(bytes, &mut head).unwrap();
-        assert_eq!(version, 1);
+        let unpacked_version = read_u32_from_start_of_bytes(bytes).unwrap();
+        assert_eq!(unpacked_version, DEFAULT_FILE_VERSION);
+    }
+
+    #[test]
+    fn test_round_trip_default() {
+        let table: PackageTable = create_test_package_table(DEFAULT_FILE_VERSION);
+        let table_bytes = table.into_bytes();
+
+        let reinterpreted_table = PackageTable::from_bytes(&table_bytes).unwrap();
+
+        assert_eq!(table, reinterpreted_table);
+    }
+
+    #[test]
+    fn test_round_trip_max() {
+        let table: PackageTable = create_test_package_table(MAX_SUPPORTED_FILE_VERSION);
+        let table_bytes = table.into_bytes();
+
+        let reinterpreted_table = PackageTable::from_bytes(&table_bytes).unwrap();
+
+        assert_eq!(table, reinterpreted_table);
     }
 
     #[test]
     // this test point locks down file type check
     fn test_file_type_check() {
-        let mut package_table = create_test_package_table();
+        let mut package_table = create_test_package_table(DEFAULT_FILE_VERSION);
         package_table.header.file_type = 123u8;
         let error = PackageTable::from_bytes(&package_table.into_bytes()).unwrap_err();
         assert_eq!(
diff --git a/tools/aconfig/aconfig_storage_file/src/sip_hasher13.rs b/tools/aconfig/aconfig_storage_file/src/sip_hasher13.rs
new file mode 100644
index 0000000..9be3175
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/src/sip_hasher13.rs
@@ -0,0 +1,327 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//! An implementation of SipHash13
+
+use std::cmp;
+use std::mem;
+use std::ptr;
+use std::slice;
+
+use std::hash::Hasher;
+
+/// An implementation of SipHash 1-3.
+///
+#[derive(Debug, Clone, Default)]
+pub struct SipHasher13 {
+    k0: u64,
+    k1: u64,
+    length: usize, // how many bytes we've processed
+    state: State,  // hash State
+    tail: u64,     // unprocessed bytes le
+    ntail: usize,  // how many bytes in tail are valid
+}
+
+#[derive(Debug, Clone, Copy, Default)]
+#[repr(C)]
+struct State {
+    // v0, v2 and v1, v3 show up in pairs in the algorithm,
+    // and simd implementations of SipHash will use vectors
+    // of v02 and v13. By placing them in this order in the struct,
+    // the compiler can pick up on just a few simd optimizations by itself.
+    v0: u64,
+    v2: u64,
+    v1: u64,
+    v3: u64,
+}
+
+macro_rules! compress {
+    ($state:expr) => {{
+        compress!($state.v0, $state.v1, $state.v2, $state.v3)
+    }};
+    ($v0:expr, $v1:expr, $v2:expr, $v3:expr) => {{
+        $v0 = $v0.wrapping_add($v1);
+        $v1 = $v1.rotate_left(13);
+        $v1 ^= $v0;
+        $v0 = $v0.rotate_left(32);
+        $v2 = $v2.wrapping_add($v3);
+        $v3 = $v3.rotate_left(16);
+        $v3 ^= $v2;
+        $v0 = $v0.wrapping_add($v3);
+        $v3 = $v3.rotate_left(21);
+        $v3 ^= $v0;
+        $v2 = $v2.wrapping_add($v1);
+        $v1 = $v1.rotate_left(17);
+        $v1 ^= $v2;
+        $v2 = $v2.rotate_left(32);
+    }};
+}
+
+/// Load an integer of the desired type from a byte stream, in LE order. Uses
+/// `copy_nonoverlapping` to let the compiler generate the most efficient way
+/// to load it from a possibly unaligned address.
+///
+/// Unsafe because: unchecked indexing at i..i+size_of(int_ty)
+macro_rules! load_int_le {
+    ($buf:expr, $i:expr, $int_ty:ident) => {{
+        debug_assert!($i + mem::size_of::<$int_ty>() <= $buf.len());
+        let mut data = 0 as $int_ty;
+        ptr::copy_nonoverlapping(
+            $buf.get_unchecked($i),
+            &mut data as *mut _ as *mut u8,
+            mem::size_of::<$int_ty>(),
+        );
+        data.to_le()
+    }};
+}
+
+/// Load a u64 using up to 7 bytes of a byte slice.
+///
+/// Unsafe because: unchecked indexing at start..start+len
+#[inline]
+unsafe fn u8to64_le(buf: &[u8], start: usize, len: usize) -> u64 {
+    debug_assert!(len < 8);
+    let mut i = 0; // current byte index (from LSB) in the output u64
+    let mut out = 0;
+    if i + 3 < len {
+        out = load_int_le!(buf, start + i, u32) as u64;
+        i += 4;
+    }
+    if i + 1 < len {
+        out |= (load_int_le!(buf, start + i, u16) as u64) << (i * 8);
+        i += 2
+    }
+    if i < len {
+        out |= (*buf.get_unchecked(start + i) as u64) << (i * 8);
+        i += 1;
+    }
+    debug_assert_eq!(i, len);
+    out
+}
+
+impl SipHasher13 {
+    /// Creates a new `SipHasher13` with the two initial keys set to 0.
+    #[inline]
+    pub fn new() -> SipHasher13 {
+        SipHasher13::new_with_keys(0, 0)
+    }
+
+    /// Creates a `SipHasher13` that is keyed off the provided keys.
+    #[inline]
+    pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher13 {
+        let mut sip_hasher = SipHasher13 {
+            k0: key0,
+            k1: key1,
+            length: 0,
+            state: State { v0: 0, v1: 0, v2: 0, v3: 0 },
+            tail: 0,
+            ntail: 0,
+        };
+        sip_hasher.reset();
+        sip_hasher
+    }
+
+    #[inline]
+    fn c_rounds(state: &mut State) {
+        compress!(state);
+    }
+
+    #[inline]
+    fn d_rounds(state: &mut State) {
+        compress!(state);
+        compress!(state);
+        compress!(state);
+    }
+
+    #[inline]
+    fn reset(&mut self) {
+        self.length = 0;
+        self.state.v0 = self.k0 ^ 0x736f6d6570736575;
+        self.state.v1 = self.k1 ^ 0x646f72616e646f6d;
+        self.state.v2 = self.k0 ^ 0x6c7967656e657261;
+        self.state.v3 = self.k1 ^ 0x7465646279746573;
+        self.ntail = 0;
+    }
+
+    // Specialized write function that is only valid for buffers with len <= 8.
+    // It's used to force inlining of write_u8 and write_usize, those would normally be inlined
+    // except for composite types (that includes slices and str hashing because of delimiter).
+    // Without this extra push the compiler is very reluctant to inline delimiter writes,
+    // degrading performance substantially for the most common use cases.
+    #[inline]
+    fn short_write(&mut self, msg: &[u8]) {
+        debug_assert!(msg.len() <= 8);
+        let length = msg.len();
+        self.length += length;
+
+        let needed = 8 - self.ntail;
+        let fill = cmp::min(length, needed);
+        if fill == 8 {
+            // safe to call since msg hasn't been loaded
+            self.tail = unsafe { load_int_le!(msg, 0, u64) };
+        } else {
+            // safe to call since msg hasn't been loaded, and fill <= msg.len()
+            self.tail |= unsafe { u8to64_le(msg, 0, fill) } << (8 * self.ntail);
+            if length < needed {
+                self.ntail += length;
+                return;
+            }
+        }
+        self.state.v3 ^= self.tail;
+        Self::c_rounds(&mut self.state);
+        self.state.v0 ^= self.tail;
+
+        // Buffered tail is now flushed, process new input.
+        self.ntail = length - needed;
+        // safe to call since number of `needed` bytes has been loaded
+        // and self.ntail + needed == msg.len()
+        self.tail = unsafe { u8to64_le(msg, needed, self.ntail) };
+    }
+}
+
+impl Hasher for SipHasher13 {
+    // see short_write comment for explanation
+    #[inline]
+    fn write_usize(&mut self, i: usize) {
+        // safe: we only reinterpret the usize's own bytes as a u8 slice of length
+        // size_of::<usize>()
+        let bytes = unsafe {
+            slice::from_raw_parts(&i as *const usize as *const u8, mem::size_of::<usize>())
+        };
+        self.short_write(bytes);
+    }
+
+    // see short_write comment for explanation
+    #[inline]
+    fn write_u8(&mut self, i: u8) {
+        self.short_write(&[i]);
+    }
+
+    #[inline]
+    fn write(&mut self, msg: &[u8]) {
+        let length = msg.len();
+        self.length += length;
+
+        let mut needed = 0;
+
+        // loading unprocessed byte from last write
+        if self.ntail != 0 {
+            needed = 8 - self.ntail;
+            // safe to call, since msg hasn't been processed
+            // and cmp::min(length, needed) < 8
+            self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << (8 * self.ntail);
+            if length < needed {
+                self.ntail += length;
+                return;
+            } else {
+                self.state.v3 ^= self.tail;
+                Self::c_rounds(&mut self.state);
+                self.state.v0 ^= self.tail;
+                self.ntail = 0;
+            }
+        }
+
+        // Buffered tail is now flushed, process new input.
+        let len = length - needed;
+        let left = len & 0x7;
+
+        let mut i = needed;
+        while i < len - left {
+            // safe to call since if i < len - left, it means msg has at least 1 byte to load
+            let mi = unsafe { load_int_le!(msg, i, u64) };
+
+            self.state.v3 ^= mi;
+            Self::c_rounds(&mut self.state);
+            self.state.v0 ^= mi;
+
+            i += 8;
+        }
+
+        // safe to call: if left == 0 this loads nothing, and if left > 0 there are
+        // exactly `left` unprocessed bytes remaining in msg
+        self.tail = unsafe { u8to64_le(msg, i, left) };
+        self.ntail = left;
+    }
+
+    #[inline]
+    fn finish(&self) -> u64 {
+        let mut state = self.state;
+
+        let b: u64 = ((self.length as u64 & 0xff) << 56) | self.tail;
+
+        state.v3 ^= b;
+        Self::c_rounds(&mut state);
+        state.v0 ^= b;
+
+        state.v2 ^= 0xff;
+        Self::d_rounds(&mut state);
+
+        state.v0 ^ state.v1 ^ state.v2 ^ state.v3
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    use std::hash::{Hash, Hasher};
+    use std::string::String;
+
+    #[test]
+    // this test point locks down SipHash13 string hashing
+    fn test_sip_hash13_string_hash() {
+        let mut sip_hash13 = SipHasher13::new();
+        let test_str1 = String::from("com.google.android.test");
+        test_str1.hash(&mut sip_hash13);
+        assert_eq!(17898838669067067585, sip_hash13.finish());
+
+        let test_str2 = String::from("adfadfadf adfafadadf 1231241241");
+        test_str2.hash(&mut sip_hash13);
+        assert_eq!(13543518987672889310, sip_hash13.finish());
+    }
+
+    #[test]
+    fn test_sip_hash13_write() {
+        let mut sip_hash13 = SipHasher13::new();
+        let test_str1 = String::from("com.google.android.test");
+        sip_hash13.write(test_str1.as_bytes());
+        sip_hash13.write_u8(0xff);
+        assert_eq!(17898838669067067585, sip_hash13.finish());
+
+        let mut sip_hash132 = SipHasher13::new();
+        let test_str1 = String::from("com.google.android.test");
+        sip_hash132.write(test_str1.as_bytes());
+        assert_eq!(9685440969685209025, sip_hash132.finish());
+        sip_hash132.write(test_str1.as_bytes());
+        assert_eq!(6719694176662736568, sip_hash132.finish());
+
+        let mut sip_hash133 = SipHasher13::new();
+        let test_str2 = String::from("abcdefg");
+        test_str2.hash(&mut sip_hash133);
+        assert_eq!(2492161047327640297, sip_hash133.finish());
+
+        let mut sip_hash134 = SipHasher13::new();
+        let test_str3 = String::from("abcdefgh");
+        test_str3.hash(&mut sip_hash134);
+        assert_eq!(6689927370435554326, sip_hash134.finish());
+    }
+
+    #[test]
+    fn test_sip_hash13_write_short() {
+        let mut sip_hash13 = SipHasher13::new();
+        sip_hash13.write_u8(0x61);
+        assert_eq!(4644417185603328019, sip_hash13.finish());
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_file/src/test_utils.rs b/tools/aconfig/aconfig_storage_file/src/test_utils.rs
index 106666c..7c603df 100644
--- a/tools/aconfig/aconfig_storage_file/src/test_utils.rs
+++ b/tools/aconfig/aconfig_storage_file/src/test_utils.rs
@@ -24,32 +24,59 @@
 use std::io::Write;
 use tempfile::NamedTempFile;
 
-pub fn create_test_package_table() -> PackageTable {
+pub fn create_test_package_table(version: u32) -> PackageTable {
     let header = PackageTableHeader {
-        version: 1,
+        version,
         container: String::from("mockup"),
         file_type: StorageFileType::PackageMap as u8,
-        file_size: 209,
+        file_size: match version {
+            1 => 209,
+            2 => 233,
+            _ => panic!("Unsupported version."),
+        },
         num_packages: 3,
         bucket_offset: 31,
         node_offset: 59,
     };
-    let buckets: Vec<Option<u32>> = vec![Some(59), None, None, Some(109), None, None, None];
+    let buckets: Vec<Option<u32>> = match version {
+        1 => vec![Some(59), None, None, Some(109), None, None, None],
+        2 => vec![Some(59), None, None, Some(117), None, None, None],
+        _ => panic!("Unsupported version."),
+    };
     let first_node = PackageTableNode {
         package_name: String::from("com.android.aconfig.storage.test_2"),
         package_id: 1,
+        fingerprint: match version {
+            1 => 0,
+            2 => 4431940502274857964u64,
+            _ => panic!("Unsupported version."),
+        },
         boolean_start_index: 3,
         next_offset: None,
     };
     let second_node = PackageTableNode {
         package_name: String::from("com.android.aconfig.storage.test_1"),
         package_id: 0,
+        fingerprint: match version {
+            1 => 0,
+            2 => 15248948510590158086u64,
+            _ => panic!("Unsupported version."),
+        },
         boolean_start_index: 0,
-        next_offset: Some(159),
+        next_offset: match version {
+            1 => Some(159),
+            2 => Some(175),
+            _ => panic!("Unsupported version."),
+        },
     };
     let third_node = PackageTableNode {
         package_name: String::from("com.android.aconfig.storage.test_4"),
         package_id: 2,
+        fingerprint: match version {
+            1 => 0,
+            2 => 16233229917711622375u64,
+            _ => panic!("Unsupported version."),
+        },
         boolean_start_index: 6,
         next_offset: None,
     };
@@ -76,9 +103,9 @@
     }
 }
 
-pub fn create_test_flag_table() -> FlagTable {
+pub fn create_test_flag_table(version: u32) -> FlagTable {
     let header = FlagTableHeader {
-        version: 1,
+        version,
         container: String::from("mockup"),
         file_type: StorageFileType::FlagMap as u8,
         file_size: 321,
@@ -118,9 +145,9 @@
     FlagTable { header, buckets, nodes }
 }
 
-pub fn create_test_flag_value_list() -> FlagValueList {
+pub fn create_test_flag_value_list(version: u32) -> FlagValueList {
     let header = FlagValueHeader {
-        version: 1,
+        version,
         container: String::from("mockup"),
         file_type: StorageFileType::FlagVal as u8,
         file_size: 35,
@@ -131,9 +158,9 @@
     FlagValueList { header, booleans }
 }
 
-pub fn create_test_flag_info_list() -> FlagInfoList {
+pub fn create_test_flag_info_list(version: u32) -> FlagInfoList {
     let header = FlagInfoHeader {
-        version: 1,
+        version,
         container: String::from("mockup"),
         file_type: StorageFileType::FlagInfo as u8,
         file_size: 35,
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/AconfigStorageException.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/AconfigStorageException.java
index 86a75f2..324c55d 100644
--- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/AconfigStorageException.java
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/AconfigStorageException.java
@@ -16,12 +16,122 @@
 
 package android.aconfig.storage;
 
+/**
+ * Exception thrown when an error occurs while accessing Aconfig Storage.
+ *
+ * <p>This exception indicates a general problem with Aconfig Storage, such as an inability to read
+ * or write data.
+ */
 public class AconfigStorageException extends RuntimeException {
+
+    /** Generic error code indicating an unspecified Aconfig Storage error. */
+    public static final int ERROR_GENERIC = 0;
+
+    /** Error code indicating that the Aconfig Storage system is not found on the device. */
+    public static final int ERROR_STORAGE_SYSTEM_NOT_FOUND = 1;
+
+    /** Error code indicating that the requested configuration package is not found. */
+    public static final int ERROR_PACKAGE_NOT_FOUND = 2;
+
+    /** Error code indicating that the specified container is not found. */
+    public static final int ERROR_CONTAINER_NOT_FOUND = 3;
+
+    /** Error code indicating that there was an error reading the Aconfig Storage file. */
+    public static final int ERROR_CANNOT_READ_STORAGE_FILE = 4;
+
+    /** Error code indicating that the storage file fingerprint does not match the expected value. */
+    public static final int ERROR_FILE_FINGERPRINT_MISMATCH = 5;
+
+    private final int mErrorCode;
+
+    /**
+     * Constructs a new {@code AconfigStorageException} with a generic error code and the specified
+     * detail message.
+     *
+     * @param msg The detail message for this exception.
+     */
     public AconfigStorageException(String msg) {
         super(msg);
+        mErrorCode = ERROR_GENERIC;
     }
 
+    /**
+     * Constructs a new {@code AconfigStorageException} with a generic error code, the specified
+     * detail message, and cause.
+     *
+     * @param msg The detail message for this exception.
+     * @param cause The cause of this exception.
+     */
     public AconfigStorageException(String msg, Throwable cause) {
         super(msg, cause);
+        mErrorCode = ERROR_GENERIC;
+    }
+
+    /**
+     * Constructs a new {@code AconfigStorageException} with the specified error code and detail
+     * message.
+     *
+     * @param errorCode The error code for this exception.
+     * @param msg The detail message for this exception.
+     */
+    public AconfigStorageException(int errorCode, String msg) {
+        super(msg);
+        mErrorCode = errorCode;
+    }
+
+    /**
+     * Constructs a new {@code AconfigStorageException} with the specified error code, detail
+     * message, and cause.
+     *
+     * @param errorCode The error code for this exception.
+     * @param msg The detail message for this exception.
+     * @param cause The cause of this exception.
+     */
+    public AconfigStorageException(int errorCode, String msg, Throwable cause) {
+        super(msg, cause);
+        mErrorCode = errorCode;
+    }
+
+    /**
+     * Returns the error code associated with this exception.
+     *
+     * @return The error code.
+     */
+    public int getErrorCode() {
+        return mErrorCode;
+    }
+
+    /**
+     * Returns the error message for this exception, including the error code and the original
+     * message.
+     *
+     * @return The error message.
+     */
+    @Override
+    public String getMessage() {
+        return errorString() + ": " + super.getMessage();
+    }
+
+    /**
+     * Returns a string representation of the error code.
+     *
+     * @return The error code string.
+     */
+    private String errorString() {
+        switch (mErrorCode) {
+            case ERROR_GENERIC:
+                return "ERROR_GENERIC";
+            case ERROR_STORAGE_SYSTEM_NOT_FOUND:
+                return "ERROR_STORAGE_SYSTEM_NOT_FOUND";
+            case ERROR_PACKAGE_NOT_FOUND:
+                return "ERROR_PACKAGE_NOT_FOUND";
+            case ERROR_CONTAINER_NOT_FOUND:
+                return "ERROR_CONTAINER_NOT_FOUND";
+            case ERROR_CANNOT_READ_STORAGE_FILE:
+                return "ERROR_CANNOT_READ_STORAGE_FILE";
+            case ERROR_FILE_FINGERPRINT_MISMATCH:
+                return "ERROR_FILE_FINGERPRINT_MISMATCH";
+            default:
+                return "<Unknown error code " + mErrorCode + ">";
+        }
     }
 }
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java
index 1c72364..9571568 100644
--- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java
@@ -41,13 +41,25 @@
         return this.mByteBuffer.getInt();
     }
 
+    public long readLong() {
+        return this.mByteBuffer.getLong();
+    }
+
     public String readString() {
         int length = readInt();
+        if (length > 1024) {
+            throw new AconfigStorageException(
+                    "String length exceeds maximum allowed size (1024 bytes): " + length);
+        }
         byte[] bytes = new byte[length];
         mByteBuffer.get(bytes, 0, length);
         return new String(bytes, StandardCharsets.UTF_8);
     }
 
+    public int readByte(int i) {
+        return Byte.toUnsignedInt(mByteBuffer.get(i));
+    }
+
     public void position(int newPosition) {
         mByteBuffer.position(newPosition);
     }
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FileType.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FileType.java
index b0b1b9b..c354873 100644
--- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FileType.java
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FileType.java
@@ -42,4 +42,20 @@
                 return null;
         }
     }
+
+    @Override
+    public String toString() {
+        switch (type) {
+            case 0:
+                return "PACKAGE_MAP";
+            case 1:
+                return "FLAG_MAP";
+            case 2:
+                return "FLAG_VAL";
+            case 3:
+                return "FLAG_INFO";
+            default:
+                return "unrecognized type";
+        }
+    }
 }
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java
index e85fdee..757844a 100644
--- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java
@@ -16,41 +16,57 @@
 
 package android.aconfig.storage;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
+
 import java.nio.ByteBuffer;
-import java.util.HashMap;
-import java.util.Map;
 import java.util.Objects;
 
 public class FlagTable {
 
     private Header mHeader;
-    private Map<String, Node> mNodeMap;
+    private ByteBufferReader mReader;
 
     public static FlagTable fromBytes(ByteBuffer bytes) {
         FlagTable flagTable = new FlagTable();
-        ByteBufferReader reader = new ByteBufferReader(bytes);
-        Header header = Header.fromBytes(reader);
-        flagTable.mHeader = header;
-        flagTable.mNodeMap = new HashMap(TableUtils.getTableSize(header.mNumFlags));
-        reader.position(header.mNodeOffset);
-        for (int i = 0; i < header.mNumFlags; i++) {
-            Node node = Node.fromBytes(reader);
-            flagTable.mNodeMap.put(makeKey(node.mPackageId, node.mFlagName), node);
-        }
+        flagTable.mReader = new ByteBufferReader(bytes);
+        flagTable.mHeader = Header.fromBytes(flagTable.mReader);
+
         return flagTable;
     }
 
     public Node get(int packageId, String flagName) {
-        return mNodeMap.get(makeKey(packageId, flagName));
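+        // Walk the table lazily instead of materializing a map: hash the "packageId/flagName"
+        // key into a bucket, read that bucket's node offset, then follow the collision chain
+        // until both the package id and flag name match or the chain ends.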
+        int numBuckets = (mHeader.mNodeOffset - mHeader.mBucketOffset) / 4;
+        int bucketIndex = TableUtils.getBucketIndex(makeKey(packageId, flagName), numBuckets);
+        int newPosition = mHeader.mBucketOffset + bucketIndex * 4;
+        if (newPosition >= mHeader.mNodeOffset) {
+            return null;
+        }
+
+        mReader.position(newPosition);
+        int nodeIndex = mReader.readInt();
+        if (nodeIndex < mHeader.mNodeOffset || nodeIndex >= mHeader.mFileSize) {
+            return null;
+        }
+
+        while (nodeIndex != -1) {
+            mReader.position(nodeIndex);
+            Node node = Node.fromBytes(mReader);
+            if (Objects.equals(flagName, node.mFlagName) && packageId == node.mPackageId) {
+                return node;
+            }
+            nodeIndex = node.mNextOffset;
+        }
+
+        return null;
     }
 
     public Header getHeader() {
         return mHeader;
     }
 
-    private static String makeKey(int packageId, String flagName) {
+    private static byte[] makeKey(int packageId, String flagName) {
         StringBuilder ret = new StringBuilder();
-        return ret.append(packageId).append('/').append(flagName).toString();
+        return ret.append(packageId).append('/').append(flagName).toString().getBytes(UTF_8);
     }
 
     public static class Header {
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java
index 0ddc147..493436d 100644
--- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java
@@ -17,33 +17,21 @@
 package android.aconfig.storage;
 
 import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
 
 public class FlagValueList {
 
     private Header mHeader;
-    private List<Boolean> mList;
-
-    private int mSize;
+    private ByteBufferReader mReader;
 
     public static FlagValueList fromBytes(ByteBuffer bytes) {
         FlagValueList flagValueList = new FlagValueList();
-        ByteBufferReader reader = new ByteBufferReader(bytes);
-        Header header = Header.fromBytes(reader);
-        flagValueList.mHeader = header;
-        flagValueList.mList = new ArrayList(header.mNumFlags);
-        reader.position(header.mBooleanValueOffset);
-        for (int i = 0; i < header.mNumFlags; i++) {
-            boolean val = reader.readByte() == 1;
-            flagValueList.mList.add(val);
-        }
-        flagValueList.mSize = flagValueList.mList.size();
+        flagValueList.mReader = new ByteBufferReader(bytes);
+        flagValueList.mHeader = Header.fromBytes(flagValueList.mReader);
         return flagValueList;
     }
 
-    public boolean get(int index) {
-        return mList.get(index);
+    public boolean getBoolean(int index) {
+        return mReader.readByte(mHeader.mBooleanValueOffset + index) == 1;
     }
 
     public Header getHeader() {
@@ -51,7 +39,7 @@
     }
 
     public int size() {
-        return mSize;
+        return mHeader.mNumFlags;
     }
 
     public static class Header {
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java
index d04e1ac..a45d12a 100644
--- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java
@@ -16,32 +16,48 @@
 
 package android.aconfig.storage;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
+
 import java.nio.ByteBuffer;
-import java.util.HashMap;
-import java.util.Map;
 import java.util.Objects;
 
 public class PackageTable {
 
     private Header mHeader;
-    private Map<String, Node> mNodeMap;
+    private ByteBufferReader mReader;
 
     public static PackageTable fromBytes(ByteBuffer bytes) {
         PackageTable packageTable = new PackageTable();
-        ByteBufferReader reader = new ByteBufferReader(bytes);
-        Header header = Header.fromBytes(reader);
-        packageTable.mHeader = header;
-        packageTable.mNodeMap = new HashMap(TableUtils.getTableSize(header.mNumPackages));
-        reader.position(header.mNodeOffset);
-        for (int i = 0; i < header.mNumPackages; i++) {
-            Node node = Node.fromBytes(reader);
-            packageTable.mNodeMap.put(node.mPackageName, node);
-        }
+        packageTable.mReader = new ByteBufferReader(bytes);
+        packageTable.mHeader = Header.fromBytes(packageTable.mReader);
+
         return packageTable;
     }
 
     public Node get(String packageName) {
-        return mNodeMap.get(packageName);
+        int numBuckets = (mHeader.mNodeOffset - mHeader.mBucketOffset) / 4;
+        int bucketIndex = TableUtils.getBucketIndex(packageName.getBytes(UTF_8), numBuckets);
+        int newPosition = mHeader.mBucketOffset + bucketIndex * 4;
+        if (newPosition >= mHeader.mNodeOffset) {
+            return null;
+        }
+        mReader.position(newPosition);
+        int nodeIndex = mReader.readInt();
+
+        if (nodeIndex < mHeader.mNodeOffset || nodeIndex >= mHeader.mFileSize) {
+            return null;
+        }
+
+        while (nodeIndex != -1) {
+            mReader.position(nodeIndex);
+            Node node = Node.fromBytes(mReader, mHeader.mVersion);
+            if (Objects.equals(packageName, node.mPackageName)) {
+                return node;
+            }
+            nodeIndex = node.mNextOffset;
+        }
+
+        return null;
     }
 
     public Header getHeader() {
@@ -58,7 +74,7 @@
         private int mBucketOffset;
         private int mNodeOffset;
 
-        public static Header fromBytes(ByteBufferReader reader) {
+        private static Header fromBytes(ByteBufferReader reader) {
             Header header = new Header();
             header.mVersion = reader.readInt();
             header.mContainer = reader.readString();
@@ -108,10 +124,24 @@
 
         private String mPackageName;
         private int mPackageId;
+        private long mPackageFingerprint;
         private int mBooleanStartIndex;
         private int mNextOffset;
+        private boolean mHasPackageFingerprint;
 
-        public static Node fromBytes(ByteBufferReader reader) {
+        private static Node fromBytes(ByteBufferReader reader, int version) {
+            switch (version) {
+                case 1:
+                    return fromBytesV1(reader);
+                case 2:
+                    return fromBytesV2(reader);
+                default:
+                    // TODO: consider throwing an AconfigStorageException for unsupported
+                    // versions instead of silently returning an empty node.
+                    return new Node();
+            }
+        }
+
+        private static Node fromBytesV1(ByteBufferReader reader) {
             Node node = new Node();
             node.mPackageName = reader.readString();
             node.mPackageId = reader.readInt();
@@ -121,6 +151,18 @@
             return node;
         }
 
+        private static Node fromBytesV2(ByteBufferReader reader) {
+            Node node = new Node();
+            node.mPackageName = reader.readString();
+            node.mPackageId = reader.readInt();
+            node.mPackageFingerprint = reader.readLong();
+            node.mBooleanStartIndex = reader.readInt();
+            node.mNextOffset = reader.readInt();
+            node.mNextOffset = node.mNextOffset == 0 ? -1 : node.mNextOffset;
+            node.mHasPackageFingerprint = true;
+            return node;
+        }
+
         @Override
         public int hashCode() {
             return Objects.hash(mPackageName, mPackageId, mBooleanStartIndex, mNextOffset);
@@ -151,6 +193,10 @@
             return mPackageId;
         }
 
+        public long getPackageFingerprint() {
+            return mPackageFingerprint;
+        }
+
         public int getBooleanStartIndex() {
             return mBooleanStartIndex;
         }
@@ -158,5 +204,9 @@
         public int getNextOffset() {
             return mNextOffset;
         }
+
+        public boolean hasPackageFingerprint() {
+            return mHasPackageFingerprint;
+        }
     }
 }
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/SipHasher13.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/SipHasher13.java
new file mode 100644
index 0000000..64714ee
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/SipHasher13.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage;
+
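+/**
+ * A SipHash-1-3 implementation (one compression round per word, three finalization rounds)
+ * mirroring the Rust sip_hasher13.rs added in this change, so that the Java and Rust readers
+ * map the same key bytes to the same hash buckets.
+ */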
+public class SipHasher13 {
+    static class State {
+        private long v0;
+        private long v2;
+        private long v1;
+        private long v3;
+
+        public State(long k0, long k1) {
+            v0 = k0 ^ 0x736f6d6570736575L;
+            v1 = k1 ^ 0x646f72616e646f6dL;
+            v2 = k0 ^ 0x6c7967656e657261L;
+            v3 = k1 ^ 0x7465646279746573L;
+        }
+
+        public void compress(long m) {
+            v3 ^= m;
+            cRounds();
+            v0 ^= m;
+        }
+
+        public long finish() {
+            v2 ^= 0xff;
+            dRounds();
+            return v0 ^ v1 ^ v2 ^ v3;
+        }
+
+        private void cRounds() {
+            v0 += v1;
+            v1 = Long.rotateLeft(v1, 13);
+            v1 ^= v0;
+            v0 = Long.rotateLeft(v0, 32);
+            v2 += v3;
+            v3 = Long.rotateLeft(v3, 16);
+            v3 ^= v2;
+            v0 += v3;
+            v3 = Long.rotateLeft(v3, 21);
+            v3 ^= v0;
+            v2 += v1;
+            v1 = Long.rotateLeft(v1, 17);
+            v1 ^= v2;
+            v2 = Long.rotateLeft(v2, 32);
+        }
+
+        private void dRounds() {
+            for (int i = 0; i < 3; i++) {
+                v0 += v1;
+                v1 = Long.rotateLeft(v1, 13);
+                v1 ^= v0;
+                v0 = Long.rotateLeft(v0, 32);
+                v2 += v3;
+                v3 = Long.rotateLeft(v3, 16);
+                v3 ^= v2;
+                v0 += v3;
+                v3 = Long.rotateLeft(v3, 21);
+                v3 ^= v0;
+                v2 += v1;
+                v1 = Long.rotateLeft(v1, 17);
+                v1 ^= v2;
+                v2 = Long.rotateLeft(v2, 32);
+            }
+        }
+    }
+
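+    /**
+     * Hashes {@code data} with SipHash-1-3 using both keys set to zero. The input is padded
+     * with a trailing 0xff byte so the digest matches the Rust keying in get_bucket_index,
+     * which calls write(bytes) followed by write_u8(0xff) before finish().
+     */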
+    public static long hash(byte[] data) {
+        State state = new State(0, 0);
+        int len = data.length;
+        int left = len & 0x7;
+        int index = 0;
+
+        while (index < len - left) {
+            long mi = loadLe(data, index, 8);
+            index += 8;
+            state.compress(mi);
+        }
+
+        // Pad the tail with a 0xff marker byte, matching the Rust SipHasher13 padding.
+        long m = (0xffL << (left * 8)) | loadLe(data, index, left);
+        if (left == 0x7) {
+            // 7 tail bytes leave no room for the length byte; compress this word and start a new one.
+            state.compress(m);
+            m = 0L;
+        }
+        // The top byte holds the low byte of (len + 1), counting the 0xff pad byte.
+        m |= (((len + 1) & 0xffL) << 56);
+        state.compress(m);
+
+        return state.finish();
+    }
+
+    private static long loadLe(byte[] data, int offset, int size) {
+        long m = 0;
+        for (int i = 0; i < size; i++) {
+            m |= (data[i + offset] & 0xffL) << (i * 8);
+        }
+        return m;
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/StorageFileProvider.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/StorageFileProvider.java
new file mode 100644
index 0000000..f1a4e26
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/StorageFileProvider.java
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage;
+
+import java.io.Closeable;
+import java.nio.MappedByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.file.DirectoryStream;
+import java.nio.file.Files;
+import java.nio.file.NoSuchFileException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+/** @hide */
+public class StorageFileProvider {
+
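+    // Layout: <container>.package.map and <container>.flag.map are read from
+    // the maps directory, and <container>.val is read from the boot directory.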
+    private static final String DEFAULT_MAP_PATH = "/metadata/aconfig/maps/";
+    private static final String DEFAULT_BOOT_PATH = "/metadata/aconfig/boot/";
+    private static final String PMAP_FILE_EXT = ".package.map";
+    private static final String FMAP_FILE_EXT = ".flag.map";
+    private static final String VAL_FILE_EXT = ".val";
+
+    private final String mMapPath;
+    private final String mBootPath;
+
+    /** @hide */
+    public static StorageFileProvider getDefaultProvider() {
+        return new StorageFileProvider(DEFAULT_MAP_PATH, DEFAULT_BOOT_PATH);
+    }
+
+    /** @hide */
+    public StorageFileProvider(String mapPath, String bootPath) {
+        mMapPath = mapPath;
+        mBootPath = bootPath;
+    }
+
+    /** @hide */
+    public List<String> listContainers(String[] excludes) {
+        List<String> result = new ArrayList<>();
+        Set<String> set = new HashSet<>(Arrays.asList(excludes));
+
+        try (DirectoryStream<Path> stream =
+                Files.newDirectoryStream(
+                        Paths.get(mMapPath), "*" + PMAP_FILE_EXT)) {
+            for (Path entry : stream) {
+                String fileName = entry.getFileName().toString();
+                String container =
+                        fileName.substring(0, fileName.length() - PMAP_FILE_EXT.length());
+                if (!set.contains(container)) {
+                    result.add(container);
+                }
+            }
+        } catch (NoSuchFileException e) {
+            return result;
+        } catch (Exception e) {
+            throw new AconfigStorageException(
+                    String.format("Fail to list map files in path %s", mMapPath), e);
+        }
+
+        return result;
+    }
+
+    /** @hide */
+    public PackageTable getPackageTable(String container) {
+        return getPackageTable(Paths.get(mMapPath, container + PMAP_FILE_EXT));
+    }
+
+    /** @hide */
+    public FlagTable getFlagTable(String container) {
+        return FlagTable.fromBytes(
+                mapStorageFile(Paths.get(mMapPath, container + FMAP_FILE_EXT), FileType.FLAG_MAP));
+    }
+
+    /** @hide */
+    public FlagValueList getFlagValueList(String container) {
+        return FlagValueList.fromBytes(
+                mapStorageFile(Paths.get(mBootPath, container + VAL_FILE_EXT), FileType.FLAG_VAL));
+    }
+
+    /** @hide */
+    public static PackageTable getPackageTable(Path path) {
+        return PackageTable.fromBytes(mapStorageFile(path, FileType.PACKAGE_MAP));
+    }
+
+    // Map a storage file, given its path, as a read-only MappedByteBuffer (valid even after the channel is closed).
+    private static MappedByteBuffer mapStorageFile(Path file, FileType type) {
+        FileChannel channel = null;
+        try {
+            channel = FileChannel.open(file, StandardOpenOption.READ);
+            return channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
+        } catch (Exception e) {
+            throw new AconfigStorageException(
+                    AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
+                    String.format("Fail to mmap storage %s file %s", type.toString(), file),
+                    e);
+        } finally {
+            quietlyDispose(channel);
+        }
+    }
+
+    private static void quietlyDispose(Closeable closable) {
+        try {
+            if (closable != null) {
+                closable.close();
+            }
+        } catch (Exception e) {
+            // Nothing useful to do here; ignore failures while closing.
+        }
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java
index 714b53b..81168f5 100644
--- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java
+++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java
@@ -58,4 +58,9 @@
         }
         throw new AconfigStorageException("Number of items in a hash table exceeds limit");
     }
+
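+    /**
+     * Returns the hash table bucket for a key: the SipHash-1-3 of the raw key
+     * bytes, reduced modulo the bucket count with an unsigned remainder so
+     * negative hash values still map into range.
+     */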
+    public static int getBucketIndex(byte[] val, int numBuckets) {
+        long hashVal = SipHasher13.hash(val);
+        return (int) Long.remainderUnsigned(hashVal, numBuckets);
+    }
 }
diff --git a/tools/aconfig/aconfig_storage_file/tests/Android.bp b/tools/aconfig/aconfig_storage_file/tests/Android.bp
index c33127f..bd46d5f 100644
--- a/tools/aconfig/aconfig_storage_file/tests/Android.bp
+++ b/tools/aconfig/aconfig_storage_file/tests/Android.bp
@@ -10,10 +10,14 @@
         "libbase",
     ],
     data: [
-        "package.map",
-        "flag.map",
-        "flag.val",
-        "flag.info",
+        "data/v1/package_v1.map",
+        "data/v1/flag_v1.map",
+        "data/v1/flag_v1.val",
+        "data/v1/flag_v1.info",
+        "data/v2/package_v2.map",
+        "data/v2/flag_v2.map",
+        "data/v2/flag_v2.val",
+        "data/v2/flag_v2.info",
     ],
     test_suites: [
         "device-tests",
@@ -28,19 +32,24 @@
         "srcs/**/*.java",
     ],
     static_libs: [
-        "aconfig_storage_file_java",
         "androidx.test.runner",
         "junit",
+        "aconfig_storage_file_java",
     ],
-    sdk_version: "test_current",
     test_config: "AndroidStorageJaveTest.xml",
+    sdk_version: "test_current",
     data: [
-        "package.map",
-        "flag.map",
-        "flag.val",
-        "flag.info",
+        "data/v1/package_v1.map",
+        "data/v1/flag_v1.map",
+        "data/v1/flag_v1.val",
+        "data/v1/flag_v1.info",
+        "data/v2/package_v2.map",
+        "data/v2/flag_v2.map",
+        "data/v2/flag_v2.val",
+        "data/v2/flag_v2.info",
     ],
     test_suites: [
         "general-tests",
     ],
+    jarjar_rules: "jarjar.txt",
 }
diff --git a/tools/aconfig/aconfig_storage_file/tests/AndroidStorageJaveTest.xml b/tools/aconfig/aconfig_storage_file/tests/AndroidStorageJaveTest.xml
index 2d52d44..bfc238e 100644
--- a/tools/aconfig/aconfig_storage_file/tests/AndroidStorageJaveTest.xml
+++ b/tools/aconfig/aconfig_storage_file/tests/AndroidStorageJaveTest.xml
@@ -21,13 +21,18 @@
     </target_preparer>
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.FilePusher">
         <option name="cleanup" value="true" />
-        <option name="push" value="package.map->/data/local/tmp/aconfig_storage_file_test_java/testdata/package.map" />
-        <option name="push" value="flag.map->/data/local/tmp/aconfig_storage_file_test_java/testdata/flag.map" />
-        <option name="push" value="flag.val->/data/local/tmp/aconfig_storage_file_test_java/testdata/flag.val" />
-        <option name="push" value="flag.info->/data/local/tmp/aconfig_storage_file_test_java/testdata/flag.info" />
+        <option name="push" value="package_v1.map->/data/local/tmp/aconfig_storage_file_test_java/testdata/mock.v1.package.map" />
+        <option name="push" value="flag_v1.map->/data/local/tmp/aconfig_storage_file_test_java/testdata/mock.v1.flag.map" />
+        <option name="push" value="flag_v1.val->/data/local/tmp/aconfig_storage_file_test_java/testdata/mock.v1.val" />
+        <option name="push" value="flag_v1.info->/data/local/tmp/aconfig_storage_file_test_java/testdata/mock.v1.info" />
+        <option name="push" value="package_v2.map->/data/local/tmp/aconfig_storage_file_test_java/testdata/mock.v2.package.map" />
+        <option name="push" value="flag_v2.map->/data/local/tmp/aconfig_storage_file_test_java/testdata/mock.v2.flag.map" />
+        <option name="push" value="flag_v2.val->/data/local/tmp/aconfig_storage_file_test_java/testdata/mock.v2.val" />
+        <option name="push" value="flag_v2.info->/data/local/tmp/aconfig_storage_file_test_java/testdata/mock.v2.info" />
+        <option name="post-push" value="chmod +r /data/local/tmp/aconfig_storage_file_test_java/testdata/" />
     </target_preparer>
     <test class="com.android.tradefed.testtype.AndroidJUnitTest" >
         <option name="package" value="android.aconfig.storage.test" />
         <option name="runtime-hint" value="1m" />
     </test>
-</configuration>
\ No newline at end of file
+</configuration>
diff --git a/tools/aconfig/aconfig_storage_file/tests/flag.info b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag_v1.info
similarity index 100%
rename from tools/aconfig/aconfig_storage_file/tests/flag.info
rename to tools/aconfig/aconfig_storage_file/tests/data/v1/flag_v1.info
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/flag.map b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag_v1.map
similarity index 100%
rename from tools/aconfig/aconfig_storage_file/tests/flag.map
rename to tools/aconfig/aconfig_storage_file/tests/data/v1/flag_v1.map
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/flag.val b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag_v1.val
similarity index 100%
rename from tools/aconfig/aconfig_storage_file/tests/flag.val
rename to tools/aconfig/aconfig_storage_file/tests/data/v1/flag_v1.val
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/package.map b/tools/aconfig/aconfig_storage_file/tests/data/v1/package_v1.map
similarity index 100%
rename from tools/aconfig/aconfig_storage_file/tests/package.map
rename to tools/aconfig/aconfig_storage_file/tests/data/v1/package_v1.map
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.info b/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.info
new file mode 100644
index 0000000..9db7fde
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.info
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.map b/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.map
new file mode 100644
index 0000000..cf4685c
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.map
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.val b/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.val
new file mode 100644
index 0000000..37d4750
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/data/v2/flag_v2.val
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/data/v2/package_v2.map b/tools/aconfig/aconfig_storage_file/tests/data/v2/package_v2.map
new file mode 100644
index 0000000..0a9f95e
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/data/v2/package_v2.map
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_file/tests/jarjar.txt b/tools/aconfig/aconfig_storage_file/tests/jarjar.txt
new file mode 100644
index 0000000..24952ec
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/jarjar.txt
@@ -0,0 +1,17 @@
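+# Repackage the statically linked android.aconfig.storage classes into the
+# android.aconfig.storage.test namespace for this test.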
+rule android.aconfig.storage.AconfigStorageException android.aconfig.storage.test.AconfigStorageException
+rule android.aconfig.storage.FlagTable android.aconfig.storage.test.FlagTable
+rule android.aconfig.storage.PackageTable android.aconfig.storage.test.PackageTable
+rule android.aconfig.storage.ByteBufferReader android.aconfig.storage.test.ByteBufferReader
+rule android.aconfig.storage.FlagType android.aconfig.storage.test.FlagType
+rule android.aconfig.storage.SipHasher13 android.aconfig.storage.test.SipHasher13
+rule android.aconfig.storage.FileType android.aconfig.storage.test.FileType
+rule android.aconfig.storage.FlagValueList android.aconfig.storage.test.FlagValueList
+rule android.aconfig.storage.TableUtils android.aconfig.storage.test.TableUtils
+rule android.aconfig.storage.AconfigPackageImpl android.aconfig.storage.test.AconfigPackageImpl
+rule android.aconfig.storage.StorageFileProvider android.aconfig.storage.test.StorageFileProvider
+
+
+rule android.aconfig.storage.FlagTable$* android.aconfig.storage.test.FlagTable$@1
+rule android.aconfig.storage.PackageTable$* android.aconfig.storage.test.PackageTable$@1
+rule android.aconfig.storage.FlagValueList$* android.aconfig.storage.test.FlagValueList$@1
+rule android.aconfig.storage.SipHasher13$* android.aconfig.storage.test.SipHasher13$@1
diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/FlagTableTest.java b/tools/aconfig/aconfig_storage_file/tests/srcs/FlagTableTest.java
index fd40d4c..dc465b6 100644
--- a/tools/aconfig/aconfig_storage_file/tests/srcs/FlagTableTest.java
+++ b/tools/aconfig/aconfig_storage_file/tests/srcs/FlagTableTest.java
@@ -31,7 +31,7 @@
 
     @Test
     public void testFlagTable_rightHeader() throws Exception {
-        FlagTable flagTable = FlagTable.fromBytes(TestDataUtils.getTestFlagMapByteBuffer());
+        FlagTable flagTable = FlagTable.fromBytes(TestDataUtils.getTestFlagMapByteBuffer(1));
         FlagTable.Header header = flagTable.getHeader();
         assertEquals(1, header.getVersion());
         assertEquals("mockup", header.getContainer());
@@ -44,7 +44,7 @@
 
     @Test
     public void testFlagTable_rightNode() throws Exception {
-        FlagTable flagTable = FlagTable.fromBytes(TestDataUtils.getTestFlagMapByteBuffer());
+        FlagTable flagTable = FlagTable.fromBytes(TestDataUtils.getTestFlagMapByteBuffer(1));
 
         FlagTable.Node node1 = flagTable.get(0, "enabled_ro");
         FlagTable.Node node2 = flagTable.get(0, "enabled_rw");
diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java b/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java
index c18590a..306df7d 100644
--- a/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java
+++ b/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java
@@ -34,7 +34,7 @@
     @Test
     public void testFlagValueList_rightHeader() throws Exception {
         FlagValueList flagValueList =
-                FlagValueList.fromBytes(TestDataUtils.getTestFlagValByteBuffer());
+                FlagValueList.fromBytes(TestDataUtils.getTestFlagValByteBuffer(1));
         FlagValueList.Header header = flagValueList.getHeader();
         assertEquals(1, header.getVersion());
         assertEquals("mockup", header.getContainer());
@@ -47,31 +47,31 @@
     @Test
     public void testFlagValueList_rightNode() throws Exception {
         FlagValueList flagValueList =
-                FlagValueList.fromBytes(TestDataUtils.getTestFlagValByteBuffer());
+                FlagValueList.fromBytes(TestDataUtils.getTestFlagValByteBuffer(1));
 
         boolean[] expected = new boolean[] {false, true, true, false, true, true, true, true};
         assertEquals(expected.length, flagValueList.size());
 
         for (int i = 0; i < flagValueList.size(); i++) {
-            assertEquals(expected[i], flagValueList.get(i));
+            assertEquals(expected[i], flagValueList.getBoolean(i));
         }
     }
 
     @Test
     public void testFlagValueList_getValue() throws Exception {
         PackageTable packageTable =
-                PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer());
-        FlagTable flagTable = FlagTable.fromBytes(TestDataUtils.getTestFlagMapByteBuffer());
+                PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer(1));
+        FlagTable flagTable = FlagTable.fromBytes(TestDataUtils.getTestFlagMapByteBuffer(1));
 
         FlagValueList flagValueList =
-                FlagValueList.fromBytes(TestDataUtils.getTestFlagValByteBuffer());
+                FlagValueList.fromBytes(TestDataUtils.getTestFlagValByteBuffer(1));
 
         PackageTable.Node pNode = packageTable.get("com.android.aconfig.storage.test_1");
         FlagTable.Node fNode = flagTable.get(pNode.getPackageId(), "enabled_rw");
-        assertTrue(flagValueList.get(pNode.getBooleanStartIndex() + fNode.getFlagIndex()));
+        assertTrue(flagValueList.getBoolean(pNode.getBooleanStartIndex() + fNode.getFlagIndex()));
 
         pNode = packageTable.get("com.android.aconfig.storage.test_4");
         fNode = flagTable.get(pNode.getPackageId(), "enabled_fixed_ro");
-        assertTrue(flagValueList.get(pNode.getBooleanStartIndex() + fNode.getFlagIndex()));
+        assertTrue(flagValueList.getBoolean(pNode.getBooleanStartIndex() + fNode.getFlagIndex()));
     }
 }
diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/PackageTableTest.java b/tools/aconfig/aconfig_storage_file/tests/srcs/PackageTableTest.java
index e7e19d8..5906d8b 100644
--- a/tools/aconfig/aconfig_storage_file/tests/srcs/PackageTableTest.java
+++ b/tools/aconfig/aconfig_storage_file/tests/srcs/PackageTableTest.java
@@ -17,6 +17,8 @@
 package android.aconfig.storage.test;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 import android.aconfig.storage.FileType;
 import android.aconfig.storage.PackageTable;
@@ -31,7 +33,7 @@
     @Test
     public void testPackageTable_rightHeader() throws Exception {
         PackageTable packageTable =
-                PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer());
+                PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer(1));
         PackageTable.Header header = packageTable.getHeader();
         assertEquals(1, header.getVersion());
         assertEquals("mockup", header.getContainer());
@@ -43,9 +45,23 @@
     }
 
     @Test
+    public void testPackageTable_rightHeader_v2() throws Exception {
+        PackageTable packageTable =
+                PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer(2));
+        PackageTable.Header header = packageTable.getHeader();
+        assertEquals(2, header.getVersion());
+        assertEquals("mockup", header.getContainer());
+        assertEquals(FileType.PACKAGE_MAP, header.getFileType());
+        assertEquals(233, header.getFileSize());
+        assertEquals(3, header.getNumPackages());
+        assertEquals(31, header.getBucketOffset());
+        assertEquals(59, header.getNodeOffset());
+    }
+
+    @Test
     public void testPackageTable_rightNode() throws Exception {
         PackageTable packageTable =
-                PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer());
+                PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer(1));
 
         PackageTable.Node node1 = packageTable.get("com.android.aconfig.storage.test_1");
         PackageTable.Node node2 = packageTable.get("com.android.aconfig.storage.test_2");
@@ -66,5 +82,43 @@
         assertEquals(159, node1.getNextOffset());
         assertEquals(-1, node2.getNextOffset());
         assertEquals(-1, node4.getNextOffset());
+
+        assertFalse(node1.hasPackageFingerprint());
+        assertFalse(node2.hasPackageFingerprint());
+        assertFalse(node4.hasPackageFingerprint());
+    }
+
+    @Test
+    public void testPackageTable_rightNode_v2() throws Exception {
+        PackageTable packageTable =
+                PackageTable.fromBytes(TestDataUtils.getTestPackageMapByteBuffer(2));
+
+        PackageTable.Node node1 = packageTable.get("com.android.aconfig.storage.test_1");
+        PackageTable.Node node2 = packageTable.get("com.android.aconfig.storage.test_2");
+        PackageTable.Node node4 = packageTable.get("com.android.aconfig.storage.test_4");
+
+        assertEquals("com.android.aconfig.storage.test_1", node1.getPackageName());
+        assertEquals("com.android.aconfig.storage.test_2", node2.getPackageName());
+        assertEquals("com.android.aconfig.storage.test_4", node4.getPackageName());
+
+        assertEquals(0, node1.getPackageId());
+        assertEquals(1, node2.getPackageId());
+        assertEquals(2, node4.getPackageId());
+
+        assertEquals(0, node1.getBooleanStartIndex());
+        assertEquals(3, node2.getBooleanStartIndex());
+        assertEquals(6, node4.getBooleanStartIndex());
+
+        assertEquals(175, node1.getNextOffset());
+        assertEquals(-1, node2.getNextOffset());
+        assertEquals(-1, node4.getNextOffset());
+
+        assertTrue(node1.hasPackageFingerprint());
+        assertTrue(node2.hasPackageFingerprint());
+        assertTrue(node4.hasPackageFingerprint());
+
+        assertEquals(-3197795563119393530L, node1.getPackageFingerprint());
+        assertEquals(4431940502274857964L, node2.getPackageFingerprint());
+        assertEquals(-2213514155997929241L, node4.getPackageFingerprint());
     }
 }
diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/SipHasher13Test.java b/tools/aconfig/aconfig_storage_file/tests/srcs/SipHasher13Test.java
new file mode 100644
index 0000000..10620d2
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/srcs/SipHasher13Test.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage.test;
+
+import static org.junit.Assert.assertEquals;
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+import android.aconfig.storage.SipHasher13;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public class SipHasher13Test {
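+    // Golden values; expected to match the Rust SipHasher13 output for the
+    // same inputs with both keys set to zero.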
+    @Test
+    public void testSipHash_hashString() throws Exception {
+        String testStr = "com.google.android.test";
+        long result = SipHasher13.hash(testStr.getBytes(UTF_8));
+        assertEquals(0xF86572EFF9C4A0C1L, result);
+
+        testStr = "abcdefg";
+        result = SipHasher13.hash(testStr.getBytes(UTF_8));
+        assertEquals(0x2295EF44BD078AE9L, result);
+
+        testStr = "abcdefgh";
+        result = SipHasher13.hash(testStr.getBytes(UTF_8));
+        assertEquals(0x5CD7657FA7F96C16L, result);
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/StorageFileProviderTest.java b/tools/aconfig/aconfig_storage_file/tests/srcs/StorageFileProviderTest.java
new file mode 100644
index 0000000..a820970
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_file/tests/srcs/StorageFileProviderTest.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import android.aconfig.storage.FlagTable;
+import android.aconfig.storage.FlagValueList;
+import android.aconfig.storage.PackageTable;
+import android.aconfig.storage.StorageFileProvider;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.nio.file.Paths;
+import java.util.List;
+
+@RunWith(JUnit4.class)
+public class StorageFileProviderTest {
+
+    @Test
+    public void testlistContainers() throws Exception {
+        StorageFileProvider p =
+                new StorageFileProvider(TestDataUtils.TESTDATA_PATH, TestDataUtils.TESTDATA_PATH);
+        String[] excludes = {};
+        List<String> containers = p.listContainers(excludes);
+        assertEquals(2, containers.size());
+
+        excludes = new String[] {"mock.v1"};
+        containers = p.listContainers(excludes);
+        assertEquals(1, containers.size());
+
+        p = new StorageFileProvider("fake/path/", "fake/path/");
+        containers = p.listContainers(excludes);
+        assertTrue(containers.isEmpty());
+    }
+
+    @Test
+    public void testLoadFiles() throws Exception {
+        StorageFileProvider p =
+                new StorageFileProvider(TestDataUtils.TESTDATA_PATH, TestDataUtils.TESTDATA_PATH);
+        PackageTable pt = p.getPackageTable("mock.v1");
+        assertNotNull(pt);
+        pt =
+                StorageFileProvider.getPackageTable(
+                        Paths.get(TestDataUtils.TESTDATA_PATH, "mock.v1.package.map"));
+        assertNotNull(pt);
+        FlagTable f = p.getFlagTable("mock.v1");
+        assertNotNull(f);
+        FlagValueList v = p.getFlagValueList("mock.v1");
+        assertNotNull(v);
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/TestDataUtils.java b/tools/aconfig/aconfig_storage_file/tests/srcs/TestDataUtils.java
index f35952d..388971e 100644
--- a/tools/aconfig/aconfig_storage_file/tests/srcs/TestDataUtils.java
+++ b/tools/aconfig/aconfig_storage_file/tests/srcs/TestDataUtils.java
@@ -21,28 +21,27 @@
 import java.nio.ByteBuffer;
 
 public final class TestDataUtils {
-    private static final String TEST_PACKAGE_MAP_PATH = "package.map";
-    private static final String TEST_FLAG_MAP_PATH = "flag.map";
-    private static final String TEST_FLAG_VAL_PATH = "flag.val";
-    private static final String TEST_FLAG_INFO_PATH = "flag.info";
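+    // File name templates; %d is the storage file format version of the test data.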
+    private static final String TEST_PACKAGE_MAP_PATH = "mock.v%d.package.map";
+    private static final String TEST_FLAG_MAP_PATH = "mock.v%d.flag.map";
+    private static final String TEST_FLAG_VAL_PATH = "mock.v%d.val";
+    private static final String TEST_FLAG_INFO_PATH = "mock.v%d.info";
 
-    private static final String TESTDATA_PATH =
-            "/data/local/tmp/aconfig_storage_file_test_java/testdata/";
+    public static final String TESTDATA_PATH = "/data/local/tmp/aconfig_storage_file_test_java/testdata/";
 
-    public static ByteBuffer getTestPackageMapByteBuffer() throws Exception {
-        return readFile(TESTDATA_PATH + TEST_PACKAGE_MAP_PATH);
+    public static ByteBuffer getTestPackageMapByteBuffer(int version) throws Exception {
+        return readFile(TESTDATA_PATH + String.format(TEST_PACKAGE_MAP_PATH, version));
     }
 
-    public static ByteBuffer getTestFlagMapByteBuffer() throws Exception {
-        return readFile(TESTDATA_PATH + TEST_FLAG_MAP_PATH);
+    public static ByteBuffer getTestFlagMapByteBuffer(int version) throws Exception {
+        return readFile(TESTDATA_PATH + String.format(TEST_FLAG_MAP_PATH, version));
     }
 
-    public static ByteBuffer getTestFlagValByteBuffer() throws Exception {
-        return readFile(TESTDATA_PATH + TEST_FLAG_VAL_PATH);
+    public static ByteBuffer getTestFlagValByteBuffer(int version) throws Exception {
+        return readFile(TESTDATA_PATH + String.format(TEST_FLAG_VAL_PATH, version));
     }
 
-    public static ByteBuffer getTestFlagInfoByteBuffer() throws Exception {
-        return readFile(TESTDATA_PATH + TEST_FLAG_INFO_PATH);
+    public static ByteBuffer getTestFlagInfoByteBuffer(int version) throws Exception {
+        return readFile(TESTDATA_PATH + String.format(TEST_FLAG_INFO_PATH, version));
     }
 
     private static ByteBuffer readFile(String fileName) throws Exception {
diff --git a/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp b/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp
index ebd1dd8..5c008af 100644
--- a/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp
+++ b/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp
@@ -24,10 +24,8 @@
 using namespace android::base;
 using namespace aconfig_storage;
 
-void verify_value(const FlagValueSummary& flag,
-                  const std::string& package_name,
-                  const std::string& flag_name,
-                  const std::string& flag_val,
+void verify_value(const FlagValueSummary& flag, const std::string& package_name,
+                  const std::string& flag_name, const std::string& flag_val,
                   const std::string& value_type) {
   ASSERT_EQ(flag.package_name, package_name);
   ASSERT_EQ(flag.flag_name, flag_name);
@@ -39,10 +37,8 @@
                        const std::string& package_name,
                        const std::string& flag_name,
                        const std::string& flag_val,
-                       const std::string& value_type,
-                       bool is_readwrite,
-                       bool has_server_override,
-                       bool has_local_override) {
+                       const std::string& value_type, bool is_readwrite,
+                       bool has_server_override, bool has_local_override) {
   ASSERT_EQ(flag.package_name, package_name);
   ASSERT_EQ(flag.flag_name, flag_name);
   ASSERT_EQ(flag.flag_value, flag_val);
@@ -52,61 +48,137 @@
   ASSERT_EQ(flag.has_local_override, has_local_override);
 }
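+// Helpers below build paths to the versioned test data (data/v<N>/*_v<N>.*)
+// next to the test binary and run the list APIs against them.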
 
+Result<std::vector<FlagValueSummary>> get_flag_list_result(
+    const std::string& version) {
+  auto const test_base_dir = GetExecutableDirectory();
+  auto const test_dir = test_base_dir + "/data/v" + version;
+  auto const package_map = test_dir + "/package_v" + version + ".map";
+  auto const flag_map = test_dir + "/flag_v" + version + ".map";
+  auto const flag_val = test_dir + "/flag_v" + version + ".val";
+  return aconfig_storage::list_flags(package_map, flag_map, flag_val);
+}
+
+Result<std::vector<FlagValueAndInfoSummary>> get_flag_list_result_with_info(
+    const std::string& version) {
+  auto const test_base_dir = GetExecutableDirectory();
+  auto const test_dir = test_base_dir + "/data/v" + version;
+  auto const package_map = test_dir + "/package_v" + version + ".map";
+  auto const flag_map = test_dir + "/flag_v" + version + ".map";
+  auto const flag_val = test_dir + "/flag_v" + version + ".val";
+  auto const flag_info = test_dir + "/flag_v" + version + ".info";
+  return aconfig_storage::list_flags_with_info(package_map, flag_map, flag_val,
+                                               flag_info);
+}
+
 TEST(AconfigStorageFileTest, test_list_flag) {
-  auto const test_dir = GetExecutableDirectory();
-  auto const package_map = test_dir + "/package.map";
-  auto const flag_map = test_dir + "/flag.map";
-  auto const flag_val = test_dir + "/flag.val";
-  auto flag_list_result = aconfig_storage::list_flags(
-      package_map, flag_map, flag_val);
+  auto flag_list_result = get_flag_list_result("1");
   ASSERT_TRUE(flag_list_result.ok());
 
   auto const& flag_list = *flag_list_result;
   ASSERT_EQ(flag_list.size(), 8);
-  verify_value(flag_list[0], "com.android.aconfig.storage.test_1", "disabled_rw",
-               "false", "ReadWriteBoolean");
+  verify_value(flag_list[0], "com.android.aconfig.storage.test_1",
+               "disabled_rw", "false", "ReadWriteBoolean");
   verify_value(flag_list[1], "com.android.aconfig.storage.test_1", "enabled_ro",
                "true", "ReadOnlyBoolean");
   verify_value(flag_list[2], "com.android.aconfig.storage.test_1", "enabled_rw",
                "true", "ReadWriteBoolean");
-  verify_value(flag_list[3], "com.android.aconfig.storage.test_2", "disabled_rw",
-               "false", "ReadWriteBoolean");
-  verify_value(flag_list[4], "com.android.aconfig.storage.test_2", "enabled_fixed_ro",
-               "true", "FixedReadOnlyBoolean");
+  verify_value(flag_list[3], "com.android.aconfig.storage.test_2",
+               "disabled_rw", "false", "ReadWriteBoolean");
+  verify_value(flag_list[4], "com.android.aconfig.storage.test_2",
+               "enabled_fixed_ro", "true", "FixedReadOnlyBoolean");
   verify_value(flag_list[5], "com.android.aconfig.storage.test_2", "enabled_ro",
                "true", "ReadOnlyBoolean");
-  verify_value(flag_list[6], "com.android.aconfig.storage.test_4", "enabled_fixed_ro",
-               "true", "FixedReadOnlyBoolean");
+  verify_value(flag_list[6], "com.android.aconfig.storage.test_4",
+               "enabled_fixed_ro", "true", "FixedReadOnlyBoolean");
+  verify_value(flag_list[7], "com.android.aconfig.storage.test_4", "enabled_rw",
+               "true", "ReadWriteBoolean");
+}
+
+// TODO: b/376256472 - Use parameterized tests.
+TEST(AconfigStorageFileTest, test_list_flag_v2) {
+  auto flag_list_result = get_flag_list_result("2");
+  ASSERT_TRUE(flag_list_result.ok());
+
+  auto const& flag_list = *flag_list_result;
+  ASSERT_EQ(flag_list.size(), 8);
+  verify_value(flag_list[0], "com.android.aconfig.storage.test_1",
+               "disabled_rw", "false", "ReadWriteBoolean");
+  verify_value(flag_list[1], "com.android.aconfig.storage.test_1", "enabled_ro",
+               "true", "ReadOnlyBoolean");
+  verify_value(flag_list[2], "com.android.aconfig.storage.test_1", "enabled_rw",
+               "true", "ReadWriteBoolean");
+  verify_value(flag_list[3], "com.android.aconfig.storage.test_2",
+               "disabled_rw", "false", "ReadWriteBoolean");
+  verify_value(flag_list[4], "com.android.aconfig.storage.test_2",
+               "enabled_fixed_ro", "true", "FixedReadOnlyBoolean");
+  verify_value(flag_list[5], "com.android.aconfig.storage.test_2", "enabled_ro",
+               "true", "ReadOnlyBoolean");
+  verify_value(flag_list[6], "com.android.aconfig.storage.test_4",
+               "enabled_fixed_ro", "true", "FixedReadOnlyBoolean");
   verify_value(flag_list[7], "com.android.aconfig.storage.test_4", "enabled_rw",
                "true", "ReadWriteBoolean");
 }
 
 TEST(AconfigStorageFileTest, test_list_flag_with_info) {
-  auto const test_dir = GetExecutableDirectory();
-  auto const package_map = test_dir + "/package.map";
-  auto const flag_map = test_dir + "/flag.map";
-  auto const flag_val = test_dir + "/flag.val";
-  auto const flag_info = test_dir + "/flag.info";
-  auto flag_list_result = aconfig_storage::list_flags_with_info(
-      package_map, flag_map, flag_val, flag_info);
+  auto flag_list_result = get_flag_list_result_with_info("1");
   ASSERT_TRUE(flag_list_result.ok());
 
   auto const& flag_list = *flag_list_result;
   ASSERT_EQ(flag_list.size(), 8);
-  verify_value_info(flag_list[0], "com.android.aconfig.storage.test_1", "disabled_rw",
-                    "false", "ReadWriteBoolean", true, false, false);
-  verify_value_info(flag_list[1], "com.android.aconfig.storage.test_1", "enabled_ro",
-                    "true", "ReadOnlyBoolean", false, false, false);
-  verify_value_info(flag_list[2], "com.android.aconfig.storage.test_1", "enabled_rw",
-                    "true", "ReadWriteBoolean", true, false, false);
-  verify_value_info(flag_list[3], "com.android.aconfig.storage.test_2", "disabled_rw",
-                    "false", "ReadWriteBoolean", true, false, false);
-  verify_value_info(flag_list[4], "com.android.aconfig.storage.test_2", "enabled_fixed_ro",
-                    "true", "FixedReadOnlyBoolean", false, false, false);
-  verify_value_info(flag_list[5], "com.android.aconfig.storage.test_2", "enabled_ro",
-                    "true", "ReadOnlyBoolean", false, false, false);
-  verify_value_info(flag_list[6], "com.android.aconfig.storage.test_4", "enabled_fixed_ro",
-                    "true", "FixedReadOnlyBoolean", false, false, false);
-  verify_value_info(flag_list[7], "com.android.aconfig.storage.test_4", "enabled_rw",
-                    "true", "ReadWriteBoolean", true, false, false);
+  verify_value_info(flag_list[0], "com.android.aconfig.storage.test_1",
+                    "disabled_rw", "false", "ReadWriteBoolean", true, false,
+                    false);
+  verify_value_info(flag_list[1], "com.android.aconfig.storage.test_1",
+                    "enabled_ro", "true", "ReadOnlyBoolean", false, false,
+                    false);
+  verify_value_info(flag_list[2], "com.android.aconfig.storage.test_1",
+                    "enabled_rw", "true", "ReadWriteBoolean", true, false,
+                    false);
+  verify_value_info(flag_list[3], "com.android.aconfig.storage.test_2",
+                    "disabled_rw", "false", "ReadWriteBoolean", true, false,
+                    false);
+  verify_value_info(flag_list[4], "com.android.aconfig.storage.test_2",
+                    "enabled_fixed_ro", "true", "FixedReadOnlyBoolean", false,
+                    false, false);
+  verify_value_info(flag_list[5], "com.android.aconfig.storage.test_2",
+                    "enabled_ro", "true", "ReadOnlyBoolean", false, false,
+                    false);
+  verify_value_info(flag_list[6], "com.android.aconfig.storage.test_4",
+                    "enabled_fixed_ro", "true", "FixedReadOnlyBoolean", false,
+                    false, false);
+  verify_value_info(flag_list[7], "com.android.aconfig.storage.test_4",
+                    "enabled_rw", "true", "ReadWriteBoolean", true, false,
+                    false);
+}
+
+TEST(AconfigStorageFileTest, test_list_flag_with_info_v2) {
+  auto flag_list_result = get_flag_list_result_with_info("2");
+  ASSERT_TRUE(flag_list_result.ok());
+
+  auto const& flag_list = *flag_list_result;
+  ASSERT_EQ(flag_list.size(), 8);
+  verify_value_info(flag_list[0], "com.android.aconfig.storage.test_1",
+                    "disabled_rw", "false", "ReadWriteBoolean", true, false,
+                    false);
+  verify_value_info(flag_list[1], "com.android.aconfig.storage.test_1",
+                    "enabled_ro", "true", "ReadOnlyBoolean", false, false,
+                    false);
+  verify_value_info(flag_list[2], "com.android.aconfig.storage.test_1",
+                    "enabled_rw", "true", "ReadWriteBoolean", true, false,
+                    false);
+  verify_value_info(flag_list[3], "com.android.aconfig.storage.test_2",
+                    "disabled_rw", "false", "ReadWriteBoolean", true, false,
+                    false);
+  verify_value_info(flag_list[4], "com.android.aconfig.storage.test_2",
+                    "enabled_fixed_ro", "true", "FixedReadOnlyBoolean", false,
+                    false, false);
+  verify_value_info(flag_list[5], "com.android.aconfig.storage.test_2",
+                    "enabled_ro", "true", "ReadOnlyBoolean", false, false,
+                    false);
+  verify_value_info(flag_list[6], "com.android.aconfig.storage.test_4",
+                    "enabled_fixed_ro", "true", "FixedReadOnlyBoolean", false,
+                    false, false);
+  verify_value_info(flag_list[7], "com.android.aconfig.storage.test_4",
+                    "enabled_rw", "true", "ReadWriteBoolean", true, false,
+                    false);
 }
diff --git a/tools/aconfig/aconfig_storage_read_api/Android.bp b/tools/aconfig/aconfig_storage_read_api/Android.bp
index c2f4c18..09daeea 100644
--- a/tools/aconfig/aconfig_storage_read_api/Android.bp
+++ b/tools/aconfig/aconfig_storage_read_api/Android.bp
@@ -36,10 +36,10 @@
         "librand",
     ],
     data: [
-        "tests/package.map",
-        "tests/flag.map",
-        "tests/flag.val",
-        "tests/flag.info",
+        "tests/data/v1/package_v1.map",
+        "tests/data/v1/flag_v1.map",
+        "tests/data/v1/flag_v1.val",
+        "tests/data/v1/flag_v1.info",
     ],
 }
 
@@ -87,6 +87,9 @@
     generated_sources: ["libcxx_aconfig_storage_read_api_bridge_code"],
     whole_static_libs: ["libaconfig_storage_read_api_cxx_bridge"],
     export_include_dirs: ["include"],
+    static_libs: [
+        "libbase",
+    ],
     host_supported: true,
     vendor_available: true,
     product_available: true,
@@ -104,31 +107,12 @@
     afdo: true,
 }
 
-soong_config_module_type {
-    name: "aconfig_lib_cc_shared_link_defaults",
-    module_type: "cc_defaults",
-    config_namespace: "Aconfig",
-    bool_variables: [
-        "read_from_new_storage",
-    ],
-    properties: [
-        "shared_libs",
-    ],
-}
-
-soong_config_bool_variable {
-    name: "read_from_new_storage",
-}
-
-aconfig_lib_cc_shared_link_defaults {
+cc_defaults {
     name: "aconfig_lib_cc_shared_link.defaults",
-    soong_config_variables: {
-        read_from_new_storage: {
-            shared_libs: [
-                "libaconfig_storage_read_api_cc",
-            ],
-        },
-    },
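+    // Link the C++ read API only when the RELEASE_READ_FROM_NEW_STORAGE
+    // release flag is enabled; otherwise no extra shared library is added.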
+    shared_libs: select(release_flag("RELEASE_READ_FROM_NEW_STORAGE"), {
+        true: ["libaconfig_storage_read_api_cc"],
+        default: [],
+    }),
 }
 
 cc_defaults {
@@ -144,6 +128,7 @@
     crate_name: "aconfig_storage_read_api_rust_jni",
     srcs: ["srcs/lib.rs"],
     rustlibs: [
+        "libaconfig_storage_file",
         "libaconfig_storage_read_api",
         "libanyhow",
         "libjni",
@@ -170,9 +155,39 @@
     name: "aconfig_storage_reader_java",
     srcs: [
         "srcs/android/aconfig/storage/StorageInternalReader.java",
+        "srcs/android/os/flagging/PlatformAconfigPackageInternal.java",
+    ],
+    libs: [
+        "unsupportedappusage",
+        "strict_mode_stub",
+        "aconfig_storage_stub",
     ],
     static_libs: [
         "aconfig_storage_file_java",
     ],
     sdk_version: "core_current",
+    host_supported: true,
+    min_sdk_version: "29",
+    apex_available: [
+        "//apex_available:platform",
+        "//apex_available:anyapex",
+    ],
+}
+
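+// Variant of aconfig_storage_reader_java built with sdk_version: "none",
+// against stub and fake dependencies.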
+java_library {
+    name: "aconfig_storage_reader_java_none",
+    srcs: [
+        "srcs/android/aconfig/storage/StorageInternalReader.java",
+        "srcs/android/os/flagging/PlatformAconfigPackageInternal.java",
+    ],
+    libs: [
+        "unsupportedappusage-sdk-none",
+        "fake_device_config",
+    ],
+    static_libs: [
+        "aconfig_storage_file_java_none",
+    ],
+    sdk_version: "none",
+    system_modules: "core-all-system-modules",
+    host_supported: true,
 }
diff --git a/tools/aconfig/aconfig_storage_read_api/aconfig_storage_read_api.cpp b/tools/aconfig/aconfig_storage_read_api/aconfig_storage_read_api.cpp
index 97ada3a..8e0c4e1 100644
--- a/tools/aconfig/aconfig_storage_read_api/aconfig_storage_read_api.cpp
+++ b/tools/aconfig/aconfig_storage_read_api/aconfig_storage_read_api.cpp
@@ -1,3 +1,4 @@
+#include <android-base/unique_fd.h>
 #include <sys/mman.h>
 #include <sys/stat.h>
 #include <fcntl.h>
@@ -59,22 +60,22 @@
 
 /// Map a storage file
 Result<MappedStorageFile*> map_storage_file(std::string const& file) {
-  int fd = open(file.c_str(), O_CLOEXEC | O_NOFOLLOW | O_RDONLY);
-  if (fd == -1) {
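+  // unique_fd owns the descriptor and closes it on every return path; the
+  // mapping created below stays valid after the fd is closed.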
+  android::base::unique_fd ufd(open(file.c_str(), O_CLOEXEC | O_NOFOLLOW | O_RDONLY));
+  if (ufd.get() == -1) {
     auto result = Result<MappedStorageFile*>();
     result.errmsg = std::string("failed to open ") + file + ": " + strerror(errno);
     return result;
   };
 
   struct stat fd_stat;
-  if (fstat(fd, &fd_stat) < 0) {
+  if (fstat(ufd.get(), &fd_stat) < 0) {
     auto result = Result<MappedStorageFile*>();
     result.errmsg = std::string("fstat failed: ") + strerror(errno);
     return result;
   }
   size_t file_size = fd_stat.st_size;
 
-  void* const map_result = mmap(nullptr, file_size, PROT_READ, MAP_SHARED, fd, 0);
+  void* const map_result = mmap(nullptr, file_size, PROT_READ, MAP_SHARED, ufd.get(), 0);
   if (map_result == MAP_FAILED) {
     auto result = Result<MappedStorageFile*>();
     result.errmsg = std::string("mmap failed: ") + strerror(errno);
diff --git a/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs b/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs
index 6d03377..68b6193 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs
@@ -16,8 +16,10 @@
 
 //! flag value query module defines the flag value file read from mapped bytes
 
-use crate::{AconfigStorageError, FILE_VERSION};
-use aconfig_storage_file::{flag_info::FlagInfoHeader, read_u8_from_bytes, FlagValueType};
+use crate::AconfigStorageError;
+use aconfig_storage_file::{
+    flag_info::FlagInfoHeader, read_u8_from_bytes, FlagValueType, MAX_SUPPORTED_FILE_VERSION,
+};
 use anyhow::anyhow;
 
 /// Get flag attribute bitfield
@@ -27,11 +29,11 @@
     flag_index: u32,
 ) -> Result<u8, AconfigStorageError> {
     let interpreted_header = FlagInfoHeader::from_bytes(buf)?;
-    if interpreted_header.version > crate::FILE_VERSION {
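+    // Gate on the highest file version this library can parse; older
+    // supported versions remain readable.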
+    if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
             "Cannot read storage file with a higher version of {} with lib version {}",
             interpreted_header.version,
-            FILE_VERSION
+            MAX_SUPPORTED_FILE_VERSION
         )));
     }
 
@@ -53,12 +55,14 @@
 #[cfg(test)]
 mod tests {
     use super::*;
-    use aconfig_storage_file::{test_utils::create_test_flag_info_list, FlagInfoBit};
+    use aconfig_storage_file::{
+        test_utils::create_test_flag_info_list, FlagInfoBit, DEFAULT_FILE_VERSION,
+    };
 
     #[test]
     // this test point locks down query if flag has server override
     fn test_is_flag_sticky() {
-        let flag_info_list = create_test_flag_info_list().into_bytes();
+        let flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION).into_bytes();
         for offset in 0..8 {
             let attribute =
                 find_flag_attribute(&flag_info_list[..], FlagValueType::Boolean, offset).unwrap();
@@ -69,7 +73,7 @@
     #[test]
     // this test point locks down query if flag is readwrite
     fn test_is_flag_readwrite() {
-        let flag_info_list = create_test_flag_info_list().into_bytes();
+        let flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION).into_bytes();
         let baseline: Vec<bool> = vec![true, false, true, true, false, false, false, true];
         for offset in 0..8 {
             let attribute =
@@ -84,7 +88,7 @@
     #[test]
     // this test point locks down query if flag has local override
     fn test_flag_has_override() {
-        let flag_info_list = create_test_flag_info_list().into_bytes();
+        let flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION).into_bytes();
         for offset in 0..8 {
             let attribute =
                 find_flag_attribute(&flag_info_list[..], FlagValueType::Boolean, offset).unwrap();
@@ -95,7 +99,7 @@
     #[test]
     // this test point locks down query beyond the end of boolean section
     fn test_boolean_out_of_range() {
-        let flag_info_list = create_test_flag_info_list().into_bytes();
+        let flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION).into_bytes();
         let error =
             find_flag_attribute(&flag_info_list[..], FlagValueType::Boolean, 8).unwrap_err();
         assert_eq!(
@@ -107,16 +111,16 @@
     #[test]
     // this test point locks down query error when file has a higher version
     fn test_higher_version_storage_file() {
-        let mut info_list = create_test_flag_info_list();
-        info_list.header.version = crate::FILE_VERSION + 1;
+        let mut info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION);
+        info_list.header.version = MAX_SUPPORTED_FILE_VERSION + 1;
         let flag_info = info_list.into_bytes();
         let error = find_flag_attribute(&flag_info[..], FlagValueType::Boolean, 4).unwrap_err();
         assert_eq!(
             format!("{:?}", error),
             format!(
                 "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})",
-                crate::FILE_VERSION + 1,
-                crate::FILE_VERSION
+                MAX_SUPPORTED_FILE_VERSION + 1,
+                MAX_SUPPORTED_FILE_VERSION
             )
         );
     }
diff --git a/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs b/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs
index a1a4793..3e87acc 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs
@@ -16,9 +16,10 @@
 
 //! flag table query module defines the flag table file read from mapped bytes
 
-use crate::{AconfigStorageError, FILE_VERSION};
+use crate::AconfigStorageError;
 use aconfig_storage_file::{
     flag_table::FlagTableHeader, flag_table::FlagTableNode, read_u32_from_bytes, StoredFlagType,
+    MAX_SUPPORTED_FILE_VERSION,
 };
 use anyhow::anyhow;
 
@@ -36,11 +37,11 @@
     flag: &str,
 ) -> Result<Option<FlagReadContext>, AconfigStorageError> {
     let interpreted_header = FlagTableHeader::from_bytes(buf)?;
-    if interpreted_header.version > crate::FILE_VERSION {
+    if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
             "Cannot read storage file with a higher version of {} with lib version {}",
             interpreted_header.version,
-            FILE_VERSION
+            MAX_SUPPORTED_FILE_VERSION
         )));
     }
 
@@ -73,12 +74,12 @@
 #[cfg(test)]
 mod tests {
     use super::*;
-    use aconfig_storage_file::test_utils::create_test_flag_table;
+    use aconfig_storage_file::{test_utils::create_test_flag_table, DEFAULT_FILE_VERSION};
 
     #[test]
     // this test point locks down table query
     fn test_flag_query() {
-        let flag_table = create_test_flag_table().into_bytes();
+        let flag_table = create_test_flag_table(DEFAULT_FILE_VERSION).into_bytes();
         let baseline = vec![
             (0, "enabled_ro", StoredFlagType::ReadOnlyBoolean, 1u16),
             (0, "enabled_rw", StoredFlagType::ReadWriteBoolean, 2u16),
@@ -100,7 +101,7 @@
     #[test]
     // this test point locks down table query of a non exist flag
     fn test_not_existed_flag_query() {
-        let flag_table = create_test_flag_table().into_bytes();
+        let flag_table = create_test_flag_table(DEFAULT_FILE_VERSION).into_bytes();
         let flag_context = find_flag_read_context(&flag_table[..], 1, "disabled_fixed_ro").unwrap();
         assert_eq!(flag_context, None);
         let flag_context = find_flag_read_context(&flag_table[..], 2, "disabled_rw").unwrap();
@@ -110,16 +111,16 @@
     #[test]
     // this test point locks down query error when file has a higher version
     fn test_higher_version_storage_file() {
-        let mut table = create_test_flag_table();
-        table.header.version = crate::FILE_VERSION + 1;
+        let mut table = create_test_flag_table(DEFAULT_FILE_VERSION);
+        table.header.version = MAX_SUPPORTED_FILE_VERSION + 1;
         let flag_table = table.into_bytes();
         let error = find_flag_read_context(&flag_table[..], 0, "enabled_ro").unwrap_err();
         assert_eq!(
             format!("{:?}", error),
             format!(
                 "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})",
-                crate::FILE_VERSION + 1,
-                crate::FILE_VERSION
+                MAX_SUPPORTED_FILE_VERSION + 1,
+                MAX_SUPPORTED_FILE_VERSION
             )
         );
     }
diff --git a/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs b/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs
index 9d32a16..35f5692 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs
@@ -16,18 +16,20 @@
 
 //! flag value query module defines the flag value file read from mapped bytes
 
-use crate::{AconfigStorageError, FILE_VERSION};
-use aconfig_storage_file::{flag_value::FlagValueHeader, read_u8_from_bytes};
+use crate::AconfigStorageError;
+use aconfig_storage_file::{
+    flag_value::FlagValueHeader, read_u8_from_bytes, MAX_SUPPORTED_FILE_VERSION,
+};
 use anyhow::anyhow;
 
 /// Query flag value
 pub fn find_boolean_flag_value(buf: &[u8], flag_index: u32) -> Result<bool, AconfigStorageError> {
     let interpreted_header = FlagValueHeader::from_bytes(buf)?;
-    if interpreted_header.version > crate::FILE_VERSION {
+    if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
             "Cannot read storage file with a higher version of {} with lib version {}",
             interpreted_header.version,
-            FILE_VERSION
+            MAX_SUPPORTED_FILE_VERSION
         )));
     }
 
@@ -46,12 +48,12 @@
 #[cfg(test)]
 mod tests {
     use super::*;
-    use aconfig_storage_file::test_utils::create_test_flag_value_list;
+    use aconfig_storage_file::{test_utils::create_test_flag_value_list, DEFAULT_FILE_VERSION};
 
     #[test]
     // this test point locks down flag value query
     fn test_flag_value_query() {
-        let flag_value_list = create_test_flag_value_list().into_bytes();
+        let flag_value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION).into_bytes();
         let baseline: Vec<bool> = vec![false, true, true, false, true, true, true, true];
         for (offset, expected_value) in baseline.into_iter().enumerate() {
             let flag_value = find_boolean_flag_value(&flag_value_list[..], offset as u32).unwrap();
@@ -62,7 +64,7 @@
     #[test]
     // this test point locks down query beyond the end of boolean section
     fn test_boolean_out_of_range() {
-        let flag_value_list = create_test_flag_value_list().into_bytes();
+        let flag_value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION).into_bytes();
         let error = find_boolean_flag_value(&flag_value_list[..], 8).unwrap_err();
         assert_eq!(
             format!("{:?}", error),
@@ -73,16 +75,16 @@
     #[test]
     // this test point locks down query error when file has a higher version
     fn test_higher_version_storage_file() {
-        let mut value_list = create_test_flag_value_list();
-        value_list.header.version = crate::FILE_VERSION + 1;
+        let mut value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION);
+        value_list.header.version = MAX_SUPPORTED_FILE_VERSION + 1;
         let flag_value = value_list.into_bytes();
         let error = find_boolean_flag_value(&flag_value[..], 4).unwrap_err();
         assert_eq!(
             format!("{:?}", error),
             format!(
                 "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})",
-                crate::FILE_VERSION + 1,
-                crate::FILE_VERSION
+                MAX_SUPPORTED_FILE_VERSION + 1,
+                MAX_SUPPORTED_FILE_VERSION
             )
         );
     }
diff --git a/tools/aconfig/aconfig_storage_read_api/src/lib.rs b/tools/aconfig/aconfig_storage_read_api/src/lib.rs
index d76cf3f..6e98fe9 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/lib.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/lib.rs
@@ -44,9 +44,10 @@
 
 pub use aconfig_storage_file::{AconfigStorageError, FlagValueType, StorageFileType};
 pub use flag_table_query::FlagReadContext;
+pub use mapped_file::map_file;
 pub use package_table_query::PackageReadContext;
 
-use aconfig_storage_file::{read_u32_from_bytes, FILE_VERSION};
+use aconfig_storage_file::read_u32_from_bytes;
 use flag_info_query::find_flag_attribute;
 use flag_table_query::find_flag_read_context;
 use flag_value_query::find_boolean_flag_value;
@@ -114,13 +115,13 @@
 
 /// Get the boolean flag value.
 ///
-/// \input file: mapped flag file
+/// \input file: a byte slice, which can be backed by either &Mmap or &MmapMut
 /// \input index: boolean flag offset
 ///
 /// \return
 /// If the provide offset is valid, it returns the boolean flag value, otherwise it
 /// returns the error message.
-pub fn get_boolean_flag_value(file: &Mmap, index: u32) -> Result<bool, AconfigStorageError> {
+pub fn get_boolean_flag_value(file: &[u8], index: u32) -> Result<bool, AconfigStorageError> {
     find_boolean_flag_value(file, index)
 }
 
@@ -148,7 +149,7 @@
 
 /// Get the flag attribute.
 ///
-/// \input file: mapped flag info file
+/// \input file: a byte slice, which can be backed by either &Mmap or &MmapMut
 /// \input flag_type: flag value type
 /// \input flag_index: flag index
 ///
@@ -156,7 +157,7 @@
 /// If the provide offset is valid, it returns the flag attribute bitfiled, otherwise it
 /// returns the error message.
 pub fn get_flag_attribute(
-    file: &Mmap,
+    file: &[u8],
     flag_type: FlagValueType,
     flag_index: u32,
 ) -> Result<u8, AconfigStorageError> {
@@ -412,10 +413,10 @@
         let flag_map = storage_dir.clone() + "/maps/mockup.flag.map";
         let flag_val = storage_dir.clone() + "/boot/mockup.val";
         let flag_info = storage_dir.clone() + "/boot/mockup.info";
-        fs::copy("./tests/package.map", &package_map).unwrap();
-        fs::copy("./tests/flag.map", &flag_map).unwrap();
-        fs::copy("./tests/flag.val", &flag_val).unwrap();
-        fs::copy("./tests/flag.info", &flag_info).unwrap();
+        fs::copy("./tests/data/v1/package_v1.map", &package_map).unwrap();
+        fs::copy("./tests/data/v1/flag_v1.map", &flag_map).unwrap();
+        fs::copy("./tests/data/v1/flag_v1.val", &flag_val).unwrap();
+        fs::copy("./tests/data/v1/flag_v1.info", &flag_info).unwrap();
 
         return storage_dir;
     }
@@ -507,9 +508,9 @@
     #[test]
     // this test point locks down flag storage file version number query api
     fn test_storage_version_query() {
-        assert_eq!(get_storage_file_version("./tests/package.map").unwrap(), 1);
-        assert_eq!(get_storage_file_version("./tests/flag.map").unwrap(), 1);
-        assert_eq!(get_storage_file_version("./tests/flag.val").unwrap(), 1);
-        assert_eq!(get_storage_file_version("./tests/flag.info").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./tests/data/v1/package_v1.map").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./tests/data/v1/flag_v1.map").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./tests/data/v1/flag_v1.val").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./tests/data/v1/flag_v1.info").unwrap(), 1);
     }
 }
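
Changing the public getters to take &[u8] instead of &Mmap works because memory maps dereference to byte slices, so both mapped files and plain in-memory buffers satisfy the same signature. A small self-contained sketch of that calling convention follows; the buffer layout and query function are invented.

```rust
// Invented query that only needs a byte slice, mirroring the &[u8] signatures
// adopted above. Anything that derefs to [u8] (Vec<u8>, memmap2::Mmap,
// memmap2::MmapMut, ...) can be passed via deref coercion.
fn toy_boolean_at(buf: &[u8], index: usize) -> Result<bool, String> {
    buf.get(index)
        .map(|byte| *byte != 0)
        .ok_or_else(|| format!("index {} out of range for {} bytes", index, buf.len()))
}

fn main() {
    let in_memory: Vec<u8> = vec![0, 1, 1, 0];
    assert_eq!(toy_boolean_at(&in_memory, 1), Ok(true));
    assert!(toy_boolean_at(&in_memory, 9).is_err());
    // A memmap2::Mmap would be passed the same way: toy_boolean_at(&mapped, 1)
}
```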
diff --git a/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs b/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs
index 5a16645..f4e269e 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs
@@ -28,7 +28,7 @@
 /// The memory mapped file may have undefined behavior if there are writes to this
 /// file after being mapped. Ensure no writes can happen to this file while this
 /// mapping stays alive.
-unsafe fn map_file(file_path: &str) -> Result<Mmap, AconfigStorageError> {
+pub unsafe fn map_file(file_path: &str) -> Result<Mmap, AconfigStorageError> {
     let file = File::open(file_path)
         .map_err(|errmsg| FileReadFail(anyhow!("Failed to open file {}: {}", file_path, errmsg)))?;
     unsafe {
@@ -97,10 +97,10 @@
         let flag_map = storage_dir.clone() + "/maps/mockup.flag.map";
         let flag_val = storage_dir.clone() + "/boot/mockup.val";
         let flag_info = storage_dir.clone() + "/boot/mockup.info";
-        fs::copy("./tests/package.map", &package_map).unwrap();
-        fs::copy("./tests/flag.map", &flag_map).unwrap();
-        fs::copy("./tests/flag.val", &flag_val).unwrap();
-        fs::copy("./tests/flag.info", &flag_info).unwrap();
+        fs::copy("./tests/data/v1/package_v1.map", &package_map).unwrap();
+        fs::copy("./tests/data/v1/flag_v1.map", &flag_map).unwrap();
+        fs::copy("./tests/data/v1/flag_v1.val", &flag_val).unwrap();
+        fs::copy("./tests/data/v1/flag_v1.info", &flag_info).unwrap();
 
         return storage_dir;
     }
@@ -108,9 +108,9 @@
     #[test]
     fn test_mapped_file_contents() {
         let storage_dir = create_test_storage_files();
-        map_and_verify(&storage_dir, StorageFileType::PackageMap, "./tests/package.map");
-        map_and_verify(&storage_dir, StorageFileType::FlagMap, "./tests/flag.map");
-        map_and_verify(&storage_dir, StorageFileType::FlagVal, "./tests/flag.val");
-        map_and_verify(&storage_dir, StorageFileType::FlagInfo, "./tests/flag.info");
+        map_and_verify(&storage_dir, StorageFileType::PackageMap, "./tests/data/v1/package_v1.map");
+        map_and_verify(&storage_dir, StorageFileType::FlagMap, "./tests/data/v1/flag_v1.map");
+        map_and_verify(&storage_dir, StorageFileType::FlagVal, "./tests/data/v1/flag_v1.val");
+        map_and_verify(&storage_dir, StorageFileType::FlagInfo, "./tests/data/v1/flag_v1.info");
     }
 }
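
With map_file now public, callers outside the crate can obtain the raw read-only mapping themselves. Here is a hedged sketch of what such a mapping typically looks like with the memmap2 crate (an assumed dependency; the path and error handling are illustrative, and the same no-concurrent-writes caveat from the doc comment above applies).

```rust
use std::fs::File;

use memmap2::Mmap; // assumed dependency, e.g. memmap2 = "0.9"

/// Read-only mapping sketch. Unsafe for the same reason as map_file above:
/// the caller must guarantee nothing writes to the file while the mapping
/// is alive, otherwise behavior is undefined.
unsafe fn map_file_sketch(path: &str) -> std::io::Result<Mmap> {
    let file = File::open(path)?;
    // The mapping stays valid even after `file` is dropped here.
    Mmap::map(&file)
}

fn main() -> std::io::Result<()> {
    // Illustrative path only; substitute a real storage file.
    let mapped = unsafe { map_file_sketch("/tmp/example.bin")? };
    println!("mapped {} bytes", mapped.len());
    Ok(())
}
```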
diff --git a/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs b/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs
index 2cb854b..a4b63ab 100644
--- a/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs
+++ b/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs
@@ -16,9 +16,10 @@
 
 //! package table query module defines the package table file read from mapped bytes
 
-use crate::{AconfigStorageError, FILE_VERSION};
+use crate::AconfigStorageError;
 use aconfig_storage_file::{
     package_table::PackageTableHeader, package_table::PackageTableNode, read_u32_from_bytes,
+    MAX_SUPPORTED_FILE_VERSION,
 };
 use anyhow::anyhow;
 
@@ -35,11 +36,11 @@
     package: &str,
 ) -> Result<Option<PackageReadContext>, AconfigStorageError> {
     let interpreted_header = PackageTableHeader::from_bytes(buf)?;
-    if interpreted_header.version > FILE_VERSION {
+    if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
             "Cannot read storage file with a higher version of {} with lib version {}",
             interpreted_header.version,
-            FILE_VERSION
+            MAX_SUPPORTED_FILE_VERSION
         )));
     }
 
@@ -55,7 +56,8 @@
     }
 
     loop {
-        let interpreted_node = PackageTableNode::from_bytes(&buf[package_node_offset..])?;
+        let interpreted_node =
+            PackageTableNode::from_bytes(&buf[package_node_offset..], interpreted_header.version)?;
         if interpreted_node.package_name == package {
             return Ok(Some(PackageReadContext {
                 package_id: interpreted_node.package_id,
@@ -72,12 +74,12 @@
 #[cfg(test)]
 mod tests {
     use super::*;
-    use aconfig_storage_file::test_utils::create_test_package_table;
+    use aconfig_storage_file::{test_utils::create_test_package_table, DEFAULT_FILE_VERSION};
 
     #[test]
     // this test point locks down table query
     fn test_package_query() {
-        let package_table = create_test_package_table().into_bytes();
+        let package_table = create_test_package_table(DEFAULT_FILE_VERSION).into_bytes();
         let package_context =
             find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_1")
                 .unwrap()
@@ -102,7 +104,7 @@
     // this test point locks down table query of a non exist package
     fn test_not_existed_package_query() {
         // this will land at an empty bucket
-        let package_table = create_test_package_table().into_bytes();
+        let package_table = create_test_package_table(DEFAULT_FILE_VERSION).into_bytes();
         let package_context =
             find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_3")
                 .unwrap();
@@ -117,8 +119,8 @@
     #[test]
     // this test point locks down query error when file has a higher version
     fn test_higher_version_storage_file() {
-        let mut table = create_test_package_table();
-        table.header.version = crate::FILE_VERSION + 1;
+        let mut table = create_test_package_table(DEFAULT_FILE_VERSION);
+        table.header.version = MAX_SUPPORTED_FILE_VERSION + 1;
         let package_table = table.into_bytes();
         let error =
             find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_1")
@@ -127,8 +129,8 @@
             format!("{:?}", error),
             format!(
                 "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})",
-                crate::FILE_VERSION + 1,
-                crate::FILE_VERSION
+                MAX_SUPPORTED_FILE_VERSION + 1,
+                MAX_SUPPORTED_FILE_VERSION
             )
         );
     }
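
find_package_read_context now threads the header version into PackageTableNode::from_bytes, so node parsing can account for per-version layout differences (v2 packages carry extra data such as the fingerprint used elsewhere in this change). The following self-contained sketch shows version-dependent parsing with an invented node layout; only the "pass the version down" shape matches the real code.

```rust
// Invented layout: a v1 node is [package_id: u32 LE]; a v2 node additionally
// carries [fingerprint: u64 LE]. The real aconfig node format is richer.
#[derive(Debug, PartialEq)]
struct ToyPackageNode {
    package_id: u32,
    fingerprint: Option<u64>,
}

fn toy_node_from_bytes(bytes: &[u8], version: u32) -> Result<ToyPackageNode, String> {
    let id: [u8; 4] = bytes.get(0..4).and_then(|s| s.try_into().ok()).ok_or("node too short")?;
    let package_id = u32::from_le_bytes(id);
    let fingerprint = if version >= 2 {
        let fp: [u8; 8] =
            bytes.get(4..12).and_then(|s| s.try_into().ok()).ok_or("v2 node too short")?;
        Some(u64::from_le_bytes(fp))
    } else {
        None
    };
    Ok(ToyPackageNode { package_id, fingerprint })
}

fn main() {
    let v1_bytes = 7u32.to_le_bytes().to_vec();
    let parsed = toy_node_from_bytes(&v1_bytes, 1).unwrap();
    assert_eq!(parsed, ToyPackageNode { package_id: 7, fingerprint: None });

    let fp: u64 = 0xdeadbeef;
    let mut v2_bytes = 7u32.to_le_bytes().to_vec();
    v2_bytes.extend_from_slice(&fp.to_le_bytes());
    assert_eq!(toy_node_from_bytes(&v2_bytes, 2).unwrap().fingerprint, Some(fp));
}
```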
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java
index 406ff24..850c2b8 100644
--- a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java
@@ -16,18 +16,14 @@
 
 package android.aconfig.storage;
 
+import dalvik.annotation.optimization.FastNative;
+
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
 import java.nio.MappedByteBuffer;
 import java.nio.channels.FileChannel;
-import java.nio.channels.FileChannel.MapMode;
-
-import android.aconfig.storage.PackageReadContext;
-import android.aconfig.storage.FlagReadContext;
-
-import dalvik.annotation.optimization.FastNative;
 
 public class AconfigStorageReadAPI {
 
@@ -50,9 +46,8 @@
     }
 
     // Map a storage file given container and file type
-    public static MappedByteBuffer getMappedFile(
-        String container,
-        StorageFileType type) throws IOException{
+    public static MappedByteBuffer getMappedFile(String container, StorageFileType type)
+            throws IOException {
         switch (type) {
             case PACKAGE_MAP:
                 return mapStorageFile(STORAGEDIR + "/maps/" + container + ".package.map");
@@ -73,14 +68,14 @@
     // @throws IOException if the passed in file is not a valid package map file
     @FastNative
     private static native ByteBuffer getPackageReadContextImpl(
-        ByteBuffer mappedFile, String packageName) throws IOException;
+            ByteBuffer mappedFile, String packageName) throws IOException;
 
     // API to get package read context
     // @param mappedFile: memory mapped package map file
     // @param packageName: package name
     // @throws IOException if the passed in file is not a valid package map file
-    static public PackageReadContext getPackageReadContext (
-        ByteBuffer mappedFile, String packageName) throws IOException {
+    public static PackageReadContext getPackageReadContext(
+            ByteBuffer mappedFile, String packageName) throws IOException {
         ByteBuffer buffer = getPackageReadContextImpl(mappedFile, packageName);
         buffer.order(ByteOrder.LITTLE_ENDIAN);
         return new PackageReadContext(buffer.getInt(), buffer.getInt(4));
@@ -94,7 +89,7 @@
     // @throws IOException if the passed in file is not a valid flag map file
     @FastNative
     private static native ByteBuffer getFlagReadContextImpl(
-        ByteBuffer mappedFile, int packageId, String flagName) throws IOException;
+            ByteBuffer mappedFile, int packageId, String flagName) throws IOException;
 
     // API to get flag read context
     // @param mappedFile: memory mapped flag map file
@@ -103,7 +98,7 @@
     // @param flagName: flag name
     // @throws IOException if the passed in file is not a valid flag map file
     public static FlagReadContext getFlagReadContext(
-        ByteBuffer mappedFile, int packageId, String flagName) throws IOException {
+            ByteBuffer mappedFile, int packageId, String flagName) throws IOException {
         ByteBuffer buffer = getFlagReadContextImpl(mappedFile, packageId, flagName);
         buffer.order(ByteOrder.LITTLE_ENDIAN);
         return new FlagReadContext(buffer.getInt(), buffer.getInt(4));
@@ -115,8 +110,11 @@
     // @throws IOException if the passed in file is not a valid flag value file or the
     // flag index went over the file boundary.
     @FastNative
-    public static native boolean getBooleanFlagValue(
-        ByteBuffer mappedFile, int flagIndex) throws IOException;
+    public static native boolean getBooleanFlagValue(ByteBuffer mappedFile, int flagIndex)
+            throws IOException;
+
+    @FastNative
+    public static native long hash(String packageName) throws IOException;
 
     static {
         System.loadLibrary("aconfig_storage_read_api_rust_jni");
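
The JNI-backed getters above return a small ByteBuffer that the Java wrappers decode as two little-endian ints (buffer.getInt() and buffer.getInt(4)), e.g. package id plus boolean start index. Below is a toy round-trip of that packing convention, not the real JNI plumbing.

```rust
// Two i32 values packed little-endian into 8 bytes, matching how the Java
// side reads getInt() and getInt(4) from the returned buffer. Field meanings
// (package id, boolean start index) come from the code above; the rest is toy.
fn pack_context(first: i32, second: i32) -> [u8; 8] {
    let mut out = [0u8; 8];
    out[..4].copy_from_slice(&first.to_le_bytes());
    out[4..].copy_from_slice(&second.to_le_bytes());
    out
}

fn unpack_context(buf: &[u8; 8]) -> (i32, i32) {
    let first = i32::from_le_bytes(buf[..4].try_into().unwrap());
    let second = i32::from_le_bytes(buf[4..].try_into().unwrap());
    (first, second)
}

fn main() {
    let buf = pack_context(1, 3); // e.g. packageId = 1, booleanStartIndex = 3
    assert_eq!(unpack_context(&buf), (1, 3));
}
```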
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java
index 5f31017..6fbcdb3 100644
--- a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java
@@ -16,10 +16,16 @@
 
 package android.aconfig.storage;
 
-import java.io.FileInputStream;
+import android.compat.annotation.UnsupportedAppUsage;
+import android.os.StrictMode;
+
+import java.io.Closeable;
 import java.nio.MappedByteBuffer;
 import java.nio.channels.FileChannel;
+import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
 
+/** @hide */
 public class StorageInternalReader {
 
     private static final String MAP_PATH = "/metadata/aconfig/maps/";
@@ -30,22 +36,24 @@
 
     private int mPackageBooleanStartOffset;
 
+    @UnsupportedAppUsage
     public StorageInternalReader(String container, String packageName) {
         this(packageName, MAP_PATH + container + ".package.map", BOOT_PATH + container + ".val");
     }
 
+    @UnsupportedAppUsage
     public StorageInternalReader(String packageName, String packageMapFile, String flagValueFile) {
+        StrictMode.ThreadPolicy oldPolicy = StrictMode.allowThreadDiskReads();
         mPackageTable = PackageTable.fromBytes(mapStorageFile(packageMapFile));
         mFlagValueList = FlagValueList.fromBytes(mapStorageFile(flagValueFile));
+        StrictMode.setThreadPolicy(oldPolicy);
         mPackageBooleanStartOffset = getPackageBooleanStartOffset(packageName);
     }
 
+    @UnsupportedAppUsage
     public boolean getBooleanFlagValue(int index) {
         index += mPackageBooleanStartOffset;
-        if (index >= mFlagValueList.size()) {
-            throw new AconfigStorageException("Fail to get boolean flag value");
-        }
-        return mFlagValueList.get(index);
+        return mFlagValueList.getBoolean(index);
     }
 
     private int getPackageBooleanStartOffset(String packageName) {
@@ -62,13 +70,25 @@
 
     // Map a storage file given file path
     private static MappedByteBuffer mapStorageFile(String file) {
+        FileChannel channel = null;
         try {
-            FileInputStream stream = new FileInputStream(file);
-            FileChannel channel = stream.getChannel();
+            channel = FileChannel.open(Paths.get(file), StandardOpenOption.READ);
             return channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
         } catch (Exception e) {
             throw new AconfigStorageException(
                     String.format("Fail to mmap storage file %s", file), e);
+        } finally {
+            quietlyDispose(channel);
+        }
+    }
+
+    private static void quietlyDispose(Closeable closable) {
+        try {
+            if (closable != null) {
+                closable.close();
+            }
+        } catch (Exception e) {
+            // Intentionally ignored; nothing useful to do if closing fails.
         }
     }
 }
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/os/flagging/PlatformAconfigPackageInternal.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/os/flagging/PlatformAconfigPackageInternal.java
new file mode 100644
index 0000000..83310be
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/os/flagging/PlatformAconfigPackageInternal.java
@@ -0,0 +1,264 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.os.flagging;
+
+import android.aconfig.storage.AconfigStorageException;
+import android.aconfig.storage.FlagTable;
+import android.aconfig.storage.FlagValueList;
+import android.aconfig.storage.PackageTable;
+import android.aconfig.storage.StorageFileProvider;
+import android.compat.annotation.UnsupportedAppUsage;
+import android.os.StrictMode;
+
+/**
+ * An {@code aconfig} package containing the enabled state of its flags.
+ *
+ * <p><strong>Note: this is intended only to be used by generated code. To determine if a given flag
+ * is enabled in app code, the generated android flags should be used.</strong>
+ *
+ * <p>This class is not part of the public API and should only be used by Aconfig flags
+ * internally. It is intended for internal use only and may be changed or removed without notice.
+ *
+ * <p>This class is used to read flags from an Aconfig package. Each instance of this class will
+ * cache information related to one package. To read flags from a different package, a new instance
+ * of this class should be {@link #load loaded}.
+ *
+ * @hide
+ */
+public class PlatformAconfigPackageInternal {
+
+    private final FlagTable mFlagTable;
+    private final FlagValueList mFlagValueList;
+    private final int mPackageId;
+    private final int mPackageBooleanStartOffset;
+    private final AconfigStorageException mException;
+
+    private PlatformAconfigPackageInternal(
+            FlagValueList flagValueList,
+            FlagTable flagTable,
+            int packageBooleanStartOffset,
+            int packageId,
+            AconfigStorageException exception) {
+        this.mFlagValueList = flagValueList;
+        this.mFlagTable = flagTable;
+        this.mPackageBooleanStartOffset = packageBooleanStartOffset;
+        this.mPackageId = packageId;
+        this.mException = exception;
+    }
+
+    /**
+     * Loads an Aconfig Package from platform Aconfig Storage.
+     *
+     * <p>This method is intended for internal use only and may be changed or removed without
+     * notice.
+     *
+     * <p>This method loads the specified Aconfig Package from the given container.
+     *
+     * <p>AconfigStorageException will be stored if there is an error reading from Aconfig Storage.
+     * The specific error code can be retrieved via {@link #getException()}.
+     *
+     * @param container The name of the container.
+     * @param packageName The name of the Aconfig package to load.
+     * @return An instance of {@link PlatformAconfigPackageInternal}
+     * @hide
+     */
+    @UnsupportedAppUsage
+    public static PlatformAconfigPackageInternal load(String container, String packageName) {
+        return load(container, packageName, StorageFileProvider.getDefaultProvider());
+    }
+
+    /** @hide */
+    public static PlatformAconfigPackageInternal load(
+            String container, String packageName, StorageFileProvider fileProvider) {
+        StrictMode.ThreadPolicy oldPolicy = StrictMode.allowThreadDiskReads();
+        try {
+            PackageTable.Node pNode = fileProvider.getPackageTable(container).get(packageName);
+
+            if (pNode == null) {
+                return createExceptionInstance(
+                        AconfigStorageException.ERROR_PACKAGE_NOT_FOUND,
+                        "package "
+                                + packageName
+                                + " in container "
+                                + container
+                                + " cannot be found on the device");
+            }
+
+            return new PlatformAconfigPackageInternal(
+                    fileProvider.getFlagValueList(container),
+                    fileProvider.getFlagTable(container),
+                    pNode.getBooleanStartIndex(),
+                    pNode.getPackageId(),
+                    null);
+
+        } catch (AconfigStorageException e) {
+            return createExceptionInstance(e.getErrorCode(), e.getMessage());
+        } finally {
+            StrictMode.setThreadPolicy(oldPolicy);
+        }
+    }
+
+    /**
+     * Loads an Aconfig package from the specified container and verifies its fingerprint.
+     *
+     * <p>This method is intended for internal use only and may be changed or removed without
+     * notice.
+     *
+     * <p>AconfigStorageException will be stored if there is an error reading from Aconfig Storage.
+     * The specific error code can be retrieved via {@link #getException()}.
+     *
+     * @param container The name of the container.
+     * @param packageName The name of the Aconfig package.
+     * @param packageFingerprint The expected fingerprint of the package.
+     * @return An instance of {@link PlatformAconfigPackageInternal} representing the loaded
+     *     package.
+     * @hide
+     */
+    @UnsupportedAppUsage
+    public static PlatformAconfigPackageInternal load(
+            String container, String packageName, long packageFingerprint) {
+        return load(
+                container,
+                packageName,
+                packageFingerprint,
+                StorageFileProvider.getDefaultProvider());
+    }
+
+    /** @hide */
+    public static PlatformAconfigPackageInternal load(
+            String container,
+            String packageName,
+            long packageFingerprint,
+            StorageFileProvider fileProvider) {
+        StrictMode.ThreadPolicy oldPolicy = StrictMode.allowThreadDiskReads();
+        try {
+            PackageTable.Node pNode = fileProvider.getPackageTable(container).get(packageName);
+
+            if (pNode == null) {
+                return createExceptionInstance(
+                        AconfigStorageException.ERROR_PACKAGE_NOT_FOUND,
+                        "package "
+                                + packageName
+                                + " in container "
+                                + container
+                                + " cannot be found on the device");
+            }
+
+            if (pNode.hasPackageFingerprint()
+                    && packageFingerprint != pNode.getPackageFingerprint()) {
+                return new PlatformAconfigPackageInternal(
+                        fileProvider.getFlagValueList(container),
+                        fileProvider.getFlagTable(container),
+                        pNode.getBooleanStartIndex(),
+                        pNode.getPackageId(),
+                        new AconfigStorageException(
+                                AconfigStorageException.ERROR_FILE_FINGERPRINT_MISMATCH,
+                                "The fingerprint provided for the Aconfig package "
+                                        + packageName
+                                        + " in container "
+                                        + container
+                                        + " does not match"
+                                        + " the fingerprint of the package found on the device."));
+            }
+
+            return new PlatformAconfigPackageInternal(
+                    fileProvider.getFlagValueList(container),
+                    null,
+                    pNode.getBooleanStartIndex(),
+                    0,
+                    null);
+
+        } catch (AconfigStorageException e) {
+            return createExceptionInstance(e.getErrorCode(), e.getMessage());
+        } finally {
+            StrictMode.setThreadPolicy(oldPolicy);
+        }
+    }
+
+    /**
+     * Retrieves the value of a boolean flag using its index.
+     *
+     * <p>This method is intended for internal use only and may be changed or removed without
+     * notice.
+     *
+     * <p>This method retrieves the value of a flag within the loaded Aconfig package using its
+     * index. The index is generated at build time and may vary between builds.
+     *
+     * <p>To ensure you are using the correct index, verify that the package's fingerprint matches
+     * the expected fingerprint before calling this method. If the fingerprints do not match, use
+     * {@link #getBooleanFlagValue(String, boolean)} instead.
+     *
+     * @param index The index of the flag within the package.
+     * @return The boolean value of the flag.
+     * @hide
+     */
+    @UnsupportedAppUsage
+    public boolean getBooleanFlagValue(int index) {
+        return mFlagValueList.getBoolean(index + mPackageBooleanStartOffset);
+    }
+
+    /**
+     * Retrieves the value of a boolean flag using its name.
+     *
+     * <p>This method is intended for internal use only and may be changed or removed without
+     * notice.
+     *
+     * <p>This method retrieves the value of a flag within the loaded Aconfig package using its
+     * name.
+     *
+     * @param flagName The name of the flag.
+     * @param defaultValue The default value to return if the flag is not found.
+     * @return The boolean value of the flag.
+     * @hide
+     */
+    @UnsupportedAppUsage
+    public boolean getBooleanFlagValue(String flagName, boolean defaultValue) {
+        FlagTable.Node fNode = mFlagTable.get(mPackageId, flagName);
+        if (fNode == null) {
+            return defaultValue;
+        }
+        return mFlagValueList.getBoolean(fNode.getFlagIndex() + mPackageBooleanStartOffset);
+    }
+
+    /**
+     * Returns any exception that occurred during the loading of the Aconfig package.
+     *
+     * <p>This method is intended for internal use only and may be changed or removed without
+     * notice.
+     *
+     * @return The exception that occurred, or {@code null} if no exception occurred.
+     * @hide
+     */
+    @UnsupportedAppUsage
+    public Exception getException() {
+        return mException;
+    }
+
+    /**
+     * Creates a new {@link PlatformAconfigPackageInternal} instance with an {@link
+     * AconfigStorageException}.
+     *
+     * @param errorCode The error code for the exception.
+     * @param message The error message for the exception.
+     * @return A new {@link PlatformAconfigPackageInternal} instance with the specified exception.
+     */
+    private static PlatformAconfigPackageInternal createExceptionInstance(
+            int errorCode, String message) {
+        return new PlatformAconfigPackageInternal(
+                null, null, 0, 0, new AconfigStorageException(errorCode, message));
+    }
+}
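
The fingerprint-aware load above keeps only the flag value list when the caller-supplied fingerprint matches the one stored on device (so index-based reads are safe), and otherwise retains the flag table plus a stored ERROR_FILE_FINGERPRINT_MISMATCH exception so callers can fall back to name-based reads. A compact concept sketch of that decision, with invented types standing in for the storage classes:

```rust
// Concept sketch only; the types are stand-ins for PackageTable.Node,
// FlagTable and FlagValueList in the Java class above.
#[derive(Debug, PartialEq)]
enum LoadedPackage {
    /// Fingerprint matched: build-time indexes are trusted, read by index.
    ByIndex { boolean_start: usize },
    /// Fingerprint mismatch: keep enough state for name-based reads and
    /// record an error the caller can inspect (mirrors getException()).
    ByName { boolean_start: usize, error: String },
}

fn load_with_fingerprint(
    stored_fingerprint: Option<u64>,
    expected_fingerprint: u64,
    boolean_start: usize,
) -> LoadedPackage {
    match stored_fingerprint {
        Some(fp) if fp != expected_fingerprint => LoadedPackage::ByName {
            boolean_start,
            error: "fingerprint mismatch; fall back to name-based reads".to_string(),
        },
        _ => LoadedPackage::ByIndex { boolean_start },
    }
}

fn main() {
    assert_eq!(
        load_with_fingerprint(Some(42), 42, 3),
        LoadedPackage::ByIndex { boolean_start: 3 }
    );
    assert!(matches!(
        load_with_fingerprint(Some(42), 7, 3),
        LoadedPackage::ByName { .. }
    ));
}
```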
diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs b/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs
index 304a059..f5f12bb 100644
--- a/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs
+++ b/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs
@@ -1,5 +1,6 @@
 //! aconfig storage read api java rust interlop
 
+use aconfig_storage_file::SipHasher13;
 use aconfig_storage_read_api::flag_table_query::find_flag_read_context;
 use aconfig_storage_read_api::flag_value_query::find_boolean_flag_value;
 use aconfig_storage_read_api::package_table_query::find_package_read_context;
@@ -7,8 +8,9 @@
 
 use anyhow::Result;
 use jni::objects::{JByteBuffer, JClass, JString};
-use jni::sys::{jboolean, jint};
+use jni::sys::{jboolean, jint, jlong};
 use jni::JNIEnv;
+use std::hash::Hasher;
 
 /// Call rust find package read context
 fn get_package_read_context_java(
@@ -158,3 +160,30 @@
         }
     }
 }
+
+/// Get siphash13 hash JNI
+#[no_mangle]
+#[allow(unused)]
+pub extern "system" fn Java_android_aconfig_storage_AconfigStorageReadAPI_hash<'local>(
+    mut env: JNIEnv<'local>,
+    class: JClass<'local>,
+    package_name: JString<'local>,
+) -> jlong {
+    match siphasher13_hash(&mut env, package_name) {
+        Ok(value) => value as jlong,
+        Err(errmsg) => {
+            env.throw(("java/io/IOException", errmsg.to_string())).expect("failed to throw");
+            0i64
+        }
+    }
+}
+
+fn siphasher13_hash(env: &mut JNIEnv, package_name: JString) -> Result<u64> {
+    // SAFETY:
+    // The safety here is ensured as the flag name is guaranteed to be a java string
+    let flag_name: String = unsafe { env.get_string_unchecked(&package_name)?.into() };
+    let mut s = SipHasher13::new();
+    s.write(flag_name.as_bytes());
+    s.write_u8(0xff);
+    Ok(s.finish())
+}
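
The new hash entry point runs SipHash-1-3 over the UTF-8 bytes of the name followed by a 0xff terminator, which is what the Rust-vs-Java equality test added later in this change exercises. Below is a hedged standalone equivalent using the third-party siphasher crate (an assumption; the in-tree SipHasher13 may differ in keys or defaults).

```rust
use std::hash::Hasher;

use siphasher::sip::SipHasher13; // assumed dependency, e.g. siphasher = "1"

/// Mirrors the shape of siphasher13_hash above: hash the name bytes plus a
/// trailing 0xff byte. Keys/defaults are the crate's and may not match the
/// in-tree aconfig SipHasher13.
fn hash_name(name: &str) -> u64 {
    let mut hasher = SipHasher13::new();
    hasher.write(name.as_bytes());
    hasher.write_u8(0xff);
    hasher.finish()
}

fn main() {
    let package = "com.android.aconfig.storage.test_1"; // sample name from the tests
    println!("hash = {:#x}", hash_name(package));
}
```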
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/AndroidTest.xml b/tools/aconfig/aconfig_storage_read_api/tests/AconfigStorageReadFunctionalTest.xml
similarity index 89%
rename from tools/aconfig/aconfig_storage_read_api/tests/java/AndroidTest.xml
rename to tools/aconfig/aconfig_storage_read_api/tests/AconfigStorageReadFunctionalTest.xml
index 99c9e25..ee50060 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/java/AndroidTest.xml
+++ b/tools/aconfig/aconfig_storage_read_api/tests/AconfigStorageReadFunctionalTest.xml
@@ -26,7 +26,7 @@
     </target_preparer>
 
     <target_preparer class="com.android.tradefed.targetprep.TestAppInstallSetup">
-        <option name="test-file-name" value="aconfig_storage_read_api.test.java.apk" />
+        <option name="test-file-name" value="aconfig_storage_read_functional.apk" />
     </target_preparer>
 
     <target_preparer class="com.android.tradefed.targetprep.DisableSELinuxTargetPreparer" />
@@ -35,17 +35,17 @@
     <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
         <option name="cleanup" value="true" />
         <option name="abort-on-push-failure" value="true" />
-        <option name="push-file" key="package.map"
+        <option name="push-file" key="package_v1.map"
                 value="/data/local/tmp/aconfig_java_api_test/maps/mockup.package.map" />
-        <option name="push-file" key="flag.map"
+        <option name="push-file" key="flag_v1.map"
                 value="/data/local/tmp/aconfig_java_api_test/maps/mockup.flag.map" />
-        <option name="push-file" key="flag.val"
+        <option name="push-file" key="flag_v1.val"
                 value="/data/local/tmp/aconfig_java_api_test/boot/mockup.val" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.AndroidJUnitTest" >
         <option name="runner" value="androidx.test.runner.AndroidJUnitRunner" />
-        <option name="package" value="android.aconfig_storage.test" />
+        <option name="package" value="android.aconfig.storage.test" />
         <option name="runtime-hint" value="1m" />
     </test>
-</configuration>
+</configuration>
\ No newline at end of file
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/AconfigStorageReadUnitTest.xml b/tools/aconfig/aconfig_storage_read_api/tests/AconfigStorageReadUnitTest.xml
new file mode 100644
index 0000000..e528dd5
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/tests/AconfigStorageReadUnitTest.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  ~ Copyright (C) 2024 The Android Open Source Project
+  ~
+  ~ Licensed under the Apache License, Version 2.0 (the "License");
+  ~ you may not use this file except in compliance with the License.
+  ~ You may obtain a copy of the License at
+  ~
+  ~      http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+<configuration description="Test aconfig storage java tests">
+    <target_preparer class="com.android.tradefed.targetprep.suite.SuiteApkInstaller">
+        <option name="cleanup-apks" value="true" />
+        <option name="test-file-name" value="aconfig_storage_read_unit.apk" />
+    </target_preparer>
+    <target_preparer class="com.android.compatibility.common.tradefed.targetprep.FilePusher">
+        <option name="cleanup" value="true" />
+        <option name="push" value="package_v2.map->/data/local/tmp/aconfig_storage_read_unit/testdata/mockup.package.map" />
+        <option name="push" value="flag_v2.map->/data/local/tmp/aconfig_storage_read_unit/testdata/mockup.flag.map" />
+        <option name="push" value="flag_v2.val->/data/local/tmp/aconfig_storage_read_unit/testdata/mockup.val" />
+        <option name="push" value="flag_v2.info->/data/local/tmp/aconfig_storage_read_unit/testdata/mockup.info" />
+        <option name="post-push" value="chmod +r /data/local/tmp/aconfig_storage_read_unit/testdata/" />
+    </target_preparer>
+    <test class="com.android.tradefed.testtype.AndroidJUnitTest" >
+        <option name="package" value="android.aconfig.storage.test" />
+        <option name="runtime-hint" value="1m" />
+    </test>
+</configuration>
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/Android.bp b/tools/aconfig/aconfig_storage_read_api/tests/Android.bp
index ed0c728..fb2fc77 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/Android.bp
+++ b/tools/aconfig/aconfig_storage_read_api/tests/Android.bp
@@ -1,9 +1,14 @@
 filegroup {
     name: "read_api_test_storage_files",
-    srcs: ["package.map",
-        "flag.map",
-        "flag.val",
-        "flag.info"
+    srcs: [
+        "data/v1/package_v1.map",
+        "data/v1/flag_v1.map",
+        "data/v1/flag_v1.val",
+        "data/v1/flag_v1.info",
+        "data/v2/package_v2.map",
+        "data/v2/flag_v2.map",
+        "data/v2/flag_v2.val",
+        "data/v2/flag_v2.info",
     ],
 }
 
@@ -43,3 +48,53 @@
         "general-tests",
     ],
 }
+
+android_test {
+    name: "aconfig_storage_read_functional",
+    srcs: [
+        "functional/srcs/**/*.java",
+    ],
+    static_libs: [
+        "aconfig_device_paths_java",
+        "aconfig_storage_file_java",
+        "aconfig_storage_reader_java",
+        "androidx.test.rules",
+        "libaconfig_storage_read_api_java",
+        "junit",
+    ],
+    jni_libs: [
+        "libaconfig_storage_read_api_rust_jni",
+    ],
+    data: [
+        ":read_api_test_storage_files",
+    ],
+    platform_apis: true,
+    certificate: "platform",
+    test_suites: [
+        "general-tests",
+    ],
+    test_config: "AconfigStorageReadFunctionalTest.xml",
+    team: "trendy_team_android_core_experiments",
+}
+
+android_test {
+    name: "aconfig_storage_read_unit",
+    team: "trendy_team_android_core_experiments",
+    srcs: [
+        "unit/srcs/**/*.java",
+    ],
+    static_libs: [
+        "androidx.test.runner",
+        "junit",
+        "aconfig_storage_reader_java",
+    ],
+    sdk_version: "test_current",
+    data: [
+        ":read_api_test_storage_files",
+    ],
+    test_suites: [
+        "general-tests",
+    ],
+    test_config: "AconfigStorageReadUnitTest.xml",
+    jarjar_rules: "jarjar.txt",
+}
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/AndroidManifest.xml b/tools/aconfig/aconfig_storage_read_api/tests/AndroidManifest.xml
similarity index 89%
rename from tools/aconfig/aconfig_storage_read_api/tests/java/AndroidManifest.xml
rename to tools/aconfig/aconfig_storage_read_api/tests/AndroidManifest.xml
index 78bfb37..5e01879 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/java/AndroidManifest.xml
+++ b/tools/aconfig/aconfig_storage_read_api/tests/AndroidManifest.xml
@@ -15,12 +15,13 @@
   ~ limitations under the License.
   -->
 
-<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="android.aconfig_storage.test">
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="android.aconfig.storage.test">
     <application>
         <uses-library android:name="android.test.runner" />
     </application>
 
     <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner"
-                     android:targetPackage="android.aconfig_storage.test" />
+                     android:targetPackage="android.aconfig.storage.test" />
 
 </manifest>
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/flag.info b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag_v1.info
similarity index 100%
rename from tools/aconfig/aconfig_storage_read_api/tests/flag.info
rename to tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag_v1.info
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/flag.map b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag_v1.map
similarity index 100%
rename from tools/aconfig/aconfig_storage_read_api/tests/flag.map
rename to tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag_v1.map
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/flag.val b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag_v1.val
similarity index 100%
rename from tools/aconfig/aconfig_storage_read_api/tests/flag.val
rename to tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag_v1.val
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/package.map b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/package_v1.map
similarity index 100%
rename from tools/aconfig/aconfig_storage_read_api/tests/package.map
rename to tools/aconfig/aconfig_storage_read_api/tests/data/v1/package_v1.map
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.info b/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.info
new file mode 100644
index 0000000..9db7fde
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.info
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.map b/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.map
new file mode 100644
index 0000000..cf4685c
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.map
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.val b/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.val
new file mode 100644
index 0000000..37d4750
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/tests/data/v2/flag_v2.val
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/data/v2/package_v2.map b/tools/aconfig/aconfig_storage_read_api/tests/data/v2/package_v2.map
new file mode 100644
index 0000000..0a9f95e
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/tests/data/v2/package_v2.map
Binary files differ
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/AconfigStorageReadAPITest.java b/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/AconfigStorageReadAPITest.java
new file mode 100644
index 0000000..6dd1bce
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/AconfigStorageReadAPITest.java
@@ -0,0 +1,270 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import android.aconfig.DeviceProtos;
+import android.aconfig.nano.Aconfig.parsed_flag;
+import android.aconfig.storage.AconfigStorageReadAPI;
+import android.aconfig.storage.FlagReadContext;
+import android.aconfig.storage.FlagReadContext.StoredFlagType;
+import android.aconfig.storage.PackageReadContext;
+import android.aconfig.storage.SipHasher13;
+import android.aconfig.storage.StorageInternalReader;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.io.IOException;
+import java.nio.MappedByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+@RunWith(JUnit4.class)
+public class AconfigStorageReadAPITest {
+
+    private String mStorageDir = "/data/local/tmp/aconfig_java_api_test";
+
+    @Test
+    public void testPackageContextQuery() {
+        MappedByteBuffer packageMap = null;
+        try {
+            packageMap =
+                    AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.package.map");
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
+        assertTrue(packageMap != null);
+
+        try {
+            PackageReadContext context =
+                    AconfigStorageReadAPI.getPackageReadContext(
+                            packageMap, "com.android.aconfig.storage.test_1");
+            assertEquals(context.mPackageId, 0);
+            assertEquals(context.mBooleanStartIndex, 0);
+
+            context =
+                    AconfigStorageReadAPI.getPackageReadContext(
+                            packageMap, "com.android.aconfig.storage.test_2");
+            assertEquals(context.mPackageId, 1);
+            assertEquals(context.mBooleanStartIndex, 3);
+
+            context =
+                    AconfigStorageReadAPI.getPackageReadContext(
+                            packageMap, "com.android.aconfig.storage.test_4");
+            assertEquals(context.mPackageId, 2);
+            assertEquals(context.mBooleanStartIndex, 6);
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
+    }
+
+    @Test
+    public void testNonExistPackageContextQuery() {
+        MappedByteBuffer packageMap = null;
+        try {
+            packageMap =
+                    AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.package.map");
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
+        assertTrue(packageMap != null);
+
+        try {
+            PackageReadContext context =
+                    AconfigStorageReadAPI.getPackageReadContext(packageMap, "unknown");
+            assertEquals(context.mPackageId, -1);
+            assertEquals(context.mBooleanStartIndex, -1);
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
+    }
+
+    @Test
+    public void testFlagContextQuery() {
+        MappedByteBuffer flagMap = null;
+        try {
+            flagMap = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.flag.map");
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
+        assertTrue(flagMap != null);
+
+        class Baseline {
+            public int mPackageId;
+            public String mFlagName;
+            public StoredFlagType mFlagType;
+            public int mFlagIndex;
+
+            public Baseline(
+                    int packageId, String flagName, StoredFlagType flagType, int flagIndex) {
+                mPackageId = packageId;
+                mFlagName = flagName;
+                mFlagType = flagType;
+                mFlagIndex = flagIndex;
+            }
+        }
+
+        List<Baseline> baselines = new ArrayList<>();
+        baselines.add(new Baseline(0, "enabled_ro", StoredFlagType.ReadOnlyBoolean, 1));
+        baselines.add(new Baseline(0, "enabled_rw", StoredFlagType.ReadWriteBoolean, 2));
+        baselines.add(new Baseline(2, "enabled_rw", StoredFlagType.ReadWriteBoolean, 1));
+        baselines.add(new Baseline(1, "disabled_rw", StoredFlagType.ReadWriteBoolean, 0));
+        baselines.add(new Baseline(1, "enabled_fixed_ro", StoredFlagType.FixedReadOnlyBoolean, 1));
+        baselines.add(new Baseline(1, "enabled_ro", StoredFlagType.ReadOnlyBoolean, 2));
+        baselines.add(new Baseline(2, "enabled_fixed_ro", StoredFlagType.FixedReadOnlyBoolean, 0));
+        baselines.add(new Baseline(0, "disabled_rw", StoredFlagType.ReadWriteBoolean, 0));
+
+        try {
+            for (Baseline baseline : baselines) {
+                FlagReadContext context =
+                        AconfigStorageReadAPI.getFlagReadContext(
+                                flagMap, baseline.mPackageId, baseline.mFlagName);
+                assertEquals(context.mFlagType, baseline.mFlagType);
+                assertEquals(context.mFlagIndex, baseline.mFlagIndex);
+            }
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
+    }
+
+    @Test
+    public void testNonExistFlagContextQuery() {
+        MappedByteBuffer flagMap = null;
+        try {
+            flagMap = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.flag.map");
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
+        assertTrue(flagMap != null);
+
+        try {
+            FlagReadContext context =
+                    AconfigStorageReadAPI.getFlagReadContext(flagMap, 0, "unknown");
+            assertEquals(context.mFlagType, null);
+            assertEquals(context.mFlagIndex, -1);
+
+            context = AconfigStorageReadAPI.getFlagReadContext(flagMap, 3, "enabled_ro");
+            assertEquals(context.mFlagType, null);
+            assertEquals(context.mFlagIndex, -1);
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
+    }
+
+    @Test
+    public void testBooleanFlagValueQuery() {
+        MappedByteBuffer flagVal = null;
+        try {
+            flagVal = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/boot/mockup.val");
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
+        assertTrue(flagVal != null);
+
+        boolean[] baselines = {false, true, true, false, true, true, true, true};
+        for (int i = 0; i < 8; ++i) {
+            try {
+                Boolean value = AconfigStorageReadAPI.getBooleanFlagValue(flagVal, i);
+                assertEquals(value, baselines[i]);
+            } catch (IOException ex) {
+                assertTrue(ex.toString(), false);
+            }
+        }
+    }
+
+    @Test
+    public void testInvalidBooleanFlagValueQuery() {
+        MappedByteBuffer flagVal = null;
+        try {
+            flagVal = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/boot/mockup.val");
+        } catch (IOException ex) {
+            assertTrue(ex.toString(), false);
+        }
+        assertTrue(flagVal != null);
+
+        try {
+            Boolean value = AconfigStorageReadAPI.getBooleanFlagValue(flagVal, 9);
+            assertTrue("should throw", false);
+        } catch (IOException ex) {
+            String expectedErrmsg = "invalid storage file byte offset";
+            assertTrue(ex.toString(), ex.toString().contains(expectedErrmsg));
+        }
+    }
+
+    @Test
+    public void testRustJavaEqualHash() throws IOException {
+        List<parsed_flag> flags = DeviceProtos.loadAndParseFlagProtos();
+        for (parsed_flag flag : flags) {
+            String packageName = flag.package_;
+            String flagName = flag.name;
+            long rHash = AconfigStorageReadAPI.hash(packageName);
+            long jHash = SipHasher13.hash(packageName.getBytes());
+            assertEquals(rHash, jHash);
+
+            String fullFlagName = packageName + "/" + flagName;
+            rHash = AconfigStorageReadAPI.hash(fullFlagName);
+            jHash = SipHasher13.hash(fullFlagName.getBytes());
+            assertEquals(rHash, jHash);
+        }
+    }
+
+    @Test
+    public void testRustJavaEqualFlag() throws IOException {
+        List<parsed_flag> flags = DeviceProtos.loadAndParseFlagProtos();
+
+        String mapPath = "/metadata/aconfig/maps/";
+        String flagsPath = "/metadata/aconfig/boot/";
+
+        for (parsed_flag flag : flags) {
+
+            String container = flag.container;
+            String packageName = flag.package_;
+            String flagName = flag.name;
+            String fullFlagName = packageName + "/" + flagName;
+
+            MappedByteBuffer packageMap =
+                    AconfigStorageReadAPI.mapStorageFile(mapPath + container + ".package.map");
+            MappedByteBuffer flagMap =
+                    AconfigStorageReadAPI.mapStorageFile(mapPath + container + ".flag.map");
+            MappedByteBuffer flagValList =
+                    AconfigStorageReadAPI.mapStorageFile(flagsPath + container + ".val");
+
+            PackageReadContext packageContext =
+                    AconfigStorageReadAPI.getPackageReadContext(packageMap, packageName);
+
+            FlagReadContext flagContext =
+                    AconfigStorageReadAPI.getFlagReadContext(
+                            flagMap, packageContext.mPackageId, flagName);
+
+            boolean rVal =
+                    AconfigStorageReadAPI.getBooleanFlagValue(
+                            flagValList,
+                            packageContext.mBooleanStartIndex + flagContext.mFlagIndex);
+
+            StorageInternalReader reader = new StorageInternalReader(container, packageName);
+            boolean jVal = reader.getBooleanFlagValue(flagContext.mFlagIndex);
+
+            long rHash = AconfigStorageReadAPI.hash(packageName);
+            long jHash = SipHasher13.hash(packageName.getBytes());
+            assertEquals(rVal, jVal);
+        }
+    }
+}
\ No newline at end of file
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/PlatformAconfigPackageInternalTest.java b/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/PlatformAconfigPackageInternalTest.java
new file mode 100644
index 0000000..15cfc3b
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/PlatformAconfigPackageInternalTest.java
@@ -0,0 +1,155 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage.test;
+
+import static android.aconfig.nano.Aconfig.ENABLED;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+import android.aconfig.DeviceProtos;
+import android.aconfig.nano.Aconfig.parsed_flag;
+import android.aconfig.storage.AconfigStorageException;
+import android.aconfig.storage.FlagTable;
+import android.aconfig.storage.FlagValueList;
+import android.aconfig.storage.PackageTable;
+import android.aconfig.storage.StorageFileProvider;
+import android.os.flagging.PlatformAconfigPackageInternal;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+@RunWith(JUnit4.class)
+public class PlatformAconfigPackageInternalTest {
+
+    private static final Set<String> PLATFORM_CONTAINERS = Set.of("system", "vendor", "product");
+
+    @Test
+    public void testAconfigPackageInternal_load() throws IOException {
+        List<parsed_flag> flags = DeviceProtos.loadAndParseFlagProtos();
+        Map<String, PlatformAconfigPackageInternal> readerMap = new HashMap<>();
+        StorageFileProvider fp = StorageFileProvider.getDefaultProvider();
+
+        for (parsed_flag flag : flags) {
+
+            String container = flag.container;
+            String packageName = flag.package_;
+            String flagName = flag.name;
+            if (!PLATFORM_CONTAINERS.contains(container)) continue;
+
+            PackageTable pTable = fp.getPackageTable(container);
+            PackageTable.Node pNode = pTable.get(packageName);
+            FlagTable fTable = fp.getFlagTable(container);
+            FlagTable.Node fNode = fTable.get(pNode.getPackageId(), flagName);
+            FlagValueList fList = fp.getFlagValueList(container);
+
+            int index = pNode.getBooleanStartIndex() + fNode.getFlagIndex();
+            boolean rVal = fList.getBoolean(index);
+
+            PlatformAconfigPackageInternal reader = readerMap.get(packageName);
+            if (reader == null) {
+                reader = PlatformAconfigPackageInternal.load(container, packageName);
+                assertNull(reader.getException());
+                readerMap.put(packageName, reader);
+            }
+            boolean jVal = reader.getBooleanFlagValue(flagName, !rVal);
+
+            assertEquals(rVal, jVal);
+        }
+    }
+
+    @Test
+    public void testPlatformAconfigPackageInternal_load_with_fingerprint() throws IOException {
+        List<parsed_flag> flags = DeviceProtos.loadAndParseFlagProtos();
+        Map<String, PlatformAconfigPackageInternal> readerMap = new HashMap<>();
+        StorageFileProvider fp = StorageFileProvider.getDefaultProvider();
+
+        for (parsed_flag flag : flags) {
+
+            String container = flag.container;
+            String packageName = flag.package_;
+            String flagName = flag.name;
+            if (!PLATFORM_CONTAINERS.contains(container)) continue;
+
+            PackageTable pTable = fp.getPackageTable(container);
+            PackageTable.Node pNode = pTable.get(packageName);
+            FlagTable fTable = fp.getFlagTable(container);
+            FlagTable.Node fNode = fTable.get(pNode.getPackageId(), flagName);
+            FlagValueList fList = fp.getFlagValueList(container);
+
+            int index = pNode.getBooleanStartIndex() + fNode.getFlagIndex();
+            boolean rVal = fList.getBoolean(index);
+
+            long fingerprint = pNode.getPackageFingerprint();
+
+            PlatformAconfigPackageInternal reader = readerMap.get(packageName);
+            if (reader == null) {
+                reader = PlatformAconfigPackageInternal.load(container, packageName, fingerprint);
+                assertNull(reader.getException());
+                readerMap.put(packageName, reader);
+            }
+            boolean jVal = reader.getBooleanFlagValue(fNode.getFlagIndex());
+
+            assertEquals(rVal, jVal);
+        }
+    }
+
+    @Test
+    public void testAconfigPackage_load_withError() throws IOException {
+        // container not found fake_container
+        PlatformAconfigPackageInternal aPackage =
+                PlatformAconfigPackageInternal.load("fake_container", "fake_package", 0);
+        assertEquals(
+                AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
+                ((AconfigStorageException) aPackage.getException()).getErrorCode());
+
+        // package not found
+        aPackage = PlatformAconfigPackageInternal.load("system", "fake_container", 0);
+        assertEquals(
+                AconfigStorageException.ERROR_PACKAGE_NOT_FOUND,
+                ((AconfigStorageException) aPackage.getException()).getErrorCode());
+
+        // fingerprint doesn't match
+        List<parsed_flag> flags = DeviceProtos.loadAndParseFlagProtos();
+        StorageFileProvider fp = StorageFileProvider.getDefaultProvider();
+
+        parsed_flag flag = flags.get(0);
+
+        String container = flag.container;
+        String packageName = flag.package_;
+        boolean value = flag.state == ENABLED;
+
+        PackageTable pTable = fp.getPackageTable(container);
+        PackageTable.Node pNode = pTable.get(packageName);
+
+        if (pNode.hasPackageFingerprint()) {
+            long fingerprint = pNode.getPackageFingerprint();
+            aPackage = PlatformAconfigPackageInternal.load(container, packageName, fingerprint + 1);
+            assertEquals(
+                    // AconfigStorageException.ERROR_FILE_FINGERPRINT_MISMATCH,
+                    5, ((AconfigStorageException) aPackage.getException()).getErrorCode());
+            assertEquals(aPackage.getBooleanFlagValue(flag.name, !value), value);
+        }
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/StorageInternalReaderTest.java b/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/StorageInternalReaderTest.java
similarity index 99%
rename from tools/aconfig/aconfig_storage_read_api/tests/java/StorageInternalReaderTest.java
rename to tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/StorageInternalReaderTest.java
index 3a1bba0..8a8f054 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/java/StorageInternalReaderTest.java
+++ b/tools/aconfig/aconfig_storage_read_api/tests/functional/srcs/StorageInternalReaderTest.java
@@ -42,4 +42,4 @@
         assertFalse(reader.getBooleanFlagValue(0));
         assertTrue(reader.getBooleanFlagValue(1));
     }
-}
+}
\ No newline at end of file
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/jarjar.txt b/tools/aconfig/aconfig_storage_read_api/tests/jarjar.txt
new file mode 100644
index 0000000..49250d4
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/tests/jarjar.txt
@@ -0,0 +1,19 @@
+rule android.aconfig.storage.AconfigStorageException android.aconfig.storage.test.AconfigStorageException
+rule android.aconfig.storage.FlagTable android.aconfig.storage.test.FlagTable
+rule android.aconfig.storage.PackageTable android.aconfig.storage.test.PackageTable
+rule android.aconfig.storage.ByteBufferReader android.aconfig.storage.test.ByteBufferReader
+rule android.aconfig.storage.FlagType android.aconfig.storage.test.FlagType
+rule android.aconfig.storage.SipHasher13 android.aconfig.storage.test.SipHasher13
+rule android.aconfig.storage.FileType android.aconfig.storage.test.FileType
+rule android.aconfig.storage.FlagValueList android.aconfig.storage.test.FlagValueList
+rule android.aconfig.storage.TableUtils android.aconfig.storage.test.TableUtils
+rule android.aconfig.storage.AconfigPackageImpl android.aconfig.storage.test.AconfigPackageImpl
+rule android.aconfig.storage.StorageFileProvider android.aconfig.storage.test.StorageFileProvider
+
+
+rule android.aconfig.storage.FlagTable$* android.aconfig.storage.test.FlagTable$@1
+rule android.aconfig.storage.PackageTable$* android.aconfig.storage.test.PackageTable$@1
+rule android.aconfig.storage.FlagValueList$* android.aconfig.storage.test.FlagValueList$@1
+rule android.aconfig.storage.SipHasher13$* android.aconfig.storage.test.SipHasher13$@1
+
+rule android.os.flagging.PlatformAconfigPackageInternal android.aconfig.storage.test.PlatformAconfigPackageInternal
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java b/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java
deleted file mode 100644
index a26b257..0000000
--- a/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- * Copyright (C) 2024 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.aconfig.storage.test;
-
-import java.io.IOException;
-import java.nio.MappedByteBuffer;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-import android.aconfig.storage.AconfigStorageReadAPI;
-import android.aconfig.storage.PackageReadContext;
-import android.aconfig.storage.FlagReadContext;
-import android.aconfig.storage.FlagReadContext.StoredFlagType;
-
-@RunWith(JUnit4.class)
-public class AconfigStorageReadAPITest{
-
-    private String mStorageDir = "/data/local/tmp/aconfig_java_api_test";
-
-    @Test
-    public void testPackageContextQuery() {
-        MappedByteBuffer packageMap = null;
-        try {
-            packageMap = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/maps/mockup.package.map");
-        } catch(IOException ex){
-            assertTrue(ex.toString(), false);
-        }
-        assertTrue(packageMap != null);
-
-        try {
-            PackageReadContext context = AconfigStorageReadAPI.getPackageReadContext(
-                packageMap, "com.android.aconfig.storage.test_1");
-            assertEquals(context.mPackageId, 0);
-            assertEquals(context.mBooleanStartIndex, 0);
-
-            context = AconfigStorageReadAPI.getPackageReadContext(
-                packageMap, "com.android.aconfig.storage.test_2");
-            assertEquals(context.mPackageId, 1);
-            assertEquals(context.mBooleanStartIndex, 3);
-
-            context = AconfigStorageReadAPI.getPackageReadContext(
-                packageMap, "com.android.aconfig.storage.test_4");
-            assertEquals(context.mPackageId, 2);
-            assertEquals(context.mBooleanStartIndex, 6);
-        } catch (IOException ex) {
-            assertTrue(ex.toString(), false);
-        }
-    }
-
-    @Test
-    public void testNonExistPackageContextQuery() {
-        MappedByteBuffer packageMap = null;
-        try {
-            packageMap = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/maps/mockup.package.map");
-        } catch(IOException ex){
-            assertTrue(ex.toString(), false);
-        }
-        assertTrue(packageMap != null);
-
-        try {
-            PackageReadContext context = AconfigStorageReadAPI.getPackageReadContext(
-                packageMap, "unknown");
-            assertEquals(context.mPackageId, -1);
-            assertEquals(context.mBooleanStartIndex, -1);
-        } catch(IOException ex){
-            assertTrue(ex.toString(), false);
-        }
-    }
-
-    @Test
-    public void testFlagContextQuery() {
-        MappedByteBuffer flagMap = null;
-        try {
-            flagMap = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/maps/mockup.flag.map");
-        } catch(IOException ex){
-            assertTrue(ex.toString(), false);
-        }
-        assertTrue(flagMap!= null);
-
-        class Baseline {
-            public int mPackageId;
-            public String mFlagName;
-            public StoredFlagType mFlagType;
-            public int mFlagIndex;
-
-            public Baseline(int packageId,
-                    String flagName,
-                    StoredFlagType flagType,
-                    int flagIndex) {
-                mPackageId = packageId;
-                mFlagName = flagName;
-                mFlagType = flagType;
-                mFlagIndex = flagIndex;
-            }
-        }
-
-        List<Baseline> baselines = new ArrayList();
-        baselines.add(new Baseline(0, "enabled_ro", StoredFlagType.ReadOnlyBoolean, 1));
-        baselines.add(new Baseline(0, "enabled_rw", StoredFlagType.ReadWriteBoolean, 2));
-        baselines.add(new Baseline(2, "enabled_rw", StoredFlagType.ReadWriteBoolean, 1));
-        baselines.add(new Baseline(1, "disabled_rw", StoredFlagType.ReadWriteBoolean, 0));
-        baselines.add(new Baseline(1, "enabled_fixed_ro", StoredFlagType.FixedReadOnlyBoolean, 1));
-        baselines.add(new Baseline(1, "enabled_ro", StoredFlagType.ReadOnlyBoolean, 2));
-        baselines.add(new Baseline(2, "enabled_fixed_ro", StoredFlagType.FixedReadOnlyBoolean, 0));
-        baselines.add(new Baseline(0, "disabled_rw", StoredFlagType.ReadWriteBoolean, 0));
-
-        try {
-            for (Baseline baseline : baselines) {
-                FlagReadContext context = AconfigStorageReadAPI.getFlagReadContext(
-                    flagMap, baseline.mPackageId,  baseline.mFlagName);
-                assertEquals(context.mFlagType, baseline.mFlagType);
-                assertEquals(context.mFlagIndex, baseline.mFlagIndex);
-            }
-        } catch (IOException ex) {
-            assertTrue(ex.toString(), false);
-        }
-    }
-
-    @Test
-    public void testNonExistFlagContextQuery() {
-        MappedByteBuffer flagMap = null;
-        try {
-            flagMap = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/maps/mockup.flag.map");
-        } catch(IOException ex){
-            assertTrue(ex.toString(), false);
-        }
-        assertTrue(flagMap!= null);
-
-        try {
-            FlagReadContext context = AconfigStorageReadAPI.getFlagReadContext(
-                flagMap, 0,  "unknown");
-            assertEquals(context.mFlagType, null);
-            assertEquals(context.mFlagIndex, -1);
-
-            context = AconfigStorageReadAPI.getFlagReadContext(
-                flagMap, 3,  "enabled_ro");
-            assertEquals(context.mFlagType, null);
-            assertEquals(context.mFlagIndex, -1);
-        } catch (IOException ex) {
-            assertTrue(ex.toString(), false);
-        }
-    }
-
-    @Test
-    public void testBooleanFlagValueQuery() {
-        MappedByteBuffer flagVal = null;
-        try {
-            flagVal = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/boot/mockup.val");
-        } catch (IOException ex) {
-            assertTrue(ex.toString(), false);
-        }
-        assertTrue(flagVal!= null);
-
-        boolean[] baselines = {false, true, true, false, true, true, true, true};
-        for (int i = 0; i < 8; ++i) {
-            try {
-                Boolean value = AconfigStorageReadAPI.getBooleanFlagValue(flagVal, i);
-                assertEquals(value, baselines[i]);
-            } catch (IOException ex) {
-                assertTrue(ex.toString(), false);
-            }
-        }
-    }
-
-    @Test
-    public void testInvalidBooleanFlagValueQuery() {
-        MappedByteBuffer flagVal = null;
-        try {
-            flagVal = AconfigStorageReadAPI.mapStorageFile(
-                mStorageDir + "/boot/mockup.val");
-        } catch (IOException ex) {
-            assertTrue(ex.toString(), false);
-        }
-        assertTrue(flagVal!= null);
-
-        try {
-            Boolean value = AconfigStorageReadAPI.getBooleanFlagValue(flagVal, 9);
-            assertTrue("should throw", false);
-        } catch (IOException ex) {
-            String expectedErrmsg = "invalid storage file byte offset";
-            assertTrue(ex.toString(), ex.toString().contains(expectedErrmsg));
-        }
-    }
- }
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp b/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp
deleted file mode 100644
index 11b3824..0000000
--- a/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp
+++ /dev/null
@@ -1,22 +0,0 @@
-android_test {
-    name: "aconfig_storage_read_api.test.java",
-    srcs: ["./**/*.java"],
-    static_libs: [
-        "aconfig_storage_reader_java",
-        "androidx.test.rules",
-        "libaconfig_storage_read_api_java",
-        "junit",
-    ],
-    jni_libs: [
-        "libaconfig_storage_read_api_rust_jni",
-    ],
-    data: [
-        ":read_api_test_storage_files",
-    ],
-    platform_apis: true,
-    certificate: "platform",
-    test_suites: [
-        "general-tests",
-    ],
-    team: "trendy_team_android_core_experiments",
-}
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp
index 6d29045..5289faa 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp
+++ b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp
@@ -45,7 +45,8 @@
   }
 
   void SetUp() override {
-    auto const test_dir = android::base::GetExecutableDirectory();
+    auto const test_base_dir = android::base::GetExecutableDirectory();
+    auto const test_dir = test_base_dir + "/data/v1";
     storage_dir = std::string(root_dir.path);
     auto maps_dir = storage_dir + "/maps";
     auto boot_dir = storage_dir + "/boot";
@@ -55,10 +56,10 @@
     flag_map = std::string(maps_dir) + "/mockup.flag.map";
     flag_val = std::string(boot_dir) + "/mockup.val";
     flag_info = std::string(boot_dir) + "/mockup.info";
-    copy_file(test_dir + "/package.map", package_map);
-    copy_file(test_dir + "/flag.map", flag_map);
-    copy_file(test_dir + "/flag.val", flag_val);
-    copy_file(test_dir + "/flag.info", flag_info);
+    copy_file(test_dir + "/package_v1.map", package_map);
+    copy_file(test_dir + "/flag_v1.map", flag_map);
+    copy_file(test_dir + "/flag_v1.val", flag_val);
+    copy_file(test_dir + "/flag_v1.info", flag_info);
   }
 
   void TearDown() override {
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs
index afc44d4..5605a41 100644
--- a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs
+++ b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs
@@ -9,7 +9,7 @@
     use rand::Rng;
     use std::fs;
 
-    fn create_test_storage_files() -> String {
+    fn create_test_storage_files(version: u32) -> String {
         let mut rng = rand::thread_rng();
         let number: u32 = rng.gen();
         let storage_dir = String::from("/tmp/") + &number.to_string();
@@ -26,17 +26,17 @@
         let flag_map = storage_dir.clone() + "/maps/mockup.flag.map";
         let flag_val = storage_dir.clone() + "/boot/mockup.val";
         let flag_info = storage_dir.clone() + "/boot/mockup.info";
-        fs::copy("./package.map", package_map).unwrap();
-        fs::copy("./flag.map", flag_map).unwrap();
-        fs::copy("./flag.val", flag_val).unwrap();
-        fs::copy("./flag.info", flag_info).unwrap();
+        fs::copy(format!("./data/v{0}/package_v{0}.map", version), package_map).unwrap();
+        fs::copy(format!("./data/v{0}/flag_v{0}.map", version), flag_map).unwrap();
+        fs::copy(format!("./data/v{0}/flag_v{0}.val", version), flag_val).unwrap();
+        fs::copy(format!("./data/v{0}/flag_v{0}.info", version), flag_info).unwrap();
 
         storage_dir
     }
 
     #[test]
-    fn test_unavailable_stoarge() {
-        let storage_dir = create_test_storage_files();
+    fn test_unavailable_storage() {
+        let storage_dir = create_test_storage_files(1);
         // SAFETY:
         // The safety here is ensured as the test process will not write to temp storage file
         let err = unsafe {
@@ -53,7 +53,7 @@
 
     #[test]
     fn test_package_context_query() {
-        let storage_dir = create_test_storage_files();
+        let storage_dir = create_test_storage_files(1);
         // SAFETY:
         // The safety here is ensured as the test process will not write to temp storage file
         let package_mapped_file = unsafe {
@@ -84,7 +84,7 @@
 
     #[test]
     fn test_none_exist_package_context_query() {
-        let storage_dir = create_test_storage_files();
+        let storage_dir = create_test_storage_files(1);
         // SAFETY:
         // The safety here is ensured as the test process will not write to temp storage file
         let package_mapped_file = unsafe {
@@ -99,7 +99,7 @@
 
     #[test]
     fn test_flag_context_query() {
-        let storage_dir = create_test_storage_files();
+        let storage_dir = create_test_storage_files(1);
         // SAFETY:
         // The safety here is ensured as the test process will not write to temp storage file
         let flag_mapped_file =
@@ -125,7 +125,7 @@
 
     #[test]
     fn test_none_exist_flag_context_query() {
-        let storage_dir = create_test_storage_files();
+        let storage_dir = create_test_storage_files(1);
         // SAFETY:
         // The safety here is ensured as the test process will not write to temp storage file
         let flag_mapped_file =
@@ -141,7 +141,7 @@
 
     #[test]
     fn test_boolean_flag_value_query() {
-        let storage_dir = create_test_storage_files();
+        let storage_dir = create_test_storage_files(1);
         // SAFETY:
         // The safety here is ensured as the test process will not write to temp storage file
         let flag_value_file =
@@ -155,7 +155,7 @@
 
     #[test]
     fn test_invalid_boolean_flag_value_query() {
-        let storage_dir = create_test_storage_files();
+        let storage_dir = create_test_storage_files(1);
         // SAFETY:
         // The safety here is ensured as the test process will not write to temp storage file
         let flag_value_file =
@@ -169,7 +169,7 @@
 
     #[test]
     fn test_flag_info_query() {
-        let storage_dir = create_test_storage_files();
+        let storage_dir = create_test_storage_files(1);
         // SAFETY:
         // The safety here is ensured as the test process will not write to temp storage file
         let flag_info_file =
@@ -186,7 +186,7 @@
 
     #[test]
     fn test_invalid_boolean_flag_info_query() {
-        let storage_dir = create_test_storage_files();
+        let storage_dir = create_test_storage_files(1);
         // SAFETY:
         // The safety here is ensured as the test process will not write to temp storage file
         let flag_info_file =
@@ -199,10 +199,18 @@
     }
 
     #[test]
-    fn test_storage_version_query() {
-        assert_eq!(get_storage_file_version("./package.map").unwrap(), 1);
-        assert_eq!(get_storage_file_version("./flag.map").unwrap(), 1);
-        assert_eq!(get_storage_file_version("./flag.val").unwrap(), 1);
-        assert_eq!(get_storage_file_version("./flag.info").unwrap(), 1);
+    fn test_storage_version_query_v1() {
+        assert_eq!(get_storage_file_version("./data/v1/package_v1.map").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./data/v1/flag_v1.map").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./data/v1/flag_v1.val").unwrap(), 1);
+        assert_eq!(get_storage_file_version("./data/v1/flag_v1.info").unwrap(), 1);
+    }
+
+    #[test]
+    fn test_storage_version_query_v2() {
+        assert_eq!(get_storage_file_version("./data/v2/package_v2.map").unwrap(), 2);
+        assert_eq!(get_storage_file_version("./data/v2/flag_v2.map").unwrap(), 2);
+        assert_eq!(get_storage_file_version("./data/v2/flag_v2.val").unwrap(), 2);
+        assert_eq!(get_storage_file_version("./data/v2/flag_v2.info").unwrap(), 2);
     }
 }
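
For reference, the paths in `create_test_storage_files` above are built with Rust's indexed format arguments: `{0}` refers back to the first argument, so the same `version` value fills both the directory name and the file name. A minimal, self-contained sketch of that pattern, assuming only the standard library (`versioned_paths` is a hypothetical helper, not part of this change):

```rust
// Sketch only: shows how indexed format arguments expand versioned test-data paths.
fn versioned_paths(version: u32) -> (String, String, String, String) {
    (
        format!("./data/v{0}/package_v{0}.map", version),
        format!("./data/v{0}/flag_v{0}.map", version),
        format!("./data/v{0}/flag_v{0}.val", version),
        format!("./data/v{0}/flag_v{0}.info", version),
    )
}

fn main() {
    let (package_map, flag_map, flag_val, flag_info) = versioned_paths(2);
    assert_eq!(package_map, "./data/v2/package_v2.map");
    assert_eq!(flag_map, "./data/v2/flag_v2.map");
    assert_eq!(flag_val, "./data/v2/flag_v2.val");
    assert_eq!(flag_info, "./data/v2/flag_v2.info");
}
```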
diff --git a/tools/aconfig/aconfig_storage_read_api/tests/unit/srcs/PlatformAconfigPackageInternalTest.java b/tools/aconfig/aconfig_storage_read_api/tests/unit/srcs/PlatformAconfigPackageInternalTest.java
new file mode 100644
index 0000000..392f644
--- /dev/null
+++ b/tools/aconfig/aconfig_storage_read_api/tests/unit/srcs/PlatformAconfigPackageInternalTest.java
@@ -0,0 +1,188 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.aconfig.storage.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import android.aconfig.storage.AconfigStorageException;
+import android.aconfig.storage.PackageTable;
+import android.aconfig.storage.StorageFileProvider;
+import android.os.flagging.PlatformAconfigPackageInternal;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public class PlatformAconfigPackageInternalTest {
+
+    public static final String TESTDATA_PATH =
+            "/data/local/tmp/aconfig_storage_read_unit/testdata/";
+
+    private StorageFileProvider pr;
+
+    @Before
+    public void setup() {
+        pr = new StorageFileProvider(TESTDATA_PATH, TESTDATA_PATH);
+    }
+
+    @Test
+    public void testLoad_container_package() throws Exception {
+        PlatformAconfigPackageInternal p =
+                PlatformAconfigPackageInternal.load(
+                        "mockup", "com.android.aconfig.storage.test_1", pr);
+        assertNull(p.getException());
+    }
+
+    @Test
+    public void testLoad_container_package_error() throws Exception {
+        // cannot find package
+        PlatformAconfigPackageInternal p =
+                PlatformAconfigPackageInternal.load(
+                        "mockup", "com.android.aconfig.storage.test_10", pr);
+
+        assertEquals(
+                AconfigStorageException.ERROR_PACKAGE_NOT_FOUND,
+                ((AconfigStorageException) p.getException()).getErrorCode());
+
+        // cannot find container
+        p = PlatformAconfigPackageInternal.load(null, "com.android.aconfig.storage.test_1", pr);
+        assertEquals(
+                AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
+                ((AconfigStorageException) p.getException()).getErrorCode());
+        p = PlatformAconfigPackageInternal.load("test", "com.android.aconfig.storage.test_1", pr);
+        assertEquals(
+                AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
+                ((AconfigStorageException) p.getException()).getErrorCode());
+
+        // new storage doesn't exist
+        pr = new StorageFileProvider("fake/path/", "fake/path/");
+        p = PlatformAconfigPackageInternal.load("mockup", "com.android.aconfig.storage.test_1", pr);
+        assertEquals(
+                AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
+                ((AconfigStorageException) p.getException()).getErrorCode());
+
+        // file read issue
+        pr = new StorageFileProvider(TESTDATA_PATH, "fake/path/");
+        p = PlatformAconfigPackageInternal.load("mockup", "com.android.aconfig.storage.test_1", pr);
+        assertEquals(
+                AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
+                ((AconfigStorageException) p.getException()).getErrorCode());
+    }
+
+    @Test
+    public void testLoad_container_package_fingerprint() throws Exception {
+        PackageTable packageTable = pr.getPackageTable("mockup");
+
+        PackageTable.Node node1 = packageTable.get("com.android.aconfig.storage.test_1");
+
+        long fingerprint = node1.getPackageFingerprint();
+        PlatformAconfigPackageInternal p =
+                PlatformAconfigPackageInternal.load(
+                        "mockup", "com.android.aconfig.storage.test_1", fingerprint, pr);
+        assertNull(p.getException());
+    }
+
+    @Test
+    public void testLoad_container_package_fingerprint_error() throws Exception {
+
+        PackageTable packageTable = pr.getPackageTable("mockup");
+
+        PackageTable.Node node1 = packageTable.get("com.android.aconfig.storage.test_1");
+
+        long fingerprint = node1.getPackageFingerprint();
+
+        // cannot find package
+        PlatformAconfigPackageInternal p =
+                PlatformAconfigPackageInternal.load(
+                        "mockup", "com.android.aconfig.storage.test_10", fingerprint, pr);
+
+        assertEquals(
+                AconfigStorageException.ERROR_PACKAGE_NOT_FOUND,
+                ((AconfigStorageException) p.getException()).getErrorCode());
+
+        // cannot find container
+        p =
+                PlatformAconfigPackageInternal.load(
+                        null, "com.android.aconfig.storage.test_1", fingerprint, pr);
+        assertEquals(
+                AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
+                ((AconfigStorageException) p.getException()).getErrorCode());
+        p =
+                PlatformAconfigPackageInternal.load(
+                        "test", "com.android.aconfig.storage.test_1", fingerprint, pr);
+        assertEquals(
+                AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
+                ((AconfigStorageException) p.getException()).getErrorCode());
+        // fingerprint doesn't match
+        p =
+                PlatformAconfigPackageInternal.load(
+                        "mockup", "com.android.aconfig.storage.test_1", fingerprint + 1, pr);
+        assertEquals(
+                // AconfigStorageException.ERROR_FILE_FINGERPRINT_MISMATCH,
+                5, ((AconfigStorageException) p.getException()).getErrorCode());
+
+        // new storage doesn't exist
+        pr = new StorageFileProvider("fake/path/", "fake/path/");
+        p =
+                PlatformAconfigPackageInternal.load(
+                        "mockup", "com.android.aconfig.storage.test_1", fingerprint, pr);
+        assertEquals(
+                AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
+                ((AconfigStorageException) p.getException()).getErrorCode());
+
+        // file read issue
+        pr = new StorageFileProvider(TESTDATA_PATH, "fake/path/");
+        p =
+                PlatformAconfigPackageInternal.load(
+                        "mockup", "com.android.aconfig.storage.test_1", fingerprint, pr);
+        assertEquals(
+                AconfigStorageException.ERROR_CANNOT_READ_STORAGE_FILE,
+                ((AconfigStorageException) p.getException()).getErrorCode());
+    }
+
+    @Test
+    public void testGetBooleanFlagValue_flagName() throws Exception {
+        PlatformAconfigPackageInternal p =
+                PlatformAconfigPackageInternal.load(
+                        "mockup", "com.android.aconfig.storage.test_1", pr);
+        assertFalse(p.getBooleanFlagValue("disabled_rw", true));
+        assertTrue(p.getBooleanFlagValue("enabled_ro", false));
+        assertTrue(p.getBooleanFlagValue("enabled_rw", false));
+        assertFalse(p.getBooleanFlagValue("fake", false));
+    }
+
+    @Test
+    public void testGetBooleanFlagValue_index() throws Exception {
+
+        PackageTable packageTable = pr.getPackageTable("mockup");
+
+        PackageTable.Node node1 = packageTable.get("com.android.aconfig.storage.test_1");
+
+        long fingerprint = node1.getPackageFingerprint();
+        PlatformAconfigPackageInternal p =
+                PlatformAconfigPackageInternal.load(
+                        "mockup", "com.android.aconfig.storage.test_1", fingerprint, pr);
+        assertFalse(p.getBooleanFlagValue(0));
+        assertTrue(p.getBooleanFlagValue(1));
+        assertTrue(p.getBooleanFlagValue(2));
+    }
+}
diff --git a/tools/aconfig/aconfig_storage_write_api/Android.bp b/tools/aconfig/aconfig_storage_write_api/Android.bp
index 0f1962c..4c882b4 100644
--- a/tools/aconfig/aconfig_storage_write_api/Android.bp
+++ b/tools/aconfig/aconfig_storage_write_api/Android.bp
@@ -16,6 +16,11 @@
         "libaconfig_storage_file",
         "libaconfig_storage_read_api",
     ],
+    min_sdk_version: "34",
+    apex_available: [
+        "//apex_available:anyapex",
+        "//apex_available:platform",
+    ],
 }
 
 rust_library {
diff --git a/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp b/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp
index 7b43574..03a8fa2 100644
--- a/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp
+++ b/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp
@@ -100,18 +100,4 @@
   return {};
 }
 
-android::base::Result<void> create_flag_info(
-    std::string const& package_map,
-    std::string const& flag_map,
-    std::string const& flag_info_out) {
-  auto creation_cxx = create_flag_info_cxx(
-      rust::Str(package_map.c_str()),
-      rust::Str(flag_map.c_str()),
-      rust::Str(flag_info_out.c_str()));
-  if (creation_cxx.success) {
-    return {};
-  } else {
-    return android::base::Error() << creation_cxx.error_message;
-  }
-}
 } // namespace aconfig_storage
diff --git a/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp b/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp
index 0bba7ff..50a5188 100644
--- a/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp
+++ b/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp
@@ -36,13 +36,4 @@
     uint32_t offset,
     bool value);
 
-/// Create flag info file based on package and flag map
-/// \input package_map: package map file
-/// \input flag_map: flag map file
-/// \input flag_info_out: flag info file to be created
-android::base::Result<void> create_flag_info(
-    std::string const& package_map,
-    std::string const& flag_map,
-    std::string const& flag_info_out);
-
 } // namespace aconfig_storage
diff --git a/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs b/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs
index 7e60713..5721105 100644
--- a/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs
+++ b/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs
@@ -18,7 +18,7 @@
 
 use aconfig_storage_file::{
     read_u8_from_bytes, AconfigStorageError, FlagInfoBit, FlagInfoHeader, FlagValueType,
-    FILE_VERSION,
+    MAX_SUPPORTED_FILE_VERSION,
 };
 use anyhow::anyhow;
 
@@ -28,11 +28,11 @@
     flag_index: u32,
 ) -> Result<usize, AconfigStorageError> {
     let interpreted_header = FlagInfoHeader::from_bytes(buf)?;
-    if interpreted_header.version > FILE_VERSION {
+    if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
             "Cannot write to storage file with a higher version of {} with lib version {}",
             interpreted_header.version,
-            FILE_VERSION
+            MAX_SUPPORTED_FILE_VERSION
         )));
     }
 
@@ -94,13 +94,13 @@
 #[cfg(test)]
 mod tests {
     use super::*;
-    use aconfig_storage_file::test_utils::create_test_flag_info_list;
+    use aconfig_storage_file::{test_utils::create_test_flag_info_list, DEFAULT_FILE_VERSION};
     use aconfig_storage_read_api::flag_info_query::find_flag_attribute;
 
     #[test]
     // this test point locks down has server override update
     fn test_update_flag_has_server_override() {
-        let flag_info_list = create_test_flag_info_list();
+        let flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION);
         let mut buf = flag_info_list.into_bytes();
         for i in 0..flag_info_list.header.num_flags {
             update_flag_has_server_override(&mut buf, FlagValueType::Boolean, i, true).unwrap();
@@ -115,7 +115,7 @@
     #[test]
     // this test point locks down has local override update
     fn test_update_flag_has_local_override() {
-        let flag_info_list = create_test_flag_info_list();
+        let flag_info_list = create_test_flag_info_list(DEFAULT_FILE_VERSION);
         let mut buf = flag_info_list.into_bytes();
         for i in 0..flag_info_list.header.num_flags {
             update_flag_has_local_override(&mut buf, FlagValueType::Boolean, i, true).unwrap();
diff --git a/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs b/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs
index dd15c99..9772db9 100644
--- a/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs
+++ b/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs
@@ -16,7 +16,7 @@
 
 //! flag value update module defines the flag value file write to mapped bytes
 
-use aconfig_storage_file::{AconfigStorageError, FlagValueHeader, FILE_VERSION};
+use aconfig_storage_file::{AconfigStorageError, FlagValueHeader, MAX_SUPPORTED_FILE_VERSION};
 use anyhow::anyhow;
 
 /// Set flag value
@@ -26,11 +26,11 @@
     flag_value: bool,
 ) -> Result<usize, AconfigStorageError> {
     let interpreted_header = FlagValueHeader::from_bytes(buf)?;
-    if interpreted_header.version > FILE_VERSION {
+    if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION {
         return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!(
             "Cannot write to storage file with a higher version of {} with lib version {}",
             interpreted_header.version,
-            FILE_VERSION
+            MAX_SUPPORTED_FILE_VERSION
         )));
     }
 
@@ -49,12 +49,12 @@
 #[cfg(test)]
 mod tests {
     use super::*;
-    use aconfig_storage_file::test_utils::create_test_flag_value_list;
+    use aconfig_storage_file::{test_utils::create_test_flag_value_list, DEFAULT_FILE_VERSION};
 
     #[test]
     // this test point locks down flag value update
     fn test_boolean_flag_value_update() {
-        let flag_value_list = create_test_flag_value_list();
+        let flag_value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION);
         let value_offset = flag_value_list.header.boolean_value_offset;
         let mut content = flag_value_list.into_bytes();
         let true_byte = u8::from(true).to_le_bytes()[0];
@@ -72,7 +72,7 @@
     #[test]
     // this test point locks down update beyond the end of boolean section
     fn test_boolean_out_of_range() {
-        let mut flag_value_list = create_test_flag_value_list().into_bytes();
+        let mut flag_value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION).into_bytes();
         let error = update_boolean_flag_value(&mut flag_value_list[..], 8, true).unwrap_err();
         assert_eq!(
             format!("{:?}", error),
@@ -83,16 +83,16 @@
     #[test]
     // this test point locks down query error when file has a higher version
     fn test_higher_version_storage_file() {
-        let mut value_list = create_test_flag_value_list();
-        value_list.header.version = FILE_VERSION + 1;
+        let mut value_list = create_test_flag_value_list(DEFAULT_FILE_VERSION);
+        value_list.header.version = MAX_SUPPORTED_FILE_VERSION + 1;
         let mut flag_value = value_list.into_bytes();
         let error = update_boolean_flag_value(&mut flag_value[..], 4, true).unwrap_err();
         assert_eq!(
             format!("{:?}", error),
             format!(
                 "HigherStorageFileVersion(Cannot write to storage file with a higher version of {} with lib version {})",
-                FILE_VERSION + 1,
-                FILE_VERSION
+                MAX_SUPPORTED_FILE_VERSION + 1,
+                MAX_SUPPORTED_FILE_VERSION
             )
         );
     }
diff --git a/tools/aconfig/aconfig_storage_write_api/src/lib.rs b/tools/aconfig/aconfig_storage_write_api/src/lib.rs
index 0396a63..09bb41f 100644
--- a/tools/aconfig/aconfig_storage_write_api/src/lib.rs
+++ b/tools/aconfig/aconfig_storage_write_api/src/lib.rs
@@ -24,15 +24,10 @@
 #[cfg(test)]
 mod test_utils;
 
-use aconfig_storage_file::{
-    AconfigStorageError, FlagInfoHeader, FlagInfoList, FlagInfoNode, FlagTable, FlagValueType,
-    PackageTable, StorageFileType, StoredFlagType, FILE_VERSION,
-};
+use aconfig_storage_file::{AconfigStorageError, FlagValueType};
 
 use anyhow::anyhow;
 use memmap2::MmapMut;
-use std::fs::File;
-use std::io::{Read, Write};
 
 /// Get read write mapped storage files.
 ///
@@ -104,86 +99,6 @@
     })
 }
 
-/// Read in storage file as bytes
-fn read_file_to_bytes(file_path: &str) -> Result<Vec<u8>, AconfigStorageError> {
-    let mut file = File::open(file_path).map_err(|errmsg| {
-        AconfigStorageError::FileReadFail(anyhow!("Failed to open file {}: {}", file_path, errmsg))
-    })?;
-    let mut buffer = Vec::new();
-    file.read_to_end(&mut buffer).map_err(|errmsg| {
-        AconfigStorageError::FileReadFail(anyhow!(
-            "Failed to read bytes from file {}: {}",
-            file_path,
-            errmsg
-        ))
-    })?;
-    Ok(buffer)
-}
-
-/// Create flag info file given package map file and flag map file
-/// \input package_map: package map file
-/// \input flag_map: flag map file
-/// \output flag_info_out: created flag info file
-pub fn create_flag_info(
-    package_map: &str,
-    flag_map: &str,
-    flag_info_out: &str,
-) -> Result<(), AconfigStorageError> {
-    let package_table = PackageTable::from_bytes(&read_file_to_bytes(package_map)?)?;
-    let flag_table = FlagTable::from_bytes(&read_file_to_bytes(flag_map)?)?;
-
-    if package_table.header.container != flag_table.header.container {
-        return Err(AconfigStorageError::FileCreationFail(anyhow!(
-            "container for package map {} and flag map {} does not match",
-            package_table.header.container,
-            flag_table.header.container,
-        )));
-    }
-
-    let mut package_start_index = vec![0; package_table.header.num_packages as usize];
-    for node in package_table.nodes.iter() {
-        package_start_index[node.package_id as usize] = node.boolean_start_index;
-    }
-
-    let mut is_flag_rw = vec![false; flag_table.header.num_flags as usize];
-    for node in flag_table.nodes.iter() {
-        let flag_index = package_start_index[node.package_id as usize] + node.flag_index as u32;
-        is_flag_rw[flag_index as usize] = node.flag_type == StoredFlagType::ReadWriteBoolean;
-    }
-
-    let mut list = FlagInfoList {
-        header: FlagInfoHeader {
-            version: FILE_VERSION,
-            container: flag_table.header.container,
-            file_type: StorageFileType::FlagInfo as u8,
-            file_size: 0,
-            num_flags: flag_table.header.num_flags,
-            boolean_flag_offset: 0,
-        },
-        nodes: is_flag_rw.iter().map(|&rw| FlagInfoNode::create(rw)).collect(),
-    };
-
-    list.header.boolean_flag_offset = list.header.into_bytes().len() as u32;
-    list.header.file_size = list.into_bytes().len() as u32;
-
-    let mut file = File::create(flag_info_out).map_err(|errmsg| {
-        AconfigStorageError::FileCreationFail(anyhow!(
-            "fail to create file {}: {}",
-            flag_info_out,
-            errmsg
-        ))
-    })?;
-    file.write_all(&list.into_bytes()).map_err(|errmsg| {
-        AconfigStorageError::FileCreationFail(anyhow!(
-            "fail to write to file {}: {}",
-            flag_info_out,
-            errmsg
-        ))
-    })?;
-
-    Ok(())
-}
-
 // *************************************** //
 // CC INTERLOP
 // *************************************** //
@@ -212,12 +127,6 @@
         pub error_message: String,
     }
 
-    // Flag info file creation return for cc interlop
-    pub struct FlagInfoCreationCXX {
-        pub success: bool,
-        pub error_message: String,
-    }
-
     // Rust export to c++
     extern "Rust" {
         pub fn update_boolean_flag_value_cxx(
@@ -239,12 +148,6 @@
             offset: u32,
             value: bool,
         ) -> FlagHasLocalOverrideUpdateCXX;
-
-        pub fn create_flag_info_cxx(
-            package_map: &str,
-            flag_map: &str,
-            flag_info_out: &str,
-        ) -> FlagInfoCreationCXX;
     }
 }
 
@@ -329,34 +232,15 @@
     }
 }
 
-/// Create flag info file cc interlop
-pub(crate) fn create_flag_info_cxx(
-    package_map: &str,
-    flag_map: &str,
-    flag_info_out: &str,
-) -> ffi::FlagInfoCreationCXX {
-    match create_flag_info(package_map, flag_map, flag_info_out) {
-        Ok(()) => ffi::FlagInfoCreationCXX { success: true, error_message: String::from("") },
-        Err(errmsg) => {
-            ffi::FlagInfoCreationCXX { success: false, error_message: format!("{:?}", errmsg) }
-        }
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
     use crate::test_utils::copy_to_temp_file;
-    use aconfig_storage_file::test_utils::{
-        create_test_flag_info_list, create_test_flag_table, create_test_package_table,
-        write_bytes_to_temp_file,
-    };
     use aconfig_storage_file::FlagInfoBit;
     use aconfig_storage_read_api::flag_info_query::find_flag_attribute;
     use aconfig_storage_read_api::flag_value_query::find_boolean_flag_value;
     use std::fs::File;
     use std::io::Read;
-    use tempfile::NamedTempFile;
 
     fn get_boolean_flag_value_at_offset(file: &str, offset: u32) -> bool {
         let mut f = File::open(&file).unwrap();
@@ -439,31 +323,4 @@
             }
         }
     }
-
-    fn create_empty_temp_file() -> Result<NamedTempFile, AconfigStorageError> {
-        let file = NamedTempFile::new().map_err(|_| {
-            AconfigStorageError::FileCreationFail(anyhow!("Failed to create temp file"))
-        })?;
-        Ok(file)
-    }
-
-    #[test]
-    // this test point locks down the flag info creation
-    fn test_create_flag_info() {
-        let package_table =
-            write_bytes_to_temp_file(&create_test_package_table().into_bytes()).unwrap();
-        let flag_table = write_bytes_to_temp_file(&create_test_flag_table().into_bytes()).unwrap();
-        let flag_info = create_empty_temp_file().unwrap();
-
-        let package_table_path = package_table.path().display().to_string();
-        let flag_table_path = flag_table.path().display().to_string();
-        let flag_info_path = flag_info.path().display().to_string();
-
-        assert!(create_flag_info(&package_table_path, &flag_table_path, &flag_info_path).is_ok());
-
-        let flag_info =
-            FlagInfoList::from_bytes(&read_file_to_bytes(&flag_info_path).unwrap()).unwrap();
-        let expected_flag_info = create_test_flag_info_list();
-        assert_eq!(flag_info, expected_flag_info);
-    }
 }
diff --git a/tools/aconfig/aflags/Android.bp b/tools/aconfig/aflags/Android.bp
index 2a02379..a7aceee 100644
--- a/tools/aconfig/aflags/Android.bp
+++ b/tools/aconfig/aflags/Android.bp
@@ -10,7 +10,9 @@
     srcs: ["src/main.rs"],
     rustlibs: [
         "libaconfig_device_paths",
+        "libaconfig_flags",
         "libaconfig_protos",
+        "libaconfigd_protos_rust",
         "libaconfig_storage_read_api",
         "libaconfig_storage_file",
         "libanyhow",
@@ -18,11 +20,16 @@
         "libnix",
         "libprotobuf",
         "libregex",
+        // TODO: b/371021174 remove this fake dependency once we find a proper strategy to
+        // deal with test aconfig libs not being present in storage, since they are never
+        // used by the actual build.
+        "libaconfig_test_rust_library",
     ],
 }
 
 rust_binary {
     name: "aflags",
+    host_supported: true,
     defaults: ["aflags.defaults"],
 }
 
diff --git a/tools/aconfig/aflags/Cargo.toml b/tools/aconfig/aflags/Cargo.toml
index eeae295..d31e232 100644
--- a/tools/aconfig/aflags/Cargo.toml
+++ b/tools/aconfig/aflags/Cargo.toml
@@ -9,8 +9,10 @@
 protobuf = "3.2.0"
 regex = "1.10.3"
 aconfig_protos = { path = "../aconfig_protos" }
+aconfigd_protos = { version = "0.1.0", path = "../../../../../packages/modules/ConfigInfrastructure/aconfigd/proto"}
 nix = { version = "0.28.0", features = ["user"] }
 aconfig_storage_file = { version = "0.1.0", path = "../aconfig_storage_file" }
 aconfig_storage_read_api = { version = "0.1.0", path = "../aconfig_storage_read_api" }
 clap = {version = "4.5.2" }
 aconfig_device_paths = { version = "0.1.0", path = "../aconfig_device_paths" }
+aconfig_flags = { version = "0.1.0", path = "../aconfig_flags" }
diff --git a/tools/aconfig/aflags/src/aconfig_storage_source.rs b/tools/aconfig/aflags/src/aconfig_storage_source.rs
index 04140c7..3f593fe 100644
--- a/tools/aconfig/aflags/src/aconfig_storage_source.rs
+++ b/tools/aconfig/aflags/src/aconfig_storage_source.rs
@@ -1,52 +1,146 @@
-use crate::{Flag, FlagPermission, FlagSource, FlagValue, ValuePickedFrom};
-use anyhow::{anyhow, Result};
-
-use std::fs::File;
-use std::io::Read;
+use crate::load_protos;
+use crate::{Flag, FlagSource};
+use crate::{FlagPermission, FlagValue, ValuePickedFrom};
+use aconfigd_protos::{
+    ProtoFlagQueryReturnMessage, ProtoListStorageMessage, ProtoListStorageMessageMsg,
+    ProtoStorageRequestMessage, ProtoStorageRequestMessageMsg, ProtoStorageRequestMessages,
+    ProtoStorageReturnMessage, ProtoStorageReturnMessageMsg, ProtoStorageReturnMessages,
+};
+use anyhow::anyhow;
+use anyhow::Result;
+use protobuf::Message;
+use protobuf::SpecialFields;
+use std::collections::HashMap;
+use std::io::{Read, Write};
+use std::net::Shutdown;
+use std::os::unix::net::UnixStream;
 
 pub struct AconfigStorageSource {}
 
-use aconfig_storage_file::protos::ProtoStorageFiles;
+fn load_flag_to_container() -> Result<HashMap<String, String>> {
+    Ok(load_protos::load()?.into_iter().map(|p| (p.qualified_name(), p.container)).collect())
+}
 
-static STORAGE_INFO_FILE_PATH: &str = "/metadata/aconfig/persistent_storage_file_records.pb";
+fn convert(msg: ProtoFlagQueryReturnMessage, containers: &HashMap<String, String>) -> Result<Flag> {
+    let (value, value_picked_from) = match (
+        &msg.boot_flag_value,
+        msg.default_flag_value,
+        msg.local_flag_value,
+        msg.has_local_override,
+    ) {
+        (_, _, Some(local), Some(has_local)) if has_local => {
+            (FlagValue::try_from(local.as_str())?, ValuePickedFrom::Local)
+        }
+        (Some(boot), Some(default), _, _) => {
+            let value = FlagValue::try_from(boot.as_str())?;
+            if *boot == default {
+                (value, ValuePickedFrom::Default)
+            } else {
+                (value, ValuePickedFrom::Server)
+            }
+        }
+        _ => return Err(anyhow!("missing override")),
+    };
+
+    let staged_value = match (msg.boot_flag_value, msg.server_flag_value, msg.has_server_override) {
+        (Some(boot), Some(server), _) if boot == server => None,
+        (Some(boot), Some(server), Some(has_server)) if boot != server && has_server => {
+            Some(FlagValue::try_from(server.as_str())?)
+        }
+        _ => None,
+    };
+
+    let permission = match msg.is_readwrite {
+        Some(is_readwrite) => {
+            if is_readwrite {
+                FlagPermission::ReadWrite
+            } else {
+                FlagPermission::ReadOnly
+            }
+        }
+        None => return Err(anyhow!("missing permission")),
+    };
+
+    let name = msg.flag_name.ok_or(anyhow!("missing flag name"))?;
+    let package = msg.package_name.ok_or(anyhow!("missing package name"))?;
+    let qualified_name = format!("{package}.{name}");
+    Ok(Flag {
+        name,
+        package,
+        value,
+        permission,
+        value_picked_from,
+        staged_value,
+        container: containers
+            .get(&qualified_name)
+            .cloned()
+            .unwrap_or_else(|| "<no container>".to_string()),
+        // TODO: remove once DeviceConfig is not in the CLI.
+        namespace: "-".to_string(),
+    })
+}
+
+fn read_from_socket() -> Result<Vec<ProtoFlagQueryReturnMessage>> {
+    let messages = ProtoStorageRequestMessages {
+        msgs: vec![ProtoStorageRequestMessage {
+            msg: Some(ProtoStorageRequestMessageMsg::ListStorageMessage(ProtoListStorageMessage {
+                msg: Some(ProtoListStorageMessageMsg::All(true)),
+                special_fields: SpecialFields::new(),
+            })),
+            special_fields: SpecialFields::new(),
+        }],
+        special_fields: SpecialFields::new(),
+    };
+
+    let socket_name = if aconfig_flags::auto_generated::enable_system_aconfigd_rust() {
+        "/dev/socket/aconfigd_system"
+    } else {
+        "/dev/socket/aconfigd"
+    };
+    let mut socket = UnixStream::connect(socket_name)?;
+
+    let message_buffer = messages.write_to_bytes()?;
+    let mut message_length_buffer: [u8; 4] = [0; 4];
+    let message_size = &message_buffer.len();
+    message_length_buffer[0] = (message_size >> 24) as u8;
+    message_length_buffer[1] = (message_size >> 16) as u8;
+    message_length_buffer[2] = (message_size >> 8) as u8;
+    message_length_buffer[3] = *message_size as u8;
+    socket.write_all(&message_length_buffer)?;
+    socket.write_all(&message_buffer)?;
+    socket.shutdown(Shutdown::Write)?;
+
+    let mut response_length_buffer: [u8; 4] = [0; 4];
+    socket.read_exact(&mut response_length_buffer)?;
+    let response_length = u32::from_be_bytes(response_length_buffer) as usize;
+    let mut response_buffer = vec![0; response_length];
+    socket.read_exact(&mut response_buffer)?;
+
+    let response: ProtoStorageReturnMessages =
+        protobuf::Message::parse_from_bytes(&response_buffer)?;
+
+    match response.msgs.as_slice() {
+        [ProtoStorageReturnMessage {
+            msg: Some(ProtoStorageReturnMessageMsg::ListStorageMessage(list_storage_message)),
+            ..
+        }] => Ok(list_storage_message.flags.clone()),
+        _ => Err(anyhow!("unexpected response from aconfigd")),
+    }
+}
 
 impl FlagSource for AconfigStorageSource {
     fn list_flags() -> Result<Vec<Flag>> {
-        let mut result = Vec::new();
-
-        let mut file = File::open(STORAGE_INFO_FILE_PATH)?;
-        let mut bytes = Vec::new();
-        file.read_to_end(&mut bytes)?;
-        let storage_file_info: ProtoStorageFiles = protobuf::Message::parse_from_bytes(&bytes)?;
-
-        for file_info in storage_file_info.files {
-            let package_map =
-                file_info.package_map.ok_or(anyhow!("storage file is missing package map"))?;
-            let flag_map = file_info.flag_map.ok_or(anyhow!("storage file is missing flag map"))?;
-            let flag_val = file_info.flag_val.ok_or(anyhow!("storage file is missing flag val"))?;
-            let container =
-                file_info.container.ok_or(anyhow!("storage file is missing container"))?;
-
-            for listed_flag in aconfig_storage_file::list_flags(&package_map, &flag_map, &flag_val)?
-            {
-                result.push(Flag {
-                    name: listed_flag.flag_name,
-                    package: listed_flag.package_name,
-                    value: FlagValue::try_from(listed_flag.flag_value.as_str())?,
-                    container: container.to_string(),
-
-                    // TODO(b/324436145): delete namespace field once DeviceConfig isn't in CLI.
-                    namespace: "-".to_string(),
-
-                    // TODO(b/324436145): Populate with real values once API is available.
-                    staged_value: None,
-                    permission: FlagPermission::ReadOnly,
-                    value_picked_from: ValuePickedFrom::Default,
-                });
-            }
-        }
-
-        Ok(result)
+        let containers = load_flag_to_container()?;
+        read_from_socket()
+            .map(|query_messages| {
+                query_messages
+                    .iter()
+                    .map(|message| convert(message.clone(), &containers))
+                    .collect::<Vec<_>>()
+            })?
+            .into_iter()
+            .collect()
     }
 
     fn override_flag(_namespace: &str, _qualified_name: &str, _value: &str) -> Result<()> {
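
For reference, `read_from_socket` above frames each aconfigd request with a 4-byte big-endian length prefix: the send path packs the length with manual shifts, while the receive path decodes it with `u32::from_be_bytes`. A minimal, self-contained sketch of equivalent framing that uses `u32::to_be_bytes` on the send side as well, assuming only the standard library (`frame_request` and `read_frame_length` are hypothetical helpers, not part of this change):

```rust
// Sketch only: length-prefix framing equivalent to the hand-rolled shifts above.
fn frame_request(payload: &[u8]) -> Vec<u8> {
    // 4-byte big-endian length, followed by the serialized payload bytes.
    let mut framed = (payload.len() as u32).to_be_bytes().to_vec();
    framed.extend_from_slice(payload);
    framed
}

fn read_frame_length(prefix: [u8; 4]) -> usize {
    // Mirrors the receive path, which also expects a big-endian length.
    u32::from_be_bytes(prefix) as usize
}

fn main() {
    let payload = b"example".to_vec();
    let framed = frame_request(&payload);
    assert_eq!(framed.len(), 4 + payload.len());

    let mut prefix = [0u8; 4];
    prefix.copy_from_slice(&framed[..4]);
    assert_eq!(read_frame_length(prefix), payload.len());
}
```

Either form produces the same wire bytes; the symmetric version simply mirrors the decoding already used on the read path.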
diff --git a/tools/aconfig/aflags/src/load_protos.rs b/tools/aconfig/aflags/src/load_protos.rs
index 90d8599..c5ac8ff 100644
--- a/tools/aconfig/aflags/src/load_protos.rs
+++ b/tools/aconfig/aflags/src/load_protos.rs
@@ -51,7 +51,10 @@
 
     let paths = aconfig_device_paths::parsed_flags_proto_paths()?;
     for path in paths {
-        let bytes = fs::read(path.clone())?;
+        let Ok(bytes) = fs::read(&path) else {
+            eprintln!("warning: failed to read {:?}", path);
+            continue;
+        };
         let parsed_flags: ProtoParsedFlags = protobuf::Message::parse_from_bytes(&bytes)?;
         for flag in parsed_flags.parsed_flag {
             // TODO(b/334954748): enforce one-container-per-flag invariant.
@@ -60,3 +63,10 @@
     }
     Ok(result)
 }
+
+pub(crate) fn list_containers() -> Result<Vec<String>> {
+    Ok(aconfig_device_paths::parsed_flags_proto_paths()?
+        .into_iter()
+        .map(|p| infer_container(&p))
+        .collect())
+}
diff --git a/tools/aconfig/aflags/src/main.rs b/tools/aconfig/aflags/src/main.rs
index 810f2e3..8173bc2 100644
--- a/tools/aconfig/aflags/src/main.rs
+++ b/tools/aconfig/aflags/src/main.rs
@@ -50,6 +50,7 @@
 enum ValuePickedFrom {
     Default,
     Server,
+    Local,
 }
 
 impl std::fmt::Display for ValuePickedFrom {
@@ -60,6 +61,7 @@
             match &self {
                 Self::Default => "default",
                 Self::Server => "server",
+                Self::Local => "local",
             }
         )
     }
@@ -114,9 +116,10 @@
     }
 
     fn display_staged_value(&self) -> String {
-        match self.staged_value {
-            Some(v) => format!("(->{})", v),
-            None => "-".to_string(),
+        match (&self.permission, self.staged_value) {
+            (FlagPermission::ReadOnly, _) => "-".to_string(),
+            (FlagPermission::ReadWrite, None) => "-".to_string(),
+            (FlagPermission::ReadWrite, Some(v)) => format!("(->{})", v),
         }
     }
 }
@@ -162,10 +165,6 @@
 enum Command {
     /// List all aconfig flags on this device.
     List {
-        /// Read from the new flag storage.
-        #[clap(long)]
-        use_new_storage: bool,
-
         /// Optionally filter by container name.
         #[clap(short = 'c', long = "container")]
         container: Option<String>,
@@ -182,6 +181,9 @@
         /// <package>.<flag_name>
         qualified_name: String,
     },
+
+    /// Display which flag storage backs aconfig flags.
+    WhichBacking,
 }
 
 struct PaddingInfo {
@@ -251,6 +253,14 @@
         FlagSourceType::DeviceConfig => DeviceConfigSource::list_flags()?,
         FlagSourceType::AconfigStorage => AconfigStorageSource::list_flags()?,
     };
+
+    if let Some(ref c) = container {
+        ensure!(
+            load_protos::list_containers()?.contains(c),
+            format!("container '{}' not found", &c)
+        );
+    }
+
     let flags = (Filter { container }).apply(&flags_unfiltered);
     let padding_info = PaddingInfo {
         longest_flag_col: flags.iter().map(|f| f.qualified_name().len()).max().unwrap_or(0),
@@ -280,19 +290,31 @@
     Ok(result)
 }
 
+fn display_which_backing() -> String {
+    if aconfig_flags::auto_generated::enable_only_new_storage() {
+        "aconfig_storage".to_string()
+    } else {
+        "device_config".to_string()
+    }
+}
+
 fn main() -> Result<()> {
     ensure!(nix::unistd::Uid::current().is_root(), "must be root");
 
     let cli = Cli::parse();
     let output = match cli.command {
-        Command::List { use_new_storage: true, container } => {
-            list(FlagSourceType::AconfigStorage, container).map(Some)
-        }
-        Command::List { use_new_storage: false, container } => {
-            list(FlagSourceType::DeviceConfig, container).map(Some)
+        Command::List { container } => {
+            if aconfig_flags::auto_generated::enable_only_new_storage() {
+                list(FlagSourceType::AconfigStorage, container)
+                    .map_err(|err| anyhow!("could not list flags: {err}"))
+                    .map(Some)
+            } else {
+                list(FlagSourceType::DeviceConfig, container).map(Some)
+            }
         }
         Command::Enable { qualified_name } => set_flag(&qualified_name, "true").map(|_| None),
         Command::Disable { qualified_name } => set_flag(&qualified_name, "false").map(|_| None),
+        Command::WhichBacking => Ok(Some(display_which_backing())),
     };
     match output {
         Ok(Some(text)) => println!("{text}"),
diff --git a/tools/aconfig/fake_device_config/Android.bp b/tools/aconfig/fake_device_config/Android.bp
index 4566bf9..1c5b7c5 100644
--- a/tools/aconfig/fake_device_config/Android.bp
+++ b/tools/aconfig/fake_device_config/Android.bp
@@ -13,10 +13,43 @@
 // limitations under the License.
 
 java_library {
-	name: "fake_device_config",
-	srcs: ["src/**/*.java"],
-	sdk_version: "none",
-	system_modules: "core-all-system-modules",
-	host_supported: true,
+    name: "fake_device_config",
+    srcs: [
+        "src/**/*.java",
+    ],
+    sdk_version: "none",
+    system_modules: "core-all-system-modules",
+    host_supported: true,
+    is_stubs_module: true,
 }
 
+java_library {
+    name: "strict_mode_stub",
+    srcs: [
+        "src/android/os/StrictMode.java",
+    ],
+    sdk_version: "core_current",
+    host_supported: true,
+    is_stubs_module: true,
+}
+
+java_library {
+    name: "aconfig_storage_stub",
+    srcs: [
+        "src/android/os/flagging/**/*.java",
+    ],
+    sdk_version: "core_current",
+    host_supported: true,
+    is_stubs_module: true,
+}
+
+java_library {
+    name: "aconfig_storage_stub_none",
+    srcs: [
+        "src/android/os/flagging/**/*.java",
+    ],
+    sdk_version: "none",
+    system_modules: "core-all-system-modules",
+    host_supported: true,
+    is_stubs_module: true,
+}
diff --git a/tools/aconfig/fake_device_config/src/android/os/Binder.java b/tools/aconfig/fake_device_config/src/android/os/Binder.java
new file mode 100644
index 0000000..8a2313d
--- /dev/null
+++ b/tools/aconfig/fake_device_config/src/android/os/Binder.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.os;
+
+public class Binder {
+    public static final long clearCallingIdentity() {
+        throw new UnsupportedOperationException("Stub!");
+    }
+    public static final void restoreCallingIdentity(long token) {
+        throw new UnsupportedOperationException("Stub!");
+    }
+}
diff --git a/tools/aconfig/fake_device_config/src/android/os/StrictMode.java b/tools/aconfig/fake_device_config/src/android/os/StrictMode.java
new file mode 100644
index 0000000..6416252
--- /dev/null
+++ b/tools/aconfig/fake_device_config/src/android/os/StrictMode.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.os;
+
+public class StrictMode {
+    public static ThreadPolicy allowThreadDiskReads() {
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    public static void setThreadPolicy(final ThreadPolicy policy) {
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    public static final class ThreadPolicy {}
+}
diff --git a/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigPackage.java b/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigPackage.java
new file mode 100644
index 0000000..3cac516
--- /dev/null
+++ b/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigPackage.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.os.flagging;
+
+/*
+ * This class allows generated aconfig code to compile independently of the framework.
+ */
+public class AconfigPackage {
+    public static AconfigPackage load(String packageName) {
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    public boolean getBooleanFlagValue(String flagName, boolean defaultValue) {
+        throw new UnsupportedOperationException("Stub!");
+    }
+}
diff --git a/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigPackageInternal.java b/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigPackageInternal.java
new file mode 100644
index 0000000..5f066a8
--- /dev/null
+++ b/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigPackageInternal.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.os.flagging;
+
+/*
+ * This class allows generated aconfig code to compile independently of the framework.
+ */
+public class AconfigPackageInternal {
+
+    public static AconfigPackageInternal load(String container, String packageName) {
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    public static AconfigPackageInternal load(
+            String container, String packageName, long packageFingerprint) {
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    public boolean getBooleanFlagValue(int index) {
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    public boolean getBooleanFlagValue(String flagName, boolean defaultValue) {
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    public AconfigStorageReadException getException() {
+        throw new UnsupportedOperationException("Stub!");
+    }
+}
diff --git a/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigStorageReadException.java b/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigStorageReadException.java
new file mode 100644
index 0000000..bfec98c
--- /dev/null
+++ b/tools/aconfig/fake_device_config/src/android/os/flagging/AconfigStorageReadException.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.os.flagging;
+
+public class AconfigStorageReadException extends RuntimeException {
+
+    /** Generic error code indicating an unspecified Aconfig Storage error. */
+    public static final int ERROR_GENERIC = 0;
+
+    /** Error code indicating that the Aconfig Storage system is not found on the device. */
+    public static final int ERROR_STORAGE_SYSTEM_NOT_FOUND = 1;
+
+    /** Error code indicating that the requested configuration package is not found. */
+    public static final int ERROR_PACKAGE_NOT_FOUND = 2;
+
+    /** Error code indicating that the specified container is not found. */
+    public static final int ERROR_CONTAINER_NOT_FOUND = 3;
+
+    /** Error code indicating that there was an error reading the Aconfig Storage file. */
+    public static final int ERROR_CANNOT_READ_STORAGE_FILE = 4;
+
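+    /** Error code indicating that the file fingerprint did not match the expected fingerprint. */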
+    public static final int ERROR_FILE_FINGERPRINT_MISMATCH = 5;
+
+    public AconfigStorageReadException(int errorCode, String msg) {
+        super(msg);
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    public AconfigStorageReadException(int errorCode, String msg, Throwable cause) {
+        super(msg, cause);
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    public AconfigStorageReadException(int errorCode, Throwable cause) {
+        super(cause);
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    public int getErrorCode() {
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    @Override
+    public String getMessage() {
+        throw new UnsupportedOperationException("Stub!");
+    }
+}
diff --git a/tools/aconfig/fake_device_config/src/android/os/flagging/PlatformAconfigPackageInternal.java b/tools/aconfig/fake_device_config/src/android/os/flagging/PlatformAconfigPackageInternal.java
new file mode 100644
index 0000000..c1bc19b
--- /dev/null
+++ b/tools/aconfig/fake_device_config/src/android/os/flagging/PlatformAconfigPackageInternal.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.os.flagging;
+
+/*
+ * This class allows generated aconfig code to compile independently of the framework.
+ */
+public class PlatformAconfigPackageInternal {
+
+    public static PlatformAconfigPackageInternal load(String container, String packageName) {
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    public static PlatformAconfigPackageInternal load(
+            String container, String packageName, long packageFingerprint) {
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    public boolean getBooleanFlagValue(int index) {
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    public boolean getBooleanFlagValue(String flagName, boolean defaultValue) {
+        throw new UnsupportedOperationException("Stub!");
+    }
+
+    public AconfigStorageReadException getException() {
+        throw new UnsupportedOperationException("Stub!");
+    }
+}
diff --git a/tools/aconfig/fake_device_config/src/android/provider/AconfigPackage.java b/tools/aconfig/fake_device_config/src/android/provider/AconfigPackage.java
new file mode 100644
index 0000000..2f01b8c
--- /dev/null
+++ b/tools/aconfig/fake_device_config/src/android/provider/AconfigPackage.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.provider;
+
+/*
+ * This class allows generated aconfig code to compile independently of the framework.
+ */
+public class AconfigPackage {
+
+    /** Flag value is true */
+    public static final int FLAG_BOOLEAN_VALUE_TRUE = 1;
+
+    /** Flag value is false */
+    public static final int FLAG_BOOLEAN_VALUE_FALSE = 0;
+
+    /** Flag value doesn't exist */
+    public static final int FLAG_BOOLEAN_VALUE_NOT_EXIST = 2;
+
+    public static int getBooleanFlagValue(String packageName, String flagName) {
+        return 0;
+    }
+
+    public AconfigPackage(String packageName) {}
+
+    public int getBooleanFlagValue(String flagName) {
+        return 0;
+    }
+}
\ No newline at end of file
diff --git a/tools/aconfig/fake_device_config/src/android/util/Log.java b/tools/aconfig/fake_device_config/src/android/util/Log.java
new file mode 100644
index 0000000..79de680
--- /dev/null
+++ b/tools/aconfig/fake_device_config/src/android/util/Log.java
@@ -0,0 +1,19 @@
+package android.util;
+
+public final class Log {
+    public static int i(String tag, String msg) {
+        return 0;
+    }
+
+    public static int w(String tag, String msg) {
+        return 0;
+    }
+
+    public static int e(String tag, String msg) {
+        return 0;
+    }
+
+    public static int e(String tag, String msg, Throwable tr) {
+        return 0;
+    }
+}
diff --git a/tools/auto_gen_test_config.py b/tools/auto_gen_test_config.py
index 8ee599a..d54c412 100755
--- a/tools/auto_gen_test_config.py
+++ b/tools/auto_gen_test_config.py
@@ -34,6 +34,7 @@
 PLACEHOLDER_PACKAGE = '{PACKAGE}'
 PLACEHOLDER_RUNNER = '{RUNNER}'
 PLACEHOLDER_TEST_TYPE = '{TEST_TYPE}'
+PLACEHOLDER_EXTRA_TEST_RUNNER_CONFIGS = '{EXTRA_TEST_RUNNER_CONFIGS}'
 
 
 def main(argv):
@@ -59,6 +60,7 @@
       "instrumentation_test_config_template",
       help="Path to the instrumentation test config template.")
   parser.add_argument("--extra-configs", default="")
+  parser.add_argument("--extra-test-runner-configs", default="")
   args = parser.parse_args(argv)
 
   target_config = args.target_config
@@ -66,6 +68,7 @@
   empty_config = args.empty_config
   instrumentation_test_config_template = args.instrumentation_test_config_template
   extra_configs = '\n'.join(args.extra_configs.split('\\n'))
+  extra_test_runner_configs = '\n'.join(args.extra_test_runner_configs.split('\\n'))
 
   module = os.path.splitext(os.path.basename(target_config))[0]
 
@@ -131,6 +134,7 @@
     config = config.replace(PLACEHOLDER_PACKAGE, package)
     config = config.replace(PLACEHOLDER_TEST_TYPE, test_type)
     config = config.replace(PLACEHOLDER_EXTRA_CONFIGS, extra_configs)
+    config = config.replace(PLACEHOLDER_EXTRA_TEST_RUNNER_CONFIGS, extra_test_runner_configs)
     config = config.replace(PLACEHOLDER_RUNNER, runner)
     with open(target_config, 'w') as config_file:
       config_file.write(config)
diff --git a/tools/compliance/go.work b/tools/compliance/go.work
index a24d2ea..506e619 100644
--- a/tools/compliance/go.work
+++ b/tools/compliance/go.work
@@ -1,4 +1,4 @@
-go 1.22
+go 1.23
 
 use (
 	.
diff --git a/tools/edit_monitor/Android.bp b/tools/edit_monitor/Android.bp
new file mode 100644
index 0000000..b8ac5bf
--- /dev/null
+++ b/tools/edit_monitor/Android.bp
@@ -0,0 +1,118 @@
+// Copyright 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Set of error prone rules to ensure code quality
+// PackageLocation check requires the androidCompatible=false otherwise it does not do anything.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+    default_team: "trendy_team_adte",
+}
+
+python_library_host {
+    name: "edit_event_proto",
+    srcs: [
+        "proto/edit_event.proto",
+    ],
+    proto: {
+        canonical_path_from_root: false,
+    },
+}
+
+python_library_host {
+    name: "edit_monitor_lib",
+    pkg_path: "edit_monitor",
+    srcs: [
+        "daemon_manager.py",
+        "edit_monitor.py",
+        "utils.py",
+    ],
+    libs: [
+        "asuite_cc_client",
+        "edit_event_proto",
+        "watchdog",
+    ],
+}
+
+python_test_host {
+    name: "daemon_manager_test",
+    main: "daemon_manager_test.py",
+    pkg_path: "edit_monitor",
+    srcs: [
+        "daemon_manager_test.py",
+    ],
+    libs: [
+        "edit_monitor_lib",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+}
+
+python_test_host {
+    name: "edit_monitor_test",
+    main: "edit_monitor_test.py",
+    pkg_path: "edit_monitor",
+    srcs: [
+        "edit_monitor_test.py",
+    ],
+    libs: [
+        "edit_monitor_lib",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+}
+
+python_test_host {
+    name: "edit_monitor_utils_test",
+    main: "utils_test.py",
+    pkg_path: "edit_monitor",
+    srcs: [
+        "utils_test.py",
+    ],
+    libs: [
+        "edit_monitor_lib",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+}
+
+python_test_host {
+    name: "edit_monitor_integration_test",
+    main: "edit_monitor_integration_test.py",
+    pkg_path: "testdata",
+    srcs: [
+        "edit_monitor_integration_test.py",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    data: [
+        ":edit_monitor",
+    ],
+}
+
+python_binary_host {
+    name: "edit_monitor",
+    pkg_path: "edit_monitor",
+    srcs: [
+        "main.py",
+    ],
+    libs: [
+        "edit_monitor_lib",
+    ],
+    main: "main.py",
+}
diff --git a/tools/edit_monitor/OWNERS b/tools/edit_monitor/OWNERS
new file mode 100644
index 0000000..8f0f364
--- /dev/null
+++ b/tools/edit_monitor/OWNERS
@@ -0,0 +1 @@
+include platform/tools/asuite:/OWNERS_ADTE_TEAM
\ No newline at end of file
diff --git a/tools/edit_monitor/daemon_manager.py b/tools/edit_monitor/daemon_manager.py
new file mode 100644
index 0000000..22782f7
--- /dev/null
+++ b/tools/edit_monitor/daemon_manager.py
@@ -0,0 +1,421 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import getpass
+import hashlib
+import logging
+import multiprocessing
+import os
+import pathlib
+import platform
+import signal
+import subprocess
+import sys
+import tempfile
+import time
+
+from atest.metrics import clearcut_client
+from atest.proto import clientanalytics_pb2
+from edit_monitor import utils
+from proto import edit_event_pb2
+
+DEFAULT_PROCESS_TERMINATION_TIMEOUT_SECONDS = 5
+DEFAULT_MONITOR_INTERVAL_SECONDS = 5
+DEFAULT_MEMORY_USAGE_THRESHOLD = 0.02  # 2% of total memory
+DEFAULT_CPU_USAGE_THRESHOLD = 200
+DEFAULT_REBOOT_TIMEOUT_SECONDS = 60 * 60 * 24
+BLOCK_SIGN_FILE = "edit_monitor_block_sign"
+# Enum of the Clearcut log source defined under
+# /google3/wireless/android/play/playlog/proto/log_source_enum.proto
+LOG_SOURCE = 2524
+
+
+def default_daemon_target():
+  """Place holder for the default daemon target."""
+  print("default daemon target")
+
+
+class DaemonManager:
+  """Class to manage and monitor the daemon run as a subprocess."""
+
+  def __init__(
+      self,
+      binary_path: str,
+      daemon_target: callable = default_daemon_target,
+      daemon_args: tuple = (),
+      cclient: clearcut_client.Clearcut | None = None,
+  ):
+    self.binary_path = binary_path
+    self.daemon_target = daemon_target
+    self.daemon_args = daemon_args
+    self.cclient = cclient or clearcut_client.Clearcut(LOG_SOURCE)
+
+    self.user_name = getpass.getuser()
+    self.host_name = platform.node()
+    self.source_root = os.environ.get("ANDROID_BUILD_TOP", "")
+    self.pid = os.getpid()
+    self.daemon_process = None
+
+    self.max_memory_usage = 0
+    self.max_cpu_usage = 0
+    self.total_memory_size = os.sysconf("SC_PAGE_SIZE") * os.sysconf(
+        "SC_PHYS_PAGES"
+    )
+
+    pid_file_dir = pathlib.Path(tempfile.gettempdir()).joinpath("edit_monitor")
+    pid_file_dir.mkdir(parents=True, exist_ok=True)
+    self.pid_file_path = self._get_pid_file_path(pid_file_dir)
+    self.block_sign = pathlib.Path(tempfile.gettempdir()).joinpath(
+        BLOCK_SIGN_FILE
+    )
+
+  def start(self):
+    """Writes the pidfile and starts the daemon proces."""
+    if not utils.is_feature_enabled(
+        "edit_monitor",
+        self.user_name,
+        "ENABLE_ANDROID_EDIT_MONITOR",
+        50,
+    ):
+      logging.warning("Edit monitor is disabled, exiting...")
+      return
+
+    if self.block_sign.exists():
+      logging.warning("Block sign found, exiting...")
+      return
+
+    if self.binary_path.startswith("/google/cog/"):
+      logging.warning("Edit monitor for cog is not supported, exiting...")
+      return
+
+    try:
+      self._stop_any_existing_instance()
+      self._write_pid_to_pidfile()
+      self._start_daemon_process()
+    except Exception as e:
+      logging.exception("Failed to start daemon manager with error %s", e)
+      self._send_error_event_to_clearcut(
+          edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR
+      )
+      raise e
+
+  def monitor_daemon(
+      self,
+      interval: int = DEFAULT_MONITOR_INTERVAL_SECONDS,
+      memory_threshold: float = DEFAULT_MEMORY_USAGE_THRESHOLD,
+      cpu_threshold: float = DEFAULT_CPU_USAGE_THRESHOLD,
+      reboot_timeout: int = DEFAULT_REBOOT_TIMEOUT_SECONDS,
+  ):
+    """Monits the daemon process status.
+
+    Periodically check the CPU/Memory usage of the daemon process as long as the
+    process is still running and kill the process if the resource usage is above
+    given thresholds.
+    """
+    if not self.daemon_process:
+      return
+
+    logging.info("start monitoring daemon process %d.", self.daemon_process.pid)
+    reboot_time = time.time() + reboot_timeout
+    while self.daemon_process.is_alive():
+      if time.time() > reboot_time:
+        self.reboot()
+      try:
+        memory_usage = self._get_process_memory_percent(self.daemon_process.pid)
+        self.max_memory_usage = max(self.max_memory_usage, memory_usage)
+
+        cpu_usage = self._get_process_cpu_percent(self.daemon_process.pid)
+        self.max_cpu_usage = max(self.max_cpu_usage, cpu_usage)
+
+        time.sleep(interval)
+      except Exception as e:
+        # Log the error and continue.
+        logging.warning("Failed to monitor daemon process with error: %s", e)
+
+      if self.max_memory_usage >= memory_threshold:
+        self._send_error_event_to_clearcut(
+            edit_event_pb2.EditEvent.KILLED_DUE_TO_EXCEEDED_MEMORY_USAGE
+        )
+        logging.error(
+            "Daemon process is consuming too much memory, rebooting...")
+        self.reboot()
+
+      if self.max_cpu_usage >= cpu_threshold:
+        self._send_error_event_to_clearcut(
+            edit_event_pb2.EditEvent.KILLED_DUE_TO_EXCEEDED_CPU_USAGE
+        )
+        logging.error(
+            "Daemon process is consuming too much cpu, killing...")
+        self._terminate_process(self.daemon_process.pid)
+
+    logging.info(
+        "Daemon process %d terminated. Max memory usage: %f, Max cpu"
+        " usage: %f.",
+        self.daemon_process.pid,
+        self.max_memory_usage,
+        self.max_cpu_usage,
+    )
+
+  def stop(self):
+    """Stops the daemon process and removes the pidfile."""
+
+    logging.info("in daemon manager cleanup.")
+    try:
+      if self.daemon_process:
+        # The daemon process might already be terminating; wait some time
+        # before killing it explicitly.
+        self._wait_for_process_terminate(self.daemon_process.pid, 1)
+        if self.daemon_process.is_alive():
+          self._terminate_process(self.daemon_process.pid)
+      self._remove_pidfile()
+      logging.info("Successfully stopped daemon manager.")
+    except Exception as e:
+      logging.exception("Failed to stop daemon manager with error %s", e)
+      self._send_error_event_to_clearcut(
+          edit_event_pb2.EditEvent.FAILED_TO_STOP_EDIT_MONITOR
+      )
+      sys.exit(1)
+    finally:
+      self.cclient.flush_events()
+
+  def reboot(self):
+    """Reboots the current process.
+
+    Stops the current daemon manager and reboots the entire process based on
+    the binary file. Exits directly if the binary file no longer exists.
+    """
+    logging.info("Rebooting process based on binary %s.", self.binary_path)
+
+    # Stop the current daemon manager first.
+    self.stop()
+
+    # If the binary no longer exists, exit directly.
+    if not os.path.exists(self.binary_path):
+      logging.info("binary %s no longer exists, exiting.", self.binary_path)
+      sys.exit(0)
+
+    try:
+      os.execv(self.binary_path, sys.argv)
+    except OSError as e:
+      logging.exception("Failed to reboot process with error: %s.", e)
+      self._send_error_event_to_clearcut(
+          edit_event_pb2.EditEvent.FAILED_TO_REBOOT_EDIT_MONITOR
+      )
+      sys.exit(1)  # Indicate an error occurred
+
+  def cleanup(self):
+    """Wipes out all edit monitor instances in the system.
+
+    Stops all the existing edit monitor instances and places a block sign
+    to prevent any edit monitor process from starting. This method is only
+    used in emergency cases when something goes wrong with the edit monitor
+    that requires immediate cleanup to prevent damage to the system.
+    """
+    logging.debug("Start cleaning up all existing instances.")
+    self._send_error_event_to_clearcut(edit_event_pb2.EditEvent.FORCE_CLEANUP)
+
+    try:
+      # First places a block sign to prevent any edit monitor process from starting.
+      self.block_sign.touch()
+    except (FileNotFoundError, PermissionError, OSError):
+      logging.exception("Failed to place the block sign")
+
+    # Finds and kills all the existing instances of edit monitor.
+    existing_instances_pids = self._find_all_instances_pids()
+    for pid in existing_instances_pids:
+      logging.info(
+          "Found existing edit monitor instance with pid %d, killing...", pid
+      )
+      try:
+        self._terminate_process(pid)
+      except Exception:
+        logging.exception("Failed to terminate process %d", pid)
+
+  def _stop_any_existing_instance(self):
+    if not self.pid_file_path.exists():
+      logging.debug("No existing instances.")
+      return
+
+    ex_pid = self._read_pid_from_pidfile()
+
+    if ex_pid:
+      logging.info("Found another instance with pid %d.", ex_pid)
+      self._terminate_process(ex_pid)
+      self._remove_pidfile()
+
+  def _read_pid_from_pidfile(self):
+    with open(self.pid_file_path, "r") as f:
+      return int(f.read().strip())
+
+  def _write_pid_to_pidfile(self):
+    """Creates a pidfile and writes the current pid to the file.
+
+    Raises FileExistsError if the pidfile already exists.
+    """
+    try:
+      # Use the 'x' mode to open the file for exclusive creation
+      with open(self.pid_file_path, "x") as f:
+        f.write(f"{self.pid}")
+    except FileExistsError as e:
+      # This can happen due to a race condition when a user tries to start
+      # two edit monitors at the same time, or because an existing edit
+      # monitor is already running and we cannot kill it for some reason.
+      logging.exception("pidfile %s already exists.", self.pid_file_path)
+      raise e
+
+  def _start_daemon_process(self):
+    """Starts a subprocess to run the daemon."""
+    p = multiprocessing.Process(
+        target=self.daemon_target, args=self.daemon_args
+    )
+    p.daemon = True
+    p.start()
+
+    logging.info("Start subprocess with PID %d", p.pid)
+    self.daemon_process = p
+
+  def _terminate_process(
+      self, pid: int, timeout: int = DEFAULT_PROCESS_TERMINATION_TIMEOUT_SECONDS
+  ):
+    """Terminates a process with given pid.
+
+    It first sends a SIGTERM to the process to allow it to terminate
+    properly within a timeout. If the process has not terminated within
+    the timeout, it is killed forcefully.
+    """
+    try:
+      os.kill(pid, signal.SIGTERM)
+      if not self._wait_for_process_terminate(pid, timeout):
+        logging.warning(
+            "Process %d not terminated within timeout, try force kill", pid
+        )
+        os.kill(pid, signal.SIGKILL)
+    except ProcessLookupError:
+      logging.info("Process with PID %d not found (already terminated)", pid)
+
+  def _wait_for_process_terminate(self, pid: int, timeout: int) -> bool:
+    start_time = time.time()
+
+    while time.time() < start_time + timeout:
+      if not self._is_process_alive(pid):
+        return True
+      time.sleep(1)
+
+    logging.error("Process %d not terminated within %d seconds.", pid, timeout)
+    return False
+
+  def _is_process_alive(self, pid: int) -> bool:
+    try:
+      output = subprocess.check_output(
+          ["ps", "-p", str(pid), "-o", "state="], text=True
+      ).strip()
+      state = output.split()[0]
+      return state != "Z"  # Check if the state is not 'Z' (zombie)
+    except subprocess.CalledProcessError:
+      # Process not found (already dead).
+      return False
+    except (FileNotFoundError, OSError, ValueError) as e:
+      logging.warning(
+          "Unable to check the status for process %d with error: %s.", pid, e
+      )
+      return True
+
+  def _remove_pidfile(self):
+    try:
+      os.remove(self.pid_file_path)
+    except FileNotFoundError:
+      logging.info("pid file %s already removed.", self.pid_file_path)
+
+  def _get_pid_file_path(self, pid_file_dir: pathlib.Path) -> pathlib.Path:
+    """Generates the path to store the pidfile.
+
+    The file path should have the format of "/tmp/edit_monitor/xxxx.lock"
+    where xxxx is a hashed value based on the binary path that starts the
+    process.
+    """
+    hash_object = hashlib.sha256()
+    hash_object.update(self.binary_path.encode("utf-8"))
+    pid_file_path = pid_file_dir.joinpath(hash_object.hexdigest() + ".lock")
+    logging.info("pid_file_path: %s", pid_file_path)
+
+    return pid_file_path
+
+  def _get_process_memory_percent(self, pid: int) -> float:
+    with open(f"/proc/{pid}/stat", "r") as f:
+      stat_data = f.readline().split()
+      # RSS is the 24th field in /proc/[pid]/stat
+      rss_pages = int(stat_data[23])
+      process_memory = rss_pages * 4 * 1024  # Convert pages to bytes (assumes 4 KiB pages)
+
+    return (
+        process_memory / self.total_memory_size
+        if self.total_memory_size
+        else 0.0
+    )
+
+  def _get_process_cpu_percent(self, pid: int, interval: int = 1) -> float:
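+    # CPU usage is approximated as (delta process CPU time / delta system
+    # uptime) * 100, sampled over `interval` seconds; it can exceed 100 on
+    # multi-core hosts.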
+    total_start_time = self._get_total_cpu_time(pid)
+    with open("/proc/uptime", "r") as f:
+      uptime_start = float(f.readline().split()[0])
+
+    time.sleep(interval)
+
+    total_end_time = self._get_total_cpu_time(pid)
+    with open("/proc/uptime", "r") as f:
+      uptime_end = float(f.readline().split()[0])
+
+    return (
+        (total_end_time - total_start_time)
+        / (uptime_end - uptime_start)
+        * 100
+    )
+
+  def _get_total_cpu_time(self, pid: int) -> float:
+    with open(f"/proc/{str(pid)}/stat", "r") as f:
+      stats = f.readline().split()
+      # utime is the 14th field in /proc/[pid]/stat measured in clock ticks.
+      utime = int(stats[13])
+      # stime is the 15th field in /proc/[pid]/stat measured in clock ticks.
+      stime = int(stats[14])
+      return (utime + stime) / os.sysconf(os.sysconf_names["SC_CLK_TCK"])
+
+  def _find_all_instances_pids(self) -> list[int]:
+    pids = []
+
+    for file in os.listdir(self.pid_file_path.parent):
+      if file.endswith(".lock"):
+        try:
+          with open(self.pid_file_path.parent.joinpath(file), "r") as f:
+            pids.append(int(f.read().strip()))
+        except (FileNotFoundError, IOError, ValueError, TypeError):
+          logging.exception("Failed to get pid from file path: %s", file)
+
+    return pids
+
+  def _send_error_event_to_clearcut(self, error_type):
+    edit_monitor_error_event_proto = edit_event_pb2.EditEvent(
+        user_name=self.user_name,
+        host_name=self.host_name,
+        source_root=self.source_root,
+    )
+    edit_monitor_error_event_proto.edit_monitor_error_event.CopyFrom(
+        edit_event_pb2.EditEvent.EditMonitorErrorEvent(error_type=error_type)
+    )
+    log_event = clientanalytics_pb2.LogEvent(
+        event_time_ms=int(time.time() * 1000),
+        source_extension=edit_monitor_error_event_proto.SerializeToString(),
+    )
+    self.cclient.log(log_event)
diff --git a/tools/edit_monitor/daemon_manager_test.py b/tools/edit_monitor/daemon_manager_test.py
new file mode 100644
index 0000000..350739d
--- /dev/null
+++ b/tools/edit_monitor/daemon_manager_test.py
@@ -0,0 +1,479 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Unittests for DaemonManager."""
+
+import logging
+import multiprocessing
+import os
+import pathlib
+import signal
+import subprocess
+import sys
+import tempfile
+import time
+import unittest
+from unittest import mock
+from edit_monitor import daemon_manager
+from proto import edit_event_pb2
+
+
+TEST_BINARY_FILE = '/path/to/test_binary'
+TEST_PID_FILE_PATH = (
+    '587239c2d1050afdf54512e2d799f3b929f86b43575eb3c7b4bab105dd9bd25e.lock'
+)
+
+
+def simple_daemon(output_file):
+  with open(output_file, 'w') as f:
+    f.write('running daemon target')
+
+
+def long_running_daemon():
+  while True:
+    time.sleep(1)
+
+
+def memory_consume_daemon_target(size_mb):
+  try:
+    size_bytes = size_mb * 1024 * 1024
+    dummy_data = bytearray(size_bytes)
+    time.sleep(10)
+  except MemoryError:
+    print(f'Process failed to allocate {size_mb} MB of memory.')
+
+
+def cpu_consume_daemon_target(target_usage_percent):
+  while True:
+    start_time = time.time()
+    while time.time() - start_time < target_usage_percent / 100:
+      pass  # Busy loop to consume CPU
+
+    # Sleep to reduce CPU usage
+    time.sleep(1 - target_usage_percent / 100)
+
+
+class DaemonManagerTest(unittest.TestCase):
+
+  @classmethod
+  def setUpClass(cls):
+    super().setUpClass()
+    # Configure to print logging to stdout.
+    logging.basicConfig(filename=None, level=logging.DEBUG)
+    console = logging.StreamHandler(sys.stdout)
+    logging.getLogger('').addHandler(console)
+
+  def setUp(self):
+    super().setUp()
+    self.original_tempdir = tempfile.tempdir
+    self.working_dir = tempfile.TemporaryDirectory()
+    # Sets the tempdir under the working dir so any temp files created during
+    # tests will be cleaned.
+    tempfile.tempdir = self.working_dir.name
+    self.patch = mock.patch.dict(
+        os.environ, {'ENABLE_ANDROID_EDIT_MONITOR': 'true'})
+    self.patch.start()
+
+  def tearDown(self):
+    # Cleans up any child processes left by the tests.
+    self._cleanup_child_processes()
+    self.working_dir.cleanup()
+    # Restores tempdir.
+    tempfile.tempdir = self.original_tempdir
+    self.patch.stop()
+    super().tearDown()
+
+  def test_start_success_with_no_existing_instance(self):
+    self.assert_run_simple_daemon_success()
+
+  def test_start_success_with_existing_instance_running(self):
+    # Create a running daemon subprocess
+    p = self._create_fake_daemon_process()
+
+    self.assert_run_simple_daemon_success()
+
+  def test_start_success_with_existing_instance_already_dead(self):
+    # Create a pidfile with pid that does not exist.
+    pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath(
+        'edit_monitor'
+    )
+    pid_file_path_dir.mkdir(parents=True, exist_ok=True)
+    with open(pid_file_path_dir.joinpath(TEST_PID_FILE_PATH), 'w') as f:
+      f.write('123456')
+
+    self.assert_run_simple_daemon_success()
+
+  def test_start_success_with_existing_instance_from_different_binary(self):
+    # First start an instance based on "some_binary_path"
+    existing_dm = daemon_manager.DaemonManager(
+        'some_binary_path',
+        daemon_target=long_running_daemon,
+    )
+    existing_dm.start()
+
+    self.assert_run_simple_daemon_success()
+    existing_dm.stop()
+
+  def test_start_return_directly_if_block_sign_exists(self):
+    # Creates the block sign.
+    pathlib.Path(self.working_dir.name).joinpath(
+        daemon_manager.BLOCK_SIGN_FILE
+    ).touch()
+
+    dm = daemon_manager.DaemonManager(TEST_BINARY_FILE)
+    dm.start()
+
+    # Verify no daemon process is started.
+    self.assertIsNone(dm.daemon_process)
+
+  @mock.patch.dict(os.environ, {'ENABLE_ANDROID_EDIT_MONITOR': 'false'}, clear=True)
+  def test_start_return_directly_if_disabled(self):
+    dm = daemon_manager.DaemonManager(TEST_BINARY_FILE)
+    dm.start()
+
+    # Verify no daemon process is started.
+    self.assertIsNone(dm.daemon_process)
+
+  def test_start_return_directly_if_in_cog_env(self):
+    dm = daemon_manager.DaemonManager(
+        '/google/cog/cloud/user/workspace/edit_monitor'
+    )
+    dm.start()
+
+    # Verify no daemon process is started.
+    self.assertIsNone(dm.daemon_process)
+
+  @mock.patch('os.kill')
+  def test_start_failed_to_kill_existing_instance(self, mock_kill):
+    mock_kill.side_effect = OSError('Unknown OSError')
+    pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath(
+        'edit_monitor'
+    )
+    pid_file_path_dir.mkdir(parents=True, exist_ok=True)
+    with open(pid_file_path_dir.joinpath(TEST_PID_FILE_PATH), 'w') as f:
+      f.write('123456')
+
+    fake_cclient = FakeClearcutClient()
+    with self.assertRaises(OSError):
+      dm = daemon_manager.DaemonManager(TEST_BINARY_FILE, cclient=fake_cclient)
+      dm.start()
+    self._assert_error_event_logged(
+        fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR
+    )
+
+  def test_start_failed_to_write_pidfile(self):
+    pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath(
+        'edit_monitor'
+    )
+    pid_file_path_dir.mkdir(parents=True, exist_ok=True)
+    # Makes the directory read-only so write pidfile will fail.
+    os.chmod(pid_file_path_dir, 0o555)
+
+    fake_cclient = FakeClearcutClient()
+    with self.assertRaises(PermissionError):
+      dm = daemon_manager.DaemonManager(TEST_BINARY_FILE, cclient=fake_cclient)
+      dm.start()
+    self._assert_error_event_logged(
+        fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR
+    )
+
+  def test_start_failed_to_start_daemon_process(self):
+    fake_cclient = FakeClearcutClient()
+    with self.assertRaises(TypeError):
+      dm = daemon_manager.DaemonManager(
+          TEST_BINARY_FILE,
+          daemon_target='wrong_target',
+          daemon_args=(1),
+          cclient=fake_cclient,
+      )
+      dm.start()
+    self._assert_error_event_logged(
+        fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_START_EDIT_MONITOR
+    )
+
+  @mock.patch('os.execv')
+  def test_monitor_reboot_with_high_memory_usage(self, mock_execv):
+    fake_cclient = FakeClearcutClient()
+    binary_file = tempfile.NamedTemporaryFile(
+        dir=self.working_dir.name, delete=False
+    )
+
+    dm = daemon_manager.DaemonManager(
+        binary_file.name,
+        daemon_target=memory_consume_daemon_target,
+        daemon_args=(2,),
+        cclient=fake_cclient,
+    )
+    # Set the fake total_memory_size.
+    dm.total_memory_size = 100 * 1024 * 1024
+    dm.start()
+    dm.monitor_daemon(interval=1)
+
+    self.assertTrue(dm.max_memory_usage >= 0.02)
+    self.assert_no_subprocess_running()
+    self._assert_error_event_logged(
+        fake_cclient,
+        edit_event_pb2.EditEvent.KILLED_DUE_TO_EXCEEDED_MEMORY_USAGE,
+    )
+    mock_execv.assert_called_once()
+
+  def test_monitor_daemon_subprocess_killed_high_cpu_usage(self):
+    fake_cclient = FakeClearcutClient()
+    dm = daemon_manager.DaemonManager(
+        TEST_BINARY_FILE,
+        daemon_target=cpu_consume_daemon_target,
+        daemon_args=(20,),
+        cclient=fake_cclient,
+    )
+    dm.start()
+    dm.monitor_daemon(interval=1, cpu_threshold=20)
+
+    self.assertTrue(dm.max_cpu_usage >= 20)
+    self.assert_no_subprocess_running()
+    self._assert_error_event_logged(
+        fake_cclient,
+        edit_event_pb2.EditEvent.KILLED_DUE_TO_EXCEEDED_CPU_USAGE,
+    )
+
+  @mock.patch('subprocess.check_output')
+  def test_monitor_daemon_failed_does_not_matter(self, mock_output):
+    mock_output.side_effect = OSError('Unknown OSError')
+    self.assert_run_simple_daemon_success()
+
+  @mock.patch('os.execv')
+  def test_monitor_daemon_reboot_triggered(self, mock_execv):
+    binary_file = tempfile.NamedTemporaryFile(
+        dir=self.working_dir.name, delete=False
+    )
+
+    dm = daemon_manager.DaemonManager(
+        binary_file.name,
+        daemon_target=long_running_daemon,
+    )
+    dm.start()
+    dm.monitor_daemon(reboot_timeout=0.5)
+    mock_execv.assert_called_once()
+
+  def test_stop_success(self):
+    dm = daemon_manager.DaemonManager(
+        TEST_BINARY_FILE, daemon_target=long_running_daemon
+    )
+    dm.start()
+    dm.stop()
+
+    self.assert_no_subprocess_running()
+    self.assertFalse(dm.pid_file_path.exists())
+
+  @mock.patch('os.kill')
+  def test_stop_failed_to_kill_daemon_process(self, mock_kill):
+    mock_kill.side_effect = OSError('Unknown OSError')
+    fake_cclient = FakeClearcutClient()
+    dm = daemon_manager.DaemonManager(
+        TEST_BINARY_FILE,
+        daemon_target=long_running_daemon,
+        cclient=fake_cclient,
+    )
+
+    with self.assertRaises(SystemExit):
+      dm.start()
+      dm.stop()
+      self.assertTrue(dm.daemon_process.is_alive())
+      self.assertTrue(dm.pid_file_path.exists())
+    self._assert_error_event_logged(
+        fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_STOP_EDIT_MONITOR
+    )
+
+  @mock.patch('os.remove')
+  def test_stop_failed_to_remove_pidfile(self, mock_remove):
+    mock_remove.side_effect = OSError('Unknown OSError')
+
+    fake_cclient = FakeClearcutClient()
+    dm = daemon_manager.DaemonManager(
+        TEST_BINARY_FILE,
+        daemon_target=long_running_daemon,
+        cclient=fake_cclient,
+    )
+
+    with self.assertRaises(SystemExit):
+      dm.start()
+      dm.stop()
+      self.assert_no_subprocess_running()
+      self.assertTrue(dm.pid_file_path.exists())
+
+    self._assert_error_event_logged(
+        fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_STOP_EDIT_MONITOR
+    )
+
+  @mock.patch('os.execv')
+  def test_reboot_success(self, mock_execv):
+    binary_file = tempfile.NamedTemporaryFile(
+        dir=self.working_dir.name, delete=False
+    )
+
+    dm = daemon_manager.DaemonManager(
+        binary_file.name, daemon_target=long_running_daemon
+    )
+    dm.start()
+    dm.reboot()
+
+    # Verifies the old process is stopped
+    self.assert_no_subprocess_running()
+    self.assertFalse(dm.pid_file_path.exists())
+
+    mock_execv.assert_called_once()
+
+  @mock.patch('os.execv')
+  def test_reboot_binary_no_longer_exists(self, mock_execv):
+    dm = daemon_manager.DaemonManager(
+        TEST_BINARY_FILE, daemon_target=long_running_daemon
+    )
+    dm.start()
+
+    with self.assertRaises(SystemExit) as cm:
+      dm.reboot()
+    mock_execv.assert_not_called()
+    self.assertEqual(cm.exception.code, 0)
+
+  @mock.patch('os.execv')
+  def test_reboot_failed(self, mock_execv):
+    mock_execv.side_effect = OSError('Unknown OSError')
+    fake_cclient = FakeClearcutClient()
+    binary_file = tempfile.NamedTemporaryFile(
+        dir=self.working_dir.name, delete=False
+    )
+
+    dm = daemon_manager.DaemonManager(
+        binary_file.name,
+        daemon_target=long_running_daemon,
+        cclient=fake_cclient,
+    )
+    dm.start()
+
+    with self.assertRaises(SystemExit) as cm:
+      dm.reboot()
+    self.assertEqual(cm.exception.code, 1)
+    self._assert_error_event_logged(
+        fake_cclient, edit_event_pb2.EditEvent.FAILED_TO_REBOOT_EDIT_MONITOR
+    )
+
+  def assert_run_simple_daemon_success(self):
+    daemon_output_file = tempfile.NamedTemporaryFile(
+        dir=self.working_dir.name, delete=False
+    )
+    dm = daemon_manager.DaemonManager(
+        TEST_BINARY_FILE,
+        daemon_target=simple_daemon,
+        daemon_args=(daemon_output_file.name,),
+    )
+    dm.start()
+    dm.monitor_daemon(interval=1)
+
+    # Verifies the expected pid file is created.
+    expected_pid_file_path = pathlib.Path(self.working_dir.name).joinpath(
+        'edit_monitor', TEST_PID_FILE_PATH
+    )
+    self.assertTrue(expected_pid_file_path.exists())
+
+    # Verify the daemon process is executed successfully.
+    with open(daemon_output_file.name, 'r') as f:
+      contents = f.read()
+      self.assertEqual(contents, 'running daemon target')
+
+  def assert_no_subprocess_running(self):
+    child_pids = self._get_child_processes(os.getpid())
+    for child_pid in child_pids:
+      self.assertFalse(
+          self._is_process_alive(child_pid), f'process {child_pid} still alive'
+      )
+
+  def _get_child_processes(self, parent_pid: int) -> list[int]:
+    try:
+      output = subprocess.check_output(
+          ['ps', '-o', 'pid,ppid', '--no-headers'], text=True
+      )
+
+      child_processes = []
+      for line in output.splitlines():
+        pid, ppid = line.split()
+        if int(ppid) == parent_pid:
+          child_processes.append(int(pid))
+      return child_processes
+    except subprocess.CalledProcessError as e:
+      self.fail(f'failed to get child process, error: {e}')
+
+  def _is_process_alive(self, pid: int) -> bool:
+    try:
+      output = subprocess.check_output(
+          ['ps', '-p', str(pid), '-o', 'state='], text=True
+      ).strip()
+      state = output.split()[0]
+      return state != 'Z'  # Check if the state is not 'Z' (zombie)
+    except subprocess.CalledProcessError:
+      return False
+
+  def _cleanup_child_processes(self):
+    child_pids = self._get_child_processes(os.getpid())
+    for child_pid in child_pids:
+      try:
+        os.kill(child_pid, signal.SIGKILL)
+      except ProcessLookupError:
+        # process already terminated
+        pass
+
+  def _create_fake_daemon_process(
+      self, name: str = ''
+  ) -> multiprocessing.Process:
+    # Create a long running subprocess
+    p = multiprocessing.Process(target=long_running_daemon)
+    p.start()
+
+    # Create the pidfile with the subprocess pid
+    pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath(
+        'edit_monitor'
+    )
+    pid_file_path_dir.mkdir(parents=True, exist_ok=True)
+    with open(pid_file_path_dir.joinpath(name + 'pid.lock'), 'w') as f:
+      f.write(str(p.pid))
+    return p
+
+  def _assert_error_event_logged(self, fake_cclient, error_type):
+    error_events = fake_cclient.get_sent_events()
+    self.assertEqual(len(error_events), 1)
+    self.assertEqual(
+        edit_event_pb2.EditEvent.FromString(
+            error_events[0].source_extension
+        ).edit_monitor_error_event.error_type,
+        error_type,
+    )
+
+
+class FakeClearcutClient:
+
+  def __init__(self):
+    self.pending_log_events = []
+    self.sent_log_event = []
+
+  def log(self, log_event):
+    self.pending_log_events.append(log_event)
+
+  def flush_events(self):
+    self.sent_log_event.extend(self.pending_log_events)
+    self.pending_log_events.clear()
+
+  def get_sent_events(self):
+    return self.sent_log_event + self.pending_log_events
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/tools/edit_monitor/edit_monitor.py b/tools/edit_monitor/edit_monitor.py
new file mode 100644
index 0000000..ab528e8
--- /dev/null
+++ b/tools/edit_monitor/edit_monitor.py
@@ -0,0 +1,220 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import getpass
+import logging
+import multiprocessing.connection
+import os
+import pathlib
+import platform
+import threading
+import time
+
+from atest.metrics import clearcut_client
+from atest.proto import clientanalytics_pb2
+from proto import edit_event_pb2
+from watchdog.events import FileSystemEvent
+from watchdog.events import PatternMatchingEventHandler
+from watchdog.observers import Observer
+
+# Enum of the Clearcut log source defined under
+# /google3/wireless/android/play/playlog/proto/log_source_enum.proto
+LOG_SOURCE = 2524
+DEFAULT_FLUSH_INTERVAL_SECONDS = 5
+DEFAULT_SINGLE_EVENTS_SIZE_THRESHOLD = 100
+
+
+class ClearcutEventHandler(PatternMatchingEventHandler):
+
+  def __init__(
+      self,
+      path: str,
+      flush_interval_sec: int,
+      single_events_size_threshold: int,
+      is_dry_run: bool = False,
+      cclient: clearcut_client.Clearcut | None = None,
+  ):
+
+    super().__init__(patterns=["*"], ignore_directories=True)
+    self.root_monitoring_path = path
+    self.flush_interval_sec = flush_interval_sec
+    self.single_events_size_threshold = single_events_size_threshold
+    self.is_dry_run = is_dry_run
+    self.cclient = cclient or clearcut_client.Clearcut(LOG_SOURCE)
+
+    self.user_name = getpass.getuser()
+    self.host_name = platform.node()
+    self.source_root = os.environ.get("ANDROID_BUILD_TOP", "")
+
+    self.pending_events = []
+    self._scheduled_log_thread = None
+    self._pending_events_lock = threading.Lock()
+
+  def on_moved(self, event: FileSystemEvent):
+    self._log_edit_event(event, edit_event_pb2.EditEvent.MOVE)
+
+  def on_created(self, event: FileSystemEvent):
+    self._log_edit_event(event, edit_event_pb2.EditEvent.CREATE)
+
+  def on_deleted(self, event: FileSystemEvent):
+    self._log_edit_event(event, edit_event_pb2.EditEvent.DELETE)
+
+  def on_modified(self, event: FileSystemEvent):
+    self._log_edit_event(event, edit_event_pb2.EditEvent.MODIFY)
+
+  def flushall(self):
+    logging.info("flushing all pending events.")
+    if self._scheduled_log_thread:
+      logging.info("canceling log thread")
+      self._scheduled_log_thread.cancel()
+      self._scheduled_log_thread = None
+
+    self._log_clearcut_events()
+    self.cclient.flush_events()
+
+  def _log_edit_event(
+      self, event: FileSystemEvent, edit_type: edit_event_pb2.EditEvent.EditType
+  ):
+    try:
+      event_time = time.time()
+
+      if self._is_hidden_file(pathlib.Path(event.src_path)):
+        logging.debug("ignore hidden file: %s.", event.src_path)
+        return
+
+      if not self._is_under_git_project(pathlib.Path(event.src_path)):
+        logging.debug(
+            "ignore file %s which does not belong to a git project",
+            event.src_path,
+        )
+        return
+
+      logging.info("%s: %s", event.event_type, event.src_path)
+
+      event_proto = edit_event_pb2.EditEvent(
+          user_name=self.user_name,
+          host_name=self.host_name,
+          source_root=self.source_root,
+      )
+      event_proto.single_edit_event.CopyFrom(
+          edit_event_pb2.EditEvent.SingleEditEvent(
+              file_path=event.src_path, edit_type=edit_type
+          )
+      )
+      with self._pending_events_lock:
+        self.pending_events.append((event_proto, event_time))
+        if not self._scheduled_log_thread:
+          logging.debug(
+              "Scheduling thread to run in %d seconds", self.flush_interval_sec
+          )
+          self._scheduled_log_thread = threading.Timer(
+              self.flush_interval_sec, self._log_clearcut_events
+          )
+          self._scheduled_log_thread.start()
+
+    except Exception:
+      logging.exception("Failed to log edit event.")
+
+  def _is_hidden_file(self, file_path: pathlib.Path) -> bool:
+    return any(
+        part.startswith(".")
+        for part in file_path.relative_to(self.root_monitoring_path).parts
+    )
+
+  def _is_under_git_project(self, file_path: pathlib.Path) -> bool:
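+    # A file counts as part of a git project if any of its ancestor
+    # directories under the monitoring root contains a .git entry.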
+    root_path = pathlib.Path(self.root_monitoring_path).resolve()
+    return any(
+        root_path.joinpath(dir).joinpath('.git').exists()
+        for dir in file_path.relative_to(root_path).parents
+    )
+
+  def _log_clearcut_events(self):
+    with self._pending_events_lock:
+      self._scheduled_log_thread = None
+      edit_events = self.pending_events
+      self.pending_events = []
+
+    pending_events_size = len(edit_events)
+    if pending_events_size > self.single_events_size_threshold:
+      logging.info(
+          "got %d events in %d seconds, sending aggregated events instead",
+          pending_events_size,
+          self.flush_interval_sec,
+      )
+      aggregated_event_time = edit_events[0][1]
+      aggregated_event_proto = edit_event_pb2.EditEvent(
+          user_name=self.user_name,
+          host_name=self.host_name,
+          source_root=self.source_root,
+      )
+      aggregated_event_proto.aggregated_edit_event.CopyFrom(
+          edit_event_pb2.EditEvent.AggregatedEditEvent(
+              num_edits=pending_events_size
+          )
+      )
+      edit_events = [(aggregated_event_proto, aggregated_event_time)]
+
+    if self.is_dry_run:
+      logging.info("Sent %d edit events in dry run.", len(edit_events))
+      return
+
+    for event_proto, event_time in edit_events:
+      log_event = clientanalytics_pb2.LogEvent(
+          event_time_ms=int(event_time * 1000),
+          source_extension=event_proto.SerializeToString(),
+      )
+      self.cclient.log(log_event)
+
+    logging.info("sent %d edit events", len(edit_events))
+
+
+def start(
+    path: str,
+    is_dry_run: bool = False,
+    flush_interval_sec: int = DEFAULT_FLUSH_INTERVAL_SECONDS,
+    single_events_size_threshold: int = DEFAULT_SINGLE_EVENTS_SIZE_THRESHOLD,
+    cclient: clearcut_client.Clearcut | None = None,
+    pipe_sender: multiprocessing.connection.Connection | None = None,
+):
+  """Method to start the edit monitor.
+
+  This is the entry point to start the edit monitor as a subprocess of
+  the daemon manager.
+
+  params:
+    path: The root path to monitor
+    cclient: The clearcut client to send the edit logs.
+    conn: the sender of the pipe to communicate with the deamon manager.
+  """
+  event_handler = ClearcutEventHandler(
+      path, flush_interval_sec, single_events_size_threshold, is_dry_run, cclient)
+  observer = Observer()
+
+  logging.info("Starting observer on path %s.", path)
+  observer.schedule(event_handler, path, recursive=True)
+  observer.start()
+  logging.info("Observer started.")
+  if pipe_sender:
+    pipe_sender.send("Observer started.")
+
+  try:
+    while True:
+      time.sleep(1)
+  finally:
+    event_handler.flushall()
+    observer.stop()
+    observer.join()
+    if pipe_sender:
+      pipe_sender.close()
diff --git a/tools/edit_monitor/edit_monitor_integration_test.py b/tools/edit_monitor/edit_monitor_integration_test.py
new file mode 100644
index 0000000..3d28274
--- /dev/null
+++ b/tools/edit_monitor/edit_monitor_integration_test.py
@@ -0,0 +1,141 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Integration tests for Edit Monitor."""
+
+import glob
+import logging
+import os
+import pathlib
+import shutil
+import signal
+import subprocess
+import sys
+import tempfile
+import time
+import unittest
+
+from importlib import resources
+from unittest import mock
+
+
+class EditMonitorIntegrationTest(unittest.TestCase):
+
+  @classmethod
+  def setUpClass(cls):
+    super().setUpClass()
+    # Configure to print logging to stdout.
+    logging.basicConfig(filename=None, level=logging.DEBUG)
+    console = logging.StreamHandler(sys.stdout)
+    logging.getLogger("").addHandler(console)
+
+  def setUp(self):
+    super().setUp()
+    self.working_dir = tempfile.TemporaryDirectory()
+    self.root_monitoring_path = pathlib.Path(self.working_dir.name).joinpath(
+        "files"
+    )
+    self.root_monitoring_path.mkdir()
+    self.edit_monitor_binary_path = self._import_executable("edit_monitor")
+    self.patch = mock.patch.dict(
+        os.environ, {'ENABLE_ANDROID_EDIT_MONITOR': 'true'})
+    self.patch.start()
+
+  def tearDown(self):
+    self.patch.stop()
+    self.working_dir.cleanup()
+    super().tearDown()
+
+  def test_log_single_edit_event_success(self):
+    p = self._start_edit_monitor_process()
+
+    # Create the .git file under the monitoring dir.
+    self.root_monitoring_path.joinpath(".git").touch()
+
+    # Create and modify a file.
+    test_file = self.root_monitoring_path.joinpath("test.txt")
+    with open(test_file, "w") as f:
+      f.write("something")
+
+    # Move the file.
+    test_file_moved = self.root_monitoring_path.joinpath("new_test.txt")
+    test_file.rename(test_file_moved)
+
+    # Delete the file.
+    test_file_moved.unlink()
+
+    # Give some time for the edit monitor to receive the edit event.
+    time.sleep(1)
+    # Stop the edit monitor and flush all events.
+    os.kill(p.pid, signal.SIGINT)
+    p.communicate()
+
+    self.assertEqual(self._get_logged_events_num(), 4)
+
+  def _start_edit_monitor_process(self):
+    command = f"""
+    export TMPDIR="{self.working_dir.name}"
+    {self.edit_monitor_binary_path} --path={self.root_monitoring_path} --dry_run"""
+    p = subprocess.Popen(
+        command,
+        shell=True,
+        text=True,
+        start_new_session=True,
+        executable="/bin/bash",
+    )
+    self._wait_for_observer_start(time_out=5)
+    return p
+
+  def _wait_for_observer_start(self, time_out):
+    start_time = time.time()
+
+    while time.time() < start_time + time_out:
+      log_files = glob.glob(self.working_dir.name + "/edit_monitor_*/*.log")
+      if log_files:
+        with open(log_files[0], "r") as f:
+          for line in f:
+            logging.debug("initial log: %s", line)
+            if line.rstrip("\n").endswith("Observer started."):
+              return
+      else:
+        time.sleep(1)
+
+    self.fail(f"Observer not started in {time_out} seconds.")
+
+  def _get_logged_events_num(self):
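+    """Parses the dry-run log file and returns the number of edit events it
+    reports as sent."""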
+    log_files = glob.glob(self.working_dir.name + "/edit_monitor_*/*.log")
+    self.assertEqual(len(log_files), 1)
+
+    with open(log_files[0], "r") as f:
+      for line in f:
+        logging.debug("complete log: %s", line)
+        if line.rstrip("\n").endswith("in dry run."):
+          return int(line.split(":")[-1].split(" ")[2])
+
+    return 0
+
+  def _import_executable(self, executable_name: str) -> pathlib.Path:
+    binary_dir = pathlib.Path(self.working_dir.name).joinpath("binary")
+    binary_dir.mkdir()
+    executable_path = binary_dir.joinpath(executable_name)
+    with resources.as_file(
+        resources.files("testdata").joinpath(executable_name)
+    ) as binary:
+      shutil.copy(binary, executable_path)
+    executable_path.chmod(0o755)
+    return executable_path
+
+
+if __name__ == "__main__":
+  unittest.main()
diff --git a/tools/edit_monitor/edit_monitor_test.py b/tools/edit_monitor/edit_monitor_test.py
new file mode 100644
index 0000000..64a3871
--- /dev/null
+++ b/tools/edit_monitor/edit_monitor_test.py
@@ -0,0 +1,301 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Unittests for Edit Monitor."""
+
+import logging
+import multiprocessing
+import os
+import pathlib
+import signal
+import sys
+import tempfile
+import time
+import unittest
+
+from atest.proto import clientanalytics_pb2
+from edit_monitor import edit_monitor
+from proto import edit_event_pb2
+
+
+class EditMonitorTest(unittest.TestCase):
+
+  @classmethod
+  def setUpClass(cls):
+    super().setUpClass()
+    # Configure to print logging to stdout.
+    logging.basicConfig(filename=None, level=logging.DEBUG)
+    console = logging.StreamHandler(sys.stdout)
+    logging.getLogger('').addHandler(console)
+
+  def setUp(self):
+    super().setUp()
+    self.working_dir = tempfile.TemporaryDirectory()
+    self.root_monitoring_path = pathlib.Path(self.working_dir.name).joinpath(
+        'files'
+    )
+    self.root_monitoring_path.mkdir()
+    self.log_event_dir = pathlib.Path(self.working_dir.name).joinpath('logs')
+    self.log_event_dir.mkdir()
+
+  def tearDown(self):
+    self.working_dir.cleanup()
+    super().tearDown()
+
+  def test_log_single_edit_event_success(self):
+    # Create the .git file under the monitoring dir.
+    self.root_monitoring_path.joinpath('.git').touch()
+    fake_cclient = FakeClearcutClient(
+        log_output_file=self.log_event_dir.joinpath('logs.output')
+    )
+    p = self._start_test_edit_monitor_process(fake_cclient)
+
+    # Create and modify a file.
+    test_file = self.root_monitoring_path.joinpath('test.txt')
+    with open(test_file, 'w') as f:
+      f.write('something')
+    # Move the file.
+    test_file_moved = self.root_monitoring_path.joinpath('new_test.txt')
+    test_file.rename(test_file_moved)
+    # Delete the file.
+    test_file_moved.unlink()
+    # Give some time for the edit monitor to receive the edit event.
+    time.sleep(1)
+    # Stop the edit monitor and flush all events.
+    os.kill(p.pid, signal.SIGINT)
+    p.join()
+
+    logged_events = self._get_logged_events()
+    self.assertEqual(len(logged_events), 4)
+    expected_create_event = edit_event_pb2.EditEvent.SingleEditEvent(
+        file_path=str(
+            self.root_monitoring_path.joinpath('test.txt').resolve()
+        ),
+        edit_type=edit_event_pb2.EditEvent.CREATE,
+    )
+    expected_modify_event = edit_event_pb2.EditEvent.SingleEditEvent(
+        file_path=str(
+            self.root_monitoring_path.joinpath('test.txt').resolve()
+        ),
+        edit_type=edit_event_pb2.EditEvent.MODIFY,
+    )
+    expected_move_event = edit_event_pb2.EditEvent.SingleEditEvent(
+        file_path=str(
+            self.root_monitoring_path.joinpath('test.txt').resolve()
+        ),
+        edit_type=edit_event_pb2.EditEvent.MOVE,
+    )
+    expected_delete_event = edit_event_pb2.EditEvent.SingleEditEvent(
+        file_path=str(
+            self.root_monitoring_path.joinpath('new_test.txt').resolve()
+        ),
+        edit_type=edit_event_pb2.EditEvent.DELETE,
+    )
+    self.assertEqual(
+        expected_create_event,
+        edit_event_pb2.EditEvent.FromString(
+            logged_events[0].source_extension
+        ).single_edit_event,
+    )
+    self.assertEqual(
+        expected_modify_event,
+        edit_event_pb2.EditEvent.FromString(
+            logged_events[1].source_extension
+        ).single_edit_event,
+    )
+    self.assertEqual(
+        expected_move_event,
+        edit_event_pb2.EditEvent.FromString(
+            logged_events[2].source_extension
+        ).single_edit_event,
+    )
+    self.assertEqual(
+        expected_delete_event,
+        edit_event_pb2.EditEvent.FromString(
+            logged_events[3].source_extension
+        ).single_edit_event,
+    )
+
+
+  def test_log_aggregated_edit_event_success(self):
+    # Create the .git file under the monitoring dir.
+    self.root_monitoring_path.joinpath('.git').touch()
+    fake_cclient = FakeClearcutClient(
+        log_output_file=self.log_event_dir.joinpath('logs.output')
+    )
+    p = self._start_test_edit_monitor_process(fake_cclient)
+
+    # Create 6 test files
+    for i in range(6):
+      test_file = self.root_monitoring_path.joinpath('test_' + str(i))
+      test_file.touch()
+
+    # Give some time for the edit monitor to receive the edit event.
+    time.sleep(1)
+    # Stop the edit monitor and flush all events.
+    os.kill(p.pid, signal.SIGINT)
+    p.join()
+
+    logged_events = self._get_logged_events()
+    self.assertEqual(len(logged_events), 1)
+
+    expected_aggregated_edit_event = (
+        edit_event_pb2.EditEvent.AggregatedEditEvent(
+            num_edits=6,
+        )
+    )
+
+    self.assertEqual(
+        expected_aggregated_edit_event,
+        edit_event_pb2.EditEvent.FromString(
+            logged_events[0].source_extension
+        ).aggregated_edit_event,
+    )
+
+  def test_do_not_log_edit_event_for_directory_change(self):
+    # Create the .git file under the monitoring dir.
+    self.root_monitoring_path.joinpath('.git').touch()
+    fake_cclient = FakeClearcutClient(
+        log_output_file=self.log_event_dir.joinpath('logs.output')
+    )
+    p = self._start_test_edit_monitor_process(fake_cclient)
+
+    # Create a sub directory
+    self.root_monitoring_path.joinpath('test_dir').mkdir()
+    # Give some time for the edit monitor to receive the edit event.
+    time.sleep(1)
+    # Stop the edit monitor and flush all events.
+    os.kill(p.pid, signal.SIGINT)
+    p.join()
+
+    logged_events = self._get_logged_events()
+    self.assertEqual(len(logged_events), 0)
+
+  def test_do_not_log_edit_event_for_hidden_file(self):
+    # Create the .git file under the monitoring dir.
+    self.root_monitoring_path.joinpath('.git').touch()
+    fake_cclient = FakeClearcutClient(
+        log_output_file=self.log_event_dir.joinpath('logs.output')
+    )
+    p = self._start_test_edit_monitor_process(fake_cclient)
+
+    # Create a hidden file.
+    self.root_monitoring_path.joinpath('.test.txt').touch()
+    # Create a hidden dir.
+    hidden_dir = self.root_monitoring_path.joinpath('.test')
+    hidden_dir.mkdir()
+    hidden_dir.joinpath('test.txt').touch()
+    # Give some time for the edit monitor to receive the edit event.
+    time.sleep(1)
+    # Stop the edit monitor and flush all events.
+    os.kill(p.pid, signal.SIGINT)
+    p.join()
+
+    logged_events = self._get_logged_events()
+    self.assertEqual(len(logged_events), 0)
+
+  def test_do_not_log_edit_event_for_non_git_project_file(self):
+    fake_cclient = FakeClearcutClient(
+        log_output_file=self.log_event_dir.joinpath('logs.output')
+    )
+    p = self._start_test_edit_monitor_process(fake_cclient)
+
+    # Create a file.
+    self.root_monitoring_path.joinpath('test.txt').touch()
+    # Create a file under a sub dir.
+    sub_dir = self.root_monitoring_path.joinpath('.test')
+    sub_dir.mkdir()
+    sub_dir.joinpath('test.txt').touch()
+    # Give some time for the edit monitor to receive the edit event.
+    time.sleep(1)
+    # Stop the edit monitor and flush all events.
+    os.kill(p.pid, signal.SIGINT)
+    p.join()
+
+    logged_events = self._get_logged_events()
+    self.assertEqual(len(logged_events), 0)
+
+  def test_log_edit_event_fail(self):
+    # Create the .git file under the monitoring dir.
+    self.root_monitoring_path.joinpath('.git').touch()
+    fake_cclient = FakeClearcutClient(
+        log_output_file=self.log_event_dir.joinpath('logs.output'),
+        raise_log_exception=True,
+    )
+    p = self._start_test_edit_monitor_process(fake_cclient)
+
+    # Create a file.
+    self.root_monitoring_path.joinpath('test.txt').touch()
+    # Give some time for the edit monitor to receive the edit event.
+    time.sleep(1)
+    # Stop the edit monitor and flush all events.
+    os.kill(p.pid, signal.SIGINT)
+    p.join()
+
+    logged_events = self._get_logged_events()
+    self.assertEqual(len(logged_events), 0)
+
+  def _start_test_edit_monitor_process(
+      self, cclient
+  ) -> multiprocessing.Process:
+    receiver, sender = multiprocessing.Pipe()
+    # Start edit monitor in a subprocess.
+    p = multiprocessing.Process(
+        target=edit_monitor.start,
+        args=(str(self.root_monitoring_path.resolve()), False, 0.5, 5, cclient, sender),
+    )
+    p.daemon = True
+    p.start()
+
+    # Wait until observer started.
+    received_data = receiver.recv()
+    self.assertEqual(received_data, 'Observer started.')
+
+    receiver.close()
+    return p
+
+  def _get_logged_events(self):
+    with open(self.log_event_dir.joinpath('logs.output'), 'rb') as f:
+      data = f.read()
+
+    return [
+        clientanalytics_pb2.LogEvent.FromString(record)
+        for record in data.split(b'\x00')
+        if record
+    ]
+
+
+class FakeClearcutClient:
+
+  def __init__(self, log_output_file, raise_log_exception=False):
+    self.pending_log_events = []
+    self.raise_log_exception = raise_log_exception
+    self.log_output_file = log_output_file
+
+  def log(self, log_event):
+    if self.raise_log_exception:
+      raise Exception('unknown exception')
+    self.pending_log_events.append(log_event)
+
+  def flush_events(self):
+    delimiter = b'\x00'  # Use a null byte as the delimiter
+    with open(self.log_output_file, 'wb') as f:
+      for log_event in self.pending_log_events:
+        f.write(log_event.SerializeToString() + delimiter)
+
+    self.pending_log_events.clear()
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/tools/edit_monitor/main.py b/tools/edit_monitor/main.py
new file mode 100644
index 0000000..49385f1
--- /dev/null
+++ b/tools/edit_monitor/main.py
@@ -0,0 +1,118 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import logging
+import os
+import signal
+import sys
+import tempfile
+
+from edit_monitor import daemon_manager
+from edit_monitor import edit_monitor
+
+
+def create_arg_parser():
+  """Creates an instance of the default arg parser."""
+
+  parser = argparse.ArgumentParser(
+      description=(
+          'Monitors edits in Android source code and uploads the edit logs.'
+      ),
+      add_help=True,
+      formatter_class=argparse.RawDescriptionHelpFormatter,
+  )
+
+  parser.add_argument(
+      '--path',
+      type=str,
+      required=True,
+      help='Root path to monitor the edit events.',
+  )
+
+  parser.add_argument(
+      '--dry_run',
+      action='store_true',
+      help='Dry run the edit monitor. This starts the edit monitor process without actually sending the edit logs to clearcut.',
+  )
+
+  parser.add_argument(
+      '--force_cleanup',
+      action='store_true',
+      help=(
+          'Instead of starting a new edit monitor, force stop all existing'
+          ' edit monitors on the system. This option is only used in emergency'
+          ' cases when we need to prevent the edit monitor from harming users.'
+      ),
+  )
+
+  parser.add_argument(
+      '--verbose',
+      action='store_true',
+      help=(
+          'Log verbose info in the log file for debugging purposes.'
+      ),
+  )
+
+  return parser
+
+
+def configure_logging(verbose=False):
+  root_logging_dir = tempfile.mkdtemp(prefix='edit_monitor_')
+  _, log_path = tempfile.mkstemp(dir=root_logging_dir, suffix='.log')
+
+  log_fmt = '%(asctime)s %(filename)s:%(lineno)s:%(levelname)s: %(message)s'
+  date_fmt = '%Y-%m-%d %H:%M:%S'
+  log_level = logging.DEBUG if verbose else logging.INFO
+
+  logging.basicConfig(
+      filename=log_path, level=log_level, format=log_fmt, datefmt=date_fmt
+  )
+  # Filter out logs from inotify_buff to prevent log pollution.
+  logging.getLogger('watchdog.observers.inotify_buffer').addFilter(
+      lambda record: record.filename != 'inotify_buffer.py')
+  print(f'logging to file {log_path}')
+
+
+def term_signal_handler(_signal_number, _frame):
+  logging.info('Process %d received SIGTERM, terminating...', os.getpid())
+  sys.exit(0)
+
+
+def main(argv: list[str]):
+  args = create_arg_parser().parse_args(argv[1:])
+  configure_logging(args.verbose)
+  if args.dry_run:
+    logging.info('This is a dry run.')
+  dm = daemon_manager.DaemonManager(
+      binary_path=argv[0],
+      daemon_target=edit_monitor.start,
+      daemon_args=(args.path, args.dry_run),
+  )
+
+  if args.force_cleanup:
+    dm.cleanup()
+
+  try:
+    dm.start()
+    dm.monitor_daemon()
+  except Exception:
+    logging.exception('Unexpected exception raised when running the daemon.')
+  finally:
+    dm.stop()
+
+
+if __name__ == '__main__':
+  signal.signal(signal.SIGTERM, term_signal_handler)
+  main(sys.argv)
diff --git a/tools/edit_monitor/proto/edit_event.proto b/tools/edit_monitor/proto/edit_event.proto
new file mode 100644
index 0000000..9acc2e7
--- /dev/null
+++ b/tools/edit_monitor/proto/edit_event.proto
@@ -0,0 +1,57 @@
+syntax = "proto3";
+
+package tools.asuite.edit_monitor;
+
+message EditEvent {
+  enum EditType {
+    UNSUPPORTED_TYPE = 0;
+    CREATE = 1;
+    MODIFY = 2;
+    DELETE = 3;
+    MOVE = 4;
+  }
+
+  enum ErrorType {
+    UNKNOWN_ERROR = 0;
+    FAILED_TO_START_EDIT_MONITOR = 1;
+    FAILED_TO_STOP_EDIT_MONITOR = 2;
+    FAILED_TO_REBOOT_EDIT_MONITOR = 3;
+    KILLED_DUE_TO_EXCEEDED_MEMORY_USAGE = 4;
+    FORCE_CLEANUP = 5;
+    KILLED_DUE_TO_EXCEEDED_CPU_USAGE = 6;
+  }
+
+  // Event that logs a single edit
+  message SingleEditEvent {
+    // Full path of the file that was edited.
+    string file_path = 1;
+    // Type of the edit.
+    EditType edit_type = 2;
+  }
+
+  // Event that logs aggregated info for a set of edits.
+  message AggregatedEditEvent {
+    int32 num_edits = 1;
+  }
+
+  // Event that logs errors that happened in the edit monitor.
+  message EditMonitorErrorEvent {
+    ErrorType error_type = 1;
+  }
+
+  // ------------------------
+  // FIELDS FOR EditEvent
+  // ------------------------
+  // Internal user name.
+  string user_name = 1;
+  // The root of Android source.
+  string source_root = 2;
+  // Name of the host workstation.
+  string host_name = 3;
+
+  oneof event {
+    SingleEditEvent single_edit_event = 4;
+    AggregatedEditEvent aggregated_edit_event = 5;
+    EditMonitorErrorEvent edit_monitor_error_event = 6;
+  }
+}
diff --git a/tools/edit_monitor/utils.py b/tools/edit_monitor/utils.py
new file mode 100644
index 0000000..b88949d
--- /dev/null
+++ b/tools/edit_monitor/utils.py
@@ -0,0 +1,53 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import hashlib
+import logging
+import os
+
+
+def is_feature_enabled(
+    feature_name: str,
+    user_name: str,
+    enable_flag: str = None,
+    rollout_percent: int = 100,
+) -> bool:
+  """Determine whether the given feature is enabled.
+
+  Whether a given feature is enabled or not depends on two flags: 1) the
+  enable_flag that explicitly enable/disable the feature and 2) the rollout_flag
+  that controls the rollout percentage.
+
+  Args:
+    feature_name: name of the feature.
+    user_name: system user name.
+    enable_flag: name of the env var that enables/disables the feature
+      explicitly.
+    rollout_flg: name of the env var that controls the rollout percentage, the
+      value stored in the env var should be an int between 0 and 100 string
+  """
+  if enable_flag:
+    if os.environ.get(enable_flag, "") == "false":
+      logging.info("feature: %s is disabled", feature_name)
+      return False
+
+    if os.environ.get(enable_flag, "") == "true":
+      logging.info("feature: %s is enabled", feature_name)
+      return True
+
+  hash_object = hashlib.sha256()
+  hash_object.update((user_name + feature_name).encode("utf-8"))
+  hash_number = int(hash_object.hexdigest(), 16) % 100
+
+  return hash_number < rollout_percent
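+
+
+# Illustrative usage sketch (the flag name and 50% rollout below are example
+# values, not requirements of this module):
+#
+#   import getpass
+#   is_feature_enabled(
+#       feature_name="edit_monitor",
+#       user_name=getpass.getuser(),
+#       enable_flag="ENABLE_ANDROID_EDIT_MONITOR",
+#       rollout_percent=50,
+#   )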
diff --git a/tools/edit_monitor/utils_test.py b/tools/edit_monitor/utils_test.py
new file mode 100644
index 0000000..1c30aa1
--- /dev/null
+++ b/tools/edit_monitor/utils_test.py
@@ -0,0 +1,71 @@
+# Copyright 2024, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Unittests for edit monitor utils."""
+import os
+import unittest
+from unittest import mock
+
+from edit_monitor import utils
+
+TEST_USER = 'test_user'
+TEST_FEATURE = 'test_feature'
+ENABLE_TEST_FEATURE_FLAG = 'ENABLE_TEST_FEATURE'
+ROLLOUT_TEST_FEATURE_FLAG = 'ROLLOUT_TEST_FEATURE'
+
+
+class EnableFeatureTest(unittest.TestCase):
+
+  def test_feature_enabled_without_flag(self):
+    self.assertTrue(utils.is_feature_enabled(TEST_FEATURE, TEST_USER))
+
+  @mock.patch.dict(os.environ, {ENABLE_TEST_FEATURE_FLAG: 'false'}, clear=True)
+  def test_feature_disabled_with_flag(self):
+    self.assertFalse(
+        utils.is_feature_enabled(
+            TEST_FEATURE, TEST_USER, ENABLE_TEST_FEATURE_FLAG
+        )
+    )
+
+  @mock.patch.dict(os.environ, {ENABLE_TEST_FEATURE_FLAG: 'true'}, clear=True)
+  def test_feature_enabled_with_flag(self):
+    self.assertTrue(
+        utils.is_feature_enabled(
+            TEST_FEATURE, TEST_USER, ENABLE_TEST_FEATURE_FLAG
+        )
+    )
+
+  def test_feature_enabled_with_rollout_percentage(self):
+    self.assertTrue(
+        utils.is_feature_enabled(
+            TEST_FEATURE,
+            TEST_USER,
+            ENABLE_TEST_FEATURE_FLAG,
+            90,
+        )
+    )
+
+  def test_feature_disabled_with_rollout_percentage(self):
+    self.assertFalse(
+        utils.is_feature_enabled(
+            TEST_FEATURE,
+            TEST_USER,
+            ENABLE_TEST_FEATURE_FLAG,
+            10,
+        )
+    )
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/tools/filelistdiff/Android.bp b/tools/filelistdiff/Android.bp
index ab766d6..3826e50 100644
--- a/tools/filelistdiff/Android.bp
+++ b/tools/filelistdiff/Android.bp
@@ -24,4 +24,9 @@
 prebuilt_etc_host {
     name: "system_image_diff_allowlist",
     src: "allowlist",
-}
\ No newline at end of file
+}
+
+prebuilt_etc_host {
+    name: "system_image_diff_allowlist_next",
+    src: "allowlist_next",
+}
diff --git a/tools/filelistdiff/OWNERS b/tools/filelistdiff/OWNERS
new file mode 100644
index 0000000..690fb17
--- /dev/null
+++ b/tools/filelistdiff/OWNERS
@@ -0,0 +1 @@
+per-file allowlist = justinyun@google.com, jeongik@google.com, kiyoungkim@google.com, inseob@google.com
diff --git a/tools/filelistdiff/allowlist b/tools/filelistdiff/allowlist
index 72c12a0..eb78587 100644
--- a/tools/filelistdiff/allowlist
+++ b/tools/filelistdiff/allowlist
@@ -1,99 +1,5 @@
-# Known diffs only in the KATI system image
-etc/NOTICE.xml.gz
-etc/compatconfig/TeleService-platform-compat-config.xml
-etc/compatconfig/calendar-provider-compat-config.xml
-etc/compatconfig/contacts-provider-platform-compat-config.xml
-etc/compatconfig/documents-ui-compat-config.xml
-etc/compatconfig/framework-location-compat-config.xml
-etc/compatconfig/framework-platform-compat-config.xml
-etc/compatconfig/icu4j-platform-compat-config.xml
-etc/compatconfig/services-platform-compat-config.xml
-etc/permissions/android.software.credentials.xml
-etc/permissions/android.software.preview_sdk.xml
-etc/permissions/android.software.webview.xml
-etc/permissions/android.software.window_magnification.xml
-etc/permissions/com.android.adservices.sdksandbox.xml
-etc/security/otacerts.zip
-etc/vintf/compatibility_matrix.202404.xml
-etc/vintf/compatibility_matrix.202504.xml
-etc/vintf/compatibility_matrix.5.xml
-etc/vintf/compatibility_matrix.6.xml
-etc/vintf/compatibility_matrix.7.xml
-etc/vintf/compatibility_matrix.8.xml
-etc/vintf/compatibility_matrix.device.xml
-etc/vintf/manifest.xml
-framework/boot-apache-xml.vdex
-framework/boot-apache-xml.vdex.fsv_meta
-framework/boot-bouncycastle.vdex
-framework/boot-bouncycastle.vdex.fsv_meta
-framework/boot-core-icu4j.vdex
-framework/boot-core-icu4j.vdex.fsv_meta
-framework/boot-core-libart.vdex
-framework/boot-core-libart.vdex.fsv_meta
-framework/boot-ext.vdex
-framework/boot-ext.vdex.fsv_meta
-framework/boot-framework-adservices.vdex
-framework/boot-framework-adservices.vdex.fsv_meta
-framework/boot-framework-graphics.vdex
-framework/boot-framework-graphics.vdex.fsv_meta
-framework/boot-framework-location.vdex
-framework/boot-framework-location.vdex.fsv_meta
-framework/boot-framework.vdex
-framework/boot-framework.vdex.fsv_meta
-framework/boot-ims-common.vdex
-framework/boot-ims-common.vdex.fsv_meta
-framework/boot-okhttp.vdex
-framework/boot-okhttp.vdex.fsv_meta
-framework/boot-telephony-common.vdex
-framework/boot-telephony-common.vdex.fsv_meta
-framework/boot-voip-common.vdex
-framework/boot-voip-common.vdex.fsv_meta
-framework/boot.vdex
-framework/boot.vdex.fsv_meta
-framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.odex
-framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.odex.fsv_meta
-framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.vdex
-framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.vdex.fsv_meta
-lib/aaudio-aidl-cpp.so
-lib/android.hardware.biometrics.fingerprint@2.1.so
-lib/android.hardware.radio.config@1.0.so
-lib/android.hardware.radio.deprecated@1.0.so
-lib/android.hardware.radio@1.0.so
-lib/android.hardware.radio@1.1.so
-lib/android.hardware.radio@1.2.so
-lib/android.hardware.radio@1.3.so
-lib/android.hardware.radio@1.4.so
-lib/android.hardware.secure_element@1.0.so
-lib/com.android.media.aaudio-aconfig-cc.so
-lib/heapprofd_client.so
-lib/heapprofd_client_api.so
-lib/libaaudio.so
-lib/libaaudio_internal.so
-lib/libalarm_jni.so
-lib/libamidi.so
-lib/libcups.so
-lib/libjni_deviceAsWebcam.so
-lib/libprintspooler_jni.so
-lib/libvendorsupport.so
-lib/libwfds.so
-lib/libyuv.so
-
-# b/351258461
-adb_keys
+# Known diffs that may be installed in either system image, depending on the configuration
+# b/353429422
 init.environ.rc
-
-# Known diffs only in the Soong system image
-lib/libhidcommand_jni.so
-lib/libuinputcommand_jni.so
-
-# Known diffs in internal source
-bin/uprobestats
-etc/aconfig/flag.map
-etc/aconfig/flag.val
-etc/aconfig/package.map
-etc/bpf/uprobestats/BitmapAllocation.o
-etc/bpf/uprobestats/GenericInstrumentation.o
-etc/init/UprobeStats.rc
-lib/libuprobestats_client.so
-lib64/libuprobestats_client.so
-priv-app/DeviceDiagnostics/DeviceDiagnostics.apk
\ No newline at end of file
+# b/338342381
+etc/NOTICE.xml.gz
diff --git a/tools/filelistdiff/allowlist_next b/tools/filelistdiff/allowlist_next
new file mode 100644
index 0000000..8f91c9f
--- /dev/null
+++ b/tools/filelistdiff/allowlist_next
@@ -0,0 +1,9 @@
+# Allowlist only for the next release configuration.
+# TODO(b/369678122): The list will be cleared when the trunk configurations are
+# available to the next release configuration.
+
+# KATI only installed files
+framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.odex
+framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.odex.fsv_meta
+framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.vdex
+framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.vdex.fsv_meta
diff --git a/tools/filelistdiff/file_list_diff.py b/tools/filelistdiff/file_list_diff.py
index cdc5b2e..a6408e8 100644
--- a/tools/filelistdiff/file_list_diff.py
+++ b/tools/filelistdiff/file_list_diff.py
@@ -19,38 +19,54 @@
 COLOR_ERROR = '\033[91m'
 COLOR_NORMAL = '\033[0m'
 
-def find_unique_items(kati_installed_files, soong_installed_files, allowlist, system_module_name):
+def find_unique_items(kati_installed_files, soong_installed_files, system_module_name, allowlists):
     with open(kati_installed_files, 'r') as kati_list_file, \
-            open(soong_installed_files, 'r') as soong_list_file, \
-            open(allowlist, 'r') as allowlist_file:
+            open(soong_installed_files, 'r') as soong_list_file:
         kati_files = set(kati_list_file.read().split())
         soong_files = set(soong_list_file.read().split())
-        allowed_files = set(filter(lambda x: len(x), map(lambda x: x.lstrip().split('#',1)[0].rstrip() , allowlist_file.read().split('\n'))))
+
+    allowed_files = set()
+    for allowlist in allowlists:
+        with open(allowlist, 'r') as allowlist_file:
+            allowed_files.update(set(filter(lambda x: len(x), map(lambda x: x.lstrip().split('#', 1)[0].rstrip(), allowlist_file.read().split('\n')))))
 
     def is_unknown_diff(filepath):
-        return not filepath in allowed_files
+        return filepath not in allowed_files
+
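+    # An allowlist entry is unnecessary when the listed file no longer differs
+    # between the KATI and Soong installed file lists.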
+    def is_unnecessary_allowlist(filepath):
+        return filepath not in kati_files.symmetric_difference(soong_files)
 
     unique_in_kati = set(filter(is_unknown_diff, kati_files - soong_files))
     unique_in_soong = set(filter(is_unknown_diff, soong_files - kati_files))
+    unnecessary_allowlists = set(filter(is_unnecessary_allowlist, allowed_files))
 
     if unique_in_kati:
-        print(f'{COLOR_ERROR}Please add following modules into system image module {system_module_name}.{COLOR_NORMAL}')
-        print(f'{COLOR_WARNING}KATI only module(s):{COLOR_NORMAL}')
+        print('')
+        print(f'{COLOR_ERROR}Missing required modules in the {system_module_name} module.{COLOR_NORMAL}')
+        print(f'To resolve this issue, please add the missing modules to the Android.bp file of {system_module_name} so that the following KATI-only installed files are installed.')
+        print(f'You can find the correct Android.bp file using the command "gomod {system_module_name}".')
+        print(f'{COLOR_WARNING}KATI-only installed file(s):{COLOR_NORMAL}')
         for item in sorted(unique_in_kati):
-            print(item)
+            print('  '+item)
 
     if unique_in_soong:
-        if unique_in_kati:
-            print('')
-
-        print(f'{COLOR_ERROR}Please add following modules into build/make/target/product/base_system.mk.{COLOR_NORMAL}')
-        print(f'{COLOR_WARNING}Soong only module(s):{COLOR_NORMAL}')
-        for item in sorted(unique_in_soong):
-            print(item)
-
-    if unique_in_kati or unique_in_soong:
         print('')
-        print(f'{COLOR_ERROR}FAILED: System image from KATI and SOONG differs from installed file list.{COLOR_NORMAL}')
+        print(f'{COLOR_ERROR}Missing packages in base_system.mk.{COLOR_NORMAL}')
+        print('Please add the packages to build/make/target/product/base_system.mk to install them, or to build/make/tools/filelistdiff/allowlist to skip the following Soong-only installed files.')
+        print(f'{COLOR_WARNING}Soong-only installed file(s):{COLOR_NORMAL}')
+        for item in sorted(unique_in_soong):
+            print('  '+item)
+
+    if unnecessary_allowlists:
+        print('')
+        print(f'{COLOR_ERROR}Unnecessary files in allowlist.{COLOR_NORMAL}')
+        print('Please remove these entries from build/make/tools/filelistdiff/allowlist')
+        for item in sorted(unnecessary_allowlists):
+            print('  '+item)
+
+
+    if unique_in_kati or unique_in_soong or unnecessary_allowlists:
+        print('')
         sys.exit(1)
 
 
@@ -59,8 +75,8 @@
 
     parser.add_argument('kati_installed_file_list')
     parser.add_argument('soong_installed_file_list')
-    parser.add_argument('allowlist')
     parser.add_argument('system_module_name')
+    parser.add_argument('--allowlists', nargs='*', default=[])
     args = parser.parse_args()
 
-    find_unique_items(args.kati_installed_file_list, args.soong_installed_file_list, args.allowlist, args.system_module_name)
\ No newline at end of file
+    find_unique_items(args.kati_installed_file_list, args.soong_installed_file_list, args.system_module_name, args.allowlists)
\ No newline at end of file
diff --git a/tools/fs_config/Android.bp b/tools/fs_config/Android.bp
index 6aa5289..a5b6fd0 100644
--- a/tools/fs_config/Android.bp
+++ b/tools/fs_config/Android.bp
@@ -277,6 +277,7 @@
     out: ["out"],
 }
 
+// system
 genrule {
     name: "fs_config_dirs_system_gen",
     defaults: ["fs_config_defaults"],
@@ -307,6 +308,7 @@
     src: ":fs_config_files_system_gen",
 }
 
+// system_ext
 genrule {
     name: "fs_config_dirs_system_ext_gen",
     defaults: ["fs_config_defaults"],
@@ -337,6 +339,7 @@
     system_ext_specific: true,
 }
 
+// product
 genrule {
     name: "fs_config_dirs_product_gen",
     defaults: ["fs_config_defaults"],
@@ -367,6 +370,7 @@
     product_specific: true,
 }
 
+// vendor
 genrule {
     name: "fs_config_dirs_vendor_gen",
     defaults: ["fs_config_defaults"],
@@ -397,6 +401,7 @@
     vendor: true,
 }
 
+// odm
 genrule {
     name: "fs_config_dirs_odm_gen",
     defaults: ["fs_config_defaults"],
@@ -427,4 +432,214 @@
     device_specific: true,
 }
 
-// TODO(jiyong): add fs_config for oem, system_dlkm, vendor_dlkm, odm_dlkm partitions
+// system_dlkm
+genrule {
+    name: "fs_config_dirs_system_dlkm_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_dirs +
+        "--partition system_dlkm " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_dirs_system_dlkm",
+    filename: "fs_config_dirs",
+    src: ":fs_config_dirs_system_dlkm_gen",
+    system_dlkm_specific: true,
+}
+
+genrule {
+    name: "fs_config_files_system_dlkm_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_files +
+        "--partition system_dlkm " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_files_system_dlkm",
+    filename: "fs_config_files",
+    src: ":fs_config_files_system_dlkm_gen",
+    system_dlkm_specific: true,
+}
+
+// vendor_dlkm
+genrule {
+    name: "fs_config_dirs_vendor_dlkm_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_dirs +
+        "--partition vendor_dlkm " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_dirs_vendor_dlkm",
+    filename: "fs_config_dirs",
+    src: ":fs_config_dirs_vendor_dlkm_gen",
+    vendor_dlkm_specific: true,
+}
+
+genrule {
+    name: "fs_config_files_vendor_dlkm_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_files +
+        "--partition vendor_dlkm " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_files_vendor_dlkm",
+    filename: "fs_config_files",
+    src: ":fs_config_files_vendor_dlkm_gen",
+    vendor_dlkm_specific: true,
+}
+
+// odm_dlkm
+genrule {
+    name: "fs_config_dirs_odm_dlkm_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_dirs +
+        "--partition odm_dlkm " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_dirs_odm_dlkm",
+    filename: "fs_config_dirs",
+    src: ":fs_config_dirs_odm_dlkm_gen",
+    odm_dlkm_specific: true,
+}
+
+genrule {
+    name: "fs_config_files_odm_dlkm_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_files +
+        "--partition odm_dlkm " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_files_odm_dlkm",
+    filename: "fs_config_files",
+    src: ":fs_config_files_odm_dlkm_gen",
+    odm_dlkm_specific: true,
+}
+
+// oem
+genrule {
+    name: "fs_config_dirs_oem_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_dirs +
+        "--partition oem " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_dirs_oem",
+    filename: "fs_config_dirs",
+    src: ":fs_config_dirs_oem_gen",
+    oem_specific: true,
+}
+
+genrule {
+    name: "fs_config_files_oem_gen",
+    defaults: ["fs_config_defaults"],
+    cmd: fs_config_cmd_files +
+        "--partition oem " +
+        "$(locations :target_fs_config_gen)",
+}
+
+prebuilt_etc {
+    name: "fs_config_files_oem",
+    filename: "fs_config_files",
+    src: ":fs_config_files_oem_gen",
+    oem_specific: true,
+}
+
+// Generate the <p>/etc/fs_config_dirs binary files for each partition.
+// Add fs_config_dirs to PRODUCT_PACKAGES in the device make file to enable.
+phony {
+    name: "fs_config_dirs",
+    required: [
+        "fs_config_dirs_system",
+        "fs_config_dirs_system_ext",
+        "fs_config_dirs_product",
+        "fs_config_dirs_nonsystem",
+    ],
+}
+
+// Generate the <p>/etc/fs_config_files binary files for each partition.
+// Add fs_config_files to PRODUCT_PACKAGES in the device make file to enable.
+phony {
+    name: "fs_config_files",
+    required: [
+        "fs_config_files_system",
+        "fs_config_files_system_ext",
+        "fs_config_files_product",
+        "fs_config_files_nonsystem",
+    ],
+}
+
+// Generate the <p>/etc/fs_config_dirs binary files for all enabled partitions
+// excluding /system, /system_ext and /product. Add fs_config_dirs_nonsystem to
+// PRODUCT_PACKAGES in the device make file to enable.
+phony {
+    name: "fs_config_dirs_nonsystem",
+    required: [] +
+        select(soong_config_variable("fs_config", "vendor"), {
+            true: ["fs_config_dirs_vendor"],
+            default: [],
+        }) +
+        select(soong_config_variable("fs_config", "oem"), {
+            true: ["fs_config_dirs_oem"],
+            default: [],
+        }) +
+        select(soong_config_variable("fs_config", "odm"), {
+            true: ["fs_config_dirs_odm"],
+            default: [],
+        }) +
+        select(soong_config_variable("fs_config", "vendor_dlkm"), {
+            true: ["fs_config_dirs_vendor_dlkm"],
+            default: [],
+        }) +
+        select(soong_config_variable("fs_config", "odm_dlkm"), {
+            true: ["fs_config_dirs_odm_dlkm"],
+            default: [],
+        }) +
+        select(soong_config_variable("fs_config", "system_dlkm"), {
+            true: ["fs_config_dirs_system_dlkm"],
+            default: [],
+        }),
+}
+
+// Generate the <p>/etc/fs_config_files binary files for all enabled partitions
+// excluding /system, /system_ext and /product. Add fs_config_files_nonsystem to
+// PRODUCT_PACKAGES in the device make file to enable.
+phony {
+    name: "fs_config_files_nonsystem",
+    required: [] +
+        select(soong_config_variable("fs_config", "vendor"), {
+            true: ["fs_config_files_vendor"],
+            default: [],
+        }) +
+        select(soong_config_variable("fs_config", "oem"), {
+            true: ["fs_config_files_oem"],
+            default: [],
+        }) +
+        select(soong_config_variable("fs_config", "odm"), {
+            true: ["fs_config_files_odm"],
+            default: [],
+        }) +
+        select(soong_config_variable("fs_config", "vendor_dlkm"), {
+            true: ["fs_config_files_vendor_dlkm"],
+            default: [],
+        }) +
+        select(soong_config_variable("fs_config", "odm_dlkm"), {
+            true: ["fs_config_files_odm_dlkm"],
+            default: [],
+        }) +
+        select(soong_config_variable("fs_config", "system_dlkm"), {
+            true: ["fs_config_files_system_dlkm"],
+            default: [],
+        }),
+}
diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk
deleted file mode 100644
index e4c3626..0000000
--- a/tools/fs_config/Android.mk
+++ /dev/null
@@ -1,328 +0,0 @@
-# Copyright (C) 2008 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-LOCAL_PATH := $(call my-dir)
-
-# One can override the default android_filesystem_config.h file by using TARGET_FS_CONFIG_GEN.
-#   Set TARGET_FS_CONFIG_GEN to contain a list of intermediate format files
-#   for generating the android_filesystem_config.h file.
-#
-# More information can be found in the README
-
-ifneq ($(wildcard $(TARGET_DEVICE_DIR)/android_filesystem_config.h),)
-$(error Using $(TARGET_DEVICE_DIR)/android_filesystem_config.h is deprecated, please use TARGET_FS_CONFIG_GEN instead)
-endif
-
-android_filesystem_config := system/core/libcutils/include/private/android_filesystem_config.h
-capability_header := bionic/libc/kernel/uapi/linux/capability.h
-
-# List of supported vendor, oem, odm, vendor_dlkm, odm_dlkm, and system_dlkm Partitions
-fs_config_generate_extra_partition_list := $(strip \
-  $(if $(BOARD_USES_VENDORIMAGE)$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),vendor) \
-  $(if $(BOARD_USES_OEMIMAGE)$(BOARD_OEMIMAGE_FILE_SYSTEM_TYPE),oem) \
-  $(if $(BOARD_USES_ODMIMAGE)$(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE),odm) \
-  $(if $(BOARD_USES_VENDOR_DLKMIMAGE)$(BOARD_VENDOR_DLKMIMAGE_FILE_SYSTEM_TYPE),vendor_dlkm) \
-  $(if $(BOARD_USES_ODM_DLKMIMAGE)$(BOARD_ODM_DLKMIMAGE_FILE_SYSTEM_TYPE),odm_dlkm) \
-  $(if $(BOARD_USES_SYSTEM_DLKMIMAGE)$(BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE),system_dlkm) \
-)
-
-##################################
-# Generate the <p>/etc/fs_config_dirs binary files for each partition.
-# Add fs_config_dirs to PRODUCT_PACKAGES in the device make file to enable.
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_dirs
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_REQUIRED_MODULES := \
-  fs_config_dirs_system \
-  fs_config_dirs_system_ext \
-  fs_config_dirs_product \
-  fs_config_dirs_nonsystem
-include $(BUILD_PHONY_PACKAGE)
-
-##################################
-# Generate the <p>/etc/fs_config_files binary files for each partition.
-# Add fs_config_files to PRODUCT_PACKAGES in the device make file to enable.
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_files
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_REQUIRED_MODULES := \
-  fs_config_files_system \
-  fs_config_files_system_ext \
-  fs_config_files_product \
-  fs_config_files_nonsystem
-include $(BUILD_PHONY_PACKAGE)
-
-##################################
-# Generate the <p>/etc/fs_config_dirs binary files for all enabled partitions
-# excluding /system, /system_ext and /product. Add fs_config_dirs_nonsystem to
-# PRODUCT_PACKAGES in the device make file to enable.
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_dirs_nonsystem
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),fs_config_dirs_$(t))
-include $(BUILD_PHONY_PACKAGE)
-
-##################################
-# Generate the <p>/etc/fs_config_files binary files for all enabled partitions
-# excluding /system, /system_ext and /product. Add fs_config_files_nonsystem to
-# PRODUCT_PACKAGES in the device make file to enable.
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_files_nonsystem
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),fs_config_files_$(t))
-include $(BUILD_PHONY_PACKAGE)
-
-ifneq ($(filter oem,$(fs_config_generate_extra_partition_list)),)
-##################################
-# Generate the oem/etc/fs_config_dirs binary file for the target
-# Add fs_config_dirs or fs_config_dirs_nonsystem to PRODUCT_PACKAGES
-# in the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_dirs_oem
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
-LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition oem \
-	   --dirs \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-
-##################################
-# Generate the oem/etc/fs_config_files binary file for the target
-# Add fs_config_files or fs_config_files_nonsystem to PRODUCT_PACKAGES
-# in the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_files_oem
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_files
-LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition oem \
-	   --files \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-
-endif
-
-ifneq ($(filter vendor_dlkm,$(fs_config_generate_extra_partition_list)),)
-##################################
-# Generate the vendor_dlkm/etc/fs_config_dirs binary file for the target
-# Add fs_config_dirs or fs_config_dirs_nonsystem to PRODUCT_PACKAGES in
-# the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_dirs_vendor_dlkm
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
-LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_DLKM)/etc
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition vendor_dlkm \
-	   --dirs \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-
-##################################
-# Generate the vendor_dlkm/etc/fs_config_files binary file for the target
-# Add fs_config_files or fs_config_files_nonsystem to PRODUCT_PACKAGES in
-# the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_files_vendor_dlkm
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_files
-LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_DLKM)/etc
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition vendor_dlkm \
-	   --files \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-
-endif
-
-ifneq ($(filter odm_dlkm,$(fs_config_generate_extra_partition_list)),)
-##################################
-# Generate the odm_dlkm/etc/fs_config_dirs binary file for the target
-# Add fs_config_dirs or fs_config_dirs_nonsystem to PRODUCT_PACKAGES
-# in the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_dirs_odm_dlkm
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
-LOCAL_MODULE_PATH := $(TARGET_OUT_ODM_DLKM)/etc
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition odm_dlkm \
-	   --dirs \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-
-##################################
-# Generate the odm_dlkm/etc/fs_config_files binary file for the target
-# Add fs_config_files or fs_config_files_nonsystem to PRODUCT_PACKAGES
-# in the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_files_odm_dlkm
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_files
-LOCAL_MODULE_PATH := $(TARGET_OUT_ODM_DLKM)/etc
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition odm_dlkm \
-	   --files \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-
-endif
-
-ifneq ($(filter system_dlkm,$(fs_config_generate_extra_partition_list)),)
-##################################
-# Generate the system_dlkm/etc/fs_config_dirs binary file for the target
-# Add fs_config_dirs or fs_config_dirs_nonsystem to PRODUCT_PACKAGES
-# in the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_dirs_system_dlkm
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
-LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_DLKM)/etc
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition system_dlkm \
-	   --dirs \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-
-##################################
-# Generate the system_dlkm/etc/fs_config_files binary file for the target
-# Add fs_config_files or fs_config_files_nonsystem to PRODUCT_PACKAGES
-# in the device make file to enable
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := fs_config_files_system_dlkm
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_MODULE_CLASS := ETC
-LOCAL_INSTALLED_MODULE_STEM := fs_config_files
-LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_DLKM)/etc
-include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(capability_header)
-$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(android_filesystem_config) $(capability_header)
-	@mkdir -p $(dir $@)
-	$< fsconfig \
-	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
-	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
-	   --partition system_dlkm \
-	   --files \
-	   --out_file $@ \
-	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
-
-endif
-
-android_filesystem_config :=
-capability_header :=
-fs_config_generate_extra_partition_list :=
diff --git a/tools/ide_query/ide_query.go b/tools/ide_query/ide_query.go
index 23c7abd..c7cf5ed 100644
--- a/tools/ide_query/ide_query.go
+++ b/tools/ide_query/ide_query.go
@@ -293,11 +293,19 @@
 // If a file is covered by multiple modules, the first module is returned.
 func findJavaModules(paths []string, modules map[string]*javaModule) map[string]string {
 	ret := make(map[string]string)
-	for name, module := range modules {
+	// A file may be part of multiple modules. To make the result deterministic,
+	// check the modules in sorted order.
+	keys := make([]string, 0, len(modules))
+	for name := range modules {
+		keys = append(keys, name)
+	}
+	slices.Sort(keys)
+	for _, name := range keys {
 		if strings.HasSuffix(name, ".impl") {
 			continue
 		}
 
+		module := modules[name]
 		for i, p := range paths {
 			if slices.Contains(module.Srcs, p) {
 				ret[p] = name
@@ -341,6 +349,8 @@
 			Id:              moduleName,
 			Language:        pb.Language_LANGUAGE_JAVA,
 			SourceFilePaths: m.Srcs,
+			GeneratedFiles:  genFiles(env, m),
+			DependencyIds:   m.Deps,
 		}
 		unitsById[u.Id] = u
 
@@ -355,14 +365,11 @@
 				continue
 			}
 
-			var paths []string
-			paths = append(paths, mod.Srcs...)
-			paths = append(paths, mod.SrcJars...)
-			paths = append(paths, mod.Jars...)
 			unitsById[name] = &pb.BuildableUnit{
 				Id:              name,
 				SourceFilePaths: mod.Srcs,
-				GeneratedFiles:  genFiles(env, paths),
+				GeneratedFiles:  genFiles(env, mod),
+				DependencyIds:   mod.Deps,
 			}
 
 			for _, d := range mod.Deps {
@@ -379,8 +386,13 @@
 }
 
 // genFiles returns the generated files (paths that start with outDir/) for the
-// given paths. Generated files that do not exist are ignored.
-func genFiles(env Env, paths []string) []*pb.GeneratedFile {
+// given module. Generated files that do not exist are ignored.
+func genFiles(env Env, mod *javaModule) []*pb.GeneratedFile {
+	var paths []string
+	paths = append(paths, mod.Srcs...)
+	paths = append(paths, mod.SrcJars...)
+	paths = append(paths, mod.Jars...)
+
 	prefix := env.OutDir + "/"
 	var ret []*pb.GeneratedFile
 	for _, p := range paths {
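
The findJavaModules fix above addresses Go's randomized map iteration: when a source file is claimed by more than one module, walking the map directly can pick a different owner on each run, so the patch collects and sorts the module names first. The patch itself is Go; below is a minimal Python sketch of the same idea, with a hypothetical `modules` mapping of module name to source paths.

    # Deterministic owner selection: iterate candidates in sorted order so a
    # file claimed by several modules always resolves to the same one.
    def find_owners(paths, modules):
        """modules: dict of module name -> list of source paths (illustrative)."""
        owners = {}
        for name in sorted(modules):       # sorted() pins the iteration order
            if name.endswith(".impl"):
                continue
            for path in paths:
                if path in modules[name] and path not in owners:
                    owners[path] = name    # first (lexically smallest) module wins
        return owners

    print(find_owners(["a.java"], {"libB": ["a.java"], "libA": ["a.java"]}))
    # {'a.java': 'libA'} on every run, regardless of dict insertion order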
diff --git a/tools/ide_query/ide_query.sh b/tools/ide_query/ide_query.sh
index 6f9b0c4..8dfffc1 100755
--- a/tools/ide_query/ide_query.sh
+++ b/tools/ide_query/ide_query.sh
@@ -19,7 +19,7 @@
 require_top
 
 # Ensure cogsetup (out/ will be symlink outside the repo)
-. ${TOP}/build/make/cogsetup.sh
+setup_cog_env_if_needed
 
 case $(uname -s) in
     Linux)
diff --git a/tools/ide_query/prober_scripts/ide_query.out b/tools/ide_query/prober_scripts/ide_query.out
index cd7ce6d..be48da1 100644
--- a/tools/ide_query/prober_scripts/ide_query.out
+++ b/tools/ide_query/prober_scripts/ide_query.out
@@ -1,7 +1,9 @@
 
-out–a
-8build/make/tools/ide_query/prober_scripts/cpp/general.cc8prebuilts/clang/host/linux-x86/clang-r522817/bin/clang++-mthumb-Os-fomit-frame-pointer-mllvm-enable-shrink-wrap=false-O2-Wall-Wextra-Winit-self-Wpointer-arith-Wunguarded-availability-Werror=date-time-Werror=int-conversion-Werror=pragma-pack&-Werror=pragma-pack-suspicious-include-Werror=sizeof-array-div-Werror=string-plus-int'-Werror=unreachable-code-loop-increment"-Wno-error=deprecated-declarations-Wno-c99-designator-Wno-gnu-folding-constant"-Wno-inconsistent-missing-override-Wno-error=reorder-init-list-Wno-reorder-init-list-Wno-sign-compare-Wno-unused	-DANDROID-DNDEBUG-UDEBUG(-D__compiler_offsetof=__builtin_offsetof*-D__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__	-faddrsig-fdebug-default-version=5-fcolor-diagnostics-ffp-contract=off-fno-exceptions-fno-strict-aliasing-fmessage-length=0#-fno-relaxed-template-template-args-gsimple-template-names-gz=zstd-no-canonical-prefixes-Wno-error=format"-fdebug-prefix-map=/proc/self/cwd=-ftrivial-auto-var-init=zero-g-ffunction-sections-fdata-sections-fno-short-enums-funwind-tables-fstack-protector-strong-Wa,--noexecstack-D_FORTIFY_SOURCE=2-Wstrict-aliasing=2-Werror=return-type-Werror=non-virtual-dtor-Werror=address-Werror=sequence-point-Werror=format-security-nostdlibinc-fdebug-info-for-profiling-msoft-float-march=armv7-a-mfloat-abi=softfp
--mfpu=neon/-Ibuild/make/tools/ide_query/prober_scripts/cpp³-Iout/soong/.intermediates/build/make/tools/ide_query/prober_scripts/cpp/ide_query_proberscript_cc/android_arm_armv7-a-neon/gen/proto/build/make/tools/ide_query/prober_scripts/cppÂ…-Iout/soong/.intermediates/build/make/tools/ide_query/prober_scripts/cpp/ide_query_proberscript_cc/android_arm_armv7-a-neon/gen/proto-D__LIBC_API__=10000-D__LIBM_API__=10000-D__LIBDL_API__=10000-Iexternal/protobuf/srcY-Iprebuilts/clang/host/linux-x86/clang-r522817/android_libc++/platform/arm/include/c++/v1=-Iprebuilts/clang/host/linux-x86/clang-r522817/include/c++/v1 -Ibionic/libc/async_safe/include-Isystem/logging/liblog/include'-Ibionic/libc/system_properties/include<-Isystem/core/property_service/libpropertyinfoparser/include-isystembionic/libc/include-isystembionic/libc/kernel/uapi/asm-arm-isystembionic/libc/kernel/uapi-isystembionic/libc/kernel/android/scsi-isystembionic/libc/kernel/android/uapi-targetarmv7a-linux-androideabi10000-DANDROID_STRICT-fPIE-Werror-Wno-unused-parameter-DGOOGLE_PROTOBUF_NO_RTTI-Wimplicit-fallthrough*-D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS-Wno-gnu-include-next-fvisibility-inlines-hidden-mllvm-enable-shrink-wrap=false-std=gnu++20	-fno-rtti-Isystem/core/include-Isystem/logging/liblog/include-Isystem/media/audio/include-Ihardware/libhardware/include%-Ihardware/libhardware_legacy/include-Ihardware/ril/include-Iframeworks/native/include"-Iframeworks/native/opengl/include-Iframeworks/av/include-Werror=bool-operation -Werror=format-insufficient-args%-Werror=implicit-int-float-conversion-Werror=int-in-bool-context-Werror=int-to-pointer-cast-Werror=pointer-to-int-cast-Werror=xor-used-as-pow-Wno-void-pointer-to-enum-cast-Wno-void-pointer-to-int-cast-Wno-pointer-to-int-cast-Werror=fortify-source-Wno-unused-variable-Wno-missing-field-initializers-Wno-packed-non-pod-Werror=address-of-temporary+-Werror=incompatible-function-pointer-types-Werror=null-dereference-Werror=return-type"-Wno-tautological-constant-compare$-Wno-tautological-type-limit-compare"-Wno-implicit-int-float-conversion!-Wno-tautological-overlap-compare-Wno-deprecated-copy-Wno-range-loop-construct"-Wno-zero-as-null-pointer-constant)-Wno-deprecated-anon-enum-enum-conversion$-Wno-deprecated-enum-enum-conversion-Wno-pessimizing-move-Wno-non-c-typedef-for-linkage-Wno-align-mismatch"-Wno-error=unused-but-set-variable#-Wno-error=unused-but-set-parameter-Wno-error=deprecated-builtins-Wno-error=deprecated2-Wno-error=single-bit-bitfield-constant-conversion$-Wno-error=enum-constexpr-conversion-Wno-error=invalid-offsetof&-Wno-deprecated-dynamic-exception-spec8build/make/tools/ide_query/prober_scripts/cpp/general.cc"Õ?
+out2x
+8build/make/tools/ide_query/prober_scripts/cpp/general.cc8build/make/tools/ide_query/prober_scripts/cpp/general.cc:—"
+8build/make/tools/ide_query/prober_scripts/cpp/general.cc8build/make/tools/ide_query/prober_scripts/cpp/general.cc"8prebuilts/clang/host/linux-x86/clang-r530567/bin/clang++"-nostdlibinc"-mthumb"-Os"-fomit-frame-pointer"-mllvm"-enable-shrink-wrap=false"-O2"-Wall"-Wextra"-Winit-self"-Wpointer-arith"-Wunguarded-availability"-Werror=date-time"-Werror=int-conversion"-Werror=pragma-pack"&-Werror=pragma-pack-suspicious-include"-Werror=sizeof-array-div"-Werror=string-plus-int"'-Werror=unreachable-code-loop-increment""-Wno-error=deprecated-declarations"-Wno-c23-extensions"-Wno-c99-designator"-Wno-gnu-folding-constant""-Wno-inconsistent-missing-override"-Wno-error=reorder-init-list"-Wno-reorder-init-list"-Wno-sign-compare"-Wno-unused"	-DANDROID"-DNDEBUG"-UDEBUG"(-D__compiler_offsetof=__builtin_offsetof"*-D__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__"	-faddrsig"-fdebug-default-version=5"-fcolor-diagnostics"-ffp-contract=off"-fno-exceptions"-fno-strict-aliasing"-fmessage-length=0"-gsimple-template-names"-gz=zstd"-no-canonical-prefixes""-fdebug-prefix-map=/proc/self/cwd="-ftrivial-auto-var-init=zero"-g"-ffunction-sections"-fdata-sections"-fno-short-enums"-funwind-tables"-fstack-protector-strong"-Wa,--noexecstack"-D_FORTIFY_SOURCE=2"-Wstrict-aliasing=2"-Werror=return-type"-Werror=non-virtual-dtor"-Werror=address"-Werror=sequence-point"-Werror=format-security"-msoft-float"-march=armv7-a"-mfloat-abi=softfp"
+-mfpu=neon"/-Ibuild/make/tools/ide_query/prober_scripts/cpp"³-Iout/soong/.intermediates/build/make/tools/ide_query/prober_scripts/cpp/ide_query_proberscript_cc/android_arm_armv7-a-neon/gen/proto/build/make/tools/ide_query/prober_scripts/cpp"Â…-Iout/soong/.intermediates/build/make/tools/ide_query/prober_scripts/cpp/ide_query_proberscript_cc/android_arm_armv7-a-neon/gen/proto"-D__LIBC_API__=10000"-D__LIBM_API__=10000"-D__LIBDL_API__=10000"-Iexternal/protobuf/src"Y-Iprebuilts/clang/host/linux-x86/clang-r530567/android_libc++/platform/arm/include/c++/v1"=-Iprebuilts/clang/host/linux-x86/clang-r530567/include/c++/v1" -Ibionic/libc/async_safe/include"-Isystem/logging/liblog/include"'-Ibionic/libc/system_properties/include"<-Isystem/core/property_service/libpropertyinfoparser/include"-isystem"bionic/libc/include"-isystem"bionic/libc/kernel/uapi/asm-arm"-isystem"bionic/libc/kernel/uapi"-isystem"bionic/libc/kernel/android/scsi"-isystem"bionic/libc/kernel/android/uapi"-target"armv7a-linux-androideabi10000"-DANDROID_STRICT"-fPIE"-Werror"-Wno-unused-parameter"-DGOOGLE_PROTOBUF_NO_RTTI"-Wimplicit-fallthrough"*-D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS"-Wno-gnu-include-next"-fvisibility-inlines-hidden"-mllvm"-enable-shrink-wrap=false"-std=gnu++20"	-fno-rtti"-Isystem/core/include"-Isystem/logging/liblog/include"-Isystem/media/audio/include"-Ihardware/libhardware/include"%-Ihardware/libhardware_legacy/include"-Ihardware/ril/include"-Iframeworks/native/include""-Iframeworks/native/opengl/include"-Iframeworks/av/include"-Werror=bool-operation" -Werror=format-insufficient-args"%-Werror=implicit-int-float-conversion"-Werror=int-in-bool-context"-Werror=int-to-pointer-cast"-Werror=pointer-to-int-cast"-Werror=xor-used-as-pow"-Wno-void-pointer-to-enum-cast"-Wno-void-pointer-to-int-cast"-Wno-pointer-to-int-cast"-Werror=fortify-source"-Wno-unused-variable"-Wno-missing-field-initializers"-Wno-packed-non-pod"-Werror=address-of-temporary"+-Werror=incompatible-function-pointer-types"-Werror=null-dereference"-Werror=return-type""-Wno-tautological-constant-compare"$-Wno-tautological-type-limit-compare""-Wno-implicit-int-float-conversion"!-Wno-tautological-overlap-compare"-Wno-deprecated-copy"-Wno-range-loop-construct""-Wno-zero-as-null-pointer-constant")-Wno-deprecated-anon-enum-enum-conversion"$-Wno-deprecated-enum-enum-conversion"-Wno-error=pessimizing-move"-Wno-non-c-typedef-for-linkage"-Wno-align-mismatch""-Wno-error=unused-but-set-variable"#-Wno-error=unused-but-set-parameter"-Wno-error=deprecated-builtins"-Wno-error=deprecated"&-Wno-deprecated-dynamic-exception-spec"$-Wno-error=enum-constexpr-conversion"-Wno-error=invalid-offsetof")-Wno-error=thread-safety-reference-return"-Wno-vla-cxx-extension"8build/make/tools/ide_query/prober_scripts/cpp/general.cc2Egenfiles_for_build/make/tools/ide_query/prober_scripts/cpp/general.cc:Ÿ@
+Egenfiles_for_build/make/tools/ide_query/prober_scripts/cpp/general.cc*Õ?
 ¶soong/.intermediates/build/make/tools/ide_query/prober_scripts/cpp/ide_query_proberscript_cc/android_arm_armv7-a-neon/gen/proto/build/make/tools/ide_query/prober_scripts/cpp/foo.pb.h™>// Generated by the protocol buffer compiler.  DO NOT EDIT!
 // source: build/make/tools/ide_query/prober_scripts/cpp/foo.proto
 
diff --git a/tools/ide_query/prober_scripts/jvm/Foo.java b/tools/ide_query/prober_scripts/jvm/Foo.java
new file mode 100644
index 0000000..a043f72
--- /dev/null
+++ b/tools/ide_query/prober_scripts/jvm/Foo.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package jvm;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+
+/** Foo class. */
+public final class Foo {
+
+  void testCompletion() {
+    ArrayList<Integer> list = new ArrayList<>();
+    System.out.println(list);
+
+    // ^
+
+    // step
+    // ; Test completion on the standard types.
+    // type("list.")
+    // completion.trigger()
+    // assert completion.items.filter(label="add.*")
+  }
+}
diff --git a/tools/ide_query/prober_scripts/jvm/suite.textpb b/tools/ide_query/prober_scripts/jvm/suite.textpb
new file mode 100644
index 0000000..460e08c
--- /dev/null
+++ b/tools/ide_query/prober_scripts/jvm/suite.textpb
@@ -0,0 +1,4 @@
+tests: {
+  name: "general"
+  scripts: "build/make/tools/ide_query/prober_scripts/jvm/Foo.java"
+}
diff --git a/tools/ide_query/prober_scripts/regen.sh b/tools/ide_query/prober_scripts/regen.sh
index 2edfe53..04a0264 100755
--- a/tools/ide_query/prober_scripts/regen.sh
+++ b/tools/ide_query/prober_scripts/regen.sh
@@ -21,13 +21,8 @@
 # ide_query.sh. The prober doesn't check-out the full source code, so it
 # can't run ide_query.sh itself.
 
-cd $(dirname $BASH_SOURCE)
-source $(pwd)/../../../shell_utils.sh
-require_top
-
 files_to_build=(
   build/make/tools/ide_query/prober_scripts/cpp/general.cc
 )
 
-cd ${TOP}
 build/make/tools/ide_query/ide_query.sh --lunch_target=aosp_arm-trunk_staging-eng ${files_to_build[@]} > build/make/tools/ide_query/prober_scripts/ide_query.out
diff --git a/tools/metadata/Android.bp b/tools/metadata/Android.bp
deleted file mode 100644
index 77d106d..0000000
--- a/tools/metadata/Android.bp
+++ /dev/null
@@ -1,16 +0,0 @@
-package {
-    default_applicable_licenses: ["Android-Apache-2.0"],
-}
-
-blueprint_go_binary {
-    name: "metadata",
-    deps: [
-            "soong-testing-test_spec_proto",
-            "soong-testing-code_metadata_proto",
-            "soong-testing-code_metadata_internal_proto",
-            "golang-protobuf-proto",
-        ],
-    srcs: [
-        "generator.go",
-    ]
-}
\ No newline at end of file
diff --git a/tools/metadata/OWNERS b/tools/metadata/OWNERS
deleted file mode 100644
index 03bcdf1..0000000
--- a/tools/metadata/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-dariofreni@google.com
-joeo@google.com
-ronish@google.com
-caditya@google.com
diff --git a/tools/metadata/generator.go b/tools/metadata/generator.go
deleted file mode 100644
index b7668be..0000000
--- a/tools/metadata/generator.go
+++ /dev/null
@@ -1,328 +0,0 @@
-package main
-
-import (
-	"flag"
-	"fmt"
-	"io"
-	"log"
-	"os"
-	"sort"
-	"strings"
-	"sync"
-
-	"android/soong/testing/code_metadata_internal_proto"
-	"android/soong/testing/code_metadata_proto"
-	"android/soong/testing/test_spec_proto"
-	"google.golang.org/protobuf/proto"
-)
-
-type keyToLocksMap struct {
-	locks sync.Map
-}
-
-func (kl *keyToLocksMap) GetLockForKey(key string) *sync.Mutex {
-	mutex, _ := kl.locks.LoadOrStore(key, &sync.Mutex{})
-	return mutex.(*sync.Mutex)
-}
-
-// Define a struct to hold the combination of team ID and multi-ownership flag for validation
-type sourceFileAttributes struct {
-	TeamID         string
-	MultiOwnership bool
-	Path           string
-}
-
-func getSortedKeys(syncMap *sync.Map) []string {
-	var allKeys []string
-	syncMap.Range(
-		func(key, _ interface{}) bool {
-			allKeys = append(allKeys, key.(string))
-			return true
-		},
-	)
-
-	sort.Strings(allKeys)
-	return allKeys
-}
-
-// writeProtoToFile marshals a protobuf message and writes it to a file
-func writeProtoToFile(outputFile string, message proto.Message) {
-	data, err := proto.Marshal(message)
-	if err != nil {
-		log.Fatal(err)
-	}
-	file, err := os.Create(outputFile)
-	if err != nil {
-		log.Fatal(err)
-	}
-	defer file.Close()
-
-	_, err = file.Write(data)
-	if err != nil {
-		log.Fatal(err)
-	}
-}
-
-func readFileToString(filePath string) string {
-	file, err := os.Open(filePath)
-	if err != nil {
-		log.Fatal(err)
-	}
-	defer file.Close()
-
-	data, err := io.ReadAll(file)
-	if err != nil {
-		log.Fatal(err)
-	}
-	return string(data)
-}
-
-func writeEmptyOutputProto(outputFile string, metadataRule string) {
-	file, err := os.Create(outputFile)
-	if err != nil {
-		log.Fatal(err)
-	}
-	var message proto.Message
-	if metadataRule == "test_spec" {
-		message = &test_spec_proto.TestSpec{}
-	} else if metadataRule == "code_metadata" {
-		message = &code_metadata_proto.CodeMetadata{}
-	}
-	data, err := proto.Marshal(message)
-	if err != nil {
-		log.Fatal(err)
-	}
-	defer file.Close()
-
-	_, err = file.Write([]byte(data))
-	if err != nil {
-		log.Fatal(err)
-	}
-}
-
-func processTestSpecProtobuf(
-	filePath string, ownershipMetadataMap *sync.Map, keyLocks *keyToLocksMap,
-	errCh chan error, wg *sync.WaitGroup,
-) {
-	defer wg.Done()
-
-	fileContent := strings.TrimRight(readFileToString(filePath), "\n")
-	testData := test_spec_proto.TestSpec{}
-	err := proto.Unmarshal([]byte(fileContent), &testData)
-	if err != nil {
-		errCh <- err
-		return
-	}
-
-	ownershipMetadata := testData.GetOwnershipMetadataList()
-	for _, metadata := range ownershipMetadata {
-		key := metadata.GetTargetName()
-		lock := keyLocks.GetLockForKey(key)
-		lock.Lock()
-
-		value, loaded := ownershipMetadataMap.LoadOrStore(
-			key, []*test_spec_proto.TestSpec_OwnershipMetadata{metadata},
-		)
-		if loaded {
-			existingMetadata := value.([]*test_spec_proto.TestSpec_OwnershipMetadata)
-			isDuplicate := false
-			for _, existing := range existingMetadata {
-				if metadata.GetTrendyTeamId() != existing.GetTrendyTeamId() {
-					errCh <- fmt.Errorf(
-						"Conflicting trendy team IDs found for %s at:\n%s with teamId"+
-							": %s,\n%s with teamId: %s",
-						key,
-						metadata.GetPath(), metadata.GetTrendyTeamId(), existing.GetPath(),
-						existing.GetTrendyTeamId(),
-					)
-
-					lock.Unlock()
-					return
-				}
-				if metadata.GetTrendyTeamId() == existing.GetTrendyTeamId() && metadata.GetPath() == existing.GetPath() {
-					isDuplicate = true
-					break
-				}
-			}
-			if !isDuplicate {
-				existingMetadata = append(existingMetadata, metadata)
-				ownershipMetadataMap.Store(key, existingMetadata)
-			}
-		}
-
-		lock.Unlock()
-	}
-}
-
-// processCodeMetadataProtobuf processes CodeMetadata protobuf files
-func processCodeMetadataProtobuf(
-	filePath string, ownershipMetadataMap *sync.Map, sourceFileMetadataMap *sync.Map, keyLocks *keyToLocksMap,
-	errCh chan error, wg *sync.WaitGroup,
-) {
-	defer wg.Done()
-
-	fileContent := strings.TrimRight(readFileToString(filePath), "\n")
-	internalCodeData := code_metadata_internal_proto.CodeMetadataInternal{}
-	err := proto.Unmarshal([]byte(fileContent), &internalCodeData)
-	if err != nil {
-		errCh <- err
-		return
-	}
-
-	// Process each TargetOwnership entry
-	for _, internalMetadata := range internalCodeData.GetTargetOwnershipList() {
-		key := internalMetadata.GetTargetName()
-		lock := keyLocks.GetLockForKey(key)
-		lock.Lock()
-
-		for _, srcFile := range internalMetadata.GetSourceFiles() {
-			srcFileKey := srcFile
-			srcFileLock := keyLocks.GetLockForKey(srcFileKey)
-			srcFileLock.Lock()
-			attributes := sourceFileAttributes{
-				TeamID:         internalMetadata.GetTrendyTeamId(),
-				MultiOwnership: internalMetadata.GetMultiOwnership(),
-				Path:           internalMetadata.GetPath(),
-			}
-
-			existingAttributes, exists := sourceFileMetadataMap.Load(srcFileKey)
-			if exists {
-				existing := existingAttributes.(sourceFileAttributes)
-				if attributes.TeamID != existing.TeamID && (!attributes.MultiOwnership || !existing.MultiOwnership) {
-					errCh <- fmt.Errorf(
-						"Conflict found for source file %s covered at %s with team ID: %s. Existing team ID: %s and path: %s."+
-							" If multi-ownership is required, multiOwnership should be set to true in all test_spec modules using this target. "+
-							"Multiple-ownership in general is discouraged though as it make infrastructure around android relying on this information pick up a random value when it needs only one.",
-						srcFile, internalMetadata.GetPath(), attributes.TeamID, existing.TeamID, existing.Path,
-					)
-					srcFileLock.Unlock()
-					lock.Unlock()
-					return
-				}
-			} else {
-				// Store the metadata if no conflict
-				sourceFileMetadataMap.Store(srcFileKey, attributes)
-			}
-			srcFileLock.Unlock()
-		}
-
-		value, loaded := ownershipMetadataMap.LoadOrStore(
-			key, []*code_metadata_internal_proto.CodeMetadataInternal_TargetOwnership{internalMetadata},
-		)
-		if loaded {
-			existingMetadata := value.([]*code_metadata_internal_proto.CodeMetadataInternal_TargetOwnership)
-			isDuplicate := false
-			for _, existing := range existingMetadata {
-				if internalMetadata.GetTrendyTeamId() == existing.GetTrendyTeamId() && internalMetadata.GetPath() == existing.GetPath() {
-					isDuplicate = true
-					break
-				}
-			}
-			if !isDuplicate {
-				existingMetadata = append(existingMetadata, internalMetadata)
-				ownershipMetadataMap.Store(key, existingMetadata)
-			}
-		}
-
-		lock.Unlock()
-	}
-}
-
-func main() {
-	inputFile := flag.String("inputFile", "", "Input file path")
-	outputFile := flag.String("outputFile", "", "Output file path")
-	rule := flag.String(
-		"rule", "", "Metadata rule (Hint: test_spec or code_metadata)",
-	)
-	flag.Parse()
-
-	if *inputFile == "" || *outputFile == "" || *rule == "" {
-		fmt.Println("Usage: metadata -rule <rule> -inputFile <input file path> -outputFile <output file path>")
-		os.Exit(1)
-	}
-
-	inputFileData := strings.TrimRight(readFileToString(*inputFile), "\n")
-	filePaths := strings.Split(inputFileData, " ")
-	if len(filePaths) == 1 && filePaths[0] == "" {
-		writeEmptyOutputProto(*outputFile, *rule)
-		return
-	}
-	ownershipMetadataMap := &sync.Map{}
-	keyLocks := &keyToLocksMap{}
-	errCh := make(chan error, len(filePaths))
-	var wg sync.WaitGroup
-
-	switch *rule {
-	case "test_spec":
-		for _, filePath := range filePaths {
-			wg.Add(1)
-			go processTestSpecProtobuf(
-				filePath, ownershipMetadataMap, keyLocks, errCh, &wg,
-			)
-		}
-
-		wg.Wait()
-		close(errCh)
-
-		for err := range errCh {
-			log.Fatal(err)
-		}
-
-		allKeys := getSortedKeys(ownershipMetadataMap)
-		var allMetadata []*test_spec_proto.TestSpec_OwnershipMetadata
-
-		for _, key := range allKeys {
-			value, _ := ownershipMetadataMap.Load(key)
-			metadataList := value.([]*test_spec_proto.TestSpec_OwnershipMetadata)
-			allMetadata = append(allMetadata, metadataList...)
-		}
-
-		testSpec := &test_spec_proto.TestSpec{
-			OwnershipMetadataList: allMetadata,
-		}
-		writeProtoToFile(*outputFile, testSpec)
-		break
-	case "code_metadata":
-		sourceFileMetadataMap := &sync.Map{}
-		for _, filePath := range filePaths {
-			wg.Add(1)
-			go processCodeMetadataProtobuf(
-				filePath, ownershipMetadataMap, sourceFileMetadataMap, keyLocks, errCh, &wg,
-			)
-		}
-
-		wg.Wait()
-		close(errCh)
-
-		for err := range errCh {
-			log.Fatal(err)
-		}
-
-		sortedKeys := getSortedKeys(ownershipMetadataMap)
-		allMetadata := make([]*code_metadata_proto.CodeMetadata_TargetOwnership, 0)
-		for _, key := range sortedKeys {
-			value, _ := ownershipMetadataMap.Load(key)
-			metadata := value.([]*code_metadata_internal_proto.CodeMetadataInternal_TargetOwnership)
-			for _, m := range metadata {
-				targetName := m.GetTargetName()
-				path := m.GetPath()
-				trendyTeamId := m.GetTrendyTeamId()
-
-				allMetadata = append(allMetadata, &code_metadata_proto.CodeMetadata_TargetOwnership{
-					TargetName:   &targetName,
-					Path:         &path,
-					TrendyTeamId: &trendyTeamId,
-					SourceFiles:  m.GetSourceFiles(),
-				})
-			}
-		}
-
-		finalMetadata := &code_metadata_proto.CodeMetadata{
-			TargetOwnershipList: allMetadata,
-		}
-		writeProtoToFile(*outputFile, finalMetadata)
-		break
-	default:
-		log.Fatalf("No specific processing implemented for rule '%s'.\n", *rule)
-	}
-}
diff --git a/tools/metadata/go.mod b/tools/metadata/go.mod
deleted file mode 100644
index e9d04b1..0000000
--- a/tools/metadata/go.mod
+++ /dev/null
@@ -1,7 +0,0 @@
-module android/soong/tools/metadata
-
-require google.golang.org/protobuf v0.0.0
-
-replace google.golang.org/protobuf v0.0.0 => ../../../external/golang-protobuf
-
-go 1.18
\ No newline at end of file
diff --git a/tools/metadata/go.work b/tools/metadata/go.work
deleted file mode 100644
index f2cdf8e..0000000
--- a/tools/metadata/go.work
+++ /dev/null
@@ -1,11 +0,0 @@
-go 1.18
-
-use (
-	.
-	../../../../external/golang-protobuf
-	../../../soong/testing/test_spec_proto
-	../../../soong/testing/code_metadata_proto
-	../../../soong/testing/code_metadata_proto_internal
-)
-
-replace google.golang.org/protobuf v0.0.0 => ../../../../external/golang-protobuf
diff --git a/tools/metadata/testdata/emptyInputFile.txt b/tools/metadata/testdata/emptyInputFile.txt
deleted file mode 100644
index 8b13789..0000000
--- a/tools/metadata/testdata/emptyInputFile.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/tools/metadata/testdata/expectedCodeMetadataOutput.txt b/tools/metadata/testdata/expectedCodeMetadataOutput.txt
deleted file mode 100644
index 755cf40..0000000
--- a/tools/metadata/testdata/expectedCodeMetadataOutput.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-
- 
-bar
-Android.bp12346"b.java
- 
-foo
-Android.bp12345"a.java
\ No newline at end of file
diff --git a/tools/metadata/testdata/expectedOutputFile.txt b/tools/metadata/testdata/expectedOutputFile.txt
deleted file mode 100644
index b0d382f..0000000
--- a/tools/metadata/testdata/expectedOutputFile.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-
-.
-java-test-module-name-one
-Android.bp12345
-.
-java-test-module-name-six
-Android.bp12346
-.
-java-test-module-name-six
-Aqwerty.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-two
-Android.bp12345
-.
-java-test-module-name-two
-Asdfghj.bp12345
-.
-java-test-module-name-two
-Azxcvbn.bp12345
\ No newline at end of file
diff --git a/tools/metadata/testdata/file1.txt b/tools/metadata/testdata/file1.txt
deleted file mode 100644
index 81beed0..0000000
--- a/tools/metadata/testdata/file1.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-
-.
-java-test-module-name-one
-Android.bp12345
-.
-java-test-module-name-two
-Android.bp12345
-.
-java-test-module-name-two
-Asdfghj.bp12345
-.
-java-test-module-name-two
-Azxcvbn.bp12345
diff --git a/tools/metadata/testdata/file2.txt b/tools/metadata/testdata/file2.txt
deleted file mode 100644
index 32a753f..0000000
--- a/tools/metadata/testdata/file2.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-
-.
-java-test-module-name-one
-Android.bp12345
-.
-java-test-module-name-six
-Android.bp12346
-.
-java-test-module-name-one
-Android.bp12345
-.
-java-test-module-name-six
-Aqwerty.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
diff --git a/tools/metadata/testdata/file3.txt b/tools/metadata/testdata/file3.txt
deleted file mode 100644
index 81beed0..0000000
--- a/tools/metadata/testdata/file3.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-
-.
-java-test-module-name-one
-Android.bp12345
-.
-java-test-module-name-two
-Android.bp12345
-.
-java-test-module-name-two
-Asdfghj.bp12345
-.
-java-test-module-name-two
-Azxcvbn.bp12345
diff --git a/tools/metadata/testdata/file4.txt b/tools/metadata/testdata/file4.txt
deleted file mode 100644
index 6a75900..0000000
--- a/tools/metadata/testdata/file4.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-
-.
-java-test-module-name-one
-Android.bp12345
-.
-java-test-module-name-six
-Android.bp12346
-.
-java-test-module-name-one
-Android.bp12346
-.
-java-test-module-name-six
-Aqwerty.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
diff --git a/tools/metadata/testdata/file5.txt b/tools/metadata/testdata/file5.txt
deleted file mode 100644
index d8de064..0000000
--- a/tools/metadata/testdata/file5.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-
- 
-foo
-Android.bp12345"a.java
diff --git a/tools/metadata/testdata/file6.txt b/tools/metadata/testdata/file6.txt
deleted file mode 100644
index 9c7cdcd..0000000
--- a/tools/metadata/testdata/file6.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-
- 
-bar
-Android.bp12346"b.java
diff --git a/tools/metadata/testdata/file7.txt b/tools/metadata/testdata/file7.txt
deleted file mode 100644
index d8de064..0000000
--- a/tools/metadata/testdata/file7.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-
- 
-foo
-Android.bp12345"a.java
diff --git a/tools/metadata/testdata/file8.txt b/tools/metadata/testdata/file8.txt
deleted file mode 100644
index a931690..0000000
--- a/tools/metadata/testdata/file8.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-
- 
-foo
-Android.gp12346"a.java
diff --git a/tools/metadata/testdata/generatedCodeMetadataOutput.txt b/tools/metadata/testdata/generatedCodeMetadataOutput.txt
deleted file mode 100644
index 755cf40..0000000
--- a/tools/metadata/testdata/generatedCodeMetadataOutput.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-
- 
-bar
-Android.bp12346"b.java
- 
-foo
-Android.bp12345"a.java
\ No newline at end of file
diff --git a/tools/metadata/testdata/generatedCodeMetadataOutputFile.txt b/tools/metadata/testdata/generatedCodeMetadataOutputFile.txt
deleted file mode 100644
index 755cf40..0000000
--- a/tools/metadata/testdata/generatedCodeMetadataOutputFile.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-
- 
-bar
-Android.bp12346"b.java
- 
-foo
-Android.bp12345"a.java
\ No newline at end of file
diff --git a/tools/metadata/testdata/generatedEmptyOutputFile.txt b/tools/metadata/testdata/generatedEmptyOutputFile.txt
deleted file mode 100644
index e69de29..0000000
--- a/tools/metadata/testdata/generatedEmptyOutputFile.txt
+++ /dev/null
diff --git a/tools/metadata/testdata/generatedOutputFile.txt b/tools/metadata/testdata/generatedOutputFile.txt
deleted file mode 100644
index b0d382f..0000000
--- a/tools/metadata/testdata/generatedOutputFile.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-
-.
-java-test-module-name-one
-Android.bp12345
-.
-java-test-module-name-six
-Android.bp12346
-.
-java-test-module-name-six
-Aqwerty.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-two
-Android.bp12345
-.
-java-test-module-name-two
-Asdfghj.bp12345
-.
-java-test-module-name-two
-Azxcvbn.bp12345
\ No newline at end of file
diff --git a/tools/metadata/testdata/inputCodeMetadata.txt b/tools/metadata/testdata/inputCodeMetadata.txt
deleted file mode 100644
index 7a81b7d..0000000
--- a/tools/metadata/testdata/inputCodeMetadata.txt
+++ /dev/null
@@ -1 +0,0 @@
-file5.txt file6.txt
\ No newline at end of file
diff --git a/tools/metadata/testdata/inputCodeMetadataNegative.txt b/tools/metadata/testdata/inputCodeMetadataNegative.txt
deleted file mode 100644
index 26668e4..0000000
--- a/tools/metadata/testdata/inputCodeMetadataNegative.txt
+++ /dev/null
@@ -1 +0,0 @@
-file7.txt file8.txt
\ No newline at end of file
diff --git a/tools/metadata/testdata/inputFiles.txt b/tools/metadata/testdata/inputFiles.txt
deleted file mode 100644
index e44bc94..0000000
--- a/tools/metadata/testdata/inputFiles.txt
+++ /dev/null
@@ -1 +0,0 @@
-file1.txt file2.txt
\ No newline at end of file
diff --git a/tools/metadata/testdata/inputFilesNegativeCase.txt b/tools/metadata/testdata/inputFilesNegativeCase.txt
deleted file mode 100644
index a37aa3f..0000000
--- a/tools/metadata/testdata/inputFilesNegativeCase.txt
+++ /dev/null
@@ -1 +0,0 @@
-file3.txt file4.txt
\ No newline at end of file
diff --git a/tools/metadata/testdata/metadata_test.go b/tools/metadata/testdata/metadata_test.go
deleted file mode 100644
index 314add3..0000000
--- a/tools/metadata/testdata/metadata_test.go
+++ /dev/null
@@ -1,119 +0,0 @@
-package main
-
-import (
-	"fmt"
-	"io/ioutil"
-	"os/exec"
-	"strings"
-	"testing"
-)
-
-func TestMetadata(t *testing.T) {
-	cmd := exec.Command(
-		"metadata", "-rule", "test_spec", "-inputFile", "./inputFiles.txt", "-outputFile",
-		"./generatedOutputFile.txt",
-	)
-	stderr, err := cmd.CombinedOutput()
-	if err != nil {
-		t.Fatalf("Error running metadata command: %s. Error: %v", stderr, err)
-	}
-
-	// Read the contents of the expected output file
-	expectedOutput, err := ioutil.ReadFile("./expectedOutputFile.txt")
-	if err != nil {
-		t.Fatalf("Error reading expected output file: %s", err)
-	}
-
-	// Read the contents of the generated output file
-	generatedOutput, err := ioutil.ReadFile("./generatedOutputFile.txt")
-	if err != nil {
-		t.Fatalf("Error reading generated output file: %s", err)
-	}
-
-	fmt.Println()
-
-	// Compare the contents
-	if string(expectedOutput) != string(generatedOutput) {
-		t.Errorf("Generated file contents do not match the expected output")
-	}
-}
-
-func TestMetadataNegativeCase(t *testing.T) {
-	cmd := exec.Command(
-		"metadata", "-rule", "test_spec", "-inputFile", "./inputFilesNegativeCase.txt", "-outputFile",
-		"./generatedOutputFileNegativeCase.txt",
-	)
-	stderr, err := cmd.CombinedOutput()
-	if err == nil {
-		t.Fatalf(
-			"Expected an error, but the metadata command executed successfully. Output: %s",
-			stderr,
-		)
-	}
-
-	expectedError := "Conflicting trendy team IDs found for java-test-module" +
-		"-name-one at:\nAndroid.bp with teamId: 12346," +
-		"\nAndroid.bp with teamId: 12345"
-	if !strings.Contains(
-		strings.TrimSpace(string(stderr)), strings.TrimSpace(expectedError),
-	) {
-		t.Errorf(
-			"Unexpected error message. Expected to contain: %s, Got: %s",
-			expectedError, stderr,
-		)
-	}
-}
-
-func TestEmptyInputFile(t *testing.T) {
-	cmd := exec.Command(
-		"metadata", "-rule", "test_spec", "-inputFile", "./emptyInputFile.txt", "-outputFile",
-		"./generatedEmptyOutputFile.txt",
-	)
-	stderr, err := cmd.CombinedOutput()
-	if err != nil {
-		t.Fatalf("Error running metadata command: %s. Error: %v", stderr, err)
-	}
-
-	// Read the contents of the generated output file
-	generatedOutput, err := ioutil.ReadFile("./generatedEmptyOutputFile.txt")
-	if err != nil {
-		t.Fatalf("Error reading generated output file: %s", err)
-	}
-
-	fmt.Println()
-
-	// Compare the contents
-	if string(generatedOutput) != "\n" {
-		t.Errorf("Generated file contents do not match the expected output")
-	}
-}
-
-func TestCodeMetadata(t *testing.T) {
-	cmd := exec.Command(
-		"metadata", "-rule", "code_metadata", "-inputFile", "./inputCodeMetadata.txt", "-outputFile",
-		"./generatedCodeMetadataOutputFile.txt",
-	)
-	stderr, err := cmd.CombinedOutput()
-	if err != nil {
-		t.Fatalf("Error running metadata command: %s. Error: %v", stderr, err)
-	}
-
-	// Read the contents of the expected output file
-	expectedOutput, err := ioutil.ReadFile("./expectedCodeMetadataOutput.txt")
-	if err != nil {
-		t.Fatalf("Error reading expected output file: %s", err)
-	}
-
-	// Read the contents of the generated output file
-	generatedOutput, err := ioutil.ReadFile("./generatedCodeMetadataOutputFile.txt")
-	if err != nil {
-		t.Fatalf("Error reading generated output file: %s", err)
-	}
-
-	fmt.Println()
-
-	// Compare the contents
-	if string(expectedOutput) != string(generatedOutput) {
-		t.Errorf("Generated file contents do not match the expected output")
-	}
-}
diff --git a/tools/metadata/testdata/outputFile.txt b/tools/metadata/testdata/outputFile.txt
deleted file mode 100644
index b0d382f..0000000
--- a/tools/metadata/testdata/outputFile.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-
-.
-java-test-module-name-one
-Android.bp12345
-.
-java-test-module-name-six
-Android.bp12346
-.
-java-test-module-name-six
-Aqwerty.bp12346
-.
-java-test-module-name-six
-Apoiuyt.bp12346
-.
-java-test-module-name-two
-Android.bp12345
-.
-java-test-module-name-two
-Asdfghj.bp12345
-.
-java-test-module-name-two
-Azxcvbn.bp12345
\ No newline at end of file
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 9b134f2..e371b23 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -96,6 +96,7 @@
     ],
     libs: [
         "apex_manifest",
+        "releasetools_apex_utils",
         "releasetools_common",
     ],
     required: [
@@ -107,7 +108,7 @@
 python_library_host {
     name: "ota_metadata_proto",
     srcs: [
-       "ota_metadata.proto",
+        "ota_metadata.proto",
     ],
     proto: {
         canonical_path_from_root: false,
@@ -117,7 +118,7 @@
 cc_library_static {
     name: "ota_metadata_proto_cc",
     srcs: [
-       "ota_metadata.proto",
+        "ota_metadata.proto",
     ],
     host_supported: true,
     recovery_available: true,
@@ -144,7 +145,7 @@
             static_libs: ["libprotobuf-java-nano"],
         },
     },
-    visibility: ["//frameworks/base:__subpackages__"]
+    visibility: ["//frameworks/base:__subpackages__"],
 }
 
 python_defaults {
@@ -367,6 +368,9 @@
     libs: [
         "ota_utils_lib",
     ],
+    required: [
+        "signapk",
+    ],
 }
 
 python_binary_host {
@@ -436,7 +440,7 @@
     name: "check_target_files_vintf",
     defaults: [
         "releasetools_binary_defaults",
-        "releasetools_check_target_files_vintf_defaults"
+        "releasetools_check_target_files_vintf_defaults",
     ],
 }
 
@@ -546,13 +550,15 @@
     defaults: ["releasetools_binary_defaults"],
     srcs: [
         "sign_target_files_apks.py",
-        "payload_signer.py",
-        "ota_signing_utils.py",
+        "ota_from_raw_img.py",
     ],
     libs: [
         "releasetools_add_img_to_target_files",
         "releasetools_apex_utils",
         "releasetools_common",
+        "ota_metadata_proto",
+        "ota_utils_lib",
+        "update_payload",
     ],
 }
 
@@ -631,8 +637,10 @@
     ],
     data: [
         "testdata/**/*",
+    ],
+    device_common_data: [
         ":com.android.apex.compressed.v1",
-        ":com.android.apex.vendor.foo.with_vintf"
+        ":com.android.apex.vendor.foo.with_vintf",
     ],
     target: {
         darwin: {
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index b39a82c..30a6acc 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -464,6 +464,7 @@
   dtbo_prebuilt_path = os.path.join(
       OPTIONS.input_tmp, "PREBUILT_IMAGES", "dtbo.img")
   assert os.path.exists(dtbo_prebuilt_path)
+  os.makedirs(os.path.dirname(img.name), exist_ok=True)
   shutil.copy(dtbo_prebuilt_path, img.name)
 
   # AVB-sign the image as needed.
@@ -1099,7 +1100,7 @@
     vbmeta_partitions = common.AVB_PARTITIONS[:] + tuple(avb_custom_partitions)
 
     vbmeta_system = OPTIONS.info_dict.get("avb_vbmeta_system", "").strip()
-    if vbmeta_system:
+    if vbmeta_system and set(vbmeta_system.split()).intersection(partitions):
       banner("vbmeta_system")
       partitions["vbmeta_system"] = AddVBMeta(
           output_zip, partitions, "vbmeta_system", vbmeta_system.split())
@@ -1109,7 +1110,7 @@
       vbmeta_partitions.append("vbmeta_system")
 
     vbmeta_vendor = OPTIONS.info_dict.get("avb_vbmeta_vendor", "").strip()
-    if vbmeta_vendor:
+    if vbmeta_vendor and set(vbmeta_vendor.split()).intersection(partitions):
       banner("vbmeta_vendor")
       partitions["vbmeta_vendor"] = AddVBMeta(
           output_zip, partitions, "vbmeta_vendor", vbmeta_vendor.split())
@@ -1136,7 +1137,7 @@
             if item not in included_partitions]
         vbmeta_partitions.append(partition_name)
 
-    if OPTIONS.info_dict.get("avb_building_vbmeta_image") == "true":
+    if OPTIONS.info_dict.get("avb_building_vbmeta_image") == "true" and set(vbmeta_partitions).intersection(partitions):
       banner("vbmeta")
       AddVBMeta(output_zip, partitions, "vbmeta", vbmeta_partitions)
 
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 3abef3b..54df955 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -36,6 +36,8 @@
 
 APEX_PUBKEY = 'apex_pubkey'
 
+# Partitions supporting APEXes
+PARTITIONS = ['system', 'system_ext', 'product', 'vendor', 'odm']
 
 class ApexInfoError(Exception):
   """An Exception raised during Apex Information command."""
@@ -550,7 +552,7 @@
   if not isinstance(input_file, str):
     raise RuntimeError("must pass filepath to target-files zip or directory")
   apex_infos = []
-  for partition in ['system', 'system_ext', 'product', 'vendor']:
+  for partition in PARTITIONS:
     apex_infos.extend(GetApexInfoForPartition(input_file, partition))
   return apex_infos
 
diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py
index b8dcd84..dc123ef 100755
--- a/tools/releasetools/check_target_files_vintf.py
+++ b/tools/releasetools/check_target_files_vintf.py
@@ -30,6 +30,7 @@
 import sys
 import zipfile
 
+import apex_utils
 import common
 from apex_manifest import ParseApexManifest
 
@@ -229,7 +230,7 @@
   apex_host = os.path.join(OPTIONS.search_path, 'bin', 'apexd_host')
   cmd = [apex_host, '--tool_path', OPTIONS.search_path]
   cmd += ['--apex_path', dirmap['/apex']]
-  for p in ['system', 'system_ext', 'product', 'vendor']:
+  for p in apex_utils.PARTITIONS:
     if '/' + p in dirmap:
       cmd += ['--' + p + '_path', dirmap['/' + p]]
   common.RunAndCheckOutput(cmd)
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 4834834..f04dfb7 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -898,7 +898,7 @@
       if key.endswith("selinux_fc"):
         fc_basename = os.path.basename(d[key])
         fc_config = os.path.join(input_file, "META", fc_basename)
-        assert os.path.exists(fc_config)
+        assert os.path.exists(fc_config), "{} does not exist".format(fc_config)
 
         d[key] = fc_config
 
@@ -907,9 +907,10 @@
     d["root_fs_config"] = os.path.join(
         input_file, "META", "root_filesystem_config.txt")
 
+    partitions = ["system", "vendor", "system_ext", "product", "odm",
+                  "vendor_dlkm", "odm_dlkm", "system_dlkm"]
     # Redirect {partition}_base_fs_file for each of the named partitions.
-    for part_name in ["system", "vendor", "system_ext", "product", "odm",
-                      "vendor_dlkm", "odm_dlkm", "system_dlkm"]:
+    for part_name in partitions:
       key_name = part_name + "_base_fs_file"
       if key_name not in d:
         continue
@@ -922,6 +923,25 @@
             "Failed to find %s base fs file: %s", part_name, base_fs_file)
         del d[key_name]
 
+    # Helper to redirect optional properties like erofs_compress_hints
+    def redirect_file(prop, filename):
+      if prop not in d:
+        return
+      config_file = os.path.join(input_file, "META/" + filename)
+      if os.path.exists(config_file):
+        d[prop] = config_file
+      else:
+        logger.warning(
+            "Failed to find %s fro %s", filename, prop)
+        del d[prop]
+
+    # Redirect erofs_[default_]compress_hints files
+    redirect_file("erofs_default_compress_hints",
+                  "erofs_default_compress_hints.txt")
+    for part in partitions:
+      redirect_file(part + "_erofs_compress_hints",
+                    part + "_erofs_compress_hints.txt")
+
   def makeint(key):
     if key in d:
       d[key] = int(d[key], 0)
@@ -2434,12 +2454,23 @@
         "Failed to obtain minSdkVersion for {}: aapt2 return code {}:\n{}\n{}".format(
             apk_name, proc.returncode, stdoutdata, stderrdata))
 
+  is_split_apk = False
   for line in stdoutdata.split("\n"):
+    # See b/353837347: split APKs do not have an sdk version defined,
+    # so we default to 21 as split APKs are only supported since SDK
+    # 21.
+    if (re.search(r"split=[\"'].*[\"']", line)):
+      is_split_apk = True
     # Due to ag/24161708, looking for lines such as minSdkVersion:'23',minSdkVersion:'M'
     # or sdkVersion:'23', sdkVersion:'M'.
     m = re.match(r'(?:minSdkVersion|sdkVersion):\'([^\']*)\'', line)
     if m:
       return m.group(1)
+  if is_split_apk:
+    logger.info("%s is a split APK, it does not have minimum SDK version"
+                " defined. Defaulting to 21 because split APK isn't supported"
+                " before that.", apk_name)
+    return 21
   raise ExternalError("No minSdkVersion returned by aapt2 for apk: {}".format(apk_name))
 
 
@@ -2977,7 +3008,7 @@
     zipfile.ZIP64_LIMIT = saved_zip64_limit
 
 
-def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None,
+def ZipWriteStr(zip_file: zipfile.ZipFile, zinfo_or_arcname, data, perms=None,
                 compress_type=None):
   """Wrap zipfile.writestr() function to work around the zip64 limit.
 
@@ -3207,7 +3238,9 @@
     return t
 
   def WriteToDir(self, d):
-    with open(os.path.join(d, self.name), "wb") as fp:
+    output_path = os.path.join(d, self.name)
+    os.makedirs(os.path.dirname(output_path), exist_ok=True)
+    with open(output_path, "wb") as fp:
       fp.write(self.data)
 
   def AddToZip(self, z, compression=None):
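
The GetMinSdkVersion change above falls back to SDK 21 for split APKs, whose aapt2 badging output carries a split= attribute but no (min)SdkVersion line. A self-contained sketch of that parsing logic; the sample badging lines are illustrative, not real aapt2 output:

    import re

    def min_sdk_from_badging(lines):
        """Returns the SDK string from badging output, or 21 for split APKs."""
        is_split = False
        for line in lines:
            if re.search(r"split=[\"'].*[\"']", line):
                is_split = True
            m = re.match(r'(?:minSdkVersion|sdkVersion):\'([^\']*)\'', line)
            if m:
                return m.group(1)
        if is_split:
            return 21  # split APKs only exist on SDK 21+ (mirrors the patch)
        raise ValueError("no minSdkVersion in badging output")

    print(min_sdk_from_badging(["package: name='com.example' split='config.arm64_v8a'"]))  # 21
    print(min_sdk_from_badging(["sdkVersion:'30'"]))                                       # 30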
diff --git a/tools/releasetools/ota_from_raw_img.py b/tools/releasetools/ota_from_raw_img.py
index 03b44f1..3b9374a 100644
--- a/tools/releasetools/ota_from_raw_img.py
+++ b/tools/releasetools/ota_from_raw_img.py
@@ -105,9 +105,6 @@
 
     if args.package_key:
       logger.info("Signing payload...")
-      # TODO: remove OPTIONS when no longer used as fallback in payload_signer
-      common.OPTIONS.payload_signer_args = None
-      common.OPTIONS.payload_signer_maximum_signature_size = None
       signer = PayloadSigner(args.package_key, args.private_key_suffix,
                              key_passwords[args.package_key],
                              payload_signer=args.payload_signer,
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 985cd56..6446e1f 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -264,6 +264,10 @@
 
   --compression_factor
       Specify the maximum block size to be compressed at once during OTA. supported options: 4k, 8k, 16k, 32k, 64k, 128k, 256k
+
+  --full_ota_partitions
+      Specify a list of partitions that should be updated in full OTA fashion,
+      even if an incremental OTA is about to be generated
 """
 
 from __future__ import print_function
@@ -283,7 +287,7 @@
 import ota_utils
 import payload_signer
 from ota_utils import (VABC_COMPRESSION_PARAM_SUPPORT, FinalizeMetadata, GetPackageMetadata,
-                       PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir)
+                       PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir, TARGET_FILES_IMAGES_SUBDIR)
 from common import DoesInputFileContain, IsSparseImage
 import target_files_diff
 from non_ab_ota import GenerateNonAbOtaPackage
@@ -337,6 +341,7 @@
 OPTIONS.max_threads = None
 OPTIONS.vabc_cow_version = None
 OPTIONS.compression_factor = None
+OPTIONS.full_ota_partitions = None
 
 
 POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
@@ -892,6 +897,14 @@
 
   if source_file is not None:
     source_file = ExtractTargetFiles(source_file)
+    if OPTIONS.full_ota_partitions:
+      for partition in OPTIONS.full_ota_partitions:
+        for subdir in TARGET_FILES_IMAGES_SUBDIR:
+          image_path = os.path.join(source_file, subdir, partition + ".img")
+          if os.path.exists(image_path):
+            logger.info(
+                "Ignoring source image %s for partition %s because it is configured to use full OTA", image_path, partition)
+            os.remove(image_path)
     assert "ab_partitions" in OPTIONS.source_info_dict, \
         "META/ab_partitions.txt is required for ab_update."
     assert "ab_partitions" in OPTIONS.target_info_dict, \
@@ -1193,7 +1206,7 @@
 
 def main(argv):
 
-  def option_handler(o, a):
+  def option_handler(o, a: str):
     if o in ("-i", "--incremental_from"):
       OPTIONS.incremental_source = a
     elif o == "--full_radio":
@@ -1320,6 +1333,9 @@
       else:
         raise ValueError("Cannot parse value %r for option %r - only "
                          "integers are allowed." % (a, o))
+    elif o == "--full_ota_partitions":
+      OPTIONS.full_ota_partitions = set(
+          a.strip().strip("\"").strip("'").split(","))
     else:
       return False
     return True
@@ -1370,6 +1386,7 @@
                                  "max_threads=",
                                  "vabc_cow_version=",
                                  "compression_factor=",
+                                 "full_ota_partitions=",
                              ], extra_option_handler=[option_handler, payload_signer.signer_options])
   common.InitLogging()
 
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index b8f848f..4ad97e0 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -184,14 +184,17 @@
 import shutil
 import stat
 import sys
+import shlex
 import tempfile
 import zipfile
 from xml.etree import ElementTree
 
 import add_img_to_target_files
+import ota_from_raw_img
 import apex_utils
 import common
 import payload_signer
+import update_payload
 from payload_signer import SignOtaPackage, PAYLOAD_BIN
 
 
@@ -221,6 +224,7 @@
 OPTIONS.allow_gsi_debug_sepolicy = False
 OPTIONS.override_apk_keys = None
 OPTIONS.override_apex_keys = None
+OPTIONS.input_tmp = None
 
 
 AVB_FOOTER_ARGS_BY_PARTITION = {
@@ -579,7 +583,104 @@
         filename.endswith("/prop.default")
 
 
-def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info,
+def GetOtaSigningArgs():
+  args = []
+  if OPTIONS.package_key:
+    args.extend(["--package_key", OPTIONS.package_key])
+  if OPTIONS.payload_signer:
+    args.extend(["--payload_signer=" + OPTIONS.payload_signer])
+  if OPTIONS.payload_signer_args:
+    args.extend(["--payload_signer_args=" + shlex.join(OPTIONS.payload_signer_args)])
+  if OPTIONS.search_path:
+    args.extend(["--search_path", OPTIONS.search_path])
+  if OPTIONS.payload_signer_maximum_signature_size:
+    args.extend(["--payload_signer_maximum_signature_size",
+                OPTIONS.payload_signer_maximum_signature_size])
+  if OPTIONS.private_key_suffix:
+    args.extend(["--private_key_suffix", OPTIONS.private_key_suffix])
+  return args
+
+
+def RegenerateKernelPartitions(input_tf_zip: zipfile.ZipFile, output_tf_zip: zipfile.ZipFile, misc_info):
+  """Re-generate boot and dtbo partitions using new signing configuration"""
+  files_to_unzip = [
+      "PREBUILT_IMAGES/*", "BOOTABLE_IMAGES/*.img", "*/boot_16k.img", "*/dtbo_16k.img"]
+  if OPTIONS.input_tmp is None:
+    OPTIONS.input_tmp = common.UnzipTemp(input_tf_zip.filename, files_to_unzip)
+  else:
+    common.UnzipToDir(input_tf_zip.filename, OPTIONS.input_tmp, files_to_unzip)
+  unzip_dir = OPTIONS.input_tmp
+  os.makedirs(os.path.join(unzip_dir, "IMAGES"), exist_ok=True)
+
+  boot_image = common.GetBootableImage(
+      "IMAGES/boot.img", "boot.img", unzip_dir, "BOOT", misc_info)
+  if boot_image:
+    boot_image.WriteToDir(unzip_dir)
+    boot_image = os.path.join(unzip_dir, boot_image.name)
+    common.ZipWrite(output_tf_zip, boot_image, "IMAGES/boot.img",
+                    compress_type=zipfile.ZIP_STORED)
+  if misc_info.get("has_dtbo") == "true":
+    add_img_to_target_files.AddDtbo(output_tf_zip)
+  return unzip_dir
+
+
+def RegenerateBootOTA(input_tf_zip: zipfile.ZipFile, filename, input_ota):
+  with input_tf_zip.open(filename, "r") as in_fp:
+    payload = update_payload.Payload(in_fp)
+  is_incremental = any([part.HasField('old_partition_info')
+                        for part in payload.manifest.partitions])
+  is_boot_ota = filename.startswith(
+      "VENDOR/boot_otas/") or filename.startswith("SYSTEM/boot_otas/")
+  if not is_boot_ota:
+    return
+  is_4k_boot_ota = filename in [
+      "VENDOR/boot_otas/boot_ota_4k.zip", "SYSTEM/boot_otas/boot_ota_4k.zip"]
+  # Only the 4K boot image is re-generated, so if the 16K boot OTA isn't
+  # incremental, we do not need to re-generate it.
+  if not is_4k_boot_ota and not is_incremental:
+    return
+
+  timestamp = str(payload.manifest.max_timestamp)
+  partitions = [part.partition_name for part in payload.manifest.partitions]
+  unzip_dir = OPTIONS.input_tmp
+  signed_boot_image = os.path.join(unzip_dir, "IMAGES", "boot.img")
+  if not os.path.exists(signed_boot_image):
+    logger.warn("Need to re-generate boot OTA {} but failed to get signed boot image. 16K dev option will be impacted, after rolling back to 4K user would need to sideload/flash their device to continue receiving OTAs.")
+    return
+  signed_dtbo_image = os.path.join(unzip_dir, "IMAGES", "dtbo.img")
+  if "dtbo" in partitions and not os.path.exists(signed_dtbo_image):
+    raise ValueError(
+        "Boot OTA {} has dtbo partition, but no dtbo image found in target files.".format(filename))
+  if is_incremental:
+    signed_16k_boot_image = os.path.join(
+        unzip_dir, "IMAGES", "boot_16k.img")
+    signed_16k_dtbo_image = os.path.join(
+        unzip_dir, "IMAGES", "dtbo_16k.img")
+    if is_4k_boot_ota:
+      if os.path.exists(signed_16k_boot_image):
+        signed_boot_image = signed_16k_boot_image + ":" + signed_boot_image
+      if os.path.exists(signed_16k_dtbo_image):
+        signed_dtbo_image = signed_16k_dtbo_image + ":" + signed_dtbo_image
+    else:
+      if os.path.exists(signed_16k_boot_image):
+        signed_boot_image += ":" + signed_16k_boot_image
+      if os.path.exists(signed_16k_dtbo_image):
+        signed_dtbo_image += ":" + signed_16k_dtbo_image
+
+  args = ["ota_from_raw_img",
+          "--max_timestamp", timestamp, "--output", input_ota.name]
+  args.extend(GetOtaSigningArgs())
+  if "dtbo" in partitions:
+    args.extend(["--partition_name", "boot,dtbo",
+                signed_boot_image, signed_dtbo_image])
+  else:
+    args.extend(["--partition_name", "boot", signed_boot_image])
+  logger.info(
+      "Re-generating boot OTA {} using cmd {}".format(filename, args))
+  ota_from_raw_img.main(args)
+
+
+def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip: zipfile.ZipFile, misc_info,
                        apk_keys, apex_keys, key_passwords,
                        platform_api_level, codename_to_api_level_map,
                        compressed_extension):
@@ -593,6 +694,16 @@
     # Sets this to zero for targets without APK files.
     maxsize = 0
 
+  # Replace the AVB signing keys, if any.
+  ReplaceAvbSigningKeys(misc_info)
+  OPTIONS.info_dict = misc_info
+
+  # Rewrite the props in AVB signing args.
+  if misc_info.get('avb_enable') == 'true':
+    RewriteAvbProps(misc_info)
+
+  RegenerateKernelPartitions(input_tf_zip, output_tf_zip, misc_info)
+
   for info in input_tf_zip.infolist():
     filename = info.filename
     if filename.startswith("IMAGES/"):
@@ -603,10 +714,10 @@
     if filename.startswith("OTA/") and filename.endswith(".img"):
       continue
 
-    data = input_tf_zip.read(filename)
-    out_info = copy.copy(info)
     (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo(
         filename, compressed_extension, OPTIONS.skip_apks_with_path_prefix)
+    data = input_tf_zip.read(filename)
+    out_info = copy.copy(info)
 
     if is_apk and should_be_skipped:
       # Copy skipped APKs verbatim.
@@ -670,9 +781,8 @@
     elif filename.endswith(".zip") and IsEntryOtaPackage(input_tf_zip, filename):
       logger.info("Re-signing OTA package {}".format(filename))
       with tempfile.NamedTemporaryFile() as input_ota, tempfile.NamedTemporaryFile() as output_ota:
-        with input_tf_zip.open(filename, "r") as in_fp:
-          shutil.copyfileobj(in_fp, input_ota)
-          input_ota.flush()
+        RegenerateBootOTA(input_tf_zip, filename, input_ota)
+
         SignOtaPackage(input_ota.name, output_ota.name)
         common.ZipWrite(output_tf_zip, output_ota.name, filename,
                         compress_type=zipfile.ZIP_STORED)
@@ -811,17 +921,18 @@
         common.ZipWrite(output_tf_zip, image.name, filename)
     # A non-APK file; copy it verbatim.
     else:
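+      # Write each entry to the output zip only once; if it is already present,
+      # skip it and warn when the existing contents differ.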
-      common.ZipWriteStr(output_tf_zip, out_info, data)
+      try:
+        entry = output_tf_zip.getinfo(filename)
+        if output_tf_zip.read(entry) != data:
+          logger.warning(
+              "Output zip contains duplicate entries for %s with different contents", filename)
+        continue
+      except KeyError:
+        common.ZipWriteStr(output_tf_zip, out_info, data)
 
   if OPTIONS.replace_ota_keys:
     ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info)
 
-  # Replace the AVB signing keys, if any.
-  ReplaceAvbSigningKeys(misc_info)
-
-  # Rewrite the props in AVB signing args.
-  if misc_info.get('avb_enable') == 'true':
-    RewriteAvbProps(misc_info)
 
   # Write back misc_info with the latest values.
   ReplaceMiscInfoTxt(input_tf_zip, output_tf_zip, misc_info)
@@ -1066,9 +1177,9 @@
   common.ZipWriteStr(output_zip, filename, temp_file.getvalue())
 
 
-def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info):
+def ReplaceOtaKeys(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info):
   try:
-    keylist = input_tf_zip.read("META/otakeys.txt").split()
+    keylist = input_tf_zip.read("META/otakeys.txt").decode().split()
   except KeyError:
     raise common.ExternalError("can't read META/otakeys.txt from input")
 
diff --git a/tools/sbom/Android.bp b/tools/sbom/Android.bp
index 2b2b573..4f6d3b7 100644
--- a/tools/sbom/Android.bp
+++ b/tools/sbom/Android.bp
@@ -34,6 +34,31 @@
 }
 
 python_library_host {
+    name: "compliance_metadata",
+    srcs: [
+        "compliance_metadata.py",
+    ],
+}
+
+python_binary_host {
+    name: "gen_sbom",
+    srcs: [
+        "gen_sbom.py",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+    libs: [
+        "compliance_metadata",
+        "metadata_file_proto_py",
+        "libprotobuf-python",
+        "sbom_lib",
+    ],
+}
+
+python_library_host {
     name: "sbom_lib",
     srcs: [
         "sbom_data.py",
@@ -91,4 +116,18 @@
     libs: [
         "sbom_lib",
     ],
-}
\ No newline at end of file
+}
+
+python_binary_host {
+    name: "gen_notice_xml",
+    srcs: [
+        "gen_notice_xml.py",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+    libs: [
+    ],
+}
diff --git a/tools/sbom/compliance_metadata.py b/tools/sbom/compliance_metadata.py
new file mode 100644
index 0000000..9910217
--- /dev/null
+++ b/tools/sbom/compliance_metadata.py
@@ -0,0 +1,204 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sqlite3
+
+class MetadataDb:
+  def __init__(self, db):
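+    # Copy the on-disk metadata DB into an in-memory connection so the
+    # reorganization below does not touch the original file.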
+    self.conn = sqlite3.connect(':memory:')
+    self.conn.row_factory = sqlite3.Row
+    with sqlite3.connect(db) as c:
+      c.backup(self.conn)
+    self.reorg()
+
+  def reorg(self):
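+    # For each derived table below, split space-separated multi-value columns into
+    # one row per value, then delete the original multi-value rows.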
+    # package_license table
+    self.conn.execute("create table package_license as "
+                      "select name as package, pkg_default_applicable_licenses as license "
+                      "from modules "
+                      "where module_type = 'package' ")
+    cursor = self.conn.execute("select package,license from package_license where license like '% %'")
+    multi_licenses_packages = cursor.fetchall()
+    cursor.close()
+    rows = []
+    for p in multi_licenses_packages:
+      licenses = p['license'].strip().split(' ')
+      for lic in licenses:
+        rows.append((p['package'], lic))
+    self.conn.executemany('insert into package_license values (?, ?)', rows)
+    self.conn.commit()
+
+    self.conn.execute("delete from package_license where license like '% %'")
+    self.conn.commit()
+
+    # module_license table
+    self.conn.execute("create table module_license as "
+                      "select distinct name as module, package, licenses as license "
+                      "from modules "
+                      "where licenses != '' ")
+    cursor = self.conn.execute("select module,package,license from module_license where license like '% %'")
+    multi_licenses_modules = cursor.fetchall()
+    cursor.close()
+    rows = []
+    for m in multi_licenses_modules:
+      licenses = m['license'].strip().split(' ')
+      for lic in licenses:
+        rows.append((m['module'], m['package'],lic))
+    self.conn.executemany('insert into module_license values (?, ?, ?)', rows)
+    self.conn.commit()
+
+    self.conn.execute("delete from module_license where license like '% %'")
+    self.conn.commit()
+
+    # module_installed_file table
+    self.conn.execute("create table module_installed_file as "
+                      "select id as module_id, name as module_name, package, installed_files as installed_file "
+                      "from modules "
+                      "where installed_files != '' ")
+    cursor = self.conn.execute("select module_id, module_name, package, installed_file "
+                               "from module_installed_file where installed_file like '% %'")
+    multi_installed_file_modules = cursor.fetchall()
+    cursor.close()
+    rows = []
+    for m in multi_installed_file_modules:
+      installed_files = m['installed_file'].strip().split(' ')
+      for f in installed_files:
+        rows.append((m['module_id'], m['module_name'], m['package'], f))
+    self.conn.executemany('insert into module_installed_file values (?, ?, ?, ?)', rows)
+    self.conn.commit()
+
+    self.conn.execute("delete from module_installed_file where installed_file like '% %'")
+    self.conn.commit()
+
+    # module_built_file table
+    self.conn.execute("create table module_built_file as "
+                      "select id as module_id, name as module_name, package, built_files as built_file "
+                      "from modules "
+                      "where built_files != '' ")
+    cursor = self.conn.execute("select module_id, module_name, package, built_file "
+                               "from module_built_file where built_file like '% %'")
+    multi_built_file_modules = cursor.fetchall()
+    cursor.close()
+    rows = []
+    for m in multi_built_file_modules:
+      built_files = m['built_file'].strip().split(' ')
+      for f in built_files:
+        rows.append((m['module_id'], m['module_name'], m['package'], f))
+    self.conn.executemany('insert into module_built_file values (?, ?, ?, ?)', rows)
+    self.conn.commit()
+
+    self.conn.execute("delete from module_built_file where built_file like '% %'")
+    self.conn.commit()
+
+
+    # Indexes
+    self.conn.execute('create index idx_modules_id on modules (id)')
+    self.conn.execute('create index idx_modules_name on modules (name)')
+    self.conn.execute('create index idx_package_license_package on package_license (package)')
+    self.conn.execute('create index idx_package_license_license on package_license (license)')
+    self.conn.execute('create index idx_module_license_module on module_license (module)')
+    self.conn.execute('create index idx_module_license_license on module_license (license)')
+    self.conn.execute('create index idx_module_installed_file_module_id on module_installed_file (module_id)')
+    self.conn.execute('create index idx_module_installed_file_installed_file on module_installed_file (installed_file)')
+    self.conn.execute('create index idx_module_built_file_module_id on module_built_file (module_id)')
+    self.conn.execute('create index idx_module_built_file_built_file on module_built_file (built_file)')
+    self.conn.commit()
+
+  def dump_debug_db(self, debug_db):
+    with sqlite3.connect(debug_db) as c:
+      self.conn.backup(c)
+
+  def get_installed_files(self):
+    # Get all records from table make_metadata, which contains all installed files and corresponding make modules' metadata
+    cursor = self.conn.execute('select installed_file, module_path, is_prebuilt_make_module, product_copy_files, kernel_module_copy_files, is_platform_generated, license_text from make_metadata')
+    rows = cursor.fetchall()
+    cursor.close()
+    installed_files_metadata = []
+    for row in rows:
+      metadata = dict(zip(row.keys(), row))
+      installed_files_metadata.append(metadata)
+    return installed_files_metadata
+
+  def get_soong_modules(self):
+    # Get all records from table modules, which contains metadata of all soong modules
+    cursor = self.conn.execute('select name, package, package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files from modules')
+    rows = cursor.fetchall()
+    cursor.close()
+    soong_modules = []
+    for row in rows:
+      soong_module = dict(zip(row.keys(), row))
+      soong_modules.append(soong_module)
+    return soong_modules
+
+  def get_package_licenses(self, package):
+    cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text '
+                               'from package_license pl join modules m on pl.license = m.name '
+                               'where pl.package = ?',
+                               ('//' + package,))
+    rows = cursor.fetchall()
+    licenses = {}
+    for r in rows:
+      licenses[r['name']] = r['license_text']
+    return licenses
+
+  def get_module_licenses(self, module_name, package):
+    licenses = {}
+    # If property "licenses" is defined on module
+    cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text '
+                               'from module_license ml join modules m on ml.license = m.name '
+                               'where ml.module = ? and ml.package = ?',
+                               (module_name, package))
+    rows = cursor.fetchall()
+    for r in rows:
+      licenses[r['name']] = r['license_text']
+    if len(licenses) > 0:
+      return licenses
+
+    # Use default package license
+    cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text '
+                               'from package_license pl join modules m on pl.license = m.name '
+                               'where pl.package = ?',
+                               ('//' + package,))
+    rows = cursor.fetchall()
+    for r in rows:
+      licenses[r['name']] = r['license_text']
+    return licenses
+
+  def get_soong_module_of_installed_file(self, installed_file):
+    cursor = self.conn.execute('select name, m.package, m.package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files '
+                               'from modules m join module_installed_file mif on m.id = mif.module_id '
+                               'where mif.installed_file = ?',
+                               (installed_file,))
+    rows = cursor.fetchall()
+    cursor.close()
+    if rows:
+      soong_module = dict(zip(rows[0].keys(), rows[0]))
+      return soong_module
+
+    return None
+
+  def get_soong_module_of_built_file(self, built_file):
+    cursor = self.conn.execute('select name, m.package, m.package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files '
+                               'from modules m join module_built_file mbf on m.id = mbf.module_id '
+                               'where mbf.built_file = ?',
+                               (built_file,))
+    rows = cursor.fetchall()
+    cursor.close()
+    if rows:
+      soong_module = dict(zip(rows[0].keys(), rows[0]))
+      return soong_module
+
+    return None
\ No newline at end of file
diff --git a/tools/sbom/gen_notice_xml.py b/tools/sbom/gen_notice_xml.py
new file mode 100644
index 0000000..eaa6e5a
--- /dev/null
+++ b/tools/sbom/gen_notice_xml.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Generate NOTICE.xml.gz of a partition.
+Usage example:
+  gen_notice_xml.py --output_file out/soong/.intermediate/.../NOTICE.xml.gz \
+              --metadata out/soong/compliance-metadata/aosp_cf_x86_64_phone/compliance-metadata.db \
+              --partition system \
+              --product_out out/target/vsoc_x86_64 \
+              --soong_out out/soong
+"""
+
+import argparse
+
+
+FILE_HEADER = '''\
+<?xml version="1.0" encoding="utf-8"?>
+<licenses>
+'''
+FILE_FOOTER = '''\
+</licenses>
+'''
+
+
+def get_args():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Print more information.')
+  parser.add_argument('-d', '--debug', action='store_true', default=True, help='Debug mode')
+  parser.add_argument('--output_file', required=True, help='The path of the generated NOTICE.xml.gz file.')
+  parser.add_argument('--partition', required=True, help='The name of partition for which the NOTICE.xml.gz is generated.')
+  parser.add_argument('--metadata', required=True, help='The path of compliance metadata DB file.')
+  parser.add_argument('--product_out', required=True, help='The path of PRODUCT_OUT, e.g. out/target/product/vsoc_x86_64.')
+  parser.add_argument('--soong_out', required=True, help='The path of Soong output directory, e.g. out/soong')
+
+  return parser.parse_args()
+
+
+def log(*info):
+  if args.verbose:
+    for i in info:
+      print(i)
+
+
+def new_file_name_tag(file_metadata, package_name):
+  file_path = file_metadata['installed_file'].removeprefix(args.product_out)
+  lib = 'Android'
+  if package_name:
+    lib = package_name
+  return f'<file-name contentId="" lib="{lib}">{file_path}</file-name>\n'
+
+
+def new_file_content_tag():
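+  # Currently a no-op; main() writes only the XML header and footer.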
+  pass
+
+
+def main():
+  global args
+  args = get_args()
+  log('Args:', vars(args))
+
+  with open(args.output_file, 'w', encoding="utf-8") as notice_xml_file:
+    notice_xml_file.write(FILE_HEADER)
+    notice_xml_file.write(FILE_FOOTER)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/tools/sbom/gen_sbom.py b/tools/sbom/gen_sbom.py
new file mode 100644
index 0000000..9c3a8be
--- /dev/null
+++ b/tools/sbom/gen_sbom.py
@@ -0,0 +1,740 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Generate the SBOM of the current target product in SPDX format.
+Usage example:
+  gen_sbom.py --output_file out/soong/sbom/aosp_cf_x86_64_phone/sbom.spdx \
+              --metadata out/soong/metadata/aosp_cf_x86_64_phone/metadata.db \
+              --product_out out/target/vsoc_x86_64 \
+              --soong_out out/soong \
+              --build_version $(cat out/target/product/vsoc_x86_64/build_fingerprint.txt) \
+              --product_mfr=Google
+"""
+
+import argparse
+import compliance_metadata
+import datetime
+import google.protobuf.text_format as text_format
+import hashlib
+import os
+import pathlib
+import queue
+import metadata_file_pb2
+import sbom_data
+import sbom_writers
+import sys
+
+# Package type
+PKG_SOURCE = 'SOURCE'
+PKG_UPSTREAM = 'UPSTREAM'
+PKG_PREBUILT = 'PREBUILT'
+
+# Security tag
+NVD_CPE23 = 'NVD-CPE2.3:'
+
+# Report
+ISSUE_NO_METADATA = 'No metadata generated in Make for installed files:'
+ISSUE_NO_METADATA_FILE = 'No METADATA file found for installed file:'
+ISSUE_METADATA_FILE_INCOMPLETE = 'METADATA file incomplete:'
+ISSUE_UNKNOWN_SECURITY_TAG_TYPE = 'Unknown security tag type:'
+ISSUE_INSTALLED_FILE_NOT_EXIST = 'Non-existent installed files:'
+ISSUE_NO_MODULE_FOUND_FOR_STATIC_DEP = 'No module found for static dependency files:'
+INFO_METADATA_FOUND_FOR_PACKAGE = 'METADATA file found for packages:'
+
+SOONG_PREBUILT_MODULE_TYPES = [
+    'android_app_import',
+    'android_library_import',
+    'cc_prebuilt_binary',
+    'cc_prebuilt_library',
+    'cc_prebuilt_library_headers',
+    'cc_prebuilt_library_shared',
+    'cc_prebuilt_library_static',
+    'cc_prebuilt_object',
+    'dex_import',
+    'java_import',
+    'java_sdk_library_import',
+    'java_system_modules_import',
+    'libclang_rt_prebuilt_library_static',
+    'libclang_rt_prebuilt_library_shared',
+    'llvm_prebuilt_library_static',
+    'ndk_prebuilt_object',
+    'ndk_prebuilt_shared_stl',
+    'ndk_prebuilt_static_stl',
+    'prebuilt_apex',
+    'prebuilt_bootclasspath_fragment',
+    'prebuilt_dsp',
+    'prebuilt_firmware',
+    'prebuilt_kernel_modules',
+    'prebuilt_rfsa',
+    'prebuilt_root',
+    'rust_prebuilt_dylib',
+    'rust_prebuilt_library',
+    'rust_prebuilt_rlib',
+    'vndk_prebuilt_shared',
+]
+
+THIRD_PARTY_IDENTIFIER_TYPES = [
+    # Types defined in metadata_file.proto
+    'Git',
+    'SVN',
+    'Hg',
+    'Darcs',
+    'VCS',
+    'Archive',
+    'PrebuiltByAlphabet',
+    'LocalSource',
+    'Other',
+    # OSV ecosystems defined at https://ossf.github.io/osv-schema/#affectedpackage-field.
+    'Go',
+    'npm',
+    'OSS-Fuzz',
+    'PyPI',
+    'RubyGems',
+    'crates.io',
+    'Hackage',
+    'GHC',
+    'Packagist',
+    'Maven',
+    'NuGet',
+    'Linux',
+    'Debian',
+    'Alpine',
+    'Hex',
+    'Android',
+    'GitHub Actions',
+    'Pub',
+    'ConanCenter',
+    'Rocky Linux',
+    'AlmaLinux',
+    'Bitnami',
+    'Photon OS',
+    'CRAN',
+    'Bioconductor',
+    'SwiftURL'
+]
+
+
+def get_args():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Print more information.')
+  parser.add_argument('-d', '--debug', action='store_true', default=False, help='Debug mode')
+  parser.add_argument('--output_file', required=True, help='The generated SBOM file in SPDX format.')
+  parser.add_argument('--metadata', required=True, help='The metadata DB file path.')
+  parser.add_argument('--product_out', required=True, help='The path of PRODUCT_OUT, e.g. out/target/product/vsoc_x86_64.')
+  parser.add_argument('--soong_out', required=True, help='The path of Soong output directory, e.g. out/soong')
+  parser.add_argument('--build_version', required=True, help='The build version.')
+  parser.add_argument('--product_mfr', required=True, help='The product manufacturer.')
+  parser.add_argument('--json', action='store_true', default=False, help='Generated SBOM file in SPDX JSON format')
+
+  return parser.parse_args()
+
+
+def log(*info):
+  if args.verbose:
+    for i in info:
+      print(i)
+
+
+def new_package_id(package_name, type):
+  return f'SPDXRef-{type}-{sbom_data.encode_for_spdxid(package_name)}'
+
+
+def new_file_id(file_path):
+  return f'SPDXRef-{sbom_data.encode_for_spdxid(file_path)}'
+
+
+def new_license_id(license_name):
+  return f'LicenseRef-{sbom_data.encode_for_spdxid(license_name)}'
+
+
+def checksum(file_path):
+  h = hashlib.sha1()
+  if os.path.islink(file_path):
+    h.update(os.readlink(file_path).encode('utf-8'))
+  else:
+    with open(file_path, 'rb') as f:
+      h.update(f.read())
+  return f'SHA1: {h.hexdigest()}'
+
+
+def is_soong_prebuilt_module(file_metadata):
+  return (file_metadata['soong_module_type'] and
+          file_metadata['soong_module_type'] in SOONG_PREBUILT_MODULE_TYPES)
+
+
+def is_source_package(file_metadata):
+  module_path = file_metadata['module_path']
+  return module_path.startswith('external/') and not is_prebuilt_package(file_metadata)
+
+
+def is_prebuilt_package(file_metadata):
+  module_path = file_metadata['module_path']
+  if module_path:
+    return (module_path.startswith('prebuilts/') or
+            is_soong_prebuilt_module(file_metadata) or
+            file_metadata['is_prebuilt_make_module'])
+
+  kernel_module_copy_files = file_metadata['kernel_module_copy_files']
+  if kernel_module_copy_files and not kernel_module_copy_files.startswith('ANDROID-GEN:'):
+    return True
+
+  return False
+
+
+def get_source_package_info(file_metadata, metadata_file_path):
+  """Return source package info exists in its METADATA file, currently including name, security tag
+  and external SBOM reference.
+
+  See go/android-spdx and go/android-sbom-gen for more details.
+  """
+  if not metadata_file_path:
+    return file_metadata['module_path'], []
+
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  external_refs = []
+  for tag in metadata_proto.third_party.security.tag:
+    if tag.lower().startswith((NVD_CPE23 + 'cpe:2.3:').lower()):
+      external_refs.append(
+          sbom_data.PackageExternalRef(category=sbom_data.PackageExternalRefCategory.SECURITY,
+                                       type=sbom_data.PackageExternalRefType.cpe23Type,
+                                       locator=tag.removeprefix(NVD_CPE23)))
+    elif tag.lower().startswith((NVD_CPE23 + 'cpe:/').lower()):
+      external_refs.append(
+          sbom_data.PackageExternalRef(category=sbom_data.PackageExternalRefCategory.SECURITY,
+                                       type=sbom_data.PackageExternalRefType.cpe22Type,
+                                       locator=tag.removeprefix(NVD_CPE23)))
+
+  if metadata_proto.name:
+    return metadata_proto.name, external_refs
+  else:
+    return os.path.basename(metadata_file_path), external_refs  # return the directory name only as package name
+
+
+def get_prebuilt_package_name(file_metadata, metadata_file_path):
+  """Return name of a prebuilt package, which can be from the METADATA file, metadata file path,
+  module path or kernel module's source path if the installed file is a kernel module.
+
+  See go/android-spdx and go/android-sbom-gen for more details.
+  """
+  name = None
+  if metadata_file_path:
+    metadata_proto = metadata_file_protos[metadata_file_path]
+    if metadata_proto.name:
+      name = metadata_proto.name
+    else:
+      name = metadata_file_path
+  elif file_metadata['module_path']:
+    name = file_metadata['module_path']
+  elif file_metadata['kernel_module_copy_files']:
+    src_path = file_metadata['kernel_module_copy_files'].split(':')[0]
+    name = os.path.dirname(src_path)
+
+  return name.removeprefix('prebuilts/').replace('/', '-')
+
+
+def get_metadata_file_path(file_metadata):
+  """Search for METADATA file of a package and return its path."""
+  metadata_path = ''
+  if file_metadata['module_path']:
+    metadata_path = file_metadata['module_path']
+  elif file_metadata['kernel_module_copy_files']:
+    metadata_path = os.path.dirname(file_metadata['kernel_module_copy_files'].split(':')[0])
+
+  while metadata_path and not os.path.exists(metadata_path + '/METADATA'):
+    metadata_path = os.path.dirname(metadata_path)
+
+  return metadata_path
+
+
+def get_package_version(metadata_file_path):
+  """Return a package's version in its METADATA file."""
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  return metadata_proto.third_party.version
+
+
+def get_package_homepage(metadata_file_path):
+  """Return a package's homepage URL in its METADATA file."""
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  if metadata_proto.third_party.homepage:
+    return metadata_proto.third_party.homepage
+  for url in metadata_proto.third_party.url:
+    if url.type == metadata_file_pb2.URL.Type.HOMEPAGE:
+      return url.value
+
+  return None
+
+
+def get_package_download_location(metadata_file_path):
+  """Return a package's code repository URL in its METADATA file."""
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  if metadata_proto.third_party.url:
+    urls = sorted(metadata_proto.third_party.url, key=lambda url: url.type)
+    if urls[0].type != metadata_file_pb2.URL.Type.HOMEPAGE:
+      return urls[0].value
+    elif len(urls) > 1:
+      return urls[1].value
+
+  return None
+
+
+def get_license_text(license_files):
+  license_text = ''
+  for license_file in license_files:
+    if args.debug:
+      license_text += '#### Content from ' + license_file + '\n'
+    else:
+      license_text += pathlib.Path(license_file).read_text(errors='replace') + '\n\n'
+  return license_text
+
+
+def get_sbom_fragments(installed_file_metadata, metadata_file_path):
+  """Return SPDX fragment of source/prebuilt packages, which usually contains a SOURCE/PREBUILT
+  package, a UPSTREAM package and an external SBOM document reference if sbom_ref defined in its
+  METADATA file.
+
+  See go/android-spdx and go/android-sbom-gen for more details.
+  """
+  external_doc_ref = None
+  packages = []
+  relationships = []
+  licenses = []
+
+  # Info from METADATA file
+  homepage = get_package_homepage(metadata_file_path)
+  version = get_package_version(metadata_file_path)
+  download_location = get_package_download_location(metadata_file_path)
+
+  lics = db.get_package_licenses(installed_file_metadata['module_path'])
+  if not lics:
+    lics = db.get_package_licenses(metadata_file_path)
+
+  if lics:
+    for license_name, license_files in lics.items():
+      if not license_files:
+        continue
+      license_id = new_license_id(license_name)
+      if license_name not in licenses_text:
+        licenses_text[license_name] = get_license_text(license_files.split(' '))
+      licenses.append(sbom_data.License(id=license_id, name=license_name, text=licenses_text[license_name]))
+
+  if is_source_package(installed_file_metadata):
+    # Source fork packages
+    name, external_refs = get_source_package_info(installed_file_metadata, metadata_file_path)
+    source_package_id = new_package_id(name, PKG_SOURCE)
+    source_package = sbom_data.Package(id=source_package_id, name=name, version=args.build_version,
+                                       download_location=sbom_data.VALUE_NONE,
+                                       supplier='Organization: ' + args.product_mfr,
+                                       external_refs=external_refs)
+
+    upstream_package_id = new_package_id(name, PKG_UPSTREAM)
+    upstream_package = sbom_data.Package(id=upstream_package_id, name=name, version=version,
+                                         supplier=(
+                                               'Organization: ' + homepage) if homepage else sbom_data.VALUE_NOASSERTION,
+                                         download_location=download_location)
+    packages += [source_package, upstream_package]
+    relationships.append(sbom_data.Relationship(id1=source_package_id,
+                                                relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                                id2=upstream_package_id))
+
+    for license in licenses:
+      source_package.declared_license_ids.append(license.id)
+      upstream_package.declared_license_ids.append(license.id)
+
+  elif is_prebuilt_package(installed_file_metadata):
+    # Prebuilt fork packages
+    name = get_prebuilt_package_name(installed_file_metadata, metadata_file_path)
+    prebuilt_package_id = new_package_id(name, PKG_PREBUILT)
+    prebuilt_package = sbom_data.Package(id=prebuilt_package_id,
+                                         name=name,
+                                         download_location=sbom_data.VALUE_NONE,
+                                         version=version if version else args.build_version,
+                                         supplier='Organization: ' + args.product_mfr)
+
+    upstream_package_id = new_package_id(name, PKG_UPSTREAM)
+    upstream_package = sbom_data.Package(id=upstream_package_id, name=name, version=version,
+                                         supplier=(
+                                               'Organization: ' + homepage) if homepage else sbom_data.VALUE_NOASSERTION,
+                                         download_location=download_location)
+    packages += [prebuilt_package, upstream_package]
+    relationships.append(sbom_data.Relationship(id1=prebuilt_package_id,
+                                                relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                                id2=upstream_package_id))
+    for license in licenses:
+      prebuilt_package.declared_license_ids.append(license.id)
+      upstream_package.declared_license_ids.append(license.id)
+
+  if metadata_file_path:
+    metadata_proto = metadata_file_protos[metadata_file_path]
+    if metadata_proto.third_party.WhichOneof('sbom') == 'sbom_ref':
+      sbom_url = metadata_proto.third_party.sbom_ref.url
+      sbom_checksum = metadata_proto.third_party.sbom_ref.checksum
+      upstream_element_id = metadata_proto.third_party.sbom_ref.element_id
+      if sbom_url and sbom_checksum and upstream_element_id:
+        doc_ref_id = f'DocumentRef-{PKG_UPSTREAM}-{sbom_data.encode_for_spdxid(name)}'
+        external_doc_ref = sbom_data.DocumentExternalReference(id=doc_ref_id,
+                                                               uri=sbom_url,
+                                                               checksum=sbom_checksum)
+        relationships.append(
+            sbom_data.Relationship(id1=upstream_package_id,
+                                   relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                   id2=doc_ref_id + ':' + upstream_element_id))
+
+  return external_doc_ref, packages, relationships, licenses
+
+
+def save_report(report_file_path, report):
+  with open(report_file_path, 'w', encoding='utf-8') as report_file:
+    for type, issues in report.items():
+      report_file.write(type + '\n')
+      for issue in issues:
+        report_file.write('\t' + issue + '\n')
+      report_file.write('\n')
+
+
+# Validate the metadata generated by Make for installed files and report if there is no metadata.
+def installed_file_has_metadata(installed_file_metadata, report):
+  installed_file = installed_file_metadata['installed_file']
+  module_path = installed_file_metadata['module_path']
+  product_copy_files = installed_file_metadata['product_copy_files']
+  kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
+  is_platform_generated = installed_file_metadata['is_platform_generated']
+
+  if (not module_path and
+      not product_copy_files and
+      not kernel_module_copy_files and
+      not is_platform_generated and
+      not installed_file.endswith('.fsv_meta')):
+    report[ISSUE_NO_METADATA].append(installed_file)
+    return False
+
+  return True
+
+
+# Validate identifiers in a package's METADATA.
+# 1) Only known identifier type is allowed
+# 2) Only one identifier's primary_source can be true
+def validate_package_metadata(metadata_file_path, package_metadata):
+  primary_source_found = False
+  for identifier in package_metadata.third_party.identifier:
+    if identifier.type not in THIRD_PARTY_IDENTIFIER_TYPES:
+      sys.exit(f'Unknown value of third_party.identifier.type in {metadata_file_path}/METADATA: {identifier.type}.')
+    if primary_source_found and identifier.primary_source:
+      sys.exit(
+          f'Field "primary_source" is set to true in multiple third_party.identifier in {metadata_file_path}/METADATA.')
+    primary_source_found = identifier.primary_source
+
+
+def report_metadata_file(metadata_file_path, installed_file_metadata, report):
+  if metadata_file_path:
+    report[INFO_METADATA_FOUND_FOR_PACKAGE].append(
+        'installed_file: {}, module_path: {}, METADATA file: {}'.format(
+            installed_file_metadata['installed_file'],
+            installed_file_metadata['module_path'],
+            metadata_file_path + '/METADATA'))
+
+    package_metadata = metadata_file_pb2.Metadata()
+    with open(metadata_file_path + '/METADATA', 'rt') as f:
+      text_format.Parse(f.read(), package_metadata)
+
+    validate_package_metadata(metadata_file_path, package_metadata)
+
+    if not metadata_file_path in metadata_file_protos:
+      metadata_file_protos[metadata_file_path] = package_metadata
+      if not package_metadata.name:
+        report[ISSUE_METADATA_FILE_INCOMPLETE].append(f'{metadata_file_path}/METADATA does not have "name"')
+
+      if not package_metadata.third_party.version:
+        report[ISSUE_METADATA_FILE_INCOMPLETE].append(
+            f'{metadata_file_path}/METADATA does not have "third_party.version"')
+
+      for tag in package_metadata.third_party.security.tag:
+        if not tag.startswith(NVD_CPE23):
+          report[ISSUE_UNKNOWN_SECURITY_TAG_TYPE].append(
+              f'Unknown security tag type: {tag} in {metadata_file_path}/METADATA')
+  else:
+    report[ISSUE_NO_METADATA_FILE].append(
+        "installed_file: {}, module_path: {}".format(
+            installed_file_metadata['installed_file'], installed_file_metadata['module_path']))
+
+
+# If a file is from a source fork or prebuilt fork package, add its package information to SBOM
+def add_package_of_file(file_id, file_metadata, doc, report):
+  metadata_file_path = get_metadata_file_path(file_metadata)
+  report_metadata_file(metadata_file_path, file_metadata, report)
+
+  external_doc_ref, pkgs, rels, licenses = get_sbom_fragments(file_metadata, metadata_file_path)
+  if len(pkgs) > 0:
+    if external_doc_ref:
+      doc.add_external_ref(external_doc_ref)
+    for p in pkgs:
+      doc.add_package(p)
+    for rel in rels:
+      doc.add_relationship(rel)
+    fork_package_id = pkgs[0].id  # The first package should be the source/prebuilt fork package
+    doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                id2=fork_package_id))
+    for license in licenses:
+      doc.add_license(license)
+
+
+# Add STATIC_LINK relationship for static dependencies of a file
+def add_static_deps_of_file(file_id, file_metadata, doc):
+  if not file_metadata['static_dep_files'] and not file_metadata['whole_static_dep_files']:
+    return
+  static_dep_files = []
+  if file_metadata['static_dep_files']:
+    static_dep_files += file_metadata['static_dep_files'].split(' ')
+  if file_metadata['whole_static_dep_files']:
+    static_dep_files += file_metadata['whole_static_dep_files'].split(' ')
+
+  for dep_file in static_dep_files:
+    # Static libs are not shipped on devices, so names are derived from .intermediates paths.
+    doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                relationship=sbom_data.RelationshipType.STATIC_LINK,
+                                                id2=new_file_id(
+                                                  dep_file.removeprefix(args.soong_out + '/.intermediates/'))))
+
+
+def add_licenses_of_file(file_id, file_metadata, doc):
+  lics = db.get_module_licenses(file_metadata.get('name', ''), file_metadata['module_path'])
+  if lics:
+    file = next(f for f in doc.files if file_id == f.id)
+    for license_name, license_files in lics.items():
+      if not license_files:
+        continue
+      license_id = new_license_id(license_name)
+      file.concluded_license_ids.append(license_id)
+      if license_name not in licenses_text:
+        license_text = get_license_text(license_files.split(' '))
+        licenses_text[license_name] = license_text
+
+      doc.add_license(sbom_data.License(id=license_id, name=license_name, text=licenses_text[license_name]))
+
+
+def get_all_transitive_static_dep_files_of_installed_files(installed_files_metadata, db, report):
+  # Find all transitive static dep files of all installed files
+  q = queue.Queue()
+  for installed_file_metadata in installed_files_metadata:
+    if installed_file_metadata['static_dep_files']:
+      for f in installed_file_metadata['static_dep_files'].split(' '):
+        q.put(f)
+    if installed_file_metadata['whole_static_dep_files']:
+      for f in installed_file_metadata['whole_static_dep_files'].split(' '):
+        q.put(f)
+
+  all_static_dep_files = {}
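+  # Dict used as a visited set for the breadth-first walk over static dependencies.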
+  while not q.empty():
+    dep_file = q.get()
+    if dep_file in all_static_dep_files:
+      # It has been processed
+      continue
+
+    all_static_dep_files[dep_file] = True
+    soong_module = db.get_soong_module_of_built_file(dep_file)
+    if not soong_module:
+      # This should not happen, add to report[ISSUE_NO_MODULE_FOUND_FOR_STATIC_DEP]
+      report[ISSUE_NO_MODULE_FOUND_FOR_STATIC_DEP].append(dep_file)
+      continue
+
+    if soong_module['static_dep_files']:
+      for f in soong_module['static_dep_files'].split(' '):
+        if f not in all_static_dep_files:
+          q.put(f)
+    if soong_module['whole_static_dep_files']:
+      for f in soong_module['whole_static_dep_files'].split(' '):
+        if f not in all_static_dep_files:
+          q.put(f)
+
+  return sorted(all_static_dep_files.keys())
+
+
+def main():
+  global args
+  args = get_args()
+  log('Args:', vars(args))
+
+  global db
+  db = compliance_metadata.MetadataDb(args.metadata)
+  if args.debug:
+    db.dump_debug_db(os.path.dirname(args.output_file) + '/compliance-metadata-debug.db')
+
+  global metadata_file_protos
+  metadata_file_protos = {}
+  global licenses_text
+  licenses_text = {}
+
+  product_package_id = sbom_data.SPDXID_PRODUCT
+  product_package_name = sbom_data.PACKAGE_NAME_PRODUCT
+  product_package = sbom_data.Package(id=product_package_id,
+                                      name=product_package_name,
+                                      download_location=sbom_data.VALUE_NONE,
+                                      version=args.build_version,
+                                      supplier='Organization: ' + args.product_mfr,
+                                      files_analyzed=True)
+  doc_name = args.build_version
+  doc = sbom_data.Document(name=doc_name,
+                           namespace=f'https://www.google.com/sbom/spdx/android/{doc_name}',
+                           creators=['Organization: ' + args.product_mfr],
+                           describes=product_package_id)
+
+  doc.packages.append(product_package)
+  doc.packages.append(sbom_data.Package(id=sbom_data.SPDXID_PLATFORM,
+                                        name=sbom_data.PACKAGE_NAME_PLATFORM,
+                                        download_location=sbom_data.VALUE_NONE,
+                                        version=args.build_version,
+                                        supplier='Organization: ' + args.product_mfr,
+                                        declared_license_ids=[sbom_data.SPDXID_LICENSE_APACHE]))
+
+  # Report on some issues and information
+  report = {
+      ISSUE_NO_METADATA: [],
+      ISSUE_NO_METADATA_FILE: [],
+      ISSUE_METADATA_FILE_INCOMPLETE: [],
+      ISSUE_UNKNOWN_SECURITY_TAG_TYPE: [],
+      ISSUE_INSTALLED_FILE_NOT_EXIST: [],
+      ISSUE_NO_MODULE_FOUND_FOR_STATIC_DEP: [],
+      INFO_METADATA_FOUND_FOR_PACKAGE: [],
+  }
+
+  # Get installed files and corresponding make modules' metadata if an installed file is from a make module.
+  installed_files_metadata = db.get_installed_files()
+
+  # Find which Soong module an installed file is from and merge metadata from Make and Soong
+  for installed_file_metadata in installed_files_metadata:
+    soong_module = db.get_soong_module_of_installed_file(installed_file_metadata['installed_file'])
+    if soong_module:
+      # Merge soong metadata to make metadata
+      installed_file_metadata.update(soong_module)
+    else:
+      # For make modules soong_module_type should be empty
+      installed_file_metadata['soong_module_type'] = ''
+      installed_file_metadata['static_dep_files'] = ''
+      installed_file_metadata['whole_static_dep_files'] = ''
+
+  # Scan the metadata and create the corresponding package and file records in SPDX
+  for installed_file_metadata in installed_files_metadata:
+    installed_file = installed_file_metadata['installed_file']
+    module_path = installed_file_metadata['module_path']
+    product_copy_files = installed_file_metadata['product_copy_files']
+    kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
+    build_output_path = installed_file
+    installed_file = installed_file.removeprefix(args.product_out)
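+    # build_output_path keeps the full host path for existence checks and checksums;
+    # installed_file becomes the device-relative path recorded in the SBOM.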
+
+    if not installed_file_has_metadata(installed_file_metadata, report):
+      continue
+    if not (os.path.islink(build_output_path) or os.path.isfile(build_output_path)):
+      report[ISSUE_INSTALLED_FILE_NOT_EXIST].append(installed_file)
+      continue
+
+    file_id = new_file_id(installed_file)
+    sha1 = checksum(build_output_path)
+    f = sbom_data.File(id=file_id, name=installed_file, checksum=sha1)
+    doc.files.append(f)
+    product_package.file_ids.append(file_id)
+
+    if is_source_package(installed_file_metadata) or is_prebuilt_package(installed_file_metadata):
+      add_package_of_file(file_id, installed_file_metadata, doc, report)
+
+    elif module_path or installed_file_metadata['is_platform_generated']:
+      # File from PLATFORM package
+      doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                  relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                  id2=sbom_data.SPDXID_PLATFORM))
+      if installed_file_metadata['is_platform_generated']:
+        f.concluded_license_ids = [sbom_data.SPDXID_LICENSE_APACHE]
+
+    elif product_copy_files:
+      # Format of product_copy_files: <source path>:<dest path>
+      src_path = product_copy_files.split(':')[0]
+      # So far product_copy_files are copied from directory system, kernel, hardware, frameworks and device,
+      # so process them as files from PLATFORM package
+      doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                  relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                  id2=sbom_data.SPDXID_PLATFORM))
+      if installed_file_metadata['license_text']:
+        if installed_file_metadata['license_text'] == 'build/soong/licenses/LICENSE':
+          f.concluded_license_ids = [sbom_data.SPDXID_LICENSE_APACHE]
+
+    elif installed_file.endswith('.fsv_meta'):
+      doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                  relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                  id2=sbom_data.SPDXID_PLATFORM))
+      f.concluded_license_ids = [sbom_data.SPDXID_LICENSE_APACHE]
+
+    elif kernel_module_copy_files.startswith('ANDROID-GEN'):
+      # For the four files generated for _dlkm, _ramdisk partitions
+      doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                  relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                  id2=sbom_data.SPDXID_PLATFORM))
+
+    # Process static dependencies of the installed file
+    add_static_deps_of_file(file_id, installed_file_metadata, doc)
+
+    # Add licenses of the installed file
+    add_licenses_of_file(file_id, installed_file_metadata, doc)
+
+  # Add all static library files to SBOM
+  for dep_file in get_all_transitive_static_dep_files_of_installed_files(installed_files_metadata, db, report):
+    filepath = dep_file.removeprefix(args.soong_out + '/.intermediates/')
+    file_id = new_file_id(filepath)
+    # SHA1 of empty string. Sometimes .a files might not be built.
+    sha1 = 'SHA1: da39a3ee5e6b4b0d3255bfef95601890afd80709'
+    if os.path.islink(dep_file) or os.path.isfile(dep_file):
+      sha1 = checksum(dep_file)
+    doc.files.append(sbom_data.File(id=file_id,
+                                    name=filepath,
+                                    checksum=sha1))
+    file_metadata = {
+        'installed_file': dep_file,
+        'is_prebuilt_make_module': False
+    }
+    file_metadata.update(db.get_soong_module_of_built_file(dep_file))
+    add_package_of_file(file_id, file_metadata, doc, report)
+
+    # Add relationships for static deps of static libraries
+    add_static_deps_of_file(file_id, file_metadata, doc)
+
+    # Add licenses of the static lib
+    add_licenses_of_file(file_id, file_metadata, doc)
+
+  # Save SBOM records to output file
+  doc.generate_packages_verification_code()
+  doc.created = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
+  prefix = args.output_file
+  if prefix.endswith('.spdx'):
+    prefix = prefix.removesuffix('.spdx')
+  elif prefix.endswith('.spdx.json'):
+    prefix = prefix.removesuffix('.spdx.json')
+
+  output_file = prefix + '.spdx'
+  with open(output_file, 'w', encoding="utf-8") as file:
+    sbom_writers.TagValueWriter.write(doc, file)
+  if args.json:
+    with open(prefix + '.spdx.json', 'w', encoding="utf-8") as file:
+      sbom_writers.JSONWriter.write(doc, file)
+
+  save_report(prefix + '-gen-report.txt', report)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/tools/sbom/generate-sbom-framework_res.py b/tools/sbom/generate-sbom-framework_res.py
index d0d232d..27f3d2e 100644
--- a/tools/sbom/generate-sbom-framework_res.py
+++ b/tools/sbom/generate-sbom-framework_res.py
@@ -80,7 +80,8 @@
 
   resource_file_spdxids = []
   for file in layoutlib_sbom[sbom_writers.PropNames.FILES]:
-    if file[sbom_writers.PropNames.FILE_NAME].startswith('data/res/'):
+    file_path = file[sbom_writers.PropNames.FILE_NAME]
+    if file_path.startswith('data/res/') or file_path.startswith('data/overlays/'):
       resource_file_spdxids.append(file[sbom_writers.PropNames.SPDXID])
 
   doc.relationships = [
diff --git a/tools/signapk/src/com/android/signapk/SignApk.java b/tools/signapk/src/com/android/signapk/SignApk.java
index 6b2341b..654e196 100644
--- a/tools/signapk/src/com/android/signapk/SignApk.java
+++ b/tools/signapk/src/com/android/signapk/SignApk.java
@@ -302,7 +302,6 @@
             final KeyStore keyStore, final String keyName)
             throws CertificateException, KeyStoreException, NoSuchAlgorithmException,
                     UnrecoverableKeyException, UnrecoverableEntryException {
-        final Key key = keyStore.getKey(keyName, readPassword(keyName));
         final PrivateKeyEntry privateKeyEntry = (PrivateKeyEntry) keyStore.getEntry(keyName, null);
         if (privateKeyEntry == null) {
         throw new Error(