Merge "aconfig: split aconfig_storage_file crate" into main
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 9f43a3e..82e8baa 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -164,6 +164,8 @@
 
 $(call add_soong_config_var_value,ANDROID,release_binder_death_recipient_weak_from_jni,$(RELEASE_BINDER_DEATH_RECIPIENT_WEAK_FROM_JNI))
 
+$(call add_soong_config_var_value,ANDROID,release_selinux_data_data_ignore,$(RELEASE_SELINUX_DATA_DATA_IGNORE))
+
 # Enable system_server optimizations by default unless explicitly set or if
 # there may be dependent runtime jars.
 # TODO(b/240588226): Remove the off-by-default exceptions after handling
diff --git a/core/board_config.mk b/core/board_config.mk
index 8c23f93..77489c6 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -287,6 +287,9 @@
 
 include $(BUILD_SYSTEM)/board_config_wifi.mk
 
+# Set up soong config for "soong_config_value_variable".
+-include vendor/google/build/soong/soong_config_namespace/camera.mk
+
 # Default *_CPU_VARIANT_RUNTIME to CPU_VARIANT if unspecified.
 TARGET_CPU_VARIANT_RUNTIME := $(or $(TARGET_CPU_VARIANT_RUNTIME),$(TARGET_CPU_VARIANT))
 TARGET_2ND_CPU_VARIANT_RUNTIME := $(or $(TARGET_2ND_CPU_VARIANT_RUNTIME),$(TARGET_2ND_CPU_VARIANT))
diff --git a/tools/aconfig/printflags/src/main.rs b/tools/aconfig/printflags/src/main.rs
index a0c9ee8..7838b51 100644
--- a/tools/aconfig/printflags/src/main.rs
+++ b/tools/aconfig/printflags/src/main.rs
@@ -67,9 +67,23 @@
     let device_config_flags = parse_device_config(dc_stdout);
 
     // read aconfig_flags.pb files
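+    // Each APEX may also ship an aconfig_flags.pb. Match only canonical,
+    // dotted mount points such as /apex/com.android.foo; "@"-versioned
+    // entries like /apex/com.android.foo@<version> are skipped so every
+    // APEX is read at most once.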
+    let apex_pattern = Regex::new(r"^/apex/[^@]+\.[^@]+$").unwrap();
+    let mut mount_points = vec![
+        "system".to_string(),
+        "system_ext".to_string(),
+        "product".to_string(),
+        "vendor".to_string(),
+    ];
+    for apex in fs::read_dir("/apex")? {
+        let path_name = apex?.path().display().to_string();
+        if apex_pattern.is_match(&path_name) {
+            mount_points.push(path_name);
+        }
+    }
+
     let mut flags: BTreeMap<String, Vec<String>> = BTreeMap::new();
-    for partition in ["system", "system_ext", "product", "vendor"] {
-        let path = format!("/{}/etc/aconfig_flags.pb", partition);
+    for mount_point in mount_points {
+        let path = format!("/{}/etc/aconfig_flags.pb", mount_point);
         let Ok(bytes) = fs::read(&path) else {
             eprintln!("warning: failed to read {}", path);
             continue;
@@ -80,7 +94,7 @@
             })?;
         for flag in parsed_flags.parsed_flag {
             let key = format!("{}/{}.{}", flag.namespace(), flag.package(), flag.name());
-            let value = format!("{:?} + {:?} ({})", flag.permission(), flag.state(), partition);
+            let value = format!("{:?} + {:?} ({})", flag.permission(), flag.state(), mount_point);
             flags.entry(key).or_default().push(value);
         }
     }
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index bf69dec..52d95d8 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -795,6 +795,16 @@
         # Copy it verbatim if we allow the file to exist.
         common.ZipWriteStr(output_tf_zip, out_info, data)
 
+    # Re-sign microdroid_vendor.img's AVB hashtree footer with the vendor key.
+    elif filename == "VENDOR/etc/avf/microdroid/microdroid_vendor.img":
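+      # Reuse the AVB key/algorithm supplied for the vendor partition
+      # (e.g. via --avb_vendor_key / --avb_vendor_algorithm).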
+      vendor_key = OPTIONS.avb_keys.get("vendor")
+      vendor_algorithm = OPTIONS.avb_algorithms.get("vendor")
+      with tempfile.NamedTemporaryFile() as image:
+        image.write(data)
+        image.flush()
+        ReplaceKeyInAvbHashtreeFooter(image, vendor_key, vendor_algorithm,
+            misc_info)
+        common.ZipWrite(output_tf_zip, image.name, filename)
     # A non-APK file; copy it verbatim.
     else:
       common.ZipWriteStr(output_tf_zip, out_info, data)
@@ -812,6 +822,106 @@
   # Write back misc_info with the latest values.
   ReplaceMiscInfoTxt(input_tf_zip, output_tf_zip, misc_info)
 
+def ReplaceKeyInAvbHashtreeFooter(image, new_key, new_algorithm, misc_info):
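+  """Replaces the signing key in the AVB hashtree footer of an image file.
+
+  Re-runs `avbtool add_hashtree_footer` on the temporary image with new_key
+  and new_algorithm, preserving the original partition name, size, hash
+  algorithm, salt and properties, then asserts that the root digest is
+  unchanged.
+  """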
+  # Parse the output of `avbtool info_image` for the given image into a
+  # nested dict/list structure (see the shape sketch below).
+  def GetAvbInfo(avbtool, image_name):
+    # Get the raw text output of `avbtool info_image`.
+    info_raw = common.RunAndCheckOutput([
+      avbtool, 'info_image',
+      '--image', image_name
+    ])
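+    # The text is parsed into a nested structure keyed by the labels in the
+    # avbtool output; the children of "Descriptors" are collected into a list
+    # of single-entry dicts. Rough shape (values illustrative only):
+    #   {'Image size': '... bytes',
+    #    'Descriptors': [
+    #      {'Hashtree descriptor': {'Partition Name': '...', 'Salt': '...',
+    #                               'Root Digest': '...', ...}},
+    #      {'Prop': {'example_prop_key': 'example_prop_value'}}]}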
+
+    # line_matcher parses one output line of `avbtool info_image` into
+    # (indentation, key, value) groups.
+    # example input:          "      Hash Algorithm:        sha1"
+    # example matched groups: ("      ", "Hash Algorithm", "sha1")
+    line_matcher = re.compile(r'^(\s*)([^:]+):\s*(.*)$')
+    # prop_matcher parses the value part of a 'Prop' line in the
+    # `avbtool info_image` output.
+    # example input:          "example_prop_key -> 'example_prop_value'"
+    # example matched groups: ("example_prop_key", "example_prop_value")
+    prop_matcher = re.compile(r"(.+)\s->\s'(.+)'")
+    info = {}
+    indent_stack = [[-1, info]]
+    for line_info_raw in info_raw.split('\n'):
+      # Parse the line
+      line_info_parsed = line_matcher.match(line_info_raw)
+      if not line_info_parsed:
+        continue
+      indent = len(line_info_parsed.group(1))
+      key = line_info_parsed.group(2).strip()
+      value = line_info_parsed.group(3).strip()
+
+      # Pop the indentation stack back to this line's parent level.
+      while indent <= indent_stack[-1][0]:
+        del indent_stack[-1]
+
+      # Insert the parsed entry at the current nesting level.
+      cur_info = indent_stack[-1][1]
+      if value == "":
+        if key == "Descriptors":
+          empty_list = []
+          cur_info[key] = empty_list
+          indent_stack.append([indent, empty_list])
+        else:
+          empty_dict = {}
+          cur_info.append({key: empty_dict})
+          indent_stack.append([indent, empty_dict])
+      elif key == "Prop":
+        prop_parsed = prop_matcher.match(value)
+        if not prop_parsed:
+          raise ValueError(
+              "Failed to parse prop while getting avb information.")
+        cur_info.append({key: {prop_parsed.group(1): prop_parsed.group(2)}})
+      else:
+        cur_info[key] = value
+
+    return info
+
+  # Get the hashtree descriptor from the parsed avb info.
+  def GetAvbHashtreeDescriptor(avb_info):
+    hashtree_descriptors = tuple(filter(lambda x: "Hashtree descriptor" in x,
+        avb_info.get('Descriptors')))
+    if len(hashtree_descriptors) != 1:
+      raise ValueError("The number of hashtree descriptors is not 1.")
+    return hashtree_descriptors[0]["Hashtree descriptor"]
+
+  # Get avb info
+  avbtool = misc_info['avb_avbtool']
+  info = GetAvbInfo(avbtool, image.name)
+  hashtree_descriptor = GetAvbHashtreeDescriptor(info)
+
+  # Build the `avbtool add_hashtree_footer` command, reusing the original
+  # partition name, size, hash algorithm and salt with the new key.
+  cmd = [avbtool, 'add_hashtree_footer',
+    '--key', new_key,
+    '--algorithm', new_algorithm,
+    '--partition_name', hashtree_descriptor.get("Partition Name"),
+    '--partition_size', info.get("Image size").removesuffix(" bytes"),
+    '--hash_algorithm', hashtree_descriptor.get("Hash Algorithm"),
+    '--salt', hashtree_descriptor.get("Salt"),
+    '--do_not_generate_fec',
+    '--image', image.name
+  ]
+
+  # Append the original footer's properties to the command.
+  props = map(lambda x: x.get("Prop"), filter(lambda x: "Prop" in x,
+      info.get('Descriptors')))
+  for prop_wrapped in props:
+    prop = tuple(prop_wrapped.items())
+    if len(prop) != 1:
+      raise ValueError("The number of property is not 1.")
+    cmd.append('--prop')
+    cmd.append(prop[0][0] + ':' + prop[0][1])
+
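+  # With the properties appended, the final invocation looks roughly like
+  # (values illustrative):
+  #   avbtool add_hashtree_footer --key <new_key> --algorithm SHA256_RSA4096 \
+  #     --partition_name vendor --partition_size <bytes> --hash_algorithm sha1 \
+  #     --salt <salt> --do_not_generate_fec --image <tmp_image> \
+  #     --prop <key>:<value> ...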
+  # Replace the hashtree footer, signing with the new key.
+  common.RunAndCheckOutput(cmd)
+
+  # Check that re-signing did not change the root digest.
+  new_info = GetAvbInfo(avbtool, image.name)
+  new_hashtree_descriptor = GetAvbHashtreeDescriptor(new_info)
+  root_digest = hashtree_descriptor.get("Root Digest")
+  new_root_digest = new_hashtree_descriptor.get("Root Digest")
+  assert root_digest == new_root_digest, \
+      ("Root digest in hashtree descriptor shouldn't be changed. Old: {}, New: "
+       "{}").format(root_digest, new_root_digest)
 
 def ReplaceCerts(data):
   """Replaces all the occurences of X.509 certs with the new ones.