Merge "add module_x86_64only product"
diff --git a/core/Makefile b/core/Makefile
index 171dbde..7ea85bf 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -4961,9 +4961,9 @@
   mke2fs \
   mke2fs.conf \
   mkfs.erofs \
-  mkf2fsuserimg.sh \
+  mkf2fsuserimg \
   mksquashfs \
-  mksquashfsimage.sh \
+  mksquashfsimage \
   mkuserimg_mke2fs \
   ota_extractor \
   ota_from_target_files \
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 9bb6c47..adf3668 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -1036,6 +1036,24 @@
   $(filter-out $(ALL_MODULES.$(my_register_name).SUPPORTED_VARIANTS),$(my_supported_variant))
 
 ##########################################################################
+## When compiling against an API-imported module, use API import stub
+## libraries.
+##########################################################################
+ifneq ($(LOCAL_USE_VNDK),)
+  ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+    apiimport_postfix := .apiimport
+    ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+      apiimport_postfix := .apiimport.product
+    else
+      apiimport_postfix := .apiimport.vendor
+    endif
+
+    my_required_modules := $(foreach l,$(my_required_modules), \
+      $(if $(filter $(l), $(API_IMPORTED_SHARED_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+  endif
+endif
+
+##########################################################################
 ## When compiling against the VNDK, add the .vendor or .product suffix to
 ## required modules.
 ##########################################################################
diff --git a/core/binary.mk b/core/binary.mk
index 3f32fa9..1ad9be8 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -1145,6 +1145,28 @@
     $(my_static_libraries),hwasan)
 endif
 
+###################################################################
+## When compiling against an API-imported module, use API import stub
+## libraries.
+###################################################################
+
+apiimport_postfix := .apiimport
+
+ifneq ($(LOCAL_USE_VNDK),)
+  ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+    apiimport_postfix := .apiimport.product
+  else
+    apiimport_postfix := .apiimport.vendor
+  endif
+endif
+
+my_shared_libraries := $(foreach l,$(my_shared_libraries), \
+ $(if $(filter $(l), $(API_IMPORTED_SHARED_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+my_system_shared_libraries := $(foreach l,$(my_system_shared_libraries), \
+ $(if $(filter $(l), $(API_IMPORTED_SHARED_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+my_header_libraries := $(foreach l,$(my_header_libraries), \
+ $(if $(filter $(l), $(API_IMPORTED_HEADER_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+
 ###########################################################
 ## When compiling against the VNDK, use LL-NDK libraries
 ###########################################################
diff --git a/core/cc_prebuilt_internal.mk b/core/cc_prebuilt_internal.mk
index e8e01d8..2de4115 100644
--- a/core/cc_prebuilt_internal.mk
+++ b/core/cc_prebuilt_internal.mk
@@ -139,6 +139,27 @@
 # my_shared_libraries).
 include $(BUILD_SYSTEM)/cxx_stl_setup.mk
 
+# When compiling against an API-imported module, use API import stub libraries.
+apiimport_postfix := .apiimport
+
+ifneq ($(LOCAL_USE_VNDK),)
+  ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+    apiimport_postfix := .apiimport.product
+  else
+    apiimport_postfix := .apiimport.vendor
+  endif
+endif
+
+ifdef my_shared_libraries
+my_shared_libraries := $(foreach l,$(my_shared_libraries), \
+ $(if $(filter $(l), $(API_IMPORTED_SHARED_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+endif #my_shared_libraries
+
+ifdef my_system_shared_libraries
+my_system_shared_libraries := $(foreach l,$(my_system_shared_libraries), \
+ $(if $(filter $(l), $(API_IMPORTED_SHARED_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+endif #my_system_shared_libraries
+
 ifdef my_shared_libraries
 ifdef LOCAL_USE_VNDK
   ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
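
The three Make changes above (core/base_rules.mk, core/binary.mk, core/cc_prebuilt_internal.mk) apply the same rewrite: any entry of my_required_modules, my_shared_libraries, my_system_shared_libraries, or my_header_libraries that also appears in API_IMPORTED_SHARED_LIBRARIES (or API_IMPORTED_HEADER_LIBRARIES) is redirected to its API-import stub variant, with the suffix chosen by the VNDK variant in use. A minimal Python sketch of that rule, not part of the build and using hypothetical names:

# Sketch only: models the $(foreach)/$(filter) rewrite in the Make hunks above.
def add_apiimport_suffix(libs, api_imported, use_vndk=False, use_vndk_product=False):
    """Redirects API-imported libraries to their .apiimport* stub variants."""
    if use_vndk:
        suffix = ".apiimport.product" if use_vndk_product else ".apiimport.vendor"
    else:
        suffix = ".apiimport"
    return [lib + suffix if lib in api_imported else lib for lib in libs]

# add_apiimport_suffix(["libfoo", "libbar"], {"libfoo"}, use_vndk=True)
#   -> ["libfoo.apiimport.vendor", "libbar"]
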
diff --git a/core/config.mk b/core/config.mk
index 9e4b93a..51e140d 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -603,8 +603,8 @@
 MKEXTUSERIMG := $(HOST_OUT_EXECUTABLES)/mkuserimg_mke2fs
 MKE2FS_CONF := system/extras/ext4_utils/mke2fs.conf
 MKEROFS := $(HOST_OUT_EXECUTABLES)/mkfs.erofs
-MKSQUASHFSUSERIMG := $(HOST_OUT_EXECUTABLES)/mksquashfsimage.sh
-MKF2FSUSERIMG := $(HOST_OUT_EXECUTABLES)/mkf2fsuserimg.sh
+MKSQUASHFSUSERIMG := $(HOST_OUT_EXECUTABLES)/mksquashfsimage
+MKF2FSUSERIMG := $(HOST_OUT_EXECUTABLES)/mkf2fsuserimg
 SIMG2IMG := $(HOST_OUT_EXECUTABLES)/simg2img$(HOST_EXECUTABLE_SUFFIX)
 E2FSCK := $(HOST_OUT_EXECUTABLES)/e2fsck$(HOST_EXECUTABLE_SUFFIX)
 TUNE2FS := $(HOST_OUT_EXECUTABLES)/tune2fs$(HOST_EXECUTABLE_SUFFIX)
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 8063ae2..67ee8ef 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -62,7 +62,7 @@
         "mkuserimg_mke2fs",
         "simg2img",
         "tune2fs",
-        "mkf2fsuserimg.sh",
+        "mkf2fsuserimg",
         "fsck.f2fs",
     ],
 }
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index e52214e..7639ffd 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -365,7 +365,7 @@
 
     run_fsck = RunErofsFsck
   elif fs_type.startswith("squash"):
-    build_command = ["mksquashfsimage.sh"]
+    build_command = ["mksquashfsimage"]
     build_command.extend([in_dir, out_file])
     if "squashfs_sparse_flag" in prop_dict and not disable_sparse:
       build_command.extend([prop_dict["squashfs_sparse_flag"]])
@@ -387,7 +387,7 @@
     if prop_dict.get("squashfs_disable_4k_align") == "true":
       build_command.extend(["-a"])
   elif fs_type.startswith("f2fs"):
-    build_command = ["mkf2fsuserimg.sh"]
+    build_command = ["mkf2fsuserimg"]
     build_command.extend([out_file, prop_dict["image_size"]])
     if "f2fs_sparse_flag" in prop_dict and not disable_sparse:
       build_command.extend([prop_dict["f2fs_sparse_flag"]])
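
The build_image.py hunks only swap the executable names, but they sit inside the fs_type dispatch that picks the image builder: the "squash" branch builds its command around mksquashfsimage with the input directory and output file, and the "f2fs" branch around mkf2fsuserimg with the output file and image size, before filesystem-specific flags from prop_dict are appended. A condensed sketch of that dispatch (the helper name is hypothetical and flag handling is elided):

# Sketch only: condensed view of the dispatch above; build_image.py also appends
# sparse/compressor/alignment flags taken from prop_dict before running the command.
def image_builder_command(fs_type, in_dir, out_file, image_size):
    if fs_type.startswith("squash"):
        return ["mksquashfsimage", in_dir, out_file]
    if fs_type.startswith("f2fs"):
        return ["mkf2fsuserimg", out_file, image_size]
    raise NotImplementedError("other fs types elided in this sketch")
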
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index a026316..1f5cb21 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -266,7 +266,7 @@
 import common
 import ota_utils
 from ota_utils import (UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata,
-                       PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, StreamingPropertyFiles, AbOtaPropertyFiles)
+                       PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME)
 from common import IsSparseImage
 import target_files_diff
 from check_target_files_vintf import CheckVintfIfTrebleEnabled
@@ -897,7 +897,7 @@
   # Metadata to comply with Android OTA package format.
   metadata = GetPackageMetadata(target_info, source_info)
   # Generate payload.
-  payload = PayloadGenerator()
+  payload = PayloadGenerator(OPTIONS.include_secondary, OPTIONS.wipe_user_data)
 
   partition_timestamps_flags = []
   # Enforce a max timestamp this payload can be applied on top of.
@@ -1021,15 +1021,8 @@
   # FinalizeMetadata().
   common.ZipClose(output_zip)
 
-  # AbOtaPropertyFiles intends to replace StreamingPropertyFiles, as it covers
-  # all the info of the latter. However, system updaters and OTA servers need to
-  # take time to switch to the new flag. We keep both of the flags for
-  # P-timeframe, and will remove StreamingPropertyFiles in later release.
-  needed_property_files = (
-      AbOtaPropertyFiles(),
-      StreamingPropertyFiles(),
-  )
-  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
+  FinalizeMetadata(metadata, staging_file, output_file,
+                   package_key=OPTIONS.package_key)
 
 
 def main(argv):
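
Taken together, the ota_from_target_files.py hunks move policy out of this script: the secondary and wipe flags now travel through the PayloadGenerator constructor, and FinalizeMetadata picks its own property-files default and takes the signing key directly. A condensed sketch of the resulting A/B flow (staging_file, output_file, output_zip, target_file and the OPTIONS values are assumed to be set up as elsewhere in this script; intermediate steps such as timestamp flags are omitted):

# Sketch only: caller-side flow after this change.
payload = PayloadGenerator(OPTIONS.include_secondary, OPTIONS.wipe_user_data)
payload.Generate(target_file)          # incremental OTAs also pass a source build
payload.Sign(PayloadSigner())
payload.WriteToZip(output_zip)         # payload_properties is produced here now
common.ZipClose(output_zip)
FinalizeMetadata(metadata, staging_file, output_file,
                 package_key=OPTIONS.package_key)
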
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 4ff5027..e1529c7 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -48,7 +48,7 @@
 SECURITY_PATCH_LEVEL_PROP_NAME = "ro.build.version.security_patch"
 
 
-def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
+def FinalizeMetadata(metadata, input_file, output_file, needed_property_files=None, package_key=None, pw=None):
   """Finalizes the metadata and signs an A/B OTA package.
 
   In order to stream an A/B OTA package, we need 'ota-streaming-property-files'
@@ -66,8 +66,21 @@
     input_file: The input ZIP filename that doesn't contain the package METADATA
         entry yet.
     output_file: The final output ZIP filename.
-    needed_property_files: The list of PropertyFiles' to be generated.
+    needed_property_files: The list of PropertyFiles to be generated. Defaults to (AbOtaPropertyFiles(), StreamingPropertyFiles()).
+    package_key: The key used to sign this OTA package.
+    pw: Password for package_key.
   """
+  no_signing = package_key is None
+
+  if needed_property_files is None:
+    # AbOtaPropertyFiles intends to replace StreamingPropertyFiles, as it covers
+    # all the info of the latter. However, system updaters and OTA servers need to
+    # take time to switch to the new flag. We keep both of the flags for
+    # P-timeframe, and will remove StreamingPropertyFiles in a later release.
+    needed_property_files = (
+        AbOtaPropertyFiles(),
+        StreamingPropertyFiles(),
+    )
 
   def ComputeAllPropertyFiles(input_file, needed_property_files):
     # Write the current metadata entry with placeholders.
@@ -83,11 +96,11 @@
     WriteMetadata(metadata, output_zip)
     ZipClose(output_zip)
 
-    if OPTIONS.no_signing:
+    if no_signing:
       return input_file
 
     prelim_signing = MakeTempFile(suffix='.zip')
-    SignOutput(input_file, prelim_signing)
+    SignOutput(input_file, prelim_signing, package_key, pw)
     return prelim_signing
 
   def FinalizeAllPropertyFiles(prelim_signing, needed_property_files):
@@ -122,10 +135,10 @@
   ZipClose(output_zip)
 
   # Re-sign the package after updating the metadata entry.
-  if OPTIONS.no_signing:
+  if no_signing:
     shutil.copy(prelim_signing, output_file)
   else:
-    SignOutput(prelim_signing, output_file)
+    SignOutput(prelim_signing, output_file, package_key, pw)
 
   # Reopen the final signed zip to double check the streaming metadata.
   with zipfile.ZipFile(output_file, allowZip64=True) as output_zip:
@@ -600,10 +613,13 @@
     return []
 
 
-def SignOutput(temp_zip_name, output_zip_name):
-  pw = OPTIONS.key_passwords[OPTIONS.package_key]
+def SignOutput(temp_zip_name, output_zip_name, package_key=None, pw=None):
+  if package_key is None:
+    package_key = OPTIONS.package_key
+  if pw is None and OPTIONS.key_passwords:
+    pw = OPTIONS.key_passwords[package_key]
 
-  SignFile(temp_zip_name, output_zip_name, OPTIONS.package_key, pw,
+  SignFile(temp_zip_name, output_zip_name, package_key, pw,
            whole_file=True)
 
 
@@ -715,7 +731,7 @@
   SECONDARY_PAYLOAD_BIN = 'secondary/payload.bin'
   SECONDARY_PAYLOAD_PROPERTIES_TXT = 'secondary/payload_properties.txt'
 
-  def __init__(self, secondary=False):
+  def __init__(self, secondary=False, wipe_user_data=False):
     """Initializes a Payload instance.
 
     Args:
@@ -724,6 +740,7 @@
     self.payload_file = None
     self.payload_properties = None
     self.secondary = secondary
+    self.wipe_user_data = wipe_user_data
 
   def _Run(self, cmd):  # pylint: disable=no-self-use
     # Don't pipe (buffer) the output if verbose is set. Let
@@ -785,8 +802,8 @@
     self._Run(cmd)
 
     # 2. Sign the hashes.
-    signed_payload_sig_file = payload_signer.Sign(payload_sig_file)
-    signed_metadata_sig_file = payload_signer.Sign(metadata_sig_file)
+    signed_payload_sig_file = payload_signer.SignHashFile(payload_sig_file)
+    signed_metadata_sig_file = payload_signer.SignHashFile(metadata_sig_file)
 
     # 3. Insert the signatures back into the payload file.
     signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
@@ -799,24 +816,7 @@
            "--payload_signature_file", signed_payload_sig_file]
     self._Run(cmd)
 
-    # 4. Dump the signed payload properties.
-    properties_file = common.MakeTempFile(prefix="payload-properties-",
-                                          suffix=".txt")
-    cmd = ["brillo_update_payload", "properties",
-           "--payload", signed_payload_file,
-           "--properties_file", properties_file]
-    self._Run(cmd)
-
-    if self.secondary:
-      with open(properties_file, "a") as f:
-        f.write("SWITCH_SLOT_ON_REBOOT=0\n")
-
-    if OPTIONS.wipe_user_data:
-      with open(properties_file, "a") as f:
-        f.write("POWERWASH=1\n")
-
     self.payload_file = signed_payload_file
-    self.payload_properties = properties_file
 
   def WriteToZip(self, output_zip):
     """Writes the payload to the given zip.
@@ -825,7 +825,23 @@
       output_zip: The output ZipFile instance.
     """
     assert self.payload_file is not None
-    assert self.payload_properties is not None
+    # 4. Dump the signed payload properties.
+    properties_file = common.MakeTempFile(prefix="payload-properties-",
+                                          suffix=".txt")
+    cmd = ["brillo_update_payload", "properties",
+           "--payload", self.payload_file,
+           "--properties_file", properties_file]
+    self._Run(cmd)
+
+    if self.secondary:
+      with open(properties_file, "a") as f:
+        f.write("SWITCH_SLOT_ON_REBOOT=0\n")
+
+    if self.wipe_user_data:
+      with open(properties_file, "a") as f:
+        f.write("POWERWASH=1\n")
+
+    self.payload_properties = properties_file
 
     if self.secondary:
       payload_arcname = PayloadGenerator.SECONDARY_PAYLOAD_BIN
@@ -946,6 +962,6 @@
     manifest_size = header[2]
     metadata_signature_size = header[3]
     metadata_total = 24 + manifest_size + metadata_signature_size
-    assert metadata_total < payload_size
+    assert metadata_total <= payload_size
 
     return (payload_offset, metadata_total)
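
Two behavioral shifts in ota_utils.py are worth calling out: FinalizeMetadata now treats a missing package_key as "do not sign" instead of reading OPTIONS.no_signing, and PayloadGenerator defers the "brillo_update_payload properties" step (including the POWERWASH and SWITCH_SLOT_ON_REBOOT markers) to WriteToZip. A minimal usage sketch mirroring the updated tests further below (out_path and target_file are hypothetical placeholders; zipfile is assumed to be imported):

# Sketch only: payload_properties exists only after WriteToZip.
payload = PayloadGenerator(secondary=False, wipe_user_data=True)
payload.Generate(target_file)
payload.Sign(PayloadSigner())
with zipfile.ZipFile(out_path, "w") as zfp:
    payload.WriteToZip(zfp)            # runs "brillo_update_payload properties"
with open(payload.payload_properties) as fp:
    assert "POWERWASH=1" in fp.read()  # now driven by the constructor argument
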
diff --git a/tools/releasetools/payload_signer.py b/tools/releasetools/payload_signer.py
index 6a643de..4f342ac 100644
--- a/tools/releasetools/payload_signer.py
+++ b/tools/releasetools/payload_signer.py
@@ -81,7 +81,40 @@
                 signature_size)
     return int(signature_size)
 
-  def Sign(self, in_file):
+  @staticmethod
+  def _Run(cmd):
+    common.RunAndCheckOutput(cmd, stdout=None, stderr=None)
+
+  def SignPayload(self, unsigned_payload):
+    """Signs an unsigned payload and returns the signed payload filename."""
+    # 1. Generate hashes of the payload and metadata files.
+    payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+    metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+    cmd = ["brillo_update_payload", "hash",
+           "--unsigned_payload", unsigned_payload,
+           "--signature_size", str(self.maximum_signature_size),
+           "--metadata_hash_file", metadata_sig_file,
+           "--payload_hash_file", payload_sig_file]
+    self._Run(cmd)
+
+    # 2. Sign the hashes.
+    signed_payload_sig_file = self.SignHashFile(payload_sig_file)
+    signed_metadata_sig_file = self.SignHashFile(metadata_sig_file)
+
+    # 3. Insert the signatures back into the payload file.
+    signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
+                                              suffix=".bin")
+    cmd = ["brillo_update_payload", "sign",
+           "--unsigned_payload", unsigned_payload,
+           "--payload", signed_payload_file,
+           "--signature_size", str(self.maximum_signature_size),
+           "--metadata_signature_file", signed_metadata_sig_file,
+           "--payload_signature_file", signed_payload_sig_file]
+    self._Run(cmd)
+    return signed_payload_file
+
+
+  def SignHashFile(self, in_file):
     """Signs the given input file. Returns the output filename."""
     out_file = common.MakeTempFile(prefix="signed-", suffix=".bin")
     cmd = [self.signer] + self.signer_args + ['-in', in_file, '-out', out_file]
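
In payload_signer.py the old Sign() is renamed to SignHashFile() without changing its behavior (sign a single hash file with the configured signer), and the new SignPayload() bundles the full hash, sign, and re-insert sequence around brillo_update_payload so callers outside PayloadGenerator can sign a payload in one call. A short usage sketch (the input paths are hypothetical):

# Sketch only: the two entry points after the split.
signer = PayloadSigner(OPTIONS.package_key, OPTIONS.private_key_suffix)
signed_payload = signer.SignPayload("unsigned-payload.bin")   # hash, sign, re-insert
signed_hashes = signer.SignHashFile("metadata-hash.bin")      # the old Sign() behavior
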
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 161bec3..ad0f7a8 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -1030,7 +1030,7 @@
         0, proc.returncode,
         'Failed to run brillo_update_payload:\n{}'.format(stdoutdata))
 
-    signed_metadata_sig_file = payload_signer.Sign(metadata_sig_file)
+    signed_metadata_sig_file = payload_signer.SignHashFile(metadata_sig_file)
 
     # Finally we can compare the two signatures.
     with open(signed_metadata_sig_file, 'rb') as verify_fp:
@@ -1170,7 +1170,7 @@
   def test_Sign(self):
     payload_signer = PayloadSigner()
     input_file = os.path.join(self.testdata_dir, self.SIGFILE)
-    signed_file = payload_signer.Sign(input_file)
+    signed_file = payload_signer.SignHashFile(input_file)
 
     verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
     self._assertFilesEqual(verify_file, signed_file)
@@ -1184,7 +1184,7 @@
     payload_signer = PayloadSigner(
         OPTIONS.package_key, OPTIONS.private_key_suffix, payload_signer="openssl")
     input_file = os.path.join(self.testdata_dir, self.SIGFILE)
-    signed_file = payload_signer.Sign(input_file)
+    signed_file = payload_signer.SignHashFile(input_file)
 
     verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
     self._assertFilesEqual(verify_file, signed_file)
@@ -1199,7 +1199,7 @@
     payload_signer = PayloadSigner(
         OPTIONS.package_key, OPTIONS.private_key_suffix, payload_signer=external_signer)
     input_file = os.path.join(self.testdata_dir, self.SIGFILE)
-    signed_file = payload_signer.Sign(input_file)
+    signed_file = payload_signer.SignHashFile(input_file)
 
     verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
     self._assertFilesEqual(verify_file, signed_file)
@@ -1222,7 +1222,7 @@
   @staticmethod
   def _create_payload_full(secondary=False):
     target_file = construct_target_files(secondary)
-    payload = PayloadGenerator(secondary)
+    payload = PayloadGenerator(secondary, OPTIONS.wipe_user_data)
     payload.Generate(target_file)
     return payload
 
@@ -1295,6 +1295,9 @@
     common.OPTIONS.wipe_user_data = True
     payload = self._create_payload_full()
     payload.Sign(PayloadSigner())
+    with tempfile.NamedTemporaryFile() as fp:
+      with zipfile.ZipFile(fp, "w") as zfp:
+        payload.WriteToZip(zfp)
 
     with open(payload.payload_properties) as properties_fp:
       self.assertIn("POWERWASH=1", properties_fp.read())
@@ -1303,6 +1306,9 @@
   def test_Sign_secondary(self):
     payload = self._create_payload_full(secondary=True)
     payload.Sign(PayloadSigner())
+    with tempfile.NamedTemporaryFile() as fp:
+      with zipfile.ZipFile(fp, "w") as zfp:
+        payload.WriteToZip(zfp)
 
     with open(payload.payload_properties) as properties_fp:
       self.assertIn("SWITCH_SLOT_ON_REBOOT=0", properties_fp.read())
@@ -1338,22 +1344,6 @@
         self.assertEqual(zipfile.ZIP_STORED, entry_info.compress_type)
 
   @test_utils.SkipIfExternalToolsUnavailable()
-  def test_WriteToZip_unsignedPayload(self):
-    """Unsigned payloads should not be allowed to be written to zip."""
-    payload = self._create_payload_full()
-
-    output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
-      self.assertRaises(AssertionError, payload.WriteToZip, output_zip)
-
-    # Also test with incremental payload.
-    payload = self._create_payload_incremental()
-
-    output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
-      self.assertRaises(AssertionError, payload.WriteToZip, output_zip)
-
-  @test_utils.SkipIfExternalToolsUnavailable()
   def test_WriteToZip_secondary(self):
     payload = self._create_payload_full(secondary=True)
     payload.Sign(PayloadSigner())