Merge "Check that sum of DAP groups is smaller than super"
diff --git a/core/Makefile b/core/Makefile
index bab0661..191c0eb 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -3987,7 +3987,8 @@
     $(if $(DEVICE_MANIFEST_SKUS),,EMPTY_VENDOR_SKU_PLACEHOLDER)) \
   $(DEVICE_MANIFEST_SKUS)
 $(check_vintf_compatible_log): $(HOST_OUT_EXECUTABLES)/checkvintf $(check_vintf_compatible_deps)
-	@echo -n -e 'Deps: \n  ' > $@
+	@echo "PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS=$(PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS)" > $@
+	@echo -n -e 'Deps: \n  ' >> $@
 	@sed 's/ /\n  /g' <<< "$(PRIVATE_CHECK_VINTF_DEPS)" >> $@
 	@echo -n -e 'Args: \n  ' >> $@
 	@cat <<< "$(PRIVATE_CHECK_VINTF_ARGS)" >> $@
@@ -4043,7 +4044,7 @@
   $(CHECK_PARTITION_SIZES) $(if $(2),--logfile $(2),-v) $(1)
 endef
 
-check_all_partition_sizes_log := $(call intermediates-dir-for,PACKAGING,check-all-partition-sizes)/check_all_partition_sizes_log
+check_all_partition_sizes_log := $(call intermediates-dir-for,PACKAGING,check-all-partition-sizes)/check_all_partition_sizes.log
 droid_targets: $(check_all_partition_sizes_log)
 $(call dist-for-goals, droid_targets, $(check_all_partition_sizes_log))
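
A note on the commit subject: the "sum of DAP groups" check verifies that the combined size of all dynamic partition groups fits inside the super partition; the check_all_partition_sizes step above runs that tool (via CHECK_PARTITION_SIZES) and now dists its log under a .log name. A minimal sketch of the invariant, with invented group names and sizes (the real accounting is more involved than this):

    # Illustrative only: names and sizes are made up.
    def check_dap_groups_fit_super(group_sizes, super_size):
        total = sum(group_sizes.values())
        if total > super_size:
            raise ValueError(
                "sum of dynamic partition groups (%d) exceeds super (%d)"
                % (total, super_size))

    check_dap_groups_fit_super(
        {"example_dynamic_partitions": 4 * 1024**3},  # one 4 GiB group
        super_size=8 * 1024**3)                       # 8 GiB super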
 
diff --git a/core/app_prebuilt_internal.mk b/core/app_prebuilt_internal.mk
index d47930c..829a640 100644
--- a/core/app_prebuilt_internal.mk
+++ b/core/app_prebuilt_internal.mk
@@ -113,19 +113,29 @@
 
 my_enforced_uses_libraries :=
 ifdef LOCAL_ENFORCE_USES_LIBRARIES
+  my_verify_script := build/soong/scripts/manifest_check.py
+  my_uses_libs := $(patsubst %,--uses-library %,$(LOCAL_USES_LIBRARIES))
+  my_optional_uses_libs := $(patsubst %,--optional-uses-library %, \
+    $(LOCAL_OPTIONAL_USES_LIBRARIES))
+  my_relax_check := $(if $(filter true,$(RELAX_USES_LIBRARY_CHECK)), \
+    --enforce-uses-libraries-relax,)
   my_enforced_uses_libraries := $(intermediates.COMMON)/enforce_uses_libraries.status
-  $(my_enforced_uses_libraries): PRIVATE_USES_LIBRARIES := $(LOCAL_USES_LIBRARIES)
-  $(my_enforced_uses_libraries): PRIVATE_OPTIONAL_USES_LIBRARIES := $(LOCAL_OPTIONAL_USES_LIBRARIES)
-  $(my_enforced_uses_libraries): PRIVATE_RELAX_CHECK := $(RELAX_USES_LIBRARY_CHECK)
-  $(my_enforced_uses_libraries): $(BUILD_SYSTEM)/verify_uses_libraries.sh $(AAPT)
+  $(my_enforced_uses_libraries): PRIVATE_USES_LIBRARIES := $(my_uses_libs)
+  $(my_enforced_uses_libraries): PRIVATE_OPTIONAL_USES_LIBRARIES := $(my_optional_uses_libs)
+  $(my_enforced_uses_libraries): PRIVATE_RELAX_CHECK := $(my_relax_check)
+  $(my_enforced_uses_libraries): $(AAPT)
+  $(my_enforced_uses_libraries): $(my_verify_script)
   $(my_enforced_uses_libraries): $(my_prebuilt_src_file)
 	@echo Verifying uses-libraries: $<
 	rm -f $@
-	aapt_binary=$(AAPT) \
-	  uses_library_names="$(strip $(PRIVATE_USES_LIBRARIES))" \
-	  optional_uses_library_names="$(strip $(PRIVATE_OPTIONAL_USES_LIBRARIES))" \
-	  relax_check="$(strip $(PRIVATE_RELAX_CHECK))" \
-	  $(BUILD_SYSTEM)/verify_uses_libraries.sh $< $@
+	$(my_verify_script) \
+	  --enforce-uses-libraries \
+	  --enforce-uses-libraries-status $@ \
+	  --aapt $(AAPT) \
+	  $(PRIVATE_USES_LIBRARIES) \
+	  $(PRIVATE_OPTIONAL_USES_LIBRARIES) \
+	  $(PRIVATE_RELAX_CHECK) \
+	  $<
   $(built_module) : $(my_enforced_uses_libraries)
 endif
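
The environment-variable driven verify_uses_libraries.sh (deleted further down in this patch) is replaced by build/soong/scripts/manifest_check.py, which receives the declared libraries as repeated --uses-library / --optional-uses-library flags. Conceptually the check is unchanged: read the uses-library entries out of the APK's badging and compare them, in order, against what the build declares. A rough sketch of that idea, not the actual manifest_check.py implementation:

    import re
    import subprocess

    def manifest_uses_libraries(aapt, apk):
        # Same parsing idea as the deleted shell script.
        badging = subprocess.run([aapt, "dump", "badging", apk], check=True,
                                 capture_output=True, text=True).stdout
        required = re.findall(r"^uses-library:'(.*)'$", badging, re.M)
        optional = re.findall(r"^uses-library-not-required:'(.*)'$", badging, re.M)
        return required, optional

    def verify(aapt, apk, declared, declared_optional, relax=False):
        required, optional = manifest_uses_libraries(aapt, apk)
        ok = (required == declared and optional == declared_optional)
        if not ok and not relax:
            raise SystemExit("uses-library mismatch in %s" % apk)
        return ok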
 
diff --git a/core/board_config.mk b/core/board_config.mk
index 245a639..57363fb 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -223,7 +223,10 @@
 .KATI_READONLY := $(_board_strip_readonly_list)
 
 INTERNAL_KERNEL_CMDLINE := $(BOARD_KERNEL_CMDLINE)
-INTERNAL_BOOTCONFIG := $(BOARD_BOOTCONFIG)
+ifneq (,$(BOARD_BOOTCONFIG))
+  INTERNAL_KERNEL_CMDLINE += bootconfig
+  INTERNAL_BOOTCONFIG := $(BOARD_BOOTCONFIG)
+endif
 
 ifneq ($(filter %64,$(TARGET_ARCH)),)
   TARGET_IS_64_BIT := true
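
When BOARD_BOOTCONFIG is set, the literal parameter "bootconfig" is now also appended to the kernel command line; that parameter is what tells the kernel to parse the boot config blob packaged with the boot/vendor_boot image. A small sketch of the resulting values, using made-up board settings:

    # Hypothetical board values; only the composition mirrors the make logic.
    board_kernel_cmdline = "console=ttyEXAMPLE0,115200n8"
    board_bootconfig = "androidboot.hardware=example"

    internal_kernel_cmdline = board_kernel_cmdline
    internal_bootconfig = ""
    if board_bootconfig:
        internal_kernel_cmdline += " bootconfig"
        internal_bootconfig = board_bootconfig

    assert internal_kernel_cmdline.endswith(" bootconfig")
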
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index f39b84a..228bad6 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -153,12 +153,6 @@
   my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
 endif
 
-# Disable CFI for arm32 (b/35157333).
-ifneq ($(filter arm,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
-  my_sanitize := $(filter-out cfi,$(my_sanitize))
-  my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
-endif
-
 # Also disable CFI if ASAN is enabled.
 ifneq ($(filter address,$(my_sanitize)),)
   my_sanitize := $(filter-out cfi,$(my_sanitize))
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 9fdf7b8..dfe4dbf 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -158,6 +158,11 @@
   $(call add_json_bool,$(module),true))
 $(call end_json_map)
 
+$(call add_json_list, VendorSnapshotDirsIncluded,        $(VENDOR_SNAPSHOT_DIRS_INCLUDED))
+$(call add_json_list, VendorSnapshotDirsExcluded,        $(VENDOR_SNAPSHOT_DIRS_EXCLUDED))
+$(call add_json_list, RecoverySnapshotDirsIncluded,      $(RECOVERY_SNAPSHOT_DIRS_INCLUDED))
+$(call add_json_list, RecoverySnapshotDirsExcluded,      $(RECOVERY_SNAPSHOT_DIRS_EXCLUDED))
+
 $(call add_json_bool, Treble_linker_namespaces,          $(filter true,$(PRODUCT_TREBLE_LINKER_NAMESPACES)))
 $(call add_json_bool, Enforce_vintf_manifest,            $(filter true,$(PRODUCT_ENFORCE_VINTF_MANIFEST)))
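
These add_json_list calls export the snapshot directory filters into soong.variables so Soong can see them. For a product that sets the corresponding make variables, the generated fragment would look roughly like this (directory names invented):

    import json

    fragment = {
        "VendorSnapshotDirsIncluded": ["vendor/example"],
        "VendorSnapshotDirsExcluded": ["vendor/example/tests"],
        "RecoverySnapshotDirsIncluded": ["vendor/example"],
        "RecoverySnapshotDirsExcluded": [],
    }
    print(json.dumps(fragment, indent=2))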
 
diff --git a/core/target_test_internal.mk b/core/target_test_internal.mk
index 40b2ba8..5745451 100644
--- a/core/target_test_internal.mk
+++ b/core/target_test_internal.mk
@@ -39,3 +39,9 @@
 LOCAL_MODULE_RELATIVE_PATH := $(LOCAL_MODULE)
 endif
 endif
+
+# Implicitly run this test under MTE SYNC for aarch64 binaries. This is a no-op
+# on non-MTE hardware.
+ifneq (,$(filter arm64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)))
+  LOCAL_WHOLE_STATIC_LIBRARIES += note_memtag_heap_sync
+endif
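
note_memtag_heap_sync only contributes an ELF note asking the loader to enable MTE in synchronous mode for the process, which is why linking it into every arm64 test is safe on non-MTE hardware. To confirm a built test binary picked the note up, something like the following can be used (the section name is an assumption; check what readelf reports on your build):

    import subprocess

    headers = subprocess.run(["readelf", "-S", "my_arm64_test"], check=True,
                             capture_output=True, text=True).stdout
    # Expect an Android memtag note section, e.g. ".note.android.memtag".
    print(".note.android.memtag" in headers)
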
diff --git a/core/verify_uses_libraries.sh b/core/verify_uses_libraries.sh
deleted file mode 100755
index 1bd0a2c..0000000
--- a/core/verify_uses_libraries.sh
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/bin/bash
-#
-# Copyright (C) 2018 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# apt_binary is $(AAPT) in the build.
-
-# Parse sdk, targetSdk, and uses librares in the APK, then cross reference against build specified ones.
-
-set -e
-local_apk=$1
-status_file=$2
-badging=$(${aapt_binary} dump badging "${local_apk}")
-export sdk_version=$(echo "${badging}" | grep "sdkVersion" | sed -n "s/sdkVersion:'\(.*\)'/\1/p")
-# Export target_sdk_version to the caller.
-export target_sdk_version=$(echo "${badging}" | grep "targetSdkVersion" | sed -n "s/targetSdkVersion:'\(.*\)'/\1/p")
-uses_libraries=$(echo "${badging}" | grep "uses-library" | sed -n "s/uses-library:'\(.*\)'/\1/p")
-optional_uses_libraries=$(echo "${badging}" | grep "uses-library-not-required" | sed -n "s/uses-library-not-required:'\(.*\)'/\1/p")
-
-errmsg=
-
-# Verify that the uses libraries match exactly.
-# Currently we validate the ordering of the libraries since it matters for resolution.
-single_line_libs=$(echo "${uses_libraries}" | tr '\n' ' ' | awk '{$1=$1}1')
-if [[ "${single_line_libs}" != "${uses_library_names}" ]]; then
-  errmsg="LOCAL_USES_LIBRARIES (${uses_library_names}) do not match (${single_line_libs}) in manifest for ${local_apk}"
-fi
-
-# Verify that the optional uses libraries match exactly.
-single_line_optional_libs=$(echo "${optional_uses_libraries}" | tr '\n' ' ' | awk '{$1=$1}1')
-if [[ "${single_line_optional_libs}" != "${optional_uses_library_names}" ]]; then
-  errmsg="LOCAL_OPTIONAL_USES_LIBRARIES (${optional_uses_library_names}) do not match (${single_line_optional_libs}) in manifest for ${local_apk}"
-fi
-
-if [[ ! -z "${errmsg}" ]]; then
-  echo "${errmsg}" > "${status_file}"
-  if [[ "${relax_check}" != true ]]; then
-    # fail immediately
-    echo "${errmsg}"
-    exit 1
-  fi
-else
-  touch "${status_file}"
-fi
diff --git a/envsetup.sh b/envsetup.sh
index 58fcd3b..344a01a 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -23,6 +23,7 @@
 - ggrep:      Greps on all local Gradle files.
 - gogrep:     Greps on all local Go files.
 - jgrep:      Greps on all local Java files.
+- ktgrep:     Greps on all local Kotlin files.
 - resgrep:    Greps on all local res/*.xml files.
 - mangrep:    Greps on all local AndroidManifest.xml files.
 - mgrep:      Greps on all local Makefiles and *.bp files.
@@ -1002,7 +1003,7 @@
     Darwin)
         function sgrep()
         {
-            find -E . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.(c|h|cc|cpp|hpp|S|java|xml|sh|mk|aidl|vts|proto)' \
+            find -E . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.(c|h|cc|cpp|hpp|S|java|kt|xml|sh|mk|aidl|vts|proto)' \
                 -exec grep --color -n "$@" {} +
         }
 
@@ -1010,7 +1011,7 @@
     *)
         function sgrep()
         {
-            find . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.\(c\|h\|cc\|cpp\|hpp\|S\|java\|xml\|sh\|mk\|aidl\|vts\|proto\)' \
+            find . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.\(c\|h\|cc\|cpp\|hpp\|S\|java\|kt\|xml\|sh\|mk\|aidl\|vts\|proto\)' \
                 -exec grep --color -n "$@" {} +
         }
         ;;
@@ -1045,6 +1046,12 @@
         -exec grep --color -n "$@" {} +
 }
 
+function ktgrep()
+{
+    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.kt" \
+        -exec grep --color -n "$@" {} +
+}
+
 function cgrep()
 {
     find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f \( -name '*.c' -o -name '*.cc' -o -name '*.cpp' -o -name '*.h' -o -name '*.hpp' \) \
@@ -1093,7 +1100,7 @@
 
         function treegrep()
         {
-            find -E . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.(c|h|cpp|hpp|S|java|xml)' \
+            find -E . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.(c|h|cpp|hpp|S|java|kt|xml)' \
                 -exec grep --color -n -i "$@" {} +
         }
 
@@ -1107,7 +1114,7 @@
 
         function treegrep()
         {
-            find . -name .repo -prune -o -name .git -prune -o -regextype posix-egrep -iregex '.*\.(c|h|cpp|hpp|S|java|xml)' -type f \
+            find . -name .repo -prune -o -name .git -prune -o -regextype posix-egrep -iregex '.*\.(c|h|cpp|hpp|S|java|kt|xml)' -type f \
                 -exec grep --color -n -i "$@" {} +
         }
 
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index b588c78..7092031 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -140,3 +140,10 @@
 PRODUCT_SYSTEM_PROPERTIES += \
     ro.iorapd.enable?=true
 
+# Enable madvising of whole .art, .odex and .vdex files with MADV_WILLNEED.
+# The value is the limit, in bytes, on how much of each file is madvised;
+# UINT_MAX means the whole file.
+PRODUCT_SYSTEM_PROPERTIES += \
+    dalvik.vm.madvise.vdexfile.size=4294967295 \
+    dalvik.vm.madvise.odexfile.size=4294967295 \
+    dalvik.vm.madvise.artfile.size=4294967295
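
The three values are UINT_MAX, i.e. the limit is effectively "madvise the entire file" for any realistically sized .art/.odex/.vdex:

    UINT_MAX = 2**32 - 1
    assert UINT_MAX == 4294967295
    print("%d bytes ~= %.1f GiB" % (UINT_MAX, UINT_MAX / 2**30))  # ~4.0 GiB
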
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
index 83f0a4b..cedad5b 100644
--- a/target/product/security/Android.mk
+++ b/target/product/security/Android.mk
@@ -65,7 +65,7 @@
 include $(BUILD_SYSTEM)/base_rules.mk
 $(LOCAL_BUILT_MODULE): PRIVATE_CERT := $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
 $(LOCAL_BUILT_MODULE): $(SOONG_ZIP) $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
-	$(SOONG_ZIP) -o $@ -j -f $(PRIVATE_CERT)
+	$(SOONG_ZIP) -o $@ -j -symlinks=false -f $(PRIVATE_CERT)
 
 
 #######################################
@@ -88,5 +88,5 @@
 	    $(SOONG_ZIP) \
 	    $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem \
 	    $(extra_recovery_keys)
-	$(SOONG_ZIP) -o $@ -j \
+	$(SOONG_ZIP) -o $@ -j -symlinks=false \
 	    $(foreach key_file, $(PRIVATE_CERT) $(PRIVATE_EXTRA_RECOVERY_KEYS), -f $(key_file))
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 6d88249..3b0c070 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -523,6 +523,23 @@
     ],
 }
 
+python_binary_host {
+    name: "verity_utils",
+    defaults: ["releasetools_binary_defaults"],
+    srcs: [
+        "verity_utils.py",
+    ],
+    libs: [
+        "releasetools_common",
+    ],
+    required: [
+        "append2simg",
+        "build_verity_metadata",
+        "build_verity_tree",
+        "fec",
+    ],
+}
+
 //
 // Tests.
 //
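
The new module makes verity_utils.py buildable as a standalone host tool; per the usage string in the main() added further down, it takes a properties file and an image path. A placeholder invocation (file names are purely illustrative; see the sketch after the verity_utils.py hunks for the properties file):

    import subprocess

    subprocess.check_call(["verity_utils", "image.props", "vendor_dlkm.img"])
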
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 3dcabd5..3d0fc67 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -211,6 +211,10 @@
       Use the specified custom_image to update custom_partition when generating
       an A/B OTA package. e.g. "--custom_image oem=oem.img --custom_image
       cus=cus_test.img"
+
+  --disable_vabc
+      Disable Virtual A/B Compression, for builds that have compression enabled
+      by default.
 """
 
 from __future__ import print_function
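
The new flag is handled in the option parser further down in this file, where it sets OPTIONS.disable_vabc. A placeholder invocation that builds a full A/B package with compression disabled (file and key paths are illustrative):

    import subprocess

    subprocess.check_call([
        "ota_from_target_files", "--disable_vabc",
        "-k", "build/make/target/product/security/testkey",
        "target-files.zip", "ota.zip"])
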
@@ -293,8 +297,6 @@
     'vendor_boot']
 
 
-
-
 class PayloadSigner(object):
   """A class that wraps the payload signing works.
 
@@ -761,10 +763,12 @@
   common.ZipDelete(target_file, POSTINSTALL_CONFIG)
   return target_file
 
+
 def ParseInfoDict(target_file_path):
   with zipfile.ZipFile(target_file_path, 'r', allowZip64=True) as zfp:
     return common.LoadInfoDict(zfp)
 
+
 def GetTargetFilesZipForPartialUpdates(input_file, ab_partitions):
   """Returns a target-files.zip for partial ota update package generation.
 
@@ -885,7 +889,7 @@
   with open(new_ab_partitions, 'w') as f:
     for partition in ab_partitions:
       if (partition in dynamic_partition_list and
-          partition not in super_block_devices):
+              partition not in super_block_devices):
         logger.info("Dropping %s from ab_partitions.txt", partition)
         continue
       f.write(partition + "\n")
@@ -959,32 +963,37 @@
 
   return target_file
 
+
 def GeneratePartitionTimestampFlags(partition_state):
   partition_timestamps = [
       part.partition_name + ":" + part.version
       for part in partition_state]
   return ["--partition_timestamps", ",".join(partition_timestamps)]
 
+
 def GeneratePartitionTimestampFlagsDowngrade(
-    pre_partition_state, post_partition_state):
+        pre_partition_state, post_partition_state):
   assert pre_partition_state is not None
   partition_timestamps = {}
   for part in pre_partition_state:
     partition_timestamps[part.partition_name] = part.version
   for part in post_partition_state:
     partition_timestamps[part.partition_name] = \
-      max(part.version, partition_timestamps[part.partition_name])
+        max(part.version, partition_timestamps[part.partition_name])
   return [
       "--partition_timestamps",
-      ",".join([key + ":" + val for (key, val) in partition_timestamps.items()])
+      ",".join([key + ":" + val for (key, val)
+                in partition_timestamps.items()])
   ]
 
+
 def IsSparseImage(filepath):
   with open(filepath, 'rb') as fp:
     # Magic for android sparse image format
     # https://source.android.com/devices/bootloader/images
     return fp.read(4) == b'\x3A\xFF\x26\xED'
 
+
 def SupportsMainlineGkiUpdates(target_file):
   """Return True if the build supports MainlineGKIUpdates.
 
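GeneratePartitionTimestampFlags collapses the partition_state entries from the OTA metadata into a single --partition_timestamps argument. With stand-in values in place of the real protobuf entries:

    from collections import namedtuple

    Part = namedtuple("Part", ["partition_name", "version"])
    state = [Part("boot", "1626459269"), Part("system", "1626459269")]
    flags = ["--partition_timestamps",
             ",".join(p.partition_name + ":" + p.version for p in state)]
    assert flags == ["--partition_timestamps",
                     "boot:1626459269,system:1626459269"]
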
@@ -1023,6 +1032,7 @@
   pattern = re.compile(r"com\.android\.gki\..*\.apex")
   return pattern.search(output) is not None
 
+
 def GenerateAbOtaPackage(target_file, output_file, source_file=None):
   """Generates an Android OTA package that has A/B update payload."""
   # Stage the output zip package for package signing.
@@ -1043,7 +1053,7 @@
     source_info = common.BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
     vendor_prop = source_info.info_dict.get("vendor.build.prop")
     if vendor_prop and \
-        vendor_prop.GetProp("ro.virtual_ab.compression.enabled") == "true":
+            vendor_prop.GetProp("ro.virtual_ab.compression.enabled") == "true":
       # TODO(zhangkelvin) Remove this once FEC on VABC is supported
       logger.info("Virtual AB Compression enabled, disabling FEC")
       OPTIONS.disable_fec_computation = True
@@ -1099,7 +1109,8 @@
   additional_args += ["--max_timestamp", max_timestamp]
 
   if SupportsMainlineGkiUpdates(source_file):
-    logger.warning("Detected build with mainline GKI, include full boot image.")
+    logger.warning(
+        "Detected build with mainline GKI, include full boot image.")
     additional_args.extend(["--full_boot", "true"])
 
   payload.Generate(
@@ -1133,7 +1144,7 @@
   # into A/B OTA package.
   target_zip = zipfile.ZipFile(target_file, "r", allowZip64=True)
   if (target_info.get("verity") == "true" or
-      target_info.get("avb_enable") == "true"):
+          target_info.get("avb_enable") == "true"):
     care_map_list = [x for x in ["care_map.pb", "care_map.txt"] if
                      "META/" + x in target_zip.namelist()]
 
@@ -1153,7 +1164,7 @@
     apex_info_entry = target_zip.getinfo("META/apex_info.pb")
     with target_zip.open(apex_info_entry, "r") as zfp:
       common.ZipWriteStr(output_zip, "apex_info.pb", zfp.read(),
-                        compress_type=zipfile.ZIP_STORED)
+                         compress_type=zipfile.ZIP_STORED)
   except KeyError:
     logger.warning("target_file doesn't contain apex_info.pb %s", target_file)
 
@@ -1267,6 +1278,7 @@
       OPTIONS.disable_vabc = True
     elif o == "--spl_downgrade":
       OPTIONS.spl_downgrade = True
+      OPTIONS.wipe_user_data = True
     else:
       return False
     return True
@@ -1337,7 +1349,6 @@
     if OPTIONS.incremental_source is None:
       raise ValueError("Cannot generate downgradable full OTAs")
 
-
   # TODO(xunchang) for retrofit and partial updates, maybe we should rebuild the
   # target-file and reload the info_dict. So the info will be consistent with
   # the modified target-file.
@@ -1345,7 +1356,6 @@
   logger.info("--- target info ---")
   common.DumpInfoDict(OPTIONS.info_dict)
 
-
   # Load the source build dict if applicable.
   if OPTIONS.incremental_source is not None:
     OPTIONS.target_info_dict = OPTIONS.info_dict
@@ -1356,15 +1366,15 @@
 
   if OPTIONS.partial:
     OPTIONS.info_dict['ab_partitions'] = \
-      list(
-          set(OPTIONS.info_dict['ab_partitions']) & set(OPTIONS.partial)
-      )
+        list(
+            set(OPTIONS.info_dict['ab_partitions']) & set(OPTIONS.partial)
+        )
     if OPTIONS.source_info_dict:
       OPTIONS.source_info_dict['ab_partitions'] = \
-        list(
-            set(OPTIONS.source_info_dict['ab_partitions']) &
-            set(OPTIONS.partial)
-        )
+          list(
+              set(OPTIONS.source_info_dict['ab_partitions']) &
+              set(OPTIONS.partial)
+          )
 
   # Load OEM dicts if provided.
   OPTIONS.oem_dicts = _LoadOemDicts(OPTIONS.oem_source)
@@ -1373,7 +1383,7 @@
   # use_dynamic_partitions but target build does.
   if (OPTIONS.source_info_dict and
       OPTIONS.source_info_dict.get("use_dynamic_partitions") != "true" and
-      OPTIONS.target_info_dict.get("use_dynamic_partitions") == "true"):
+          OPTIONS.target_info_dict.get("use_dynamic_partitions") == "true"):
     if OPTIONS.target_info_dict.get("dynamic_partition_retrofit") != "true":
       raise common.ExternalError(
           "Expect to generate incremental OTA for retrofitting dynamic "
@@ -1390,7 +1400,7 @@
   allow_non_ab = OPTIONS.info_dict.get("allow_non_ab") == "true"
   if OPTIONS.force_non_ab:
     assert allow_non_ab,\
-      "--force_non_ab only allowed on devices that supports non-A/B"
+        "--force_non_ab only allowed on devices that supports non-A/B"
     assert ab_update, "--force_non_ab only allowed on A/B devices"
 
   generate_ab = not OPTIONS.force_non_ab and ab_update
@@ -1408,10 +1418,10 @@
     private_key_path = OPTIONS.package_key + OPTIONS.private_key_suffix
     if not os.path.exists(private_key_path):
       raise common.ExternalError(
-                        "Private key {} doesn't exist. Make sure you passed the"
-                        " correct key path through -k option".format(
-                          private_key_path)
-                          )
+          "Private key {} doesn't exist. Make sure you passed the"
+          " correct key path through -k option".format(
+              private_key_path)
+      )
 
   if OPTIONS.source_info_dict:
     source_build_prop = OPTIONS.source_info_dict["build.prop"]
@@ -1419,14 +1429,14 @@
     source_spl = source_build_prop.GetProp(SECURITY_PATCH_LEVEL_PROP_NAME)
     target_spl = target_build_prop.GetProp(SECURITY_PATCH_LEVEL_PROP_NAME)
     is_spl_downgrade = target_spl < source_spl
-    if is_spl_downgrade and not OPTIONS.spl_downgrade:
+    if is_spl_downgrade and not OPTIONS.spl_downgrade and not OPTIONS.downgrade:
       raise common.ExternalError(
-        "Target security patch level {} is older than source SPL {} applying "
-        "such OTA will likely cause device fail to boot. Pass --spl_downgrade "
-        "to override this check. This script expects security patch level to "
-        "be in format yyyy-mm-dd (e.x. 2021-02-05). It's possible to use "
-        "separators other than -, so as long as it's used consistenly across "
-        "all SPL dates".format(target_spl, source_spl))
+          "Target security patch level {} is older than source SPL {} applying "
+          "such OTA will likely cause device fail to boot. Pass --spl_downgrade "
+          "to override this check. This script expects security patch level to "
+          "be in format yyyy-mm-dd (e.x. 2021-02-05). It's possible to use "
+          "separators other than -, so as long as it's used consistenly across "
+          "all SPL dates".format(target_spl, source_spl))
     elif not is_spl_downgrade and OPTIONS.spl_downgrade:
       raise ValueError("--spl_downgrade specified but no actual SPL downgrade"
                        " detected. Please only pass in this flag if you want a"
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index fc83689..8faa2d1 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -14,6 +14,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+"""
+Signs a given image using avbtool
+
+Usage:  verity_utils properties_file output_image
+"""
+
 from __future__ import print_function
 
 import logging
@@ -31,6 +37,9 @@
 BLOCK_SIZE = common.BLOCK_SIZE
 FIXED_SALT = "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
 
+# From external/avb/avbtool.py
+MAX_VBMETA_SIZE = 64 * 1024
+MAX_FOOTER_SIZE = 4096
 
 class BuildVerityImageError(Exception):
   """An Exception raised during verity image building."""
@@ -714,3 +723,55 @@
         signing_args)
 
   return builder
+
+
+def GetDiskUsage(path):
+  """Returns the number of bytes that "path" occupies on host.
+
+  Args:
+    path: The directory or file to calculate size on.
+
+  Returns:
+    The number of bytes based on a 1K block_size.
+  """
+  cmd = ["du", "-b", "-k", "-s", path]
+  output = common.RunAndCheckOutput(cmd, verbose=False)
+  return int(output.split()[0]) * 1024
+
+
+def main(argv):
+  if len(argv) != 2:
+    print(__doc__)
+    sys.exit(1)
+
+  common.InitLogging()
+
+  dict_file = argv[0]
+  out_file = argv[1]
+
+  prop_dict = {}
+  with open(dict_file, 'r') as f:
+    for line in f:
+      line = line.strip()
+      if not line or line.startswith("#"):
+        continue
+      k, v = line.split("=", 1)
+      prop_dict[k] = v
+
+  builder = CreateVerityImageBuilder(prop_dict)
+
+  if "partition_size" not in prop_dict:
+    image_size = GetDiskUsage(out_file)
+    # make sure that the image is big enough to hold vbmeta and footer
+    image_size = image_size + (MAX_VBMETA_SIZE + MAX_FOOTER_SIZE)
+    size = builder.CalculateDynamicPartitionSize(image_size)
+    prop_dict["partition_size"] = size
+
+  builder.Build(out_file)
+
+
+if __name__ == '__main__':
+  try:
+    main(sys.argv[1:])
+  finally:
+    common.Cleanup()
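
main() above parses the properties file as plain key=value lines ('#' starts a comment) and hands the dict to CreateVerityImageBuilder; when partition_size is missing it is derived from the current size of the output image plus headroom for vbmeta and the footer, so the image must already exist at output_image. A hedged sketch of such a file; the property names are my assumption of what a hashtree-enabled image would carry, not a canonical list:

    lines = [
        "# avb hashtree settings for the image (names are assumptions)",
        "avb_hashtree_enable=true",
        "avb_avbtool=avbtool",
        "partition_name=vendor_dlkm",
        "partition_size=104857600",
    ]
    with open("image.props", "w") as f:
        f.write("\n".join(lines) + "\n")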