Merge "Add +/- buttons to expand/collapse warning categories."
diff --git a/core/Makefile b/core/Makefile
index 064af66..02bd4f7 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -1124,7 +1124,7 @@
 BUILT_SYSTEMIMAGE := $(systemimage_intermediates)/system.img
 
 # Create symlink /system/vendor to /vendor if necessary.
-ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
+ifdef BOARD_USES_VENDORIMAGE
 define create-system-vendor-symlink
 $(hide) if [ -d $(TARGET_OUT)/vendor ] && [ ! -h $(TARGET_OUT)/vendor ]; then \
   echo 'Non-symlink $(TARGET_OUT)/vendor detected!' 1>&2; \
@@ -1519,7 +1519,10 @@
 vendorimage-nodeps: | $(INTERNAL_USERIMAGES_DEPS)
 	$(build-vendorimage-target)
 
-endif # BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
+else ifdef BOARD_PREBUILT_VENDORIMAGE
+INSTALLED_VENDORIMAGE_TARGET := $(PRODUCT_OUT)/vendor.img
+$(eval $(call copy-one-file,$(BOARD_PREBUILT_VENDORIMAGE),$(INSTALLED_VENDORIMAGE_TARGET)))
+endif
 
 # -----------------------------------------------------------------
 # bring in the installer image generation defines if necessary
@@ -1902,6 +1905,10 @@
 	@# If breakpad symbols have been generated, add them to the zip.
 	$(hide) $(ACP) -r $(TARGET_OUT_BREAKPAD) $(zip_root)/BREAKPAD
 endif
+ifdef BOARD_PREBUILT_VENDORIMAGE
+	$(hide) mkdir -p $(zip_root)/IMAGES
+	$(hide) cp $(INSTALLED_VENDORIMAGE_TARGET) $(zip_root)/IMAGES/
+endif
 	@# Zip everything up, preserving symlinks and placing META/ files first to
 	@# help early validation of the .zip file while uploading it.
 	$(hide) (cd $(zip_root) && \
@@ -1920,7 +1927,7 @@
 endif
 	$(hide) (cd $(zip_root) && zip -qX ../$(notdir $@) META/*filesystem_config.txt)
 	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
-	    ./build/tools/releasetools/add_img_to_target_files -v -p $(HOST_OUT) $@
+	    ./build/tools/releasetools/add_img_to_target_files -a -v -p $(HOST_OUT) $@
 
 .PHONY: target-files-package
 target-files-package: $(BUILT_TARGET_FILES_PACKAGE)
@@ -2260,6 +2267,8 @@
 -include $(sort $(wildcard vendor/*/*/build/tasks/*.mk))
 -include $(sort $(wildcard device/*/*/build/tasks/*.mk))
 -include $(sort $(wildcard product/*/*/build/tasks/*.mk))
+# Also add test-specific tasks
+include $(sort $(wildcard platform_testing/build/tasks/*.mk))
 endif
 
 include $(BUILD_SYSTEM)/product-graph.mk
diff --git a/core/definitions.mk b/core/definitions.mk
index de1948f..5194918 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -1084,6 +1084,19 @@
     $< $(PRIVATE_HEADER_OUTPUT_DIR) $@
 endef
 
+## Given a .aidl file path, generate the rule to compile it to a .java file.
+# $(1): a .aidl source file
+# $(2): a directory to place the generated .java files in
+# $(3): name of a variable to add the path to the generated source file to
+#
+# You must call this with $(eval).
+define define-aidl-java-rule
+define-aidl-java-rule-src := $(patsubst %.aidl,%.java,$(subst ../,dotdot/,$(addprefix $(2)/,$(1))))
+$$(define-aidl-java-rule-src) : $(LOCAL_PATH)/$(1) $(AIDL)
+	$$(transform-aidl-to-java)
+$(3) += $$(define-aidl-java-rule-src)
+endef
+
 ## Given a .aidl file path generate the rule to compile it a .cpp file.
 # $(1): a .aidl source file
 # $(2): a directory to place the generated .cpp files in
@@ -2232,7 +2245,7 @@
 $1/classes.dex.toc: PRIVATE_INPUT_DEX_FILES := $1/classes*.dex
 $1/classes.dex.toc: $1/classes.dex $(DEXDUMP)
 	@echo Generating TOC: $$@
-	$(hide) $(DEXDUMP) -l xml $$(PRIVATE_INPUT_DEX_FILES) > $$@.tmp
+	$(hide) ANDROID_LOG_TAGS="*:e" $(DEXDUMP) -l xml $$(PRIVATE_INPUT_DEX_FILES) > $$@.tmp
 	$$(call commit-change-for-toc,$$@)
 endef
 
diff --git a/core/dex_preopt_libart_boot.mk b/core/dex_preopt_libart_boot.mk
index 69102d1..a4a5bd3 100644
--- a/core/dex_preopt_libart_boot.mk
+++ b/core/dex_preopt_libart_boot.mk
@@ -49,7 +49,8 @@
 	@rm -f $(dir $@)/*.art $(dir $@)/*.oat
 	@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.art
 	@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.oat
-	$(hide) $(DEX2OAT) --runtime-arg -Xms$(DEX2OAT_IMAGE_XMS) --runtime-arg -Xmx$(DEX2OAT_IMAGE_XMX) \
+	$(hide) ANDROID_LOG_TAGS="*:e" $(DEX2OAT) --runtime-arg -Xms$(DEX2OAT_IMAGE_XMS) \
+		--runtime-arg -Xmx$(DEX2OAT_IMAGE_XMX) \
 		--image-classes=$(PRELOADED_CLASSES) \
 		$(addprefix --dex-file=,$(LIBART_TARGET_BOOT_DEX_FILES)) \
 		$(addprefix --dex-location=,$(LIBART_TARGET_BOOT_DEX_LOCATIONS)) \
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 0a7472a..49ca154 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -173,6 +173,19 @@
 $(error TARGET_COPY_OUT_VENDOR must be either 'vendor' or 'system/vendor', seeing '$(TARGET_COPY_OUT_VENDOR)'.)
 endif
 PRODUCT_COPY_FILES := $(subst $(_vendor_path_placeholder),$(TARGET_COPY_OUT_VENDOR),$(PRODUCT_COPY_FILES))
+
+BOARD_USES_VENDORIMAGE :=
+ifdef BOARD_PREBUILT_VENDORIMAGE
+BOARD_USES_VENDORIMAGE := true
+endif
+ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
+BOARD_USES_VENDORIMAGE := true
+endif
+ifeq ($(TARGET_COPY_OUT_VENDOR),vendor)
+BOARD_USES_VENDORIMAGE := true
+else ifdef BOARD_USES_VENDORIMAGE
+$(error TARGET_COPY_OUT_VENDOR must be set to 'vendor' to use a vendor image)
+endif
 ###########################################
 
 
diff --git a/core/host_native_test.mk b/core/host_native_test.mk
index c6d6f52..2a6097d 100644
--- a/core/host_native_test.mk
+++ b/core/host_native_test.mk
@@ -19,8 +19,4 @@
 endif
 endif
 
-ifndef LOCAL_MODULE_RELATIVE_PATH
-LOCAL_MODULE_RELATIVE_PATH := $(LOCAL_MODULE)
-endif
-
 include $(BUILD_HOST_EXECUTABLE)
diff --git a/core/host_shared_test_lib.mk b/core/host_shared_test_lib.mk
index 2c2063d..ed7e23a 100644
--- a/core/host_shared_test_lib.mk
+++ b/core/host_shared_test_lib.mk
@@ -1,10 +1 @@
-##################################################
-## A thin wrapper around BUILD_HOST_SHARED_LIBRARY
-## Common flags for host native tests are added.
-##################################################
-
 $(error BUILD_HOST_SHARED_TEST_LIBRARY is obsolete)
-
-include $(BUILD_SYSTEM)/host_test_internal.mk
-
-include $(BUILD_HOST_SHARED_LIBRARY)
diff --git a/core/host_test_internal.mk b/core/host_test_internal.mk
index 70f011b..473815b 100644
--- a/core/host_test_internal.mk
+++ b/core/host_test_internal.mk
@@ -23,3 +23,7 @@
 ifdef LOCAL_MODULE_PATH_64
 $(error $(LOCAL_PATH): Do not set LOCAL_MODULE_PATH_64 when building test $(LOCAL_MODULE))
 endif
+
+ifndef LOCAL_MODULE_RELATIVE_PATH
+LOCAL_MODULE_RELATIVE_PATH := $(LOCAL_MODULE)
+endif
diff --git a/core/java.mk b/core/java.mk
index 912ce0a..159de5a 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -277,10 +277,9 @@
 ## AIDL: Compile .aidl files to .java
 ###########################################################
 aidl_sources := $(filter %.aidl,$(LOCAL_SRC_FILES))
+aidl_java_sources :=
 
 ifneq ($(strip $(aidl_sources)),)
-aidl_java_sources := $(patsubst %.aidl,%.java,$(addprefix $(intermediates.COMMON)/src/, $(aidl_sources)))
-aidl_sources := $(addprefix $(LOCAL_PATH)/, $(aidl_sources))
 
 aidl_preprocess_import :=
 ifdef LOCAL_SDK_VERSION
@@ -294,19 +293,17 @@
 # build against the platform.
 LOCAL_AIDL_INCLUDES += $(FRAMEWORKS_BASE_JAVA_SRC_DIRS)
 endif # LOCAL_SDK_VERSION
-$(aidl_java_sources): PRIVATE_AIDL_FLAGS := -b $(addprefix -p,$(aidl_preprocess_import)) -I$(LOCAL_PATH) -I$(LOCAL_PATH)/src $(addprefix -I,$(LOCAL_AIDL_INCLUDES))
 
-$(aidl_java_sources): $(intermediates.COMMON)/src/%.java: \
-        $(LOCAL_PATH)/%.aidl \
-        $(LOCAL_ADDITIONAL_DEPENDENCIES) \
-        $(AIDL) \
-        $(aidl_preprocess_import)
-	$(transform-aidl-to-java)
+$(foreach s,$(aidl_sources),\
+    $(eval $(call define-aidl-java-rule,$(s),$(intermediates.COMMON),aidl_java_sources)))
 $(foreach java,$(aidl_java_sources), \
     $(call include-depfile,$(java:%.java=%.P),$(java)))
 
-else
-aidl_java_sources :=
+$(aidl_java_sources) : $(LOCAL_ADDITIONAL_DEPENDENCIES) $(aidl_preprocess_import)
+
+$(aidl_java_sources): PRIVATE_AIDL_FLAGS := -b $(addprefix -p,$(aidl_preprocess_import)) -I$(LOCAL_PATH) -I$(LOCAL_PATH)/src $(addprefix -I,$(LOCAL_AIDL_INCLUDES))
+$(aidl_java_sources): PRIVATE_MODULE := $(LOCAL_MODULE)
+
 endif
 
 ##########################################
diff --git a/core/native_test.mk b/core/native_test.mk
index d4b2a5b..bb93eb0 100644
--- a/core/native_test.mk
+++ b/core/native_test.mk
@@ -3,10 +3,13 @@
 ## Common flags for native tests are added.
 ###########################################
 
-# TODO: enforce NATIVE_TESTS once current users are gone
-ifndef LOCAL_MODULE_CLASS
-LOCAL_MODULE_CLASS := NATIVE_TESTS
+ifdef LOCAL_MODULE_CLASS
+ifneq ($(LOCAL_MODULE_CLASS),NATIVE_TESTS)
+$(error $(LOCAL_PATH): LOCAL_MODULE_CLASS must be NATIVE_TESTS with BUILD_NATIVE_TEST)
 endif
+endif
+
+LOCAL_MODULE_CLASS := NATIVE_TESTS
 
 include $(BUILD_SYSTEM)/target_test_internal.mk
 
@@ -16,10 +19,4 @@
 endif
 endif
 
-ifneq ($(LOCAL_MODULE_CLASS),NATIVE_TESTS)
-$(warning $(LOCAL_PATH): $(LOCAL_MODULE): LOCAL_MODULE_CLASS should be NATIVE_TESTS with BUILD_NATIVE_TEST)
-LOCAL_MODULE_PATH_64 := $(TARGET_OUT_DATA_NATIVE_TESTS)/$(LOCAL_MODULE)
-LOCAL_MODULE_PATH_32 := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_NATIVE_TESTS)/$(LOCAL_MODULE)
-endif
-
 include $(BUILD_EXECUTABLE)
diff --git a/core/shared_test_lib.mk b/core/shared_test_lib.mk
index f3b8807..1ea9fe7 100644
--- a/core/shared_test_lib.mk
+++ b/core/shared_test_lib.mk
@@ -1,10 +1 @@
-#############################################
-## A thin wrapper around BUILD_SHARED_LIBRARY
-## Common flags for native tests are added.
-#############################################
-
 $(error BUILD_SHARED_TEST_LIBRARY is obsolete)
-
-include $(BUILD_SYSTEM)/target_test_internal.mk
-
-include $(BUILD_SHARED_LIBRARY)
diff --git a/core/target_test_internal.mk b/core/target_test_internal.mk
index d321aaa..1a22510 100644
--- a/core/target_test_internal.mk
+++ b/core/target_test_internal.mk
@@ -33,8 +33,6 @@
 $(error $(LOCAL_PATH): Do not set LOCAL_MODULE_PATH_64 when building test $(LOCAL_MODULE))
 endif
 
-ifeq ($(LOCAL_MODULE_CLASS),NATIVE_TESTS)
 ifndef LOCAL_MODULE_RELATIVE_PATH
 LOCAL_MODULE_RELATIVE_PATH := $(LOCAL_MODULE)
 endif
-endif
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/ApkVerifier.java b/tools/apksigner/core/src/com/android/apksigner/core/ApkVerifier.java
index d509a48..f12b47f 100644
--- a/tools/apksigner/core/src/com/android/apksigner/core/ApkVerifier.java
+++ b/tools/apksigner/core/src/com/android/apksigner/core/ApkVerifier.java
@@ -23,9 +23,13 @@
 import com.android.apksigner.core.internal.apk.v2.V2SchemeVerifier;
 import com.android.apksigner.core.internal.util.AndroidSdkVersion;
 import com.android.apksigner.core.util.DataSource;
+import com.android.apksigner.core.util.DataSources;
 import com.android.apksigner.core.zip.ZipFormatException;
 
+import java.io.Closeable;
+import java.io.File;
 import java.io.IOException;
+import java.io.RandomAccessFile;
 import java.security.NoSuchAlgorithmException;
 import java.security.cert.CertificateEncodingException;
 import java.security.cert.X509Certificate;
@@ -42,6 +46,8 @@
  *
  * <p>The verifier is designed to closely mimic the behavior of Android platforms. This is to enable
  * the verifier to be used for checking whether an APK's signatures will verify on Android.
+ *
+ * <p>Use {@link Builder} to obtain instances of this verifier.
  */
 public class ApkVerifier {
 
@@ -49,6 +55,57 @@
     private static final Map<Integer, String> SUPPORTED_APK_SIG_SCHEME_NAMES =
             Collections.singletonMap(APK_SIGNATURE_SCHEME_V2_ID, "APK Signature Scheme v2");
 
+    private final File mApkFile;
+    private final DataSource mApkDataSource;
+
+    private final int mMinSdkVersion;
+    private final int mMaxSdkVersion;
+
+    private ApkVerifier(
+            File apkFile,
+            DataSource apkDataSource,
+            int minSdkVersion,
+            int maxSdkVersion) {
+        mApkFile = apkFile;
+        mApkDataSource = apkDataSource;
+        mMinSdkVersion = minSdkVersion;
+        mMaxSdkVersion = maxSdkVersion;
+    }
+
+    /**
+     * Verifies the APK's signatures and returns the result of verification. The APK can be
+     * considered verified iff the result's {@link Result#isVerified()} returns {@code true}.
+     * The verification result also includes errors, warnings, and information about signers.
+     *
+     * @throws IOException if an I/O error is encountered while reading the APK
+     * @throws ZipFormatException if the APK is malformed at ZIP format level
+     * @throws NoSuchAlgorithmException if the APK's signatures cannot be verified because a
+     *         required cryptographic algorithm implementation is missing
+     * @throws IllegalStateException if this verifier's configuration is missing required
+     *         information.
+     */
+    public Result verify() throws IOException, ZipFormatException, NoSuchAlgorithmException,
+            IllegalStateException {
+        Closeable in = null;
+        try {
+            DataSource apk;
+            if (mApkDataSource != null) {
+                apk = mApkDataSource;
+            } else if (mApkFile != null) {
+                RandomAccessFile f = new RandomAccessFile(mApkFile, "r");
+                in = f;
+                apk = DataSources.asDataSource(f, 0, f.length());
+            } else {
+                throw new IllegalStateException("APK not provided");
+            }
+            return verify(apk, mMinSdkVersion, mMaxSdkVersion);
+        } finally {
+            if (in != null) {
+                in.close();
+            }
+        }
+    }
+
     /**
      * Verifies the APK's signatures and returns the result of verification. The APK can be
      * considered verified iff the result's {@link Result#isVerified()} returns {@code true}.
@@ -65,7 +122,7 @@
      * @throws NoSuchAlgorithmException if the APK's signatures cannot be verified because a
      *         required cryptographic algorithm implementation is missing
      */
-    public Result verify(DataSource apk, int minSdkVersion, int maxSdkVersion)
+    private static Result verify(DataSource apk, int minSdkVersion, int maxSdkVersion)
             throws IOException, ZipFormatException, NoSuchAlgorithmException {
         if (minSdkVersion < 0) {
             throw new IllegalArgumentException(
@@ -1050,17 +1107,16 @@
      */
     private static class ByteArray {
         private final byte[] mArray;
+        private final int mHashCode;
 
         private ByteArray(byte[] arr) {
             mArray = arr;
+            mHashCode = Arrays.hashCode(mArray);
         }
 
         @Override
         public int hashCode() {
-            final int prime = 31;
-            int result = 1;
-            result = prime * result + Arrays.hashCode(mArray);
-            return result;
+            return mHashCode;
         }
 
         @Override
@@ -1075,10 +1131,103 @@
                 return false;
             }
             ByteArray other = (ByteArray) obj;
+            if (hashCode() != other.hashCode()) {
+                return false;
+            }
             if (!Arrays.equals(mArray, other.mArray)) {
                 return false;
             }
             return true;
         }
     }
+
+    /**
+     * Builder of {@link ApkVerifier} instances.
+     *
+     * <p>Although not required, it is best to provide the SDK version (API Level) of the oldest
+     * Android platform on which the APK is supposed to be installed -- see
+     * {@link #setMinCheckedPlatformVersion(int)}. Without this information, APKs which use security
+     * features not supported on ancient Android platforms (e.g., SHA-256 digests or ECDSA
+     * signatures) will not verify.
+     */
+    public static class Builder {
+        private final File mApkFile;
+        private final DataSource mApkDataSource;
+
+        private int mMinSdkVersion = 1;
+        private int mMaxSdkVersion = Integer.MAX_VALUE;
+
+        /**
+         * Constructs a new {@code Builder} for verifying the provided APK file.
+         */
+        public Builder(File apk) {
+            if (apk == null) {
+                throw new NullPointerException("apk == null");
+            }
+            mApkFile = apk;
+            mApkDataSource = null;
+        }
+
+        /**
+         * Constructs a new {@code Builder} for verifying the provided APK.
+         */
+        public Builder(DataSource apk) {
+            if (apk == null) {
+                throw new NullPointerException("apk == null");
+            }
+            mApkDataSource = apk;
+            mApkFile = null;
+        }
+
+        /**
+         * Sets the oldest Android platform version for which the APK is verified. APK verification
+         * will confirm that the APK is expected to install successfully on all known Android
+         * platforms starting from the platform version with the provided API Level.
+         *
+         * <p>By default, the APK is checked for all platform versions. Thus, APKs which use
+         * security features not supported on ancient Android platforms (e.g., SHA-256 digests or
+         * ECDSA signatures) will not verify by default.
+         *
+         * @param minSdkVersion API Level of the oldest platform for which to verify the APK
+         *
+         * @see #setCheckedPlatformVersions(int, int)
+         */
+        public Builder setMinCheckedPlatformVersion(int minSdkVersion) {
+            mMinSdkVersion = minSdkVersion;
+            mMaxSdkVersion = Integer.MAX_VALUE;
+            return this;
+        }
+
+        /**
+         * Sets the range of Android platform versions for which the APK is verified. APK
+         * verification will confirm that the APK is expected to install successfully on Android
+         * platforms whose API Levels fall into this inclusive range.
+         *
+         * <p>By default, the APK is checked for all platform versions. Thus, APKs which use
+         * security features not supported on ancient Android platforms (e.g., SHA-256 digests or
+         * ECDSA signatures) will not verify by default.
+         *
+         * @param minSdkVersion API Level of the oldest platform for which to verify the APK
+         * @param maxSdkVersion API Level of the newest platform for which to verify the APK
+         *
+         * @see #setMinCheckedPlatformVersion(int)
+         */
+        public Builder setCheckedPlatformVersions(int minSdkVersion, int maxSdkVersion) {
+            mMinSdkVersion = minSdkVersion;
+            mMaxSdkVersion = maxSdkVersion;
+            return this;
+        }
+
+        /**
+         * Returns an {@link ApkVerifier} initialized according to the configuration of this
+         * builder.
+         */
+        public ApkVerifier build() {
+            return new ApkVerifier(
+                    mApkFile,
+                    mApkDataSource,
+                    mMinSdkVersion,
+                    mMaxSdkVersion);
+        }
+    }
 }
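
With the DataSource-based verify() now private, the Builder is the only public way to obtain and run a verifier. A minimal usage sketch (the APK path and the API Level 15 floor are placeholders, and Result is the nested result type referenced in the javadoc above):

    import com.android.apksigner.core.ApkVerifier;
    import java.io.File;

    public class VerifyApkExample {
        public static void main(String[] args) throws Exception {
            // Placeholder APK path; check the APK for API Level 15 and newer.
            ApkVerifier verifier = new ApkVerifier.Builder(new File("app.apk"))
                    .setMinCheckedPlatformVersion(15)
                    .build();
            ApkVerifier.Result result = verifier.verify();
            System.out.println("verified: " + result.isVerified());
        }
    }
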
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/internal/util/RandomAccessFileDataSource.java b/tools/apksigner/core/src/com/android/apksigner/core/internal/util/RandomAccessFileDataSource.java
new file mode 100644
index 0000000..208033d
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/internal/util/RandomAccessFileDataSource.java
@@ -0,0 +1,165 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core.internal.util;
+
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+
+import com.android.apksigner.core.util.DataSink;
+import com.android.apksigner.core.util.DataSource;
+
+/**
+ * {@link DataSource} backed by a {@link RandomAccessFile}.
+ */
+public class RandomAccessFileDataSource implements DataSource {
+
+    private static final int MAX_READ_CHUNK_SIZE = 65536;
+
+    private final RandomAccessFile mFile;
+    private final long mOffset;
+    private final long mSize;
+
+    /**
+     * Constructs a new {@code RandomAccessFileDataSource} based on the data contained in the
+     * whole file. Changes to the contents of the file, including the size of the
+     * file, will be visible in this data source.
+     */
+    public RandomAccessFileDataSource(RandomAccessFile file) {
+        mFile = file;
+        mOffset = 0;
+        mSize = -1;
+    }
+
+    /**
+     * Constructs a new {@code RandomAccessFileDataSource} based on the data contained in the
+     * specified region of the provided file. Changes to the contents of the file will be visible in
+     * this data source.
+     */
+    public RandomAccessFileDataSource(RandomAccessFile file, long offset, long size) {
+        if (offset < 0) {
+            throw new IllegalArgumentException("offset: " + size);
+        }
+        if (size < 0) {
+            throw new IllegalArgumentException("size: " + size);
+        }
+        mFile = file;
+        mOffset = offset;
+        mSize = size;
+    }
+
+    @Override
+    public long size() {
+        if (mSize == -1) {
+            try {
+                return mFile.length();
+            } catch (IOException e) {
+                return 0;
+            }
+        } else {
+            return mSize;
+        }
+    }
+
+    @Override
+    public RandomAccessFileDataSource slice(long offset, long size) {
+        long sourceSize = size();
+        checkChunkValid(offset, size, sourceSize);
+        if ((offset == 0) && (size == sourceSize)) {
+            return this;
+        }
+
+        return new RandomAccessFileDataSource(mFile, mOffset + offset, size);
+    }
+
+    @Override
+    public void feed(long offset, long size, DataSink sink) throws IOException {
+        long sourceSize = size();
+        checkChunkValid(offset, size, sourceSize);
+        if (size == 0) {
+            return;
+        }
+
+        long chunkOffsetInFile = mOffset + offset;
+        long remaining = size;
+        byte[] buf = new byte[(int) Math.min(remaining, MAX_READ_CHUNK_SIZE)];
+        while (remaining > 0) {
+            int chunkSize = (int) Math.min(remaining, buf.length);
+            synchronized (mFile) {
+                mFile.seek(chunkOffsetInFile);
+                mFile.readFully(buf, 0, chunkSize);
+            }
+            sink.consume(buf, 0, chunkSize);
+            chunkOffsetInFile += chunkSize;
+            remaining -= chunkSize;
+        }
+    }
+
+    @Override
+    public void copyTo(long offset, int size, ByteBuffer dest) throws IOException {
+        long sourceSize = size();
+        checkChunkValid(offset, size, sourceSize);
+        if (size == 0) {
+            return;
+        }
+
+        long offsetInFile = mOffset + offset;
+        int remaining = size;
+        FileChannel fileChannel = mFile.getChannel();
+        while (remaining > 0) {
+            int chunkSize;
+            synchronized (mFile) {
+                fileChannel.position(offsetInFile);
+                chunkSize = fileChannel.read(dest);
+            }
+            offsetInFile += chunkSize;
+            remaining -= chunkSize;
+        }
+    }
+
+    @Override
+    public ByteBuffer getByteBuffer(long offset, int size) throws IOException {
+        ByteBuffer result = ByteBuffer.allocate(size);
+        copyTo(offset, size, result);
+        result.flip();
+        return result;
+    }
+
+    private static void checkChunkValid(long offset, long size, long sourceSize) {
+        if (offset < 0) {
+            throw new IllegalArgumentException("offset: " + offset);
+        }
+        if (size < 0) {
+            throw new IllegalArgumentException("size: " + size);
+        }
+        if (offset > sourceSize) {
+            throw new IllegalArgumentException(
+                    "offset (" + offset + ") > source size (" + sourceSize + ")");
+        }
+        long endOffset = offset + size;
+        if (endOffset < offset) {
+            throw new IllegalArgumentException(
+                    "offset (" + offset + ") + size (" + size + ") overflow");
+        }
+        if (endOffset > sourceSize) {
+            throw new IllegalArgumentException(
+                    "offset (" + offset + ") + size (" + size
+                            + ") > source size (" + sourceSize  +")");
+        }
+    }
+}
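
The offsets passed to feed(), copyTo() and getByteBuffer() are relative to the source's own window, i.e. they are added to mOffset before the file is touched. A small sketch of reading through a region-limited source (the file name and byte positions are made up for illustration, and the file is assumed to be large enough; callers would normally go through DataSources rather than this internal class):

    import com.android.apksigner.core.internal.util.RandomAccessFileDataSource;
    import java.io.RandomAccessFile;
    import java.nio.ByteBuffer;

    public class RegionReadExample {
        public static void main(String[] args) throws Exception {
            // Placeholder file; only expose bytes [100, 100 + 64) of it.
            try (RandomAccessFile raf = new RandomAccessFile("some.apk", "r")) {
                RandomAccessFileDataSource region =
                        new RandomAccessFileDataSource(raf, 100, 64);
                // Offsets are relative to the region start, so this reads
                // file bytes [116, 116 + 16).
                ByteBuffer chunk = region.getByteBuffer(16, 16);
                System.out.println("read " + chunk.remaining() + " bytes");
            }
        }
    }
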
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/util/DataSources.java b/tools/apksigner/core/src/com/android/apksigner/core/util/DataSources.java
index 6ce0ac8..1cbb0af 100644
--- a/tools/apksigner/core/src/com/android/apksigner/core/util/DataSources.java
+++ b/tools/apksigner/core/src/com/android/apksigner/core/util/DataSources.java
@@ -1,7 +1,9 @@
 package com.android.apksigner.core.util;
 
 import com.android.apksigner.core.internal.util.ByteBufferDataSource;
+import com.android.apksigner.core.internal.util.RandomAccessFileDataSource;
 
+import java.io.RandomAccessFile;
 import java.nio.ByteBuffer;
 
 /**
@@ -21,4 +23,26 @@
         }
         return new ByteBufferDataSource(buffer);
     }
+
+    /**
+     * Returns a {@link DataSource} backed by the provided {@link RandomAccessFile}. Changes to the
+     * file, including changes to the size of the file, will be visible in the data source.
+     */
+    public static DataSource asDataSource(RandomAccessFile file) {
+        if (file == null) {
+            throw new NullPointerException();
+        }
+        return new RandomAccessFileDataSource(file);
+    }
+
+    /**
+     * Returns a {@link DataSource} backed by the provided region of the {@link RandomAccessFile}.
+     * Changes to the file will be visible in the data source.
+     */
+    public static DataSource asDataSource(RandomAccessFile file, long offset, long size) {
+        if (file == null) {
+            throw new NullPointerException();
+        }
+        return new RandomAccessFileDataSource(file, offset, size);
+    }
 }
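
These factories are what the new ApkVerifier.verify() uses internally when the verifier was built from a File; a caller that already holds an open RandomAccessFile can instead wrap it and use the DataSource-based Builder, staying responsible for closing the file itself. A sketch with a placeholder file name:

    import com.android.apksigner.core.ApkVerifier;
    import com.android.apksigner.core.util.DataSource;
    import com.android.apksigner.core.util.DataSources;
    import java.io.RandomAccessFile;

    public class DataSourceVerifyExample {
        public static void main(String[] args) throws Exception {
            try (RandomAccessFile raf = new RandomAccessFile("app.apk", "r")) {
                // Wrap the open file; the caller keeps ownership and closes it.
                DataSource apk = DataSources.asDataSource(raf, 0, raf.length());
                ApkVerifier.Result result =
                        new ApkVerifier.Builder(apk).build().verify();
                System.out.println("verified: " + result.isVerified());
            }
        }
    }
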
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index a13bed8..6966964 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -64,11 +64,6 @@
       Generate an OTA package that will wipe the user data partition
       when installed.
 
-  -n  (--no_prereq)
-      Omit the timestamp prereq check normally included at the top of
-      the build scripts (used for developer OTA packages which
-      legitimately need to go back and forth).
-
   --downgrade
       Intentionally generate an incremental OTA that updates from a newer
       build to an older one (based on timestamp comparison). "post-timestamp"
@@ -113,6 +108,17 @@
       Generate a log file that shows the differences in the source and target
       builds for an incremental package. This option is only meaningful when
       -i is specified.
+
+  --payload_signer <signer>
+      Specify the signer when signing the payload and metadata for A/B OTAs.
+      By default (i.e. without this flag), it calls 'openssl pkeyutl' to sign
+      with the package private key. If the private key cannot be accessed
+      directly, a payload signer that knows how to do that should be specified.
+      The signer will be supplied with "-inkey <path_to_key>",
+      "-in <input_file>" and "-out <output_file>" parameters.
+
+  --payload_signer_args <args>
+      Specify the arguments needed for the payload signer.
 """
 
 import sys
@@ -124,6 +130,7 @@
 import multiprocessing
 import os
 import subprocess
+import shlex
 import tempfile
 import zipfile
 
@@ -139,7 +146,6 @@
 OPTIONS.prohibit_verbatim = set(("system/build.prop",))
 OPTIONS.patch_threshold = 0.95
 OPTIONS.wipe_user_data = False
-OPTIONS.omit_prereq = False
 OPTIONS.downgrade = False
 OPTIONS.extra_script = None
 OPTIONS.aslr_mode = True
@@ -160,6 +166,8 @@
 OPTIONS.stash_threshold = 0.8
 OPTIONS.gen_verify = False
 OPTIONS.log_diff = None
+OPTIONS.payload_signer = None
+OPTIONS.payload_signer_args = []
 
 def MostPopularKey(d, default):
   """Given a dict, return the key corresponding to the largest
@@ -561,10 +569,9 @@
 
   metadata["ota-type"] = "BLOCK" if block_based else "FILE"
 
-  if not OPTIONS.omit_prereq:
-    ts = GetBuildProp("ro.build.date.utc", OPTIONS.info_dict)
-    ts_text = GetBuildProp("ro.build.date", OPTIONS.info_dict)
-    script.AssertOlderBuild(ts, ts_text)
+  ts = GetBuildProp("ro.build.date.utc", OPTIONS.info_dict)
+  ts_text = GetBuildProp("ro.build.date", OPTIONS.info_dict)
+  script.AssertOlderBuild(ts, ts_text)
 
   AppendAssertions(script, OPTIONS.info_dict, oem_dict)
   device_specific.FullOTA_Assertions()
@@ -1162,17 +1169,19 @@
         "default_system_dev_certificate",
         "build/target/product/security/testkey")
 
-  # A/B updater expects key in RSA format.
-  cmd = ["openssl", "pkcs8",
-         "-in", OPTIONS.package_key + OPTIONS.private_key_suffix,
-         "-inform", "DER", "-nocrypt"]
-  rsa_key = common.MakeTempFile(prefix="key-", suffix=".key")
-  cmd.extend(["-out", rsa_key])
-  p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
-  p1.communicate()
-  assert p1.returncode == 0, "openssl pkcs8 failed"
+  # A/B updater expects a signing key in RSA format. Gets the key ready for
+  # later use in step 3, unless a payload_signer has been specified.
+  if OPTIONS.payload_signer is None:
+    cmd = ["openssl", "pkcs8",
+           "-in", OPTIONS.package_key + OPTIONS.private_key_suffix,
+           "-inform", "DER", "-nocrypt"]
+    rsa_key = common.MakeTempFile(prefix="key-", suffix=".key")
+    cmd.extend(["-out", rsa_key])
+    p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
+    p1.communicate()
+    assert p1.returncode == 0, "openssl pkcs8 failed"
 
-  # Stage the output zip package for signing.
+  # Stage the output zip package for package signing.
   temp_zip_file = tempfile.NamedTemporaryFile()
   output_zip = zipfile.ZipFile(temp_zip_file, "w",
                                compression=zipfile.ZIP_DEFLATED)
@@ -1229,21 +1238,29 @@
   signed_metadata_sig_file = common.MakeTempFile(prefix="signed-sig-",
                                                  suffix=".bin")
   # 3a. Sign the payload hash.
-  cmd = ["openssl", "pkeyutl", "-sign",
-         "-inkey", rsa_key,
-         "-pkeyopt", "digest:sha256",
-         "-in", payload_sig_file,
-         "-out", signed_payload_sig_file]
+  if OPTIONS.payload_signer is not None:
+    cmd = [OPTIONS.payload_signer]
+    cmd.extend(OPTIONS.payload_signer_args)
+  else:
+    cmd = ["openssl", "pkeyutl", "-sign",
+           "-inkey", rsa_key,
+           "-pkeyopt", "digest:sha256"]
+  cmd.extend(["-in", payload_sig_file,
+              "-out", signed_payload_sig_file])
   p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
   p1.communicate()
   assert p1.returncode == 0, "openssl sign payload failed"
 
   # 3b. Sign the metadata hash.
-  cmd = ["openssl", "pkeyutl", "-sign",
-         "-inkey", rsa_key,
-         "-pkeyopt", "digest:sha256",
-         "-in", metadata_sig_file,
-         "-out", signed_metadata_sig_file]
+  if OPTIONS.payload_signer is not None:
+    cmd = [OPTIONS.payload_signer]
+    cmd.extend(OPTIONS.payload_signer_args)
+  else:
+    cmd = ["openssl", "pkeyutl", "-sign",
+           "-inkey", rsa_key,
+           "-pkeyopt", "digest:sha256"]
+  cmd.extend(["-in", metadata_sig_file,
+              "-out", signed_metadata_sig_file])
   p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
   p1.communicate()
   assert p1.returncode == 0, "openssl sign metadata failed"
@@ -1861,8 +1878,6 @@
       OPTIONS.full_bootloader = True
     elif o in ("-w", "--wipe_user_data"):
       OPTIONS.wipe_user_data = True
-    elif o in ("-n", "--no_prereq"):
-      OPTIONS.omit_prereq = True
     elif o == "--downgrade":
       OPTIONS.downgrade = True
       OPTIONS.wipe_user_data = True
@@ -1905,12 +1920,16 @@
       OPTIONS.gen_verify = True
     elif o == "--log_diff":
       OPTIONS.log_diff = a
+    elif o == "--payload_signer":
+      OPTIONS.payload_signer = a
+    elif o == "--payload_signer_args":
+      OPTIONS.payload_signer_args = shlex.split(a)
     else:
       return False
     return True
 
   args = common.ParseOptions(argv, __doc__,
-                             extra_opts="b:k:i:d:wne:t:a:2o:",
+                             extra_opts="b:k:i:d:we:t:a:2o:",
                              extra_long_opts=[
                                  "board_config=",
                                  "package_key=",
@@ -1918,7 +1937,6 @@
                                  "full_radio",
                                  "full_bootloader",
                                  "wipe_user_data",
-                                 "no_prereq",
                                  "downgrade",
                                  "extra_script=",
                                  "worker_threads=",
@@ -1934,6 +1952,8 @@
                                  "stash_threshold=",
                                  "gen_verify",
                                  "log_diff=",
+                                 "payload_signer=",
+                                 "payload_signer_args=",
                              ], extra_option_handler=option_handler)
 
   if len(args) != 2:
@@ -1949,8 +1969,7 @@
     # Otherwise the device may go back from arbitrary build with this full
     # OTA package.
     if OPTIONS.incremental_source is None:
-      raise ValueError("Cannot generate downgradable full OTAs - consider"
-                       "using --omit_prereq?")
+      raise ValueError("Cannot generate downgradable full OTAs")
 
   # Load the dict file from the zip directly to have a peek at the OTA type.
   # For packages using A/B update, unzipping is not needed.
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index f758ae0..3341f9f 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -51,10 +51,12 @@
       in which they appear on the command line.
 
   -o  (--replace_ota_keys)
-      Replace the certificate (public key) used by OTA package
-      verification with the one specified in the input target_files
-      zip (in the META/otakeys.txt file).  Key remapping (-k and -d)
-      is performed on this key.
+      Replace the certificate (public key) used by OTA package verification
+      with the ones specified in the input target_files zip (in the
+      META/otakeys.txt file). Key remapping (-k and -d) is performed on the
+      keys. For A/B devices, the payload verification key will be replaced
+      as well. If there are multiple OTA keys, only the first one will be
+      used for payload verification.
 
   -t  (--tag_changes)  <+tag>,<-tag>,...
       Comma-separated list of changes to make to the set of tags (in
@@ -63,6 +65,19 @@
       removed.  Changes are processed in the order they appear.
       Default value is "-test-keys,-dev-keys,+release-keys".
 
+  --replace_verity_private_key <key>
+      Replace the private key used for verity signing. It expects a filename
+      WITHOUT the extension (e.g. verity_key).
+
+  --replace_verity_public_key <key>
+      Replace the certificate (public key) used for verity verification. The
+      key file replaces the one at BOOT/RAMDISK/verity_key (or ROOT/verity_key
+      for devices using system_root_image). It expects the key filename WITH
+      the extension (e.g. verity_key.pub).
+
+  --replace_verity_keyid <path_to_X509_PEM_cert_file>
+      Replace the veritykeyid in BOOT/cmdline of the input target_files zip
+      with the keyid of the cert pointed to by <path_to_X509_PEM_cert_file>.
 """
 
 import sys
@@ -92,6 +107,7 @@
 OPTIONS.replace_ota_keys = False
 OPTIONS.replace_verity_public_key = False
 OPTIONS.replace_verity_private_key = False
+OPTIONS.replace_verity_keyid = False
 OPTIONS.tag_changes = ("-test-keys", "-dev-keys", "+release-keys")
 
 def GetApkCerts(tf_zip):
@@ -171,7 +187,9 @@
                  for i in input_tf_zip.infolist()
                  if i.filename.endswith('.apk')])
   rebuild_recovery = False
+  system_root_image = misc_info.get("system_root_image") == "true"
 
+  # tmpdir will only be used to regenerate the recovery-from-boot patch.
   tmpdir = tempfile.mkdtemp()
   def write_to_temp(fn, attr, data):
     fn = os.path.join(tmpdir, fn)
@@ -196,25 +214,6 @@
     data = input_tf_zip.read(info.filename)
     out_info = copy.copy(info)
 
-    # Replace keys if requested.
-    if (info.filename == "META/misc_info.txt" and
-        OPTIONS.replace_verity_private_key):
-      ReplaceVerityPrivateKey(input_tf_zip, output_tf_zip, misc_info,
-                              OPTIONS.replace_verity_private_key[1])
-    elif (info.filename in ("BOOT/RAMDISK/verity_key",
-                            "BOOT/verity_key") and
-          OPTIONS.replace_verity_public_key):
-      new_data = ReplaceVerityPublicKey(output_tf_zip, info.filename,
-                                        OPTIONS.replace_verity_public_key[1])
-      write_to_temp(info.filename, info.external_attr, new_data)
-    # Copy BOOT/, RECOVERY/, META/, ROOT/ to rebuild recovery patch.
-    elif (info.filename.startswith("BOOT/") or
-          info.filename.startswith("RECOVERY/") or
-          info.filename.startswith("META/") or
-          info.filename.startswith("ROOT/") or
-          info.filename == "SYSTEM/etc/recovery-resource.dat"):
-      write_to_temp(info.filename, info.external_attr, data)
-
     # Sign APKs.
     if info.filename.endswith(".apk"):
       name = os.path.basename(info.filename)
@@ -228,6 +227,8 @@
         # an APK we're not supposed to sign.
         print "NOT signing: %s" % (name,)
         common.ZipWriteStr(output_tf_zip, out_info, data)
+
+    # System properties.
     elif info.filename in ("SYSTEM/build.prop",
                            "VENDOR/build.prop",
                            "BOOT/RAMDISK/default.prop",
@@ -238,34 +239,94 @@
       if info.filename in ("BOOT/RAMDISK/default.prop",
                            "RECOVERY/RAMDISK/default.prop"):
         write_to_temp(info.filename, info.external_attr, new_data)
+
     elif info.filename.endswith("mac_permissions.xml"):
       print "rewriting %s with new keys." % (info.filename,)
       new_data = ReplaceCerts(data)
       common.ZipWriteStr(output_tf_zip, out_info, new_data)
+
+    # Trigger a rebuild of the recovery patch if needed.
     elif info.filename in ("SYSTEM/recovery-from-boot.p",
                            "SYSTEM/etc/recovery.img",
                            "SYSTEM/bin/install-recovery.sh"):
       rebuild_recovery = True
+
+    # Don't copy OTA keys if we're replacing them.
     elif (OPTIONS.replace_ota_keys and
-          info.filename in ("RECOVERY/RAMDISK/res/keys",
-                            "SYSTEM/etc/security/otacerts.zip")):
-      # don't copy these files if we're regenerating them below
+          info.filename in (
+              "BOOT/RAMDISK/res/keys",
+              "RECOVERY/RAMDISK/res/keys",
+              "SYSTEM/etc/security/otacerts.zip",
+              "SYSTEM/etc/update_engine/update-payload-key.pub.pem")):
       pass
+
+    # Skip META/misc_info.txt if we will replace the verity private key later.
     elif (OPTIONS.replace_verity_private_key and
           info.filename == "META/misc_info.txt"):
       pass
+
+    # Skip verity public key if we will replace it.
     elif (OPTIONS.replace_verity_public_key and
           info.filename in ("BOOT/RAMDISK/verity_key",
-                            "BOOT/verity_key")):
+                            "ROOT/verity_key")):
       pass
+
+    # Skip verity keyid (for system_root_image use) if we will replace it.
+    elif (OPTIONS.replace_verity_keyid and
+          info.filename == "BOOT/cmdline"):
+      pass
+
+    # Copy BOOT/, RECOVERY/, META/, ROOT/ to rebuild recovery patch. This case
+    # must come AFTER other matching rules.
+    elif (info.filename.startswith("BOOT/") or
+          info.filename.startswith("RECOVERY/") or
+          info.filename.startswith("META/") or
+          info.filename.startswith("ROOT/") or
+          info.filename == "SYSTEM/etc/recovery-resource.dat"):
+      write_to_temp(info.filename, info.external_attr, data)
+      common.ZipWriteStr(output_tf_zip, out_info, data)
+
+    # A non-APK file; copy it verbatim.
     else:
-      # a non-APK file; copy it verbatim
       common.ZipWriteStr(output_tf_zip, out_info, data)
 
   if OPTIONS.replace_ota_keys:
     new_recovery_keys = ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info)
     if new_recovery_keys:
-      write_to_temp("RECOVERY/RAMDISK/res/keys", 0o755 << 16, new_recovery_keys)
+      if system_root_image:
+        recovery_keys_location = "BOOT/RAMDISK/res/keys"
+      else:
+        recovery_keys_location = "RECOVERY/RAMDISK/res/keys"
+      # The "new_recovery_keys" has been already written into the output_tf_zip
+      # while calling ReplaceOtaKeys(). We're just putting the same copy to
+      # tmpdir in case we need to regenerate the recovery-from-boot patch.
+      write_to_temp(recovery_keys_location, 0o755 << 16, new_recovery_keys)
+
+  # Replace the keyid string in META/misc_info.txt.
+  if OPTIONS.replace_verity_private_key:
+    ReplaceVerityPrivateKey(input_tf_zip, output_tf_zip, misc_info,
+                            OPTIONS.replace_verity_private_key[1])
+
+  if OPTIONS.replace_verity_public_key:
+    if system_root_image:
+      dest = "ROOT/verity_key"
+    else:
+      dest = "BOOT/RAMDISK/verity_key"
+    # We are replacing the one in boot image only, since the one under
+    # recovery won't ever be needed.
+    new_data = ReplaceVerityPublicKey(
+        output_tf_zip, dest, OPTIONS.replace_verity_public_key[1])
+    write_to_temp(dest, 0o755 << 16, new_data)
+
+  # Replace the keyid string in BOOT/cmdline.
+  if OPTIONS.replace_verity_keyid:
+    new_cmdline = ReplaceVerityKeyId(input_tf_zip, output_tf_zip,
+      OPTIONS.replace_verity_keyid[1])
+    # Writing the new cmdline to tmpdir is redundant, as the boot image
+    # gets built in add_img_to_target_files and rebuild_recovery is not
+    # exercised while building the boot image for the A/B path.
+    write_to_temp("BOOT/cmdline", 0o755 << 16, new_cmdline)
 
   if rebuild_recovery:
     recovery_img = common.GetBootableImage(
@@ -398,7 +459,8 @@
                            "build/target/product/security/testkey")
     mapped_keys.append(
         OPTIONS.key_map.get(devkey, devkey) + ".x509.pem")
-    print "META/otakeys.txt has no keys; using", mapped_keys[0]
+    print("META/otakeys.txt has no keys; using %s for OTA package"
+          " verification." % (mapped_keys[0],))
 
   # recovery uses a version of the key that has been slightly
   # predigested (by DumpPublicKey.java) and put in res/keys.
@@ -411,8 +473,13 @@
   new_recovery_keys, _ = p.communicate()
   if p.returncode != 0:
     raise common.ExternalError("failed to run dumpkeys")
-  common.ZipWriteStr(output_tf_zip, "RECOVERY/RAMDISK/res/keys",
-                     new_recovery_keys)
+
+  # system_root_image puts the recovery keys at BOOT/RAMDISK.
+  if misc_info.get("system_root_image") == "true":
+    recovery_keys_location = "BOOT/RAMDISK/res/keys"
+  else:
+    recovery_keys_location = "RECOVERY/RAMDISK/res/keys"
+  common.ZipWriteStr(output_tf_zip, recovery_keys_location, new_recovery_keys)
 
   # SystemUpdateActivity uses the x509.pem version of the keys, but
   # put into a zipfile system/etc/security/otacerts.zip.
@@ -426,8 +493,23 @@
   common.ZipWriteStr(output_tf_zip, "SYSTEM/etc/security/otacerts.zip",
                      temp_file.getvalue())
 
+  # For A/B devices, update the payload verification key.
+  if misc_info.get("ab_update") == "true":
+    # Unlike otacerts.zip that may contain multiple keys, we can only specify
+    # ONE payload verification key.
+    if len(mapped_keys) > 1:
+      print("\n  WARNING: Found more than one OTA keys; Using the first one"
+            " as payload verification key.\n\n")
+
+    print "Using %s for payload verification." % (mapped_keys[0],)
+    common.ZipWrite(
+        output_tf_zip,
+        mapped_keys[0],
+        arcname="SYSTEM/etc/update_engine/update-payload-key.pub.pem")
+
   return new_recovery_keys
 
+
 def ReplaceVerityPublicKey(targetfile_zip, filename, key_path):
   print "Replacing verity public key with %s" % key_path
   with open(key_path) as f:
@@ -435,6 +517,7 @@
   common.ZipWriteStr(targetfile_zip, filename, data)
   return data
 
+
 def ReplaceVerityPrivateKey(targetfile_input_zip, targetfile_output_zip,
                             misc_info, key_path):
   print "Replacing verity private key with %s" % key_path
@@ -444,6 +527,32 @@
   common.ZipWriteStr(targetfile_output_zip, "META/misc_info.txt", new_misc_info)
   misc_info["verity_key"] = key_path
 
+
+def ReplaceVerityKeyId(targetfile_input_zip, targetfile_output_zip, keypath):
+  in_cmdline = targetfile_input_zip.read("BOOT/cmdline")
+  # copy in_cmdline to output_zip if veritykeyid is not present in in_cmdline
+  if "veritykeyid" not in in_cmdline:
+    common.ZipWriteStr(targetfile_output_zip, "BOOT/cmdline", in_cmdline)
+    return in_cmdline
+  out_cmdline = []
+  for param in in_cmdline.split():
+    if "veritykeyid" in param:
+      # extract keyid using openssl command
+      p = common.Run(["openssl", "x509", "-in", keypath, "-text"], stdout=subprocess.PIPE)
+      keyid, stderr = p.communicate()
+      keyid = re.search(r'keyid:([0-9a-fA-F:]*)', keyid).group(1).replace(':', '').lower()
+      print "Replacing verity keyid with %s error=%s" % (keyid, stderr)
+      out_cmdline.append("veritykeyid=id:%s" % (keyid,))
+    else:
+      out_cmdline.append(param)
+
+  out_cmdline = ' '.join(out_cmdline)
+  out_cmdline = out_cmdline.strip()
+  print "out_cmdline %s" % (out_cmdline)
+  common.ZipWriteStr(targetfile_output_zip, "BOOT/cmdline", out_cmdline)
+  return out_cmdline
+
+
 def BuildKeyMap(misc_info, key_mapping_options):
   for s, d in key_mapping_options:
     if s is None:   # -d option
@@ -541,6 +650,8 @@
       OPTIONS.replace_verity_public_key = (True, a)
     elif o == "--replace_verity_private_key":
       OPTIONS.replace_verity_private_key = (True, a)
+    elif o == "--replace_verity_keyid":
+      OPTIONS.replace_verity_keyid = (True, a)
     else:
       return False
     return True
@@ -553,7 +664,8 @@
                                               "replace_ota_keys",
                                               "tag_changes=",
                                               "replace_verity_public_key=",
-                                              "replace_verity_private_key="],
+                                              "replace_verity_private_key=",
+                                              "replace_verity_keyid="],
                              extra_option_handler=option_handler)
 
   if len(args) != 2: