Merge changes Ica52e8f0,Id0ee07f9 into nyc-dev am: e39fa5ef8f am: feada74524
am: dbe7ec01c6

Change-Id: Ic825ad705088aa62974c4a654b149f06f4b1f848
diff --git a/core/Makefile b/core/Makefile
index 6a70b49..60f9837 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -799,6 +799,7 @@
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE),$(hide) echo "system_squashfs_block_size=$(BOARD_SYSTEMIMAGE_SQUASHFS_BLOCK_SIZE)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "system_squashfs_disable_4k_align=$(BOARD_SYSTEMIMAGE_SQUASHFS_DISABLE_4K_ALIGN)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH),$(hide) echo "system_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_HEADROOM),$(hide) echo "system_headroom=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_HEADROOM)" >> $(1))
 $(if $(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "userdata_fs_type=$(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
 $(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(hide) echo "userdata_size=$(BOARD_USERDATAIMAGE_PARTITION_SIZE)" >> $(1))
 $(if $(BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "cache_fs_type=$(BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
@@ -1092,7 +1093,8 @@
 	@echo Installed file list: $@
 	@mkdir -p $(dir $@)
 	@rm -f $@
-	$(hide) build/tools/fileslist.py $(TARGET_OUT) > $@
+	$(hide) build/tools/fileslist.py $(TARGET_OUT) > $(@:.txt=.json)
+	$(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
 
 .PHONY: installed-file-list
 installed-file-list: $(INSTALLED_FILES_FILE)
@@ -1495,7 +1497,8 @@
 	@echo Installed file list: $@
 	@mkdir -p $(dir $@)
 	@rm -f $@
-	$(hide) build/tools/fileslist.py $(TARGET_OUT_VENDOR) > $@
+	$(hide) build/tools/fileslist.py $(TARGET_OUT_VENDOR) > $(@:.txt=.json)
+	$(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
 
 vendorimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,vendor)
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 6722af4..6fc2935 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -210,7 +210,10 @@
   # Apk and its attachments reside in its own subdir.
   ifeq ($(LOCAL_MODULE_CLASS),APPS)
   # framework-res.apk doesn't like the additional layer.
-  ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+  ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+  # Neither do Runtime Resource Overlay apks, which contain just the overlaid resources.
+  else ifeq ($(LOCAL_IS_RUNTIME_RESOURCE_OVERLAY),true)
+  else
     my_module_path := $(my_module_path)/$(LOCAL_MODULE)
   endif
   endif
diff --git a/core/build_rro_package.mk b/core/build_rro_package.mk
new file mode 100644
index 0000000..9865b33
--- /dev/null
+++ b/core/build_rro_package.mk
@@ -0,0 +1,25 @@
+#############################################################################
+## Standard rules for installing runtime resource overlay APKs.
+##
+## Set LOCAL_RRO_THEME to the theme name if the package should apply only to
+## a particular theme, as set by the ro.boot.vendor.overlay.theme property.
+##
+## If LOCAL_RRO_THEME is not set, the package always applies, independent of
+## themes.
+##
+#############################################################################
+
+LOCAL_IS_RUNTIME_RESOURCE_OVERLAY := true
+
+ifneq ($(LOCAL_SRC_FILES),)
+  $(error runtime resource overlay package should not contain sources)
+endif
+
+ifeq ($(LOCAL_RRO_THEME),)
+  LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/overlay
+else
+  LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/overlay/$(LOCAL_RRO_THEME)
+endif
+
+include $(BUILD_SYSTEM)/package.mk
+
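
For reference, a resources-only overlay module would pull in the new BUILD_RRO_PACKAGE rule roughly as below; the module name, theme, and certificate choice are illustrative and not part of this change.

    LOCAL_PATH := $(call my-dir)
    include $(CLEAR_VARS)

    # Resources-only overlay: no LOCAL_SRC_FILES, as enforced above.
    LOCAL_MODULE := ExampleDeviceOverlay
    LOCAL_RESOURCE_DIR := $(LOCAL_PATH)/res
    LOCAL_CERTIFICATE := platform

    # Optional: install under /vendor/overlay/ExampleTheme instead of
    # /vendor/overlay, gated on ro.boot.vendor.overlay.theme.
    LOCAL_RRO_THEME := ExampleTheme

    include $(BUILD_RRO_PACKAGE)
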
diff --git a/core/clang/HOST_x86_common.mk b/core/clang/HOST_x86_common.mk
index 9e71750..690c0f6 100644
--- a/core/clang/HOST_x86_common.mk
+++ b/core/clang/HOST_x86_common.mk
@@ -13,7 +13,8 @@
 ifeq ($(HOST_OS),linux)
 CLANG_CONFIG_x86_LINUX_HOST_EXTRA_ASFLAGS := \
   --gcc-toolchain=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG) \
-  --sysroot $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot
+  --sysroot $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot \
+  -B$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/bin
 
 CLANG_CONFIG_x86_LINUX_HOST_EXTRA_CFLAGS := \
   --gcc-toolchain=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 5886610..f7567b4 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -363,6 +363,8 @@
 LOCAL_INIT_RC_32:=
 LOCAL_INIT_RC_64:=
 LOCAL_JAVA_LANGUAGE_VERSION:=
+LOCAL_IS_RUNTIME_RESOURCE_OVERLAY:=
+LOCAL_RRO_THEME:=
 
 # Trim MAKEFILE_LIST so that $(call my-dir) doesn't need to
 # iterate over thousands of entries every time.
diff --git a/core/config.mk b/core/config.mk
index 5b9f1f8..2847d34 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -85,6 +85,7 @@
 BUILD_HOST_EXECUTABLE:= $(BUILD_SYSTEM)/host_executable.mk
 BUILD_PACKAGE:= $(BUILD_SYSTEM)/package.mk
 BUILD_PHONY_PACKAGE:= $(BUILD_SYSTEM)/phony_package.mk
+BUILD_RRO_PACKAGE:= $(BUILD_SYSTEM)/build_rro_package.mk
 BUILD_HOST_PREBUILT:= $(BUILD_SYSTEM)/host_prebuilt.mk
 BUILD_PREBUILT:= $(BUILD_SYSTEM)/prebuilt.mk
 BUILD_MULTI_PREBUILT:= $(BUILD_SYSTEM)/multi_prebuilt.mk
diff --git a/core/product.mk b/core/product.mk
index 332b015..c69e963 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -124,6 +124,7 @@
     PRODUCT_SYSTEM_BASE_FS_PATH \
     PRODUCT_VENDOR_BASE_FS_PATH \
     PRODUCT_SHIPPING_API_LEVEL \
+    PRODUCT_SYSTEM_HEADROOM \
 
 
 
diff --git a/core/product_config.mk b/core/product_config.mk
index 6438d51..4117332 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -319,6 +319,9 @@
   PRODUCT_MANUFACTURER := unknown
 endif
 
+# Add reserved headroom to a system image.
+PRODUCT_SYSTEM_HEADROOM := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_HEADROOM))
+
 ifeq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_CHARACTERISTICS),)
   TARGET_AAPT_CHARACTERISTICS := default
 else
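
As a sketch of how the new variable is meant to be consumed, a device's product makefile could reserve headroom as follows; the 32 MiB figure is an arbitrary illustration, not a recommendation.

    # Keep at least 32 MiB of system.img free so builds whose system image
    # size fluctuates still leave room; emitted as system_headroom above and
    # enforced by build_image.py as partition_headroom.
    PRODUCT_SYSTEM_HEADROOM := 33554432
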
diff --git a/target/product/emulator.mk b/target/product/emulator.mk
index b08a28a..afa8389 100644
--- a/target/product/emulator.mk
+++ b/target/product/emulator.mk
@@ -61,6 +61,7 @@
     device/generic/goldfish/init.ranchu.rc:root/init.ranchu.rc \
     device/generic/goldfish/fstab.ranchu:root/fstab.ranchu \
     device/generic/goldfish/ueventd.ranchu.rc:root/ueventd.ranchu.rc \
+    device/generic/goldfish/input/goldfish_rotary.idc:system/usr/idc/goldfish_rotary.idc \
     frameworks/native/data/etc/android.hardware.usb.accessory.xml:system/etc/permissions/android.hardware.usb.accessory.xml
 
 PRODUCT_PACKAGE_OVERLAYS := device/generic/goldfish/overlay
diff --git a/tools/fileslist.py b/tools/fileslist.py
index a11efaa..b9e7350 100755
--- a/tools/fileslist.py
+++ b/tools/fileslist.py
@@ -15,12 +15,24 @@
 # limitations under the License.
 #
 
-import operator, os, sys
+import json, hashlib, operator, os, sys
 
 def get_file_size(path):
   st = os.lstat(path)
   return st.st_size;
 
+def get_file_digest(path):
+  if not os.path.isfile(path):
+    return "----------------------------------------------------------------"
+  digest = hashlib.sha256()
+  with open(path, 'rb') as f:
+    while True:
+      buf = f.read(1024*1024)
+      if not buf:
+        break
+      digest.update(buf)
+  return digest.hexdigest();
+
 def main(argv):
   output = []
   roots = argv[1:]
@@ -30,16 +42,17 @@
       relative = dir[base:]
       for f in files:
         try:
-          row = (
-              get_file_size(os.path.sep.join((dir, f))),
-              os.path.sep.join((relative, f)),
-            )
+          path = os.path.sep.join((dir, f))
+          row = {
+              "Size": get_file_size(path),
+              "Name": os.path.sep.join((relative, f)),
+              "SHA256": get_file_digest(path),
+            }
           output.append(row)
         except os.error:
           pass
-  output.sort(key=operator.itemgetter(0), reverse=True)
-  for row in output:
-    print "%12d  %s" % row
+  output.sort(key=operator.itemgetter("Size", "Name"), reverse=True)
+  print json.dumps(output, indent=2, separators=(',',': '))
 
 if __name__ == '__main__':
   main(sys.argv)
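
With fileslist.py now emitting JSON, downstream tooling can read the records directly instead of parsing the fixed-width text; a minimal consumer might look like the following (the input path is assumed).

    import json

    # Read the JSON produced by fileslist.py and summarize it.
    with open("installed-files.json") as f:
        entries = json.load(f)

    total_bytes = sum(e["Size"] for e in entries)
    print("%d files, %d bytes total" % (len(entries), total_bytes))

    # Each record carries Size, Name and SHA256, largest files first.
    for e in entries[:5]:
        print("%12d  %s  %s" % (e["Size"], e["SHA256"][:16], e["Name"]))
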
diff --git a/tools/fileslist_util.py b/tools/fileslist_util.py
new file mode 100755
index 0000000..ff40d51
--- /dev/null
+++ b/tools/fileslist_util.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import getopt, json, sys
+
+def PrintFileNames(path):
+  with open(path) as jf:
+    data = json.load(jf)
+  for line in data:
+    print(line["Name"])
+
+def PrintCanonicalList(path):
+  with open(path) as jf:
+    data = json.load(jf)
+  for line in data:
+    print "{0:12d}  {1}".format(line["Size"], line["Name"])
+
+def PrintUsage(name):
+  print("""
+Usage: %s -[nc] json_files_list
+ -n produces list of files only
+ -c produces classic installed-files.txt
+""" % (name))
+
+def main(argv):
+  try:
+    opts, args = getopt.getopt(argv[1:], "nc", "")
+  except getopt.GetoptError, err:
+    print(err)
+    PrintUsage(argv[0])
+    sys.exit(2)
+
+  if len(opts) == 0:
+    print("No conversion option specified")
+    PrintUsage(argv[0])
+    sys.exit(2)
+
+  if len(args) == 0:
+    print("No input file specified")
+    PrintUsage(argv[0])
+    sys.exit(2)
+
+  for o, a in opts:
+    if o == ("-n"):
+      PrintFileNames(args[0])
+      sys.exit()
+    elif o == ("-c"):
+      PrintCanonicalList(args[0])
+      sys.exit()
+    else:
+      assert False, "Unsupported option"
+
+if __name__ == '__main__':
+  main(sys.argv)
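
One use the JSON form enables, beyond regenerating the classic list with -c, is comparing two builds; a hypothetical helper (function and file names invented) could flag per-file size changes.

    import json

    def diff_sizes(old_json, new_json):
        """Print files whose installed size changed between two JSON lists."""
        old = {e["Name"]: e["Size"] for e in json.load(open(old_json))}
        new = {e["Name"]: e["Size"] for e in json.load(open(new_json))}
        for name in sorted(set(old) & set(new)):
            delta = new[name] - old[name]
            if delta:
                print("%+12d  %s" % (delta, name))

    # Example (paths invented):
    # diff_sizes("installed-files-old.json", "installed-files-new.json")
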
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 0bbd8f6..5a0a411 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -19,7 +19,31 @@
 not have an IMAGES/ top-level subdirectory), produce the images and
 add them to the zipfile.
 
-Usage:  add_img_to_target_files target_files
+Usage:  add_img_to_target_files [flag] target_files
+
+  -a  (--add_missing)
+      Build and add missing images to "IMAGES/". If this option is
+      not specified, this script will simply exit when the "IMAGES/"
+      directory already exists in the target files.
+
+  -r  (--rebuild_recovery)
+      Rebuild the recovery patch and write it to the system image. Only
+      meaningful when the system image needs to be rebuilt.
+
+  --replace_verity_private_key
+      Replace the private key used for verity signing. (same as the option
+      in sign_target_files_apks)
+
+  --replace_verity_public_key
+      Replace the certificate (public key) used for verity verification. (same
+      as the option in sign_target_files_apks)
+
+  --is_signing
+      Skip building & adding the images for "userdata" and "cache" if we
+      are signing the target files.
+
+  --verity_signer_path
+      Specify the signer path to build verity metadata.
 """
 
 import sys
@@ -45,6 +69,7 @@
 OPTIONS.rebuild_recovery = False
 OPTIONS.replace_verity_public_key = False
 OPTIONS.replace_verity_private_key = False
+OPTIONS.is_signing = False
 OPTIONS.verity_signer_path = None
 
 def GetCareMap(which, imgname):
@@ -364,10 +389,11 @@
   if has_system_other:
     banner("system_other")
     AddSystemOther(output_zip)
-  banner("userdata")
-  AddUserdata(output_zip)
-  banner("cache")
-  AddCache(output_zip)
+  if not OPTIONS.is_signing:
+    banner("userdata")
+    AddUserdata(output_zip)
+    banner("cache")
+    AddCache(output_zip)
 
   # For devices using A/B update, copy over images from RADIO/ to IMAGES/ and
   # make sure we have all the needed images ready under IMAGES/.
@@ -414,6 +440,8 @@
       OPTIONS.replace_verity_private_key = (True, a)
     elif o == "--replace_verity_public_key":
       OPTIONS.replace_verity_public_key = (True, a)
+    elif o == "--is_signing":
+      OPTIONS.is_signing = True
     elif o == "--verity_signer_path":
       OPTIONS.verity_signer_path = a
     else:
@@ -425,6 +453,7 @@
       extra_long_opts=["add_missing", "rebuild_recovery",
                        "replace_verity_public_key=",
                        "replace_verity_private_key=",
+                       "is_signing",
                        "verity_signer_path="],
       extra_option_handler=option_handler)
 
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 24ecd15..fdf7271 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -109,7 +109,8 @@
   Args:
     partition_size: the size of the partition to be verified.
   Returns:
-    The size of the partition adjusted for verity metadata.
+    A tuple of the size of the partition adjusted for verity metadata, and
+    the size of verity metadata.
   """
   key = "%d %d" % (partition_size, fec_supported)
   if key in AdjustPartitionSizeForVerity.results:
@@ -121,27 +122,31 @@
 
   # verity tree and fec sizes depend on the partition size, which
   # means this estimate is always going to be unnecessarily small
-  lo = partition_size - GetVeritySize(hi, fec_supported)
+  verity_size = GetVeritySize(hi, fec_supported)
+  lo = partition_size - verity_size
   result = lo
 
   # do a binary search for the optimal size
   while lo < hi:
     i = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
-    size = i + GetVeritySize(i, fec_supported)
-    if size <= partition_size:
+    v = GetVeritySize(i, fec_supported)
+    if i + v <= partition_size:
       if result < i:
         result = i
+        verity_size = v
       lo = i + BLOCK_SIZE
     else:
       hi = i
 
-  AdjustPartitionSizeForVerity.results[key] = result
-  return result
+  AdjustPartitionSizeForVerity.results[key] = (result, verity_size)
+  return (result, verity_size)
 
 AdjustPartitionSizeForVerity.results = {}
 
-def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path):
-  cmd = "fec -e %s %s %s" % (sparse_image_path, verity_path, verity_fec_path)
+def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path,
+                   padding_size):
+  cmd = "fec -e -p %d %s %s %s" % (padding_size, sparse_image_path,
+                                   verity_path, verity_fec_path)
   print cmd
   status, output = commands.getstatusoutput(cmd)
   if status:
@@ -207,7 +212,7 @@
 
 def BuildVerifiedImage(data_image_path, verity_image_path,
                        verity_metadata_path, verity_fec_path,
-                       fec_supported):
+                       padding_size, fec_supported):
   if not Append(verity_image_path, verity_metadata_path,
                 "Could not append verity metadata!"):
     return False
@@ -215,7 +220,7 @@
   if fec_supported:
     # build FEC for the entire partition, including metadata
     if not BuildVerityFEC(data_image_path, verity_image_path,
-                          verity_fec_path):
+                          verity_fec_path, padding_size):
       return False
 
     if not Append(verity_image_path, verity_fec_path, "Could not append FEC!"):
@@ -253,7 +258,7 @@
     True on success, False otherwise.
   """
   # get properties
-  image_size = prop_dict["partition_size"]
+  image_size = int(prop_dict["partition_size"])
   block_dev = prop_dict["verity_block_device"]
   signer_key = prop_dict["verity_key"] + ".pk8"
   if OPTIONS.verity_signer_path is not None:
@@ -284,10 +289,17 @@
     return False
 
   # build the full verified image
+  target_size = int(prop_dict["original_partition_size"])
+  verity_size = int(prop_dict["verity_size"])
+
+  padding_size = target_size - image_size - verity_size
+  assert padding_size >= 0
+
   if not BuildVerifiedImage(out_file,
                             verity_image_path,
                             verity_metadata_path,
                             verity_fec_path,
+                            padding_size,
                             fec_supported):
     shutil.rmtree(tempdir_name, ignore_errors=True)
     return False
@@ -358,12 +370,13 @@
   # verified.
   if verity_supported and is_verity_partition:
     partition_size = int(prop_dict.get("partition_size"))
-    adjusted_size = AdjustPartitionSizeForVerity(partition_size,
-                                                 verity_fec_supported)
+    (adjusted_size, verity_size) = AdjustPartitionSizeForVerity(partition_size,
+                                                                verity_fec_supported)
     if not adjusted_size:
       return False
     prop_dict["partition_size"] = str(adjusted_size)
     prop_dict["original_partition_size"] = str(partition_size)
+    prop_dict["verity_size"] = str(verity_size)
 
   if fs_type.startswith("ext"):
     build_command = ["mkuserimg.sh"]
@@ -436,11 +449,11 @@
     shutil.rmtree(staging_system, ignore_errors=True)
     shutil.copytree(origin_in, staging_system, symlinks=True)
 
-  reserved_blocks = prop_dict.get("has_ext4_reserved_blocks") == "true"
+  has_reserved_blocks = prop_dict.get("has_ext4_reserved_blocks") == "true"
   ext4fs_output = None
 
   try:
-    if reserved_blocks and fs_type.startswith("ext4"):
+    if fs_type.startswith("ext4"):
       (ext4fs_output, exit_code) = RunCommand(build_command)
     else:
       (_, exit_code) = RunCommand(build_command)
@@ -461,7 +474,9 @@
   # not writable even with root privilege. It only affects devices using
   # file-based OTA and a kernel version of 3.10 or greater (currently just
   # sprout).
-  if reserved_blocks and fs_type.startswith("ext4"):
+  # Separately, check that any requested headroom is still available. This is
+  # useful for devices with low disk space whose image size varies between builds.
+  if (has_reserved_blocks or "partition_headroom" in prop_dict) and fs_type.startswith("ext4"):
     assert ext4fs_output is not None
     ext4fs_stats = re.compile(
         r'Created filesystem with .* (?P<used_blocks>[0-9]+)/'
@@ -469,14 +484,21 @@
     m = ext4fs_stats.match(ext4fs_output.strip().split('\n')[-1])
     used_blocks = int(m.groupdict().get('used_blocks'))
     total_blocks = int(m.groupdict().get('total_blocks'))
-    reserved_blocks = min(4096, int(total_blocks * 0.02))
-    adjusted_blocks = total_blocks - reserved_blocks
+    reserved_blocks = 0
+    headroom_blocks = 0
+    adjusted_blocks = total_blocks
+    if has_reserved_blocks:
+      reserved_blocks = min(4096, int(total_blocks * 0.02))
+      adjusted_blocks -= reserved_blocks
+    if "partition_headroom" in prop_dict:
+      headroom_blocks = int(prop_dict.get('partition_headroom')) / BLOCK_SIZE
+      adjusted_blocks -= headroom_blocks
     if used_blocks > adjusted_blocks:
       mount_point = prop_dict.get("mount_point")
       print("Error: Not enough room on %s (total: %d blocks, used: %d blocks, "
-            "reserved: %d blocks, available: %d blocks)" % (
+            "reserved: %d blocks, headroom: %d blocks, available: %d blocks)" % (
                 mount_point, total_blocks, used_blocks, reserved_blocks,
-                adjusted_blocks))
+                headroom_blocks, adjusted_blocks))
       return False
 
   if not fs_spans_partition:
@@ -544,9 +566,10 @@
   d["mount_point"] = mount_point
   if mount_point == "system":
     copy_prop("fs_type", "fs_type")
-    # Copy the generic sysetem fs type first, override with specific one if
+    # Copy the generic system fs type first, override with specific one if
     # available.
     copy_prop("system_fs_type", "fs_type")
+    copy_prop("system_headroom", "partition_headroom")
     copy_prop("system_size", "partition_size")
     copy_prop("system_journal_size", "journal_size")
     copy_prop("system_verity_block_device", "verity_block_device")
diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py
index 2ecc5cb..73131a6 100644
--- a/tools/releasetools/edify_generator.py
+++ b/tools/releasetools/edify_generator.py
@@ -77,26 +77,28 @@
     with temporary=True) to this one."""
     self.script.extend(other.script)
 
-  def AssertOemProperty(self, name, value):
-    """Assert that a property on the OEM paritition matches a value."""
+  def AssertOemProperty(self, name, values):
+    """Assert that a property on the OEM paritition matches allowed values."""
     if not name:
       raise ValueError("must specify an OEM property")
-    if not value:
+    if not values:
       raise ValueError("must specify the OEM value")
+    get_prop_command = None
     if common.OPTIONS.oem_no_mount:
-      cmd = ('getprop("{name}") == "{value}" || '
-             'abort("E{code}: This package expects the value \\"{value}\\" for '
-             '\\"{name}\\"; this has value \\"" + '
-             'getprop("{name}") + "\\".");').format(
-                 code=common.ErrorCode.OEM_PROP_MISMATCH,
-                 name=name, value=value)
+      get_prop_command = 'getprop("%s")' % name
     else:
-      cmd = ('file_getprop("/oem/oem.prop", "{name}") == "{value}" || '
-             'abort("E{code}: This package expects the value \\"{value}\\" for '
-             '\\"{name}\\" on the OEM partition; this has value \\"" + '
-             'file_getprop("/oem/oem.prop", "{name}") + "\\".");').format(
-                 code=common.ErrorCode.OEM_PROP_MISMATCH,
-                 name=name, value=value)
+      get_prop_command = 'file_getprop("/oem/oem.prop", "%s")' % name
+
+    cmd = ''
+    for value in values:
+      cmd += '%s == "%s" || ' % (get_prop_command, value)
+    cmd += (
+        'abort("E{code}: This package expects the value \\"{values}\\" for '
+        '\\"{name}\\"; this has value \\"" + '
+        '{get_prop_command} + "\\".");').format(
+            code=common.ErrorCode.OEM_PROP_MISMATCH,
+            get_prop_command=get_prop_command, name=name,
+            values='\\" or \\"'.join(values))
     self.script.append(cmd)
 
   def AssertSomeFingerprint(self, *fp):
@@ -121,6 +123,17 @@
                common.ErrorCode.THUMBPRINT_MISMATCH, " or ".join(fp))
     self.script.append(cmd)
 
+  def AssertFingerprintOrThumbprint(self, fp, tp):
+    """Assert that the current recovery build fingerprint is fp, or thumbprint
+       is tp."""
+    cmd = ('getprop("ro.build.fingerprint") == "{fp}" ||\n'
+           '    getprop("ro.build.thumbprint") == "{tp}" ||\n'
+           '    abort("Package expects build fingerprint of {fp} or '
+           'thumbprint of {tp}; this device has a fingerprint of " '
+           '+ getprop("ro.build.fingerprint") + " and a thumbprint of " '
+           '+ getprop("ro.build.thumbprint") + ".");').format(fp=fp, tp=tp)
+    self.script.append(cmd)
+
   def AssertOlderBuild(self, timestamp, timestamp_text):
     """Assert that the build on the device is older (or the same as)
     the given timestamp."""
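
For a feel of what the multi-value OEM assertion looks like, here is a standalone sketch of the chained check AssertOemProperty now builds; it does not import the releasetools modules, and the property name, values, and abort message are simplified stand-ins.

    def oem_assertion(name, values, oem_no_mount=True):
        """Build an edify snippet accepting any of the allowed property values."""
        if oem_no_mount:
            get_prop = 'getprop("%s")' % name
        else:
            get_prop = 'file_getprop("/oem/oem.prop", "%s")' % name
        checks = ''.join('%s == "%s" || ' % (get_prop, v) for v in values)
        return checks + 'abort("unexpected value for \\"%s\\"");' % name

    # Invented property and values:
    print(oem_assertion("ro.oem.device", ["sailfish", "marlin"]))
    # getprop("ro.oem.device") == "sailfish" || getprop("ro.oem.device") == "marlin" || abort(...)
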
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 24b42ee..199e700 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -50,9 +50,11 @@
       Remount and verify the checksums of the files written to the
       system and vendor (if used) partitions.  Incremental builds only.
 
-  -o  (--oem_settings)  <file>
-      Use the file to specify the expected OEM-specific properties
-      on the OEM partition of the intended device.
+  -o  (--oem_settings)  <main_file[,additional_files...]>
+      Comma-separated list of files used to specify the expected OEM-specific
+      properties on the OEM partition of the intended device.
+      Multiple expected values can be used by providing multiple files.
+
 
   --oem_no_mount
       For devices with OEM-specific properties but without an OEM partition,
@@ -464,20 +466,38 @@
                   whole_file=True)
 
 
-def AppendAssertions(script, info_dict, oem_dict=None):
+def AppendAssertions(script, info_dict, oem_dicts=None):
   oem_props = info_dict.get("oem_fingerprint_properties")
-  if oem_props is None or len(oem_props) == 0:
+  if not oem_props:
     device = GetBuildProp("ro.product.device", info_dict)
     script.AssertDevice(device)
   else:
-    if oem_dict is None:
+    if not oem_dicts:
       raise common.ExternalError(
           "No OEM file provided to answer expected assertions")
     for prop in oem_props.split():
-      if oem_dict.get(prop) is None:
+      values = []
+      for oem_dict in oem_dicts:
+        if oem_dict.get(prop):
+          values.append(oem_dict[prop])
+      if not values:
         raise common.ExternalError(
             "The OEM file is missing the property %s" % prop)
-      script.AssertOemProperty(prop, oem_dict.get(prop))
+      script.AssertOemProperty(prop, values)
+
+
+def _LoadOemDicts(script, recovery_mount_options=None):
+  """Returns the list of loaded OEM properties dict."""
+  oem_dicts = None
+  if OPTIONS.oem_source is None:
+    raise common.ExternalError("OEM source required for this build")
+  if script and not OPTIONS.oem_no_mount:
+    script.Mount("/oem", recovery_mount_options)
+  oem_dicts = []
+  for oem_file in OPTIONS.oem_source:
+    oem_dicts.append(common.LoadDictionaryFromLines(
+        open(oem_file).readlines()))
+  return oem_dicts
 
 
 def _WriteRecoveryImageToBoot(script, output_zip):
@@ -590,19 +610,15 @@
 
   oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
   recovery_mount_options = OPTIONS.info_dict.get("recovery_mount_options")
-  oem_dict = None
-  if oem_props is not None and len(oem_props) > 0:
-    if OPTIONS.oem_source is None:
-      raise common.ExternalError("OEM source required for this build")
-    if not OPTIONS.oem_no_mount:
-      script.Mount("/oem", recovery_mount_options)
-    oem_dict = common.LoadDictionaryFromLines(
-        open(OPTIONS.oem_source).readlines())
+  oem_dicts = None
+  if oem_props:
+    oem_dicts = _LoadOemDicts(script, recovery_mount_options)
 
   metadata = {
-      "post-build": CalculateFingerprint(oem_props, oem_dict,
+      "post-build": CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
                                          OPTIONS.info_dict),
-      "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
+      "pre-device": GetOemProperty("ro.product.device", oem_props,
+                                   oem_dicts and oem_dicts[0],
                                    OPTIONS.info_dict),
       "post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict),
   }
@@ -626,7 +642,7 @@
     ts_text = GetBuildProp("ro.build.date", OPTIONS.info_dict)
     script.AssertOlderBuild(ts, ts_text)
 
-  AppendAssertions(script, OPTIONS.info_dict, oem_dict)
+  AppendAssertions(script, OPTIONS.info_dict, oem_dicts)
   device_specific.FullOTA_Assertions()
 
   # Two-step package strategy (in chronological order, which is *not*
@@ -677,7 +693,7 @@
 
   # Dump fingerprints
   script.Print("Target: %s" % CalculateFingerprint(
-      oem_props, oem_dict, OPTIONS.info_dict))
+      oem_props, oem_dicts and oem_dicts[0], OPTIONS.info_dict))
 
   device_specific.FullOTA_InstallBegin()
 
@@ -876,20 +892,17 @@
       source_version, OPTIONS.target_info_dict,
       fstab=OPTIONS.source_info_dict["fstab"])
 
-  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
   recovery_mount_options = OPTIONS.source_info_dict.get(
       "recovery_mount_options")
-  oem_dict = None
-  if oem_props is not None and len(oem_props) > 0:
-    if OPTIONS.oem_source is None:
-      raise common.ExternalError("OEM source required for this build")
-    if not OPTIONS.oem_no_mount:
-      script.Mount("/oem", recovery_mount_options)
-    oem_dict = common.LoadDictionaryFromLines(
-        open(OPTIONS.oem_source).readlines())
+  source_oem_props = OPTIONS.source_info_dict.get("oem_fingerprint_properties")
+  target_oem_props = OPTIONS.target_info_dict.get("oem_fingerprint_properties")
+  oem_dicts = None
+  if source_oem_props or target_oem_props:
+    oem_dicts = _LoadOemDicts(script, recovery_mount_options)
 
   metadata = {
-      "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
+      "pre-device": GetOemProperty("ro.product.device", source_oem_props,
+                                   oem_dicts and oem_dicts[0],
                                    OPTIONS.source_info_dict),
       "ota-type": "BLOCK",
   }
@@ -906,9 +919,9 @@
       metadata=metadata,
       info_dict=OPTIONS.source_info_dict)
 
-  source_fp = CalculateFingerprint(oem_props, oem_dict,
+  source_fp = CalculateFingerprint(source_oem_props, oem_dicts and oem_dicts[0],
                                    OPTIONS.source_info_dict)
-  target_fp = CalculateFingerprint(oem_props, oem_dict,
+  target_fp = CalculateFingerprint(target_oem_props, oem_dicts and oem_dicts[0],
                                    OPTIONS.target_info_dict)
   metadata["pre-build"] = source_fp
   metadata["post-build"] = target_fp
@@ -973,7 +986,7 @@
   else:
     vendor_diff = None
 
-  AppendAssertions(script, OPTIONS.target_info_dict, oem_dict)
+  AppendAssertions(script, OPTIONS.target_info_dict, oem_dicts)
   device_specific.IncrementalOTA_Assertions()
 
   # Two-step incremental package strategy (in chronological order,
@@ -1024,32 +1037,39 @@
     script.Comment("Stage 1/3")
 
   # Dump fingerprints
-  script.Print("Source: %s" % CalculateFingerprint(
-      oem_props, oem_dict, OPTIONS.source_info_dict))
-  script.Print("Target: %s" % CalculateFingerprint(
-      oem_props, oem_dict, OPTIONS.target_info_dict))
+  script.Print("Source: %s" % (source_fp,))
+  script.Print("Target: %s" % (target_fp,))
 
   script.Print("Verifying current system...")
 
   device_specific.IncrementalOTA_VerifyBegin()
 
-  if oem_props is None:
-    # When blockimgdiff version is less than 3 (non-resumable block-based OTA),
-    # patching on a device that's already on the target build will damage the
-    # system. Because operations like move don't check the block state, they
-    # always apply the changes unconditionally.
-    if blockimgdiff_version <= 2:
+  # When blockimgdiff version is less than 3 (non-resumable block-based OTA),
+  # patching on a device that's already on the target build will damage the
+  # system. Because operations like move don't check the block state, they
+  # always apply the changes unconditionally.
+  if blockimgdiff_version <= 2:
+    if source_oem_props is None:
       script.AssertSomeFingerprint(source_fp)
     else:
-      script.AssertSomeFingerprint(source_fp, target_fp)
-  else:
-    if blockimgdiff_version <= 2:
       script.AssertSomeThumbprint(
           GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
-    else:
+
+  else: # blockimgdiff_version > 2
+    if source_oem_props is None and target_oem_props is None:
+      script.AssertSomeFingerprint(source_fp, target_fp)
+    elif source_oem_props is not None and target_oem_props is not None:
       script.AssertSomeThumbprint(
           GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict),
           GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
+    elif source_oem_props is None and target_oem_props is not None:
+      script.AssertFingerprintOrThumbprint(
+          source_fp,
+          GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict))
+    else:
+      script.AssertFingerprintOrThumbprint(
+          target_fp,
+          GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
 
   # Check the required cache size (i.e. stashed blocks).
   size = []
@@ -1176,18 +1196,16 @@
   oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
   recovery_mount_options = OPTIONS.info_dict.get(
       "recovery_mount_options")
-  oem_dict = None
-  if oem_props is not None and len(oem_props) > 0:
-    if OPTIONS.oem_source is None:
-      raise common.ExternalError("OEM source required for this build")
-    script.Mount("/oem", recovery_mount_options)
-    oem_dict = common.LoadDictionaryFromLines(
-        open(OPTIONS.oem_source).readlines())
+  oem_dicts = None
+  if oem_props:
+    oem_dicts = _LoadOemDicts(script, recovery_mount_options)
 
-  target_fp = CalculateFingerprint(oem_props, oem_dict, OPTIONS.info_dict)
+  target_fp = CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
+                                   OPTIONS.info_dict)
   metadata = {
       "post-build": target_fp,
-      "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
+      "pre-device": GetOemProperty("ro.product.device", oem_props,
+                                   oem_dicts and oem_dicts[0],
                                    OPTIONS.info_dict),
       "post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict),
   }
@@ -1201,7 +1219,7 @@
       metadata=metadata,
       info_dict=OPTIONS.info_dict)
 
-  AppendAssertions(script, OPTIONS.info_dict, oem_dict)
+  AppendAssertions(script, OPTIONS.info_dict, oem_dicts)
 
   script.Print("Verifying device images against %s..." % target_fp)
   script.AppendExtra("")
@@ -1273,26 +1291,25 @@
 
   # Metadata to comply with Android OTA package format.
   oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties", None)
-  oem_dict = None
+  oem_dicts = None
   if oem_props:
-    if OPTIONS.oem_source is None:
-      raise common.ExternalError("OEM source required for this build")
-    oem_dict = common.LoadDictionaryFromLines(
-        open(OPTIONS.oem_source).readlines())
+    oem_dicts = _LoadOemDicts(None)
 
   metadata = {
-      "post-build": CalculateFingerprint(oem_props, oem_dict,
+      "post-build": CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
                                          OPTIONS.info_dict),
       "post-build-incremental" : GetBuildProp("ro.build.version.incremental",
                                               OPTIONS.info_dict),
-      "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
+      "pre-device": GetOemProperty("ro.product.device", oem_props,
+                                   oem_dicts and oem_dicts[0],
                                    OPTIONS.info_dict),
       "ota-required-cache": "0",
       "ota-type": "AB",
   }
 
   if source_file is not None:
-    metadata["pre-build"] = CalculateFingerprint(oem_props, oem_dict,
+    metadata["pre-build"] = CalculateFingerprint(oem_props,
+                                                 oem_dicts and oem_dicts[0],
                                                  OPTIONS.source_info_dict)
     metadata["pre-build-incremental"] = GetBuildProp(
         "ro.build.version.incremental", OPTIONS.source_info_dict)
@@ -1562,20 +1579,17 @@
       source_version, OPTIONS.target_info_dict,
       fstab=OPTIONS.source_info_dict["fstab"])
 
-  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
   recovery_mount_options = OPTIONS.source_info_dict.get(
       "recovery_mount_options")
-  oem_dict = None
-  if oem_props is not None and len(oem_props) > 0:
-    if OPTIONS.oem_source is None:
-      raise common.ExternalError("OEM source required for this build")
-    if not OPTIONS.oem_no_mount:
-      script.Mount("/oem", recovery_mount_options)
-    oem_dict = common.LoadDictionaryFromLines(
-        open(OPTIONS.oem_source).readlines())
+  source_oem_props = OPTIONS.source_info_dict.get("oem_fingerprint_properties")
+  target_oem_props = OPTIONS.target_info_dict.get("oem_fingerprint_properties")
+  oem_dicts = None
+  if source_oem_props or target_oem_props:
+    oem_dicts = _LoadOemDicts(script, recovery_mount_options)
 
   metadata = {
-      "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
+      "pre-device": GetOemProperty("ro.product.device", source_oem_props,
+                                   oem_dicts and oem_dicts[0],
                                    OPTIONS.source_info_dict),
       "ota-type": "FILE",
   }
@@ -1600,17 +1614,25 @@
   else:
     vendor_diff = None
 
-  target_fp = CalculateFingerprint(oem_props, oem_dict,
+  target_fp = CalculateFingerprint(target_oem_props, oem_dicts and oem_dicts[0],
                                    OPTIONS.target_info_dict)
-  source_fp = CalculateFingerprint(oem_props, oem_dict,
+  source_fp = CalculateFingerprint(source_oem_props, oem_dicts and oem_dicts[0],
                                    OPTIONS.source_info_dict)
 
-  if oem_props is None:
+  if source_oem_props is None and target_oem_props is None:
     script.AssertSomeFingerprint(source_fp, target_fp)
-  else:
+  elif source_oem_props is not None and target_oem_props is not None:
     script.AssertSomeThumbprint(
         GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict),
         GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
+  elif source_oem_props is None and target_oem_props is not None:
+    script.AssertFingerprintOrThumbprint(
+        source_fp,
+        GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict))
+  else:
+    script.AssertFingerprintOrThumbprint(
+        target_fp,
+        GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
 
   metadata["pre-build"] = source_fp
   metadata["post-build"] = target_fp
@@ -1640,7 +1662,7 @@
   #  0.1 for unpacking verbatim files, symlinking, and doing the
   #      device-specific commands.
 
-  AppendAssertions(script, OPTIONS.target_info_dict, oem_dict)
+  AppendAssertions(script, OPTIONS.target_info_dict, oem_dicts)
   device_specific.IncrementalOTA_Assertions()
 
   # Two-step incremental package strategy (in chronological order,
@@ -1980,7 +2002,7 @@
     elif o == "--override_timestamp":
       OPTIONS.timestamp = True
     elif o in ("-o", "--oem_settings"):
-      OPTIONS.oem_source = a
+      OPTIONS.oem_source = a.split(',')
     elif o == "--oem_no_mount":
       OPTIONS.oem_no_mount = True
     elif o in ("-e", "--extra_script"):
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 1677a44..52b526c 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -713,7 +713,9 @@
   common.ZipClose(input_zip)
   common.ZipClose(output_zip)
 
-  add_img_to_target_files.AddImagesToTargetFiles(args[1])
+  # Skip building userdata.img and cache.img when signing the target files.
+  new_args = ["--is_signing", args[1]]
+  add_img_to_target_files.main(new_args)
 
   print "done."