Merge "Add libc_malloc_debug to the always build list."
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 64d84e3..19cb651 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -371,6 +371,9 @@
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+# $(PRODUCT_OUT)/recovery/root/sdcard goes from symlink to folder.
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/recovery/root/sdcard)
+
# ************************************************
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
# ************************************************
diff --git a/core/Makefile b/core/Makefile
index 18e01f9..f64dc28 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -517,6 +517,15 @@
INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
+# BOARD_USES_RECOVERY_AS_BOOT = true requires BOARD_BUILD_SYSTEM_ROOT_IMAGE = true.
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
+ $(error BOARD_BUILD_SYSTEM_ROOT_IMAGE must be enabled for BOARD_USES_RECOVERY_AS_BOOT.)
+endif
+endif
+
+# We build recovery as the boot image if BOARD_USES_RECOVERY_AS_BOOT is true.
+ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
ifeq ($(TARGET_BOOTIMAGE_USE_EXT2),true)
$(error TARGET_BOOTIMAGE_USE_EXT2 is not supported anymore)
else ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_BOOT_SIGNER)) # TARGET_BOOTIMAGE_USE_EXT2 != true
@@ -563,6 +572,7 @@
$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
endif # TARGET_BOOTIMAGE_USE_EXT2
+endif # BOARD_USES_RECOVERY_AS_BOOT
else # TARGET_NO_KERNEL
# HACK: The top-level targets depend on the bootimage. Not all targets
@@ -685,6 +695,19 @@
$(hide) zip -qjX $@ $<
$(remove-timestamps-from-package)
+# Carry the public key for update_engine if this is a non-Brillo target that
+# uses the A/B updater. We use the same key as otacerts, but in RSA public
+# key format.
+ifeq ($(AB_OTA_UPDATER),true)
+ifeq ($(BRILLO),)
+ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem
+$(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem: $(addsuffix .x509.pem,$(DEFAULT_KEY_CERT_PAIR))
+ $(hide) rm -f $@
+ $(hide) mkdir -p $(dir $@)
+ $(hide) openssl x509 -pubkey -noout -in $< > $@
+endif
+endif
+
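A quick sanity check for the generated key (illustrative only; the path assumes
the default $(PRODUCT_OUT)/system layout that $(TARGET_OUT_ETC) points into):

    openssl rsa -pubin -noout -text \
        -in $(PRODUCT_OUT)/system/etc/update_engine/update-payload-key.pub.pem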
.PHONY: otacerts
otacerts: $(TARGET_OUT_ETC)/security/otacerts.zip
@@ -781,6 +804,8 @@
$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_subkey=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_SUBKEY)" >> $(1))
$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "futility=$(FUTILITY)" >> $(1))
$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_signer_cmd=$(VBOOT_SIGNER)" >> $(1))
+$(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
+ $(hide) echo "recovery_as_boot=true" >> $(1))
$(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)),\
$(hide) echo "system_root_image=true" >> $(1);\
echo "ramdisk_dir=$(TARGET_ROOT_OUT)" >> $(1))
@@ -790,7 +815,8 @@
# -----------------------------------------------------------------
# Recovery image
-ifdef INSTALLED_RECOVERYIMAGE_TARGET
+# Recovery image exists if we are building recovery, or building recovery as boot.
+ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
INTERNAL_RECOVERYIMAGE_FILES := $(filter $(TARGET_RECOVERY_OUT)/%, \
$(ALL_DEFAULT_INSTALLED_MODULES))
@@ -840,12 +866,22 @@
recovery_fstab := $(strip $(wildcard $(TARGET_DEVICE_DIR)/recovery.fstab))
endif
-ifneq ($(BOARD_USES_FULL_RECOVERY_IMAGE),true)
+# Prior to A/B update, we used to have:
+# boot.img + recovery-from-boot.p + recovery-resource.dat = recovery.img.
+# recovery-resource.dat is needed only if we carry a patch of the boot and
+# recovery images and invoke install-recovery.sh on the first boot after an
+# OTA update.
+#
+# We no longer need that if one of the following conditions holds:
+# a) We carry a full copy of the recovery image
+# (BOARD_USES_FULL_RECOVERY_IMAGE = true);
+# b) We build a single image that contains boot and recovery both
+# (BOARD_USES_RECOVERY_AS_BOOT = true).
+
+ifeq (,$(filter true, $(BOARD_USES_FULL_RECOVERY_IMAGE) $(BOARD_USES_RECOVERY_AS_BOOT)))
# Named '.dat' so we don't attempt to use imgdiff for patching it.
RECOVERY_RESOURCE_ZIP := $(TARGET_OUT)/etc/recovery-resource.dat
else
-# We carry a full copy of the recovery image. recovery-resource.dat is no
-# longer needed.
RECOVERY_RESOURCE_ZIP :=
endif
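The $(filter true, ...) form above is make's idiom for OR-ing boolean flags:
the filter expands non-empty if any listed variable equals "true". A minimal
sketch with hypothetical flags FLAG_A and FLAG_B:

    ifeq (,$(filter true, $(FLAG_A) $(FLAG_B)))
    # Neither flag is true.
    else
    # At least one flag is true.
    endif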
@@ -897,9 +933,9 @@
define build-recoveryimage-target
@echo ----- Making recovery image ------
$(hide) mkdir -p $(TARGET_RECOVERY_OUT)
- $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/etc $(TARGET_RECOVERY_ROOT_OUT)/tmp
+ $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/etc $(TARGET_RECOVERY_ROOT_OUT)/sdcard $(TARGET_RECOVERY_ROOT_OUT)/tmp
@echo Copying baseline ramdisk...
- $(hide) rsync -a --exclude=etc $(TARGET_ROOT_OUT) $(TARGET_RECOVERY_OUT) # "cp -Rf" fails to overwrite broken symlinks on Mac.
+ $(hide) rsync -a --exclude=etc --exclude=sdcard $(TARGET_ROOT_OUT) $(TARGET_RECOVERY_OUT) # "cp -Rf" fails to overwrite broken symlinks on Mac.
@echo Modifying ramdisk contents...
$(hide) rm -f $(TARGET_RECOVERY_ROOT_OUT)/init*.rc
$(hide) cp -f $(recovery_initrc) $(TARGET_RECOVERY_ROOT_OUT)/
@@ -929,10 +965,25 @@
$(BOOT_SIGNER) /recovery $(1) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1))
$(if $(filter true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT)), \
$(VBOOT_SIGNER) $(FUTILITY) $(1).unsigned $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(1).keyblock $(1))
- $(hide) $(call assert-max-image-size,$(1),$(BOARD_RECOVERYIMAGE_PARTITION_SIZE))
+	$(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)), \
+ $(hide) $(call assert-max-image-size,$(1),$(BOARD_BOOTIMAGE_PARTITION_SIZE)), \
+ $(hide) $(call assert-max-image-size,$(1),$(BOARD_RECOVERYIMAGE_PARTITION_SIZE)))
@echo ----- Made recovery image: $(1) --------
endef
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) \
+ $(INSTALLED_RAMDISK_TARGET) \
+ $(INTERNAL_RECOVERYIMAGE_FILES) \
+ $(recovery_initrc) $(recovery_sepolicy) $(recovery_kernel) \
+ $(INSTALLED_2NDBOOTLOADER_TARGET) \
+ $(recovery_build_prop) $(recovery_resource_deps) \
+ $(recovery_fstab) \
+ $(RECOVERY_INSTALL_OTA_KEYS)
+ $(call pretty,"Target boot image from recovery: $@")
+ $(call build-recoveryimage-target, $@)
+endif
+
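For context, a board opting in to the combined boot/recovery image would set
both flags in its BoardConfig.mk; a hypothetical sketch, not part of this
change:

    BOARD_USES_RECOVERY_AS_BOOT := true
    # Required by the guard added near the top of this file.
    BOARD_BUILD_SYSTEM_ROOT_IMAGE := true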
$(INSTALLED_RECOVERYIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) \
$(INSTALLED_RAMDISK_TARGET) \
$(INSTALLED_BOOTIMAGE_TARGET) \
@@ -944,7 +995,7 @@
$(RECOVERY_INSTALL_OTA_KEYS)
$(call build-recoveryimage-target, $@)
-ifneq ($(BOARD_USES_FULL_RECOVERY_IMAGE),true)
+ifdef RECOVERY_RESOURCE_ZIP
$(RECOVERY_RESOURCE_ZIP): $(INSTALLED_RECOVERYIMAGE_TARGET) | $(ZIPTIME)
$(hide) mkdir -p $(dir $@)
$(hide) find $(TARGET_RECOVERY_ROOT_OUT)/res -type f | sort | zip -0qrjX $@ -@
@@ -1386,7 +1437,10 @@
$(HOST_OUT_EXECUTABLES)/append2simg \
$(HOST_OUT_EXECUTABLES)/img2simg \
$(HOST_OUT_EXECUTABLES)/boot_signer \
- $(HOST_OUT_EXECUTABLES)/fec
+ $(HOST_OUT_EXECUTABLES)/fec \
+ $(HOST_OUT_EXECUTABLES)/brillo_update_payload \
+ $(HOST_OUT_EXECUTABLES)/lib/shflags/shflags \
+ $(HOST_OUT_EXECUTABLES)/delta_generator
# Shared libraries.
OTATOOLS += \
@@ -1404,7 +1458,16 @@
$(HOST_LIBRARY_PATH)/libext2_profile-host$(HOST_SHLIB_SUFFIX) \
$(HOST_LIBRARY_PATH)/libext2_quota-host$(HOST_SHLIB_SUFFIX) \
$(HOST_LIBRARY_PATH)/libext2_uuid-host$(HOST_SHLIB_SUFFIX) \
- $(HOST_LIBRARY_PATH)/libconscrypt_openjdk_jni$(HOST_SHLIB_SUFFIX)
+ $(HOST_LIBRARY_PATH)/libconscrypt_openjdk_jni$(HOST_SHLIB_SUFFIX) \
+ $(HOST_LIBRARY_PATH)/libbrillo$(HOST_SHLIB_SUFFIX) \
+ $(HOST_LIBRARY_PATH)/libbrillo-stream$(HOST_SHLIB_SUFFIX) \
+ $(HOST_LIBRARY_PATH)/libbrillo-http$(HOST_SHLIB_SUFFIX) \
+ $(HOST_LIBRARY_PATH)/libchrome$(HOST_SHLIB_SUFFIX) \
+ $(HOST_LIBRARY_PATH)/libcurl-host$(HOST_SHLIB_SUFFIX) \
+ $(HOST_LIBRARY_PATH)/libevent-host$(HOST_SHLIB_SUFFIX) \
+ $(HOST_LIBRARY_PATH)/libprotobuf-cpp-lite-rtti$(HOST_SHLIB_SUFFIX) \
+ $(HOST_LIBRARY_PATH)/libssl-host$(HOST_SHLIB_SUFFIX) \
+ $(HOST_LIBRARY_PATH)/libz-host$(HOST_SHLIB_SUFFIX)
.PHONY: otatools
otatools: $(OTATOOLS)
@@ -1482,6 +1545,13 @@
$(BUILT_TARGET_FILES_PACKAGE): $(built_ota_tools)
endif
+# If we are using recovery as boot, output recovery files to BOOT/.
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := BOOT
+else
+$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := RECOVERY
+endif
+
# Depending on the various images guarantees that the underlying
# directories are up-to-date.
$(BUILT_TARGET_FILES_PACKAGE): \
@@ -1500,28 +1570,28 @@
@echo "Package target files: $@"
$(hide) rm -rf $@ $(zip_root)
$(hide) mkdir -p $(dir $@) $(zip_root)
-ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
+ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
@# Components of the recovery image
- $(hide) mkdir -p $(zip_root)/RECOVERY
+ $(hide) mkdir -p $(zip_root)/$(PRIVATE_RECOVERY_OUT)
$(hide) $(call package_files-copy-root, \
- $(TARGET_RECOVERY_ROOT_OUT),$(zip_root)/RECOVERY/RAMDISK)
+ $(TARGET_RECOVERY_ROOT_OUT),$(zip_root)/$(PRIVATE_RECOVERY_OUT)/RAMDISK)
ifdef INSTALLED_KERNEL_TARGET
- $(hide) $(ACP) $(INSTALLED_KERNEL_TARGET) $(zip_root)/RECOVERY/kernel
+ $(hide) $(ACP) $(INSTALLED_KERNEL_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/kernel
endif
ifdef INSTALLED_2NDBOOTLOADER_TARGET
$(hide) $(ACP) \
- $(INSTALLED_2NDBOOTLOADER_TARGET) $(zip_root)/RECOVERY/second
+ $(INSTALLED_2NDBOOTLOADER_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/second
endif
ifdef BOARD_KERNEL_CMDLINE
- $(hide) echo "$(BOARD_KERNEL_CMDLINE)" > $(zip_root)/RECOVERY/cmdline
+ $(hide) echo "$(BOARD_KERNEL_CMDLINE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/cmdline
endif
ifdef BOARD_KERNEL_BASE
- $(hide) echo "$(BOARD_KERNEL_BASE)" > $(zip_root)/RECOVERY/base
+ $(hide) echo "$(BOARD_KERNEL_BASE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/base
endif
ifdef BOARD_KERNEL_PAGESIZE
- $(hide) echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/RECOVERY/pagesize
+ $(hide) echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/pagesize
endif
-endif
+endif # INSTALLED_RECOVERYIMAGE_TARGET defined or BOARD_USES_RECOVERY_AS_BOOT is true
@# Components of the boot image
$(hide) mkdir -p $(zip_root)/BOOT
ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
@@ -1532,6 +1602,8 @@
$(hide) $(call package_files-copy-root, \
$(TARGET_ROOT_OUT),$(zip_root)/BOOT/RAMDISK)
endif
+ @# If we are using recovery as boot, this is already done when processing recovery.
+ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
ifdef INSTALLED_KERNEL_TARGET
$(hide) $(ACP) $(INSTALLED_KERNEL_TARGET) $(zip_root)/BOOT/kernel
endif
@@ -1548,6 +1620,7 @@
ifdef BOARD_KERNEL_PAGESIZE
$(hide) echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/BOOT/pagesize
endif
+endif # BOARD_USES_RECOVERY_AS_BOOT
$(hide) $(foreach t,$(INSTALLED_RADIOIMAGE_TARGET),\
mkdir -p $(zip_root)/RADIO; \
$(ACP) $(t) $(zip_root)/RADIO/$(notdir $(t));)
@@ -1586,6 +1659,9 @@
ifdef BOARD_BOOTIMAGE_PARTITION_SIZE
$(hide) echo "boot_size=$(BOARD_BOOTIMAGE_PARTITION_SIZE)" >> $(zip_root)/META/misc_info.txt
endif
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+ $(hide) echo "recovery_as_boot=$(BOARD_USES_RECOVERY_AS_BOOT)" >> $(zip_root)/META/misc_info.txt
+endif
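With this guard, a recovery-as-boot target's META/misc_info.txt gains a line
the releasetools scripts can key off of (illustrative):

    recovery_as_boot=true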
ifeq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
$(hide) echo "no_recovery=true" >> $(zip_root)/META/misc_info.txt
endif
@@ -1638,6 +1714,7 @@
done
@# Include the build type in META/misc_info.txt so the server can easily differentiate production builds.
$(hide) echo "build_type=$(TARGET_BUILD_VARIANT)" >> $(zip_root)/META/misc_info.txt
+ $(hide) echo "ab_update=true" >> $(zip_root)/META/misc_info.txt
ifdef OSRELEASED_DIRECTORY
$(hide) $(ACP) $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/product_id $(zip_root)/META/product_id.txt
$(hide) $(ACP) $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/product_version $(zip_root)/META/product_version.txt
@@ -1655,9 +1732,8 @@
$(hide) zipinfo -1 $@ | awk 'BEGIN { FS="VENDOR/" } /^VENDOR\// {print "vendor/" $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC) > $(zip_root)/META/vendor_filesystem_config.txt
ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
$(hide) zipinfo -1 $@ | awk 'BEGIN { FS="ROOT/" } /^ROOT\// {print $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC) > $(zip_root)/META/root_filesystem_config.txt
-else
- $(hide) zipinfo -1 $@ | awk 'BEGIN { FS="BOOT/RAMDISK/" } /^BOOT\/RAMDISK\// {print $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC) > $(zip_root)/META/boot_filesystem_config.txt
endif
+ $(hide) zipinfo -1 $@ | awk 'BEGIN { FS="BOOT/RAMDISK/" } /^BOOT\/RAMDISK\// {print $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC) > $(zip_root)/META/boot_filesystem_config.txt
ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
$(hide) zipinfo -1 $@ | awk 'BEGIN { FS="RECOVERY/RAMDISK/" } /^RECOVERY\/RAMDISK\// {print $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC) > $(zip_root)/META/recovery_filesystem_config.txt
endif
diff --git a/core/binary.mk b/core/binary.mk
index 4cb62b3..af8d747 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -449,7 +449,6 @@
ifeq ($(LOCAL_CPP_EXTENSION),)
LOCAL_CPP_EXTENSION := .cpp
endif
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CPP_EXTENSION := $(LOCAL_CPP_EXTENSION)
# Certain modules like libdl have to have symbols resolved at runtime and blow
# up if --no-undefined is passed to the linker.
@@ -734,24 +733,26 @@
aidl_gen_cpp :=
ifneq ($(aidl_src),)
+# Use the intermediates directory to avoid writing our own .cpp -> .o rules.
aidl_gen_cpp_root := $(intermediates)/aidl-generated/src
aidl_gen_include_root := $(intermediates)/aidl-generated/include
-aidl_gen_cpp := $(patsubst %.aidl,%$(LOCAL_CPP_EXTENSION),$(aidl_src))
-aidl_gen_cpp := $(addprefix $(aidl_gen_cpp_root)/,$(aidl_gen_cpp))
+# Multi-architecture builds have distinct intermediates directories.
+# Thus we'll actually generate source for each architecture.
+$(foreach s,$(aidl_src),\
+ $(eval $(call define-aidl-cpp-rule,$(s),$(aidl_gen_cpp_root),aidl_gen_cpp)))
+ifeq ($(BUILDING_WITH_NINJA),true)
+$(foreach cpp,$(aidl_gen_cpp), \
+ $(eval $(cpp) : .KATI_DEPFILE := $(addsuffix .aidl.P,$(basename $(cpp)))))
+else
+ -include $(addsuffix .aidl.P,$(basename $(aidl_gen_cpp)))
+endif
-# TODO(wiley): we could pass down a flag here to only generate the server or
-# client side of the binder interface.
+
$(aidl_gen_cpp) : PRIVATE_MODULE := $(LOCAL_MODULE)
$(aidl_gen_cpp) : PRIVATE_HEADER_OUTPUT_DIR := $(aidl_gen_include_root)
$(aidl_gen_cpp) : PRIVATE_AIDL_FLAGS := $(addprefix -I,$(LOCAL_AIDL_INCLUDES))
-# Multi-architecture builds have distinct intermediates directories.
-# Define rules for both architectures.
-$(aidl_gen_cpp) : $(aidl_gen_cpp_root)/%$(LOCAL_CPP_EXTENSION) : $(LOCAL_PATH)/%.aidl $(AIDL_CPP)
- $(transform-aidl-to-cpp)
--include $(addsuffix .P,$(basename $(aidl_gen_cpp)))
-
# Add generated headers to include paths.
my_c_includes += $(aidl_gen_include_root)
my_export_c_include_dirs += $(aidl_gen_include_root)
@@ -761,79 +762,57 @@
endif # $(aidl_src) non-empty
###########################################################
-## YACC: Compile .y and .yy files to .cpp and the to .o.
+## YACC: Compile .y/.yy files to .c/.cpp and then to .o.
###########################################################
y_yacc_sources := $(filter %.y,$(my_src_files))
-y_yacc_cpps := $(addprefix \
- $(intermediates)/,$(y_yacc_sources:.y=$(LOCAL_CPP_EXTENSION)))
+y_yacc_cs := $(addprefix \
+ $(intermediates)/,$(y_yacc_sources:.y=.c))
+ifneq ($(y_yacc_cs),)
+$(y_yacc_cs): $(intermediates)/%.c: \
+ $(TOPDIR)$(LOCAL_PATH)/%.y \
+ $(my_additional_dependencies)
+ $(call transform-y-to-c-or-cpp)
+
+my_generated_sources += $(y_yacc_cs)
+endif
yy_yacc_sources := $(filter %.yy,$(my_src_files))
yy_yacc_cpps := $(addprefix \
$(intermediates)/,$(yy_yacc_sources:.yy=$(LOCAL_CPP_EXTENSION)))
-
-yacc_cpps := $(y_yacc_cpps) $(yy_yacc_cpps)
-yacc_headers := $(yacc_cpps:$(LOCAL_CPP_EXTENSION)=.h)
-yacc_objects := $(yacc_cpps:$(LOCAL_CPP_EXTENSION)=.o)
-
-ifneq ($(strip $(y_yacc_cpps)),)
-$(y_yacc_cpps): $(intermediates)/%$(LOCAL_CPP_EXTENSION): \
- $(TOPDIR)$(LOCAL_PATH)/%.y \
- $(lex_cpps) $(my_additional_dependencies)
- $(call transform-y-to-cpp,$(PRIVATE_CPP_EXTENSION))
-$(yacc_headers): $(intermediates)/%.h: $(intermediates)/%$(LOCAL_CPP_EXTENSION)
-endif
-
-ifneq ($(strip $(yy_yacc_cpps)),)
+ifneq ($(yy_yacc_cpps),)
$(yy_yacc_cpps): $(intermediates)/%$(LOCAL_CPP_EXTENSION): \
$(TOPDIR)$(LOCAL_PATH)/%.yy \
- $(lex_cpps) $(my_additional_dependencies)
- $(call transform-y-to-cpp,$(PRIVATE_CPP_EXTENSION))
-$(yacc_headers): $(intermediates)/%.h: $(intermediates)/%$(LOCAL_CPP_EXTENSION)
-endif
+ $(my_additional_dependencies)
+ $(call transform-y-to-c-or-cpp)
-ifneq ($(strip $(yacc_cpps)),)
-$(yacc_objects): PRIVATE_ARM_MODE := $(normal_objects_mode)
-$(yacc_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
-$(yacc_objects): $(intermediates)/%.o: $(intermediates)/%$(LOCAL_CPP_EXTENSION)
- $(transform-$(PRIVATE_HOST)cpp-to-o)
+my_generated_sources += $(yy_yacc_cpps)
endif
###########################################################
-## LEX: Compile .l and .ll files to .cpp and then to .o.
+## LEX: Compile .l/.ll files to .c/.cpp and then to .o.
###########################################################
l_lex_sources := $(filter %.l,$(my_src_files))
-l_lex_cpps := $(addprefix \
- $(intermediates)/,$(l_lex_sources:.l=$(LOCAL_CPP_EXTENSION)))
+l_lex_cs := $(addprefix \
+ $(intermediates)/,$(l_lex_sources:.l=.c))
+ifneq ($(l_lex_cs),)
+$(l_lex_cs): $(intermediates)/%.c: \
+ $(TOPDIR)$(LOCAL_PATH)/%.l
+ $(transform-l-to-c-or-cpp)
+
+my_generated_sources += $(l_lex_cs)
+endif
ll_lex_sources := $(filter %.ll,$(my_src_files))
ll_lex_cpps := $(addprefix \
$(intermediates)/,$(ll_lex_sources:.ll=$(LOCAL_CPP_EXTENSION)))
-
-lex_cpps := $(l_lex_cpps) $(ll_lex_cpps)
-lex_objects := $(lex_cpps:$(LOCAL_CPP_EXTENSION)=.o)
-
-ifneq ($(strip $(l_lex_cpps)),)
-$(l_lex_cpps): $(intermediates)/%$(LOCAL_CPP_EXTENSION): \
- $(TOPDIR)$(LOCAL_PATH)/%.l
- $(transform-l-to-cpp)
-endif
-
-ifneq ($(strip $(ll_lex_cpps)),)
+ifneq ($(ll_lex_cpps),)
$(ll_lex_cpps): $(intermediates)/%$(LOCAL_CPP_EXTENSION): \
$(TOPDIR)$(LOCAL_PATH)/%.ll
- $(transform-l-to-cpp)
-endif
+ $(transform-l-to-c-or-cpp)
-ifneq ($(strip $(lex_cpps)),)
-$(lex_objects): PRIVATE_ARM_MODE := $(normal_objects_mode)
-$(lex_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
-$(lex_objects): $(intermediates)/%.o: \
- $(intermediates)/%$(LOCAL_CPP_EXTENSION) \
- $(my_additional_dependencies) \
- $(yacc_headers)
- $(transform-$(PRIVATE_HOST)cpp-to-o)
+my_generated_sources += $(ll_lex_cpps)
endif
###########################################################
@@ -928,6 +907,7 @@
endif
gen_asm_objects := $(gen_S_objects) $(gen_s_objects)
+$(gen_asm_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
###########################################################
## o: Include generated .o files in output.
@@ -1064,6 +1044,7 @@
endif
asm_objects := $(dotdot_objects_S) $(dotdot_objects_s) $(asm_objects_S) $(asm_objects_s)
+$(asm_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
# .asm for x86/x86_64 needs to be compiled with yasm.
@@ -1138,8 +1119,6 @@
$(gen_c_objects) \
$(objc_objects) \
$(objcpp_objects) \
- $(yacc_objects) \
- $(lex_objects) \
$(proto_generated_objects) \
$(addprefix $(TOPDIR)$(LOCAL_PATH)/,$(LOCAL_PREBUILT_OBJ_FILES))
diff --git a/core/build-system.html b/core/build-system.html
index 3f638c3..bddde6a 100644
--- a/core/build-system.html
+++ b/core/build-system.html
@@ -270,6 +270,7 @@
<li>Installs non-APK modules that have no tags specified.
<li>Installs APKs according to the product definition files; tags
are ignored for APK modules.
+ <li><code>ro.adb.secure=1</code>
<li><code>ro.secure=1</code>
<li><code>ro.debuggable=0</code>
<li><code>adb</code> is disabled by default.
diff --git a/core/ccache.mk b/core/ccache.mk
index c99d933..5c2ae23 100644
--- a/core/ccache.mk
+++ b/core/ccache.mk
@@ -14,7 +14,7 @@
# limitations under the License.
#
-ifneq ($(USE_CCACHE),)
+ifneq ($(filter-out false,$(USE_CCACHE)),)
# The default check uses size and modification time, causing false misses
# since the mtime depends when the repo was checked out
export CCACHE_COMPILERCHECK := content
diff --git a/core/clang/TARGET_arm.mk b/core/clang/TARGET_arm.mk
index 5ac5477..eeffa51 100644
--- a/core/clang/TARGET_arm.mk
+++ b/core/clang/TARGET_arm.mk
@@ -69,5 +69,4 @@
# Address sanitizer clang config
$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_RUNTIME_LIBRARY := libclang_rt.asan-arm-android
-$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_RPATH := /data/vendor/lib:/$(TARGET_COPY_OUT_VENDOR)/lib:/data/lib
$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan
diff --git a/core/clang/TARGET_arm64.mk b/core/clang/TARGET_arm64.mk
index 532ca77..15b0172 100644
--- a/core/clang/TARGET_arm64.mk
+++ b/core/clang/TARGET_arm64.mk
@@ -67,5 +67,4 @@
# Address sanitizer clang config
ADDRESS_SANITIZER_RUNTIME_LIBRARY := libclang_rt.asan-aarch64-android
-ADDRESS_SANITIZER_RPATH := /data/vendor/lib64:/$(TARGET_COPY_OUT_VENDOR)/lib64:/data/lib64
ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
diff --git a/core/clang/TARGET_x86.mk b/core/clang/TARGET_x86.mk
index f982439..9ca86a1 100644
--- a/core/clang/TARGET_x86.mk
+++ b/core/clang/TARGET_x86.mk
@@ -74,5 +74,4 @@
# Address sanitizer clang config
$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_RUNTIME_LIBRARY := libclang_rt.asan-i686-android
-$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_RPATH := /data/vendor/lib:/$(TARGET_COPY_OUT_VENDOR)/lib:/data/lib
$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan
diff --git a/core/clang/arm.mk b/core/clang/arm.mk
index e66aa6c..4053bb2 100644
--- a/core/clang/arm.mk
+++ b/core/clang/arm.mk
@@ -29,8 +29,7 @@
-fno-partial-inlining \
-fno-strict-volatile-bitfields \
-fno-tree-copy-prop \
- -fno-tree-loop-optimize \
- -Wa,--noexecstack
+ -fno-tree-loop-optimize
define subst-clang-incompatible-arm-flags
$(subst -march=armv5te,-march=armv5t,\
diff --git a/core/clang/arm64.mk b/core/clang/arm64.mk
index ab395b3..cad7321 100644
--- a/core/clang/arm64.mk
+++ b/core/clang/arm64.mk
@@ -13,8 +13,7 @@
-frerun-cse-after-loop \
-frename-registers \
-fno-strict-volatile-bitfields \
- -fno-align-jumps \
- -Wa,--noexecstack
+ -fno-align-jumps
# We don't have any arm64 flags to substitute yet.
define subst-clang-incompatible-arm64-flags
diff --git a/core/clang/config.mk b/core/clang/config.mk
index 756bf49..6df3e17 100644
--- a/core/clang/config.mk
+++ b/core/clang/config.mk
@@ -13,6 +13,16 @@
CLANG_TBLGEN := $(BUILD_OUT_EXECUTABLES)/clang-tblgen$(BUILD_EXECUTABLE_SUFFIX)
LLVM_TBLGEN := $(BUILD_OUT_EXECUTABLES)/llvm-tblgen$(BUILD_EXECUTABLE_SUFFIX)
+# RenderScript-specific tools
+# These are tied to the version of LLVM directly in external/, so they might
+# trail the host prebuilts being used for the rest of the build process.
+RS_LLVM_PREBUILTS_VERSION := 3.8
+RS_LLVM_PREBUILTS_BASE := prebuilts/clang/host
+RS_LLVM_PREBUILTS_PATH := $(RS_LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(RS_LLVM_PREBUILTS_VERSION)/bin
+RS_CLANG := $(RS_LLVM_PREBUILTS_PATH)/clang$(BUILD_EXECUTABLE_SUFFIX)
+RS_LLVM_AS := $(RS_LLVM_PREBUILTS_PATH)/llvm-as$(BUILD_EXECUTABLE_SUFFIX)
+RS_LLVM_LINK := $(RS_LLVM_PREBUILTS_PATH)/llvm-link$(BUILD_EXECUTABLE_SUFFIX)
+
# Clang flags for all host or target rules
CLANG_CONFIG_EXTRA_ASFLAGS :=
CLANG_CONFIG_EXTRA_CFLAGS :=
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 6cf344f..49e56c4 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -187,6 +187,7 @@
LOCAL_DBUS_PROXY_PREFIX:=
LOCAL_INIT_RC:=
LOCAL_MODULE_HOST_OS:=
+LOCAL_NOTICE_FILE:=
# arch specific variables
LOCAL_SRC_FILES_$(TARGET_ARCH):=
diff --git a/core/combo/HOST_CROSS_windows-x86.mk b/core/combo/HOST_CROSS_windows-x86.mk
index 1ac1059..b6cbebc 100644
--- a/core/combo/HOST_CROSS_windows-x86.mk
+++ b/core/combo/HOST_CROSS_windows-x86.mk
@@ -32,8 +32,8 @@
$(combo_var_prefix)GLOBAL_CFLAGS += -D__STDC_FORMAT_MACROS -D__STDC_CONSTANT_MACROS
# Use C99-compliant printf functions (%zd).
$(combo_var_prefix)GLOBAL_CFLAGS += -D__USE_MINGW_ANSI_STDIO=1
-# Admit to using >= Win2K. Both are needed because of <_mingw.h>.
-$(combo_var_prefix)GLOBAL_CFLAGS += -D_WIN32_WINNT=0x0500 -DWINVER=0x0500
+# Admit to using >= Vista. Both are needed because of <_mingw.h>.
+$(combo_var_prefix)GLOBAL_CFLAGS += -D_WIN32_WINNT=0x0600 -DWINVER=0x0600
# Get 64-bit off_t and related functions.
$(combo_var_prefix)GLOBAL_CFLAGS += -D_FILE_OFFSET_BITS=64
diff --git a/core/combo/TARGET_linux-arm.mk b/core/combo/TARGET_linux-arm.mk
index b91e072..510aae5 100644
--- a/core/combo/TARGET_linux-arm.mk
+++ b/core/combo/TARGET_linux-arm.mk
@@ -101,7 +101,7 @@
-ffunction-sections \
-fdata-sections \
-funwind-tables \
- -fstack-protector \
+ -fstack-protector-strong \
-Wa,--noexecstack \
-Werror=format-security \
-D_FORTIFY_SOURCE=2 \
diff --git a/core/combo/TARGET_linux-arm64.mk b/core/combo/TARGET_linux-arm64.mk
index bc89bc9..6a1d861 100644
--- a/core/combo/TARGET_linux-arm64.mk
+++ b/core/combo/TARGET_linux-arm64.mk
@@ -75,7 +75,7 @@
-fno-strict-aliasing \
TARGET_GLOBAL_CFLAGS += \
- -fstack-protector \
+ -fstack-protector-strong \
-ffunction-sections \
-fdata-sections \
-funwind-tables \
diff --git a/core/config.mk b/core/config.mk
index 556d52b..34be76b 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -127,36 +127,11 @@
# Set common values
# ###############################################################
-# These can be changed to modify both host and device modules.
-COMMON_GLOBAL_CFLAGS:= -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith
-COMMON_RELEASE_CFLAGS:= -DNDEBUG -UDEBUG
-
-# Force gcc to always output color diagnostics. Ninja will strip the ANSI
-# color codes if it is not running in a terminal.
-COMMON_GLOBAL_CFLAGS += -fdiagnostics-color
-
-COMMON_GLOBAL_CPPFLAGS:= -Wsign-promo
-COMMON_RELEASE_CPPFLAGS:=
-
-GLOBAL_CFLAGS_NO_OVERRIDE := \
- -Werror=int-to-pointer-cast \
- -Werror=pointer-to-int-cast \
-
-GLOBAL_CLANG_CFLAGS_NO_OVERRIDE := \
- -Werror=address-of-temporary \
- -Werror=null-dereference \
- -Werror=return-type \
-
-GLOBAL_CPPFLAGS_NO_OVERRIDE :=
-
# Set the extensions used for various packages
COMMON_PACKAGE_SUFFIX := .zip
COMMON_JAVA_PACKAGE_SUFFIX := .jar
COMMON_ANDROID_PACKAGE_SUFFIX := .apk
-# list of flags to turn specific warnings in to errors
-TARGET_ERROR_FLAGS := -Werror=return-type -Werror=non-virtual-dtor -Werror=address -Werror=sequence-point
-
ifdef TMPDIR
JAVA_TMPDIR_ARG := -Djava.io.tmpdir=$(TMPDIR)
else
@@ -186,35 +161,6 @@
# See envsetup.mk for a description of SCAN_EXCLUDE_DIRS
FIND_LEAVES_EXCLUDES := $(addprefix --prune=, $(OUT_DIR) $(SCAN_EXCLUDE_DIRS) .repo .git)
-# ---------------------------------------------------------------
-# We run gcc/clang with PWD=/proc/self/cwd to remove the $TOP
-# from the debug output. That way two builds in two different
-# directories will create the same output.
-# /proc doesn't exist on Darwin.
-ifeq ($(HOST_OS),linux)
-RELATIVE_PWD := PWD=/proc/self/cwd
-# Remove this useless prefix from the debug output.
-COMMON_GLOBAL_CFLAGS += -fdebug-prefix-map=/proc/self/cwd=
-else
-RELATIVE_PWD :=
-endif
-
-# ---------------------------------------------------------------
-# Allow the C/C++ macros __DATE__ and __TIME__ to be set to the
-# build date and time, so that a build may be repeated.
-# Write the date and time to a file so that the command line
-# doesn't change every time, which would cause ninja to rebuild
-# the files.
-$(shell mkdir -p $(OUT_DIR) && \
- $(DATE) "+%b %_d %Y" > $(OUT_DIR)/build_c_date.txt && \
- $(DATE) +%T > $(OUT_DIR)/build_c_time.txt)
-BUILD_DATETIME_C_DATE := $$(cat $(OUT_DIR)/build_c_date.txt)
-BUILD_DATETIME_C_TIME := $$(cat $(OUT_DIR)/build_c_time.txt)
-
-ifeq ($(OVERRIDE_C_DATE_TIME),true)
-COMMON_GLOBAL_CFLAGS += -Wno-builtin-macro-redefined -D__DATE__="\"$(BUILD_DATETIME_C_DATE)\"" -D__TIME__=\"$(BUILD_DATETIME_C_TIME)\"
-endif
-
# The build system exposes several variables for where to find the kernel
# headers:
# TARGET_DEVICE_KERNEL_HEADERS is automatically created for the current
@@ -284,7 +230,7 @@
# Commands to generate .toc file common to ELF .so files.
define _gen_toc_command_for_elf
$(hide) ($($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)READELF) -d $(1) | grep SONAME || echo "No SONAME for $1") > $(2)
-$(hide) $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)NM) -gD -f p $(1) | cut -f1-2 -d" " >> $(2)
+$(hide) $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)READELF) --dyn-syms $(1) | awk '{$$2=""; $$3=""; print}' >> $(2)
endef
# Commands to generate .toc file from Darwin dynamic library.
@@ -460,6 +406,8 @@
DX := $(HOST_OUT_EXECUTABLES)/dx
MAINDEXCLASSES := $(HOST_OUT_EXECUTABLES)/mainDexClasses
+USE_PREBUILT_SDK_TOOLS_IN_PLACE := true
+
# Override the definitions above for unbundled and PDK builds
ifneq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK)))
prebuilt_sdk_tools := prebuilts/sdk/tools
@@ -583,8 +531,6 @@
# Tool to merge AndroidManifest.xmls
ANDROID_MANIFEST_MERGER := java -classpath prebuilts/devtools/tools/lib/manifest-merger.jar com.android.manifmerger.Main merge
-YACC_HEADER_SUFFIX:= .hpp
-
COLUMN:= column
# We may not have the right JAVA_HOME/PATH set up yet when this is run from envsetup.sh.
@@ -627,6 +573,65 @@
# Set up final options.
# ###############################################################
+ifneq ($(COMMON_GLOBAL_CFLAGS)$(COMMON_GLOBAL_CPPFLAGS),)
+$(warning COMMON_GLOBAL_C(PP)FLAGS changed)
+$(info *** Device configurations are no longer allowed to change the global flags.)
+$(info *** COMMON_GLOBAL_CFLAGS: $(COMMON_GLOBAL_CFLAGS))
+$(info *** COMMON_GLOBAL_CPPFLAGS: $(COMMON_GLOBAL_CPPFLAGS))
+$(error bailing...)
+endif
+
+# These can be changed to modify both host and device modules.
+COMMON_GLOBAL_CFLAGS:= -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith
+COMMON_RELEASE_CFLAGS:= -DNDEBUG -UDEBUG
+
+# Force gcc to always output color diagnostics. Ninja will strip the ANSI
+# color codes if it is not running in a terminal.
+COMMON_GLOBAL_CFLAGS += -fdiagnostics-color
+
+COMMON_GLOBAL_CPPFLAGS:= -Wsign-promo
+COMMON_RELEASE_CPPFLAGS:=
+
+GLOBAL_CFLAGS_NO_OVERRIDE := \
+ -Werror=int-to-pointer-cast \
+ -Werror=pointer-to-int-cast \
+
+GLOBAL_CLANG_CFLAGS_NO_OVERRIDE := \
+ -Werror=address-of-temporary \
+ -Werror=null-dereference \
+ -Werror=return-type \
+
+GLOBAL_CPPFLAGS_NO_OVERRIDE :=
+
+# list of flags to turn specific warnings in to errors
+TARGET_ERROR_FLAGS := -Werror=return-type -Werror=non-virtual-dtor -Werror=address -Werror=sequence-point -Werror=date-time
+
+# We run gcc/clang with PWD=/proc/self/cwd to remove the $TOP
+# from the debug output. That way two builds in two different
+# directories will create the same output.
+# /proc doesn't exist on Darwin.
+ifeq ($(HOST_OS),linux)
+RELATIVE_PWD := PWD=/proc/self/cwd
+# Remove this useless prefix from the debug output.
+COMMON_GLOBAL_CFLAGS += -fdebug-prefix-map=/proc/self/cwd=
+else
+RELATIVE_PWD :=
+endif
+
+# Allow the C/C++ macros __DATE__ and __TIME__ to be set to the
+# build date and time, so that a build may be repeated.
+# Write the date and time to a file so that the command line
+# doesn't change every time, which would cause ninja to rebuild
+# the files.
+$(shell mkdir -p $(OUT_DIR) && \
+ $(DATE) "+%b %_d %Y" > $(OUT_DIR)/build_c_date.txt && \
+ $(DATE) +%T > $(OUT_DIR)/build_c_time.txt)
+BUILD_DATETIME_C_DATE := $$(cat $(OUT_DIR)/build_c_date.txt)
+BUILD_DATETIME_C_TIME := $$(cat $(OUT_DIR)/build_c_time.txt)
+ifeq ($(OVERRIDE_C_DATE_TIME),true)
+COMMON_GLOBAL_CFLAGS += -Wno-builtin-macro-redefined -D__DATE__="\"$(BUILD_DATETIME_C_DATE)\"" -D__TIME__=\"$(BUILD_DATETIME_C_TIME)\"
+endif
+
HOST_GLOBAL_CFLAGS += $(COMMON_GLOBAL_CFLAGS)
HOST_RELEASE_CFLAGS += $(COMMON_RELEASE_CFLAGS)
@@ -706,7 +711,7 @@
# allow overriding default Java libraries on a per-target basis
ifeq ($(TARGET_DEFAULT_JAVA_LIBRARIES),)
- TARGET_DEFAULT_JAVA_LIBRARIES := core-libart core-junit ext framework okhttp
+ TARGET_DEFAULT_JAVA_LIBRARIES := core-oj core-libart core-junit ext framework okhttp
endif
# Flags for DEX2OAT
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index df52e72..6e96880 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -117,7 +117,6 @@
$(my_shared_libraries) \
$(ADDRESS_SANITIZER_CONFIG_EXTRA_SHARED_LIBRARIES)
my_static_libraries += $(ADDRESS_SANITIZER_CONFIG_EXTRA_STATIC_LIBRARIES)
- my_ldflags += -Wl,-rpath,$($(LOCAL_2ND_ARCH_VAR_PREFIX)ADDRESS_SANITIZER_RPATH)
my_linker := $($(LOCAL_2ND_ARCH_VAR_PREFIX)ADDRESS_SANITIZER_LINKER)
# Make sure linker_asan get installed.
diff --git a/core/definitions.mk b/core/definitions.mk
index 742a56c..916dfa2 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -142,11 +142,27 @@
endef
###########################################################
+## Remove any makefiles that are being handled by soong
+###########################################################
+ifeq ($(USE_SOONG),true)
+define filter-soong-makefiles
+$(foreach mk,$(1),\
+ $(if $(wildcard $(patsubst %/Android.mk,%/Android.bp,$(mk))),\
+ $(info skipping $(mk) ...),\
+ $(mk)))
+endef
+else
+define filter-soong-makefiles
+$(1)
+endef
+endif
+
+###########################################################
## Retrieve a list of all makefiles immediately below some directory
###########################################################
define all-makefiles-under
-$(sort $(wildcard $(1)/*/Android.mk))
+$(sort $(call filter-soong-makefiles,$(wildcard $(1)/*/Android.mk)))
endef
###########################################################
@@ -157,8 +173,9 @@
# $(1): directory to search under
# Ignores $(1)/Android.mk
define first-makefiles-under
-$(shell build/tools/findleaves.py $(FIND_LEAVES_EXCLUDES) \
- --mindepth=2 $(1) Android.mk)
+$(call filter-soong-makefiles,\
+ $(shell build/tools/findleaves.py $(FIND_LEAVES_EXCLUDES) \
+ --mindepth=2 $(1) Android.mk))
endef
###########################################################
@@ -178,7 +195,8 @@
# $(1): List of directories to look for under this directory
define all-named-subdir-makefiles
-$(sort $(wildcard $(addsuffix /Android.mk, $(addprefix $(call my-dir)/,$(1)))))
+$(sort $(call filter-soong-makefiles,\
+ $(wildcard $(addsuffix /Android.mk, $(addprefix $(call my-dir)/,$(1))))))
endef
###########################################################
@@ -875,7 +893,7 @@
## Commands for running lex
###########################################################
-define transform-l-to-cpp
+define transform-l-to-c-or-cpp
@echo "Lex: $(PRIVATE_MODULE) <= $<"
@mkdir -p $(dir $@)
$(hide) $(LEX) -o$@ $<
@@ -884,20 +902,14 @@
###########################################################
## Commands for running yacc
##
-## Because the extension of c++ files can change, the
-## extension must be specified in $1.
-## E.g, "$(call transform-y-to-cpp,.cpp)"
###########################################################
-define transform-y-to-cpp
+define transform-y-to-c-or-cpp
@echo "Yacc: $(PRIVATE_MODULE) <= $<"
@mkdir -p $(dir $@)
-$(YACC) $(PRIVATE_YACCFLAGS) -o $@ $<
-touch $(@:$1=$(YACC_HEADER_SUFFIX))
-echo '#ifndef '$(@F:$1=_h) > $(@:$1=.h)
-echo '#define '$(@F:$1=_h) >> $(@:$1=.h)
-cat $(@:$1=$(YACC_HEADER_SUFFIX)) >> $(@:$1=.h)
-echo '#endif' >> $(@:$1=.h)
+$(YACC) $(PRIVATE_YACCFLAGS) \
+ --defines=$(basename $@).h \
+ -o $@ $<
endef
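With --defines, a single yacc/bison invocation now emits the header next to
the generated source; for a hypothetical parser.y, the recipe expands roughly
to:

    bison --defines=$(intermediates)/parser.h -o $(intermediates)/parser.c parser.y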
###########################################################
@@ -977,10 +989,22 @@
@mkdir -p $(dir $@)
@mkdir -p $(PRIVATE_HEADER_OUTPUT_DIR)
@echo "Generating C++ from AIDL: $(PRIVATE_MODULE) <= $<"
-$(hide) $(AIDL_CPP) -d$(basename $@).P $(PRIVATE_AIDL_FLAGS) \
+$(hide) $(AIDL_CPP) -d$(basename $@).aidl.P $(PRIVATE_AIDL_FLAGS) \
$< $(PRIVATE_HEADER_OUTPUT_DIR) $@
endef
+## Given a .aidl file path, generate the rule to compile it to a .cpp file.
+# $(1): a .aidl source file
+# $(2): a directory to place the generated .cpp files in
+# $(3): name of a variable to add the path to the generated source file to
+#
+# You must call this with $(eval).
+define define-aidl-cpp-rule
+define-aidl-cpp-rule-src := $(patsubst %.aidl,%$(LOCAL_CPP_EXTENSION),$(subst ../,dotdot/,$(addprefix $(2)/,$(1))))
+$$(define-aidl-cpp-rule-src) : $(LOCAL_PATH)/$(1) $(AIDL_CPP)
+ $$(transform-aidl-to-cpp)
+$(3) += $$(define-aidl-cpp-rule-src)
+endef
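Illustrative call site, mirroring the binary.mk hunk above:

    $(foreach s,$(aidl_src),\
      $(eval $(call define-aidl-cpp-rule,$(s),$(aidl_gen_cpp_root),aidl_gen_cpp)))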
###########################################################
## Commands for running java-event-log-tags.py
@@ -1756,7 +1780,8 @@
@$(call emit-line,$(wordlist 4401,4600,$(1)),$(2))
@$(call emit-line,$(wordlist 4601,4800,$(1)),$(2))
@$(call emit-line,$(wordlist 4801,5000,$(1)),$(2))
- @$(if $(wordlist 5001,5002,$(1)),$(error Too many words ($(words $(1)))))
+ @$(call emit-line,$(wordlist 5001,5200,$(1)),$(2))
+ @$(if $(wordlist 5201,5202,$(1)),$(error Too many words ($(words $(1)))))
endef
# For a list of jar files, unzip them to a specified directory,
@@ -1896,7 +1921,6 @@
-D jack.import.resource.policy=keep-first \
-D jack.import.type.policy=keep-first \
--output-jack $(PRIVATE_CLASSES_JACK) \
- -D jack.java.source.version=1.7 \
$(if $(PRIVATE_JACK_INCREMENTAL_DIR),--incremental-folder $(PRIVATE_JACK_INCREMENTAL_DIR)) \
--output-dex $(PRIVATE_JACK_INTERMEDIATES_DIR) \
$(addprefix --config-jarjar ,$(strip $(PRIVATE_JARJAR_RULES))) \
@@ -2037,7 +2061,6 @@
$(if $(PRIVATE_EXTRA_JAR_ARGS),--import-resource $@.res.tmp) \
-D jack.import.resource.policy=keep-first \
-D jack.import.type.policy=keep-first \
- -D jack.java.source.version=1.7 \
$(if $(PRIVATE_JACK_INCREMENTAL_DIR),--incremental-folder $(PRIVATE_JACK_INCREMENTAL_DIR)) \
--output-jack $@ \
$(addprefix --config-jarjar ,$(strip $(PRIVATE_JARJAR_RULES))) \
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
index 3f52876..97e77ba 100644
--- a/core/dex_preopt_libart.mk
+++ b/core/dex_preopt_libart.mk
@@ -97,5 +97,6 @@
--instruction-set-features=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES) \
--include-patch-information --runtime-arg -Xnorelocate --no-generate-debug-info \
--abort-on-hard-verifier-error \
+ --no-inline-from=core-oj.jar \
$(PRIVATE_DEX_PREOPT_FLAGS)
endef
diff --git a/core/dex_preopt_libart_boot.mk b/core/dex_preopt_libart_boot.mk
index afd61eb..f8fc655 100644
--- a/core/dex_preopt_libart_boot.mk
+++ b/core/dex_preopt_libart_boot.mk
@@ -42,15 +42,18 @@
# The rule to install boot.art and boot.oat
$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE) : $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) | $(ACP)
- $(call copy-file-to-target)
- $(hide) $(ACP) -fp $(patsubst %.art,%.oat,$<) $(patsubst %.art,%.oat,$@)
+ $(hide) $(ACP) -fp $(dir $<)/*.art $(dir $@)
+ $(hide) $(ACP) -fp $(dir $<)/*.oat $(dir $@)
$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_2ND_ARCH_VAR_PREFIX := $(my_2nd_arch_prefix)
# Use dex2oat debug version for better error reporting
$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) : $(LIBART_TARGET_BOOT_DEX_FILES) $(DEX2OAT_DEPENDENCY)
- @echo "target dex2oat: $@ ($?)"
+ @echo "target dex2oat: $@"
@mkdir -p $(dir $@)
@mkdir -p $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))
+ @rm -f $(dir $@)/*.art $(dir $@)/*.oat
+ @rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.art
+ @rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.oat
$(hide) $(DEX2OAT) --runtime-arg -Xms$(DEX2OAT_IMAGE_XMS) --runtime-arg -Xmx$(DEX2OAT_IMAGE_XMX) \
--image-classes=$(PRELOADED_CLASSES) \
$(addprefix --dex-file=,$(LIBART_TARGET_BOOT_DEX_FILES)) \
@@ -63,4 +66,5 @@
--instruction-set-variant=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_CPU_VARIANT) \
--instruction-set-features=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES) \
--android-root=$(PRODUCT_OUT)/system --include-patch-information --runtime-arg -Xnorelocate --no-generate-debug-info \
+ --multi-image --no-inline-from=core-oj.jar \
$(PRODUCT_DEX_PREOPT_BOOT_FLAGS) $(COMPILED_CLASSES_FLAGS)
diff --git a/core/droiddoc.mk b/core/droiddoc.mk
index 1421761..f791884 100644
--- a/core/droiddoc.mk
+++ b/core/droiddoc.mk
@@ -73,8 +73,8 @@
$(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, sdk_v$(LOCAL_SDK_VERSION))
endif
else
- LOCAL_JAVA_LIBRARIES := core-libart ext framework $(LOCAL_JAVA_LIBRARIES)
- $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, core-libart)
+ LOCAL_JAVA_LIBRARIES := core-oj core-libart ext framework $(LOCAL_JAVA_LIBRARIES)
+ $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, core-oj):$(call java-lib-files, core-libart)
endif # LOCAL_SDK_VERSION
LOCAL_JAVA_LIBRARIES := $(sort $(LOCAL_JAVA_LIBRARIES))
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 050d445..46c1463 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -154,6 +154,10 @@
ifeq ($(TARGET_ARCH),)
$(error TARGET_ARCH not defined by board config: $(board_config_mk))
endif
+ifneq ($(MALLOC_IMPL),)
+ $(warning *** Unsupported option MALLOC_IMPL defined by board config: $(board_config_mk).)
+ $(error Use `MALLOC_SVELTE := true` to configure jemalloc for low-memory)
+endif
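A board that previously selected dlmalloc for low-memory devices would now
write the following in its BoardConfig.mk, per the error message above:

    MALLOC_SVELTE := true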
TARGET_DEVICE_DIR := $(patsubst %/,%,$(dir $(board_config_mk)))
board_config_mk :=
@@ -426,3 +430,7 @@
ifeq ($(PRINT_BUILD_CONFIG),)
PRINT_BUILD_CONFIG := true
endif
+
+ifeq ($(USE_CLANG_PLATFORM_BUILD),)
+USE_CLANG_PLATFORM_BUILD := true
+endif
diff --git a/core/goma.mk b/core/goma.mk
index 01a1d81..6535b3e 100644
--- a/core/goma.mk
+++ b/core/goma.mk
@@ -15,7 +15,7 @@
#
# Notice: this works only with Google's Goma build infrastructure.
-ifneq ($(USE_GOMA),)
+ifneq ($(filter-out false,$(USE_GOMA)),)
# Check if USE_NINJA is not false because GNU make won't work well
# with goma. Note this file is evaluated twice, once by GNU make and
# once by kati with USE_NINJA=false. We do this check in the former
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index 59ecf4e..683b8b8 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -28,6 +28,7 @@
#######################################
ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+ LOCAL_JAVA_LIBRARIES += core-oj-hostdex
LOCAL_JAVA_LIBRARIES += core-libart-hostdex
endif
@@ -126,7 +127,7 @@
$(built_dex): PRIVATE_JACK_VERSION := $(LOCAL_JACK_VERSION)
$(built_dex): $(java_sources) $(java_resource_sources) $(full_jack_lib_deps) \
$(jar_manifest_file) $(proto_java_sources_file_stamp) $(LOCAL_MODULE_MAKEFILE_DEP) \
- $(LOCAL_ADDITIONAL_DEPENDENCIES) $(JACK)
+ $(LOCAL_ADDITIONAL_DEPENDENCIES) $(JACK) | setup-jack-server
@echo Building with Jack: $@
$(jack-java-to-dex)
diff --git a/core/host_dalvik_static_java_library.mk b/core/host_dalvik_static_java_library.mk
index 6248e1a..47ca002 100644
--- a/core/host_dalvik_static_java_library.mk
+++ b/core/host_dalvik_static_java_library.mk
@@ -24,6 +24,7 @@
LOCAL_UNINSTALLABLE_MODULE := true
LOCAL_IS_STATIC_JAVA_LIBRARY := true
USE_CORE_LIB_BOOTCLASSPATH := true
+LOCAL_JAVA_LIBRARIES += core-oj-hostdex
LOCAL_JAVA_LIBRARIES += core-libart-hostdex
intermediates.COMMON := $(call intermediates-dir-for,JAVA_LIBRARIES,$(LOCAL_MODULE),true,COMMON,)
@@ -49,7 +50,7 @@
$(full_classes_jack): $(java_sources) $(java_resource_sources) $(full_jack_lib_deps) \
$(jar_manifest_file) $(layers_file) $(LOCAL_MODULE_MAKEFILE_DEP) \
$(LOCAL_ADDITIONAL_DEPENDENCIES) $(LOCAL_JARJAR_RULES) \
- $(JACK)
+ $(JACK) | setup-jack-server
@echo Building with Jack: $@
$(java-to-jack)
diff --git a/core/host_native_test.mk b/core/host_native_test.mk
index b54bd3a..7cba1ae 100644
--- a/core/host_native_test.mk
+++ b/core/host_native_test.mk
@@ -5,4 +5,22 @@
include $(BUILD_SYSTEM)/host_test_internal.mk
+needs_symlink :=
+ifndef LOCAL_MULTILIB
+ ifndef LOCAL_32_BIT_ONLY
+ LOCAL_MULTILIB := both
+
+ ifeq (,$(LOCAL_MODULE_STEM_32)$(LOCAL_MODULE_STEM_64))
+ LOCAL_MODULE_STEM_32 := $(LOCAL_MODULE)32
+ LOCAL_MODULE_STEM_64 := $(LOCAL_MODULE)64
+ needs_symlink := true
+ endif
+ endif
+endif
+
include $(BUILD_HOST_EXECUTABLE)
+
+ifdef needs_symlink
+include $(BUILD_SYSTEM)/executable_prefer_symlink.mk
+needs_symlink :=
+endif
diff --git a/core/java.mk b/core/java.mk
index f6f52dc..a0ab4c5 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -654,7 +654,7 @@
$(LOCAL_MODULE_MAKEFILE_DEP) $(JACK)
ifeq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
-$(full_classes_jack): $(jack_all_deps)
+$(full_classes_jack): $(jack_all_deps) | setup-jack-server
@echo Building with Jack: $@
$(java-to-jack)
@@ -666,7 +666,7 @@
else #LOCAL_IS_STATIC_JAVA_LIBRARY
$(built_dex_intermediate): PRIVATE_CLASSES_JACK := $(full_classes_jack)
-$(built_dex_intermediate): $(jack_all_deps)
+$(built_dex_intermediate): $(jack_all_deps) | setup-jack-server
@echo Building with Jack: $@
$(jack-java-to-dex)
@@ -687,7 +687,7 @@
$(noshrob_classes_jack): PRIVATE_JACK_INCREMENTAL_DIR :=
endif
$(noshrob_classes_jack): PRIVATE_JACK_PROGUARD_FLAGS :=
-$(noshrob_classes_jack): $(jack_all_deps)
+$(noshrob_classes_jack): $(jack_all_deps) | setup-jack-server
@echo Building with Jack: $@
$(java-to-jack)
endif # full_classes_jar is defined
diff --git a/core/java_common.mk b/core/java_common.mk
index f11e723..aee3193 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -137,7 +137,7 @@
# No bootclasspath. But we still need "" to prevent javac from using default host bootclasspath.
my_bootclasspath := ""
else # LOCAL_NO_STANDARD_LIBRARIES
-my_bootclasspath := $(call java-lib-files,core-libart)
+my_bootclasspath := $(call java-lib-files,core-oj):$(call java-lib-files,core-libart)
endif # LOCAL_NO_STANDARD_LIBRARIES
else
ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),current)
@@ -161,7 +161,7 @@
ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
my_bootclasspath := ""
else
-my_bootclasspath := $(call java-lib-files,core-libart-hostdex,$(LOCAL_IS_HOST_MODULE))
+my_bootclasspath := $(call java-lib-files,core-oj-hostdex,$(LOCAL_IS_HOST_MODULE)):$(call java-lib-files,core-libart-hostdex,$(LOCAL_IS_HOST_MODULE))
endif
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH := -bootclasspath $(my_bootclasspath)
@@ -277,7 +277,7 @@
ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
my_bootclasspath :=
else
-my_bootclasspath := $(call jack-lib-files,core-libart)
+my_bootclasspath := $(call jack-lib-files,core-oj):$(call jack-lib-files,core-libart)
endif
else # LOCAL_SDK_VERSION
ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),current)
@@ -304,7 +304,7 @@
ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
my_bootclasspath :=
else
-my_bootclasspath := $(call jack-lib-files,core-libart-hostdex,$(LOCAL_IS_HOST_MODULE))
+my_bootclasspath := $(call jack-lib-files,core-oj-hostdex,$(LOCAL_IS_HOST_MODULE)):$(call jack-lib-files,core-libart-hostdex,$(LOCAL_IS_HOST_MODULE))
endif
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES := $(my_bootclasspath)
full_shared_jack_libs := $(call jack-lib-files,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
diff --git a/core/main.mk b/core/main.mk
index c29731c..32b0b39 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -324,6 +324,11 @@
-include $(TOPDIR)prebuilts/sdk/tools/jack_versions.mk
-include $(TOPDIR)prebuilts/sdk/tools/jack_for_module.mk
+#
+# -----------------------------------------------------------------
+# Install and start Jack server
+-include $(TOPDIR)prebuilts/sdk/tools/jack_server_setup.mk
+
# -----------------------------------------------------------------
###
### In this section we set up the things that are different
@@ -360,6 +365,10 @@
# Target is secure in user builds.
ADDITIONAL_DEFAULT_PROPERTIES += ro.secure=1
+ ifeq ($(user_variant),user)
+ ADDITIONAL_DEFAULT_PROPERTIES += ro.adb.secure=1
+ endif
+
ifeq ($(user_variant),userdebug)
# Pick up some extra useful tools
tags_to_install += debug
@@ -525,11 +534,12 @@
# --mindepth=2 makes the prunes not work.
subdir_makefiles := \
$(shell build/tools/findleaves.py $(FIND_LEAVES_EXCLUDES) $(subdirs) Android.mk)
+
ifeq ($(USE_SOONG),true)
-subdir_makefiles := $(SOONG_ANDROID_MK) $(subdir_makefiles)
+subdir_makefiles := $(SOONG_ANDROID_MK) $(call filter-soong-makefiles,$(subdir_makefiles))
endif
-$(foreach mk, $(subdir_makefiles), $(info including $(mk) ...)$(eval include $(mk)))
+$(foreach mk, $(subdir_makefiles),$(info including $(mk) ...)$(eval include $(mk)))
endif # dont_bother
diff --git a/core/ninja.mk b/core/ninja.mk
index 750aae5..2878514 100644
--- a/core/ninja.mk
+++ b/core/ninja.mk
@@ -1,6 +1,15 @@
+NINJA ?= prebuilts/ninja/$(HOST_PREBUILT_TAG)/ninja
+
+ifeq ($(USE_SOONG),true)
+USE_SOONG_FOR_KATI := true
+endif
+
+ifeq ($(USE_SOONG_FOR_KATI),true)
+include $(BUILD_SYSTEM)/soong.mk
+else
KATI ?= $(HOST_OUT_EXECUTABLES)/ckati
MAKEPARALLEL ?= $(HOST_OUT_EXECUTABLES)/makeparallel
-NINJA ?= prebuilts/ninja/$(HOST_PREBUILT_TAG)/ninja
+endif
KATI_OUTPUT_PATTERNS := $(OUT_DIR)/build%.ninja $(OUT_DIR)/ninja%.sh
@@ -119,8 +128,6 @@
endif
ifeq ($(USE_SOONG),true)
-include $(BUILD_SYSTEM)/soong.mk
-
COMBINED_BUILD_NINJA := $(OUT_DIR)/combined$(KATI_NINJA_SUFFIX).ninja
$(COMBINED_BUILD_NINJA): $(KATI_BUILD_NINJA) $(SOONG_ANDROID_MK)
@@ -147,6 +154,7 @@
@echo Running kati to generate build$(KATI_NINJA_SUFFIX).ninja...
+$(hide) $(KATI_MAKEPARALLEL) $(KATI) --ninja --ninja_dir=$(OUT_DIR) --ninja_suffix=$(KATI_NINJA_SUFFIX) --regen --ignore_dirty=$(OUT_DIR)/% --no_ignore_dirty=$(SOONG_ANDROID_MK) --ignore_optional_include=$(OUT_DIR)/%.P --detect_android_echo $(KATI_FIND_EMULATOR) -f build/core/main.mk $(KATI_GOALS) --gen_all_targets BUILDING_WITH_NINJA=true SOONG_ANDROID_MK=$(SOONG_ANDROID_MK)
+ifneq ($(USE_SOONG_FOR_KATI),true)
KATI_CXX := $(CLANG_CXX) $(CLANG_HOST_GLOBAL_CFLAGS) $(CLANG_HOST_GLOBAL_CPPFLAGS)
KATI_LD := $(CLANG_CXX) $(CLANG_HOST_GLOBAL_LDFLAGS)
# Build static ckati. Unfortunately Mac OS X doesn't officially support static executables.
@@ -168,6 +176,7 @@
MAKEPARALLEL_INTERMEDIATES_PATH := $(HOST_OUT_INTERMEDIATES)/EXECUTABLES/makeparallel_intermediates
MAKEPARALLEL_BIN_PATH := $(HOST_OUT_EXECUTABLES)
include build/tools/makeparallel/Makefile
+endif
.PHONY: FORCE
FORCE:
diff --git a/core/notice_files.mk b/core/notice_files.mk
index cf2dad6..e7f8974 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -2,7 +2,11 @@
## Track NOTICE files
###########################################################
+ifneq ($(LOCAL_NOTICE_FILE),)
+notice_file:=$(strip $(LOCAL_NOTICE_FILE))
+else
notice_file:=$(strip $(wildcard $(LOCAL_PATH)/NOTICE))
+endif
ifeq ($(LOCAL_MODULE_CLASS),GYP)
# We ignore NOTICE files for modules of type GYP.
diff --git a/core/pdk_config.mk b/core/pdk_config.mk
index 3397d9c..95eb0cc 100644
--- a/core/pdk_config.mk
+++ b/core/pdk_config.mk
@@ -58,6 +58,7 @@
target/common/obj/JAVA_LIBRARIES/android_stubs_current_intermediates \
target/common/obj/JAVA_LIBRARIES/bouncycastle_intermediates \
target/common/obj/JAVA_LIBRARIES/conscrypt_intermediates \
+ target/common/obj/JAVA_LIBRARIES/core-oj_intermediates \
target/common/obj/JAVA_LIBRARIES/core-libart_intermediates \
target/common/obj/JAVA_LIBRARIES/core-junit_intermediates \
target/common/obj/JAVA_LIBRARIES/ext_intermediates \
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index b0af228..601a79c 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -325,7 +325,7 @@
ifeq ($(LOCAL_MODULE_CLASS),JAVA_LIBRARIES)
$(intermediates.COMMON)/classes.jack : PRIVATE_JILL_FLAGS:=$(LOCAL_JILL_FLAGS)
$(intermediates.COMMON)/classes.jack : $(my_src_jar) $(LOCAL_MODULE_MAKEFILE_DEP) \
- $(LOCAL_ADDITIONAL_DEPENDENCIES) $(JILL_JAR) $(JACK)
+ $(LOCAL_ADDITIONAL_DEPENDENCIES) $(JILL_JAR) $(JACK) | setup-jack-server
$(transform-jar-to-jack)
# Update timestamps of .toc files for prebuilts so dependents will be
diff --git a/core/soong.mk b/core/soong.mk
index c464ba8..646c68e 100644
--- a/core/soong.mk
+++ b/core/soong.mk
@@ -4,6 +4,9 @@
SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android.mk
SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.variables
SOONG_IN_MAKE := $(SOONG_OUT_DIR)/.soong.in_make
+SOONG_HOST_EXECUTABLES := $(SOONG_OUT_DIR)/host/$(HOST_PREBUILT_TAG)/bin
+KATI := $(SOONG_HOST_EXECUTABLES)/ckati
+MAKEPARALLEL := $(SOONG_HOST_EXECUTABLES)/makeparallel
ifeq (,$(filter /%,$(SOONG_OUT_DIR)))
SOONG_TOP_RELPATH := $(shell python -c "import os; print os.path.relpath('$(TOP)', '$(SOONG_OUT_DIR)')")
@@ -23,9 +26,8 @@
$(hide) mkdir -p $(dir $@)
$(hide) (\
echo '{'; \
- echo ' "Device_uses_jemalloc": $(if $(filter dlmalloc,$(MALLOC_IMPL)),false,true),'; \
- echo ' "Device_uses_dlmalloc": $(if $(filter dlmalloc,$(MALLOC_IMPL)),true,false),'; \
- echo ' $(if $(BOARD_MALLOC_ALIGNMENT),"Dlmalloc_alignment": $(BOARD_MALLOC_ALIGNMENT)$(comma),)'; \
+ echo ' "Device_uses_jemalloc": $(if $(filter true,$(MALLOC_SVELTE)),false,true),'; \
+ echo ' "Device_uses_dlmalloc": $(if $(filter true,$(MALLOC_SVELTE)),true,false),'; \
echo ' "Platform_sdk_version": $(PLATFORM_SDK_VERSION),'; \
echo ' "Unbundled_build": $(if $(TARGET_BUILD_APPS),true,false),'; \
echo ' "Brillo": $(if $(BRILLO),true,false),'; \
@@ -58,4 +60,7 @@
# Build an Android.mk listing all soong outputs as prebuilts
$(SOONG_ANDROID_MK): $(SOONG) $(SOONG_VARIABLES) $(SOONG_IN_MAKE) FORCE
- $(hide) $(SOONG) $(SOONG_BUILD_NINJA) $(NINJA_ARGS)
+ $(hide) $(SOONG) $(KATI) $(MAKEPARALLEL) $(NINJA_ARGS)
+
+$(KATI): $(SOONG_ANDROID_MK)
+$(MAKEPARALLEL): $(SOONG_ANDROID_MK)
diff --git a/core/static_java_library.mk b/core/static_java_library.mk
index 9b7b46a..d956124 100644
--- a/core/static_java_library.mk
+++ b/core/static_java_library.mk
@@ -23,6 +23,10 @@
LOCAL_IS_STATIC_JAVA_LIBRARY := true
LOCAL_MODULE_CLASS := JAVA_LIBRARIES
+#################################
+include $(BUILD_SYSTEM)/configure_local_jack.mk
+#################################
+
# Hack to build static Java library with Android resource
# See bug 5714516
all_resources :=
@@ -59,10 +63,6 @@
LOCAL_PROGUARD_FLAGS := $(addprefix -include ,$(proguard_options_file)) $(LOCAL_PROGUARD_FLAGS)
-#################################
-include $(BUILD_SYSTEM)/configure_local_jack.mk
-#################################
-
ifdef LOCAL_JACK_ENABLED
ifndef LOCAL_JACK_PROGUARD_FLAGS
LOCAL_JACK_PROGUARD_FLAGS := $(LOCAL_PROGUARD_FLAGS)
diff --git a/core/tasks/check_boot_jars/package_whitelist.txt b/core/tasks/check_boot_jars/package_whitelist.txt
index 4d62615..48fa4a1 100644
--- a/core/tasks/check_boot_jars/package_whitelist.txt
+++ b/core/tasks/check_boot_jars/package_whitelist.txt
@@ -2,7 +2,7 @@
# Each line is interpreted as a regular expression.
###################################################
-# core-libart.jar
+# core-libart.jar & core-oj.jar
java\.awt\.font
java\.beans
java\.io
@@ -13,6 +13,9 @@
java\.math
java\.net
java\.nio
+java\.nio\.file
+java\.nio\.file\.spi
+java\.nio\.file\.attribute
java\.nio\.channels
java\.nio\.channels\.spi
java\.nio\.charset
@@ -24,6 +27,7 @@
java\.security\.spec
java\.sql
java\.text
+java\.text\.spi
java\.util
java\.util\.concurrent
java\.util\.concurrent\.atomic
@@ -32,6 +36,7 @@
java\.util\.logging
java\.util\.prefs
java\.util\.regex
+java\.util\.spi
java\.util\.zip
javax\.crypto
javax\.crypto\.interfaces
@@ -54,10 +59,18 @@
javax\.xml\.transform\.stream
javax\.xml\.validation
javax\.xml\.xpath
-sun\.misc
org\.w3c\.dom
org\.w3c\.dom\.ls
org\.w3c\.dom\.traversal
+# OpenJDK internal implementation.
+sun\.misc
+sun\.util.*
+sun\.text.*
+sun\.security.*
+sun\.reflect.*
+sun\.nio.*
+sun\.net.*
+com\.sun\..*
# TODO: Move these internal org.apache.harmony classes to libcore.*
org\.apache\.harmony\.crypto\.internal
diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk
index 6288ce8..d923c26 100644
--- a/core/tasks/cts.mk
+++ b/core/tasks/cts.mk
@@ -126,6 +126,7 @@
$(PRIVATE_PARAMS) CollectAllTests $(1) $(2) $(3) "$(4)" $(5) $(6) $(7)
endef
+OJ_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-oj,,COMMON)
CORE_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-libart,,COMMON)
CONSCRYPT_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,conscrypt,,COMMON)
BOUNCYCASTLE_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,bouncycastle,,COMMON)
@@ -142,7 +143,7 @@
TZDATAUPDATETESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,tzdata_update-tests,,COMMON)
GEN_CLASSPATH := \
- $(CORE_INTERMEDIATES)/classes.jar:$(CONSCRYPT_INTERMEDIATES)/classes.jar:$(BOUNCYCASTLE_INTERMEDIATES)/classes.jar:$(APACHEXML_INTERMEDIATES)/classes.jar:$(APACHEHARMONYTESTS_INTERMEDIATES)/classes.jar:$(OKHTTP_INTERMEDIATES)/classes.jar:$(OKHTTPTESTS_INTERMEDIATES)/classes.jar:$(OKHTTP_REPACKAGED_INTERMEDIATES)/classes.jar:$(JUNIT_INTERMEDIATES)/classes.jar:$(SQLITEJDBC_INTERMEDIATES)/javalib.jar:$(CORETESTS_INTERMEDIATES)/javalib.jar:$(JSR166TESTS_INTERMEDIATES)/javalib.jar:$(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar:$(TZDATAUPDATETESTS_INTERMEDIATES)/javalib.jar
+ $(OJ_INTERMEDIATES)/classes.jar:$(CORE_INTERMEDIATES)/classes.jar:$(CONSCRYPT_INTERMEDIATES)/classes.jar:$(BOUNCYCASTLE_INTERMEDIATES)/classes.jar:$(APACHEXML_INTERMEDIATES)/classes.jar:$(APACHEHARMONYTESTS_INTERMEDIATES)/classes.jar:$(OKHTTP_INTERMEDIATES)/classes.jar:$(OKHTTPTESTS_INTERMEDIATES)/classes.jar:$(OKHTTP_REPACKAGED_INTERMEDIATES)/classes.jar:$(JUNIT_INTERMEDIATES)/classes.jar:$(SQLITEJDBC_INTERMEDIATES)/javalib.jar:$(CORETESTS_INTERMEDIATES)/javalib.jar:$(JSR166TESTS_INTERMEDIATES)/javalib.jar:$(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar:$(TZDATAUPDATETESTS_INTERMEDIATES)/javalib.jar
CTS_CORE_XMLS := \
$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.dalvik.xml \
@@ -352,10 +353,11 @@
CORE_VM_TEST_TF_DESC := $(CTS_TESTCASES_OUT)/android.core.vm-tests-tf.xml
# core tests only needed to get hold of junit-framework-classes
+OJ_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-oj,,COMMON)
CORE_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-libart,,COMMON)
JUNIT_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-junit,,COMMON)
-GEN_CLASSPATH := $(CORE_INTERMEDIATES)/classes.jar:$(JUNIT_INTERMEDIATES)/classes.jar:$(VMTESTSTF_JAR):$(TF_JAR)
+GEN_CLASSPATH := $(OJ_INTERMEDIATES)/classes.jar:$(CORE_INTERMEDIATES)/classes.jar:$(JUNIT_INTERMEDIATES)/classes.jar:$(VMTESTSTF_JAR):$(TF_JAR)
$(CORE_VM_TEST_TF_DESC): PRIVATE_CLASSPATH:=$(GEN_CLASSPATH)
# Please see big comment above on why this line depends on javalib.jar instead of classes.jar
diff --git a/libs/host/Android.mk b/libs/host/Android.mk
index 7eb3aa1..bc25e4b 100644
--- a/libs/host/Android.mk
+++ b/libs/host/Android.mk
@@ -4,11 +4,12 @@
LOCAL_SRC_FILES:= \
CopyFile.c
-LOCAL_CFLAGS_darwin += -DMACOSX_RSRC
+LOCAL_CFLAGS := -Werror -Wall
LOCAL_MODULE:= libhost
LOCAL_MODULE_HOST_OS := darwin linux windows
LOCAL_C_INCLUDES := $(LOCAL_PATH)/include
+LOCAL_EXPORT_C_INCLUDE_DIRS := $(LOCAL_PATH)/include
LOCAL_CXX_STL := none
# acp uses libhost, so we can't use
diff --git a/libs/host/CopyFile.c b/libs/host/CopyFile.c
index b2a3e7f..5be012c 100644
--- a/libs/host/CopyFile.c
+++ b/libs/host/CopyFile.c
@@ -25,6 +25,7 @@
#include <assert.h>
#if defined(_WIN32)
+#include <direct.h> /* For _mkdir() */
# define mkdir(path,mode) _mkdir(path)
# define S_ISLNK(s) 0
# define lstat stat
@@ -66,9 +67,9 @@
*/
static bool isHiresMtime(const struct stat* pSrcStat)
{
-#if defined(__CYGWIN__) || defined(__MINGW32__)
- return 0;
-#elif defined(MACOSX_RSRC)
+#if defined(_WIN32)
+ return 0;
+#elif defined(__APPLE__)
return pSrcStat->st_mtimespec.tv_nsec > 0;
#else
return pSrcStat->st_mtim.tv_nsec > 0;
@@ -82,7 +83,9 @@
*/
static bool isSameFile(const struct stat* pSrcStat, const struct stat* pDstStat)
{
-#ifndef HAVE_VALID_STAT_ST_INO
+#if defined(_WIN32)
+ (void)pSrcStat;
+ (void)pDstStat;
/* with MSVCRT.DLL, stat always sets st_ino to 0, and there is no simple way to */
/* get the equivalent information with Win32 (Cygwin does some weird stuff in */
/* its winsup/cygwin/fhandler_disk_file.cc to emulate this, too complex for us) */
@@ -100,6 +103,7 @@
static void printNotNewerMsg(const char* src, const char* dst, unsigned int options)
{
+ (void)src;
if ((options & COPY_VERBOSE_MASK) > 1)
printf(" '%s' is up-to-date\n", dst);
}
@@ -291,7 +295,8 @@
if (copyResult != 0)
return -1;
-#ifdef MACOSX_RSRC
+#if defined(__APPLE__)
+ // Copy Mac OS X resource forks too.
{
char* srcRsrcName = NULL;
char* dstRsrcName = NULL;
@@ -533,6 +538,7 @@
struct stat srcStat;
int retVal = 0;
int statResult, statErrno;
+ (void)isCmdLine;
/*
* Stat the source file. If it doesn't exist, fail.
diff --git a/target/board/generic/sepolicy/property_contexts b/target/board/generic/sepolicy/property_contexts
index 152f0c8..142b062 100644
--- a/target/board/generic/sepolicy/property_contexts
+++ b/target/board/generic/sepolicy/property_contexts
@@ -1,5 +1,5 @@
qemu. u:object_r:qemu_prop:s0
-emu. u:object_r:qemu_prop:s0
-emulator. u:object_r:qemu_prop:s0
-radio.noril u:object_r:radio_noril_prop:s0
-opengles. u:object_r:opengles_prop:s0
+ro.emu. u:object_r:qemu_prop:s0
+ro.emulator. u:object_r:qemu_prop:s0
+ro.radio.noril u:object_r:radio_noril_prop:s0
+ro.opengles. u:object_r:opengles_prop:s0
diff --git a/target/board/generic_x86/BoardConfig.mk b/target/board/generic_x86/BoardConfig.mk
index a34e4f2..44772eb 100644
--- a/target/board/generic_x86/BoardConfig.mk
+++ b/target/board/generic_x86/BoardConfig.mk
@@ -18,9 +18,6 @@
# no hardware camera
USE_CAMERA_STUB := true
-# customize the malloced address to be 16-byte aligned
-BOARD_MALLOC_ALIGNMENT := 16
-
# Enable dex-preoptimization to speed up the first boot sequence
# of an SDK AVD. Note that this operation only works on Linux for now
ifeq ($(HOST_OS),linux)
diff --git a/target/board/generic_x86_64/BoardConfig.mk b/target/board/generic_x86_64/BoardConfig.mk
index fda9e45..cbfda6b 100755
--- a/target/board/generic_x86_64/BoardConfig.mk
+++ b/target/board/generic_x86_64/BoardConfig.mk
@@ -24,9 +24,6 @@
# no hardware camera
USE_CAMERA_STUB := true
-# customize the malloced address to be 16-byte aligned
-BOARD_MALLOC_ALIGNMENT := 16
-
# Enable dex-preoptimization to speed up the first boot sequence
# of an SDK AVD. Note that this operation only works on Linux for now
ifeq ($(HOST_OS),linux)
diff --git a/target/product/core_minimal.mk b/target/product/core_minimal.mk
index 27c10af..c39ab2c 100644
--- a/target/product/core_minimal.mk
+++ b/target/product/core_minimal.mk
@@ -63,7 +63,6 @@
libfilterfw \
libkeystore \
libgatekeeper \
- libsqlite_jni \
libwilhelm \
logd \
make_ext4fs \
@@ -83,6 +82,7 @@
# The order of PRODUCT_BOOT_JARS matters.
PRODUCT_BOOT_JARS := \
+ core-oj \
core-libart \
conscrypt \
okhttp \
diff --git a/target/product/core_tiny.mk b/target/product/core_tiny.mk
index a12d2d1..3e3220b 100644
--- a/target/product/core_tiny.mk
+++ b/target/product/core_tiny.mk
@@ -61,7 +61,6 @@
libfilterfw \
libgatekeeper \
libkeystore \
- libsqlite_jni \
libwilhelm \
libdrmframework_jni \
libdrmframework \
@@ -80,6 +79,7 @@
# The order matters
PRODUCT_BOOT_JARS := \
+ core-oj \
core-libart \
conscrypt \
okhttp \
diff --git a/target/product/embedded.mk b/target/product/embedded.mk
index 28aa37b..71456dc 100644
--- a/target/product/embedded.mk
+++ b/target/product/embedded.mk
@@ -22,6 +22,7 @@
adbd \
atrace \
bootanimation \
+ bootstat \
debuggerd \
dumpstate \
dumpsys \
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index c177981..94efe52 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -22,6 +22,7 @@
bouncycastle \
cacerts \
conscrypt \
+ core-oj \
core-junit \
core-libart \
dalvikvm \
@@ -39,6 +40,8 @@
libicui18n \
libicuuc \
libjavacore \
+ libopenjdk \
+ libopenjdkjvm \
libnativehelper \
libssl \
libz \
diff --git a/tools/acp/Android.mk b/tools/acp/Android.mk
index e31ad3a..eec9c9d 100644
--- a/tools/acp/Android.mk
+++ b/tools/acp/Android.mk
@@ -3,20 +3,13 @@
# Custom version of cp.
LOCAL_PATH:= $(call my-dir)
+
include $(CLEAR_VARS)
-LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
LOCAL_SRC_FILES := \
acp.c
-ifeq ($(HOST_OS),darwin)
-LOCAL_CFLAGS += -DMACOSX_RSRC
-endif
-ifeq ($(HOST_OS),linux)
-endif
-
LOCAL_STATIC_LIBRARIES := libhost
-LOCAL_C_INCLUDES := build/libs/host/include
LOCAL_MODULE := acp
LOCAL_ACP_UNAVAILABLE := true
LOCAL_CXX_STL := none
diff --git a/tools/atree/Android.mk b/tools/atree/Android.mk
index d895810..f598db5 100644
--- a/tools/atree/Android.mk
+++ b/tools/atree/Android.mk
@@ -12,7 +12,6 @@
LOCAL_STATIC_LIBRARIES := \
libhost
-LOCAL_C_INCLUDES := build/libs/host/include
LOCAL_MODULE := atree
diff --git a/tools/droiddoc/templates-ds/class.cs b/tools/droiddoc/templates-ds/class.cs
index 23e57ab..d82f1c1 100644
--- a/tools/droiddoc/templates-ds/class.cs
+++ b/tools/droiddoc/templates-ds/class.cs
@@ -184,7 +184,6 @@
<tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:method.since ?>" >
<td class="jd-typecol"><nobr>
<?cs var:method.abstract ?>
- <?cs var:method.synchronized ?>
<?cs var:method.final ?>
<?cs var:method.static ?>
<?cs call:type_link(method.generic) ?>
@@ -535,7 +534,6 @@
<?cs var:method.static ?>
<?cs var:method.final ?>
<?cs var:method.abstract ?>
- <?cs var:method.synchronized ?>
<?cs call:type_link(method.returnType) ?>
</span>
<span class="sympad"><?cs var:method.name ?></span>
diff --git a/tools/droiddoc/templates-ndk/class.cs b/tools/droiddoc/templates-ndk/class.cs
index 7aa99f9..e033fa2 100644
--- a/tools/droiddoc/templates-ndk/class.cs
+++ b/tools/droiddoc/templates-ndk/class.cs
@@ -188,7 +188,6 @@
<tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:method.since ?>" >
<td class="jd-typecol"><nobr>
<?cs var:method.abstract ?>
- <?cs var:method.synchronized ?>
<?cs var:method.final ?>
<?cs var:method.static ?>
<?cs call:type_link(method.generic) ?>
@@ -558,7 +557,6 @@
<?cs var:method.static ?>
<?cs var:method.final ?>
<?cs var:method.abstract ?>
- <?cs var:method.synchronized ?>
<?cs call:type_link(method.returnType) ?>
</span>
<span class="sympad"><?cs var:method.name ?></span>
diff --git a/tools/droiddoc/templates-sac/assets/js/docs.js b/tools/droiddoc/templates-sac/assets/js/docs.js
index 195c9e5..d0c12a8 100644
--- a/tools/droiddoc/templates-sac/assets/js/docs.js
+++ b/tools/droiddoc/templates-sac/assets/js/docs.js
@@ -2015,10 +2015,7 @@
});
function updateResultTitle(query) {
- $("#searchTitle").html("Results for <em>" + (query) + "</em>");
-// For some reason, the escapeHTML function wasn't working for me. TODO fix
-// this by copying in a comparable library function.
-// $("#searchTitle").html("Results for <em>" + escapeHTML(query) + "</em>");
+ $("#searchTitle").html("Results for <em>" + encodeURIComponent(query) + "</em>");
}
// forcefully regain key-up event control (previously jacked by search api)
@@ -2055,13 +2052,6 @@
return queryParts[1];
}
-/* returns the given string with all HTML brackets converted to entities
- TODO: move this to the site's JS library */
-function escapeHTML(string) {
- return string.replace(/</g,"<")
- .replace(/>/g,">");
-}
-
/* ######################################################## */
/* ################# JAVADOC REFERENCE ################### */
diff --git a/tools/droiddoc/templates-sac/class.cs b/tools/droiddoc/templates-sac/class.cs
index 4003ff5..440e705 100644
--- a/tools/droiddoc/templates-sac/class.cs
+++ b/tools/droiddoc/templates-sac/class.cs
@@ -185,7 +185,6 @@
<tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:method.since ?>" >
<td class="jd-typecol"><nobr>
<?cs var:method.abstract ?>
- <?cs var:method.synchronized ?>
<?cs var:method.final ?>
<?cs var:method.static ?>
<?cs call:type_link(method.generic) ?>
@@ -539,7 +538,6 @@
<?cs var:method.static ?>
<?cs var:method.final ?>
<?cs var:method.abstract ?>
- <?cs var:method.synchronized ?>
<?cs call:type_link(method.returnType) ?>
</span>
<span class="sympad"><?cs var:method.name ?></span>
diff --git a/tools/droiddoc/templates-sac/footer.cs b/tools/droiddoc/templates-sac/footer.cs
index ea22605..e43adb0 100644
--- a/tools/droiddoc/templates-sac/footer.cs
+++ b/tools/droiddoc/templates-sac/footer.cs
@@ -2,7 +2,7 @@
<style>.feedback { float: right !Important }</style>
<div class="feedback">
<a href="#" class="button" onclick=" try {
- userfeedback.api.startFeedback({'productId':'715571','authuser':'1'});return false;}catch(e){}">Send Feedback</a>
+ userfeedback.api.startFeedback({'productId':'715571','authuser':'1'});return false;}catch(e){}">Site Feedback</a>
</div>
<div id="copyright">
<?cs call:custom_cc_copyright() ?>
diff --git a/tools/droiddoc/templates-sdk-dev/class.cs b/tools/droiddoc/templates-sdk-dev/class.cs
index 693eaed..649aaff 100644
--- a/tools/droiddoc/templates-sdk-dev/class.cs
+++ b/tools/droiddoc/templates-sdk-dev/class.cs
@@ -190,7 +190,6 @@
<tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:method.since ?>" >
<td class="jd-typecol"><nobr>
<?cs var:method.abstract ?>
- <?cs var:method.synchronized ?>
<?cs var:method.final ?>
<?cs var:method.static ?>
<?cs call:type_link(method.generic) ?>
@@ -560,7 +559,6 @@
<?cs var:method.static ?>
<?cs var:method.final ?>
<?cs var:method.abstract ?>
- <?cs var:method.synchronized ?>
<?cs call:type_link(method.returnType) ?>
</span>
<span class="sympad"><?cs var:method.name ?></span>
diff --git a/tools/droiddoc/templates-sdk/class.cs b/tools/droiddoc/templates-sdk/class.cs
index 44eae97..93fcf88 100644
--- a/tools/droiddoc/templates-sdk/class.cs
+++ b/tools/droiddoc/templates-sdk/class.cs
@@ -188,7 +188,6 @@
<tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:method.since ?>" >
<td class="jd-typecol"><nobr>
<?cs var:method.abstract ?>
- <?cs var:method.synchronized ?>
<?cs var:method.final ?>
<?cs var:method.static ?>
<?cs call:type_link(method.generic) ?>
@@ -558,7 +557,6 @@
<?cs var:method.static ?>
<?cs var:method.final ?>
<?cs var:method.abstract ?>
- <?cs var:method.synchronized ?>
<?cs call:type_link(method.returnType) ?>
</span>
<span class="sympad"><?cs var:method.name ?></span>
diff --git a/tools/makeparallel/Android.bp b/tools/makeparallel/Android.bp
new file mode 100644
index 0000000..cb81817
--- /dev/null
+++ b/tools/makeparallel/Android.bp
@@ -0,0 +1,26 @@
+// Copyright 2016 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+cc_binary_host {
+ name: "makeparallel",
+ srcs: [
+ "makeparallel.cpp",
+ ],
+ cflags: ["-Wall", "-Werror"],
+ target: {
+ linux: {
+ host_ldlibs: ["-lrt", "-lpthread"],
+ },
+ },
+}
diff --git a/tools/makeparallel/makeparallel.cpp b/tools/makeparallel/makeparallel.cpp
index 576fe8d..cf125fa 100644
--- a/tools/makeparallel/makeparallel.cpp
+++ b/tools/makeparallel/makeparallel.cpp
@@ -343,7 +343,7 @@
// child
int ret = execvp(path, args.data());
if (ret < 0) {
- error(errno, errno, "exec failed");
+ error(errno, errno, "exec %s failed", path);
}
abort();
}
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index 1a5b93d..1d338ee 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -613,12 +613,15 @@
def_cmd = stashes[idx][1]
assert (idx, sr) in def_cmd.stash_before
def_cmd.stash_before.remove((idx, sr))
- new_blocks += sr.size()
+          # Add up blocks that violate the space limit and print the total
+          # number to the screen later.
+ new_blocks += cmd.tgt_ranges.size()
cmd.ConvertToNew()
- print(" Total %d blocks are packed as new blocks due to insufficient "
- "cache size." % (new_blocks,))
+ num_of_bytes = new_blocks * self.tgt.blocksize
+ print(" Total %d blocks (%d bytes) are packed as new blocks due to "
+ "insufficient cache size." % (new_blocks, num_of_bytes))
def ComputePatches(self, prefix):
print("Reticulating splines...")
@@ -987,29 +990,36 @@
too many blocks (greater than MAX_BLOCKS_PER_DIFF_TRANSFER), we split it
into smaller pieces by getting multiple Transfer()s.
- The downside is that after splitting, we can no longer use imgdiff but
- only bsdiff."""
-
- MAX_BLOCKS_PER_DIFF_TRANSFER = 1024
+ The downside is that after splitting, we may increase the package size
+ since the split pieces don't align well. According to our experiments,
+ 1/8 of the cache size as the per-piece limit appears to be optimal.
+ Compared to the fixed 1024-block limit, it reduces the overall package
+    size by 30% for volantis, and 20% for angler and bullhead."""
# We care about diff transfers only.
if style != "diff" or not split:
Transfer(tgt_name, src_name, tgt_ranges, src_ranges, style, by_id)
return
+ pieces = 0
+ cache_size = common.OPTIONS.cache_size
+ split_threshold = 0.125
+ max_blocks_per_transfer = int(cache_size * split_threshold /
+ self.tgt.blocksize)
+
# Change nothing for small files.
- if (tgt_ranges.size() <= MAX_BLOCKS_PER_DIFF_TRANSFER and
- src_ranges.size() <= MAX_BLOCKS_PER_DIFF_TRANSFER):
+ if (tgt_ranges.size() <= max_blocks_per_transfer and
+ src_ranges.size() <= max_blocks_per_transfer):
Transfer(tgt_name, src_name, tgt_ranges, src_ranges, style, by_id)
return
- pieces = 0
- while (tgt_ranges.size() > MAX_BLOCKS_PER_DIFF_TRANSFER and
- src_ranges.size() > MAX_BLOCKS_PER_DIFF_TRANSFER):
+ while (tgt_ranges.size() > max_blocks_per_transfer and
+ src_ranges.size() > max_blocks_per_transfer):
tgt_split_name = "%s-%d" % (tgt_name, pieces)
src_split_name = "%s-%d" % (src_name, pieces)
- tgt_first = tgt_ranges.first(MAX_BLOCKS_PER_DIFF_TRANSFER)
- src_first = src_ranges.first(MAX_BLOCKS_PER_DIFF_TRANSFER)
+ tgt_first = tgt_ranges.first(max_blocks_per_transfer)
+ src_first = src_ranges.first(max_blocks_per_transfer)
+
Transfer(tgt_split_name, src_split_name, tgt_first, src_first, style,
by_id)
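For a concrete sense of the new cache-derived limit, a minimal sketch with hypothetical numbers (the real inputs come from common.OPTIONS.cache_size and self.tgt.blocksize):

    # Hypothetical values for illustration only.
    cache_size = 512 * 1024 * 1024   # a 512 MiB /cache partition
    blocksize = 4096                 # bytes per filesystem block
    split_threshold = 0.125          # 1/8 of the cache, as in the patch

    max_blocks_per_transfer = int(cache_size * split_threshold / blocksize)
    print(max_blocks_per_transfer)   # 16384 blocks (64 MiB) per piece,
                                     # versus the old fixed 1024 blocks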
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 2b8d4ee..e6ad18b 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -129,8 +129,8 @@
AdjustPartitionSizeForVerity.results = {}
-def BuildVerityFEC(sparse_image_path, verity_fec_path, prop_dict):
- cmd = "fec -e %s %s" % (sparse_image_path, verity_fec_path)
+def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path):
+ cmd = "fec -e %s %s %s" % (sparse_image_path, verity_path, verity_fec_path)
print cmd
status, output = commands.getstatusoutput(cmd)
if status:
@@ -182,13 +182,33 @@
return False
return True
-def BuildVerifiedImage(data_image_path, verity_image_path,
- verity_metadata_path):
- if not Append2Simg(data_image_path, verity_image_path,
- "Could not append verity tree!"):
+def Append(target, file_to_append, error_message):
+ cmd = 'cat %s >> %s' % (file_to_append, target)
+ print cmd
+ status, output = commands.getstatusoutput(cmd)
+ if status:
+ print "%s: %s" % (error_message, output)
return False
- if not Append2Simg(data_image_path, verity_metadata_path,
- "Could not append verity metadata!"):
+ return True
+
+def BuildVerifiedImage(data_image_path, verity_image_path,
+ verity_metadata_path, verity_fec_path,
+ fec_supported):
+ if not Append(verity_image_path, verity_metadata_path,
+ "Could not append verity metadata!"):
+ return False
+
+ if fec_supported:
+ # build FEC for the entire partition, including metadata
+ if not BuildVerityFEC(data_image_path, verity_image_path,
+ verity_fec_path):
+ return False
+
+ if not Append(verity_image_path, verity_fec_path, "Could not append FEC!"):
+ return False
+
+ if not Append2Simg(data_image_path, verity_image_path,
+ "Could not append verity data!"):
return False
return True
@@ -252,20 +272,12 @@
# build the full verified image
if not BuildVerifiedImage(out_file,
verity_image_path,
- verity_metadata_path):
+ verity_metadata_path,
+ verity_fec_path,
+ fec_supported):
shutil.rmtree(tempdir_name, ignore_errors=True)
return False
- if fec_supported:
- # build FEC for the entire partition, including metadata
- if not BuildVerityFEC(out_file, verity_fec_path, prop_dict):
- shutil.rmtree(tempdir_name, ignore_errors=True)
- return False
-
- if not Append2Simg(out_file, verity_fec_path, "Could not append FEC!"):
- shutil.rmtree(tempdir_name, ignore_errors=True)
- return False
-
shutil.rmtree(tempdir_name, ignore_errors=True)
return True
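The net effect of the reordering above, as a hedged sketch with placeholder names (not the real API): the verity metadata is appended to the verity tree first, FEC is computed over the data image plus that combined blob, and a single sparse append remains at the end.

    # Partition layout implied by the patch:
    #   [ filesystem data | verity tree | verity metadata | FEC ]
    append(verity_image, verity_metadata)       # tree, then metadata
    if fec_supported:
        # FEC covers the entire partition, including the verity blob.
        build_verity_fec(data_image, verity_image, verity_fec)
        append(verity_image, verity_fec)        # FEC codes go last
    append2simg(data_image, verity_image)       # one append into the sparse image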
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index ee0cdf9..95aeb62 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -494,8 +494,11 @@
info_dict = OPTIONS.info_dict
# With system_root_image == "true", we don't pack ramdisk into the boot image.
- has_ramdisk = (info_dict.get("system_root_image", None) != "true" or
- prebuilt_name != "boot.img")
+ # Unless "recovery_as_boot" is specified, in which case we carry the ramdisk
+ # for recovery.
+ has_ramdisk = (info_dict.get("system_root_image") != "true" or
+ prebuilt_name != "boot.img" or
+ info_dict.get("recovery_as_boot") == "true")
fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
data = _BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
@@ -1238,9 +1241,6 @@
self.partition = partition
self.check_first_block = check_first_block
- # Due to http://b/20939131, check_first_block is disabled temporarily.
- assert not self.check_first_block
-
if version is None:
version = 1
if OPTIONS.info_dict:
@@ -1304,14 +1304,8 @@
script.AppendExtra(('if (range_sha1("%s", "%s") == "%s" || '
'block_image_verify("%s", '
'package_extract_file("%s.transfer.list"), '
- '"%s.new.dat", "%s.patch.dat") || '
- '(block_image_recover("%s", "%s") && '
- 'block_image_verify("%s", '
- 'package_extract_file("%s.transfer.list"), '
- '"%s.new.dat", "%s.patch.dat"))) then') % (
+ '"%s.new.dat", "%s.patch.dat")) then') % (
self.device, ranges_str, self.src.TotalSha1(),
- self.device, partition, partition, partition,
- self.device, ranges_str,
self.device, partition, partition, partition))
elif self.version == 3:
script.AppendExtra(('if (range_sha1("%s", "%s") == "%s" || '
@@ -1326,22 +1320,36 @@
script.Print('Verified %s image...' % (partition,))
script.AppendExtra('else')
- # When generating incrementals for the system and vendor partitions,
- # explicitly check the first block (which contains the superblock) of
- # the partition to see if it's what we expect. If this check fails,
- # give an explicit log message about the partition having been
- # remounted R/W (the most likely explanation) and the need to flash to
- # get OTAs working again.
- if self.check_first_block:
- self._CheckFirstBlock(script)
+ if self.version >= 4:
+
+ # Bug: 21124327
+ # When generating incrementals for the system and vendor partitions in
+ # version 4 or newer, explicitly check the first block (which contains
+ # the superblock) of the partition to see if it's what we expect. If
+ # this check fails, give an explicit log message about the partition
+ # having been remounted R/W (the most likely explanation).
+ if self.check_first_block:
+ script.AppendExtra('check_first_block("%s");' % (self.device,))
+
+      # If version >= 4, try block recovery before aborting the update
+ script.AppendExtra((
+ 'ifelse (block_image_recover("{device}", "{ranges}") && '
+ 'block_image_verify("{device}", '
+ 'package_extract_file("{partition}.transfer.list"), '
+ '"{partition}.new.dat", "{partition}.patch.dat"), '
+ 'ui_print("{partition} recovered successfully."), '
+ 'abort("{partition} partition fails to recover"));\n'
+ 'endif;').format(device=self.device, ranges=ranges_str,
+ partition=partition))
# Abort the OTA update. Note that the incremental OTA cannot be applied
# even if it may match the checksum of the target partition.
# a) If version < 3, operations like move and erase will make changes
# unconditionally and damage the partition.
# b) If version >= 3, it won't even reach here.
- script.AppendExtra(('abort("%s partition has unexpected contents");\n'
- 'endif;') % (partition,))
+ else:
+ script.AppendExtra(('abort("%s partition has unexpected contents");\n'
+ 'endif;') % (partition,))
def _WritePostInstallVerifyScript(self, script):
partition = self.partition
@@ -1410,22 +1418,9 @@
return ctx.hexdigest()
- # TODO(tbao): Due to http://b/20939131, block 0 may be changed without
- # remounting R/W. Will change the checking to a finer-grained way to
- # mask off those bits.
- def _CheckFirstBlock(self, script):
- r = rangelib.RangeSet((0, 1))
- srchash = self._HashBlocks(self.src, r)
-
- script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || '
- 'abort("%s has been remounted R/W; '
- 'reflash device to reenable OTA updates");')
- % (self.device, r.to_string_raw(), srchash,
- self.device))
DataImage = blockimgdiff.DataImage
-
# map recovery.fstab's fs_types to mount/format "partition types"
PARTITION_TYPES = {
"yaffs2": "MTD",
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 5259ede..a567760 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -109,6 +109,7 @@
import multiprocessing
import os
+import subprocess
import tempfile
import zipfile
@@ -811,7 +812,12 @@
int(i) for i in
OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
+  # Check the first block of the system partition for R/W remount only if
+  # the filesystem type is ext4.
+  system_partition = OPTIONS.source_info_dict["fstab"]["/system"]
+  check_first_block = system_partition.fs_type == "ext4"
system_diff = common.BlockDifference("system", system_tgt, system_src,
+ check_first_block,
version=blockimgdiff_version)
if HasVendorPartition(target_zip):
@@ -821,7 +827,13 @@
OPTIONS.source_info_dict)
vendor_tgt = GetImage("vendor", OPTIONS.target_tmp,
OPTIONS.target_info_dict)
+
+    # Check the first block of the vendor partition for R/W remount only if
+    # the filesystem type is ext4.
+    vendor_partition = OPTIONS.source_info_dict["fstab"]["/vendor"]
+    check_first_block = vendor_partition.fs_type == "ext4"
vendor_diff = common.BlockDifference("vendor", vendor_tgt, vendor_src,
+ check_first_block,
version=blockimgdiff_version)
else:
vendor_diff = None
@@ -1067,6 +1079,124 @@
WriteMetadata(metadata, output_zip)
+def WriteABOTAPackageWithBrilloScript(target_file, output_file,
+ source_file=None):
+  """Generate an Android OTA package that has an A/B update payload."""
+
+  # Set up the signing keys.
+ if OPTIONS.package_key is None:
+ OPTIONS.package_key = OPTIONS.info_dict.get(
+ "default_system_dev_certificate",
+ "build/target/product/security/testkey")
+
+ # A/B updater expects key in RSA format.
+ cmd = ["openssl", "pkcs8",
+ "-in", OPTIONS.package_key + OPTIONS.private_key_suffix,
+ "-inform", "DER", "-nocrypt"]
+ rsa_key = common.MakeTempFile(prefix="key-", suffix=".key")
+ cmd.extend(["-out", rsa_key])
+ p1 = common.Run(cmd, stdout=subprocess.PIPE)
+ p1.wait()
+ assert p1.returncode == 0, "openssl pkcs8 failed"
+
+ # Stage the output zip package for signing.
+ temp_zip_file = tempfile.NamedTemporaryFile()
+ output_zip = zipfile.ZipFile(temp_zip_file, "w",
+ compression=zipfile.ZIP_DEFLATED)
+
+ # Metadata to comply with Android OTA package format.
+ oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties", None)
+ oem_dict = None
+ if oem_props:
+ if OPTIONS.oem_source is None:
+ raise common.ExternalError("OEM source required for this build")
+ oem_dict = common.LoadDictionaryFromLines(
+ open(OPTIONS.oem_source).readlines())
+
+ metadata = {
+ "post-build": CalculateFingerprint(oem_props, oem_dict,
+ OPTIONS.info_dict),
+ "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
+ OPTIONS.info_dict),
+ "post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict),
+ }
+
+ if source_file is not None:
+ metadata["pre-build"] = CalculateFingerprint(oem_props, oem_dict,
+ OPTIONS.source_info_dict)
+
+ # 1. Generate payload.
+ payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
+ cmd = ["brillo_update_payload", "generate",
+ "--payload", payload_file,
+ "--target_image", target_file]
+ if source_file is not None:
+ cmd.extend(["--source_image", source_file])
+ p1 = common.Run(cmd, stdout=subprocess.PIPE)
+ p1.wait()
+ assert p1.returncode == 0, "brillo_update_payload generate failed"
+
+ # 2. Generate hashes of the payload and metadata files.
+ payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+ metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+ cmd = ["brillo_update_payload", "hash",
+ "--unsigned_payload", payload_file,
+ "--signature_size", "256",
+ "--metadata_hash_file", metadata_sig_file,
+ "--payload_hash_file", payload_sig_file]
+ p1 = common.Run(cmd, stdout=subprocess.PIPE)
+ p1.wait()
+ assert p1.returncode == 0, "brillo_update_payload hash failed"
+
+ # 3. Sign the hashes and insert them back into the payload file.
+ signed_payload_sig_file = common.MakeTempFile(prefix="signed-sig-",
+ suffix=".bin")
+ signed_metadata_sig_file = common.MakeTempFile(prefix="signed-sig-",
+ suffix=".bin")
+ # 3a. Sign the payload hash.
+ cmd = ["openssl", "pkeyutl", "-sign",
+ "-inkey", rsa_key,
+ "-pkeyopt", "digest:sha256",
+ "-in", payload_sig_file,
+ "-out", signed_payload_sig_file]
+ p1 = common.Run(cmd, stdout=subprocess.PIPE)
+ p1.wait()
+ assert p1.returncode == 0, "openssl sign payload failed"
+
+ # 3b. Sign the metadata hash.
+ cmd = ["openssl", "pkeyutl", "-sign",
+ "-inkey", rsa_key,
+ "-pkeyopt", "digest:sha256",
+ "-in", metadata_sig_file,
+ "-out", signed_metadata_sig_file]
+ p1 = common.Run(cmd, stdout=subprocess.PIPE)
+ p1.wait()
+ assert p1.returncode == 0, "openssl sign metadata failed"
+
+ # 3c. Insert the signatures back into the payload file.
+ signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
+ suffix=".bin")
+ cmd = ["brillo_update_payload", "sign",
+ "--unsigned_payload", payload_file,
+ "--payload", signed_payload_file,
+ "--signature_size", "256",
+ "--metadata_signature_file", signed_metadata_sig_file,
+ "--payload_signature_file", signed_payload_sig_file]
+ p1 = common.Run(cmd, stdout=subprocess.PIPE)
+ p1.wait()
+ assert p1.returncode == 0, "brillo_update_payload sign failed"
+
+ # Add the signed payload file into the zip.
+ common.ZipWrite(output_zip, signed_payload_file, arcname="payload.bin",
+ compress_type=zipfile.ZIP_STORED)
+ WriteMetadata(metadata, output_zip)
+
+ # Sign the whole package to comply with the Android OTA package format.
+ common.ZipClose(output_zip)
+ SignOutput(temp_zip_file.name, output_file)
+ temp_zip_file.close()
+
+
class FileDifference(object):
def __init__(self, partition, source_zip, target_zip, output_zip):
self.deferred_patch_list = None
@@ -1672,6 +1802,37 @@
common.Usage(__doc__)
sys.exit(1)
+ # Load the dict file from the zip directly to have a peek at the OTA type.
+ # For packages using A/B update, unzipping is not needed.
+ input_zip = zipfile.ZipFile(args[0], "r")
+ OPTIONS.info_dict = common.LoadInfoDict(input_zip)
+ common.ZipClose(input_zip)
+
+ ab_update = OPTIONS.info_dict.get("ab_update") == "true"
+
+ if ab_update:
+ if OPTIONS.incremental_source is not None:
+ OPTIONS.target_info_dict = OPTIONS.info_dict
+ source_zip = zipfile.ZipFile(OPTIONS.incremental_source, "r")
+ OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
+ common.ZipClose(source_zip)
+
+ if OPTIONS.verbose:
+ print "--- target info ---"
+ common.DumpInfoDict(OPTIONS.info_dict)
+
+ if OPTIONS.incremental_source is not None:
+ print "--- source info ---"
+ common.DumpInfoDict(OPTIONS.source_info_dict)
+
+ WriteABOTAPackageWithBrilloScript(
+ target_file=args[0],
+ output_file=args[1],
+ source_file=OPTIONS.incremental_source)
+
+ print "done."
+ return
+
if OPTIONS.extra_script is not None:
OPTIONS.extra_script = open(OPTIONS.extra_script).read()
@@ -1725,6 +1886,7 @@
output_zip = zipfile.ZipFile(temp_zip_file, "w",
compression=zipfile.ZIP_DEFLATED)
+  # Non-A/B OTAs rely on the /cache partition to store temporary files.
cache_size = OPTIONS.info_dict.get("cache_size", None)
if cache_size is None:
print "--- can't determine the cache partition size ---"
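The key handling in WriteABOTAPackageWithBrilloScript boils down to two openssl invocations; a minimal standalone sketch of the same conversion and signing, with hypothetical file names:

    import subprocess

    def sign_payload_hash(pk8_key, hash_file, out_sig):
        # Convert the DER-encoded .pk8 signing key into the PEM RSA form
        # expected by the A/B tooling, as in the patch above.
        subprocess.check_call(["openssl", "pkcs8", "-in", pk8_key,
                               "-inform", "DER", "-nocrypt",
                               "-out", "payload.key"])
        # Sign the SHA-256 hash emitted by 'brillo_update_payload hash'.
        subprocess.check_call(["openssl", "pkeyutl", "-sign",
                               "-inkey", "payload.key",
                               "-pkeyopt", "digest:sha256",
                               "-in", hash_file,
                               "-out", out_sig])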
diff --git a/tools/signapk/Android.mk b/tools/signapk/Android.mk
index da1501b..c2cf572 100644
--- a/tools/signapk/Android.mk
+++ b/tools/signapk/Android.mk
@@ -19,7 +19,7 @@
# ============================================================
include $(CLEAR_VARS)
LOCAL_MODULE := signapk
-LOCAL_SRC_FILES := SignApk.java
+LOCAL_SRC_FILES := $(call all-java-files-under, src)
LOCAL_JAR_MANIFEST := SignApk.mf
LOCAL_STATIC_JAVA_LIBRARIES := bouncycastle-host bouncycastle-bcpkix-host conscrypt-host
LOCAL_REQUIRED_MODULES := libconscrypt_openjdk_jni
diff --git a/tools/signapk/src/com/android/signapk/ApkSignerV2.java b/tools/signapk/src/com/android/signapk/ApkSignerV2.java
new file mode 100644
index 0000000..ee5e72e
--- /dev/null
+++ b/tools/signapk/src/com/android/signapk/ApkSignerV2.java
@@ -0,0 +1,730 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.signapk;
+
+import java.nio.BufferUnderflowException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.security.DigestException;
+import java.security.InvalidAlgorithmParameterException;
+import java.security.InvalidKeyException;
+import java.security.KeyFactory;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.security.PrivateKey;
+import java.security.PublicKey;
+import java.security.Signature;
+import java.security.SignatureException;
+import java.security.cert.CertificateEncodingException;
+import java.security.cert.X509Certificate;
+import java.security.spec.AlgorithmParameterSpec;
+import java.security.spec.InvalidKeySpecException;
+import java.security.spec.MGF1ParameterSpec;
+import java.security.spec.PSSParameterSpec;
+import java.security.spec.X509EncodedKeySpec;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * APK Signature Scheme v2 signer.
+ *
+ * <p>APK Signature Scheme v2 is a whole-file signature scheme which aims to protect every single
+ * bit of the APK, as opposed to the JAR Signature Scheme which protects only the names and
+ * uncompressed contents of ZIP entries.
+ */
+public abstract class ApkSignerV2 {
+ /*
+ * The two main goals of APK Signature Scheme v2 are:
+ * 1. Detect any unauthorized modifications to the APK. This is achieved by making the signature
+ * cover every byte of the APK being signed.
+ * 2. Enable much faster signature and integrity verification. This is achieved by requiring
+ * only a minimal amount of APK parsing before the signature is verified, thus completely
+     *    bypassing ZIP entry decompression, and by making integrity verification parallelizable
+     *    by employing a hash tree.
+ *
+ * The generated signature block is wrapped into an APK Signing Block and inserted into the
+ * original APK immediately before the start of ZIP Central Directory. This is to ensure that
+ * JAR and ZIP parsers continue to work on the signed APK. The APK Signing Block is designed for
+ * extensibility. For example, a future signature scheme could insert its signatures there as
+ * well. The contract of the APK Signing Block is that all contents outside of the block must be
+ * protected by signatures inside the block.
+ */
+
+ public static final int SIGNATURE_RSA_PSS_WITH_SHA256 = 0x0101;
+ public static final int SIGNATURE_RSA_PSS_WITH_SHA512 = 0x0102;
+ public static final int SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA256 = 0x0103;
+ public static final int SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA512 = 0x0104;
+ public static final int SIGNATURE_ECDSA_WITH_SHA256 = 0x0201;
+ public static final int SIGNATURE_ECDSA_WITH_SHA512 = 0x0202;
+ public static final int SIGNATURE_DSA_WITH_SHA256 = 0x0301;
+ public static final int SIGNATURE_DSA_WITH_SHA512 = 0x0302;
+
+ /**
+     * {@code .SF} file header section attribute indicating that the APK is signed not just with
+     * the JAR signature scheme but also with APK Signature Scheme v2 or newer. This attribute
+ * facilitates v2 signature stripping detection.
+ *
+ * <p>The attribute contains a comma-separated set of signature scheme IDs.
+ */
+ public static final String SF_ATTRIBUTE_ANDROID_APK_SIGNED_NAME = "X-Android-APK-Signed";
+ // TODO: Adjust the value when signing scheme finalized.
+    // TODO: Adjust the value when the signing scheme is finalized.
+
+ private static final int CONTENT_DIGEST_CHUNKED_SHA256 = 0;
+ private static final int CONTENT_DIGEST_CHUNKED_SHA512 = 1;
+
+ private static final int CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES = 1024 * 1024;
+
+ private static final byte[] APK_SIGNING_BLOCK_MAGIC =
+ new byte[] {
+ 0x41, 0x50, 0x4b, 0x20, 0x53, 0x69, 0x67, 0x20,
+ 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x20, 0x34, 0x32,
+ };
+ private static final int APK_SIGNATURE_SCHEME_V2_BLOCK_ID = 0x7109871a;
+
+ private ApkSignerV2() {}
+
+ /**
+ * Signer configuration.
+ */
+ public static final class SignerConfig {
+ /** Private key. */
+ public PrivateKey privateKey;
+
+ /**
+ * Certificates, with the first certificate containing the public key corresponding to
+ * {@link #privateKey}.
+ */
+ public List<X509Certificate> certificates;
+
+ /**
+ * List of signature algorithms with which to sign (see {@code SIGNATURE_...} constants).
+ */
+ public List<Integer> signatureAlgorithms;
+ }
+
+ /**
+ * Signs the provided APK using APK Signature Scheme v2 and returns the signed APK as a list of
+ * consecutive chunks.
+ *
+     * <p>NOTE: To enable the APK signature verifier to detect v2 signature stripping, the header
+     * sections of the META-INF/*.SF files of the APK being signed must contain the
+     * {@code X-Android-APK-Signed: true} attribute.
+ *
+ * @param inputApk contents of the APK to be signed. The APK starts at the current position
+ * of the buffer and ends at the limit of the buffer.
+ * @param signerConfigs signer configurations, one for each signer.
+ *
+ * @throws ApkParseException if the APK cannot be parsed.
+ * @throws InvalidKeyException if a signing key is not suitable for this signature scheme or
+ * cannot be used in general.
+     * @throws SignatureException if an error occurs when computing digests or generating
+ * signatures.
+ */
+ public static ByteBuffer[] sign(
+ ByteBuffer inputApk,
+ List<SignerConfig> signerConfigs)
+ throws ApkParseException, InvalidKeyException, SignatureException {
+ // Slice/create a view in the inputApk to make sure that:
+ // 1. inputApk is what's between position and limit of the original inputApk, and
+ // 2. changes to position, limit, and byte order are not reflected in the original.
+ ByteBuffer originalInputApk = inputApk;
+ inputApk = originalInputApk.slice();
+ inputApk.order(ByteOrder.LITTLE_ENDIAN);
+
+ // Locate ZIP End of Central Directory (EoCD), Central Directory, and check that Central
+ // Directory is immediately followed by the ZIP End of Central Directory.
+ int eocdOffset = ZipUtils.findZipEndOfCentralDirectoryRecord(inputApk);
+ if (eocdOffset == -1) {
+ throw new ApkParseException("Failed to locate ZIP End of Central Directory");
+ }
+ if (ZipUtils.isZip64EndOfCentralDirectoryLocatorPresent(inputApk, eocdOffset)) {
+ throw new ApkParseException("ZIP64 format not supported");
+ }
+ inputApk.position(eocdOffset);
+ long centralDirSizeLong = ZipUtils.getZipEocdCentralDirectorySizeBytes(inputApk);
+ if (centralDirSizeLong > Integer.MAX_VALUE) {
+ throw new ApkParseException(
+ "ZIP Central Directory size out of range: " + centralDirSizeLong);
+ }
+ int centralDirSize = (int) centralDirSizeLong;
+ long centralDirOffsetLong = ZipUtils.getZipEocdCentralDirectoryOffset(inputApk);
+ if (centralDirOffsetLong > Integer.MAX_VALUE) {
+ throw new ApkParseException(
+ "ZIP Central Directory offset in file out of range: " + centralDirOffsetLong);
+ }
+ int centralDirOffset = (int) centralDirOffsetLong;
+ int expectedEocdOffset = centralDirOffset + centralDirSize;
+ if (expectedEocdOffset < centralDirOffset) {
+ throw new ApkParseException(
+ "ZIP Central Directory extent too large. Offset: " + centralDirOffset
+ + ", size: " + centralDirSize);
+ }
+ if (eocdOffset != expectedEocdOffset) {
+ throw new ApkParseException(
+                    "ZIP Central Directory not immediately followed by ZIP End of"
+ + " Central Directory. CD end: " + expectedEocdOffset
+ + ", EoCD start: " + eocdOffset);
+ }
+
+ // Create ByteBuffers holding the contents of everything before ZIP Central Directory,
+ // ZIP Central Directory, and ZIP End of Central Directory.
+ inputApk.clear();
+ ByteBuffer beforeCentralDir = getByteBuffer(inputApk, centralDirOffset);
+ ByteBuffer centralDir = getByteBuffer(inputApk, eocdOffset - centralDirOffset);
+        // Create a copy of the End of Central Directory because we'll need to modify its contents later.
+ byte[] eocdBytes = new byte[inputApk.remaining()];
+ inputApk.get(eocdBytes);
+ ByteBuffer eocd = ByteBuffer.wrap(eocdBytes);
+ eocd.order(inputApk.order());
+
+        // Figure out which digests to use for the APK contents.
+ Set<Integer> contentDigestAlgorithms = new HashSet<>();
+ for (SignerConfig signerConfig : signerConfigs) {
+ for (int signatureAlgorithm : signerConfig.signatureAlgorithms) {
+ contentDigestAlgorithms.add(
+ getSignatureAlgorithmContentDigestAlgorithm(signatureAlgorithm));
+ }
+ }
+
+ // Compute digests of APK contents.
+ Map<Integer, byte[]> contentDigests; // digest algorithm ID -> digest
+ try {
+ contentDigests =
+ computeContentDigests(
+ contentDigestAlgorithms,
+ new ByteBuffer[] {beforeCentralDir, centralDir, eocd});
+ } catch (DigestException e) {
+ throw new SignatureException("Failed to compute digests of APK", e);
+ }
+
+ // Sign the digests and wrap the signatures and signer info into an APK Signing Block.
+ ByteBuffer apkSigningBlock =
+ ByteBuffer.wrap(generateApkSigningBlock(signerConfigs, contentDigests));
+
+ // Update Central Directory Offset in End of Central Directory Record. Central Directory
+ // follows the APK Signing Block and thus is shifted by the size of the APK Signing Block.
+ centralDirOffset += apkSigningBlock.remaining();
+ eocd.clear();
+ ZipUtils.setZipEocdCentralDirectoryOffset(eocd, centralDirOffset);
+
+ // Follow the Java NIO pattern for ByteBuffer whose contents have been consumed.
+ originalInputApk.position(originalInputApk.limit());
+
+        // Reset positions (to 0) and limits (to capacity) in the ByteBuffers below to follow the
+        // Java NIO pattern for ByteBuffers which are ready for their contents to be read by the
+        // caller. Contrary to the name, this does not clear the contents of these ByteBuffers.
+ beforeCentralDir.clear();
+ centralDir.clear();
+ eocd.clear();
+
+ // Insert APK Signing Block immediately before the ZIP Central Directory.
+ return new ByteBuffer[] {
+ beforeCentralDir,
+ apkSigningBlock,
+ centralDir,
+ eocd,
+ };
+ }
+
+ private static Map<Integer, byte[]> computeContentDigests(
+ Set<Integer> digestAlgorithms,
+ ByteBuffer[] contents) throws DigestException {
+ // For each digest algorithm the result is computed as follows:
+ // 1. Each segment of contents is split into consecutive chunks of 1 MB in size.
+        //    The final chunk will be shorter iff the length of the segment is not a multiple of 1 MB.
+ // No chunks are produced for empty (zero length) segments.
+ // 2. The digest of each chunk is computed over the concatenation of byte 0xa5, the chunk's
+ // length in bytes (uint32 little-endian) and the chunk's contents.
+ // 3. The output digest is computed over the concatenation of the byte 0x5a, the number of
+ // chunks (uint32 little-endian) and the concatenation of digests of chunks of all
+ // segments in-order.
+
+ int chunkCount = 0;
+ for (ByteBuffer input : contents) {
+ chunkCount += getChunkCount(input.remaining(), CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES);
+ }
+
+ final Map<Integer, byte[]> digestsOfChunks = new HashMap<>(digestAlgorithms.size());
+ for (int digestAlgorithm : digestAlgorithms) {
+ int digestOutputSizeBytes = getContentDigestAlgorithmOutputSizeBytes(digestAlgorithm);
+ byte[] concatenationOfChunkCountAndChunkDigests =
+ new byte[5 + chunkCount * digestOutputSizeBytes];
+ concatenationOfChunkCountAndChunkDigests[0] = 0x5a;
+            setUnsignedInt32LittleEndian(
+ chunkCount, concatenationOfChunkCountAndChunkDigests, 1);
+ digestsOfChunks.put(digestAlgorithm, concatenationOfChunkCountAndChunkDigests);
+ }
+
+ int chunkIndex = 0;
+ byte[] chunkContentPrefix = new byte[5];
+ chunkContentPrefix[0] = (byte) 0xa5;
+ // Optimization opportunity: digests of chunks can be computed in parallel.
+ for (ByteBuffer input : contents) {
+ while (input.hasRemaining()) {
+ int chunkSize =
+ Math.min(input.remaining(), CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES);
+ final ByteBuffer chunk = getByteBuffer(input, chunkSize);
+ for (int digestAlgorithm : digestAlgorithms) {
+ String jcaAlgorithmName =
+ getContentDigestAlgorithmJcaDigestAlgorithm(digestAlgorithm);
+ MessageDigest md;
+ try {
+ md = MessageDigest.getInstance(jcaAlgorithmName);
+ } catch (NoSuchAlgorithmException e) {
+ throw new DigestException(
+ jcaAlgorithmName + " MessageDigest not supported", e);
+ }
+ // Reset position to 0 and limit to capacity. Position would've been modified
+ // by the preceding iteration of this loop. NOTE: Contrary to the method name,
+ // this does not modify the contents of the chunk.
+ chunk.clear();
+                setUnsignedInt32LittleEndian(chunk.remaining(), chunkContentPrefix, 1);
+ md.update(chunkContentPrefix);
+ md.update(chunk);
+ byte[] concatenationOfChunkCountAndChunkDigests =
+ digestsOfChunks.get(digestAlgorithm);
+ int expectedDigestSizeBytes =
+ getContentDigestAlgorithmOutputSizeBytes(digestAlgorithm);
+ int actualDigestSizeBytes =
+ md.digest(
+ concatenationOfChunkCountAndChunkDigests,
+ 5 + chunkIndex * expectedDigestSizeBytes,
+ expectedDigestSizeBytes);
+ if (actualDigestSizeBytes != expectedDigestSizeBytes) {
+ throw new DigestException(
+ "Unexpected output size of " + md.getAlgorithm()
+ + " digest: " + actualDigestSizeBytes);
+ }
+ }
+ chunkIndex++;
+ }
+ }
+
+ Map<Integer, byte[]> result = new HashMap<>(digestAlgorithms.size());
+ for (Map.Entry<Integer, byte[]> entry : digestsOfChunks.entrySet()) {
+ int digestAlgorithm = entry.getKey();
+ byte[] concatenationOfChunkCountAndChunkDigests = entry.getValue();
+ String jcaAlgorithmName = getContentDigestAlgorithmJcaDigestAlgorithm(digestAlgorithm);
+ MessageDigest md;
+ try {
+ md = MessageDigest.getInstance(jcaAlgorithmName);
+ } catch (NoSuchAlgorithmException e) {
+ throw new DigestException(jcaAlgorithmName + " MessageDigest not supported", e);
+ }
+ result.put(digestAlgorithm, md.digest(concatenationOfChunkCountAndChunkDigests));
+ }
+ return result;
+ }
+
+ private static final int getChunkCount(int inputSize, int chunkSize) {
+ return (inputSize + chunkSize - 1) / chunkSize;
+ }
+
+    private static void setUnsignedInt32LittleEndian(int value, byte[] result, int offset) {
+ result[offset] = (byte) (value & 0xff);
+ result[offset + 1] = (byte) ((value >> 8) & 0xff);
+ result[offset + 2] = (byte) ((value >> 16) & 0xff);
+ result[offset + 3] = (byte) ((value >> 24) & 0xff);
+ }
+
+ private static byte[] generateApkSigningBlock(
+ List<SignerConfig> signerConfigs,
+ Map<Integer, byte[]> contentDigests) throws InvalidKeyException, SignatureException {
+ byte[] apkSignatureSchemeV2Block =
+ generateApkSignatureSchemeV2Block(signerConfigs, contentDigests);
+ return generateApkSigningBlock(apkSignatureSchemeV2Block);
+ }
+
+ private static byte[] generateApkSigningBlock(byte[] apkSignatureSchemeV2Block) {
+ // FORMAT:
+ // uint64: size (excluding this field)
+ // repeated ID-value pairs:
+ // uint64: size (excluding this field)
+ // uint32: ID
+ // (size - 4) bytes: value
+ // uint64: size (same as the one above)
+ // uint128: magic
+
+ int resultSize =
+ 8 // size
+ + 8 + 4 + apkSignatureSchemeV2Block.length // v2Block as ID-value pair
+ + 8 // size
+ + 16 // magic
+ ;
+ ByteBuffer result = ByteBuffer.allocate(resultSize);
+ result.order(ByteOrder.LITTLE_ENDIAN);
+ long blockSizeFieldValue = resultSize - 8;
+ result.putLong(blockSizeFieldValue);
+
+ long pairSizeFieldValue = 4 + apkSignatureSchemeV2Block.length;
+ result.putLong(pairSizeFieldValue);
+ result.putInt(APK_SIGNATURE_SCHEME_V2_BLOCK_ID);
+ result.put(apkSignatureSchemeV2Block);
+
+ result.putLong(blockSizeFieldValue);
+ result.put(APK_SIGNING_BLOCK_MAGIC);
+
+ return result.array();
+ }
+
+ private static byte[] generateApkSignatureSchemeV2Block(
+ List<SignerConfig> signerConfigs,
+ Map<Integer, byte[]> contentDigests) throws InvalidKeyException, SignatureException {
+ // FORMAT:
+ // * length-prefixed sequence of length-prefixed signer blocks.
+
+ List<byte[]> signerBlocks = new ArrayList<>(signerConfigs.size());
+ int signerNumber = 0;
+ for (SignerConfig signerConfig : signerConfigs) {
+ signerNumber++;
+ byte[] signerBlock;
+ try {
+ signerBlock = generateSignerBlock(signerConfig, contentDigests);
+ } catch (InvalidKeyException e) {
+ throw new InvalidKeyException("Signer #" + signerNumber + " failed", e);
+ } catch (SignatureException e) {
+ throw new SignatureException("Signer #" + signerNumber + " failed", e);
+ }
+ signerBlocks.add(signerBlock);
+ }
+
+ return encodeAsSequenceOfLengthPrefixedElements(
+ new byte[][] {
+ encodeAsSequenceOfLengthPrefixedElements(signerBlocks),
+ });
+ }
+
+ private static byte[] generateSignerBlock(
+ SignerConfig signerConfig,
+ Map<Integer, byte[]> contentDigests) throws InvalidKeyException, SignatureException {
+ if (signerConfig.certificates.isEmpty()) {
+ throw new SignatureException("No certificates configured for signer");
+ }
+ PublicKey publicKey = signerConfig.certificates.get(0).getPublicKey();
+
+ byte[] encodedPublicKey = encodePublicKey(publicKey);
+
+ V2SignatureSchemeBlock.SignedData signedData = new V2SignatureSchemeBlock.SignedData();
+ try {
+ signedData.certificates = encodeCertificates(signerConfig.certificates);
+ } catch (CertificateEncodingException e) {
+ throw new SignatureException("Failed to encode certificates", e);
+ }
+
+ List<Pair<Integer, byte[]>> digests =
+ new ArrayList<>(signerConfig.signatureAlgorithms.size());
+ for (int signatureAlgorithm : signerConfig.signatureAlgorithms) {
+ int contentDigestAlgorithm =
+ getSignatureAlgorithmContentDigestAlgorithm(signatureAlgorithm);
+ byte[] contentDigest = contentDigests.get(contentDigestAlgorithm);
+ if (contentDigest == null) {
+ throw new RuntimeException(
+ getContentDigestAlgorithmJcaDigestAlgorithm(contentDigestAlgorithm)
+ + " content digest for "
+ + getSignatureAlgorithmJcaSignatureAlgorithm(signatureAlgorithm)
+ + " not computed");
+ }
+ digests.add(Pair.create(signatureAlgorithm, contentDigest));
+ }
+ signedData.digests = digests;
+
+ V2SignatureSchemeBlock.Signer signer = new V2SignatureSchemeBlock.Signer();
+ // FORMAT:
+ // * length-prefixed sequence of length-prefixed digests:
+ // * uint32: signature algorithm ID
+ // * length-prefixed bytes: digest of contents
+ // * length-prefixed sequence of certificates:
+ // * length-prefixed bytes: X.509 certificate (ASN.1 DER encoded).
+ // * length-prefixed sequence of length-prefixed additional attributes:
+ // * uint32: ID
+ // * (length - 4) bytes: value
+ signer.signedData = encodeAsSequenceOfLengthPrefixedElements(new byte[][] {
+ encodeAsSequenceOfLengthPrefixedPairsOfIntAndLengthPrefixedBytes(signedData.digests),
+ encodeAsSequenceOfLengthPrefixedElements(signedData.certificates),
+ // additional attributes
+ new byte[0],
+ });
+ signer.publicKey = encodedPublicKey;
+ signer.signatures = new ArrayList<>();
+ for (int signatureAlgorithm : signerConfig.signatureAlgorithms) {
+ Pair<String, ? extends AlgorithmParameterSpec> signatureParams =
+ getSignatureAlgorithmJcaSignatureAlgorithm(signatureAlgorithm);
+ String jcaSignatureAlgorithm = signatureParams.getFirst();
+ AlgorithmParameterSpec jcaSignatureAlgorithmParams = signatureParams.getSecond();
+ byte[] signatureBytes;
+ try {
+ Signature signature = Signature.getInstance(jcaSignatureAlgorithm);
+ signature.initSign(signerConfig.privateKey);
+ if (jcaSignatureAlgorithmParams != null) {
+ signature.setParameter(jcaSignatureAlgorithmParams);
+ }
+ signature.update(signer.signedData);
+ signatureBytes = signature.sign();
+ } catch (InvalidKeyException e) {
+                throw new InvalidKeyException("Failed to sign using " + jcaSignatureAlgorithm, e);
+ } catch (NoSuchAlgorithmException | InvalidAlgorithmParameterException
+ | SignatureException e) {
+                throw new SignatureException("Failed to sign using " + jcaSignatureAlgorithm, e);
+ }
+
+ try {
+ Signature signature = Signature.getInstance(jcaSignatureAlgorithm);
+ signature.initVerify(publicKey);
+ if (jcaSignatureAlgorithmParams != null) {
+ signature.setParameter(jcaSignatureAlgorithmParams);
+ }
+ signature.update(signer.signedData);
+ if (!signature.verify(signatureBytes)) {
+ throw new SignatureException("Signature did not verify");
+ }
+ } catch (InvalidKeyException e) {
+ throw new InvalidKeyException("Failed to verify generated " + jcaSignatureAlgorithm
+ + " signature using public key from certificate", e);
+ } catch (NoSuchAlgorithmException | InvalidAlgorithmParameterException
+ | SignatureException e) {
+ throw new SignatureException("Failed to verify generated " + jcaSignatureAlgorithm
+ + " signature using public key from certificate", e);
+ }
+
+ signer.signatures.add(Pair.create(signatureAlgorithm, signatureBytes));
+ }
+
+ // FORMAT:
+ // * length-prefixed signed data
+ // * length-prefixed sequence of length-prefixed signatures:
+ // * uint32: signature algorithm ID
+ // * length-prefixed bytes: signature of signed data
+ // * length-prefixed bytes: public key (X.509 SubjectPublicKeyInfo, ASN.1 DER encoded)
+ return encodeAsSequenceOfLengthPrefixedElements(
+ new byte[][] {
+ signer.signedData,
+ encodeAsSequenceOfLengthPrefixedPairsOfIntAndLengthPrefixedBytes(
+ signer.signatures),
+ signer.publicKey,
+ });
+ }
+
+ private static final class V2SignatureSchemeBlock {
+ private static final class Signer {
+ public byte[] signedData;
+ public List<Pair<Integer, byte[]>> signatures;
+ public byte[] publicKey;
+ }
+
+ private static final class SignedData {
+ public List<Pair<Integer, byte[]>> digests;
+ public List<byte[]> certificates;
+ }
+ }
+
+ private static byte[] encodePublicKey(PublicKey publicKey) throws InvalidKeyException {
+ byte[] encodedPublicKey = null;
+ if ("X.509".equals(publicKey.getFormat())) {
+ encodedPublicKey = publicKey.getEncoded();
+ }
+ if (encodedPublicKey == null) {
+ try {
+ encodedPublicKey =
+ KeyFactory.getInstance(publicKey.getAlgorithm())
+ .getKeySpec(publicKey, X509EncodedKeySpec.class)
+ .getEncoded();
+ } catch (NoSuchAlgorithmException | InvalidKeySpecException e) {
+ throw new InvalidKeyException(
+ "Failed to obtain X.509 encoded form of public key " + publicKey
+ + " of class " + publicKey.getClass().getName(),
+ e);
+ }
+ }
+ if ((encodedPublicKey == null) || (encodedPublicKey.length == 0)) {
+ throw new InvalidKeyException(
+ "Failed to obtain X.509 encoded form of public key " + publicKey
+ + " of class " + publicKey.getClass().getName());
+ }
+ return encodedPublicKey;
+ }
+
+ public static List<byte[]> encodeCertificates(List<X509Certificate> certificates)
+ throws CertificateEncodingException {
+ List<byte[]> result = new ArrayList<>();
+ for (X509Certificate certificate : certificates) {
+ result.add(certificate.getEncoded());
+ }
+ return result;
+ }
+
+ private static byte[] encodeAsSequenceOfLengthPrefixedElements(List<byte[]> sequence) {
+ return encodeAsSequenceOfLengthPrefixedElements(
+ sequence.toArray(new byte[sequence.size()][]));
+ }
+
+ private static byte[] encodeAsSequenceOfLengthPrefixedElements(byte[][] sequence) {
+ int payloadSize = 0;
+ for (byte[] element : sequence) {
+ payloadSize += 4 + element.length;
+ }
+ ByteBuffer result = ByteBuffer.allocate(payloadSize);
+ result.order(ByteOrder.LITTLE_ENDIAN);
+ for (byte[] element : sequence) {
+ result.putInt(element.length);
+ result.put(element);
+ }
+ return result.array();
+ }
+
+ private static byte[] encodeAsSequenceOfLengthPrefixedPairsOfIntAndLengthPrefixedBytes(
+ List<Pair<Integer, byte[]>> sequence) {
+ int resultSize = 0;
+ for (Pair<Integer, byte[]> element : sequence) {
+ resultSize += 12 + element.getSecond().length;
+ }
+ ByteBuffer result = ByteBuffer.allocate(resultSize);
+ result.order(ByteOrder.LITTLE_ENDIAN);
+ for (Pair<Integer, byte[]> element : sequence) {
+ byte[] second = element.getSecond();
+ result.putInt(8 + second.length);
+ result.putInt(element.getFirst());
+ result.putInt(second.length);
+ result.put(second);
+ }
+ return result.array();
+ }
+
+ /**
+ * Relative <em>get</em> method for reading {@code size} number of bytes from the current
+ * position of this buffer.
+ *
+ * <p>This method reads the next {@code size} bytes at this buffer's current position,
+ * returning them as a {@code ByteBuffer} with start set to 0, limit and capacity set to
+ * {@code size}, byte order set to this buffer's byte order; and then increments the position by
+ * {@code size}.
+ */
+ private static ByteBuffer getByteBuffer(ByteBuffer source, int size) {
+ if (size < 0) {
+ throw new IllegalArgumentException("size: " + size);
+ }
+ int originalLimit = source.limit();
+ int position = source.position();
+ int limit = position + size;
+ if ((limit < position) || (limit > originalLimit)) {
+ throw new BufferUnderflowException();
+ }
+ source.limit(limit);
+ try {
+ ByteBuffer result = source.slice();
+ result.order(source.order());
+ source.position(limit);
+ return result;
+ } finally {
+ source.limit(originalLimit);
+ }
+ }
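+
+ // For example, for a source buffer with position 4 and limit 10, getByteBuffer(source, 2)
+ // returns a slice covering bytes 4-5 (with position 0, limit and capacity 2, and the
+ // source's byte order) and advances the source's position to 6; the source's original
+ // limit is restored even if slicing throws.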
+
+ private static Pair<String, ? extends AlgorithmParameterSpec>
+ getSignatureAlgorithmJcaSignatureAlgorithm(int sigAlgorithm) {
+ switch (sigAlgorithm) {
+ case SIGNATURE_RSA_PSS_WITH_SHA256:
+ return Pair.create(
+ "SHA256withRSA/PSS",
+ new PSSParameterSpec(
+ "SHA-256", "MGF1", MGF1ParameterSpec.SHA256, 256 / 8, 1));
+ case SIGNATURE_RSA_PSS_WITH_SHA512:
+ return Pair.create(
+ "SHA512withRSA/PSS",
+ new PSSParameterSpec(
+ "SHA-512", "MGF1", MGF1ParameterSpec.SHA512, 512 / 8, 1));
+ case SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA256:
+ return Pair.create("SHA256withRSA", null);
+ case SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA512:
+ return Pair.create("SHA512withRSA", null);
+ case SIGNATURE_ECDSA_WITH_SHA256:
+ return Pair.create("SHA256withECDSA", null);
+ case SIGNATURE_ECDSA_WITH_SHA512:
+ return Pair.create("SHA512withECDSA", null);
+ case SIGNATURE_DSA_WITH_SHA256:
+ return Pair.create("SHA256withDSA", null);
+ case SIGNATURE_DSA_WITH_SHA512:
+ return Pair.create("SHA512withDSA", null);
+ default:
+ throw new IllegalArgumentException(
+ "Unknown signature algorithm: 0x"
+ + Long.toHexString(sigAlgorithm & 0xffffffffL));
+ }
+ }
+
+ private static int getSignatureAlgorithmContentDigestAlgorithm(int sigAlgorithm) {
+ switch (sigAlgorithm) {
+ case SIGNATURE_RSA_PSS_WITH_SHA256:
+ case SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA256:
+ case SIGNATURE_ECDSA_WITH_SHA256:
+ case SIGNATURE_DSA_WITH_SHA256:
+ return CONTENT_DIGEST_CHUNKED_SHA256;
+ case SIGNATURE_RSA_PSS_WITH_SHA512:
+ case SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA512:
+ case SIGNATURE_ECDSA_WITH_SHA512:
+ case SIGNATURE_DSA_WITH_SHA512:
+ return CONTENT_DIGEST_CHUNKED_SHA512;
+ default:
+ throw new IllegalArgumentException(
+ "Unknown signature algorithm: 0x"
+ + Long.toHexString(sigAlgorithm & 0xffffffffL));
+ }
+ }
+
+ private static String getContentDigestAlgorithmJcaDigestAlgorithm(int digestAlgorithm) {
+ switch (digestAlgorithm) {
+ case CONTENT_DIGEST_CHUNKED_SHA256:
+ return "SHA-256";
+ case CONTENT_DIGEST_CHUNKED_SHA512:
+ return "SHA-512";
+ default:
+ throw new IllegalArgumentException(
+ "Unknown content digest algorthm: " + digestAlgorithm);
+ }
+ }
+
+ private static int getContentDigestAlgorithmOutputSizeBytes(int digestAlgorithm) {
+ switch (digestAlgorithm) {
+ case CONTENT_DIGEST_CHUNKED_SHA256:
+ return 256 / 8;
+ case CONTENT_DIGEST_CHUNKED_SHA512:
+ return 512 / 8;
+ default:
+ throw new IllegalArgumentException(
+ "Unknown content digest algorthm: " + digestAlgorithm);
+ }
+ }
+
+ /**
+ * Indicates that the APK file could not be parsed.
+ */
+ public static class ApkParseException extends Exception {
+ private static final long serialVersionUID = 1L;
+
+ public ApkParseException(String message) {
+ super(message);
+ }
+
+ public ApkParseException(String message, Throwable cause) {
+ super(message, cause);
+ }
+ }
+}
diff --git a/tools/signapk/src/com/android/signapk/Pair.java b/tools/signapk/src/com/android/signapk/Pair.java
new file mode 100644
index 0000000..e4a6c92
--- /dev/null
+++ b/tools/signapk/src/com/android/signapk/Pair.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.signapk;
+
+/**
+ * Pair of two elements.
+ */
+public final class Pair<A, B> {
+ private final A mFirst;
+ private final B mSecond;
+
+ private Pair(A first, B second) {
+ mFirst = first;
+ mSecond = second;
+ }
+
+ public static <A, B> Pair<A, B> create(A first, B second) {
+ return new Pair<A, B>(first, second);
+ }
+
+ public A getFirst() {
+ return mFirst;
+ }
+
+ public B getSecond() {
+ return mSecond;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((mFirst == null) ? 0 : mFirst.hashCode());
+ result = prime * result + ((mSecond == null) ? 0 : mSecond.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null) {
+ return false;
+ }
+ if (getClass() != obj.getClass()) {
+ return false;
+ }
+ @SuppressWarnings("rawtypes")
+ Pair other = (Pair) obj;
+ if (mFirst == null) {
+ if (other.mFirst != null) {
+ return false;
+ }
+ } else if (!mFirst.equals(other.mFirst)) {
+ return false;
+ }
+ if (mSecond == null) {
+ if (other.mSecond != null) {
+ return false;
+ }
+ } else if (!mSecond.equals(other.mSecond)) {
+ return false;
+ }
+ return true;
+ }
+}
diff --git a/tools/signapk/SignApk.java b/tools/signapk/src/com/android/signapk/SignApk.java
similarity index 76%
rename from tools/signapk/SignApk.java
rename to tools/signapk/src/com/android/signapk/SignApk.java
index 5afb8d1..c6ed2e1 100644
--- a/tools/signapk/SignApk.java
+++ b/tools/signapk/src/com/android/signapk/SignApk.java
@@ -51,13 +51,16 @@
import java.io.OutputStream;
import java.io.PrintStream;
import java.lang.reflect.Constructor;
+import java.nio.ByteBuffer;
import java.security.DigestOutputStream;
import java.security.GeneralSecurityException;
+import java.security.InvalidKeyException;
import java.security.Key;
import java.security.KeyFactory;
import java.security.MessageDigest;
import java.security.PrivateKey;
import java.security.Provider;
+import java.security.PublicKey;
import java.security.Security;
import java.security.cert.CertificateEncodingException;
import java.security.cert.CertificateFactory;
@@ -67,6 +70,8 @@
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
+import java.util.Iterator;
+import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.TreeMap;
@@ -101,7 +106,8 @@
/**
* Command line tool to sign JAR files (including APKs and OTA updates) in a way
* compatible with the mincrypt verifier, using EC or RSA keys and SHA1 or
- * SHA-256 (see historical note).
+ * SHA-256 (see historical note). The tool can additionally sign APKs using
+ * APK Signature Scheme v2.
*/
class SignApk {
private static final String CERT_SF_NAME = "META-INF/CERT.SF";
@@ -115,15 +121,28 @@
private static final int USE_SHA1 = 1;
private static final int USE_SHA256 = 2;
+ /** Digest algorithm used when signing the APK using APK Signature Scheme v2. */
+ private static final String APK_SIG_SCHEME_V2_DIGEST_ALGORITHM = "SHA-256";
+
+ /**
+ * Minimum Android SDK API Level that accepts JAR signatures using SHA-256. Older platform
+ * versions accept only SHA-1 signatures.
+ */
+ private static final int MIN_API_LEVEL_FOR_SHA256_JAR_SIGNATURES = 18;
+
/**
* Return one of USE_SHA1 or USE_SHA256 according to the signature
* algorithm specified in the cert.
*/
- private static int getDigestAlgorithm(X509Certificate cert) {
+ private static int getDigestAlgorithm(X509Certificate cert, int minSdkVersion) {
String sigAlg = cert.getSigAlgName().toUpperCase(Locale.US);
- if ("SHA1WITHRSA".equals(sigAlg) ||
- "MD5WITHRSA".equals(sigAlg)) { // see "HISTORICAL NOTE" above.
- return USE_SHA1;
+ if ("SHA1WITHRSA".equals(sigAlg) || "MD5WITHRSA".equals(sigAlg)) {
+ // see "HISTORICAL NOTE" above.
+ if (minSdkVersion < MIN_API_LEVEL_FOR_SHA256_JAR_SIGNATURES) {
+ return USE_SHA1;
+ } else {
+ return USE_SHA256;
+ }
} else if (sigAlg.startsWith("SHA256WITH")) {
return USE_SHA256;
} else {
@@ -133,10 +152,11 @@
}
/** Returns the expected signature algorithm for this key type. */
- private static String getSignatureAlgorithm(X509Certificate cert) {
+ private static String getSignatureAlgorithm(X509Certificate cert, int minSdkVersion) {
String keyType = cert.getPublicKey().getAlgorithm().toUpperCase(Locale.US);
if ("RSA".equalsIgnoreCase(keyType)) {
- if (getDigestAlgorithm(cert) == USE_SHA256) {
+ if ((minSdkVersion >= MIN_API_LEVEL_FOR_SHA256_JAR_SIGNATURES)
+ || (getDigestAlgorithm(cert, minSdkVersion) == USE_SHA256)) {
return "SHA256withRSA";
} else {
return "SHA1withRSA";
@@ -309,10 +329,24 @@
Attributes attr = null;
if (input != null) attr = input.getAttributes(name);
attr = attr != null ? new Attributes(attr) : new Attributes();
+ // Remove any previously computed digests from this entry's attributes.
+ for (Iterator<Object> i = attr.keySet().iterator(); i.hasNext();) {
+ Object key = i.next();
+ if (!(key instanceof Attributes.Name)) {
+ continue;
+ }
+ String attributeNameLowerCase =
+ ((Attributes.Name) key).toString().toLowerCase(Locale.US);
+ if (attributeNameLowerCase.endsWith("-digest")) {
+ i.remove();
+ }
+ }
+ // Add SHA-1 digest if requested
if (md_sha1 != null) {
attr.putValue("SHA1-Digest",
new String(Base64.encode(md_sha1.digest()), "ASCII"));
}
+ // Add SHA-256 digest if requested
if (md_sha256 != null) {
attr.putValue("SHA-256-Digest",
new String(Base64.encode(md_sha256.digest()), "ASCII"));
@@ -388,12 +422,22 @@
/** Write a .SF file with a digest of the specified manifest. */
private static void writeSignatureFile(Manifest manifest, OutputStream out,
- int hash)
+ int hash, boolean additionallySignedUsingAnApkSignatureScheme)
throws IOException, GeneralSecurityException {
Manifest sf = new Manifest();
Attributes main = sf.getMainAttributes();
main.putValue("Signature-Version", "1.0");
main.putValue("Created-By", "1.0 (Android SignApk)");
+ if (additionallySignedUsingAnApkSignatureScheme) {
+ // Add APK Signature Scheme v2 signature stripping protection.
+ // This attribute indicates that this APK is supposed to have been signed using one or
+ // more APK-specific signature schemes in addition to the standard JAR signature scheme
+ // used by this code. APK signature verifier should reject the APK if it does not
+ // contain a signature for the signature scheme the verifier prefers out of this set.
+ main.putValue(
+ ApkSignerV2.SF_ATTRIBUTE_ANDROID_APK_SIGNED_NAME,
+ ApkSignerV2.SF_ATTRIBUTE_ANDROID_APK_SIGNED_VALUE);
+ }
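+ // The main section of CERT.SF then carries an additional attribute (the exact name and
+ // value are the ApkSignerV2 constants referenced above, e.g. "X-Android-APK-Signed").
+ // A v2-capable verifier that sees this attribute in the JAR signature but finds no APK
+ // Signature Scheme v2 block can conclude that the v2 signature was stripped and reject
+ // the APK.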
MessageDigest md = MessageDigest.getInstance(
hash == USE_SHA256 ? "SHA256" : "SHA1");
@@ -418,7 +462,7 @@
print.flush();
Attributes sfAttr = new Attributes();
- sfAttr.putValue(hash == USE_SHA256 ? "SHA-256-Digest" : "SHA1-Digest-Manifest",
+ sfAttr.putValue(hash == USE_SHA256 ? "SHA-256-Digest" : "SHA1-Digest",
new String(Base64.encode(md.digest()), "ASCII"));
sf.getEntries().put(entry.getKey(), sfAttr);
}
@@ -438,7 +482,7 @@
/** Sign data and write the digital signature to 'out'. */
private static void writeSignatureBlock(
- CMSTypedData data, X509Certificate publicKey, PrivateKey privateKey,
+ CMSTypedData data, X509Certificate publicKey, PrivateKey privateKey, int minSdkVersion,
OutputStream out)
throws IOException,
CertificateEncodingException,
@@ -449,8 +493,9 @@
JcaCertStore certs = new JcaCertStore(certList);
CMSSignedDataGenerator gen = new CMSSignedDataGenerator();
- ContentSigner signer = new JcaContentSignerBuilder(getSignatureAlgorithm(publicKey))
- .build(privateKey);
+ ContentSigner signer =
+ new JcaContentSignerBuilder(getSignatureAlgorithm(publicKey, minSdkVersion))
+ .build(privateKey);
gen.addSignerInfoGenerator(
new JcaSignerInfoGeneratorBuilder(
new JcaDigestCalculatorProviderBuilder()
@@ -631,21 +676,23 @@
}
private static class CMSSigner implements CMSTypedData {
- private JarFile inputJar;
- private File publicKeyFile;
- private X509Certificate publicKey;
- private PrivateKey privateKey;
- private OutputStream outputStream;
+ private final JarFile inputJar;
+ private final File publicKeyFile;
+ private final X509Certificate publicKey;
+ private final PrivateKey privateKey;
+ private final int minSdkVersion;
+ private final OutputStream outputStream;
private final ASN1ObjectIdentifier type;
private WholeFileSignerOutputStream signer;
public CMSSigner(JarFile inputJar, File publicKeyFile,
- X509Certificate publicKey, PrivateKey privateKey,
+ X509Certificate publicKey, PrivateKey privateKey, int minSdkVersion,
OutputStream outputStream) {
this.inputJar = inputJar;
this.publicKeyFile = publicKeyFile;
this.publicKey = publicKey;
this.privateKey = privateKey;
+ this.minSdkVersion = minSdkVersion;
this.outputStream = outputStream;
this.type = new ASN1ObjectIdentifier(CMSObjectIdentifiers.data.getId());
}
@@ -670,7 +717,7 @@
signer = new WholeFileSignerOutputStream(out, outputStream);
JarOutputStream outputJar = new JarOutputStream(signer);
- int hash = getDigestAlgorithm(publicKey);
+ int hash = getDigestAlgorithm(publicKey, minSdkVersion);
// Assume the certificate is valid for at least an hour.
long timestamp = publicKey.getNotBefore().getTime() + 3600L * 1000;
@@ -682,6 +729,8 @@
signFile(manifest,
new X509Certificate[]{ publicKey },
new PrivateKey[]{ privateKey },
+ minSdkVersion,
+ false, // Don't sign using APK Signature Scheme v2
outputJar);
signer.notifyClosing();
@@ -698,7 +747,7 @@
CertificateEncodingException,
OperatorCreationException,
CMSException {
- SignApk.writeSignatureBlock(this, publicKey, privateKey, temp);
+ SignApk.writeSignatureBlock(this, publicKey, privateKey, minSdkVersion, temp);
}
public WholeFileSignerOutputStream getSigner() {
@@ -708,9 +757,10 @@
private static void signWholeFile(JarFile inputJar, File publicKeyFile,
X509Certificate publicKey, PrivateKey privateKey,
+ int minSdkVersion,
OutputStream outputStream) throws Exception {
CMSSigner cmsOut = new CMSSigner(inputJar, publicKeyFile,
- publicKey, privateKey, outputStream);
+ publicKey, privateKey, minSdkVersion, outputStream);
ByteArrayOutputStream temp = new ByteArrayOutputStream();
@@ -776,6 +826,8 @@
private static void signFile(Manifest manifest,
X509Certificate[] publicKey, PrivateKey[] privateKey,
+ int minSdkVersion,
+ boolean additionallySignedUsingAnApkSignatureScheme,
JarOutputStream outputJar)
throws Exception {
// Assume the certificate is valid for at least an hour.
@@ -795,7 +847,11 @@
je.setTime(timestamp);
outputJar.putNextEntry(je);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
- writeSignatureFile(manifest, baos, getDigestAlgorithm(publicKey[k]));
+ writeSignatureFile(
+ manifest,
+ baos,
+ getDigestAlgorithm(publicKey[k], minSdkVersion),
+ additionallySignedUsingAnApkSignatureScheme);
byte[] signedData = baos.toByteArray();
outputJar.write(signedData);
@@ -807,7 +863,7 @@
je.setTime(timestamp);
outputJar.putNextEntry(je);
writeSignatureBlock(new CMSProcessableByteArray(signedData),
- publicKey[k], privateKey[k], outputJar);
+ publicKey[k], privateKey[k], minSdkVersion, outputJar);
}
}
@@ -863,10 +919,89 @@
Security.insertProviderAt((Provider) o, 1);
}
+ /**
+ * Converts the provided lists of private keys, their X.509 certificates, and digest algorithms
+ * into a list of APK Signature Scheme v2 {@code SignerConfig} instances.
+ */
+ public static List<ApkSignerV2.SignerConfig> createV2SignerConfigs(
+ PrivateKey[] privateKeys, X509Certificate[] certificates, String[] digestAlgorithms)
+ throws InvalidKeyException {
+ if (privateKeys.length != certificates.length) {
+ throw new IllegalArgumentException(
+ "The number of private keys must match the number of certificates: "
+ + privateKeys.length + " vs" + certificates.length);
+ }
+ List<ApkSignerV2.SignerConfig> result = new ArrayList<>(privateKeys.length);
+ for (int i = 0; i < privateKeys.length; i++) {
+ PrivateKey privateKey = privateKeys[i];
+ X509Certificate certificate = certificates[i];
+ PublicKey publicKey = certificate.getPublicKey();
+ String keyAlgorithm = privateKey.getAlgorithm();
+ if (!keyAlgorithm.equalsIgnoreCase(publicKey.getAlgorithm())) {
+ throw new InvalidKeyException(
+ "Key algorithm of private key #" + (i + 1) + " does not match key"
+ + " algorithm of public key #" + (i + 1) + ": " + keyAlgorithm
+ + " vs " + publicKey.getAlgorithm());
+ }
+ ApkSignerV2.SignerConfig signerConfig = new ApkSignerV2.SignerConfig();
+ signerConfig.privateKey = privateKey;
+ signerConfig.certificates = Collections.singletonList(certificate);
+ List<Integer> signatureAlgorithms = new ArrayList<>(digestAlgorithms.length);
+ for (String digestAlgorithm : digestAlgorithms) {
+ try {
+ signatureAlgorithms.add(
+ getV2SignatureAlgorithm(keyAlgorithm, digestAlgorithm));
+ } catch (IllegalArgumentException e) {
+ throw new InvalidKeyException(
+ "Unsupported key and digest algorithm combination for signer #"
+ + (i + 1),
+ e);
+ }
+ }
+ signerConfig.signatureAlgorithms = signatureAlgorithms;
+ result.add(signerConfig);
+ }
+ return result;
+ }
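+
+ // Typical use (variable names hypothetical): for a single RSA key,
+ // createV2SignerConfigs(new PrivateKey[] {rsaKey}, new X509Certificate[] {rsaCert},
+ // new String[] {"SHA-256"})
+ // returns one SignerConfig whose signatureAlgorithms list contains
+ // SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA256 (see getV2SignatureAlgorithm below).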
+
+ private static int getV2SignatureAlgorithm(String keyAlgorithm, String digestAlgorithm) {
+ if ("SHA-256".equalsIgnoreCase(digestAlgorithm)) {
+ if ("RSA".equalsIgnoreCase(keyAlgorithm)) {
+ // Use RSASSA-PKCS1-v1_5 signature scheme instead of RSASSA-PSS to guarantee
+ // deterministic signatures which make life easier for OTA updates (fewer files
+ // changed when deterministic signature schemes are used).
+ return ApkSignerV2.SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA256;
+ } else if ("EC".equalsIgnoreCase(keyAlgorithm)) {
+ return ApkSignerV2.SIGNATURE_ECDSA_WITH_SHA256;
+ } else if ("DSA".equalsIgnoreCase(keyAlgorithm)) {
+ return ApkSignerV2.SIGNATURE_DSA_WITH_SHA256;
+ } else {
+ throw new IllegalArgumentException("Unsupported key algorithm: " + keyAlgorithm);
+ }
+ } else if ("SHA-512".equalsIgnoreCase(digestAlgorithm)) {
+ if ("RSA".equalsIgnoreCase(keyAlgorithm)) {
+ // Use RSASSA-PKCS1-v1_5 signature scheme instead of RSASSA-PSS to guarantee
+ // deterministic signatures which make life easier for OTA updates (fewer files
+ // changed when deterministic signature schemes are used).
+ return ApkSignerV2.SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA512;
+ } else if ("EC".equalsIgnoreCase(keyAlgorithm)) {
+ return ApkSignerV2.SIGNATURE_ECDSA_WITH_SHA512;
+ } else if ("DSA".equalsIgnoreCase(keyAlgorithm)) {
+ return ApkSignerV2.SIGNATURE_DSA_WITH_SHA512;
+ } else {
+ throw new IllegalArgumentException("Unsupported key algorithm: " + keyAlgorithm);
+ }
+ } else {
+ throw new IllegalArgumentException("Unsupported digest algorithm: " + digestAlgorithm);
+ }
+ }
+
private static void usage() {
System.err.println("Usage: signapk [-w] " +
"[-a <alignment>] " +
"[-providerClass <className>] " +
+ "[--min-sdk-version <n>] " +
+ "[--disable-v2] " +
"publickey.x509[.pem] privatekey.pk8 " +
"[publickey2.x509[.pem] privatekey2.pk8 ...] " +
"input.jar output.jar");
@@ -887,6 +1022,8 @@
boolean signWholeFile = false;
String providerClass = null;
int alignment = 4;
+ int minSdkVersion = 0;
+ boolean signUsingApkSignatureSchemeV2 = true;
int argstart = 0;
while (argstart < args.length && args[argstart].startsWith("-")) {
@@ -902,6 +1039,18 @@
} else if ("-a".equals(args[argstart])) {
alignment = Integer.parseInt(args[++argstart]);
++argstart;
+ } else if ("--min-sdk-version".equals(args[argstart])) {
+ String minSdkVersionString = args[++argstart];
+ try {
+ minSdkVersion = Integer.parseInt(minSdkVersionString);
+ } catch (NumberFormatException e) {
+ throw new IllegalArgumentException(
+ "--min-sdk-version must be a decimal number: " + minSdkVersionString);
+ }
+ ++argstart;
+ } else if ("--disable-v2".equals(args[argstart])) {
+ signUsingApkSignatureSchemeV2 = false;
+ ++argstart;
} else {
usage();
}
@@ -931,7 +1080,7 @@
for (int i = 0; i < numKeys; ++i) {
int argNum = argstart + i*2;
publicKey[i] = readPublicKey(new File(args[argNum]));
- hashes |= getDigestAlgorithm(publicKey[i]);
+ hashes |= getDigestAlgorithm(publicKey[i], minSdkVersion);
}
} catch (IllegalArgumentException e) {
System.err.println(e);
@@ -952,25 +1101,56 @@
outputFile = new FileOutputStream(outputFilename);
-
+ // NOTE: Signing currently recompresses any compressed entries using Deflate (default
+ // compression level for OTA update files and maximum compression level for APKs).
if (signWholeFile) {
SignApk.signWholeFile(inputJar, firstPublicKeyFile,
- publicKey[0], privateKey[0], outputFile);
+ publicKey[0], privateKey[0], minSdkVersion, outputFile);
} else {
- JarOutputStream outputJar = new JarOutputStream(outputFile);
-
- // For signing .apks, use the maximum compression to make
- // them as small as possible (since they live forever on
- // the system partition). For OTA packages, use the
- // default compression level, which is much much faster
- // and produces output that is only a tiny bit larger
- // (~0.1% on full OTA packages I tested).
+ // Generate, in memory, an APK signed using standard JAR Signature Scheme.
+ ByteArrayOutputStream v1SignedApkBuf = new ByteArrayOutputStream();
+ JarOutputStream outputJar = new JarOutputStream(v1SignedApkBuf);
+ // Use maximum compression for compressed entries because the APK lives forever on
+ // the system partition.
outputJar.setLevel(9);
-
Manifest manifest = addDigestsToManifest(inputJar, hashes);
copyFiles(manifest, inputJar, outputJar, timestamp, alignment);
- signFile(manifest, publicKey, privateKey, outputJar);
+ signFile(
+ manifest,
+ publicKey, privateKey, minSdkVersion, signUsingApkSignatureSchemeV2,
+ outputJar);
outputJar.close();
+ ByteBuffer v1SignedApk = ByteBuffer.wrap(v1SignedApkBuf.toByteArray());
+ v1SignedApkBuf.reset();
+
+ ByteBuffer[] outputChunks;
+ if (signUsingApkSignatureSchemeV2) {
+ // Additionally sign the APK using the APK Signature Scheme v2.
+ ByteBuffer apkContents = v1SignedApk;
+ List<ApkSignerV2.SignerConfig> signerConfigs =
+ createV2SignerConfigs(
+ privateKey,
+ publicKey,
+ new String[] {APK_SIG_SCHEME_V2_DIGEST_ALGORITHM});
+ outputChunks = ApkSignerV2.sign(apkContents, signerConfigs);
+ } else {
+ // Output the JAR-signed APK as is.
+ outputChunks = new ByteBuffer[] {v1SignedApk};
+ }
+
+ // This assumes outputChunks are array-backed. To avoid this assumption, the
+ // code could be rewritten to use FileChannel.
+ for (ByteBuffer outputChunk : outputChunks) {
+ outputFile.write(
+ outputChunk.array(),
+ outputChunk.arrayOffset() + outputChunk.position(),
+ outputChunk.remaining());
+ outputChunk.position(outputChunk.limit());
+ }
+
+ outputFile.close();
+ outputFile = null;
+ return;
}
} catch (Exception e) {
e.printStackTrace();
diff --git a/tools/signapk/src/com/android/signapk/ZipUtils.java b/tools/signapk/src/com/android/signapk/ZipUtils.java
new file mode 100644
index 0000000..b9a17ca
--- /dev/null
+++ b/tools/signapk/src/com/android/signapk/ZipUtils.java
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.signapk;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Assorted ZIP format helpers.
+ *
+ * <p>NOTE: Most helper methods operating on {@code ByteBuffer} instances expect that the byte
+ * order of these buffers is little-endian.
+ */
+public abstract class ZipUtils {
+ private ZipUtils() {}
+
+ private static final int ZIP_EOCD_REC_MIN_SIZE = 22;
+ private static final int ZIP_EOCD_REC_SIG = 0x06054b50;
+ private static final int ZIP_EOCD_CENTRAL_DIR_SIZE_FIELD_OFFSET = 12;
+ private static final int ZIP_EOCD_CENTRAL_DIR_OFFSET_FIELD_OFFSET = 16;
+ private static final int ZIP_EOCD_COMMENT_LENGTH_FIELD_OFFSET = 20;
+
+ private static final int ZIP64_EOCD_LOCATOR_SIZE = 20;
+ private static final int ZIP64_EOCD_LOCATOR_SIG = 0x07064b50;
+
+ private static final int UINT16_MAX_VALUE = 0xffff;
+
+ /**
+ * Returns the position at which the ZIP End of Central Directory record starts in the provided
+ * buffer or {@code -1} if the record is not present.
+ *
+ * <p>NOTE: Byte order of {@code zipContents} must be little-endian.
+ */
+ public static int findZipEndOfCentralDirectoryRecord(ByteBuffer zipContents) {
+ assertByteOrderLittleEndian(zipContents);
+
+ // The ZIP End of Central Directory (EOCD) record is located at the very end of the ZIP
+ // archive. The record can be identified by its 4-byte signature/magic, which is located at
+ // the very beginning of the record. A complication is that the record is variable-length
+ // because of its comment field.
+ // The algorithm for locating the ZIP EOCD record is as follows. We search backwards from
+ // the end of the buffer for the EOCD record signature. Whenever we find a signature, we
+ // check whether the candidate record's comment length is such that the remainder of the
+ // record takes up exactly the remaining bytes in the buffer. The search is bounded because
+ // the comment field is at most 65535 bytes long: its length is stored in an unsigned
+ // 16-bit field.
+
+ int archiveSize = zipContents.capacity();
+ if (archiveSize < ZIP_EOCD_REC_MIN_SIZE) {
+ System.out.println("File size smaller than EOCD min size");
+ return -1;
+ }
+ int maxCommentLength = Math.min(archiveSize - ZIP_EOCD_REC_MIN_SIZE, UINT16_MAX_VALUE);
+ int eocdWithEmptyCommentStartPosition = archiveSize - ZIP_EOCD_REC_MIN_SIZE;
+ for (int expectedCommentLength = 0; expectedCommentLength <= maxCommentLength;
+ expectedCommentLength++) {
+ int eocdStartPos = eocdWithEmptyCommentStartPosition - expectedCommentLength;
+ if (zipContents.getInt(eocdStartPos) == ZIP_EOCD_REC_SIG) {
+ int actualCommentLength =
+ getUnsignedInt16(
+ zipContents, eocdStartPos + ZIP_EOCD_COMMENT_LENGTH_FIELD_OFFSET);
+ if (actualCommentLength == expectedCommentLength) {
+ return eocdStartPos;
+ }
+ }
+ }
+
+ return -1;
+ }
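+
+ // For example, in a 100-byte archive whose EOCD record carries a 5-byte comment, the
+ // record starts at offset 100 - 22 - 5 = 73: the scan first tests offset 78 (empty
+ // comment), then walks backwards until the signature at offset 73 is found and the
+ // stored comment length (5) matches the expected one.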
+
+ /**
+ * Returns {@code true} if the provided buffer contains a ZIP64 End of Central Directory
+ * Locator.
+ *
+ * <p>NOTE: Byte order of {@code zipContents} must be little-endian.
+ */
+ public static final boolean isZip64EndOfCentralDirectoryLocatorPresent(
+ ByteBuffer zipContents, int zipEndOfCentralDirectoryPosition) {
+ assertByteOrderLittleEndian(zipContents);
+
+ // ZIP64 End of Central Directory Locator immediately precedes the ZIP End of Central
+ // Directory Record.
+
+ int locatorPosition = zipEndOfCentralDirectoryPosition - ZIP64_EOCD_LOCATOR_SIZE;
+ if (locatorPosition < 0) {
+ return false;
+ }
+
+ return zipContents.getInt(locatorPosition) == ZIP64_EOCD_LOCATOR_SIG;
+ }
+
+ /**
+ * Returns the offset of the start of the ZIP Central Directory in the archive.
+ *
+ * <p>NOTE: Byte order of {@code zipEndOfCentralDirectory} must be little-endian.
+ */
+ public static long getZipEocdCentralDirectoryOffset(ByteBuffer zipEndOfCentralDirectory) {
+ assertByteOrderLittleEndian(zipEndOfCentralDirectory);
+ return getUnsignedInt32(
+ zipEndOfCentralDirectory,
+ zipEndOfCentralDirectory.position() + ZIP_EOCD_CENTRAL_DIR_OFFSET_FIELD_OFFSET);
+ }
+
+ /**
+ * Sets the offset of the start of the ZIP Central Directory in the archive.
+ *
+ * <p>NOTE: Byte order of {@code zipEndOfCentralDirectory} must be little-endian.
+ */
+ public static void setZipEocdCentralDirectoryOffset(
+ ByteBuffer zipEndOfCentralDirectory, long offset) {
+ assertByteOrderLittleEndian(zipEndOfCentralDirectory);
+ setUnsignedInt32(
+ zipEndOfCentralDirectory,
+ zipEndOfCentralDirectory.position() + ZIP_EOCD_CENTRAL_DIR_OFFSET_FIELD_OFFSET,
+ offset);
+ }
+
+ /**
+ * Returns the size (in bytes) of the ZIP Central Directory.
+ *
+ * <p>NOTE: Byte order of {@code zipEndOfCentralDirectory} must be little-endian.
+ */
+ public static long getZipEocdCentralDirectorySizeBytes(ByteBuffer zipEndOfCentralDirectory) {
+ assertByteOrderLittleEndian(zipEndOfCentralDirectory);
+ return getUnsignedInt32(
+ zipEndOfCentralDirectory,
+ zipEndOfCentralDirectory.position() + ZIP_EOCD_CENTRAL_DIR_SIZE_FIELD_OFFSET);
+ }
+
+ private static void assertByteOrderLittleEndian(ByteBuffer buffer) {
+ if (buffer.order() != ByteOrder.LITTLE_ENDIAN) {
+ throw new IllegalArgumentException("ByteBuffer byte order must be little endian");
+ }
+ }
+
+ private static int getUnsignedInt16(ByteBuffer buffer, int offset) {
+ return buffer.getShort(offset) & 0xffff;
+ }
+
+ private static long getUnsignedInt32(ByteBuffer buffer, int offset) {
+ return buffer.getInt(offset) & 0xffffffffL;
+ }
+
+ private static void setUnsignedInt32(ByteBuffer buffer, int offset, long value) {
+ if ((value < 0) || (value > 0xffffffffL)) {
+ throw new IllegalArgumentException("uint32 value of out range: " + value);
+ }
+ // Use the offset as an absolute index, matching getUnsignedInt32; callers already add
+ // buffer.position() when computing the offset.
+ buffer.putInt(offset, (int) value);
+ }
+}