Merge "envsetup.sh: don't assume 'cd' is builtin"
diff --git a/core/allowed_ndk_types.mk b/core/allowed_ndk_types.mk
new file mode 100644
index 0000000..b88b9e8
--- /dev/null
+++ b/core/allowed_ndk_types.mk
@@ -0,0 +1,84 @@
+# Determines the types of NDK modules the current module is allowed to link to.
+# Input variables:
+# LOCAL_MODULE
+# LOCAL_MODULE_CLASS
+# LOCAL_NDK_STL_VARIANT
+# LOCAL_SDK_VERSION
+# Output variables:
+# my_ndk_stl_family: Family of the NDK STL.
+# my_ndk_stl_link_type: STL link type: static, shared, or none.
+# my_allowed_ndk_types: Types of NDK modules that may be linked.
+# my_warn_ndk_types: Types of NDK modules that shouldn't be linked, but are.
+
+my_allowed_ndk_types :=
+my_warn_ndk_types :=
+my_ndk_stl_family :=
+my_ndk_stl_link_type :=
+
+ifdef LOCAL_SDK_VERSION
+ ifeq ($(LOCAL_NDK_STL_VARIANT),)
+ my_ndk_stl_family := system
+ my_ndk_stl_link_type := shared
+ else ifeq ($(LOCAL_NDK_STL_VARIANT),system)
+ my_ndk_stl_family := system
+ my_ndk_stl_link_type := shared
+ else ifeq ($(LOCAL_NDK_STL_VARIANT),c++_shared)
+ my_ndk_stl_family := libc++
+ my_ndk_stl_link_type := shared
+ else ifeq ($(LOCAL_NDK_STL_VARIANT),c++_static)
+ my_ndk_stl_family := libc++
+ my_ndk_stl_link_type := static
+ else ifeq ($(LOCAL_NDK_STL_VARIANT),none)
+ my_ndk_stl_family := none
+ my_ndk_stl_link_type := none
+ else
+ $(call pretty-error,invalid LOCAL_NDK_STL_VARIANT: $(LOCAL_NDK_STL_VARIANT))
+ endif
+
+ ifeq ($(LOCAL_MODULE_CLASS),STATIC_LIBRARIES)
+ # The "none" link type indicates that nothing is actually linked. Since
+ # this is a static library, it's still up to the final use of the
+ # library whether a static or shared STL should be used.
+ my_ndk_stl_link_type := none
+ endif
+
+ # The system STL is only the C++ ABI layer, so it's compatible with any STL.
+ my_allowed_ndk_types += native:ndk:system:shared
+ my_allowed_ndk_types += native:ndk:system:none
+
+ # Libraries that don't use the STL can be linked to anything.
+ my_allowed_ndk_types += native:ndk:none:none
+
+ # And it's always okay to link a static library that uses your own STL type.
+ # Since nothing was actually linked into the static library, the first
+ # linked library in the dependency chain determines which STL gets used.
+ my_allowed_ndk_types += native:ndk:$(my_ndk_stl_family):none
+
+ ifeq ($(LOCAL_MODULE_CLASS),APPS)
+ # For an app package, it's actually okay to depend on any set of STLs.
+ # If any of the individual libraries depend on each other they've
+ # already been checked for consistency, and if they don't they'll be
+ # kept isolated by RTLD_LOCAL anyway.
+ my_allowed_ndk_types += \
+ native:ndk:libc++:shared native:ndk:libc++:static
+
+ # The "none" link type that used by static libraries is intentionally
+ # omitted here. We should only be dealing with shared libraries in
+ # LOCAL_JNI_SHARED_LIBRARIES.
+ else ifeq ($(my_ndk_stl_link_type),shared)
+ # Modules linked to a shared STL can only use another shared STL.
+ my_allowed_ndk_types += native:ndk:$(my_ndk_stl_family):shared
+ endif
+ # Else we are a non-static library that uses a static STL, and are
+ # incompatible with all other shared libraries that use an STL.
+else
+ my_allowed_ndk_types := \
+ native:ndk:none:none \
+ native:ndk:system:none \
+ native:ndk:system:shared \
+
+ ifeq ($(LOCAL_MODULE_CLASS),APPS)
+ # CTS is bad and it should feel bad: http://b/13249737
+ my_warn_ndk_types += native:ndk:libc++:static
+ endif
+endif
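
As a rough illustration of the inputs and outputs above (module name, SDK version, and sources are hypothetical), an Android.mk module declared as:

  include $(CLEAR_VARS)
  LOCAL_MODULE := libexample
  LOCAL_MODULE_CLASS := SHARED_LIBRARIES
  LOCAL_SRC_FILES := example.cpp
  LOCAL_SDK_VERSION := 27
  LOCAL_NDK_STL_VARIANT := c++_shared
  include $(BUILD_SHARED_LIBRARY)

is classified as my_ndk_stl_family := libc++ and my_ndk_stl_link_type := shared, so it may link modules of type native:ndk:system:shared, native:ndk:system:none, native:ndk:none:none, native:ndk:libc++:none, and native:ndk:libc++:shared, but not native:ndk:libc++:static.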
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 9234abe..cebf52b 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -499,18 +499,22 @@
# separate the multiple architectures into subdirectories of the testcase folder.
arch_dir :=
is_native :=
+multi_arch :=
ifeq ($(LOCAL_MODULE_CLASS),NATIVE_TESTS)
is_native := true
+ multi_arch := true
endif
ifeq ($(LOCAL_MODULE_CLASS),NATIVE_BENCHMARK)
is_native := true
+ multi_arch := true
endif
ifdef LOCAL_MULTILIB
- is_native := true
+ multi_arch := true
endif
-ifdef is_native
+ifdef multi_arch
arch_dir := /$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
endif
+multi_arch :=
# The module itself.
$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
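
This separates "is a native module" from "needs per-architecture subdirectories": LOCAL_MULTILIB alone no longer marks a module as native, but native tests, native benchmarks, and explicit multilib modules all still get an arch_dir (e.g. /arm64 on a 64-bit target) under their testcase folder.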
diff --git a/core/binary.mk b/core/binary.mk
index 92f9959..a4fd8e3 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -189,24 +189,14 @@
ifeq (,$(LOCAL_NDK_STL_VARIANT))
LOCAL_NDK_STL_VARIANT := system
endif
- ifneq (1,$(words $(filter none system stlport_static stlport_shared c++_static c++_shared gnustl_static, $(LOCAL_NDK_STL_VARIANT))))
+ ifneq (1,$(words $(filter none system c++_static c++_shared, $(LOCAL_NDK_STL_VARIANT))))
$(error $(LOCAL_PATH): Unknown LOCAL_NDK_STL_VARIANT $(LOCAL_NDK_STL_VARIANT))
endif
+
ifeq (system,$(LOCAL_NDK_STL_VARIANT))
my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/system/include
my_system_shared_libraries += libstdc++
- else # LOCAL_NDK_STL_VARIANT is not system
- ifneq (,$(filter stlport_%, $(LOCAL_NDK_STL_VARIANT)))
- my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/stlport/stlport
- my_system_shared_libraries += libstdc++
- ifeq (stlport_static,$(LOCAL_NDK_STL_VARIANT))
- my_ndk_stl_static_lib := $(my_ndk_source_root)/cxx-stl/stlport/libs/$(my_cpu_variant)/libstlport_static.a
- my_ldlibs += -ldl
- else
- my_ndk_stl_shared_lib_fullpath := $(my_ndk_source_root)/cxx-stl/stlport/libs/$(my_cpu_variant)/libstlport_shared.so
- endif
- else # LOCAL_NDK_STL_VARIANT is not stlport_* either
- ifneq (,$(filter c++_%, $(LOCAL_NDK_STL_VARIANT)))
+ else ifneq (,$(filter c++_%, $(LOCAL_NDK_STL_VARIANT)))
my_ndk_stl_include_path := \
$(my_ndk_source_root)/cxx-stl/llvm-libc++/include
my_ndk_stl_include_path += \
@@ -232,17 +222,9 @@
my_ldlibs += -ldl
my_ndk_cpp_std_version := c++11
- else # LOCAL_NDK_STL_VARIANT is not c++_* either
- ifneq (,$(filter gnustl_%, $(LOCAL_NDK_STL_VARIANT)))
- my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/gnu-libstdc++/$($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_GCC_VERSION)/libs/$(my_cpu_variant)/include \
- $(my_ndk_source_root)/cxx-stl/gnu-libstdc++/$($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_GCC_VERSION)/include
- my_ndk_stl_static_lib := $(my_ndk_source_root)/cxx-stl/gnu-libstdc++/$($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_GCC_VERSION)/libs/$(my_cpu_variant)/libgnustl_static.a
else # LOCAL_NDK_STL_VARIANT must be none
# Do nothing.
endif
- endif
- endif
- endif
endif
ifneq ($(LOCAL_USE_VNDK),)
@@ -1404,10 +1386,12 @@
## other NDK-built libraries
####################################################
+include $(BUILD_SYSTEM)/allowed_ndk_types.mk
+
ifdef LOCAL_SDK_VERSION
-my_link_type := native:ndk
-my_warn_types :=
-my_allowed_types := native:ndk
+my_link_type := native:ndk:$(my_ndk_stl_family):$(my_ndk_stl_link_type)
+my_warn_types := $(my_warn_ndk_types)
+my_allowed_types := $(my_allowed_ndk_types)
else ifdef LOCAL_USE_VNDK
_name := $(patsubst %.vendor,%,$(LOCAL_MODULE))
ifneq ($(filter $(_name),$(VNDK_CORE_LIBRARIES) $(VNDK_SAMEPROCESS_LIBRARIES) $(LLNDK_LIBRARIES)),)
@@ -1427,8 +1411,8 @@
endif
else
my_link_type := native:platform
-my_warn_types :=
-my_allowed_types := native:ndk native:platform
+my_warn_types := $(my_warn_ndk_types)
+my_allowed_types := $(my_allowed_ndk_types) native:platform
endif
my_link_deps := $(addprefix STATIC_LIBRARIES:,$(my_whole_static_libraries) $(my_static_libraries))
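
The net effect in binary.mk: an NDK module's link type now records its STL family and link style (e.g. native:ndk:libc++:shared instead of the bare native:ndk), so the link-type checker can flag, say, a c++_static consumer being mixed into a c++_shared dependency chain, while the removed stlport_* and gnustl_* variants are rejected outright by the filter above.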
diff --git a/core/combo/TARGET_linux-arm.mk b/core/combo/TARGET_linux-arm.mk
index 73b1c04..01cf3f5 100644
--- a/core/combo/TARGET_linux-arm.mk
+++ b/core/combo/TARGET_linux-arm.mk
@@ -50,7 +50,7 @@
endif
ifeq ($(strip $(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT)),)
-TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT := armv5te
+$(error TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT must be set)
endif
TARGET_ARCH_SPECIFIC_MAKEFILE := $(BUILD_COMBOS)/arch/$(TARGET_$(combo_2nd_arch_prefix)ARCH)/$(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT).mk
diff --git a/core/combo/arch/arm/armv5te-vfp.mk b/core/combo/arch/arm/armv5te-vfp.mk
deleted file mode 100644
index 75299ac..0000000
--- a/core/combo/arch/arm/armv5te-vfp.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-# At the moment, use the same settings than the one
-# for armv5te, since TARGET_ARCH_VARIANT := armv5te-vfp
-# will only be used to select an optimized VFP-capable assembly
-# interpreter loop for Dalvik.
-#
-include $(BUILD_COMBOS)/arch/arm/armv5te.mk
-
diff --git a/core/combo/arch/arm/armv5te.mk b/core/combo/arch/arm/armv5te.mk
deleted file mode 100644
index bd75695..0000000
--- a/core/combo/arch/arm/armv5te.mk
+++ /dev/null
@@ -1,4 +0,0 @@
-# Configuration for Linux on ARM.
-# Generating binaries for the ARMv5TE architecture and higher
-#
-
diff --git a/core/config.mk b/core/config.mk
index b03f21f..4fc5edf 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -609,13 +609,13 @@
LEX := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/flex/flex-2.5.39
# The default PKGDATADIR built into the prebuilt bison is a relative path
-# external/bison/data.
+# prebuilts/build-tools/common/bison.
# To run bison from elsewhere you need to set the environment variable
# BISON_PKGDATADIR.
-BISON_PKGDATADIR := $(PWD)/external/bison/data
-BISON := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/bison/bison
+BISON_PKGDATADIR := $(PWD)/prebuilts/build-tools/common/bison
+BISON := prebuilts/build-tools/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/bin/bison
YACC := $(BISON) -d
-BISON_DATA := $(wildcard external/bison/data/* external/bison/data/*/*)
+BISON_DATA := $(wildcard $(BISON_PKGDATADIR)/* $(BISON_PKGDATADIR)/*/*)
YASM := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/yasm/yasm
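
A minimal sketch of why the variable matters (the rule and file names are hypothetical): any rule that invokes the prebuilt bison from a directory other than the tree root must pass the skeleton-data location through the environment, since the binary's built-in default is a relative path:

  $(intermediates)/parser.cpp: $(LOCAL_PATH)/parser.yy
  	BISON_PKGDATADIR=$(BISON_PKGDATADIR) $(BISON) -d --output=$@ $<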
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 89a39a8..255c02b 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -287,7 +287,7 @@
# Check BOARD_VNDK_VERSION
define check_vndk_version
$(eval vndk_path := prebuilts/vndk/v$(1)) \
- $(if $(wildcard $(vndk_path)/Android.bp),,$(error VNDK version $(1) not found))
+ $(if $(wildcard $(vndk_path)/*/Android.bp),,$(error VNDK version $(1) not found))
endef
ifdef BOARD_VNDK_VERSION
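
A minimal usage sketch (the version number is hypothetical): the check is now satisfied by any arch-specific Android.bp below the snapshot directory, matching the per-arch layout of the updated snapshot packages:

  # Fails the build unless some prebuilts/vndk/v27/*/Android.bp exists.
  $(call check_vndk_version,27)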
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index 265d482..ab5fd2c 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -24,16 +24,11 @@
ifdef my_embed_jni
# App explicitly requires the prebuilt NDK STL shared libraries.
# The NDK STL shared libraries should never go to the system image.
-ifneq ($(filter $(LOCAL_NDK_STL_VARIANT), stlport_shared c++_shared),)
+ifeq ($(LOCAL_NDK_STL_VARIANT),c++_shared)
ifndef LOCAL_SDK_VERSION
$(error LOCAL_SDK_VERSION must be defined with LOCAL_NDK_STL_VARIANT, \
LOCAL_PACKAGE_NAME=$(LOCAL_PACKAGE_NAME))
endif
-endif
-ifeq (stlport_shared,$(LOCAL_NDK_STL_VARIANT))
-my_jni_shared_libraries += \
- $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources/cxx-stl/stlport/libs/$(TARGET_$(my_2nd_arch_prefix)CPU_ABI)/libstlport_shared.so
-else ifeq (c++_shared,$(LOCAL_NDK_STL_VARIANT))
my_jni_shared_libraries += \
$(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources/cxx-stl/llvm-libc++/libs/$(TARGET_$(my_2nd_arch_prefix)CPU_ABI)/libc++_shared.so
endif
@@ -108,15 +103,16 @@
endif # outer my_prebuilt_jni_libs
# Verify that all included libraries are built against the NDK
+include $(BUILD_SYSTEM)/allowed_ndk_types.mk
ifneq ($(strip $(LOCAL_JNI_SHARED_LIBRARIES)),)
ifneq ($(LOCAL_SDK_VERSION),)
my_link_type := app:sdk
-my_warn_types := native:platform
-my_allowed_types := native:ndk
+my_warn_types := native:platform $(my_warn_ndk_types)
+my_allowed_types := $(my_allowed_ndk_types)
else
my_link_type := app:platform
-my_warn_types :=
-my_allowed_types := native:ndk native:platform native:vendor native:vndk native:vndk_private
+my_warn_types := $(my_warn_ndk_types)
+my_allowed_types := $(my_allowed_ndk_types) native:platform native:vendor native:vndk native:vndk_private
endif
my_link_deps := $(addprefix SHARED_LIBRARIES:,$(LOCAL_JNI_SHARED_LIBRARIES))
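
For illustration (package and library names are hypothetical), an app that embeds the NDK libc++ must also declare an SDK version, which is exactly what the error above enforces; libc++_shared.so is then copied into the APK from the NDK prebuilts rather than taken from the system image:

  include $(CLEAR_VARS)
  LOCAL_PACKAGE_NAME := ExampleApp
  LOCAL_SDK_VERSION := current
  LOCAL_NDK_STL_VARIANT := c++_shared
  LOCAL_JNI_SHARED_LIBRARIES := libexample-jni
  include $(BUILD_PACKAGE)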
diff --git a/core/java_library.mk b/core/java_library.mk
index e4916b8..8cf0074 100644
--- a/core/java_library.mk
+++ b/core/java_library.mk
@@ -42,6 +42,8 @@
ifeq (true,$(LOCAL_EMMA_INSTRUMENT))
ifeq (true,$(EMMA_INSTRUMENT_STATIC))
LOCAL_STATIC_JAVA_LIBRARIES += jacocoagent
+# Exclude jacoco classes from proguard
+LOCAL_PROGUARD_FLAGS += -include $(BUILD_SYSTEM)/proguard.jacoco.flags
endif # LOCAL_EMMA_INSTRUMENT
endif # EMMA_INSTRUMENT_STATIC
else
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 2a63817..e153a8a 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -275,6 +275,8 @@
ifneq ($(LOCAL_SRC_FILES)$(LOCAL_STATIC_JAVA_LIBRARIES)$(LOCAL_SOURCE_FILES_ALL_GENERATED),)
# Only add jacocoagent if the package contains some java code
LOCAL_STATIC_JAVA_LIBRARIES += jacocoagent
+# Exclude jacoco classes from proguard
+LOCAL_PROGUARD_FLAGS += -include $(BUILD_SYSTEM)/proguard.jacoco.flags
endif # Contains java code
else
ifdef LOCAL_SDK_VERSION
@@ -361,6 +363,8 @@
$(full_classes_compiled_jar): $(data_binding_stamp)
endif # LOCAL_DATA_BINDING
+resource_export_package :=
+
ifeq ($(need_compile_res),true)
###############################
@@ -427,7 +431,6 @@
$(proguard_options_file): $(R_file_stamp)
-resource_export_package :=
ifdef LOCAL_EXPORT_PACKAGE_RESOURCES
# Put this module's resources into a PRODUCT-agnostic package that
# other packages can use to build their own PRODUCT-agnostic R.java (etc.)
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index 2a9ad1f..d934338 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -173,8 +173,10 @@
endif
export_cflags :=
+include $(BUILD_SYSTEM)/allowed_ndk_types.mk
+
ifdef LOCAL_SDK_VERSION
-my_link_type := native:ndk
+my_link_type := native:ndk:$(my_ndk_stl_family):$(my_ndk_stl_link_type)
else ifdef LOCAL_USE_VNDK
_name := $(patsubst %.vendor,%,$(LOCAL_MODULE))
ifneq ($(filter $(_name),$(VNDK_CORE_LIBRARIES) $(VNDK_SAMEPROCESS_LIBRARIES) $(LLNDK_LIBRARIES)),)
diff --git a/core/target_test_internal.mk b/core/target_test_internal.mk
index 59a3a9e..b5c3a7c 100644
--- a/core/target_test_internal.mk
+++ b/core/target_test_internal.mk
@@ -8,15 +8,9 @@
ifndef LOCAL_SDK_VERSION
LOCAL_STATIC_LIBRARIES += libgtest_main libgtest
else
- ifneq (,$(filter c++_%,$(LOCAL_NDK_STL_VARIANT)))
- my_ndk_gtest_suffix := _c++
- else ifneq ($(filter stlport_,$(LOCAL_NDK_STL_VARIANT)),)
- my_ndk_gtest_suffix := _stlport
- else ifneq ($(filter gnustl_,$(LOCAL_NDK_STL_VARIANT)),)
- my_ndk_gtest_suffix := _gnustl
- else # system STL, use stlport
- my_ndk_gtest_suffix := _stlport
- endif
+ # TODO(danalbert): Remove the suffix from the module since we only need the
+ # one variant now.
+ my_ndk_gtest_suffix := _c++
LOCAL_STATIC_LIBRARIES += \
libgtest_main_ndk$(my_ndk_gtest_suffix) \
libgtest_ndk$(my_ndk_gtest_suffix)
diff --git a/core/tasks/vndk.mk b/core/tasks/vndk.mk
index e42e0bd..3604aed 100644
--- a/core/tasks/vndk.mk
+++ b/core/tasks/vndk.mk
@@ -20,6 +20,9 @@
# PLATFORM_VNDK_VERSION must be set.
ifneq (,$(PLATFORM_VNDK_VERSION))
+# BOARD_VNDK_RUNTIME_DISABLE must not be set to 'true'.
+ifneq ($(BOARD_VNDK_RUNTIME_DISABLE),true)
+
# Returns arch-specific libclang_rt.ubsan* library name.
# Because VNDK_CORE_LIBRARIES includes all arch variants for libclang_rt.ubsan*
# libs, the arch-specific libs are selected separately.
@@ -75,13 +78,10 @@
else
vndk_core_libs := $(addsuffix .vendor,$(filter-out libclang_rt.ubsan%,$(VNDK_CORE_LIBRARIES)))
- # for TARGET_ARCH
vndk_core_libs += $(call clang-ubsan-vndk-core)
-
- # TODO(b/69834489): Package additional arch variants
- # ifdef TARGET_2ND_ARCH
- # vndk_core_libs += $(call clang-ubsan-vndk-core,true)
- # endif
+ ifdef TARGET_2ND_ARCH
+ vndk_core_libs += $(call clang-ubsan-vndk-core,true)
+ endif
endif
vndk_sp_libs := $(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES))
@@ -140,40 +140,41 @@
#######################################
# vndk_snapshot_zip
-vndk_snapshot_arch := $(vndk_snapshot_out)/arch-$(TARGET_ARCH)-$(TARGET_ARCH_VARIANT)
+vndk_snapshot_variant := $(vndk_snapshot_out)/$(TARGET_ARCH)
+vndk_lib_dir := $(vndk_snapshot_variant)/arch-$(TARGET_ARCH)-$(TARGET_ARCH_VARIANT)
+vndk_lib_dir_2nd := $(vndk_snapshot_variant)/arch-$(TARGET_2ND_ARCH)-$(TARGET_2ND_ARCH_VARIANT)
vndk_snapshot_zip := $(PRODUCT_OUT)/android-vndk-$(TARGET_ARCH).zip
$(vndk_snapshot_zip): PRIVATE_VNDK_SNAPSHOT_OUT := $(vndk_snapshot_out)
-$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_OUT := $(vndk_snapshot_arch)/shared/vndk-core
+$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_OUT := $(vndk_lib_dir)/shared/vndk-core
$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_INTERMEDIATES := \
$(call paths-of-intermediates,$(foreach lib,$(vndk_core_libs),$(lib):$(lib).so),SHARED_LIBRARIES)
-$(vndk_snapshot_zip): PRIVATE_VNDK_SP_OUT := $(vndk_snapshot_arch)/shared/vndk-sp
+$(vndk_snapshot_zip): PRIVATE_VNDK_SP_OUT := $(vndk_lib_dir)/shared/vndk-sp
$(vndk_snapshot_zip): PRIVATE_VNDK_SP_INTERMEDIATES := \
$(call paths-of-intermediates,$(foreach lib,$(vndk_sp_libs),$(lib):$(lib).so),SHARED_LIBRARIES)
-$(vndk_snapshot_zip): PRIVATE_CONFIGS_OUT := $(vndk_snapshot_arch)/configs
+$(vndk_snapshot_zip): PRIVATE_CONFIGS_OUT := $(vndk_snapshot_variant)/configs
$(vndk_snapshot_zip): PRIVATE_CONFIGS_INTERMEDIATES := \
$(call paths-of-intermediates,$(foreach txt,$(vndk_prebuilt_txts), \
$(txt):$(patsubst %.txt,%.$(PLATFORM_VNDK_VERSION).txt,$(txt))),ETC) \
$(vndk_snapshot_configs)
-$(vndk_snapshot_zip): PRIVATE_NOTICE_FILES_OUT := $(vndk_snapshot_arch)/NOTICE_FILES
+$(vndk_snapshot_zip): PRIVATE_NOTICE_FILES_OUT := $(vndk_snapshot_variant)/NOTICE_FILES
$(vndk_snapshot_zip): PRIVATE_NOTICE_FILES_INTERMEDIATES := \
$(call paths-of-notice-files,$(vndk_core_libs),vndk) \
$(call paths-of-notice-files,$(vndk_sp_libs),vndk-sp)
-# TODO(b/69834489): Package additional arch variants
-# ifdef TARGET_2ND_ARCH
-# vndk_snapshot_arch_2ND := $(vndk_snapshot_out)/arch-$(TARGET_2ND_ARCH)-$(TARGET_2ND_ARCH_VARIANT)
-# $(vndk_snapshot_zip): PRIVATE_VNDK_CORE_OUT_2ND := $(vndk_snapshot_arch_2ND)/shared/vndk-core
-# $(vndk_snapshot_zip): PRIVATE_VNDK_CORE_INTERMEDIATES_2ND := \
-# $(call paths-of-intermediates,$(foreach lib,$(vndk_core_libs),$(lib):$(lib).so),SHARED_LIBRARIES,true)
-# $(vndk_snapshot_zip): PRIVATE_VNDK_SP_OUT_2ND := $(vndk_snapshot_arch_2ND)/shared/vndk-sp
-# $(vndk_snapshot_zip): PRIVATE_VNDK_SP_INTERMEDIATES_2ND := \
-# $(call paths-of-intermediates,$(foreach lib,$(vndk_sp_libs),$(lib):$(lib).so),SHARED_LIBRARIES,true)
-# endif
+ifdef TARGET_2ND_ARCH
+$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_OUT_2ND := $(vndk_lib_dir_2nd)/shared/vndk-core
+$(vndk_snapshot_zip): PRIVATE_VNDK_CORE_INTERMEDIATES_2ND := \
+ $(call paths-of-intermediates,$(foreach lib,$(vndk_core_libs),$(lib):$(lib).so),SHARED_LIBRARIES,true)
+
+$(vndk_snapshot_zip): PRIVATE_VNDK_SP_OUT_2ND := $(vndk_lib_dir_2nd)/shared/vndk-sp
+$(vndk_snapshot_zip): PRIVATE_VNDK_SP_INTERMEDIATES_2ND := \
+ $(call paths-of-intermediates,$(foreach lib,$(vndk_sp_libs),$(lib):$(lib).so),SHARED_LIBRARIES,true)
+endif
# Args
# $(1): destination directory
@@ -206,13 +207,12 @@
$(PRIVATE_CONFIGS_OUT),$(PRIVATE_CONFIGS_INTERMEDIATES))
$(call private-copy-vndk-intermediates, \
$(PRIVATE_NOTICE_FILES_OUT),$(PRIVATE_NOTICE_FILES_INTERMEDIATES))
-# TODO(b/69834489): Package additional arch variants
-# ifdef TARGET_2ND_ARCH
-# $(call private-copy-vndk-intermediates, \
-# $(PRIVATE_VNDK_CORE_OUT_2ND),$(PRIVATE_VNDK_CORE_INTERMEDIATES_2ND))
-# $(call private-copy-vndk-intermediates, \
-# $(PRIVATE_VNDK_SP_OUT_2ND),$(PRIVATE_VNDK_SP_INTERMEDIATES_2ND))
-# endif
+ifdef TARGET_2ND_ARCH
+ $(call private-copy-vndk-intermediates, \
+ $(PRIVATE_VNDK_CORE_OUT_2ND),$(PRIVATE_VNDK_CORE_INTERMEDIATES_2ND))
+ $(call private-copy-vndk-intermediates, \
+ $(PRIVATE_VNDK_SP_OUT_2ND),$(PRIVATE_VNDK_SP_INTERMEDIATES_2ND))
+endif
$(hide) $(SOONG_ZIP) -o $@ -C $(PRIVATE_VNDK_SNAPSHOT_OUT) -D $(PRIVATE_VNDK_SNAPSHOT_OUT)
.PHONY: vndk
@@ -232,27 +232,28 @@
vndk_snapshot_top :=
vndk_snapshot_out :=
vndk_snapshot_configs_out :=
-vndk_snapshot_arch :=
+vndk_snapshot_variant :=
+vndk_lib_dir :=
+vndk_lib_dir_2nd :=
vndk_snapshot_dependencies :=
-# TODO(b/69834489): Package additional arch variants
-# ifdef TARGET_2ND_ARCH
-# vndk_snapshot_arch_2ND :=
-# endif
+
+else # BOARD_VNDK_RUNTIME_DISABLE is set to 'true'
+error_msg := "CANNOT generate VNDK snapshot. BOARD_VNDK_RUNTIME_DISABLE must not be set to 'true'."
+endif # BOARD_VNDK_RUNTIME_DISABLE
else # PLATFORM_VNDK_VERSION is NOT set
-
-.PHONY: vndk
-vndk:
- $(call echo-error,$(current_makefile),CANNOT generate VNDK snapshot. PLATFORM_VNDK_VERSION must be set.)
- exit 1
-
+error_msg := "CANNOT generate VNDK snapshot. PLATFORM_VNDK_VERSION must be set."
endif # PLATFORM_VNDK_VERSION
else # BOARD_VNDK_VERSION is NOT set to 'current'
+error_msg := "CANNOT generate VNDK snapshot. BOARD_VNDK_VERSION must be set to 'current'."
+endif # BOARD_VNDK_VERSION
+
+ifneq (,$(error_msg))
.PHONY: vndk
vndk:
- $(call echo-error,$(current_makefile),CANNOT generate VNDK snapshot. BOARD_VNDK_VERSION must be set to 'current'.)
+ $(call echo-error,$(current_makefile),$(error_msg))
exit 1
-endif # BOARD_VNDK_VERSION
+endif
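
The restructuring above replaces duplicated failing phony targets with a single deferred error. The pattern in isolation (condition and message are hypothetical):

  error_msg :=
  ifneq ($(SOME_PRECONDITION),true)
  error_msg := "CANNOT generate VNDK snapshot. SOME_PRECONDITION must be set to 'true'."
  endif

  ifneq (,$(error_msg))
  .PHONY: vndk
  vndk:
  	$(call echo-error,$(current_makefile),$(error_msg))
  	exit 1
  endif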
diff --git a/envsetup.sh b/envsetup.sh
index fe22111..372dffb 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -8,7 +8,7 @@
Selects <product_name> as the product to build, and <build_variant> as the variant to
build, and stores those selections in the environment to be read by subsequent
invocations of 'm' etc.
-- tapas: tapas [<App1> <App2> ...] [arm|x86|mips|armv5|arm64|x86_64|mips64] [eng|userdebug|user]
+- tapas: tapas [<App1> <App2> ...] [arm|x86|mips|arm64|x86_64|mips64] [eng|userdebug|user]
- croot: Changes directory to the top of the tree.
- m: Makes from the top of the tree.
- mm: Builds all of the modules in the current directory, but not their dependencies.
@@ -661,10 +661,10 @@
function tapas()
{
local showHelp="$(echo $* | xargs -n 1 echo | \grep -E '^(help)$' | xargs)"
- local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|mips|armv5|arm64|x86_64|mips64)$' | xargs)"
+ local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|mips|arm64|x86_64|mips64)$' | xargs)"
local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
local density="$(echo $* | xargs -n 1 echo | \grep -E '^(ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
- local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|armv5|arm64|x86_64|mips64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
+ local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|arm64|x86_64|mips64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
if [ "$showHelp" != "" ]; then
$(gettop)/build/make/tapasHelp.sh
@@ -688,7 +688,6 @@
case $arch in
x86) product=aosp_x86;;
mips) product=aosp_mips;;
- armv5) product=generic_armv5;;
arm64) product=aosp_arm64;;
x86_64) product=aosp_x86_64;;
mips64) product=aosp_mips64;;
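
For example, "tapas Settings arm64 userdebug" still maps to the aosp_arm64 product as before; only the armv5 keyword (and its generic_armv5 product mapping, removed below) is gone.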
diff --git a/tapasHelp.sh b/tapasHelp.sh
index 058ac1d..38b3e34 100755
--- a/tapasHelp.sh
+++ b/tapasHelp.sh
@@ -6,7 +6,7 @@
cd ../..
TOP="${PWD}"
-message='usage: tapas [<App1> <App2> ...] [arm|x86|mips|armv5|arm64|x86_64|mips64] [eng|userdebug|user]
+message='usage: tapas [<App1> <App2> ...] [arm|x86|mips|arm64|x86_64|mips64] [eng|userdebug|user]
tapas selects individual apps to be built by the Android build system. Unlike
"lunch", "tapas" does not request the building of images for a device.
diff --git a/target/board/generic/sepolicy/bootanim.te b/target/board/generic/sepolicy/bootanim.te
index b23e1ca..e4f7c73 100644
--- a/target/board/generic/sepolicy/bootanim.te
+++ b/target/board/generic/sepolicy/bootanim.te
@@ -3,7 +3,6 @@
#TODO: This can safely be ignored until b/62954877 is fixed
dontaudit bootanim system_data_file:dir read;
-allow bootanim vendor_file:file { execute getattr open read };
allow bootanim graphics_device:chr_file { read ioctl open };
set_prop(bootanim, qemu_prop)
diff --git a/target/board/generic_armv5/AndroidBoard.mk b/target/board/generic_armv5/AndroidBoard.mk
deleted file mode 100644
index 7daff27..0000000
--- a/target/board/generic_armv5/AndroidBoard.mk
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
--include build/target/board/generic/AndroidBoard.mk
diff --git a/target/board/generic_armv5/BoardConfig.mk b/target/board/generic_armv5/BoardConfig.mk
deleted file mode 100644
index 016937a..0000000
--- a/target/board/generic_armv5/BoardConfig.mk
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include build/target/board/generic/BoardConfig.mk
-
-TARGET_ARCH_VARIANT := armv5te
-TARGET_CPU_ABI := armeabi
-TARGET_CPU_ABI2 :=
-
-WITH_DEXPREOPT := false
diff --git a/target/board/generic_armv5/README.txt b/target/board/generic_armv5/README.txt
deleted file mode 100644
index 25d590a..0000000
--- a/target/board/generic_armv5/README.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-The "generic_armv5" product defines a non-hardware-specific target
-without a kernel or bootloader.
-
-It is not a product "base class"; no other products inherit
-from it or use it in any way.
diff --git a/target/board/generic_armv5/device.mk b/target/board/generic_armv5/device.mk
deleted file mode 100644
index 7c4aaf2..0000000
--- a/target/board/generic_armv5/device.mk
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-include build/target/board/generic/device.mk
diff --git a/target/board/generic_armv5/system.prop b/target/board/generic_armv5/system.prop
deleted file mode 100644
index 137a0f9..0000000
--- a/target/board/generic_armv5/system.prop
+++ /dev/null
@@ -1,6 +0,0 @@
-#
-# system.prop for generic sdk
-#
-
-rild.libpath=/system/lib/libreference-ril.so
-rild.libargs=-d /dev/ttyS0
diff --git a/target/board/treble_common.mk b/target/board/treble_common.mk
index 44f601f..b4777b6 100644
--- a/target/board/treble_common.mk
+++ b/target/board/treble_common.mk
@@ -53,9 +53,20 @@
# Set emulator framebuffer display device buffer count to 3
NUM_FRAMEBUFFER_SURFACE_BUFFERS := 3
-BOARD_FLASH_BLOCK_SIZE := 512
+# Audio
+USE_XML_AUDIO_POLICY_CONF := 1
# b/64700195: add minimum support for odm.img
# Currently odm.img can only be built by `make custom_images`.
# Adding /odm mount point under root directory.
BOARD_ROOT_EXTRA_FOLDERS += odm
+
+# Android Verified Boot (AVB):
+# Builds a special vbmeta.img that disables AVB verification.
+# Otherwise, AVB will prevent the device from booting the generic system.img.
+# Also checks that BOARD_AVB_ENABLE is not set, to prevent adding verity
+# metadata into system.img.
+ifeq ($(BOARD_AVB_ENABLE),true)
+$(error BOARD_AVB_ENABLE cannot be set for Treble GSI)
+endif
+BOARD_BUILD_DISABLED_VBMETAIMAGE := true
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index 9e2adee..85330b3 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -36,7 +36,6 @@
PRODUCT_MAKEFILES := \
$(LOCAL_DIR)/aosp_arm.mk \
$(LOCAL_DIR)/full.mk \
- $(LOCAL_DIR)/generic_armv5.mk \
$(LOCAL_DIR)/aosp_x86.mk \
$(LOCAL_DIR)/full_x86.mk \
$(LOCAL_DIR)/aosp_mips.mk \
diff --git a/target/product/generic_armv5.mk b/target/product/generic_armv5.mk
deleted file mode 100644
index daa321a..0000000
--- a/target/product/generic_armv5.mk
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is a generic product that isn't specialized for a specific device.
-# It includes the base Android platform.
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/generic.mk)
-
-# Overrides
-PRODUCT_BRAND := generic_armv5
-PRODUCT_DEVICE := generic_armv5
-PRODUCT_NAME := generic_armv5
diff --git a/target/product/treble_common.mk b/target/product/treble_common.mk
index c385352..5880bf8 100644
--- a/target/product/treble_common.mk
+++ b/target/product/treble_common.mk
@@ -42,8 +42,6 @@
PRODUCT_PACKAGES += \
libvulkan \
-# Audio:
-USE_XML_AUDIO_POLICY_CONF := 1
# The following policy XML files are used as fallback for
# vendors/devices not using XML to configure audio policy.
PRODUCT_COPY_FILES += \
@@ -72,16 +70,6 @@
PRODUCT_COPY_FILES += \
device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml
-# Android Verified Boot (AVB):
-# Builds a special vbmeta.img that disables AVB verification.
-# Otherwise, AVB will prevent the device from booting the generic system.img.
-# Also checks that BOARD_AVB_ENABLE is not set, to prevent adding verity
-# metadata into system.img.
-ifeq ($(BOARD_AVB_ENABLE),true)
-$(error BOARD_AVB_ENABLE cannot be set for Treble GSI)
-endif
-BOARD_BUILD_DISABLED_VBMETAIMAGE := true
-
#GSI support for the devices that disable VNDK enforcing
PRODUCT_COPY_FILES += \
system/core/rootdir/etc/ld.config.txt:system/etc/ld.config.noenforce.txt \
diff --git a/target/product/vndk/Android.mk b/target/product/vndk/Android.mk
index ea8c95e..a134d02 100644
--- a/target/product/vndk/Android.mk
+++ b/target/product/vndk/Android.mk
@@ -94,9 +94,9 @@
vndk_current
else
LOCAL_REQUIRED_MODULES := \
- vndk_v$(BOARD_VNDK_VERSION)
+ vndk_v$(BOARD_VNDK_VERSION)_$(TARGET_ARCH)
endif
LOCAL_REQUIRED_MODULES += \
- $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),vndk_v$(vndk_ver))
+ $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),vndk_v$(vndk_ver)_$(TARGET_ARCH))
include $(BUILD_PHONY_PACKAGE)
endif # BOARD_VNDK_VERSION is set
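
For example, with BOARD_VNDK_VERSION := 27 on an arm64 device, the required module becomes vndk_v27_arm64 rather than vndk_v27, matching the per-architecture VNDK snapshot modules.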
diff --git a/tools/adbs b/tools/adbs
deleted file mode 100755
index a8f06c0..0000000
--- a/tools/adbs
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import os.path
-import re
-import string
-import sys
-
-sys.path.insert(0, os.path.dirname(__file__) + "/../../development/scripts")
-import stack_core
-import symbol
-
-if __name__ == '__main__':
- # pass the options to adb
- adb_cmd = "adb " + ' '.join(sys.argv[1:])
-
- # create tracer for line parsing
- tracer = stack_core.TraceConverter()
-
- # invoke the adb command and filter its output
- stream = os.popen(adb_cmd)
- while (True):
- line = stream.readline()
- if (line == ''):
- break
- if(tracer.ProcessLine(line) == False):
- print(line.strip())
- sys.stdout.flush()
-
- # adb itself aborts
- stream.close()
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 3bb8b9c..ed60188 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -15,27 +15,33 @@
# limitations under the License.
"""
-Build image output_image_file from input_directory, properties_file, and target_out_dir
+Builds output_image from the given input_directory and properties_file,
+reading device-specific FS config files from target_output_directory.
-Usage: build_image input_directory properties_file output_image_file target_out_dir
-
+Usage: build_image.py input_directory properties_file output_image \\
+ target_output_directory
"""
+
+from __future__ import print_function
+
import os
import os.path
import re
-import subprocess
-import sys
-import common
import shlex
import shutil
+import subprocess
+import sys
+
+import common
import sparse_img
-import tempfile
+
OPTIONS = common.OPTIONS
FIXED_SALT = "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
BLOCK_SIZE = 4096
+
def RunCommand(cmd, verbose=None):
"""Echo and run the given command.
@@ -56,6 +62,7 @@
print(output.rstrip())
return (output, p.returncode)
+
def GetVerityFECSize(partition_size):
cmd = ["fec", "-s", str(partition_size)]
output, exit_code = RunCommand(cmd, False)
@@ -63,6 +70,7 @@
return False, 0
return True, int(output)
+
def GetVerityTreeSize(partition_size):
cmd = ["build_verity_tree", "-s", str(partition_size)]
output, exit_code = RunCommand(cmd, False)
@@ -70,6 +78,7 @@
return False, 0
return True, int(output)
+
def GetVerityMetadataSize(partition_size):
cmd = ["system/extras/verity/build_verity_metadata.py", "size",
str(partition_size)]
@@ -78,6 +87,7 @@
return False, 0
return True, int(output)
+
def GetVeritySize(partition_size, fec_supported):
success, verity_tree_size = GetVerityTreeSize(partition_size)
if not success:
@@ -93,16 +103,19 @@
return verity_size + fec_size
return verity_size
+
def GetSimgSize(image_file):
simg = sparse_img.SparseImage(image_file, build_map=False)
return simg.blocksize * simg.total_blocks
+
def ZeroPadSimg(image_file, pad_size):
blocks = pad_size // BLOCK_SIZE
print("Padding %d blocks (%d bytes)" % (blocks, pad_size))
simg = sparse_img.SparseImage(image_file, mode="r+b", build_map=False)
simg.AppendFillChunk(0, blocks)
+
def AVBCalcMaxImageSize(avbtool, footer_type, partition_size, additional_args):
"""Calculates max image size for a given partition size.
@@ -115,8 +128,8 @@
Returns:
The maximum image size or 0 if an error occurred.
"""
- cmd =[avbtool, "add_%s_footer" % footer_type,
- "--partition_size", partition_size, "--calc_max_image_size"]
+ cmd = [avbtool, "add_%s_footer" % footer_type,
+ "--partition_size", partition_size, "--calc_max_image_size"]
cmd.extend(shlex.split(additional_args))
(output, exit_code) = RunCommand(cmd)
@@ -125,6 +138,7 @@
else:
return int(output)
+
def AVBAddFooter(image_path, avbtool, footer_type, partition_size,
partition_name, key_path, algorithm, salt,
additional_args):
@@ -140,14 +154,15 @@
algorithm: Name of algorithm to use or None.
salt: The salt to use (a hexadecimal string) or None.
additional_args: Additional arguments to pass to 'avbtool
- add_hashtree_image'.
+ add_hashtree_image'.
+
Returns:
True if the operation succeeded.
"""
- cmd =[avbtool, "add_%s_footer" % footer_type,
- "--partition_size", partition_size,
- "--partition_name", partition_name,
- "--image", image_path]
+ cmd = [avbtool, "add_%s_footer" % footer_type,
+ "--partition_size", partition_size,
+ "--partition_name", partition_name,
+ "--image", image_path]
if key_path and algorithm:
cmd.extend(["--key", key_path, "--algorithm", algorithm])
@@ -159,12 +174,15 @@
(_, exit_code) = RunCommand(cmd)
return exit_code == 0
+
def AdjustPartitionSizeForVerity(partition_size, fec_supported):
"""Modifies the provided partition size to account for the verity metadata.
This information is used to size the created image appropriately.
+
Args:
partition_size: the size of the partition to be verified.
+
Returns:
A tuple of the size of the partition adjusted for verity metadata, and
the size of verity metadata.
@@ -201,30 +219,34 @@
AdjustPartitionSizeForVerity.results[key] = (result, verity_size)
return (result, verity_size)
+
AdjustPartitionSizeForVerity.results = {}
+
def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path,
padding_size):
cmd = ["fec", "-e", "-p", str(padding_size), sparse_image_path,
verity_path, verity_fec_path]
output, exit_code = RunCommand(cmd)
if exit_code != 0:
- print "Could not build FEC data! Error: %s" % output
+ print("Could not build FEC data! Error: %s" % output)
return False
return True
+
def BuildVerityTree(sparse_image_path, verity_image_path, prop_dict):
cmd = ["build_verity_tree", "-A", FIXED_SALT, sparse_image_path,
verity_image_path]
output, exit_code = RunCommand(cmd)
if exit_code != 0:
- print "Could not build verity tree! Error: %s" % output
+ print("Could not build verity tree! Error: %s" % output)
return False
root, salt = output.split()
prop_dict["verity_root_hash"] = root
prop_dict["verity_salt"] = salt
return True
+
def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
block_device, signer_path, key, signer_args,
verity_disable):
@@ -237,10 +259,11 @@
cmd.append("--verity_disable")
output, exit_code = RunCommand(cmd)
if exit_code != 0:
- print "Could not build verity metadata! Error: %s" % output
+ print("Could not build verity metadata! Error: %s" % output)
return False
return True
+
def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
"""Appends the unsparse image to the given sparse image.
@@ -253,18 +276,23 @@
cmd = ["append2simg", sparse_image_path, unsparse_image_path]
output, exit_code = RunCommand(cmd)
if exit_code != 0:
- print "%s: %s" % (error_message, output)
+ print("%s: %s" % (error_message, output))
return False
return True
+
def Append(target, file_to_append, error_message):
- # appending file_to_append to target
- with open(target, "a") as out_file:
- with open(file_to_append, "r") as input_file:
+ """Appends file_to_append to target."""
+ try:
+ with open(target, "a") as out_file, open(file_to_append, "r") as input_file:
for line in input_file:
out_file.write(line)
+ except IOError:
+ print(error_message)
+ return False
return True
+
def BuildVerifiedImage(data_image_path, verity_image_path,
verity_metadata_path, verity_fec_path,
padding_size, fec_supported):
@@ -286,6 +314,7 @@
return False
return True
+
def UnsparseImage(sparse_image_path, replace=True):
img_dir = os.path.dirname(sparse_image_path)
unsparse_image_path = "unsparse_" + os.path.basename(sparse_image_path)
@@ -302,6 +331,7 @@
return False, None
return True, unsparse_image_path
+
def MakeVerityEnabledImage(out_file, fec_supported, prop_dict):
"""Creates an image that is verifiable using dm-verity.
@@ -360,14 +390,12 @@
return True
+
def ConvertBlockMapToBaseFs(block_map_file):
base_fs_file = common.MakeTempFile(prefix="script_gen_", suffix=".base_fs")
-
convert_command = ["blk_alloc_to_base_fs", block_map_file, base_fs_file]
(_, exit_code) = RunCommand(convert_command)
- if exit_code != 0:
- return None
- return base_fs_file
+ return base_fs_file if exit_code == 0 else None
def CheckHeadroom(ext4fs_output, prop_dict):
@@ -396,7 +424,8 @@
ext4fs_stats = re.compile(
r'Created filesystem with .* (?P<used_blocks>[0-9]+)/'
r'(?P<total_blocks>[0-9]+) blocks')
- m = ext4fs_stats.match(ext4fs_output.strip().split('\n')[-1])
+ last_line = ext4fs_output.strip().split('\n')[-1]
+ m = ext4fs_stats.match(last_line)
used_blocks = int(m.groupdict().get('used_blocks'))
total_blocks = int(m.groupdict().get('total_blocks'))
headroom_blocks = int(prop_dict['partition_headroom']) / BLOCK_SIZE
@@ -418,7 +447,8 @@
in_dir: path of input directory.
prop_dict: property dictionary.
out_file: path of the output image file.
- target_out: path of the product out directory to read device specific FS config files.
+ target_out: path of the product out directory to read device specific FS
+ config files.
Returns:
True iff the image is built successfully.
@@ -427,10 +457,10 @@
# /system and the ramdisk, and can be mounted at the root of the file system.
origin_in = in_dir
fs_config = prop_dict.get("fs_config")
- if (prop_dict.get("system_root_image") == "true"
- and prop_dict["mount_point"] == "system"):
+ if (prop_dict.get("system_root_image") == "true" and
+ prop_dict["mount_point"] == "system"):
in_dir = common.MakeTempDir()
- # Change the mount point to "/"
+ # Change the mount point to "/".
prop_dict["mount_point"] = "/"
if fs_config:
# We need to merge the fs_config files of system and ramdisk.
@@ -446,7 +476,7 @@
build_command = []
fs_type = prop_dict.get("fs_type", "")
- run_fsck = False
+ run_e2fsck = False
fs_spans_partition = True
if fs_type.startswith("squash"):
@@ -460,8 +490,8 @@
# verified.
if verity_supported and is_verity_partition:
partition_size = int(prop_dict.get("partition_size"))
- (adjusted_size, verity_size) = AdjustPartitionSizeForVerity(partition_size,
- verity_fec_supported)
+ (adjusted_size, verity_size) = AdjustPartitionSizeForVerity(
+ partition_size, verity_fec_supported)
if not adjusted_size:
return False
prop_dict["partition_size"] = str(adjusted_size)
@@ -480,8 +510,8 @@
partition_size = prop_dict["partition_size"]
# avb_add_hash_footer_args or avb_add_hashtree_footer_args.
additional_args = prop_dict["avb_add_" + avb_footer_type + "_footer_args"]
- max_image_size = AVBCalcMaxImageSize(avbtool, avb_footer_type, partition_size,
- additional_args)
+ max_image_size = AVBCalcMaxImageSize(avbtool, avb_footer_type,
+ partition_size, additional_args)
if max_image_size == 0:
return False
prop_dict["partition_size"] = str(max_image_size)
@@ -491,7 +521,7 @@
build_command = [prop_dict["ext_mkuserimg"]]
if "extfs_sparse_flag" in prop_dict:
build_command.append(prop_dict["extfs_sparse_flag"])
- run_fsck = True
+ run_e2fsck = True
build_command.extend([in_dir, out_file, fs_type,
prop_dict["mount_point"]])
build_command.append(prop_dict["partition_size"])
@@ -545,7 +575,7 @@
build_command.extend(["-zo", prop_dict["squashfs_compressor_opt"]])
if "squashfs_block_size" in prop_dict:
build_command.extend(["-b", prop_dict["squashfs_block_size"]])
- if "squashfs_disable_4k_align" in prop_dict and prop_dict.get("squashfs_disable_4k_align") == "true":
+ if prop_dict.get("squashfs_disable_4k_align") == "true":
build_command.extend(["-a"])
elif fs_type.startswith("f2fs"):
build_command = ["mkf2fsuserimg.sh"]
@@ -575,18 +605,14 @@
shutil.rmtree(staging_system, ignore_errors=True)
shutil.copytree(origin_in, staging_system, symlinks=True)
- ext4fs_output = None
- if fs_type.startswith("ext4"):
- (ext4fs_output, exit_code) = RunCommand(build_command)
- else:
- (_, exit_code) = RunCommand(build_command)
+ (mkfs_output, exit_code) = RunCommand(build_command)
if exit_code != 0:
print("Error: '%s' failed with exit code %d" % (build_command, exit_code))
return False
# Check if there's enough headroom space available for ext4 image.
if "partition_headroom" in prop_dict and fs_type.startswith("ext4"):
- if not CheckHeadroom(ext4fs_output, prop_dict):
+ if not CheckHeadroom(mkfs_output, prop_dict):
return False
if not fs_spans_partition:
@@ -600,7 +626,7 @@
if verity_supported and is_verity_partition:
ZeroPadSimg(out_file, partition_size - image_size)
- # create the verified image if this is to be verified
+ # Create the verified image if this is to be verified.
if verity_supported and is_verity_partition:
if not MakeVerityEnabledImage(out_file, verity_fec_supported, prop_dict):
return False
@@ -616,11 +642,12 @@
salt = prop_dict.get("avb_salt")
# avb_add_hash_footer_args or avb_add_hashtree_footer_args
additional_args = prop_dict["avb_add_" + avb_footer_type + "_footer_args"]
- if not AVBAddFooter(out_file, avbtool, avb_footer_type, original_partition_size,
- partition_name, key_path, algorithm, salt, additional_args):
+ if not AVBAddFooter(out_file, avbtool, avb_footer_type,
+ original_partition_size, partition_name, key_path,
+ algorithm, salt, additional_args):
return False
- if run_fsck and prop_dict.get("skip_fsck") != "true":
+ if run_e2fsck and prop_dict.get("skip_fsck") != "true":
success, unsparse_image = UnsparseImage(out_file, replace=False)
if not success:
return False
@@ -632,7 +659,8 @@
os.remove(unsparse_image)
if exit_code != 0:
- print("Error: '%s' failed with exit code %d" % (e2fsck_command, exit_code))
+ print("Error: '%s' failed with exit code %d" % (e2fsck_command,
+ exit_code))
return False
return True
@@ -699,7 +727,8 @@
copy_prop("system_base_fs_file", "base_fs_file")
copy_prop("system_extfs_inode_count", "extfs_inode_count")
elif mount_point == "system_other":
- # We inherit the selinux policies of /system since we contain some of its files.
+ # We inherit the selinux policies of /system since we contain some of its
+ # files.
d["mount_point"] = "system"
copy_prop("avb_system_hashtree_enable", "avb_hashtree_enable")
copy_prop("avb_system_add_hashtree_footer_args",
@@ -721,7 +750,7 @@
copy_prop("fs_type", "fs_type")
copy_prop("userdata_fs_type", "fs_type")
copy_prop("userdata_size", "partition_size")
- copy_prop("flash_logical_block_size","flash_logical_block_size")
+ copy_prop("flash_logical_block_size", "flash_logical_block_size")
copy_prop("flash_erase_block_size", "flash_erase_block_size")
elif mount_point == "cache":
copy_prop("cache_fs_type", "fs_type")
@@ -767,7 +796,7 @@
def main(argv):
if len(argv) != 4:
- print __doc__
+ print(__doc__)
sys.exit(1)
in_dir = argv[0]
@@ -796,14 +825,14 @@
elif image_filename == "oem.img":
mount_point = "oem"
else:
- print >> sys.stderr, "error: unknown image file name ", image_filename
+ print("error: unknown image file name ", image_filename, file=sys.stderr)
sys.exit(1)
image_properties = ImagePropFromGlobalDict(glob_dict, mount_point)
if not BuildImage(in_dir, image_properties, out_file, target_out):
- print >> sys.stderr, "error: failed to build %s from %s" % (out_file,
- in_dir)
+ print("error: failed to build %s from %s" % (out_file, in_dir),
+ file=sys.stderr)
sys.exit(1)
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 03e808f..ebebd63 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -792,11 +792,22 @@
def ReadApkCerts(tf_zip):
- """Given a target_files ZipFile, parse the META/apkcerts.txt file
- and return a tuple with the following elements: (1) a dictionary that maps
- packages to certs (based on the "certificate" and "private_key" attributes
- in the file. (2) A string representing the extension of compressed APKs in
- the target files (e.g ".gz" ".bro")."""
+ """Parses the APK certs info from a given target-files zip.
+
+ Given a target-files ZipFile, parses the META/apkcerts.txt entry and returns a
+ tuple with the following elements: (1) a dictionary that maps packages to
+ certs (based on the "certificate" and "private_key" attributes in the file);
+ (2) a string representing the extension of compressed APKs in the target files
+ (e.g. ".gz", ".bro").
+
+ Args:
+ tf_zip: The input target_files ZipFile (already open).
+
+ Returns:
+ (certmap, ext): certmap is a dictionary that maps packages to certs; ext is
+ the extension string of compressed APKs (e.g. ".gz"), or None if there are
+ no compressed APKs.
+ """
certmap = {}
compressed_extension = None
@@ -812,41 +823,51 @@
line = line.strip()
if not line:
continue
- m = re.match(r'^name="(?P<NAME>.*)"\s+certificate="(?P<CERT>.*)"\s+'
- r'private_key="(?P<PRIVKEY>.*?)"(\s+compressed="(?P<COMPRESSED>.*)")?$',
- line)
- if m:
- matches = m.groupdict()
- cert = matches["CERT"]
- privkey = matches["PRIVKEY"]
- name = matches["NAME"]
- this_compressed_extension = matches["COMPRESSED"]
- public_key_suffix_len = len(OPTIONS.public_key_suffix)
- private_key_suffix_len = len(OPTIONS.private_key_suffix)
- if cert in SPECIAL_CERT_STRINGS and not privkey:
- certmap[name] = cert
- elif (cert.endswith(OPTIONS.public_key_suffix) and
- privkey.endswith(OPTIONS.private_key_suffix) and
- cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]):
- certmap[name] = cert[:-public_key_suffix_len]
- else:
- raise ValueError("failed to parse line from apkcerts.txt:\n" + line)
- if this_compressed_extension:
- # Only count the installed files.
- filename = name + '.' + this_compressed_extension
- if filename not in installed_files:
- continue
- # Make sure that all the values in the compression map have the same
- # extension. We don't support multiple compression methods in the same
- # system image.
- if compressed_extension:
- if this_compressed_extension != compressed_extension:
- raise ValueError("multiple compressed extensions : %s vs %s",
- (compressed_extension, this_compressed_extension))
- else:
- compressed_extension = this_compressed_extension
+ m = re.match(
+ r'^name="(?P<NAME>.*)"\s+certificate="(?P<CERT>.*)"\s+'
+ r'private_key="(?P<PRIVKEY>.*?)"(\s+compressed="(?P<COMPRESSED>.*)")?$',
+ line)
+ if not m:
+ continue
- return (certmap, ("." + compressed_extension) if compressed_extension else None)
+ matches = m.groupdict()
+ cert = matches["CERT"]
+ privkey = matches["PRIVKEY"]
+ name = matches["NAME"]
+ this_compressed_extension = matches["COMPRESSED"]
+
+ public_key_suffix_len = len(OPTIONS.public_key_suffix)
+ private_key_suffix_len = len(OPTIONS.private_key_suffix)
+ if cert in SPECIAL_CERT_STRINGS and not privkey:
+ certmap[name] = cert
+ elif (cert.endswith(OPTIONS.public_key_suffix) and
+ privkey.endswith(OPTIONS.private_key_suffix) and
+ cert[:-public_key_suffix_len] == privkey[:-private_key_suffix_len]):
+ certmap[name] = cert[:-public_key_suffix_len]
+ else:
+ raise ValueError("Failed to parse line from apkcerts.txt:\n" + line)
+
+ if not this_compressed_extension:
+ continue
+
+ # Only count the installed files.
+ filename = name + '.' + this_compressed_extension
+ if filename not in installed_files:
+ continue
+
+ # Make sure that all the values in the compression map have the same
+ # extension. We don't support multiple compression methods in the same
+ # system image.
+ if compressed_extension:
+ if this_compressed_extension != compressed_extension:
+ raise ValueError(
+ "Multiple compressed extensions: {} vs {}".format(
+ compressed_extension, this_compressed_extension))
+ else:
+ compressed_extension = this_compressed_extension
+
+ return (certmap,
+ ("." + compressed_extension) if compressed_extension else None)
COMMON_DOCSTRING = """
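
For reference, a hypothetical META/apkcerts.txt line that the regular expression above accepts (names and paths are illustrative):

  name="Example.apk" certificate="build/target/product/security/platform.x509.pem" private_key="build/target/product/security/platform.pk8" compressed="gz"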
diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py
index 0c44faf..7a81928 100644
--- a/tools/releasetools/edify_generator.py
+++ b/tools/releasetools/edify_generator.py
@@ -77,14 +77,14 @@
with temporary=True) to this one."""
self.script.extend(other.script)
- def AssertOemProperty(self, name, values):
+ def AssertOemProperty(self, name, values, oem_no_mount):
"""Assert that a property on the OEM paritition matches allowed values."""
if not name:
raise ValueError("must specify an OEM property")
if not values:
raise ValueError("must specify the OEM value")
- get_prop_command = None
- if common.OPTIONS.oem_no_mount:
+
+ if oem_no_mount:
get_prop_command = 'getprop("%s")' % name
else:
get_prop_command = 'file_getprop("/oem/oem.prop", "%s")' % name
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index d5ac922..1a4383c 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -49,8 +49,10 @@
-o (--oem_settings) <main_file[,additional_files...]>
Comma-separated list of files used to specify the expected OEM-specific
- properties on the OEM partition of the intended device.
- Multiple expected values can be used by providing multiple files.
+ properties on the OEM partition of the intended device. Multiple expected
+ values can be used by providing multiple files. Only the first dict will
+ be used to compute the fingerprint, while the rest will be used to assert
+ OEM-specific properties.
--oem_no_mount
For devices with OEM-specific properties but without an OEM partition,
@@ -128,16 +130,11 @@
from __future__ import print_function
-import sys
-
-if sys.hexversion < 0x02070000:
- print("Python 2.7 or newer is required.", file=sys.stderr)
- sys.exit(1)
-
import multiprocessing
import os.path
-import subprocess
import shlex
+import subprocess
+import sys
import tempfile
import zipfile
@@ -145,6 +142,11 @@
import edify_generator
import sparse_img
+if sys.hexversion < 0x02070000:
+ print("Python 2.7 or newer is required.", file=sys.stderr)
+ sys.exit(1)
+
+
OPTIONS = common.OPTIONS
OPTIONS.package_key = None
OPTIONS.incremental_source = None
@@ -179,6 +181,136 @@
UNZIP_PATTERN = ['IMAGES/*', 'META/*']
+class BuildInfo(object):
+ """A class that holds the information for a given build.
+
+ This class wraps up the property querying for a given source or target build.
+ It abstracts away the logic of handling OEM-specific properties, and caches
+ the commonly used properties such as fingerprint.
+
+ There are two types of info dicts: a) build-time info dict, which is generated
+ at build time (i.e. included in a target_files zip); b) OEM info dict that is
+ specified at package generation time (via command line argument
+ '--oem_settings'). If a build doesn't use OEM-specific properties (i.e. not
+ having "oem_fingerprint_properties" in build-time info dict), all the queries
+ would be answered based on build-time info dict only. Otherwise if using
+ OEM-specific properties, some of them will be calculated from two info dicts.
+
+ Users can query properties the same way as with a dict() (e.g. info['fstab']),
+ or query build properties via GetBuildProp() or GetVendorBuildProp().
+
+ Attributes:
+ info_dict: The build-time info dict.
+ is_ab: Whether it's a build that uses A/B OTA.
+ oem_dicts: A list of OEM dicts.
+ oem_props: A list of OEM properties that should be read from OEM dicts; None
+ if the build doesn't use any OEM-specific property.
+ fingerprint: The fingerprint of the build, which would be calculated based
+ on OEM properties if applicable.
+ device: The device name, which could come from OEM dicts if applicable.
+ """
+
+ def __init__(self, info_dict, oem_dicts):
+ """Initializes a BuildInfo instance with the given dicts.
+
+ Arguments:
+ info_dict: The build-time info dict.
+ oem_dicts: A list of OEM dicts (which is parsed from --oem_settings). Note
+ that it always uses the first dict to calculate the fingerprint or the
+ device name. The rest would be used for asserting OEM properties only
+ (e.g. one package can be installed on one of these devices).
+ """
+ self.info_dict = info_dict
+ self.oem_dicts = oem_dicts
+
+ self._is_ab = info_dict.get("ab_update") == "true"
+ self._oem_props = info_dict.get("oem_fingerprint_properties")
+
+ if self._oem_props:
+ assert oem_dicts, "OEM source required for this build"
+
+ # These two should be computed only after setting self._oem_props.
+ self._device = self.GetOemProperty("ro.product.device")
+ self._fingerprint = self.CalculateFingerprint()
+
+ @property
+ def is_ab(self):
+ return self._is_ab
+
+ @property
+ def device(self):
+ return self._device
+
+ @property
+ def fingerprint(self):
+ return self._fingerprint
+
+ @property
+ def oem_props(self):
+ return self._oem_props
+
+ def __getitem__(self, key):
+ return self.info_dict[key]
+
+ def get(self, key, default=None):
+ return self.info_dict.get(key, default)
+
+ def GetBuildProp(self, prop):
+ """Returns the inquired build property."""
+ try:
+ return self.info_dict.get("build.prop", {})[prop]
+ except KeyError:
+ raise common.ExternalError("couldn't find %s in build.prop" % (prop,))
+
+ def GetVendorBuildProp(self, prop):
+ """Returns the inquired vendor build property."""
+ try:
+ return self.info_dict.get("vendor.build.prop", {})[prop]
+ except KeyError:
+ raise common.ExternalError(
+ "couldn't find %s in vendor.build.prop" % (prop,))
+
+ def GetOemProperty(self, key):
+ if self.oem_props is not None and key in self.oem_props:
+ return self.oem_dicts[0][key]
+ return self.GetBuildProp(key)
+
+ def CalculateFingerprint(self):
+ if self.oem_props is None:
+ return self.GetBuildProp("ro.build.fingerprint")
+ return "%s/%s/%s:%s" % (
+ self.GetOemProperty("ro.product.brand"),
+ self.GetOemProperty("ro.product.name"),
+ self.GetOemProperty("ro.product.device"),
+ self.GetBuildProp("ro.build.thumbprint"))
+
+ def WriteMountOemScript(self, script):
+ assert self.oem_props is not None
+ recovery_mount_options = self.info_dict.get("recovery_mount_options")
+ script.Mount("/oem", recovery_mount_options)
+
+ def WriteDeviceAssertions(self, script, oem_no_mount):
+ # Read the property directly if not using OEM properties.
+ if not self.oem_props:
+ script.AssertDevice(self.device)
+ return
+
+ # Otherwise assert OEM properties.
+ if not self.oem_dicts:
+ raise common.ExternalError(
+ "No OEM file provided to answer expected assertions")
+
+ for prop in self.oem_props.split():
+ values = []
+ for oem_dict in self.oem_dicts:
+ if prop in oem_dict:
+ values.append(oem_dict[prop])
+ if not values:
+ raise common.ExternalError(
+ "The OEM file is missing the property %s" % (prop,))
+ script.AssertOemProperty(prop, values, oem_no_mount)
+
+
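
As a quick orientation, here is a minimal usage sketch of the BuildInfo class
above. The dict values are hypothetical, patterned on the unit test data added
further below; only the non-OEM path is shown.

    # Minimal sketch: a BuildInfo backed by a build-time info dict only.
    info_dict = {
        'build.prop': {
            'ro.product.device': 'product-device',
            'ro.build.fingerprint': 'build-fingerprint',
        },
        'fstab': {},
    }
    target_info = BuildInfo(info_dict, None)

    target_info['fstab']                           # dict-style query
    target_info.GetBuildProp('ro.product.device')  # 'product-device'
    target_info.device       # 'product-device' (no OEM dicts in play)
    target_info.fingerprint  # 'build-fingerprint'
    target_info.is_ab        # False ('ab_update' not set to 'true')
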
def SignOutput(temp_zip_name, output_zip_name):
pw = OPTIONS.key_passwords[OPTIONS.package_key]
@@ -186,37 +318,15 @@
whole_file=True)
-def AppendAssertions(script, info_dict, oem_dicts=None):
- oem_props = info_dict.get("oem_fingerprint_properties")
- if not oem_props:
- device = GetBuildProp("ro.product.device", info_dict)
- script.AssertDevice(device)
- else:
- if not oem_dicts:
- raise common.ExternalError(
- "No OEM file provided to answer expected assertions")
- for prop in oem_props.split():
- values = []
- for oem_dict in oem_dicts:
- if oem_dict.get(prop):
- values.append(oem_dict[prop])
- if not values:
- raise common.ExternalError(
- "The OEM file is missing the property %s" % prop)
- script.AssertOemProperty(prop, values)
-
-
-def _LoadOemDicts(script, recovery_mount_options=None):
+def _LoadOemDicts(oem_source):
"""Returns the list of loaded OEM properties dict."""
- oem_dicts = None
- if OPTIONS.oem_source is None:
- raise common.ExternalError("OEM source required for this build")
- if not OPTIONS.oem_no_mount and script:
- script.Mount("/oem", recovery_mount_options)
+ if not oem_source:
+ return None
+
oem_dicts = []
- for oem_file in OPTIONS.oem_source:
- oem_dicts.append(common.LoadDictionaryFromLines(
- open(oem_file).readlines()))
+ for oem_file in oem_source:
+ with open(oem_file) as fp:
+ oem_dicts.append(common.LoadDictionaryFromLines(fp.readlines()))
return oem_dicts
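
Each file passed via --oem_settings is a plain key=value property file, parsed
with common.LoadDictionaryFromLines. A small sketch of the round trip follows;
the file path and contents are illustrative only.

    # Sketch: write one hypothetical OEM property file and load it back.
    import tempfile

    with tempfile.NamedTemporaryFile(mode='w', suffix='.prop',
                                     delete=False) as fp:
        fp.write('ro.product.brand=brand1\nro.product.device=device1\n')
        oem_file = fp.name

    oem_dicts = _LoadOemDicts([oem_file])
    assert oem_dicts[0]['ro.product.brand'] == 'brand1'
    assert _LoadOemDicts(None) is None  # no --oem_settings given
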
@@ -267,25 +377,30 @@
return False
-def HasTrebleEnabled(target_files_zip, info_dict):
+def HasTrebleEnabled(target_files_zip, target_info):
return (HasVendorPartition(target_files_zip) and
- GetBuildProp("ro.treble.enabled", info_dict) == "true")
+ target_info.GetBuildProp("ro.treble.enabled") == "true")
-def GetOemProperty(name, oem_props, oem_dict, info_dict):
- if oem_props is not None and name in oem_props:
- return oem_dict[name]
- return GetBuildProp(name, info_dict)
+def WriteFingerprintAssertion(script, target_info, source_info):
+ source_oem_props = source_info.oem_props
+ target_oem_props = target_info.oem_props
-
-def CalculateFingerprint(oem_props, oem_dict, info_dict):
- if oem_props is None:
- return GetBuildProp("ro.build.fingerprint", info_dict)
- return "%s/%s/%s:%s" % (
- GetOemProperty("ro.product.brand", oem_props, oem_dict, info_dict),
- GetOemProperty("ro.product.name", oem_props, oem_dict, info_dict),
- GetOemProperty("ro.product.device", oem_props, oem_dict, info_dict),
- GetBuildProp("ro.build.thumbprint", info_dict))
+ if source_oem_props is None and target_oem_props is None:
+ script.AssertSomeFingerprint(
+ source_info.fingerprint, target_info.fingerprint)
+ elif source_oem_props is not None and target_oem_props is not None:
+ script.AssertSomeThumbprint(
+ target_info.GetBuildProp("ro.build.thumbprint"),
+ source_info.GetBuildProp("ro.build.thumbprint"))
+ elif source_oem_props is None and target_oem_props is not None:
+ script.AssertFingerprintOrThumbprint(
+ source_info.fingerprint,
+ target_info.GetBuildProp("ro.build.thumbprint"))
+ else:
+ script.AssertFingerprintOrThumbprint(
+ target_info.fingerprint,
+ source_info.GetBuildProp("ro.build.thumbprint"))
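
The branch order above reduces to a small dispatch on which side uses OEM
properties; for quick reference (fp = ro.build.fingerprint, tp =
ro.build.thumbprint, matching the unit tests added below):

    # Assertion emitted per (source oem_props, target oem_props):
    #
    #   source    target    assertion
    #   --------  --------  -------------------------------------------------
    #   None      None      AssertSomeFingerprint(source_fp, target_fp)
    #   set       set       AssertSomeThumbprint(target_tp, source_tp)
    #   None      set       AssertFingerprintOrThumbprint(source_fp, target_tp)
    #   set       None      AssertFingerprintOrThumbprint(target_fp, source_tp)
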
def GetImage(which, tmpdir):
@@ -313,9 +428,8 @@
return sparse_img.SparseImage(path, mappath, clobbered_blocks)
-def AddCompatibilityArchiveIfTrebleEnabled(target_zip, output_zip,
- target_info_dict,
- source_info_dict=None):
+def AddCompatibilityArchiveIfTrebleEnabled(target_zip, output_zip, target_info,
+ source_info=None):
"""Adds compatibility info into the output zip if it's Treble-enabled target.
Metadata used for on-device compatibility verification is retrieved from
@@ -328,9 +442,9 @@
Args:
target_zip: Zip file containing the source files to be included for OTA.
output_zip: Zip file that will be sent for OTA.
- target_info_dict: The dict that holds the target build info.
- source_info_dict: The dict that holds the source build info, if generating
- an incremental OTA; None otherwise.
+ target_info: The BuildInfo instance that holds the target build info.
+ source_info: The BuildInfo instance that holds the source build info, if
+ generating an incremental OTA; None otherwise.
"""
def AddCompatibilityArchive(system_updated, vendor_updated):
@@ -353,8 +467,8 @@
# Create new archive.
compatibility_archive = tempfile.NamedTemporaryFile()
- compatibility_archive_zip = zipfile.ZipFile(compatibility_archive, "w",
- compression=zipfile.ZIP_DEFLATED)
+ compatibility_archive_zip = zipfile.ZipFile(
+ compatibility_archive, "w", compression=zipfile.ZIP_DEFLATED)
# Add metadata.
for file_name in compatibility_files:
@@ -375,59 +489,55 @@
  # Will only proceed if the target has enabled Treble support (as well as
  # having a /vendor partition).
- if not HasTrebleEnabled(target_zip, target_info_dict):
+ if not HasTrebleEnabled(target_zip, target_info):
return
  # We don't support OEM thumbprints in the Treble world; they calculate
  # fingerprints in a different way, as shown in CalculateFingerprint().
- assert not target_info_dict.get("oem_fingerprint_properties")
+ assert not target_info.oem_props
# Full OTA carries the info for system/vendor both.
- if source_info_dict is None:
+ if source_info is None:
AddCompatibilityArchive(True, True)
return
- assert not source_info_dict.get("oem_fingerprint_properties")
+ assert not source_info.oem_props
- source_fp = GetBuildProp("ro.build.fingerprint", source_info_dict)
- target_fp = GetBuildProp("ro.build.fingerprint", target_info_dict)
+ source_fp = source_info.fingerprint
+ target_fp = target_info.fingerprint
system_updated = source_fp != target_fp
- source_fp_vendor = GetVendorBuildProp("ro.vendor.build.fingerprint",
- source_info_dict)
- target_fp_vendor = GetVendorBuildProp("ro.vendor.build.fingerprint",
- target_info_dict)
+ source_fp_vendor = source_info.GetVendorBuildProp(
+ "ro.vendor.build.fingerprint")
+ target_fp_vendor = target_info.GetVendorBuildProp(
+ "ro.vendor.build.fingerprint")
vendor_updated = source_fp_vendor != target_fp_vendor
AddCompatibilityArchive(system_updated, vendor_updated)
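
For incremental packages, the helper adds a partition's compatibility info
only when that partition actually changed, which reduces to a fingerprint
comparison; a trivial standalone sketch (the fingerprint values are
illustrative):

    # Sketch: per-partition update detection via fingerprint comparison.
    source_fp, target_fp = 'brand/name/dev:8.0/1', 'brand/name/dev:8.1/2'
    source_fp_vendor = target_fp_vendor = 'brand/name/dev:8.0/vendor'

    system_updated = source_fp != target_fp                # True
    vendor_updated = source_fp_vendor != target_fp_vendor  # False
    # AddCompatibilityArchive(system_updated, vendor_updated) then packages
    # compatibility metadata for the system partition only.
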
def WriteFullOTAPackage(input_zip, output_zip):
- # TODO: how to determine this? We don't know what version it will
- # be installed on top of. For now, we expect the API just won't
- # change very often. Similarly for fstab, it might have changed
- # in the target build.
- script = edify_generator.EdifyGenerator(3, OPTIONS.info_dict)
+ target_info = BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
- oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
- oem_dicts = None
- if oem_props:
- recovery_mount_options = OPTIONS.info_dict.get("recovery_mount_options")
- oem_dicts = _LoadOemDicts(script, recovery_mount_options)
+ # We don't know what version it will be installed on top of. We expect the API
+ # just won't change very often. Similarly for fstab, it might have changed in
+ # the target build.
+ target_api_version = target_info["recovery_api_version"]
+ script = edify_generator.EdifyGenerator(target_api_version, target_info)
- target_fp = CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
- OPTIONS.info_dict)
+ if target_info.oem_props and not OPTIONS.oem_no_mount:
+ target_info.WriteMountOemScript(script)
+
metadata = {
- "post-build": target_fp,
- "pre-device": GetOemProperty("ro.product.device", oem_props,
- oem_dicts and oem_dicts[0],
- OPTIONS.info_dict),
- "post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict),
+ "post-build": target_info.fingerprint,
+ "pre-device": target_info.device,
+ "post-timestamp": target_info.GetBuildProp("ro.build.date.utc"),
+ "ota-type" : "BLOCK",
}
device_specific = common.DeviceSpecificParams(
input_zip=input_zip,
- input_version=OPTIONS.info_dict["recovery_api_version"],
+ input_version=target_api_version,
output_zip=output_zip,
script=script,
input_tmp=OPTIONS.input_tmp,
@@ -436,13 +546,12 @@
assert HasRecoveryPatch(input_zip)
- metadata["ota-type"] = "BLOCK"
-
- ts = GetBuildProp("ro.build.date.utc", OPTIONS.info_dict)
- ts_text = GetBuildProp("ro.build.date", OPTIONS.info_dict)
+ # Assertions (e.g. downgrade check, device properties check).
+ ts = target_info.GetBuildProp("ro.build.date.utc")
+ ts_text = target_info.GetBuildProp("ro.build.date")
script.AssertOlderBuild(ts, ts_text)
- AppendAssertions(script, OPTIONS.info_dict, oem_dicts)
+ target_info.WriteDeviceAssertions(script, OPTIONS.oem_no_mount)
device_specific.FullOTA_Assertions()
# Two-step package strategy (in chronological order, which is *not*
@@ -468,9 +577,9 @@
recovery_img = common.GetBootableImage("recovery.img", "recovery.img",
OPTIONS.input_tmp, "RECOVERY")
if OPTIONS.two_step:
- if not OPTIONS.info_dict.get("multistage_support", None):
+ if not target_info.get("multistage_support"):
assert False, "two-step packages not supported by this build"
- fs = OPTIONS.info_dict["fstab"]["/misc"]
+ fs = target_info["fstab"]["/misc"]
assert fs.fs_type.upper() == "EMMC", \
"two-step packages only supported on devices with EMMC /misc partitions"
bcb_dev = {"bcb_dev": fs.device}
@@ -492,7 +601,7 @@
script.Comment("Stage 3/3")
# Dump fingerprints
- script.Print("Target: %s" % target_fp)
+ script.Print("Target: {}".format(target_info.fingerprint))
device_specific.FullOTA_InstallBegin()
@@ -525,10 +634,9 @@
vendor_diff = common.BlockDifference("vendor", vendor_tgt)
vendor_diff.WriteScript(script, output_zip)
- AddCompatibilityArchiveIfTrebleEnabled(input_zip, output_zip,
- OPTIONS.info_dict)
+ AddCompatibilityArchiveIfTrebleEnabled(input_zip, output_zip, target_info)
- common.CheckSize(boot_img.data, "boot.img", OPTIONS.info_dict)
+ common.CheckSize(boot_img.data, "boot.img", target_info)
common.ZipWriteStr(output_zip, "boot.img", boot_img.data)
script.ShowProgress(0.05, 5)
@@ -592,12 +700,12 @@
"couldn't find %s in vendor.build.prop" % (prop,))
-def HandleDowngradeMetadata(metadata):
+def HandleDowngradeMetadata(metadata, target_info, source_info):
# Only incremental OTAs are allowed to reach here.
assert OPTIONS.incremental_source is not None
- post_timestamp = GetBuildProp("ro.build.date.utc", OPTIONS.target_info_dict)
- pre_timestamp = GetBuildProp("ro.build.date.utc", OPTIONS.source_info_dict)
+ post_timestamp = target_info.GetBuildProp("ro.build.date.utc")
+ pre_timestamp = source_info.GetBuildProp("ro.build.date.utc")
is_downgrade = long(post_timestamp) < long(pre_timestamp)
if OPTIONS.downgrade:
@@ -607,72 +715,65 @@
metadata["ota-downgrade"] = "yes"
elif OPTIONS.timestamp:
if not is_downgrade:
- raise RuntimeError("--timestamp specified but no timestamp hack needed: "
- "pre: %s, post: %s" % (pre_timestamp, post_timestamp))
+ raise RuntimeError("--override_timestamp specified but no timestamp hack "
+ "needed: pre: %s, post: %s" % (pre_timestamp,
+ post_timestamp))
metadata["post-timestamp"] = str(long(pre_timestamp) + 1)
else:
if is_downgrade:
raise RuntimeError("Downgrade detected based on timestamp check: "
- "pre: %s, post: %s. Need to specify --timestamp OR "
- "--downgrade to allow building the incremental." % (
- pre_timestamp, post_timestamp))
+ "pre: %s, post: %s. Need to specify "
+ "--override_timestamp OR --downgrade to allow "
+ "building the incremental." % (pre_timestamp,
+ post_timestamp))
metadata["post-timestamp"] = post_timestamp
def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip):
- source_version = OPTIONS.source_info_dict["recovery_api_version"]
- target_version = OPTIONS.target_info_dict["recovery_api_version"]
+ target_info = BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
+ source_info = BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
- if source_version == 0:
+ target_api_version = target_info["recovery_api_version"]
+ source_api_version = source_info["recovery_api_version"]
+ if source_api_version == 0:
print("WARNING: generating edify script for a source that "
"can't install it.")
- script = edify_generator.EdifyGenerator(
- source_version, OPTIONS.target_info_dict,
- fstab=OPTIONS.source_info_dict["fstab"])
- source_oem_props = OPTIONS.source_info_dict.get("oem_fingerprint_properties")
- target_oem_props = OPTIONS.target_info_dict.get("oem_fingerprint_properties")
- oem_dicts = None
- if source_oem_props or target_oem_props:
- recovery_mount_options = OPTIONS.source_info_dict.get(
- "recovery_mount_options")
- oem_dicts = _LoadOemDicts(script, recovery_mount_options)
+ script = edify_generator.EdifyGenerator(
+ source_api_version, target_info, fstab=source_info["fstab"])
+
+ if target_info.oem_props or source_info.oem_props:
+ if not OPTIONS.oem_no_mount:
+ source_info.WriteMountOemScript(script)
metadata = {
- "pre-device": GetOemProperty("ro.product.device", source_oem_props,
- oem_dicts and oem_dicts[0],
- OPTIONS.source_info_dict),
+ "pre-device": source_info.device,
"ota-type": "BLOCK",
}
- HandleDowngradeMetadata(metadata)
+ HandleDowngradeMetadata(metadata, target_info, source_info)
device_specific = common.DeviceSpecificParams(
source_zip=source_zip,
- source_version=source_version,
+ source_version=source_api_version,
target_zip=target_zip,
- target_version=target_version,
+ target_version=target_api_version,
output_zip=output_zip,
script=script,
metadata=metadata,
- info_dict=OPTIONS.source_info_dict)
+ info_dict=source_info)
- source_fp = CalculateFingerprint(source_oem_props, oem_dicts and oem_dicts[0],
- OPTIONS.source_info_dict)
- target_fp = CalculateFingerprint(target_oem_props, oem_dicts and oem_dicts[0],
- OPTIONS.target_info_dict)
- metadata["pre-build"] = source_fp
- metadata["post-build"] = target_fp
- metadata["pre-build-incremental"] = GetBuildProp(
- "ro.build.version.incremental", OPTIONS.source_info_dict)
- metadata["post-build-incremental"] = GetBuildProp(
- "ro.build.version.incremental", OPTIONS.target_info_dict)
+ metadata["pre-build"] = source_info.fingerprint
+ metadata["post-build"] = target_info.fingerprint
+ metadata["pre-build-incremental"] = source_info.GetBuildProp(
+ "ro.build.version.incremental")
+ metadata["post-build-incremental"] = target_info.GetBuildProp(
+ "ro.build.version.incremental")
source_boot = common.GetBootableImage(
- "/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT",
- OPTIONS.source_info_dict)
+ "/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT", source_info)
target_boot = common.GetBootableImage(
- "/tmp/boot.img", "boot.img", OPTIONS.target_tmp, "BOOT")
+ "/tmp/boot.img", "boot.img", OPTIONS.target_tmp, "BOOT", target_info)
updating_boot = (not OPTIONS.two_step and
(source_boot.data != target_boot.data))
@@ -683,19 +784,18 @@
system_tgt = GetImage("system", OPTIONS.target_tmp)
blockimgdiff_version = max(
- int(i) for i in
- OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
+ int(i) for i in target_info.get("blockimgdiff_versions", "1").split(","))
assert blockimgdiff_version >= 3
# Check the first block of the source system partition for remount R/W only
# if the filesystem is ext4.
- system_src_partition = OPTIONS.source_info_dict["fstab"]["/system"]
+ system_src_partition = source_info["fstab"]["/system"]
check_first_block = system_src_partition.fs_type == "ext4"
# Disable using imgdiff for squashfs. 'imgdiff -z' expects input files to be
# in zip formats. However with squashfs, a) all files are compressed in LZ4;
# b) the blocks listed in block map may not contain all the bytes for a given
# file (because they're rounded to be 4K-aligned).
- system_tgt_partition = OPTIONS.target_info_dict["fstab"]["/system"]
+ system_tgt_partition = target_info["fstab"]["/system"]
disable_imgdiff = (system_src_partition.fs_type == "squashfs" or
system_tgt_partition.fs_type == "squashfs")
system_diff = common.BlockDifference("system", system_tgt, system_src,
@@ -711,7 +811,7 @@
  # Check the first block of the vendor partition for remount R/W only if
  # the filesystem is ext4.
- vendor_partition = OPTIONS.source_info_dict["fstab"]["/vendor"]
+ vendor_partition = source_info["fstab"]["/vendor"]
check_first_block = vendor_partition.fs_type == "ext4"
disable_imgdiff = vendor_partition.fs_type == "squashfs"
vendor_diff = common.BlockDifference("vendor", vendor_tgt, vendor_src,
@@ -722,10 +822,10 @@
vendor_diff = None
AddCompatibilityArchiveIfTrebleEnabled(
- target_zip, output_zip, OPTIONS.target_info_dict,
- OPTIONS.source_info_dict)
+ target_zip, output_zip, target_info, source_info)
- AppendAssertions(script, OPTIONS.target_info_dict, oem_dicts)
+ # Assertions (e.g. device properties check).
+ target_info.WriteDeviceAssertions(script, OPTIONS.oem_no_mount)
device_specific.IncrementalOTA_Assertions()
# Two-step incremental package strategy (in chronological order,
@@ -751,12 +851,12 @@
# (allow recovery to mark itself finished and reboot)
if OPTIONS.two_step:
- if not OPTIONS.source_info_dict.get("multistage_support", None):
+ if not source_info.get("multistage_support"):
assert False, "two-step packages not supported by this build"
fs = OPTIONS.source_info_dict["fstab"]["/misc"]
assert fs.fs_type.upper() == "EMMC", \
"two-step packages only supported on devices with EMMC /misc partitions"
- bcb_dev = {"bcb_dev": fs.device}
+ bcb_dev = {"bcb_dev" : fs.device}
common.ZipWriteStr(output_zip, "recovery.img", target_recovery.data)
script.AppendExtra("""
if get_stage("%(bcb_dev)s") == "2/3" then
@@ -776,27 +876,14 @@
script.Comment("Stage 1/3")
# Dump fingerprints
- script.Print("Source: %s" % (source_fp,))
- script.Print("Target: %s" % (target_fp,))
+ script.Print("Source: {}".format(source_info.fingerprint))
+ script.Print("Target: {}".format(target_info.fingerprint))
script.Print("Verifying current system...")
device_specific.IncrementalOTA_VerifyBegin()
- if source_oem_props is None and target_oem_props is None:
- script.AssertSomeFingerprint(source_fp, target_fp)
- elif source_oem_props is not None and target_oem_props is not None:
- script.AssertSomeThumbprint(
- GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict),
- GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
- elif source_oem_props is None and target_oem_props is not None:
- script.AssertFingerprintOrThumbprint(
- source_fp,
- GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict))
- else:
- script.AssertFingerprintOrThumbprint(
- target_fp,
- GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
+ WriteFingerprintAssertion(script, target_info, source_info)
# Check the required cache size (i.e. stashed blocks).
size = []
@@ -806,8 +893,7 @@
size.append(vendor_diff.required_cache)
if updating_boot:
- boot_type, boot_device = common.GetTypeAndDevice(
- "/boot", OPTIONS.source_info_dict)
+ boot_type, boot_device = common.GetTypeAndDevice("/boot", source_info)
d = common.Difference(target_boot, source_boot)
_, _, d = d.ComputePatch()
if d is None:
@@ -984,7 +1070,8 @@
cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
rsa_key = common.MakeTempFile(prefix="key-", suffix=".key")
cmd.extend(["-out", rsa_key])
- p1 = common.Run(cmd, verbose=False, stdout=log_file, stderr=subprocess.STDOUT)
+ p1 = common.Run(cmd, verbose=False, stdout=log_file,
+ stderr=subprocess.STDOUT)
p1.communicate()
assert p1.returncode == 0, "openssl pkcs8 failed"
@@ -993,35 +1080,32 @@
output_zip = zipfile.ZipFile(temp_zip_file, "w",
compression=zipfile.ZIP_DEFLATED)
- # Metadata to comply with Android OTA package format.
- oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties", None)
- oem_dicts = None
- if oem_props:
- oem_dicts = _LoadOemDicts(None)
+ if source_file is not None:
+ target_info = BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
+ source_info = BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
+ else:
+ target_info = BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
+ source_info = None
+ # Metadata to comply with Android OTA package format.
metadata = {
- "post-build": CalculateFingerprint(oem_props, oem_dicts and oem_dicts[0],
- OPTIONS.info_dict),
- "post-build-incremental" : GetBuildProp("ro.build.version.incremental",
- OPTIONS.info_dict),
- "pre-device": GetOemProperty("ro.product.device", oem_props,
- oem_dicts and oem_dicts[0],
- OPTIONS.info_dict),
- "ota-required-cache": "0",
- "ota-type": "AB",
+ "post-build" : target_info.fingerprint,
+ "post-build-incremental" : target_info.GetBuildProp(
+ "ro.build.version.incremental"),
+ "ota-required-cache" : "0",
+ "ota-type" : "AB",
}
if source_file is not None:
- metadata["pre-build"] = CalculateFingerprint(oem_props,
- oem_dicts and oem_dicts[0],
- OPTIONS.source_info_dict)
- metadata["pre-build-incremental"] = GetBuildProp(
- "ro.build.version.incremental", OPTIONS.source_info_dict)
+ metadata["pre-device"] = source_info.device
+ metadata["pre-build"] = source_info.fingerprint
+ metadata["pre-build-incremental"] = source_info.GetBuildProp(
+ "ro.build.version.incremental")
- HandleDowngradeMetadata(metadata)
+ HandleDowngradeMetadata(metadata, target_info, source_info)
else:
- metadata["post-timestamp"] = GetBuildProp(
- "ro.build.date.utc", OPTIONS.info_dict)
+ metadata["pre-device"] = target_info.device
+ metadata["post-timestamp"] = target_info.GetBuildProp("ro.build.date.utc")
# 1. Generate payload.
payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
@@ -1120,23 +1204,23 @@
# If dm-verity is supported for the device, copy contents of care_map
# into A/B OTA package.
target_zip = zipfile.ZipFile(target_file, "r")
- if (OPTIONS.info_dict.get("verity") == "true" or
- OPTIONS.info_dict.get("avb_enable") == "true"):
+ if (target_info.get("verity") == "true" or
+ target_info.get("avb_enable") == "true"):
care_map_path = "META/care_map.txt"
namelist = target_zip.namelist()
if care_map_path in namelist:
care_map_data = target_zip.read(care_map_path)
common.ZipWriteStr(output_zip, "care_map.txt", care_map_data,
- compress_type=zipfile.ZIP_STORED)
+ compress_type=zipfile.ZIP_STORED)
else:
print("Warning: cannot find care map file in target_file package")
- # OPTIONS.source_info_dict must be None for incrementals.
+ # source_info must be None for full OTAs.
if source_file is None:
- assert OPTIONS.source_info_dict is None
+ assert source_info is None
AddCompatibilityArchiveIfTrebleEnabled(
- target_zip, output_zip, OPTIONS.info_dict, OPTIONS.source_info_dict)
+ target_zip, output_zip, target_info, source_info)
common.ZipClose(target_zip)
@@ -1289,12 +1373,15 @@
# Load the dict file from the zip directly to have a peek at the OTA type.
# For packages using A/B update, unzipping is not needed.
if OPTIONS.extracted_input is not None:
- OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input, OPTIONS.extracted_input)
+ OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input,
+ OPTIONS.extracted_input)
else:
input_zip = zipfile.ZipFile(args[0], "r")
OPTIONS.info_dict = common.LoadInfoDict(input_zip)
common.ZipClose(input_zip)
+ OPTIONS.oem_dicts = _LoadOemDicts(OPTIONS.oem_source)
+
ab_update = OPTIONS.info_dict.get("ab_update") == "true"
# Use the default key to sign the package if not specified with package_key.
@@ -1337,7 +1424,8 @@
if OPTIONS.extracted_input is not None:
OPTIONS.input_tmp = OPTIONS.extracted_input
OPTIONS.target_tmp = OPTIONS.input_tmp
- OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.input_tmp, OPTIONS.input_tmp)
+ OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.input_tmp,
+ OPTIONS.input_tmp)
input_zip = zipfile.ZipFile(args[0], "r")
else:
print("unzipping target target-files...")
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index ed454ca..8fb4600 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -353,6 +353,128 @@
os.remove(zip_file.name)
+class CommonApkUtilsTest(unittest.TestCase):
+ """Tests the APK utils related functions."""
+
+ APKCERTS_TXT1 = (
+ 'name="RecoveryLocalizer.apk" certificate="certs/devkey.x509.pem"'
+ ' private_key="certs/devkey.pk8"\n'
+ 'name="Settings.apk"'
+ ' certificate="build/target/product/security/platform.x509.pem"'
+ ' private_key="build/target/product/security/platform.pk8"\n'
+ 'name="TV.apk" certificate="PRESIGNED" private_key=""\n'
+ )
+
+ APKCERTS_CERTMAP1 = {
+ 'RecoveryLocalizer.apk' : 'certs/devkey',
+ 'Settings.apk' : 'build/target/product/security/platform',
+ 'TV.apk' : 'PRESIGNED',
+ }
+
+ APKCERTS_TXT2 = (
+ 'name="Compressed1.apk" certificate="certs/compressed1.x509.pem"'
+ ' private_key="certs/compressed1.pk8" compressed="gz"\n'
+ 'name="Compressed2a.apk" certificate="certs/compressed2.x509.pem"'
+ ' private_key="certs/compressed2.pk8" compressed="gz"\n'
+ 'name="Compressed2b.apk" certificate="certs/compressed2.x509.pem"'
+ ' private_key="certs/compressed2.pk8" compressed="gz"\n'
+ 'name="Compressed3.apk" certificate="certs/compressed3.x509.pem"'
+ ' private_key="certs/compressed3.pk8" compressed="gz"\n'
+ )
+
+ APKCERTS_CERTMAP2 = {
+ 'Compressed1.apk' : 'certs/compressed1',
+ 'Compressed2a.apk' : 'certs/compressed2',
+ 'Compressed2b.apk' : 'certs/compressed2',
+ 'Compressed3.apk' : 'certs/compressed3',
+ }
+
+ APKCERTS_TXT3 = (
+ 'name="Compressed4.apk" certificate="certs/compressed4.x509.pem"'
+ ' private_key="certs/compressed4.pk8" compressed="xz"\n'
+ )
+
+ APKCERTS_CERTMAP3 = {
+ 'Compressed4.apk' : 'certs/compressed4',
+ }
+
+ def tearDown(self):
+ common.Cleanup()
+
+ @staticmethod
+ def _write_apkcerts_txt(apkcerts_txt, additional=None):
+ if additional is None:
+ additional = []
+ target_files = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.writestr('META/apkcerts.txt', apkcerts_txt)
+ for entry in additional:
+ target_files_zip.writestr(entry, '')
+ return target_files
+
+ def test_ReadApkCerts_NoncompressedApks(self):
+ target_files = self._write_apkcerts_txt(self.APKCERTS_TXT1)
+ with zipfile.ZipFile(target_files, 'r') as input_zip:
+ certmap, ext = common.ReadApkCerts(input_zip)
+
+ self.assertDictEqual(self.APKCERTS_CERTMAP1, certmap)
+ self.assertIsNone(ext)
+
+ def test_ReadApkCerts_CompressedApks(self):
+ # We have "installed" Compressed1.apk.gz only. Note that Compressed3.apk is
+ # not stored in '.gz' format, so it shouldn't be considered as installed.
+ target_files = self._write_apkcerts_txt(
+ self.APKCERTS_TXT2,
+ ['Compressed1.apk.gz', 'Compressed3.apk'])
+
+ with zipfile.ZipFile(target_files, 'r') as input_zip:
+ certmap, ext = common.ReadApkCerts(input_zip)
+
+ self.assertDictEqual(self.APKCERTS_CERTMAP2, certmap)
+ self.assertEqual('.gz', ext)
+
+ # Alternative case with '.xz'.
+ target_files = self._write_apkcerts_txt(
+ self.APKCERTS_TXT3, ['Compressed4.apk.xz'])
+
+ with zipfile.ZipFile(target_files, 'r') as input_zip:
+ certmap, ext = common.ReadApkCerts(input_zip)
+
+ self.assertDictEqual(self.APKCERTS_CERTMAP3, certmap)
+ self.assertEqual('.xz', ext)
+
+ def test_ReadApkCerts_CompressedAndNoncompressedApks(self):
+ target_files = self._write_apkcerts_txt(
+ self.APKCERTS_TXT1 + self.APKCERTS_TXT2,
+ ['Compressed1.apk.gz', 'Compressed3.apk'])
+
+ with zipfile.ZipFile(target_files, 'r') as input_zip:
+ certmap, ext = common.ReadApkCerts(input_zip)
+
+ certmap_merged = self.APKCERTS_CERTMAP1.copy()
+ certmap_merged.update(self.APKCERTS_CERTMAP2)
+ self.assertDictEqual(certmap_merged, certmap)
+ self.assertEqual('.gz', ext)
+
+ def test_ReadApkCerts_MultipleCompressionMethods(self):
+ target_files = self._write_apkcerts_txt(
+ self.APKCERTS_TXT2 + self.APKCERTS_TXT3,
+ ['Compressed1.apk.gz', 'Compressed4.apk.xz'])
+
+ with zipfile.ZipFile(target_files, 'r') as input_zip:
+ self.assertRaises(ValueError, common.ReadApkCerts, input_zip)
+
+ def test_ReadApkCerts_MismatchingKeys(self):
+ malformed_apkcerts_txt = (
+ 'name="App1.apk" certificate="certs/cert1.x509.pem"'
+ ' private_key="certs/cert2.pk8"\n'
+ )
+ target_files = self._write_apkcerts_txt(malformed_apkcerts_txt)
+
+ with zipfile.ZipFile(target_files, 'r') as input_zip:
+ self.assertRaises(ValueError, common.ReadApkCerts, input_zip)
+
+
class InstallRecoveryScriptFormatTest(unittest.TestCase):
"""Checks the format of install-recovery.sh.
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
new file mode 100644
index 0000000..0948c61
--- /dev/null
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -0,0 +1,302 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import copy
+import unittest
+
+import common
+from ota_from_target_files import (
+ _LoadOemDicts, BuildInfo, WriteFingerprintAssertion)
+
+
+class MockScriptWriter(object):
+ """A class that mocks edify_generator.EdifyGenerator.
+
+  It simply pushes the incoming arguments onto the script stack, so that tests
+  can assert the calls made to EdifyGenerator functions.
+ """
+
+ def __init__(self):
+ self.script = []
+
+ def Mount(self, *args):
+ self.script.append(('Mount',) + args)
+
+ def AssertDevice(self, *args):
+ self.script.append(('AssertDevice',) + args)
+
+ def AssertOemProperty(self, *args):
+ self.script.append(('AssertOemProperty',) + args)
+
+ def AssertFingerprintOrThumbprint(self, *args):
+ self.script.append(('AssertFingerprintOrThumbprint',) + args)
+
+ def AssertSomeFingerprint(self, *args):
+ self.script.append(('AssertSomeFingerprint',) + args)
+
+ def AssertSomeThumbprint(self, *args):
+ self.script.append(('AssertSomeThumbprint',) + args)
+
+
+class BuildInfoTest(unittest.TestCase):
+
+ TEST_INFO_DICT = {
+ 'build.prop' : {
+ 'ro.product.device' : 'product-device',
+ 'ro.product.name' : 'product-name',
+ 'ro.build.fingerprint' : 'build-fingerprint',
+ 'ro.build.foo' : 'build-foo',
+ },
+ 'vendor.build.prop' : {
+ 'ro.vendor.build.fingerprint' : 'vendor-build-fingerprint',
+ },
+ 'property1' : 'value1',
+ 'property2' : 4096,
+ }
+
+ TEST_INFO_DICT_USES_OEM_PROPS = {
+ 'build.prop' : {
+ 'ro.product.name' : 'product-name',
+ 'ro.build.thumbprint' : 'build-thumbprint',
+ 'ro.build.bar' : 'build-bar',
+ },
+ 'vendor.build.prop' : {
+ 'ro.vendor.build.fingerprint' : 'vendor-build-fingerprint',
+ },
+ 'property1' : 'value1',
+ 'property2' : 4096,
+ 'oem_fingerprint_properties' : 'ro.product.device ro.product.brand',
+ }
+
+ TEST_OEM_DICTS = [
+ {
+ 'ro.product.brand' : 'brand1',
+ 'ro.product.device' : 'device1',
+ },
+ {
+ 'ro.product.brand' : 'brand2',
+ 'ro.product.device' : 'device2',
+ },
+ {
+ 'ro.product.brand' : 'brand3',
+ 'ro.product.device' : 'device3',
+ },
+ ]
+
+ def test_init(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT, None)
+ self.assertEqual('product-device', target_info.device)
+ self.assertEqual('build-fingerprint', target_info.fingerprint)
+ self.assertFalse(target_info.is_ab)
+ self.assertIsNone(target_info.oem_props)
+
+ def test_init_with_oem_props(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ self.assertEqual('device1', target_info.device)
+ self.assertEqual('brand1/product-name/device1:build-thumbprint',
+ target_info.fingerprint)
+
+ # Swap the order in oem_dicts, which would lead to different BuildInfo.
+ oem_dicts = copy.copy(self.TEST_OEM_DICTS)
+ oem_dicts[0], oem_dicts[2] = oem_dicts[2], oem_dicts[0]
+ target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS, oem_dicts)
+ self.assertEqual('device3', target_info.device)
+ self.assertEqual('brand3/product-name/device3:build-thumbprint',
+ target_info.fingerprint)
+
+ # Missing oem_dict should be rejected.
+ self.assertRaises(AssertionError, BuildInfo,
+ self.TEST_INFO_DICT_USES_OEM_PROPS, None)
+
+ def test___getitem__(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT, None)
+ self.assertEqual('value1', target_info['property1'])
+ self.assertEqual(4096, target_info['property2'])
+ self.assertEqual('build-foo', target_info['build.prop']['ro.build.foo'])
+
+ def test___getitem__with_oem_props(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ self.assertEqual('value1', target_info['property1'])
+ self.assertEqual(4096, target_info['property2'])
+ self.assertRaises(KeyError,
+ lambda: target_info['build.prop']['ro.build.foo'])
+
+ def test_get(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT, None)
+ self.assertEqual('value1', target_info.get('property1'))
+ self.assertEqual(4096, target_info.get('property2'))
+ self.assertEqual(4096, target_info.get('property2', 1024))
+ self.assertEqual(1024, target_info.get('property-nonexistent', 1024))
+ self.assertEqual('build-foo', target_info.get('build.prop')['ro.build.foo'])
+
+ def test_get_with_oem_props(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ self.assertEqual('value1', target_info.get('property1'))
+ self.assertEqual(4096, target_info.get('property2'))
+ self.assertEqual(4096, target_info.get('property2', 1024))
+ self.assertEqual(1024, target_info.get('property-nonexistent', 1024))
+ self.assertIsNone(target_info.get('build.prop').get('ro.build.foo'))
+ self.assertRaises(KeyError,
+ lambda: target_info.get('build.prop')['ro.build.foo'])
+
+ def test_GetBuildProp(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT, None)
+ self.assertEqual('build-foo', target_info.GetBuildProp('ro.build.foo'))
+ self.assertRaises(common.ExternalError, target_info.GetBuildProp,
+ 'ro.build.nonexistent')
+
+ def test_GetBuildProp_with_oem_props(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ self.assertEqual('build-bar', target_info.GetBuildProp('ro.build.bar'))
+ self.assertRaises(common.ExternalError, target_info.GetBuildProp,
+ 'ro.build.nonexistent')
+
+ def test_GetVendorBuildProp(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT, None)
+ self.assertEqual('vendor-build-fingerprint',
+ target_info.GetVendorBuildProp(
+ 'ro.vendor.build.fingerprint'))
+ self.assertRaises(common.ExternalError, target_info.GetVendorBuildProp,
+ 'ro.build.nonexistent')
+
+ def test_GetVendorBuildProp_with_oem_props(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ self.assertEqual('vendor-build-fingerprint',
+ target_info.GetVendorBuildProp(
+ 'ro.vendor.build.fingerprint'))
+ self.assertRaises(common.ExternalError, target_info.GetVendorBuildProp,
+ 'ro.build.nonexistent')
+
+ def test_WriteMountOemScript(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ script_writer = MockScriptWriter()
+ target_info.WriteMountOemScript(script_writer)
+ self.assertEqual([('Mount', '/oem', None)], script_writer.script)
+
+ def test_WriteDeviceAssertions(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT, None)
+ script_writer = MockScriptWriter()
+ target_info.WriteDeviceAssertions(script_writer, False)
+ self.assertEqual([('AssertDevice', 'product-device')], script_writer.script)
+
+ def test_WriteDeviceAssertions_with_oem_props(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ script_writer = MockScriptWriter()
+ target_info.WriteDeviceAssertions(script_writer, False)
+ self.assertEqual(
+ [
+ ('AssertOemProperty', 'ro.product.device',
+ ['device1', 'device2', 'device3'], False),
+ ('AssertOemProperty', 'ro.product.brand',
+ ['brand1', 'brand2', 'brand3'], False),
+ ],
+ script_writer.script)
+
+ def test_WriteFingerprintAssertion_without_oem_props(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT, None)
+ source_info_dict = copy.deepcopy(self.TEST_INFO_DICT)
+ source_info_dict['build.prop']['ro.build.fingerprint'] = (
+ 'source-build-fingerprint')
+ source_info = BuildInfo(source_info_dict, None)
+
+ script_writer = MockScriptWriter()
+ WriteFingerprintAssertion(script_writer, target_info, source_info)
+ self.assertEqual(
+ [('AssertSomeFingerprint', 'source-build-fingerprint',
+ 'build-fingerprint')],
+ script_writer.script)
+
+ def test_WriteFingerprintAssertion_with_source_oem_props(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT, None)
+ source_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+
+ script_writer = MockScriptWriter()
+ WriteFingerprintAssertion(script_writer, target_info, source_info)
+ self.assertEqual(
+ [('AssertFingerprintOrThumbprint', 'build-fingerprint',
+ 'build-thumbprint')],
+ script_writer.script)
+
+ def test_WriteFingerprintAssertion_with_target_oem_props(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ source_info = BuildInfo(self.TEST_INFO_DICT, None)
+
+ script_writer = MockScriptWriter()
+ WriteFingerprintAssertion(script_writer, target_info, source_info)
+ self.assertEqual(
+ [('AssertFingerprintOrThumbprint', 'build-fingerprint',
+ 'build-thumbprint')],
+ script_writer.script)
+
+ def test_WriteFingerprintAssertion_with_both_oem_props(self):
+ target_info = BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+ self.TEST_OEM_DICTS)
+ source_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
+ source_info_dict['build.prop']['ro.build.thumbprint'] = (
+ 'source-build-thumbprint')
+ source_info = BuildInfo(source_info_dict, self.TEST_OEM_DICTS)
+
+ script_writer = MockScriptWriter()
+ WriteFingerprintAssertion(script_writer, target_info, source_info)
+ self.assertEqual(
+ [('AssertSomeThumbprint', 'build-thumbprint',
+ 'source-build-thumbprint')],
+ script_writer.script)
+
+
+class LoadOemDictsTest(unittest.TestCase):
+
+ def tearDown(self):
+ common.Cleanup()
+
+ def test_NoneDict(self):
+ self.assertIsNone(_LoadOemDicts(None))
+
+ def test_SingleDict(self):
+ dict_file = common.MakeTempFile()
+ with open(dict_file, 'w') as dict_fp:
+ dict_fp.write('abc=1\ndef=2\nxyz=foo\na.b.c=bar\n')
+
+ oem_dicts = _LoadOemDicts([dict_file])
+ self.assertEqual(1, len(oem_dicts))
+ self.assertEqual('foo', oem_dicts[0]['xyz'])
+ self.assertEqual('bar', oem_dicts[0]['a.b.c'])
+
+ def test_MultipleDicts(self):
+ oem_source = []
+ for i in range(3):
+ dict_file = common.MakeTempFile()
+ with open(dict_file, 'w') as dict_fp:
+ dict_fp.write(
+ 'ro.build.index={}\ndef=2\nxyz=foo\na.b.c=bar\n'.format(i))
+ oem_source.append(dict_file)
+
+ oem_dicts = _LoadOemDicts(oem_source)
+ self.assertEqual(3, len(oem_dicts))
+ for i, oem_dict in enumerate(oem_dicts):
+ self.assertEqual('2', oem_dict['def'])
+ self.assertEqual('foo', oem_dict['xyz'])
+ self.assertEqual('bar', oem_dict['a.b.c'])
+ self.assertEqual('{}'.format(i), oem_dict['ro.build.index'])