Merge "add ot-cli-ftd in /system/bin/ for eng and userdebug builds" into main
diff --git a/common/math.mk b/common/math.mk
index 0271ea8..ecee474 100644
--- a/common/math.mk
+++ b/common/math.mk
@@ -25,6 +25,7 @@
61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 \
81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100
__MATH_NUMBERS := 0 $(__MATH_POS_NUMBERS)
+__MATH_ONE_NUMBERS := 0 1 2 3 4 5 6 7 8 9
math-error = $(call pretty-error,$(1))
math-expect :=
@@ -36,6 +37,10 @@
# make -f ${ANDROID_BUILD_TOP}/build/make/common/math.mk RUN_MATH_TESTS=true
# $(get_build_var CKATI) -f ${ANDROID_BUILD_TOP}//build/make/common/math.mk RUN_MATH_TESTS=true
ifdef RUN_MATH_TESTS
+ ifndef empty
+ empty :=
+ space := $(empty) $(empty)
+ endif
MATH_TEST_FAILURE :=
MATH_TEST_ERROR :=
math-error = $(if $(MATH_TEST_ERROR),,$(eval MATH_TEST_ERROR:=$(1)))
@@ -61,13 +66,29 @@
endif
# Returns true if $(1) is a non-negative integer <= 100, otherwise returns nothing.
-define math_is_number
+define math_is_number_in_100
$(strip \
$(if $(1),,$(call math-error,Argument missing)) \
$(if $(word 2,$(1)),$(call math-error,Multiple words in a single argument: $(1))) \
$(if $(filter $(1),$(__MATH_NUMBERS)),true))
endef
+# Same as math_is_number_in_100, but without the upper limit.
+define _math_ext_is_number
+$(strip \
+ $(if $(1),,$(call math-error,Argument missing)) \
+ $(if $(word 2,$(1)),$(call math-error,Multiple words in a single argument: $(1))) \
+ $(eval should_empty:=$(1)) \
+ $(foreach num,$(__MATH_ONE_NUMBERS),\
+ $(eval should_empty:=$(subst $(num),$(empty),$(should_empty)))) \
+ $(if $(should_empty),,true))
+endef
+
+# Returns true if $(1) is a non-negative integer.
+define math_is_number
+$(strip $(if $(call math_is_number_in_100,$(1)),true,$(call _math_ext_is_number,$(1))))
+endef
+
define math_is_zero
$(strip \
$(if $(word 2,$(1)),$(call math-error,Multiple words in a single argument: $(1))) \
@@ -76,6 +97,7 @@
$(call math-expect-true,(call math_is_number,0))
$(call math-expect-true,(call math_is_number,2))
+$(call math-expect-true,(call math_is_number,202412))
$(call math-expect-false,(call math_is_number,foo))
$(call math-expect-false,(call math_is_number,-1))
$(call math-expect-error,(call math_is_number,1 2),Multiple words in a single argument: 1 2)
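The trick behind _math_ext_is_number is digit substitution: every digit 0-9 is replaced with the empty string, and the argument is accepted only if nothing survives. A minimal illustration, assuming the definitions above are loaded:

    $(call _math_ext_is_number,202412)   # every character is a digit, all substituted away -> "true"
    $(call _math_ext_is_number,-1)       # the "-" survives substitution -> returns nothing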
@@ -88,7 +110,7 @@
$(call math-expect-error,(call math_is_zero,no 2),Multiple words in a single argument: no 2)
define _math_check_valid
-$(if $(call math_is_number,$(1)),,$(call math-error,Only non-negative integers <= 100 are supported (not $(1))))
+$(if $(call math_is_number_in_100,$(1)),,$(call math-error,Only non-negative integers <= 100 are supported (not $(1))))
endef
$(call math-expect,(call _math_check_valid,0))
@@ -113,18 +135,81 @@
$(call math-expect,(call int_range_list,2,1),)
$(call math-expect-error,(call int_range_list,1,101),Only non-negative integers <= 100 are supported (not 101))
+# Split an integer into a list of digits
+define _math_number_to_list
+$(strip \
+ $(if $(call _math_ext_is_number,$(1)),,\
+ $(call math-error,Only non-negative integers are supported (not $(1)))) \
+ $(eval num_list:=$(1)) \
+ $(foreach num,$(__MATH_ONE_NUMBERS),\
+ $(eval num_list:=$(subst $(num),$(space)$(num),$(num_list)))) \
+ $(if $(filter $(words $(num_list)),$(__MATH_ONE_NUMBERS)),,\
+    $(call math-error,Only non-negative integers with at most 9 digits are supported (not $(1)))) \
+ $(if $(filter 0,$(word 1,$(num_list))),\
+ $(call math-error,Only non-negative integers without leading zeros are supported (not $(1)))) \
+ $(num_list))
+endef
+
+$(call math-expect,(call _math_number_to_list,123),1 2 3)
+$(call math-expect-error,(call _math_number_to_list,123 456),Multiple words in a single argument: 123 456)
+$(call math-expect-error,(call _math_number_to_list,-123),Only non-negative integers are supported (not -123))
+$(call math-expect-error,(call _math_number_to_list,002),Only non-negative integers without leading zeros are supported (not 002))
+$(call math-expect-error,(call _math_number_to_list,1234567890),Only non-negative integers with at most 9 digits are supported (not 1234567890))
+
+# Compare the 1-digit integers $(1) and $(2).
+# Returns 1 if $(1) > $(2), -1 if $(1) < $(2), nothing if equals.
+define _math_1digit_comp
+$(strip \
+ $(if $(filter $(1),$(2)),,\
+ $(if $(filter $(1),$(firstword $(filter $(1) $(2),$(__MATH_ONE_NUMBERS)))),-1,1)))
+endef
+
+$(call math-expect,(call _math_1digit_comp,1,1))
+$(call math-expect,(call _math_1digit_comp,0,9),-1)
+$(call math-expect,(call _math_1digit_comp,3,1),1)
+
+# Compare integers $(1) and $(2), both already split into digit lists of the same length $(3).
+# Returns 1 if $(1) > $(2), -1 if $(1) < $(2), nothing if equals.
+define _math_list_comp
+$(strip \
+ $(eval ans:=) \
+ $(foreach num,$(call int_range_list,1,$(3)),\
+ $(if $(ans),,$(eval ans:=$(call _math_1digit_comp,$(word $(num),$(1)),$(word $(num),$(2)))))) \
+ $(ans))
+endef
+
+# Compare any two non-negative integers $(1) and $(2).
+# Returns 1 if $(1) > $(2), -1 if $(1) < $(2), nothing if equals.
+define _math_ext_comp
+$(strip \
+ $(eval num_list1:=$(call _math_number_to_list,$(1))) \
+ $(eval len1:=$(words $(num_list1))) \
+ $(eval num_list2:=$(call _math_number_to_list,$(2))) \
+ $(eval len2:=$(words $(num_list2))) \
+ $(eval comp:=$(call _math_1digit_comp,$(len1),$(len2))) \
+ $(if $(comp),$(comp),$(call _math_list_comp,$(num_list1),$(num_list2),$(len1))))
+endef
+
+$(call math-expect,(call _math_ext_comp,5,10),-1)
+$(call math-expect,(call _math_ext_comp,12345,12345))
+$(call math-expect,(call _math_ext_comp,500,5),1)
+$(call math-expect,(call _math_ext_comp,202404,202504),-1)
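Spelling out the last expectation above: _math_ext_comp first compares digit counts (six apiece here, so no early answer) and then walks the digit lists left to right; 202404 and 202504 first differ at the fourth digit, and 4 < 5, so the result is -1. An illustrative trace using the helpers above:

    # $(call _math_number_to_list,202404) -> 2 0 2 4 0 4
    # $(call _math_number_to_list,202504) -> 2 0 2 5 0 4
    # first differing position: 4 vs 5 -> $(call _math_1digit_comp,4,5) -> -1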
# Returns the greater of $1 or $2.
-# If $1 or $2 is not a positive integer <= 100, then an error is generated.
+# If $1 or $2 is not a non-negative integer, then an error is generated.
define math_max
-$(strip $(call _math_check_valid,$(1)) $(call _math_check_valid,$(2)) \
- $(lastword $(filter $(1) $(2),$(__MATH_NUMBERS))))
+$(strip \
+ $(if $(filter truetrue,$(call math_is_number_in_100,$(1))$(call math_is_number_in_100,$(2))),\
+ $(lastword $(filter $(1) $(2),$(__MATH_NUMBERS))),\
+ $(if $(filter 1,$(call _math_ext_comp,$(1),$(2))),$(1),$(2))))
endef
# Returns the lesser of $1 or $2.
define math_min
-$(strip $(call _math_check_valid,$(1)) $(call _math_check_valid,$(2)) \
- $(firstword $(filter $(1) $(2),$(__MATH_NUMBERS))))
+$(strip \
+ $(if $(filter truetrue,$(call math_is_number_in_100,$(1))$(call math_is_number_in_100,$(2))),\
+ $(firstword $(filter $(1) $(2),$(__MATH_NUMBERS))),\
+ $(if $(filter -1,$(call _math_ext_comp,$(1),$(2))),$(1),$(2))))
endef
$(call math-expect-error,(call math_max),Argument missing)
@@ -142,6 +227,15 @@
$(call math-expect,(call math_min,7,32),7)
$(call math-expect,(call math_min,32,7),7)
+$(call math-expect,(call math_max,32759,7),32759)
+$(call math-expect,(call math_max,7,32759),32759)
+$(call math-expect,(call math_max,202404,202505),202505)
+$(call math-expect,(call math_max,202404,202404),202404)
+$(call math-expect,(call math_min,8908527,32),32)
+$(call math-expect,(call math_min,32,8908527),32)
+$(call math-expect,(call math_min,202404,202505),202404)
+$(call math-expect,(call math_min,202404,202404),202404)
+
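The fast path of the reworked math_max/math_min relies on $(filter) returning matches in the order of its second argument, and on __MATH_NUMBERS being sorted ascending, so lastword/firstword directly yield the larger/smaller value. A worked illustration for inputs that are both <= 100:

    # $(filter 32 7,$(__MATH_NUMBERS)) -> 7 32   (order follows __MATH_NUMBERS)
    # $(lastword 7 32)  -> 32  (math_max)
    # $(firstword 7 32) -> 7   (math_min)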
define math_gt_or_eq
$(if $(filter $(1),$(call math_max,$(1),$(2))),true)
endef
@@ -150,6 +244,10 @@
$(if $(call math_gt_or_eq,$(2),$(1)),,true)
endef
+define math_lt_or_eq
+$(if $(call math_gt_or_eq,$(2),$(1)),true)
+endef
+
define math_lt
$(if $(call math_gt_or_eq,$(1),$(2)),,true)
endef
@@ -160,10 +258,17 @@
$(call math-expect-true,(call math_gt, 4, 3))
$(call math-expect-false,(call math_gt, 5, 5))
$(call math-expect-false,(call math_gt, 6, 7))
+$(call math-expect-true,(call math_lt_or_eq, 11, 11))
+$(call math-expect-false,(call math_lt_or_eq, 25, 15))
+$(call math-expect-true,(call math_lt_or_eq, 9, 16))
$(call math-expect-false,(call math_lt, 1, 0))
$(call math-expect-false,(call math_lt, 8, 8))
$(call math-expect-true,(call math_lt, 10, 11))
+$(call math-expect-true,(call math_gt_or_eq, 2573904, 2573900))
+$(call math-expect-true,(call math_gt_or_eq, 12345, 12345))
+$(call math-expect-false,(call math_gt_or_eq, 56, 2780))
+
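math_lt_or_eq is what later allows core/main.mk (further down in this change) to gate ro.product.vndk.version on PLATFORM_VNDK_VERSION <= 35 without the old 100 cap; a standalone guard would look like this sketch:

    ifeq ($(call math_lt_or_eq,$(PLATFORM_VNDK_VERSION),35),true)
      # only reached for VNDK versions up to and including 35
    endif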
# $1 is the variable name to increment
define inc_and_print
$(strip $(eval $(1) := $($(1)) .)$(words $($(1))))
@@ -192,6 +297,7 @@
$(call math-expect,(call numbers_less_than,3,0 2 1 3),0 2 1)
$(call math-expect,(call numbers_less_than,4,0 2 1 3),0 2 1 3)
$(call math-expect,(call numbers_less_than,3,0 2 1 3 2),0 2 1 2)
+$(call math-expect,(call numbers_less_than,100,0 1000 50 101 100),0 50)
# Returns the words in $2 that are numbers and are greater or equal to $1
define numbers_greater_or_equal_to
diff --git a/core/Makefile b/core/Makefile
index a253026..79c8a17 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -793,9 +793,25 @@
.PHONY: apkcerts-list
apkcerts-list: $(APKCERTS_FILE)
+intermediates := $(call intermediates-dir-for,PACKAGING,apexkeys)
+APEX_KEYS_FILE := $(intermediates)/apexkeys.txt
+
+all_apex_keys_files := $(sort $(foreach m,$(call product-installed-modules,$(INTERNAL_PRODUCT)),$(ALL_MODULES.$(m).APEX_KEYS_FILE)))
+$(APEX_KEYS_FILE): $(all_apex_keys_files)
+ @mkdir -p $(dir $@)
+ @rm -f $@
+ $(hide) touch $@
+ $(hide) $(foreach file,$^,cat $(file) >> $@ $(newline))
+all_apex_keys_files :=
+
+$(call declare-0p-target,$(APEX_KEYS_FILE))
+
+.PHONY: apexkeys.txt
+apexkeys.txt: $(APEX_KEYS_FILE)
+
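In short, every installed module that declares LOCAL_APEX_KEY_PATH (recorded in base_rules.mk further down) contributes its per-module key file, and the rule above concatenates them into a single apexkeys.txt, replacing the Soong-generated SOONG_APEX_KEYS_FILE. A hedged sketch of where the result lands and roughly what each line looks like (the exact fields come from the per-module files):

    # $(PRODUCT_OUT)/obj/PACKAGING/apexkeys_intermediates/apexkeys.txt
    # name="com.android.example.apex" public_key="..." private_key="..." container_certificate="..." container_private_key="..."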
ifneq (,$(TARGET_BUILD_APPS))
$(call dist-for-goals, apps_only, $(APKCERTS_FILE):apkcerts.txt)
- $(call dist-for-goals, apps_only, $(SOONG_APEX_KEYS_FILE):apexkeys.txt)
+ $(call dist-for-goals, apps_only, $(APEX_KEYS_FILE):apexkeys.txt)
endif
@@ -871,6 +887,9 @@
$(call declare-1p-target,$(MK2BP_REMAINING_CSV))
$(call dist-for-goals,droidcore-unbundled,$(MK2BP_REMAINING_CSV))
+.PHONY: mk2bp_remaining
+mk2bp_remaining: $(MK2BP_REMAINING_HTML) $(MK2BP_REMAINING_CSV)
+
# -----------------------------------------------------------------
# Modules use -Wno-error, or added default -Wall -Werror
WALL_WERROR := $(PRODUCT_OUT)/wall_werror.txt
@@ -1065,6 +1084,18 @@
endif
+# BOARD_KERNEL_MODULES_16K might contain duplicate modules under different paths,
+# for example foo/bar/wifi.ko and foo/wifi.ko. To avoid build issues, de-dup the
+# module list by basename first.
+BOARD_KERNEL_MODULES_16K := $(foreach \
+ pattern,\
+ $(sort $(foreach \
+ path,\
+ $(BOARD_KERNEL_MODULES_16K),\
+ %/$(notdir $(path)))\
+ ),\
+ $(firstword $(filter $(pattern),$(BOARD_KERNEL_MODULES_16K))) \
+)
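A worked example of the de-dup above, with a hypothetical BOARD_KERNEL_MODULES_16K := foo/bar/wifi.ko foo/wifi.ko foo/gpu.ko:

    # basename patterns (sorted):  %/gpu.ko %/wifi.ko
    # first match per pattern:     foo/gpu.ko foo/bar/wifi.ko
    # i.e. duplicates collapse to whichever path appears first in the original list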
# For non-GKI modules, strip them before install. As debug symbols take up
# significant space.
$(foreach \
@@ -1444,15 +1475,19 @@
INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
ifeq ($(BOARD_AVB_ENABLE),true)
-$(INSTALLED_BOOTIMAGE_TARGET): $(INTERNAL_PREBUILT_BOOTIMAGE) $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH)
+$(INSTALLED_BOOTIMAGE_TARGET): PRIVATE_WORKING_DIR := $(call intermediates-dir-for,PACKAGING,prebuilt_bootimg)
+$(INSTALLED_BOOTIMAGE_TARGET): $(INTERNAL_PREBUILT_BOOTIMAGE) $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH) $(UNPACK_BOOTIMG)
cp $(INTERNAL_PREBUILT_BOOTIMAGE) $@
+ $(UNPACK_BOOTIMG) --boot_img $(INTERNAL_PREBUILT_BOOTIMAGE) --out $(PRIVATE_WORKING_DIR)
chmod +w $@
$(AVBTOOL) add_hash_footer \
--image $@ \
+ --salt `sha256sum $(PRIVATE_WORKING_DIR)/kernel | cut -d " " -f 1` \
$(call get-partition-size-argument,$(BOARD_BOOTIMAGE_PARTITION_SIZE)) \
--partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
$(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
+
$(call declare-container-license-metadata,$(INSTALLED_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",bool)
$(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(INTERNAL_PREBUILT_BOOTIMAGE),$(PRODUCT_OUT)/:/)
@@ -6320,7 +6355,7 @@
$(INSTALLED_MISC_INFO_TARGET) \
$(INSTALLED_FASTBOOT_INFO_TARGET) \
$(APKCERTS_FILE) \
- $(SOONG_APEX_KEYS_FILE) \
+ $(APEX_KEYS_FILE) \
$(SOONG_ZIP) \
$(HOST_OUT_EXECUTABLES)/fs_config \
$(HOST_OUT_EXECUTABLES)/map_file_generator \
@@ -6531,7 +6566,7 @@
@# build them.
$(hide) mkdir -p $(zip_root)/META
$(hide) cp $(APKCERTS_FILE) $(zip_root)/META/apkcerts.txt
- $(hide) cp $(SOONG_APEX_KEYS_FILE) $(zip_root)/META/apexkeys.txt
+ $(hide) cp $(APEX_KEYS_FILE) $(zip_root)/META/apexkeys.txt
ifneq ($(tool_extension),)
$(hide) cp $(PRIVATE_TOOL_EXTENSION) $(zip_root)/META/
endif
@@ -6794,7 +6829,7 @@
$(INSTALLED_MISC_INFO_TARGET) \
$(INSTALLED_FASTBOOT_INFO_TARGET) \
$(APKCERTS_FILE) \
- $(SOONG_APEX_KEYS_FILE) \
+ $(APEX_KEYS_FILE) \
$(HOST_OUT_EXECUTABLES)/fs_config \
$(HOST_OUT_EXECUTABLES)/map_file_generator \
$(ADD_IMG_TO_TARGET_FILES) \
@@ -7676,7 +7711,7 @@
# -----------------------------------------------------------------
# Extract platform fonts used in Layoutlib
-include $(BUILD_SYSTEM)/layoutlib_fonts.mk
+include $(BUILD_SYSTEM)/layoutlib_data.mk
# -----------------------------------------------------------------
diff --git a/core/all_versions.bzl b/core/all_versions.bzl
deleted file mode 100644
index 33da673..0000000
--- a/core/all_versions.bzl
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (C) 2023 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-_all_versions = ["OPR1", "OPD1", "OPD2", "OPM1", "OPM2", "PPR1", "PPD1", "PPD2", "PPM1", "PPM2", "QPR1"] + [
- version + subversion
- for version in ["Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"]
- for subversion in ["P1A", "P1B", "P2A", "P2B", "D1A", "D1B", "D2A", "D2B", "Q1A", "Q1B", "Q2A", "Q2B", "Q3A", "Q3B"]
-]
-
-variables_to_export_to_make = {
- "ALL_VERSIONS": _all_versions,
-}
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 3313b5f..f96504a 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -124,11 +124,6 @@
include $(BUILD_SYSTEM)/local_systemsdk.mk
include $(BUILD_SYSTEM)/local_current_sdk.mk
-my_module_tags := $(LOCAL_MODULE_TAGS)
-ifeq ($(my_host_cross),true)
- my_module_tags :=
-endif
-
# Ninja has an implicit dependency on the command being run, and kati will
# regenerate the ninja manifest if any read makefile changes, so there is no
# need to have dependencies on makefiles.
@@ -148,46 +143,13 @@
## Validate and define fallbacks for input LOCAL_* variables.
###########################################################
-## Dump a .csv file of all modules and their tags
-#ifneq ($(tag-list-first-time),false)
-#$(shell rm -f tag-list.csv)
-#tag-list-first-time := false
-#endif
-#$(shell echo $(lastword $(filter-out config/% out/%,$(MAKEFILE_LIST))),$(LOCAL_MODULE),$(strip $(LOCAL_MODULE_CLASS)),$(subst $(space),$(comma),$(sort $(my_module_tags))) >> tag-list.csv)
-
LOCAL_UNINSTALLABLE_MODULE := $(strip $(LOCAL_UNINSTALLABLE_MODULE))
-my_module_tags := $(sort $(my_module_tags))
-ifeq (,$(my_module_tags))
- my_module_tags := optional
-endif
-
-# User tags are not allowed anymore. Fail early because it will not be installed
-# like it used to be.
-ifneq ($(filter $(my_module_tags),user),)
- $(warning *** Module name: $(LOCAL_MODULE))
- $(warning *** Makefile location: $(LOCAL_MODULE_MAKEFILE))
- $(warning * )
- $(warning * Module is attempting to use the 'user' tag. This)
- $(warning * used to cause the module to be installed automatically.)
- $(warning * Now, the module must be listed in the PRODUCT_PACKAGES)
- $(warning * section of a product makefile to have it installed.)
- $(warning * )
- $(error user tag detected on module.)
-endif
-
-my_bad_module_tags := $(filter eng debug,$(my_module_tags))
-ifdef my_bad_module_tags
- ifeq (true,$(LOCAL_UNINSTALLABLE_MODULE))
- $(call pretty-warning,LOCAL_MODULE_TAGS := $(my_bad_module_tags) does not do anything for uninstallable modules)
- endif
- $(call pretty-error,LOCAL_MODULE_TAGS := $(my_bad_module_tags) is obsolete. See $(CHANGES_URL)#LOCAL_MODULE_TAGS)
-endif
# Only the tags mentioned in this test are expected to be set by module
# makefiles. Anything else is either a typo or a source of unexpected
# behaviors.
-ifneq ($(filter-out tests optional samples,$(my_module_tags)),)
-$(call pretty-error,unusual tags: $(filter-out tests optional samples,$(my_module_tags)))
+ifneq ($(filter-out tests optional samples,$(LOCAL_MODULE_TAGS)),)
+$(call pretty-error,unusual tags: $(filter-out tests optional samples,$(LOCAL_MODULE_TAGS)))
endif
LOCAL_MODULE_CLASS := $(strip $(LOCAL_MODULE_CLASS))
@@ -240,7 +202,7 @@
else
# The definition of should-install-to-system will be different depending
# on which goal (e.g., sdk or just droid) is being built.
- partition_tag := $(if $(call should-install-to-system,$(my_module_tags)),,_DATA)
+ partition_tag := $(if $(call should-install-to-system,$(LOCAL_MODULE_TAGS)),,_DATA)
actual_partition_tag := $(if $(partition_tag),data,system)
endif
endif
@@ -252,7 +214,7 @@
LOCAL_COMPATIBILITY_SUITE := null-suite
endif
ifneq ($(filter APPS, $(LOCAL_MODULE_CLASS)),)
- ifneq ($(filter $(my_module_tags),tests),)
+ ifneq ($(filter $(LOCAL_MODULE_TAGS),tests),)
LOCAL_COMPATIBILITY_SUITE := null-suite
endif
endif
@@ -947,7 +909,7 @@
ALL_MODULES.$(my_register_name).PATH := \
$(ALL_MODULES.$(my_register_name).PATH) $(LOCAL_PATH)
ALL_MODULES.$(my_register_name).TAGS := \
- $(ALL_MODULES.$(my_register_name).TAGS) $(my_module_tags)
+ $(ALL_MODULES.$(my_register_name).TAGS) $(LOCAL_MODULE_TAGS)
ALL_MODULES.$(my_register_name).CHECKED := \
$(ALL_MODULES.$(my_register_name).CHECKED) $(my_checked_module)
ALL_MODULES.$(my_register_name).BUILT := \
@@ -1180,6 +1142,7 @@
ALL_MODULES.$(my_register_name).TEST_MAINLINE_MODULES := $(LOCAL_TEST_MAINLINE_MODULES)
ifndef LOCAL_IS_HOST_MODULE
ALL_MODULES.$(my_register_name).FILE_CONTEXTS := $(LOCAL_FILE_CONTEXTS)
+ALL_MODULES.$(my_register_name).APEX_KEYS_FILE := $(LOCAL_APEX_KEY_PATH)
endif
ifdef LOCAL_IS_UNIT_TEST
ALL_MODULES.$(my_register_name).IS_UNIT_TEST := $(LOCAL_IS_UNIT_TEST)
@@ -1209,20 +1172,6 @@
$(LOCAL_JAVA_LIBRARIES) \
$(LOCAL_JNI_SHARED_LIBRARIES))
-license_files := $(call find-parent-file,$(LOCAL_PATH),MODULE_LICENSE*)
-ALL_DEPS.$(LOCAL_MODULE).LICENSE := $(sort $(ALL_DEPS.$(LOCAL_MODULE).LICENSE) $(license_files))
-
-###########################################################
-## Take care of my_module_tags
-###########################################################
-
-# Keep track of all the tags we've seen.
-ALL_MODULE_TAGS := $(sort $(ALL_MODULE_TAGS) $(my_module_tags))
-
-# Add this module name to the tag list of each specified tag.
-$(foreach tag,$(filter-out optional,$(my_module_tags)),\
- $(eval ALL_MODULE_NAME_TAGS.$(tag) := $$(ALL_MODULE_NAME_TAGS.$(tag)) $(my_register_name)))
-
###########################################################
## umbrella targets used to verify builds
###########################################################
@@ -1249,7 +1198,7 @@
ifdef j_or_n
$(j_or_n) $(h_or_t) $(j_or_n)-$(h_or_hc_or_t) : $(my_checked_module)
-ifneq (,$(filter $(my_module_tags),tests))
+ifneq (,$(filter $(LOCAL_MODULE_TAGS),tests))
$(j_or_n)-$(h_or_t)-tests $(j_or_n)-tests $(h_or_t)-tests : $(my_checked_module)
endif
$(LOCAL_MODULE)-$(h_or_hc_or_t) : $(my_all_targets)
diff --git a/core/binary.mk b/core/binary.mk
index e2e5be4..4c68ba7 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -354,17 +354,15 @@
# MinGW spits out warnings about -fPIC even for -fpie?!) being ignored because
# all code is position independent, and then those warnings get promoted to
# errors.
-ifneq ($(LOCAL_NO_PIC),true)
- ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
- my_cflags += -fPIE
- ifndef BUILD_HOST_static
- ifneq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
- my_ldflags += -pie
- endif
+ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+ my_cflags += -fPIE
+ ifndef BUILD_HOST_static
+ ifneq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
+ my_ldflags += -pie
endif
- else
- my_cflags += -fPIC
endif
+else
+ my_cflags += -fPIC
endif
ifdef LOCAL_IS_HOST_MODULE
@@ -1437,17 +1435,6 @@
$(call intermediates-dir-for, \
STATIC_LIBRARIES,$(lib),$(my_kind),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/$(lib)$(a_suffix))
-# We don't care about installed static libraries, since the
-# libraries have already been linked into the module at that point.
-# We do, however, care about the NOTICE files for any static
-# libraries that we use. (see notice_files.mk)
-installed_static_library_notice_file_targets := \
- $(foreach lib,$(my_static_libraries) $(my_whole_static_libraries), \
- NOTICE-$(if $(LOCAL_IS_HOST_MODULE),HOST$(if $(my_host_cross),_CROSS,),TARGET)-STATIC_LIBRARIES-$(lib))
-
-$(notice_target): | $(installed_static_library_notice_file_targets)
-$(LOCAL_INSTALLED_MODULE): | $(notice_target)
-
# Default is -fno-rtti.
ifeq ($(strip $(LOCAL_RTTI_FLAG)),)
LOCAL_RTTI_FLAG := -fno-rtti
diff --git a/core/cc_prebuilt_internal.mk b/core/cc_prebuilt_internal.mk
index 2de4115..000159a 100644
--- a/core/cc_prebuilt_internal.mk
+++ b/core/cc_prebuilt_internal.mk
@@ -56,9 +56,6 @@
ifeq ($(filter SHARED_LIBRARIES EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
$(call pretty-error,Can strip/pack only shared libraries or executables)
endif
- ifneq ($(LOCAL_PREBUILT_STRIP_COMMENTS),)
- $(call pretty-error,Cannot strip/pack scripts)
- endif
# Set the arch-specific variables to set up the strip rules
LOCAL_STRIP_MODULE_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH) := $(my_strip_module)
include $(BUILD_SYSTEM)/dynamic_binary.mk
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 409e559..b73e9b4 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -5,7 +5,6 @@
# '',true
LOCAL_2ND_ARCH_VAR_PREFIX:=
LOCAL_32_BIT_ONLY:=
-LOCAL_AAPT2_ONLY:=
LOCAL_AAPT_FLAGS:=
LOCAL_AAPT_INCLUDE_ALL_RESOURCES:=
LOCAL_AAPT_NAMESPACES:=
@@ -13,14 +12,11 @@
LOCAL_ADDITIONAL_CERTIFICATES:=
LOCAL_ADDITIONAL_CHECKED_MODULE:=
LOCAL_ADDITIONAL_DEPENDENCIES:=
-LOCAL_ADDITIONAL_HTML_DIR:=
-LOCAL_ADDITIONAL_JAVA_DIR:=
LOCAL_AIDL_INCLUDES:=
LOCAL_ALLOW_UNDEFINED_SYMBOLS:=
LOCAL_ANNOTATION_PROCESSORS:=
LOCAL_ANNOTATION_PROCESSOR_CLASSES:=
-LOCAL_APIDIFF_NEWAPI:=
-LOCAL_APIDIFF_OLDAPI:=
+LOCAL_APEX_KEY_PATH:=
LOCAL_APK_LIBRARIES:=
LOCAL_APK_SET_INSTALL_FILE:=
LOCAL_APKCERTS_FILE:=
@@ -53,8 +49,6 @@
LOCAL_CPPFLAGS:=
LOCAL_CPP_STD:=
LOCAL_C_STD:=
-LOCAL_CTS_TEST_PACKAGE:=
-LOCAL_CTS_TEST_RUNNER:=
LOCAL_CXX:=
LOCAL_CXX_STL := default
LOCAL_DEX_PREOPT_APP_IMAGE:=
@@ -73,21 +67,11 @@
LOCAL_DPI_VARIANTS:=
LOCAL_DROIDDOC_ANNOTATIONS_ZIP :=
LOCAL_DROIDDOC_API_VERSIONS_XML :=
-LOCAL_DROIDDOC_ASSET_DIR:=
-LOCAL_DROIDDOC_CUSTOM_ASSET_DIR:=
-LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR:=
LOCAL_DROIDDOC_DOC_ZIP :=
-LOCAL_DROIDDOC_HTML_DIR:=
LOCAL_DROIDDOC_METADATA_ZIP:=
-LOCAL_DROIDDOC_OPTIONS:=
-LOCAL_DROIDDOC_SOURCE_PATH:=
-LOCAL_DROIDDOC_STUB_OUT_DIR:=
LOCAL_DROIDDOC_STUBS_SRCJAR :=
-LOCAL_DROIDDOC_TEMPLATE_DIR:=
-LOCAL_DROIDDOC_USE_STANDARD_DOCLET:=
LOCAL_DX_FLAGS:=
LOCAL_DYLIB_LIBRARIES:=
-LOCAL_EMMA_COVERAGE_FILTER:=
LOCAL_EMMA_INSTRUMENT:=
LOCAL_ENFORCE_USES_LIBRARIES:=
LOCAL_ERROR_PRONE_FLAGS:=
@@ -128,7 +112,6 @@
LOCAL_INSTALLED_MODULE:=
LOCAL_INSTALLED_MODULE_STEM:=
LOCAL_INSTRUMENTATION_FOR:=
-LOCAL_INTERMEDIATE_SOURCE_DIR:=
LOCAL_INTERMEDIATE_SOURCES:=
LOCAL_INTERMEDIATE_TARGETS:=
LOCAL_IS_FUZZ_TARGET:=
@@ -136,15 +119,8 @@
LOCAL_IS_RUNTIME_RESOURCE_OVERLAY:=
LOCAL_IS_UNIT_TEST:=
LOCAL_TEST_OPTIONS_TAGS:=
-LOCAL_JACK_CLASSPATH:=
LOCAL_JACK_COVERAGE_EXCLUDE_FILTER:=
LOCAL_JACK_COVERAGE_INCLUDE_FILTER:=
-# '' (ie disabled), disabled, full, incremental, javac_frontend
-LOCAL_JACK_ENABLED:=$(DEFAULT_JACK_ENABLED)
-LOCAL_JACK_FLAGS:=
-LOCAL_JACK_PLUGIN:=
-LOCAL_JACK_PLUGIN_PATH:=
-LOCAL_JACK_PROGUARD_FLAGS:=
LOCAL_JAR_EXCLUDE_FILES:=
LOCAL_JAR_EXCLUDE_PACKAGES:=
LOCAL_JARJAR_RULES:=
@@ -157,7 +133,6 @@
LOCAL_JAVA_LIBRARIES:=
LOCAL_JAVA_RESOURCE_DIRS:=
LOCAL_JAVA_RESOURCE_FILES:=
-LOCAL_JETIFIER_ENABLED:=
LOCAL_JNI_SHARED_LIBRARIES:=
LOCAL_JNI_SHARED_LIBRARIES_ABI:=
LOCAL_CERTIFICATE_LINEAGE:=
@@ -199,10 +174,8 @@
LOCAL_NDK_VERSION:=current
LOCAL_NO_CRT:=
LOCAL_NO_DEFAULT_COMPILER_FLAGS:=
-LOCAL_NO_FPIE :=
LOCAL_NO_LIBCRT_BUILTINS:=
LOCAL_NO_NOTICE_FILE:=
-LOCAL_NO_PIC:=
LOCAL_NOSANITIZE:=
LOCAL_NO_STANDARD_LIBRARIES:=
LOCAL_NO_STATIC_ANALYZER:=
@@ -228,7 +201,6 @@
LOCAL_PREBUILT_MODULE_FILE:=
LOCAL_PREBUILT_OBJ_FILES:=
LOCAL_PREBUILT_STATIC_JAVA_LIBRARIES:=
-LOCAL_PREBUILT_STRIP_COMMENTS:=
LOCAL_USE_EMBEDDED_DEX:=
LOCAL_USE_EMBEDDED_NATIVE_LIBS:=
LOCAL_PRESUBMIT_DISABLED:=
@@ -264,7 +236,6 @@
LOCAL_RES_LIBRARIES:=
LOCAL_RESOURCE_DIR:=
LOCAL_RLIB_LIBRARIES:=
-LOCAL_RMTYPEDEFS:=
LOCAL_ROTATION_MIN_SDK_VERSION:=
LOCAL_RUNTIME_LIBRARIES:=
LOCAL_RRO_THEME:=
@@ -338,16 +309,10 @@
LOCAL_VENDOR_MODULE:=
LOCAL_VINTF_FRAGMENTS:=
LOCAL_VNDK_DEPEND_ON_CORE_VARIANT:=
-LOCAL_VTSC_FLAGS:=
-LOCAL_VTS_INCLUDES:=
-LOCAL_VTS_MODE:=
LOCAL_WARNINGS_ENABLE:=
LOCAL_WHOLE_STATIC_LIBRARIES:=
LOCAL_YACCFLAGS:=
LOCAL_CHECK_ELF_FILES:=
-# TODO: deprecate, it does nothing
-OVERRIDE_BUILT_MODULE_PATH:=
-
# arch specific variables
LOCAL_ASFLAGS_$(TARGET_ARCH):=
LOCAL_CFLAGS_$(TARGET_ARCH):=
diff --git a/core/config.mk b/core/config.mk
index 196f07c..c747fd5 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -722,6 +722,7 @@
BUILD_SUPER_IMAGE := $(BOARD_CUSTOM_BUILD_SUPER_IMAGE)
endif
IMG_FROM_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/img_from_target_files$(HOST_EXECUTABLE_SUFFIX)
+UNPACK_BOOTIMG := $(HOST_OUT_EXECUTABLES)/unpack_bootimg
MAKE_RECOVERY_PATCH := $(HOST_OUT_EXECUTABLES)/make_recovery_patch$(HOST_EXECUTABLE_SUFFIX)
OTA_FROM_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/ota_from_target_files$(HOST_EXECUTABLE_SUFFIX)
OTA_FROM_RAW_IMG := $(HOST_OUT_EXECUTABLES)/ota_from_raw_img$(HOST_EXECUTABLE_SUFFIX)
diff --git a/core/definitions.mk b/core/definitions.mk
index ebc6c6e..44643d9 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -48,20 +48,6 @@
# set of installed targets.
ALL_DEFAULT_INSTALLED_MODULES:=
-# The list of tags that have been defined by
-# LOCAL_MODULE_TAGS. Each word in this variable maps
-# to a corresponding ALL_MODULE_TAGS.<tagname> variable
-# that contains all of the INSTALLED_MODULEs with that tag.
-ALL_MODULE_TAGS:=
-
-# Similar to ALL_MODULE_TAGS, but contains the short names
-# of all targets for a particular tag. The top-level variable
-# won't have the list of tags; ust ALL_MODULE_TAGS to get
-# the list of all known tags. (This means that this variable
-# will always be empty; it's just here as a placeholder for
-# its sub-variables.)
-ALL_MODULE_NAME_TAGS:=
-
# Full path to all asm, C, C++, lex and yacc generated C files.
# These all have an order-only dependency on the copied headers
ALL_C_CPP_ETC_OBJECTS:=
@@ -1295,38 +1281,6 @@
endef
###########################################################
-## MODULE_TAG set operations
-###########################################################
-
-# Given a list of tags, return the targets that specify
-# any of those tags.
-# $(1): tag list
-define modules-for-tag-list
-$(sort $(foreach tag,$(1),$(foreach m,$(ALL_MODULE_NAME_TAGS.$(tag)),$(ALL_MODULES.$(m).INSTALLED))))
-endef
-
-# Same as modules-for-tag-list, but operates on
-# ALL_MODULE_NAME_TAGS.
-# $(1): tag list
-define module-names-for-tag-list
-$(sort $(foreach tag,$(1),$(ALL_MODULE_NAME_TAGS.$(tag))))
-endef
-
-# Given an accept and reject list, find the matching
-# set of targets. If a target has multiple tags and
-# any of them are rejected, the target is rejected.
-# Reject overrides accept.
-# $(1): list of tags to accept
-# $(2): list of tags to reject
-#TODO(dbort): do $(if $(strip $(1)),$(1),$(ALL_MODULE_TAGS))
-#TODO(jbq): as of 20100106 nobody uses the second parameter
-define get-tagged-modules
-$(filter-out \
- $(call modules-for-tag-list,$(2)), \
- $(call modules-for-tag-list,$(1)))
-endef
-
-###########################################################
## Append a leaf to a base path. Properly deals with
## base paths ending in /.
##
@@ -3289,14 +3243,6 @@
$(hide) cp -p "$<" "$@"
endef
-# The same as copy-file-to-target, but strip out "# comment"-style
-# comments (for config files and such).
-define copy-file-to-target-strip-comments
-@mkdir -p $(dir $@)
-$(hide) rm -f $@
-$(hide) sed -e 's/#.*$$//' -e 's/[ \t]*$$//' -e '/^$$/d' < $< > $@
-endef
-
# The same as copy-file-to-target, but don't preserve
# the old modification time.
define copy-file-to-new-target
@@ -3331,12 +3277,6 @@
$(copy-file-to-target)
endef
-# Copy a prebuilt file to a target location, stripping "# comment" comments.
-define transform-prebuilt-to-target-strip-comments
-@echo "$($(PRIVATE_PREFIX)DISPLAY) Prebuilt: $(PRIVATE_MODULE) ($@)"
-$(copy-file-to-target-strip-comments)
-endef
-
# Copy a prebuilt file to a target location, but preserve symlinks rather than
# dereference them.
define copy-or-link-prebuilt-to-target
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 091c2e3..7ddbf32 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -51,44 +51,15 @@
include $(BUILD_SYSTEM)/release_config.mk
# ---------------------------------------------------------------
-# defines ALL_VERSIONS
-$(call run-starlark,build/make/core/all_versions.bzl)
+# Set up version information
+include $(BUILD_SYSTEM)/version_util.mk
-# Filters ALL_VERSIONS down to the range [$1, $2], and errors if $1 > $2 or $3 is
-# not in [$1, $2]
-# $(1): min platform version
-# $(2): max platform version
-# $(3): default platform version
-define allowed-platform-versions
-$(strip \
- $(if $(filter $(ALL_VERSIONS),$(1)),,
- $(error Invalid MIN_PLATFORM_VERSION '$(1)'))
- $(if $(filter $(ALL_VERSIONS),$(2)),,
- $(error Invalid MAX_PLATFORM_VERSION '$(2)'))
- $(if $(filter $(ALL_VERSIONS),$(3)),,
- $(error Invalid RELEASE_PLATFORM_VERSION '$(3)'))
+# This used to be calculated, but is now fixed and not expected
+# to change over time anymore. New code attempting to use a
+# variable like IS_AT_LEAST_* should instead use a
+# build system flag.
- $(eval allowed_versions_ := $(call find_and_earlier,$(ALL_VERSIONS),$(2)))
-
- $(if $(filter $(allowed_versions_),$(1)),,
- $(error MIN_PLATFORM_VERSION '$(1)' must be before MAX_PLATFORM_VERSION '$(2)'))
-
- $(eval allowed_versions_ := $(1) \
- $(filter-out $(call find_and_earlier,$(allowed_versions_),$(1)),$(allowed_versions_)))
-
- $(if $(filter $(allowed_versions_),$(3)),,
- $(error RELEASE_PLATFORM_VERSION '$(3)' must be between MIN_PLATFORM_VERSION '$(1)' and MAX_PLATFORM_VERSION '$(2)'))
-
- $(allowed_versions_))
-endef
-
-#$(warning $(call allowed-platform-versions,OPR1,PPR1,OPR1))
-#$(warning $(call allowed-platform-versions,OPM1,PPR1,OPR1))
-
-# Set up version information.
-include $(BUILD_SYSTEM)/version_defaults.mk
-
-ENABLED_VERSIONS := $(call find_and_earlier,$(ALL_VERSIONS),$(TARGET_PLATFORM_VERSION))
+ENABLED_VERSIONS := "OPR1 OPD1 OPD2 OPM1 OPM2 PPR1 PPD1 PPD2 PPM1 PPM2 QPR1 QP1A QP1B QP2A QP2B QD1A QD1B QD2A QD2B QQ1A QQ1B QQ2A QQ2B QQ3A QQ3B RP1A RP1B RP2A RP2B RD1A RD1B RD2A RD2B RQ1A RQ1B RQ2A RQ2B RQ3A RQ3B SP1A SP1B SP2A SP2B SD1A SD1B SD2A SD2B SQ1A SQ1B SQ2A SQ2B SQ3A SQ3B TP1A TP1B TP2A TP2B TD1A TD1B TD2A TD2B TQ1A TQ1B TQ2A TQ2B TQ3A TQ3B UP1A UP1B UP2A UP2B UD1A UD1B UD2A UD2B UQ1A UQ1B UQ2A UQ2B UQ3A UQ3B"
$(foreach v,$(ENABLED_VERSIONS), \
$(eval IS_AT_LEAST_$(v) := true))
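Consumers of the per-version flags keep working unchanged, since the foreach above still defines them; an illustrative (hypothetical) consumer:

    ifeq ($(IS_AT_LEAST_UP1A),true)
      # code paths that require UpsideDownCake or newer
    endif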
diff --git a/core/host_java_library_common.mk b/core/host_java_library_common.mk
index 0e62f60..006e6ec 100644
--- a/core/host_java_library_common.mk
+++ b/core/host_java_library_common.mk
@@ -46,5 +46,4 @@
endif
endif
-LOCAL_INTERMEDIATE_SOURCE_DIR := $(intermediates.COMMON)/src
LOCAL_JAVA_LIBRARIES := $(sort $(LOCAL_JAVA_LIBRARIES))
diff --git a/core/instrumentation_test_config_template.xml b/core/instrumentation_test_config_template.xml
index 379126c..9dfc001 100644
--- a/core/instrumentation_test_config_template.xml
+++ b/core/instrumentation_test_config_template.xml
@@ -24,7 +24,7 @@
</target_preparer>
<test class="com.android.tradefed.testtype.{TEST_TYPE}" >
- {EXTRA_TEST_RUNNER_CONFIGS}<option name="package" value="{PACKAGE}" />
+ <option name="package" value="{PACKAGE}" />
<option name="runner" value="{RUNNER}" />
</test>
</configuration>
diff --git a/core/java.mk b/core/java.mk
index 842fcbf..5fbc916 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -83,8 +83,6 @@
$(full_classes_stubs_jar) \
$(java_source_list_file)
-LOCAL_INTERMEDIATE_SOURCE_DIR := $(intermediates.COMMON)/src
-
###########################################################
## AIDL: Compile .aidl files to .java
###########################################################
diff --git a/core/java_common.mk b/core/java_common.mk
index a5ed057..ec04718 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -194,7 +194,7 @@
######################################
## PRIVATE java vars
# LOCAL_SOURCE_FILES_ALL_GENERATED is set only if the module does not have static source files,
-# but generated source files in its LOCAL_INTERMEDIATE_SOURCE_DIR.
+# but only generated source files.
# You have to set up the dependency in some other way.
need_compile_java := $(strip $(all_java_sources)$(LOCAL_SRCJARS)$(all_res_assets)$(java_resource_sources))$(LOCAL_STATIC_JAVA_LIBRARIES)$(filter true,$(LOCAL_SOURCE_FILES_ALL_GENERATED))
ifdef need_compile_java
@@ -238,8 +238,6 @@
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JAVA_SOURCES := $(all_java_sources)
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JAVA_SOURCE_LIST := $(java_source_list_file)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RMTYPEDEFS := $(LOCAL_RMTYPEDEFS)
-
# Quickly check class path vars.
disallowed_deps := $(foreach sdk,$(TARGET_AVAILABLE_SDK_VERSIONS),$(call resolve-prebuilt-sdk-module,$(sdk)))
disallowed_deps += $(foreach sdk,$(TARGET_AVAILABLE_SDK_VERSIONS),\
@@ -492,20 +490,6 @@
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ALL_JAVA_HEADER_LIBRARIES := $(full_java_header_libs)
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SHARED_JAVA_HEADER_LIBRARIES := $(full_shared_java_header_libs)
-ALL_MODULES.$(my_register_name).INTERMEDIATE_SOURCE_DIR := \
- $(ALL_MODULES.$(my_register_name).INTERMEDIATE_SOURCE_DIR) $(LOCAL_INTERMEDIATE_SOURCE_DIR)
-
-
-##########################################################
-# Copy NOTICE files of transitive static dependencies
-# Don't do this in mm, since many of the targets won't exist.
-installed_static_library_notice_file_targets := \
- $(foreach lib,$(LOCAL_STATIC_JAVA_LIBRARIES), \
- NOTICE-$(if $(LOCAL_IS_HOST_MODULE),HOST$(if $(my_host_cross),_CROSS,),TARGET)-JAVA_LIBRARIES-$(lib))
-
-$(notice_target): | $(installed_static_library_notice_file_targets)
-$(LOCAL_INSTALLED_MODULE): | $(notice_target)
-
###########################################################
# Verify that all libraries are safe to use
###########################################################
diff --git a/core/java_prebuilt_internal.mk b/core/java_prebuilt_internal.mk
index be733ff..46393ac 100644
--- a/core/java_prebuilt_internal.mk
+++ b/core/java_prebuilt_internal.mk
@@ -115,11 +115,6 @@
ifneq ($(my_src_aar),)
# This is .aar file, archive of classes.jar and Android resources.
-# run Jetifier if needed
-LOCAL_JETIFIER_INPUT_FILE := $(my_src_aar)
-include $(BUILD_SYSTEM)/jetifier.mk
-my_src_aar := $(LOCAL_JETIFIER_OUTPUT_FILE)
-
my_src_jar := $(intermediates.COMMON)/aar/classes.jar
my_src_proguard_options := $(intermediates.COMMON)/aar/proguard.txt
my_src_android_manifest := $(intermediates.COMMON)/aar/AndroidManifest.xml
@@ -137,13 +132,6 @@
$(eval $(call copy-one-file,$(my_src_android_manifest),$(my_prebuilt_android_manifest)))
$(call add-dependency,$(LOCAL_BUILT_MODULE),$(my_prebuilt_android_manifest))
-else
-
-# run Jetifier if needed
-LOCAL_JETIFIER_INPUT_FILE := $(my_src_jar)
-include $(BUILD_SYSTEM)/jetifier.mk
-my_src_jar := $(LOCAL_JETIFIER_OUTPUT_FILE)
-
endif
$(common_classes_jar) : $(my_src_jar)
diff --git a/core/jetifier.mk b/core/jetifier.mk
deleted file mode 100644
index fff4230..0000000
--- a/core/jetifier.mk
+++ /dev/null
@@ -1,34 +0,0 @@
-#
-# Copyright (C) 2018 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This file sets up the running of Jetifier
-
-# now add the rule to run jetifier
-ifeq ($(strip $(LOCAL_JETIFIER_ENABLED)),true)
- my_jetifier_input_path := $(LOCAL_JETIFIER_INPUT_FILE)
- my_files := $(intermediates.COMMON)/jetifier
- my_jetifier_output_path := $(my_files)/jetified-$(notdir $(my_jetifier_input_path))
-
-$(my_jetifier_output_path) : $(my_jetifier_input_path) $(JETIFIER)
- rm -rf $@
- $(JETIFIER) -l error -o $@ -i $<
-
- LOCAL_JETIFIER_OUTPUT_FILE := $(my_jetifier_output_path)
- LOCAL_INTERMEDIATE_TARGETS += $(LOCAL_JETIFIER_OUTPUT_FILE)
-else
- LOCAL_JETIFIER_OUTPUT_FILE := $(LOCAL_JETIFIER_INPUT_FILE)
-endif
-
diff --git a/core/layoutlib_data.mk b/core/layoutlib_data.mk
new file mode 100644
index 0000000..e45f7ef
--- /dev/null
+++ b/core/layoutlib_data.mk
@@ -0,0 +1,153 @@
+# Data files for layoutlib
+
+FONT_TEMP := $(call intermediates-dir-for,PACKAGING,fonts,HOST,COMMON)
+
+# The font configuration files - system_fonts.xml, fallback_fonts.xml etc.
+font_config := $(sort $(wildcard frameworks/base/data/fonts/*.xml))
+font_config := $(addprefix $(FONT_TEMP)/, $(notdir $(font_config)))
+
+$(font_config): $(FONT_TEMP)/%.xml: \
+ frameworks/base/data/fonts/%.xml
+ $(hide) mkdir -p $(dir $@)
+ $(hide) cp -vf $< $@
+
+# List of fonts on the device that we want to ship. This is all .ttf, .ttc and .otf fonts.
+fonts_device := $(filter $(TARGET_OUT)/fonts/%.ttf $(TARGET_OUT)/fonts/%.ttc $(TARGET_OUT)/fonts/%.otf, $(INTERNAL_SYSTEMIMAGE_FILES))
+fonts_device := $(addprefix $(FONT_TEMP)/, $(notdir $(fonts_device)))
+
+# TODO: If the font file is a symlink, reuse the font renamed from the symlink
+# target.
+$(fonts_device): $(FONT_TEMP)/%: $(TARGET_OUT)/fonts/%
+ $(hide) mkdir -p $(dir $@)
+ $(hide) cp -vf $< $@
+
+KEYBOARD_TEMP := $(call intermediates-dir-for,PACKAGING,keyboards,HOST,COMMON)
+
+# The key character map files needed for supporting KeyEvent
+keyboards := $(sort $(wildcard frameworks/base/data/keyboards/*.kcm))
+keyboards := $(addprefix $(KEYBOARD_TEMP)/, $(notdir $(keyboards)))
+
+$(keyboards): $(KEYBOARD_TEMP)/%.kcm: frameworks/base/data/keyboards/%.kcm
+ $(hide) mkdir -p $(dir $@)
+ $(hide) cp -vf $< $@
+
+# List of all data files - font files, font configuration files, key character map files
+LAYOUTLIB_FILES := $(fonts_device) $(font_config) $(keyboards)
+
+.PHONY: layoutlib layoutlib-tests
+layoutlib layoutlib-tests: $(LAYOUTLIB_FILES)
+
+$(call dist-for-goals, layoutlib, $(foreach m,$(fonts_device), $(m):layoutlib_native/fonts/$(notdir $(m))))
+$(call dist-for-goals, layoutlib, $(foreach m,$(font_config), $(m):layoutlib_native/fonts/$(notdir $(m))))
+$(call dist-for-goals, layoutlib, $(foreach m,$(keyboards), $(m):layoutlib_native/keyboards/$(notdir $(m))))
+
+FONT_TEMP :=
+font_config :=
+fonts_device :=
+FONT_FILES :=
+
+# The build steps for build.prop and layoutlib-res.zip below were moved here from release_layoutlib.sh
+# so that the SBOM of all platform-neutral artifacts and the Linux/Windows artifacts of layoutlib can be built in Make/Soong.
+# See go/layoutlib-sbom.
+
+# build.prop shipped with layoutlib
+LAYOUTLIB_BUILD_PROP := $(call intermediates-dir-for,PACKAGING,layoutlib-build-prop,HOST,COMMON)
+$(LAYOUTLIB_BUILD_PROP)/layoutlib-build.prop: $(INSTALLED_SDK_BUILD_PROP_TARGET)
+ rm -rf $@
+ cp $< $@
+ # Remove all the uncommon build properties
+ sed -i '/^ro\.\(build\|product\|config\|system\)/!d' $@
+ # Mark the build as layoutlib. This can be read at runtime by apps
+ sed -i 's|ro.product.brand=generic|ro.product.brand=studio|' $@
+ sed -i 's|ro.product.device=generic|ro.product.device=layoutlib|' $@
+
+$(call dist-for-goals,layoutlib,$(LAYOUTLIB_BUILD_PROP)/layoutlib-build.prop:layoutlib_native/build.prop)
+
+# Resource files from frameworks/base/core/res/res
+LAYOUTLIB_RES := $(call intermediates-dir-for,PACKAGING,layoutlib-res,HOST,COMMON)
+LAYOUTLIB_RES_FILES := $(shell find frameworks/base/core/res/res -type f -not -path 'frameworks/base/core/res/res/values-m[nc]c*' | sort)
+$(LAYOUTLIB_RES)/layoutlib-res.zip: $(SOONG_ZIP) $(HOST_OUT_EXECUTABLES)/aapt2 $(LAYOUTLIB_RES_FILES)
+ rm -rf $@
+ echo $(LAYOUTLIB_RES_FILES) > $(LAYOUTLIB_RES)/filelist.txt
+ $(SOONG_ZIP) -C frameworks/base/core/res -l $(LAYOUTLIB_RES)/filelist.txt -o $(LAYOUTLIB_RES)/temp.zip
+ rm -rf $(LAYOUTLIB_RES)/data && unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp.zip
+ rm -rf $(LAYOUTLIB_RES)/compiled && mkdir $(LAYOUTLIB_RES)/compiled && $(HOST_OUT_EXECUTABLES)/aapt2 compile $(LAYOUTLIB_RES)/data/res/**/*.9.png -o $(LAYOUTLIB_RES)/compiled
+ printf '<?xml version="1.0" encoding="utf-8"?>\n<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="com.google.android.layoutlib" />' > $(LAYOUTLIB_RES)/AndroidManifest.xml
+ $(HOST_OUT_EXECUTABLES)/aapt2 link -R $(LAYOUTLIB_RES)/compiled/* -o $(LAYOUTLIB_RES)/compiled.apk --manifest $(LAYOUTLIB_RES)/AndroidManifest.xml
+ rm -rf $(LAYOUTLIB_RES)/compiled_apk && unzip -q -d $(LAYOUTLIB_RES)/compiled_apk $(LAYOUTLIB_RES)/compiled.apk
+ for f in $(LAYOUTLIB_RES)/compiled_apk/res/*; do mv "$$f" "$${f/-v4/}";done
+ for f in $(LAYOUTLIB_RES)/compiled_apk/res/**/*.9.png; do mv "$$f" "$${f/.9.png/.compiled.9.png}";done
+ cp -r $(LAYOUTLIB_RES)/compiled_apk/res $(LAYOUTLIB_RES)/data
+ $(SOONG_ZIP) -C $(LAYOUTLIB_RES)/data -D $(LAYOUTLIB_RES)/data/res -o $@
+
+$(call dist-for-goals,layoutlib,$(LAYOUTLIB_RES)/layoutlib-res.zip:layoutlib_native/res.zip)
+
+# SBOM of layoutlib artifacts
+LAYOUTLIB_SBOM := $(call intermediates-dir-for,PACKAGING,layoutlib-sbom,HOST)
+_layoutlib_font_config_files := $(sort $(wildcard frameworks/base/data/fonts/*.xml))
+_layoutlib_fonts_files := $(filter $(TARGET_OUT)/fonts/%.ttf $(TARGET_OUT)/fonts/%.ttc $(TARGET_OUT)/fonts/%.otf, $(INTERNAL_SYSTEMIMAGE_FILES))
+_layoutlib_keyboard_files := $(sort $(wildcard frameworks/base/data/keyboards/*.kcm))
+
+# Find out files disted with layoutlib in Soong.
+### Filter out static libraries for Windows and files already handled in make.
+_layoutlib_filter_out_disted := $(addprefix layoutlib_native/,fonts/% keyboards/% build.prop res.zip windows/%.a)
+_layoutlib_files_disted_by_soong := \
+ $(strip \
+ $(foreach p,$(_all_dist_src_dst_pairs), \
+ $(if $(filter-out $(_layoutlib_filter_out_disted),$(filter layoutlib_native/% layoutlib.jar,$(call word-colon,2,$p))),$p)))
+
+$(LAYOUTLIB_SBOM)/sbom-metadata.csv:
+ rm -rf $@
+ echo installed_file,module_path,soong_module_type,is_prebuilt_make_module,product_copy_files,kernel_module_copy_files,is_platform_generated,build_output_path,static_libraries,whole_static_libraries,is_static_lib >> $@
+ echo build.prop,,,,,,Y,$(LAYOUTLIB_BUILD_PROP)/layoutlib-build.prop,,, >> $@
+
+ $(foreach f,$(_layoutlib_font_config_files),\
+ echo data/fonts/$(notdir $f),frameworks/base/data/fonts,prebuilt_etc,,,,,$f,,, >> $@; \
+ )
+
+ $(foreach f,$(_layoutlib_fonts_files), \
+ $(eval _module_name := $(ALL_INSTALLED_FILES.$f)) \
+ $(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH)))) \
+ $(eval _soong_module_type := $(strip $(sort $(ALL_MODULES.$(_module_name).SOONG_MODULE_TYPE)))) \
+ echo data/fonts/$(notdir $f),$(_module_path),$(_soong_module_type),,,,,$f,,, >> $@; \
+ )
+
+ $(foreach f,$(_layoutlib_keyboard_files), \
+ echo data/keyboards/$(notdir $f),frameworks/base/data/keyboards,prebuilt_etc,,,,,$f,,, >> $@; \
+ )
+
+ $(foreach f,$(_layoutlib_files_disted_by_soong), \
+ $(eval _prebuilt_module_file := $(call word-colon,1,$f)) \
+ $(eval _dist_file := $(call word-colon,2,$f)) \
+ $(eval _dist_file := $(patsubst data/windows/%,data/win/lib64/%,$(patsubst layoutlib_native/%,data/%,$(_dist_file)))) \
+ $(eval _dist_file := $(subst layoutlib.jar,data/layoutlib.jar,$(_dist_file))) \
+ $(eval _module_name := $(strip $(foreach m,$(ALL_MODULES),$(if $(filter $(_prebuilt_module_file),$(ALL_MODULES.$m.CHECKED)),$m)))) \
+ $(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH)))) \
+ $(eval _soong_module_type := $(strip $(sort $(ALL_MODULES.$(_module_name).SOONG_MODULE_TYPE)))) \
+ echo $(patsubst layoutlib_native/%,%,$(_dist_file)),$(_module_path),$(_soong_module_type),,,,,$(_prebuilt_module_file),,, >> $@; \
+ )
+
+ $(foreach f,$(LAYOUTLIB_RES_FILES), \
+ $(eval _path := $(subst frameworks/base/core/res,data,$f)) \
+ echo $(_path),,,,,,Y,$f,,, >> $@; \
+ )
+
+.PHONY: layoutlib-sbom
+layoutlib-sbom: $(LAYOUTLIB_SBOM)/layoutlib.spdx.json
+$(LAYOUTLIB_SBOM)/layoutlib.spdx.json: $(PRODUCT_OUT)/always_dirty_file.txt $(GEN_SBOM) $(LAYOUTLIB_SBOM)/sbom-metadata.csv $(_layoutlib_font_config_files) $(_layoutlib_fonts_files) $(LAYOUTLIB_BUILD_PROP)/layoutlib-build.prop $(_layoutlib_keyboard_files) $(LAYOUTLIB_RES_FILES)
+ rm -rf $@
+ $(GEN_SBOM) --output_file $@ --metadata $(LAYOUTLIB_SBOM)/sbom-metadata.csv --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --module_name "layoutlib" --json
+
+$(call dist-for-goals,layoutlib,$(LAYOUTLIB_SBOM)/layoutlib.spdx.json:layoutlib_native/sbom/layoutlib.spdx.json)
+
+# Generate SBOM of framework_res.jar that is created in release_layoutlib.sh.
+# The generated SBOM contains placeholders for release_layoutlib.sh to substitute; the placeholders include:
+# document name, document namespace, document creation info, organization and SHA1 value of framework_res.jar.
+GEN_SBOM_FRAMEWORK_RES := $(HOST_OUT_EXECUTABLES)/generate-sbom-framework_res
+.PHONY: layoutlib-framework_res-sbom
+layoutlib-framework_res-sbom: $(LAYOUTLIB_SBOM)/framework_res.jar.spdx.json
+$(LAYOUTLIB_SBOM)/framework_res.jar.spdx.json: $(LAYOUTLIB_SBOM)/layoutlib.spdx.json $(GEN_SBOM_FRAMEWORK_RES)
+ rm -rf $@
+ $(GEN_SBOM_FRAMEWORK_RES) --output_file $(LAYOUTLIB_SBOM)/framework_res.jar.spdx.json --layoutlib_sbom $(LAYOUTLIB_SBOM)/layoutlib.spdx.json
+
+$(call dist-for-goals,layoutlib,$(LAYOUTLIB_SBOM)/framework_res.jar.spdx.json:layoutlib_native/sbom/framework_res.jar.spdx.json)
\ No newline at end of file
diff --git a/core/layoutlib_fonts.mk b/core/layoutlib_fonts.mk
deleted file mode 100644
index d2a814f..0000000
--- a/core/layoutlib_fonts.mk
+++ /dev/null
@@ -1,35 +0,0 @@
-# Fonts for layoutlib
-
-FONT_TEMP := $(call intermediates-dir-for,PACKAGING,fonts,HOST,COMMON)
-
-# The font configuration files - system_fonts.xml, fallback_fonts.xml etc.
-font_config := $(sort $(wildcard frameworks/base/data/fonts/*.xml))
-font_config := $(addprefix $(FONT_TEMP)/, $(notdir $(font_config)))
-
-$(font_config): $(FONT_TEMP)/%.xml: \
- frameworks/base/data/fonts/%.xml
- $(hide) mkdir -p $(dir $@)
- $(hide) cp -vf $< $@
-
-# List of fonts on the device that we want to ship. This is all .ttf, .ttc and .otf fonts.
-fonts_device := $(filter $(TARGET_OUT)/fonts/%.ttf $(TARGET_OUT)/fonts/%.ttc $(TARGET_OUT)/fonts/%.otf, $(INTERNAL_SYSTEMIMAGE_FILES))
-fonts_device := $(addprefix $(FONT_TEMP)/, $(notdir $(fonts_device)))
-
-# TODO: If the font file is a symlink, reuse the font renamed from the symlink
-# target.
-$(fonts_device): $(FONT_TEMP)/%: $(TARGET_OUT)/fonts/%
- $(hide) mkdir -p $(dir $@)
- $(hide) cp -vf $< $@
-
-# List of all dependencies - all fonts and configuration files.
-FONT_FILES := $(fonts_device) $(font_config)
-
-.PHONY: layoutlib layoutlib-tests
-layoutlib layoutlib-tests: $(FONT_FILES)
-
-$(call dist-for-goals, layoutlib, $(foreach m,$(FONT_FILES), $(m):layoutlib_native/fonts/$(notdir $(m))))
-
-FONT_TEMP :=
-font_config :=
-fonts_device :=
-FONT_FILES :=
diff --git a/core/main.mk b/core/main.mk
index 7c25862..3f5f766 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -335,15 +335,14 @@
ro.build.ab_update=$(AB_OTA_UPDATER)
endif
-# Set ro.product.vndk.version to know the VNDK version required by product
-# modules. It uses the version in PRODUCT_PRODUCT_VNDK_VERSION. If the value
-# is "current", use PLATFORM_VNDK_VERSION.
-ifdef PRODUCT_PRODUCT_VNDK_VERSION
-ifeq ($(KEEP_VNDK),true)
-ifeq ($(PRODUCT_PRODUCT_VNDK_VERSION),current)
+# Set ro.product.vndk.version to PLATFORM_VNDK_VERSION only if
+# KEEP_VNDK is true, PRODUCT_PRODUCT_VNDK_VERSION is current and
+# PLATFORM_VNDK_VERSION is less than or equal to 35.
+# ro.product.vndk.version must be removed in all other and future builds.
+ifeq ($(KEEP_VNDK)|$(PRODUCT_PRODUCT_VNDK_VERSION),true|current)
+ifeq ($(call math_is_number,$(PLATFORM_VNDK_VERSION)),true)
+ifeq ($(call math_lt_or_eq,$(PLATFORM_VNDK_VERSION),35),true)
ADDITIONAL_PRODUCT_PROPERTIES += ro.product.vndk.version=$(PLATFORM_VNDK_VERSION)
-else
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.vndk.version=$(PRODUCT_PRODUCT_VNDK_VERSION)
endif
endif
endif
@@ -563,6 +562,7 @@
# sources or dependencies for these tools may be missing from the tree.
ifeq (,$(TARGET_BUILD_UNBUNDLED_IMAGE))
droid_targets : blueprint_tools
+checkbuild: blueprint_tests
endif
endif # dont_bother
@@ -1411,28 +1411,7 @@
# var to prevent Make from trying to make a sense of it.
_unused := $(call copy-many-files, $(sort $(ALL_COMPATIBILITY_DIST_FILES)))
-# Don't include any GNU General Public License shared objects or static
-# libraries in SDK images. GPL executables (not static/dynamic libraries)
-# are okay if they don't link against any closed source libraries (directly
-# or indirectly)
-
-# It's ok (and necessary) to build the host tools, but nothing that's
-# going to be installed on the target (including static libraries).
-
ifdef is_sdk_build
- target_gnu_MODULES := \
- $(filter \
- $(TARGET_OUT_INTERMEDIATES)/% \
- $(TARGET_OUT)/% \
- $(TARGET_OUT_DATA)/%, \
- $(sort $(call get-tagged-modules,gnu)))
- target_gnu_MODULES := $(filter-out $(TARGET_OUT_EXECUTABLES)/%,$(target_gnu_MODULES))
- target_gnu_MODULES := $(filter-out %/libopenjdkjvmti.so,$(target_gnu_MODULES))
- target_gnu_MODULES := $(filter-out %/libopenjdkjvmtid.so,$(target_gnu_MODULES))
- $(info Removing from sdk:)$(foreach d,$(target_gnu_MODULES),$(info : $(d)))
- modules_to_install := \
- $(filter-out $(target_gnu_MODULES),$(modules_to_install))
-
# Ensure every module listed in PRODUCT_PACKAGES* gets something installed
# TODO: Should we do this for all builds and not just the sdk?
dangling_modules :=
@@ -2020,13 +1999,6 @@
.PHONY: check-elf-files
check-elf-files:
-#xxx scrape this from ALL_MODULE_NAME_TAGS
-.PHONY: modules
-modules:
- @echo "Available sub-modules:"
- @echo "$(call module-names-for-tag-list,$(ALL_MODULE_TAGS))" | \
- tr -s ' ' '\n' | sort -u
-
.PHONY: dump-files
dump-files:
@echo "Target files for $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT) ($(INTERNAL_PRODUCT)):"
diff --git a/core/misc_prebuilt_internal.mk b/core/misc_prebuilt_internal.mk
index 921ea52..a562207 100644
--- a/core/misc_prebuilt_internal.mk
+++ b/core/misc_prebuilt_internal.mk
@@ -27,9 +27,6 @@
ifneq ($(filter init%rc,$(notdir $(LOCAL_INSTALLED_MODULE)))$(filter %/etc/init,$(dir $(LOCAL_INSTALLED_MODULE))),)
$(eval $(call copy-init-script-file-checked,$(my_prebuilt_src_file),$(LOCAL_BUILT_MODULE)))
-else ifneq ($(LOCAL_PREBUILT_STRIP_COMMENTS),)
-$(LOCAL_BUILT_MODULE) : $(my_prebuilt_src_file)
- $(transform-prebuilt-to-target-strip-comments)
else
$(LOCAL_BUILT_MODULE) : $(my_prebuilt_src_file)
$(transform-prebuilt-to-target)
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index 2b5ceee..d4b7c6d 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -14,7 +14,6 @@
$(PARSE_TIME_MAKE_GOALS) \
$(dont_bother_goals) \
all \
- ECLIPSE-% \
brillo_tests \
btnod \
build-art% \
diff --git a/core/rbe.mk b/core/rbe.mk
index 6754b0a..001a549 100644
--- a/core/rbe.mk
+++ b/core/rbe.mk
@@ -64,7 +64,7 @@
d8_exec_strategy := remote_local_fallback
endif
- platform := container-image=docker://gcr.io/androidbuild-re-dockerimage/android-build-remoteexec-image@sha256:582efb38f0c229ea39952fff9e132ccbe183e14869b39888010dacf56b360d62
+ platform := container-image=docker://gcr.io/androidbuild-re-dockerimage/android-build-remoteexec-image@sha256:953fed4a6b2501256a0d17f055dc17884ff71b024e50ade773e0b348a6c303e6
cxx_platform := $(platform),Pool=$(cxx_pool)
java_r8_d8_platform := $(platform),Pool=$(java_pool)
diff --git a/core/soong_cc_rust_prebuilt.mk b/core/soong_cc_rust_prebuilt.mk
index 05b4b6b..143931b 100644
--- a/core/soong_cc_rust_prebuilt.mk
+++ b/core/soong_cc_rust_prebuilt.mk
@@ -251,30 +251,6 @@
$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)
-# We don't care about installed rlib/static libraries, since the libraries have
-# already been linked into the module at that point. We do, however, care
-# about the NOTICE files for any rlib/static libraries that we use.
-# (see notice_files.mk)
-#
-# Filter out some NDK libraries that are not being exported.
-my_static_libraries := \
- $(filter-out ndk_libc++_static ndk_libc++abi ndk_libandroid_support ndk_libunwind \
- ndk_libc++_static.native_bridge ndk_libc++abi.native_bridge \
- ndk_libandroid_support.native_bridge ndk_libunwind.native_bridge, \
- $(LOCAL_STATIC_LIBRARIES))
-installed_static_library_notice_file_targets := \
- $(foreach lib,$(my_static_libraries) $(LOCAL_WHOLE_STATIC_LIBRARIES), \
- NOTICE-$(if $(LOCAL_IS_HOST_MODULE),HOST$(if $(my_host_cross),_CROSS,),TARGET)-STATIC_LIBRARIES-$(lib))
-installed_static_library_notice_file_targets += \
- $(foreach lib,$(LOCAL_RLIB_LIBRARIES), \
- NOTICE-$(if $(LOCAL_IS_HOST_MODULE),HOST$(if $(my_host_cross),_CROSS,),TARGET)-RLIB_LIBRARIES-$(lib))
-installed_static_library_notice_file_targets += \
- $(foreach lib,$(LOCAL_PROC_MACRO_LIBRARIES), \
- NOTICE-$(if $(LOCAL_IS_HOST_MODULE),HOST$(if $(my_host_cross),_CROSS,),TARGET)-PROC_MACRO_LIBRARIES-$(lib))
-
-$(notice_target): | $(installed_static_library_notice_file_targets)
-$(LOCAL_INSTALLED_MODULE): | $(notice_target)
-
# Reinstall shared library dependencies of fuzz targets to /data/fuzz/ (for
# target) or /data/ (for host).
ifdef LOCAL_IS_FUZZ_TARGET
diff --git a/core/sysprop.mk b/core/sysprop.mk
index a37fd05..4e8e976 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -46,7 +46,6 @@
echo "ro.product.$(1).manufacturer=$(PRODUCT_MANUFACTURER)" >> $(2);\
echo "ro.product.$(1).model=$(PRODUCT_MODEL)" >> $(2);\
echo "ro.product.$(1).name=$(TARGET_PRODUCT)" >> $(2);\
- # Attestation specific properties for AOSP/GSI build running on device.
if [ -n "$(strip $(PRODUCT_MODEL_FOR_ATTESTATION))" ]; then \
echo "ro.product.model_for_attestation=$(PRODUCT_MODEL_FOR_ATTESTATION)" >> $(2);\
fi; \
diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk
index 593b7b6..91cb2c9 100644
--- a/core/tasks/cts.mk
+++ b/core/tasks/cts.mk
@@ -37,7 +37,7 @@
cts_platform_release_path := cts/tests/tests/os/assets/platform_releases.txt
cts_platform_release_string := $(shell cat $(cts_platform_release_path))
- ifeq ($(RELEASE_PLATFORM_VERSION_CODENAME_REL),)
+ ifneq (REL,$(PLATFORM_VERSION_CODENAME))
ifeq (,$(findstring $(PLATFORM_VERSION),$(cts_platform_version_string)))
define error_msg
============================================================
diff --git a/core/tasks/ide.mk b/core/tasks/ide.mk
deleted file mode 100644
index a3aa0cd..0000000
--- a/core/tasks/ide.mk
+++ /dev/null
@@ -1,61 +0,0 @@
-#
-# Copyright (C) 2010 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-define filter-ide-goals
-$(strip $(filter $(1)-%,$(MAKECMDGOALS)))
-endef
-
-define filter-ide-modules
-$(strip $(subst -,$(space),$(patsubst $(1)-%,%,$(2))))
-endef
-
-# eclipse
-eclipse_project_goals := $(call filter-ide-goals,ECLIPSE)
-ifdef eclipse_project_goals
- ifneq ($(words $(eclipse_project_goals)),1)
- $(error Only one ECLIPSE- goal may be specified: $(eclipse_project_goals))
- endif
- eclipse_project_modules := $(call filter-ide-modules,ECLIPSE,$(eclipse_project_goals))
-
- ifneq ($(filter lunch,$(eclipse_project_modules)),)
- eclipse_project_modules := $(filter-out lunch,$(eclipse_project_modules))
- installed_modules := $(foreach m,$(ALL_DEFAULT_INSTALLED_MODULES),\
- $(INSTALLABLE_FILES.$(m).MODULE))
- java_modules := $(foreach m,$(installed_modules),\
- $(if $(filter JAVA_LIBRARIES APPS,$(ALL_MODULES.$(m).CLASS)),$(m),))
- eclipse_project_modules := $(sort $(eclipse_project_modules) $(java_modules))
- endif
-
- source_paths := $(foreach m,$(eclipse_project_modules),$(ALL_MODULES.$(m).PATH)) \
- $(foreach m,$(eclipse_project_modules),$(ALL_MODULES.$(m).INTERMEDIATE_SOURCE_DIR))
- source_paths := $(sort $(source_paths))
-
-.classpath: PRIVATE_MODULES := $(eclipse_project_modules)
-.classpath: PRIVATE_DIRS := $(source_paths)
-
-# the mess below with ./src tries to guess whether the src
-$(eclipse_project_goals): .classpath
-.classpath: FORCE
- $(hide) echo Generating .classpath for eclipse
- $(hide) echo '<classpath>' > $@
- $(hide) for p in $(PRIVATE_DIRS) ; do \
- echo -n ' <classpathentry kind="src" path="' >> $@ ; \
- ( if [ -d $$p/src ] ; then echo -n $$p/src ; else echo -n $$p ; fi ) >> $@ ; \
- echo '"/>' >> $@ ; \
- done
- $(hide) echo '</classpath>' >> $@
-endif
-
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
deleted file mode 100644
index 379369e..0000000
--- a/core/version_defaults.mk
+++ /dev/null
@@ -1,111 +0,0 @@
-#
-# Copyright (C) 2008 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#
-# Handle various build version information.
-#
-# Guarantees that the following are defined:
-# PLATFORM_VERSION
-# PLATFORM_DISPLAY_VERSION
-# PLATFORM_SDK_VERSION
-# PLATFORM_VERSION_CODENAME
-# DEFAULT_APP_TARGET_SDK
-# BUILD_ID
-# BUILD_NUMBER
-# PLATFORM_SECURITY_PATCH
-# PLATFORM_VNDK_VERSION
-# PLATFORM_SYSTEMSDK_VERSIONS
-#
-
-# Look for an optional file containing overrides of the defaults,
-# but don't cry if we don't find it. We could just use -include, but
-# the build.prop target also wants INTERNAL_BUILD_ID_MAKEFILE to be set
-# if the file exists.
-#
-INTERNAL_BUILD_ID_MAKEFILE := $(wildcard $(BUILD_SYSTEM)/build_id.mk)
-ifdef INTERNAL_BUILD_ID_MAKEFILE
- include $(INTERNAL_BUILD_ID_MAKEFILE)
-endif
-
-# Set release configuration. The default resides in build/release/build_flags.mk.
-MIN_PLATFORM_VERSION := UP1A
-MAX_PLATFORM_VERSION := VP1A
-
-# The last stable version name of the platform that was released. During
-# development, this stays at that previous version, while the codename indicates
-# further work based on the previous version.
-PLATFORM_VERSION_LAST_STABLE := 14
-.KATI_READONLY := PLATFORM_VERSION_LAST_STABLE
-
-# These are the current development codenames, if the build is not a final
-# release build. If this is a final release build, it is simply "REL".
-# Note that this may be overridden by RELEASE_VERSION_CODENAME_REL in
-# version_util.mk.
-PLATFORM_VERSION_CODENAME.UP1A := UpsideDownCake
-PLATFORM_VERSION_CODENAME.VP1A := VanillaIceCream
-
-# This is the user-visible version. In a final release build it should
-# be empty to use PLATFORM_VERSION as the user-visible version. For
-# a preview release it can be set to a user-friendly value like `12 Preview 1`
-PLATFORM_DISPLAY_VERSION :=
-
-ifndef PLATFORM_SDK_VERSION
- # This is the canonical definition of the SDK version, which defines
- # the set of APIs and functionality available in the platform. It
- # is a single integer that increases monotonically as updates to
- # the SDK are released. It should only be incremented when the APIs for
- # the new release are frozen (so that developers don't write apps against
- # intermediate builds). During development, this number remains at the
- # SDK version the branch is based on and PLATFORM_VERSION_CODENAME holds
- # the code-name of the new development work.
-
- # When you increment the PLATFORM_SDK_VERSION please ensure you also
- # clear out the following text file of all older PLATFORM_VERSION's:
- # cts/tests/tests/os/assets/platform_versions.txt
- PLATFORM_SDK_VERSION := 34
-endif
-.KATI_READONLY := PLATFORM_SDK_VERSION
-
-# This is the sdk extension version of this tree.
-PLATFORM_SDK_EXTENSION_VERSION := 7
-.KATI_READONLY := PLATFORM_SDK_EXTENSION_VERSION
-
-# This is the sdk extension version that PLATFORM_SDK_VERSION ships with.
-PLATFORM_BASE_SDK_EXTENSION_VERSION := $(PLATFORM_SDK_EXTENSION_VERSION)
-.KATI_READONLY := PLATFORM_BASE_SDK_EXTENSION_VERSION
-
-# This are all known codenames.
-PLATFORM_VERSION_KNOWN_CODENAMES := \
-Base Base11 Cupcake Donut Eclair Eclair01 EclairMr1 Froyo Gingerbread GingerbreadMr1 \
-Honeycomb HoneycombMr1 HoneycombMr2 IceCreamSandwich IceCreamSandwichMr1 \
-JellyBean JellyBeanMr1 JellyBeanMr2 Kitkat KitkatWatch Lollipop LollipopMr1 M N NMr1 O OMr1 P \
-Q R S Sv2 Tiramisu UpsideDownCake VanillaIceCream
-
-# Convert from space separated list to comma separated
-PLATFORM_VERSION_KNOWN_CODENAMES := \
- $(call normalize-comma-list,$(PLATFORM_VERSION_KNOWN_CODENAMES))
-.KATI_READONLY := PLATFORM_VERSION_KNOWN_CODENAMES
-
-ifndef PLATFORM_SECURITY_PATCH
- # Used to indicate the security patch that has been applied to the device.
- # It must signify that the build includes all security patches issued up through the designated Android Public Security Bulletin.
- # It must be of the form "YYYY-MM-DD" on production devices.
- # It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
- # If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
- PLATFORM_SECURITY_PATCH := 2023-10-05
-endif
-
-include $(BUILD_SYSTEM)/version_util.mk
diff --git a/core/version_util.mk b/core/version_util.mk
index 0cc3442..dfa0277 100644
--- a/core/version_util.mk
+++ b/core/version_util.mk
@@ -14,119 +14,99 @@
# limitations under the License.
#
-ALLOWED_VERSIONS := $(call allowed-platform-versions,\
- $(MIN_PLATFORM_VERSION),\
- $(MAX_PLATFORM_VERSION),\
- $(RELEASE_PLATFORM_VERSION))
+
+#
+# Handle various build version information.
+#
+# Guarantees that the following are defined:
+# PLATFORM_VERSION
+# PLATFORM_DISPLAY_VERSION
+# PLATFORM_SDK_VERSION
+# PLATFORM_SDK_EXTENSION_VERSION
+# PLATFORM_VERSION_CODENAME
+# DEFAULT_APP_TARGET_SDK
+# BUILD_ID
+# BUILD_NUMBER
+# PLATFORM_SECURITY_PATCH
+# PLATFORM_VNDK_VERSION
+# PLATFORM_SYSTEMSDK_VERSIONS
+# PLATFORM_VERSION_LAST_STABLE
+#
+
+# Look for an optional file containing overrides of the defaults,
+# but don't cry if we don't find it. We could just use -include, but
+# the build.prop target also wants INTERNAL_BUILD_ID_MAKEFILE to be set
+# if the file exists.
+#
+INTERNAL_BUILD_ID_MAKEFILE := $(wildcard $(BUILD_SYSTEM)/build_id.mk)
+ifdef INTERNAL_BUILD_ID_MAKEFILE
+ include $(INTERNAL_BUILD_ID_MAKEFILE)
+endif
ifdef TARGET_PLATFORM_VERSION
$(error Do not set TARGET_PLATFORM_VERSION directly. Use RELEASE_PLATFORM_VERSION. value: $(TARGET_PLATFORM_VERSION))
endif
-
TARGET_PLATFORM_VERSION := $(RELEASE_PLATFORM_VERSION)
-
-ifeq (,$(filter $(ALLOWED_VERSIONS), $(TARGET_PLATFORM_VERSION)))
- $(warning Invalid TARGET_PLATFORM_VERSION '$(TARGET_PLATFORM_VERSION)', must be one of)
- $(error $(ALLOWED_VERSIONS))
-endif
-ALLOWED_VERSIONS :=
-MIN_PLATFORM_VERSION :=
-MAX_PLATFORM_VERSION :=
-
.KATI_READONLY := TARGET_PLATFORM_VERSION
-# Default versions for each TARGET_PLATFORM_VERSION
-# TODO: PLATFORM_VERSION, PLATFORM_SDK_VERSION, etc. should be conditional
-# on this
-
-# This is the canonical definition of the platform version,
-# which is the version that we reveal to the end user.
-# Update this value when the platform version changes (rather
-# than overriding it somewhere else). Can be an arbitrary string.
-
-# When you change PLATFORM_VERSION for a given PLATFORM_SDK_VERSION
-# please add that PLATFORM_VERSION as well as clean up obsolete PLATFORM_VERSION's
-# in the following text file:
-# cts/tests/tests/os/assets/platform_versions.txt
-
-# Note that there should be one PLATFORM_VERSION and PLATFORM_VERSION_CODENAME
-# entry for each unreleased API level, regardless of
-# MIN_PLATFORM_VERSION/MAX_PLATFORM_VERSION. PLATFORM_VERSION is used to
-# generate the range of allowed SDK versions, so it must have an entry for every
-# unreleased API level targetable by this branch, not just those that are valid
-# lunch targets for this branch.
-
-# Release config flag to override the current version to REL. Note that the
-# codename can also be locked to REL by setting it in versino_defaults.mk.
-ifneq ($(RELEASE_PLATFORM_VERSION_CODENAME_REL),)
- PLATFORM_VERSION_CODENAME.$(TARGET_PLATFORM_VERSION) := REL
+ifdef PLATFORM_SECURITY_PATCH
+ $(error Do not set PLATFORM_SECURITY_PATCH directly. Use RELEASE_PLATFORM_SECURITY_PATCH. value: $(PLATFORM_SECURITY_PATCH))
endif
+PLATFORM_SECURITY_PATCH := $(RELEASE_PLATFORM_SECURITY_PATCH)
+.KATI_READONLY := PLATFORM_SECURITY_PATCH
-PLATFORM_VERSION_CODENAME := $(PLATFORM_VERSION_CODENAME.$(TARGET_PLATFORM_VERSION))
-ifndef PLATFORM_VERSION_CODENAME
- # PLATFORM_VERSION_CODENAME falls back to TARGET_PLATFORM_VERSION
- PLATFORM_VERSION_CODENAME := $(TARGET_PLATFORM_VERSION)
+ifdef PLATFORM_SDK_VERSION
+ $(error Do not set PLATFORM_SDK_VERSION directly. Use RELEASE_PLATFORM_SDK_VERSION. value: $(PLATFORM_SDK_VERSION))
endif
+PLATFORM_SDK_VERSION := $(RELEASE_PLATFORM_SDK_VERSION)
+.KATI_READONLY := PLATFORM_SDK_VERSION
-# This is all of the *active* development codenames.
-# This confusing name is needed because
-# all_codenames has been baked into build.prop for ages.
-#
-# Should be either the same as PLATFORM_VERSION_CODENAME or a comma-separated
-# list of additional codenames after PLATFORM_VERSION_CODENAME.
-PLATFORM_VERSION_ALL_CODENAMES :=
-
-# Build a list of all active code names. Avoid duplicates, and stop when we
-# reach a codename that matches PLATFORM_VERSION_CODENAME (anything beyond
-# that is not included in our build).
-_versions_in_target := \
- $(call find_and_earlier,$(ALL_VERSIONS),$(TARGET_PLATFORM_VERSION))
-$(foreach version,$(_versions_in_target),\
- $(eval _codename := $(PLATFORM_VERSION_CODENAME.$(version)))\
- $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_CODENAMES)),,\
- $(eval PLATFORM_VERSION_ALL_CODENAMES += $(_codename))))
-
-# And the list of actually all the codenames that are in preview. The
-# ALL_CODENAMES variable is sort of a lie for historical reasons and only
-# includes codenames up to and including the currently active codename, whereas
-# this variable also includes future codenames. For example, while AOSP is still
-# merging into U, but V development has started, ALL_CODENAMES will only be U,
-# but ALL_PREVIEW_CODENAMES will be U and V.
-#
-# REL is filtered out of the list. The codename of the current release is
-# replaced by "REL" when the build is configured as a release rather than a
-# preview. For example, PLATFORM_VERSION_CODENAME.UpsideDownCake will be "REL"
-# rather than UpsideDownCake in a -next target when the upcoming release is
-# UpsideDownCake. "REL" is a codename (and android.os.Build relies on this:
-# https://cs.android.com/android/platform/superproject/main/+/main:frameworks/base/core/java/android/os/Build.java;l=484-487;drc=316e3d16c9f34212f3beace7695289651d15a071),
-# so it should be in PLATFORM_VERSION_ALL_CODENAMES, but it definitely is not a
-# preview codename.
-PLATFORM_VERSION_ALL_PREVIEW_CODENAMES :=
-$(foreach version,$(ALL_VERSIONS),\
- $(eval _codename := $(PLATFORM_VERSION_CODENAME.$(version)))\
- $(if $(filter REL,$(_codename)),,\
- $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_PREVIEW_CODENAMES)),,\
- $(eval PLATFORM_VERSION_ALL_PREVIEW_CODENAMES += $(_codename)))))
-
-# And convert from space separated to comma separated.
-PLATFORM_VERSION_ALL_CODENAMES := \
- $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_CODENAMES)))
-PLATFORM_VERSION_ALL_PREVIEW_CODENAMES := \
- $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_PREVIEW_CODENAMES)))
-
-.KATI_READONLY := \
- PLATFORM_VERSION_CODENAME \
- PLATFORM_VERSION_ALL_CODENAMES \
- PLATFORM_VERSION_ALL_PREVIEW_CODENAMES \
-
-ifneq (REL,$(PLATFORM_VERSION_CODENAME))
- codenames := \
- $(subst $(comma),$(space),$(strip $(PLATFORM_VERSION_KNOWN_CODENAMES)))
- ifeq ($(filter $(PLATFORM_VERSION_CODENAME),$(codenames)),)
- $(error '$(PLATFORM_VERSION_CODENAME)' is not in '$(codenames)'. \
- Add PLATFORM_VERSION_CODENAME to PLATFORM_VERSION_KNOWN_CODENAMES)
- endif
+ifdef PLATFORM_SDK_EXTENSION_VERSION
+ $(error Do not set PLATFORM_SDK_EXTENSION_VERSION directly. Use RELEASE_PLATFORM_SDK_EXTENSION_VERSION. value: $(PLATFORM_SDK_EXTENSION_VERSION))
endif
+PLATFORM_SDK_EXTENSION_VERSION := $(RELEASE_PLATFORM_SDK_EXTENSION_VERSION)
+.KATI_READONLY := PLATFORM_SDK_EXTENSION_VERSION
+
+# This is the sdk extension version that PLATFORM_SDK_VERSION ships with.
+PLATFORM_BASE_SDK_EXTENSION_VERSION := $(PLATFORM_SDK_EXTENSION_VERSION)
+.KATI_READONLY := PLATFORM_BASE_SDK_EXTENSION_VERSION
+
+ifdef PLATFORM_VERSION_CODENAME
+ $(error Do not set PLATFORM_VERSION_CODENAME directly. Use RELEASE_PLATFORM_VERSION. value: $(PLATFORM_VERSION_CODENAME))
+endif
+PLATFORM_VERSION_CODENAME := $(RELEASE_PLATFORM_VERSION_CODENAME)
+.KATI_READONLY := PLATFORM_VERSION_CODENAME
+
+ifdef PLATFORM_VERSION_ALL_CODENAMES
+ $(error Do not set PLATFORM_VERSION_ALL_CODENAMES directly. Use RELEASE_PLATFORM_VERSION_ALL_CODENAMES. value: $(PLATFORM_VERSION_ALL_CODENAMES))
+endif
+PLATFORM_VERSION_ALL_CODENAMES := $(RELEASE_PLATFORM_VERSION_ALL_CODENAMES)
+.KATI_READONLY := PLATFORM_VERSION_ALL_CODENAMES
+
+ifdef PLATFORM_VERSION_ALL_PREVIEW_CODENAMES
+ $(error Do not set PLATFORM_VERSION_ALL_PREVIEW_CODENAMES directly. Use RELEASE_PLATFORM_VERSION_ALL_PREVIEW_CODENAMES. value: $(PLATFORM_VERSION_ALL_PREVIEW_CODENAMES))
+endif
+PLATFORM_VERSION_ALL_PREVIEW_CODENAMES := $(RELEASE_PLATFORM_VERSION_ALL_PREVIEW_CODENAMES)
+.KATI_READONLY := PLATFORM_VERSION_ALL_PREVIEW_CODENAMES
+
+ifdef PLATFORM_VERSION_LAST_STABLE
+  $(error Do not set PLATFORM_VERSION_LAST_STABLE directly. Use RELEASE_PLATFORM_VERSION_LAST_STABLE. value: $(PLATFORM_VERSION_LAST_STABLE))
+endif
+PLATFORM_VERSION_LAST_STABLE := $(RELEASE_PLATFORM_VERSION_LAST_STABLE)
+.KATI_READONLY := PLATFORM_VERSION_LAST_STABLE
+
+
+# These are all known codenames. Should this move into the release config?
+PLATFORM_VERSION_KNOWN_CODENAMES := \
+Base Base11 Cupcake Donut Eclair Eclair01 EclairMr1 Froyo Gingerbread GingerbreadMr1 \
+Honeycomb HoneycombMr1 HoneycombMr2 IceCreamSandwich IceCreamSandwichMr1 \
+JellyBean JellyBeanMr1 JellyBeanMr2 Kitkat KitkatWatch Lollipop LollipopMr1 M N NMr1 O OMr1 P \
+Q R S Sv2 Tiramisu UpsideDownCake VanillaIceCream
+
+# Convert from space separated list to comma separated
+PLATFORM_VERSION_KNOWN_CODENAMES := \
+ $(call normalize-comma-list,$(PLATFORM_VERSION_KNOWN_CODENAMES))
+.KATI_READONLY := PLATFORM_VERSION_KNOWN_CODENAMES
ifndef PLATFORM_VERSION
ifeq (REL,$(PLATFORM_VERSION_CODENAME))
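With version_defaults.mk gone, every version value above now comes from the release configuration (the RELEASE_* variables), and version_util.mk errors out if any of them is set directly. A quick way to sanity-check the resulting values after this change, using the existing get_build_var helper from envsetup.sh (a sketch, not part of the change; the chosen combo is just an example):

    lunch aosp_arm64-trunk_staging-eng
    get_build_var PLATFORM_VERSION_CODENAME
    get_build_var PLATFORM_SDK_VERSION
    get_build_var PLATFORM_SECURITY_PATCH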
diff --git a/envsetup.sh b/envsetup.sh
index af6695f..3b76980 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -56,7 +56,7 @@
Run "m help" for help with the build system itself.
Invoke ". build/envsetup.sh" from your shell to add the following functions to your environment:
-- lunch: lunch <product_name>-<build_variant>
+- lunch: lunch <product_name>-<release_type>-<build_variant>
Selects <product_name> as the product to build, and <build_variant> as the variant to
build, and stores those selections in the environment to be read by subsequent
invocations of 'm' etc.
@@ -205,6 +205,7 @@
return
fi
TARGET_PRODUCT=$1 \
+ TARGET_RELEASE= \
TARGET_BUILD_VARIANT= \
TARGET_BUILD_TYPE= \
TARGET_BUILD_APPS= \
@@ -486,7 +487,7 @@
function multitree_lunch_help()
{
- echo "usage: lunch PRODUCT-VARIANT" 1>&2
+ echo "usage: lunch PRODUCT-RELEASE-VARIANT" 1>&2
echo " Set up android build environment based on a product short name and variant" 1>&2
echo 1>&2
echo "lunch COMBO_FILE VARIANT" 1>&2
@@ -728,7 +729,7 @@
{
local uname=$(uname)
local choices
- choices=$(TARGET_BUILD_APPS= TARGET_PRODUCT= TARGET_BUILD_VARIANT= get_build_var COMMON_LUNCH_CHOICES 2>/dev/null)
+ choices=$(TARGET_BUILD_APPS= TARGET_PRODUCT= TARGET_RELEASE= TARGET_BUILD_VARIANT= get_build_var COMMON_LUNCH_CHOICES 2>/dev/null)
local ret=$?
echo
@@ -774,7 +775,7 @@
answer=$1
else
print_lunch_menu
- echo "Which would you like? [aosp_arm-eng]"
+ echo "Which would you like? [aosp_arm-trunk_staging-eng]"
echo -n "Pick from common choices above (e.g. 13) or specify your own (e.g. aosp_barbet-eng): "
read answer
used_lunch_menu=1
@@ -784,7 +785,7 @@
if [ -z "$answer" ]
then
- selection=aosp_arm-eng
+ selection=aosp_arm-trunk_staging-eng
elif (echo -n $answer | grep -q -e "^[0-9][0-9]*$")
then
local choices=($(TARGET_BUILD_APPS= get_build_var COMMON_LUNCH_CHOICES))
@@ -804,26 +805,16 @@
export TARGET_BUILD_APPS=
- # Support either <product>-<variant> or <product>-<release>-<variant>
- local product release_and_variant release variant
- product=${selection%%-*} # Trim everything after first dash
- release_and_variant=${selection#*-} # Trim everything up to first dash
- if [ "$release_and_variant" != "$selection" ]; then
- local first=${release_and_variant%%-*} # Trim everything after first dash
- if [ "$first" != "$release_and_variant" ]; then
- # There is a 2nd dash, split into release-variant
- release=$first # Everything up to the dash
- variant=${release_and_variant#*-} # Trim everything up to dash
- else
- # There is not a 2nd dash, default to variant as the second param
- variant=$first
- fi
- fi
+ # This must be <product>-<release>-<variant>
+ local product release variant
+ # Split string on the '-' character.
+ IFS="-" read -r product release variant <<< "$selection"
- if [ -z "$product" ]
+ if [[ -z "$product" ]] || [[ -z "$release" ]] || [[ -z "$variant" ]]
then
echo
echo "Invalid lunch combo: $selection"
+ echo "Valid combos must be of the form <product>-<release>-<variant>"
return 1
fi
@@ -841,11 +832,8 @@
fi
export TARGET_PRODUCT=$(get_build_var TARGET_PRODUCT)
export TARGET_BUILD_VARIANT=$(get_build_var TARGET_BUILD_VARIANT)
- if [ -n "$release" ]; then
- export TARGET_RELEASE=$release
- else
- unset TARGET_RELEASE
- fi
+ export TARGET_RELEASE=$release
+ # Note this is the string "release", not the value of the variable.
export TARGET_BUILD_TYPE=release
if [ $used_lunch_menu -eq 1 ]; then
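The stricter parser above means two-part combos like aosp_arm-eng no longer select a default release. A minimal sketch of how the IFS-based split behaves (illustrative only; parse_combo is a hypothetical helper, not part of envsetup.sh):

    parse_combo() {
        local product release variant
        # Split on '-' exactly as the new lunch code does.
        IFS="-" read -r product release variant <<< "$1"
        if [[ -z "$product" ]] || [[ -z "$release" ]] || [[ -z "$variant" ]]; then
            echo "invalid: $1"
        else
            echo "product=$product release=$release variant=$variant"
        fi
    }
    parse_combo aosp_arm-trunk_staging-eng  # product=aosp_arm release=trunk_staging variant=eng
    parse_combo aosp_arm-eng                # invalid: no release segment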
@@ -887,6 +875,8 @@
{
local showHelp="$(echo $* | xargs -n 1 echo | \grep -E '^(help)$' | xargs)"
local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|arm64|x86_64)$' | xargs)"
+ # TODO(b/307975293): Expand tapas to take release arguments (and update hmm() usage).
+ local release="trunk_staging"
local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
local density="$(echo $* | xargs -n 1 echo | \grep -E '^(ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
local keys="$(echo $* | xargs -n 1 echo | \grep -E '^(devkeys)$' | xargs)"
@@ -902,6 +892,10 @@
echo "tapas: Error: Multiple build archs supplied: $arch"
return
fi
+ if [ $(echo $release | wc -w) -gt 1 ]; then
+ echo "tapas: Error: Multiple build releases supplied: $release"
+ return
+ fi
if [ $(echo $variant | wc -w) -gt 1 ]; then
echo "tapas: Error: Multiple build variants supplied: $variant"
return
@@ -936,6 +930,7 @@
fi
export TARGET_PRODUCT=$product
+ export TARGET_RELEASE=$release
export TARGET_BUILD_VARIANT=$variant
export TARGET_BUILD_DENSITY=$density
export TARGET_BUILD_TYPE=release

@@ -953,6 +948,8 @@
{
local showHelp="$(echo $* | xargs -n 1 echo | \grep -E '^(help)$' | xargs)"
local product="$(echo $* | xargs -n 1 echo | \grep -E '^(.*_)?(arm|x86|arm64|riscv64|x86_64|arm64only|x86_64only)$' | xargs)"
+ # TODO: Expand banchan to take release arguments (and update hmm() usage).
+ local release="trunk_staging"
local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|(.*_)?(arm|x86|arm64|riscv64|x86_64))$' | xargs)"
@@ -967,6 +964,10 @@
echo "banchan: Error: Multiple build archs or products supplied: $products"
return
fi
+ if [ $(echo $release | wc -w) -gt 1 ]; then
+ echo "banchan: Error: Multiple build releases supplied: $release"
+ return
+ fi
if [ $(echo $variant | wc -w) -gt 1 ]; then
echo "banchan: Error: Multiple build variants supplied: $variant"
return
@@ -990,6 +991,7 @@
fi
export TARGET_PRODUCT=$product
+ export TARGET_RELEASE=$release
export TARGET_BUILD_VARIANT=$variant
export TARGET_BUILD_DENSITY=alldpi
export TARGET_BUILD_TYPE=release
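tapas and banchan do not take a release argument yet (see the TODOs above); both simply pin TARGET_RELEASE to trunk_staging. An illustrative check after running either one (the module name is made up):

    tapas SomeApp arm64 userdebug
    echo $TARGET_RELEASE   # trunk_staging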
@@ -1878,6 +1880,10 @@
>&2 echo "Couldn't locate the top of the tree. Try setting TOP."
return 1
fi
+
+ if [[ -z "${ANDROID_QUIET_BUILD:-}" && -n "${ANDROID_BUILD_BANNER}" ]]; then
+ echo "$ANDROID_BUILD_BANNER"
+ fi
)
function m()
@@ -2059,6 +2065,11 @@
(\cd "${T}" && build/make/tools/overrideflags.sh "$@")
}
+function aninja() {
+ local T="$(gettop)"
+ (\cd "${T}" && prebuilts/build-tools/linux-x86/bin/ninja -f out/combined-${TARGET_PRODUCT}.ninja "$@")
+}
+
validate_current_shell
set_global_paths
source_vendorsetup
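The new aninja helper runs the prebuilt ninja against the combined ninja file for the current TARGET_PRODUCT, so the standard ninja tools work on the full Kati+Soong graph. Illustrative invocations (the target and rule names are examples only):

    aninja -t query droid          # show inputs/outputs of the droid target
    aninja -t targets rule phony   # list targets built by phony rules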
diff --git a/rbesetup.sh b/rbesetup.sh
index 8386628..9e246ff 100644
--- a/rbesetup.sh
+++ b/rbesetup.sh
@@ -34,7 +34,7 @@
# for the build to be executed with RBE.
function use_rbe() {
local RBE_BINARIES_DIR="prebuilts/remoteexecution-client/latest"
- local DOCKER_IMAGE="gcr.io/androidbuild-re-dockerimage/android-build-remoteexec-image@sha256:582efb38f0c229ea39952fff9e132ccbe183e14869b39888010dacf56b360d62"
+ local DOCKER_IMAGE="gcr.io/androidbuild-re-dockerimage/android-build-remoteexec-image@sha256:953fed4a6b2501256a0d17f055dc17884ff71b024e50ade773e0b348a6c303e6"
# Do not set an invocation-ID and let reproxy auto-generate one.
USE_RBE="true" \
diff --git a/target/board/BoardConfigEmuCommon.mk b/target/board/BoardConfigEmuCommon.mk
deleted file mode 100644
index 6ed08f0..0000000
--- a/target/board/BoardConfigEmuCommon.mk
+++ /dev/null
@@ -1,74 +0,0 @@
-# BoardConfigEmuCommon.mk
-#
-# Common compile-time definitions for emulator
-#
-
-HAVE_HTC_AUDIO_DRIVER := true
-BOARD_USES_GENERIC_AUDIO := true
-TARGET_BOOTLOADER_BOARD_NAME := goldfish_$(TARGET_ARCH)
-
-# No Kernel
-TARGET_NO_KERNEL := true
-
-# no hardware camera
-USE_CAMERA_STUB := true
-
-NUM_FRAMEBUFFER_SURFACE_BUFFERS := 3
-
-# Build OpenGLES emulation guest and host libraries
-BUILD_EMULATOR_OPENGL := true
-BUILD_QEMU_IMAGES := true
-
-# Build and enable the OpenGL ES View renderer. When running on the emulator,
-# the GLES renderer disables itself if host GL acceleration isn't available.
-USE_OPENGL_RENDERER := true
-
-# Emulator doesn't support sparse image format.
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
-
-# emulator is Non-A/B device
-AB_OTA_UPDATER := false
-
-# emulator needs super.img
-BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT := true
-
-# 8G + 8M
-BOARD_SUPER_PARTITION_SIZE ?= 8598323200
-BOARD_SUPER_PARTITION_GROUPS := emulator_dynamic_partitions
-
-BOARD_EMULATOR_DYNAMIC_PARTITIONS_PARTITION_LIST := \
- system \
- system_dlkm \
- system_ext \
- product \
- vendor
-
-TARGET_COPY_OUT_PRODUCT := product
-BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE := ext4
-TARGET_COPY_OUT_SYSTEM_EXT := system_ext
-BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE := ext4
-
-BOARD_USES_SYSTEM_DLKMIMAGE := true
-BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE := erofs
-TARGET_COPY_OUT_SYSTEM_DLKM := system_dlkm
-
-# 8G
-BOARD_EMULATOR_DYNAMIC_PARTITIONS_SIZE ?= 8589934592
-
-#vendor boot
-BOARD_INCLUDE_DTB_IN_BOOTIMG := false
-BOARD_BOOT_HEADER_VERSION := 4
-BOARD_MKBOOTIMG_ARGS += --header_version $(BOARD_BOOT_HEADER_VERSION)
-BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE := 0x06000000
-BOARD_RAMDISK_USE_LZ4 := true
-
-# Enable chain partition for system.
-BOARD_AVB_SYSTEM_KEY_PATH := external/avb/test/data/testkey_rsa2048.pem
-BOARD_AVB_SYSTEM_ALGORITHM := SHA256_RSA2048
-BOARD_AVB_SYSTEM_ROLLBACK_INDEX := $(PLATFORM_SECURITY_PATCH_TIMESTAMP)
-BOARD_AVB_SYSTEM_ROLLBACK_INDEX_LOCATION := 1
-
-BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
-BOARD_FLASH_BLOCK_SIZE := 512
-
-BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/common
diff --git a/target/board/emulator_arm64/BoardConfig.mk b/target/board/emulator_arm64/BoardConfig.mk
deleted file mode 100644
index c16e61b..0000000
--- a/target/board/emulator_arm64/BoardConfig.mk
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# arm64 emulator specific definitions
-TARGET_ARCH := arm64
-TARGET_ARCH_VARIANT := armv8-a
-TARGET_CPU_VARIANT := generic
-TARGET_CPU_ABI := arm64-v8a
-
-ifneq ($(TARGET_BUILD_APPS)$(filter cts sdk,$(MAKECMDGOALS)),)
-# DO NOT USE
-# DO NOT USE
-#
-# This architecture / CPU variant must NOT be used for any 64 bit
-# platform builds. It is the lowest common denominator required
-# to build an unbundled application or cts for all supported 32 and 64 bit
-# platforms.
-#
-# If you're building a 64 bit platform (and not an application) the
-# ARM-v8 specification allows you to assume all the features available in an
-# armv7-a-neon CPU. You should set the following as 2nd arch/cpu variant:
-#
-# TARGET_2ND_ARCH_VARIANT := armv8-a
-# TARGET_2ND_CPU_VARIANT := generic
-#
-# DO NOT USE
-# DO NOT USE
-TARGET_2ND_ARCH_VARIANT := armv7-a-neon
-# DO NOT USE
-# DO NOT USE
-TARGET_2ND_CPU_VARIANT := generic
-# DO NOT USE
-# DO NOT USE
-else
-TARGET_2ND_ARCH_VARIANT := armv8-a
-TARGET_2ND_CPU_VARIANT := generic
-endif
-
-include build/make/target/board/BoardConfigGsiCommon.mk
-include build/make/target/board/BoardConfigEmuCommon.mk
-
-BOARD_BOOTIMAGE_PARTITION_SIZE := 0x02000000
-BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
-
-# Wifi.
-BOARD_WLAN_DEVICE := emulator
-BOARD_HOSTAPD_DRIVER := NL80211
-BOARD_WPA_SUPPLICANT_DRIVER := NL80211
-BOARD_HOSTAPD_PRIVATE_LIB := lib_driver_cmd_simulated
-BOARD_WPA_SUPPLICANT_PRIVATE_LIB := lib_driver_cmd_simulated
-WPA_SUPPLICANT_VERSION := VER_0_8_X
-WIFI_DRIVER_FW_PATH_PARAM := "/dev/null"
-WIFI_DRIVER_FW_PATH_STA := "/dev/null"
-WIFI_DRIVER_FW_PATH_AP := "/dev/null"
diff --git a/target/board/emulator_arm64/device.mk b/target/board/emulator_arm64/device.mk
deleted file mode 100644
index d221e64..0000000
--- a/target/board/emulator_arm64/device.mk
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-PRODUCT_SOONG_NAMESPACES += device/generic/goldfish # for libwifi-hal-emu
-PRODUCT_SOONG_NAMESPACES += device/generic/goldfish-opengl # for goldfish deps.
-
diff --git a/target/board/emulator_arm64/system_ext.prop b/target/board/emulator_arm64/system_ext.prop
deleted file mode 100644
index 2f8f803..0000000
--- a/target/board/emulator_arm64/system_ext.prop
+++ /dev/null
@@ -1,5 +0,0 @@
-#
-# system.prop for emulator arm64 sdk
-#
-
-rild.libpath=/vendor/lib64/libreference-ril.so
diff --git a/target/board/emulator_x86_64/BoardConfig.mk b/target/board/emulator_x86_64/BoardConfig.mk
deleted file mode 100755
index b9cbd8a..0000000
--- a/target/board/emulator_x86_64/BoardConfig.mk
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# x86_64 emulator specific definitions
-TARGET_CPU_ABI := x86_64
-TARGET_ARCH := x86_64
-TARGET_ARCH_VARIANT := x86_64
-
-TARGET_2ND_CPU_ABI := x86
-TARGET_2ND_ARCH := x86
-TARGET_2ND_ARCH_VARIANT := x86_64
-
-TARGET_PRELINK_MODULE := false
-include build/make/target/board/BoardConfigGsiCommon.mk
-include build/make/target/board/BoardConfigEmuCommon.mk
-
-BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
-
-BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/x86
-
-# Wifi.
-BOARD_WLAN_DEVICE := emulator
-BOARD_HOSTAPD_DRIVER := NL80211
-BOARD_WPA_SUPPLICANT_DRIVER := NL80211
-BOARD_HOSTAPD_PRIVATE_LIB := lib_driver_cmd_simulated
-BOARD_WPA_SUPPLICANT_PRIVATE_LIB := lib_driver_cmd_simulated
-WPA_SUPPLICANT_VERSION := VER_0_8_X
-WIFI_DRIVER_FW_PATH_PARAM := "/dev/null"
-WIFI_DRIVER_FW_PATH_STA := "/dev/null"
-WIFI_DRIVER_FW_PATH_AP := "/dev/null"
diff --git a/target/board/emulator_x86_64/device.mk b/target/board/emulator_x86_64/device.mk
deleted file mode 100755
index 8a9d8da..0000000
--- a/target/board/emulator_x86_64/device.mk
+++ /dev/null
@@ -1,27 +0,0 @@
-#
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-PRODUCT_SOONG_NAMESPACES += device/generic/goldfish # for libwifi-hal-emu
-PRODUCT_SOONG_NAMESPACES += device/generic/goldfish-opengl # for goldfish deps.
-
-ifdef NET_ETH0_STARTONBOOT
- PRODUCT_VENDOR_PROPERTIES += net.eth0.startonboot=1
-endif
-
-# Ensure we package the BIOS files too.
-PRODUCT_HOST_PACKAGES += \
- bios.bin \
- vgabios-cirrus.bin \
diff --git a/target/board/emulator_x86_64/system_ext.prop b/target/board/emulator_x86_64/system_ext.prop
deleted file mode 100644
index ed9d173..0000000
--- a/target/board/emulator_x86_64/system_ext.prop
+++ /dev/null
@@ -1,5 +0,0 @@
-#
-# system.prop for generic sdk
-#
-
-rild.libpath=/vendor/lib64/libreference-ril.so
diff --git a/target/board/emulator_x86_64_arm64/BoardConfig.mk b/target/board/emulator_x86_64_arm64/BoardConfig.mk
deleted file mode 100755
index 26b61a6..0000000
--- a/target/board/emulator_x86_64_arm64/BoardConfig.mk
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# x86_64 emulator specific definitions
-TARGET_CPU_ABI := x86_64
-TARGET_ARCH := x86_64
-TARGET_ARCH_VARIANT := x86_64
-
-TARGET_2ND_CPU_ABI := x86
-TARGET_2ND_ARCH := x86
-TARGET_2ND_ARCH_VARIANT := x86_64
-
-TARGET_NATIVE_BRIDGE_ARCH := arm64
-TARGET_NATIVE_BRIDGE_ARCH_VARIANT := armv8-a
-TARGET_NATIVE_BRIDGE_CPU_VARIANT := generic
-TARGET_NATIVE_BRIDGE_ABI := arm64-v8a
-
-TARGET_NATIVE_BRIDGE_2ND_ARCH := arm
-TARGET_NATIVE_BRIDGE_2ND_ARCH_VARIANT := armv7-a-neon
-TARGET_NATIVE_BRIDGE_2ND_CPU_VARIANT := generic
-TARGET_NATIVE_BRIDGE_2ND_ABI := armeabi-v7a armeabi
-
-BUILD_BROKEN_DUP_RULES := true
-
-TARGET_PRELINK_MODULE := false
-
-include build/make/target/board/BoardConfigMainlineCommon.mk
-include build/make/target/board/BoardConfigEmuCommon.mk
-
-# the settings differ from BoardConfigMainlineCommon.mk
-BOARD_USES_SYSTEM_OTHER_ODEX :=
-
-# Resize to 4G to accommodate ASAN and CTS
-BOARD_USERDATAIMAGE_PARTITION_SIZE := 4294967296
-
-BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/x86
-
-# Wifi.
-BOARD_WLAN_DEVICE := emulator
-BOARD_HOSTAPD_DRIVER := NL80211
-BOARD_WPA_SUPPLICANT_DRIVER := NL80211
-BOARD_HOSTAPD_PRIVATE_LIB := lib_driver_cmd_simulated
-BOARD_WPA_SUPPLICANT_PRIVATE_LIB := lib_driver_cmd_simulated
-WPA_SUPPLICANT_VERSION := VER_0_8_X
-WIFI_DRIVER_FW_PATH_PARAM := "/dev/null"
-WIFI_DRIVER_FW_PATH_STA := "/dev/null"
-WIFI_DRIVER_FW_PATH_AP := "/dev/null"
diff --git a/target/board/emulator_x86_64_arm64/device.mk b/target/board/emulator_x86_64_arm64/device.mk
deleted file mode 100755
index af023eb..0000000
--- a/target/board/emulator_x86_64_arm64/device.mk
+++ /dev/null
@@ -1,18 +0,0 @@
-#
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-PRODUCT_SOONG_NAMESPACES += device/generic/goldfish # for libwifi-hal-emu
-PRODUCT_SOONG_NAMESPACES += device/generic/goldfish-opengl # for goldfish deps.
diff --git a/target/board/emulator_x86_64_arm64/system_ext.prop b/target/board/emulator_x86_64_arm64/system_ext.prop
deleted file mode 100644
index ed9d173..0000000
--- a/target/board/emulator_x86_64_arm64/system_ext.prop
+++ /dev/null
@@ -1,5 +0,0 @@
-#
-# system.prop for generic sdk
-#
-
-rild.libpath=/vendor/lib64/libreference-ril.so
diff --git a/target/board/emulator_x86_arm/BoardConfig.mk b/target/board/emulator_x86_arm/BoardConfig.mk
deleted file mode 100644
index 21fdbc8..0000000
--- a/target/board/emulator_x86_arm/BoardConfig.mk
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# x86 emulator specific definitions
-TARGET_CPU_ABI := x86
-TARGET_ARCH := x86
-TARGET_ARCH_VARIANT := x86
-
-TARGET_NATIVE_BRIDGE_ARCH := arm
-TARGET_NATIVE_BRIDGE_ARCH_VARIANT := armv7-a-neon
-TARGET_NATIVE_BRIDGE_CPU_VARIANT := generic
-TARGET_NATIVE_BRIDGE_ABI := armeabi-v7a armeabi
-
-BUILD_BROKEN_DUP_RULES := true
-
-#
-# The inclusion order below is important.
-# The settings in latter makefiles overwrite those in the former.
-#
-include build/make/target/board/BoardConfigMainlineCommon.mk
-include build/make/target/board/BoardConfigEmuCommon.mk
-
-# the settings differ from BoardConfigMainlineCommon.mk
-BOARD_USES_SYSTEM_OTHER_ODEX :=
-
-# Resize to 4G to accommodate ASAN and CTS
-BOARD_USERDATAIMAGE_PARTITION_SIZE := 4294967296
-
-BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/x86
-
-# Wifi.
-BOARD_WLAN_DEVICE := emulator
-BOARD_HOSTAPD_DRIVER := NL80211
-BOARD_WPA_SUPPLICANT_DRIVER := NL80211
-BOARD_HOSTAPD_PRIVATE_LIB := lib_driver_cmd_simulated
-BOARD_WPA_SUPPLICANT_PRIVATE_LIB := lib_driver_cmd_simulated
-WPA_SUPPLICANT_VERSION := VER_0_8_X
-WIFI_DRIVER_FW_PATH_PARAM := "/dev/null"
-WIFI_DRIVER_FW_PATH_STA := "/dev/null"
-WIFI_DRIVER_FW_PATH_AP := "/dev/null"
diff --git a/target/board/emulator_x86_arm/device.mk b/target/board/emulator_x86_arm/device.mk
deleted file mode 100644
index af023eb..0000000
--- a/target/board/emulator_x86_arm/device.mk
+++ /dev/null
@@ -1,18 +0,0 @@
-#
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-PRODUCT_SOONG_NAMESPACES += device/generic/goldfish # for libwifi-hal-emu
-PRODUCT_SOONG_NAMESPACES += device/generic/goldfish-opengl # for goldfish deps.
diff --git a/target/board/emulator_x86_arm/system_ext.prop b/target/board/emulator_x86_arm/system_ext.prop
deleted file mode 100644
index 64829f3..0000000
--- a/target/board/emulator_x86_arm/system_ext.prop
+++ /dev/null
@@ -1,5 +0,0 @@
-#
-# system.prop for generic sdk
-#
-
-rild.libpath=/vendor/lib/libreference-ril.so
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index c3bc14b..76b1c58 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -67,8 +67,6 @@
$(LOCAL_DIR)/mainline_system_x86_arm.mk \
$(LOCAL_DIR)/ndk.mk \
$(LOCAL_DIR)/sdk.mk \
- $(LOCAL_DIR)/sdk_phone_arm64.mk \
- $(LOCAL_DIR)/sdk_phone_x86_64.mk \
endif
@@ -84,7 +82,7 @@
$(LOCAL_DIR)/module_x86_64only.mk \
COMMON_LUNCH_CHOICES := \
- aosp_arm64-eng \
- aosp_arm-eng \
- aosp_x86_64-eng \
- aosp_x86-eng \
+ aosp_arm64-trunk_staging-eng \
+ aosp_arm-trunk_staging-eng \
+ aosp_x86_64-trunk_staging-eng \
+ aosp_x86-trunk_staging-eng \
diff --git a/target/product/angle_default.mk b/target/product/angle_default.mk
index bea0be6..fdfc7f5 100644
--- a/target/product/angle_default.mk
+++ b/target/product/angle_default.mk
@@ -15,9 +15,9 @@
#
# To enable ANGLE as the default system GLES drivers, add
-# $(call inherit-product, $(SRC_TARGET_DIR)/product/angle_enabled.mk) to the Makefile.
+# $(call inherit-product, $(SRC_TARGET_DIR)/product/angle_default.mk) to the Makefile.
$(call inherit-product, $(SRC_TARGET_DIR)/product/angle_supported.mk)
-PRODUCT_VENDOR_PROPERTIES += \
+PRODUCT_SYSTEM_PROPERTIES += \
persist.graphics.egl=angle
diff --git a/target/product/angle_supported.mk b/target/product/angle_supported.mk
index c83ff5f..59e6ea3 100644
--- a/target/product/angle_supported.mk
+++ b/target/product/angle_supported.mk
@@ -18,6 +18,7 @@
# $(call inherit-product, $(SRC_TARGET_DIR)/product/angle_supported.mk) to the Makefile.
# By default, this will allow ANGLE binaries to coexist with native GLES drivers.
+ifneq ($(RELEASE_ANGLE_ON_SYSTEM),true)
PRODUCT_PACKAGES += \
libEGL_angle \
libGLESv1_CM_angle \
@@ -25,3 +26,4 @@
# Set ro.gfx.angle.supported based on if ANGLE is installed in vendor partition
PRODUCT_VENDOR_PROPERTIES += ro.gfx.angle.supported=true
+endif
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 322b98e..e3ebaa3 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -95,6 +95,7 @@
flags_health_check \
framework-graphics \
framework-minus-apex \
+ framework-minus-apex-install-dependencies \
framework-res \
framework-sysconfig.xml \
fsck.erofs \
@@ -345,6 +346,15 @@
WallpaperBackup
endif
+# Move ANGLE from the vendor partition to the system partition when RELEASE_ANGLE_ON_SYSTEM is set
+ifeq ($(RELEASE_ANGLE_ON_SYSTEM),true)
+PRODUCT_PACKAGES += \
+ libEGL_angle \
+ libGLESv1_CM_angle \
+ libGLESv2_angle
+$(call soong_config_set,angle,angle_on_system,true)
+endif
+
# For testing purposes
ifeq ($(FORCE_AUDIO_SILENT), true)
PRODUCT_SYSTEM_PROPERTIES += ro.audio.silent=1
diff --git a/target/product/emulator_system.mk b/target/product/emulator_system.mk
deleted file mode 100644
index b7e7cfa..0000000
--- a/target/product/emulator_system.mk
+++ /dev/null
@@ -1,24 +0,0 @@
-#
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# This file lists emulator experimental modules added to PRODUCT_PACKAGES,
-# only included by targets sdk_phone_x86/64 and sdk_gphone_x86/64
-
-PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST := \
- system/lib/libemulator_multidisplay_jni.so \
- system/lib64/libemulator_multidisplay_jni.so \
- system/priv-app/MultiDisplayProvider/MultiDisplayProvider.apk \
-
-PRODUCT_PACKAGES += MultiDisplayProvider
diff --git a/target/product/emulator_vendor.mk b/target/product/emulator_vendor.mk
deleted file mode 100644
index f71b275..0000000
--- a/target/product/emulator_vendor.mk
+++ /dev/null
@@ -1,52 +0,0 @@
-#
-# Copyright (C) 2012 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-#
-# This file is included by other product makefiles to add all the
-# emulator-related modules to PRODUCT_PACKAGES.
-#
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_vendor.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_vendor.mk)
-
-# need this for gles libraries to load properly
-# after moving to /vendor/lib/
-PRODUCT_PACKAGES += \
- vndk-sp
-
-DEVICE_PACKAGE_OVERLAYS := device/generic/goldfish/overlay
-
-PRODUCT_CHARACTERISTICS := emulator
-
-PRODUCT_FULL_TREBLE_OVERRIDE := true
-
-# goldfish vendor partition configurations
-$(call inherit-product-if-exists, device/generic/goldfish/vendor.mk)
-
-#watchdog tiggers reboot because location service is not
-#responding, disble it for now.
-#still keep it on internal main (master) as it is still working
-#once it is fixed in aosp, remove this block of comment.
-#PRODUCT_VENDOR_PROPERTIES += \
-#config.disable_location=true
-
-# enable Google-specific location features,
-# like NetworkLocationProvider and LocationCollector
-PRODUCT_SYSTEM_EXT_PROPERTIES += \
- ro.com.google.locationfeatures=1
-
-# disable setupwizard
-PRODUCT_SYSTEM_EXT_PROPERTIES += \
- ro.setupwizard.mode?=DISABLED
diff --git a/target/product/fullmte.mk b/target/product/fullmte.mk
index d47c685..5726c06 100644
--- a/target/product/fullmte.mk
+++ b/target/product/fullmte.mk
@@ -20,7 +20,8 @@
# For more details, see:
# https://source.android.com/docs/security/test/memory-safety/arm-mte
ifeq ($(filter memtag_heap,$(SANITIZE_TARGET)),)
- SANITIZE_TARGET := $(strip $(SANITIZE_TARGET) memtag_heap memtag_stack)
+ # TODO(b/292478827): Re-enable memtag_stack when new toolchain rolls.
+ SANITIZE_TARGET := $(strip $(SANITIZE_TARGET) memtag_heap)
SANITIZE_TARGET_DIAG := $(strip $(SANITIZE_TARGET_DIAG) memtag_heap)
endif
PRODUCT_PRODUCT_PROPERTIES += persist.arm64.memtag.default=sync
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index fa3d1da..007aabd 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -7,7 +7,7 @@
#####################################################################
# This is the up-to-date list of vndk libs.
LATEST_VNDK_LIB_LIST := $(LOCAL_PATH)/current.txt
-UNFROZEN_VNDK :=
+UNFROZEN_VNDK := true
ifeq (REL,$(PLATFORM_VERSION_CODENAME))
# Use frozen vndk lib list only if "34 >= PLATFORM_VNDK_VERSION"
ifeq ($(call math_gt_or_eq,34,$(PLATFORM_VNDK_VERSION)),true)
@@ -15,8 +15,7 @@
ifeq ($(wildcard $(LATEST_VNDK_LIB_LIST)),)
$(error $(LATEST_VNDK_LIB_LIST) file not found. Please copy "$(LOCAL_PATH)/current.txt" to "$(LATEST_VNDK_LIB_LIST)" and commit a CL for release branch)
endif
- else
- UNFROZEN_VNDK := true
+ UNFROZEN_VNDK :=
endif
endif
diff --git a/target/product/sdk_phone_arm64.mk b/target/product/sdk_phone_arm64.mk
deleted file mode 100644
index c16c403..0000000
--- a/target/product/sdk_phone_arm64.mk
+++ /dev/null
@@ -1,66 +0,0 @@
-#
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-PRODUCT_USE_DYNAMIC_PARTITIONS := true
-
-# This is a build configuration for a full-featured build of the
-# Open-Source part of the tree. It's geared toward a US-centric
-# build quite specifically for the emulator, and might not be
-# entirely appropriate to inherit from for on-device configurations.
-
-# Enable mainline checking for exact this product name
-ifeq (sdk_phone_arm64,$(TARGET_PRODUCT))
-PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
-endif
-
-#
-# All components inherited here go to system image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
-
-#
-# All components inherited here go to system_ext image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
-
-#
-# All components inherited here go to product image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
-
-#
-# All components inherited here go to vendor or vendor_boot image
-#
-$(call inherit-product-if-exists, device/generic/goldfish/arm64-vendor.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/board/emulator_arm64/device.mk)
-
-# keep this apk for sdk targets for now
-PRODUCT_PACKAGES += \
- EmulatorSmokeTests
-
-# Overrides
-PRODUCT_BRAND := Android
-PRODUCT_NAME := sdk_phone_arm64
-PRODUCT_DEVICE := emulator_arm64
-PRODUCT_MODEL := Android SDK built for arm64
-# Disable <uses-library> checks for SDK product. It lacks some libraries (e.g.
-# RadioConfigLib), which makes it impossible to translate their module names to
-# library name, so the check fails.
-PRODUCT_BROKEN_VERIFY_USES_LIBRARIES := true
-
-PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API := true
diff --git a/target/product/sdk_phone_x86_64.mk b/target/product/sdk_phone_x86_64.mk
deleted file mode 100644
index b2e14a5..0000000
--- a/target/product/sdk_phone_x86_64.mk
+++ /dev/null
@@ -1,62 +0,0 @@
-#
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-PRODUCT_USE_DYNAMIC_PARTITIONS := true
-
-# This is a build configuration for a full-featured build of the
-# Open-Source part of the tree. It's geared toward a US-centric
-# build quite specifically for the emulator, and might not be
-# entirely appropriate to inherit from for on-device configurations.
-
-#
-# All components inherited here go to system image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
-
-# Enable mainline checking for exact this product name
-ifeq (sdk_phone_x86_64,$(TARGET_PRODUCT))
-PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
-endif
-
-#
-# All components inherited here go to system_ext image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
-
-#
-# All components inherited here go to product image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
-
-#
-# All components inherited here go to vendor image
-#
-$(call inherit-product-if-exists, device/generic/goldfish/x86_64-vendor.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/board/emulator_x86_64/device.mk)
-
-# Overrides
-PRODUCT_BRAND := Android
-PRODUCT_NAME := sdk_phone_x86_64
-PRODUCT_DEVICE := emulator_x86_64
-PRODUCT_MODEL := Android SDK built for x86_64
-# Disable <uses-library> checks for SDK product. It lacks some libraries (e.g.
-# RadioConfigLib), which makes it impossible to translate their module names to
-# library name, so the check fails.
-PRODUCT_BROKEN_VERIFY_USES_LIBRARIES := true
-
-PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API := true
diff --git a/tools/aconfig/Android.bp b/tools/aconfig/Android.bp
index 02fc57c..425d8a9 100644
--- a/tools/aconfig/Android.bp
+++ b/tools/aconfig/Android.bp
@@ -13,6 +13,11 @@
type: "lite",
},
sdk_version: "current",
+ min_sdk_version: "UpsideDownCake",
+ apex_available: [
+ "com.android.configinfrastructure",
+ "//apex_available:platform",
+ ]
}
java_library_host {
diff --git a/tools/aconfig/fake_device_config/src/android/provider/DeviceConfig.java b/tools/aconfig/fake_device_config/src/android/provider/DeviceConfig.java
index 50b6289..dbb07ac 100644
--- a/tools/aconfig/fake_device_config/src/android/provider/DeviceConfig.java
+++ b/tools/aconfig/fake_device_config/src/android/provider/DeviceConfig.java
@@ -26,4 +26,14 @@
public static boolean getBoolean(String ns, String name, boolean def) {
return false;
}
+
+ public static Properties getProperties(String namespace, String... names) {
+ return new Properties();
+ }
+
+ public static class Properties {
+ public boolean getBoolean(String name, boolean def) {
+ return false;
+ }
+ }
}
diff --git a/tools/aconfig/overrideflags/overrideflags.py b/tools/aconfig/overrideflags/overrideflags.py
index 480af08..e355c21 100644
--- a/tools/aconfig/overrideflags/overrideflags.py
+++ b/tools/aconfig/overrideflags/overrideflags.py
@@ -28,12 +28,12 @@
_VALUE_LIST_TEMPLATE: str = """
-VALUE_LIST_LIST = [{}]
+ACONFIG_VALUES_LIST_LOCAL = [{}]
"""
_ACONFIG_VALUES_TEMPLATE: str = """
aconfig_values {{
- name: "aconfig-local-{}",
+ name: "{}",
package: "{}",
srcs: [
"{}",
@@ -41,6 +41,8 @@
}}
"""
+_ACONFIG_VALUES_NAME_SUFFIX: str = "aconfig-local-override-{}"
+
_PACKAGE_REGEX = re.compile(r"^package\:\s*\"([\w\d\.]+)\"")
_ANDROID_BP_FILE_NAME = r"Android.bp"
@@ -61,13 +63,17 @@
def _create_android_bp(packages: set[str], file_name: str) -> str:
android_bp = ""
- value_list = ",\n ".join(map(lambda n: "aconfig-local-" + n, packages))
+ value_list = ",\n ".join(
+ map(f'"{_ACONFIG_VALUES_NAME_SUFFIX}"'.format, packages)
+ )
if value_list:
value_list = "\n " + value_list + "\n"
android_bp += _VALUE_LIST_TEMPLATE.format(value_list) + "\n"
for package in packages:
- android_bp += _ACONFIG_VALUES_TEMPLATE.format(package, package, file_name)
+ android_bp += _ACONFIG_VALUES_TEMPLATE.format(
+ _ACONFIG_VALUES_NAME_SUFFIX.format(package), package, file_name
+ )
android_bp += "\n"
return android_bp
@@ -78,8 +84,18 @@
out.mkdir(parents=True, exist_ok=True)
output = out.joinpath(_ANDROID_BP_FILE_NAME)
- with open(output, "w+") as f:
- f.write(new_android_bp)
+ with open(output, "r+", encoding="utf8") as file:
+ lines = []
+ for line in file:
+ line = line.rstrip("\n")
+ if line.startswith("ACONFIG_VALUES_LIST_LOCAL"):
+ break
+ lines.append(line)
+ # Overwrite the file with the updated contents.
+ file.seek(0)
+ file.truncate()
+ file.write("\n".join(lines))
+ file.write(new_android_bp)
def main(args):
diff --git a/tools/aconfig/src/codegen_java.rs b/tools/aconfig/src/codegen_java.rs
index 43c2ecf..05ee0d7 100644
--- a/tools/aconfig/src/codegen_java.rs
+++ b/tools/aconfig/src/codegen_java.rs
@@ -16,6 +16,7 @@
use anyhow::Result;
use serde::Serialize;
+use std::collections::BTreeSet;
use std::path::PathBuf;
use tinytemplate::TinyTemplate;
@@ -31,12 +32,19 @@
where
I: Iterator<Item = &'a ProtoParsedFlag>,
{
- let class_elements: Vec<ClassElement> =
- parsed_flags_iter.map(|pf| create_class_element(package, pf)).collect();
- let is_read_write = class_elements.iter().any(|elem| elem.is_read_write);
+ let flag_elements: Vec<FlagElement> =
+ parsed_flags_iter.map(|pf| create_flag_element(package, pf)).collect();
+ let properties_set: BTreeSet<String> =
+ flag_elements.iter().map(|fe| format_property_name(&fe.device_config_namespace)).collect();
+ let is_read_write = flag_elements.iter().any(|elem| elem.is_read_write);
let is_test_mode = codegen_mode == CodegenMode::Test;
- let context =
- Context { class_elements, is_test_mode, is_read_write, package_name: package.to_string() };
+ let context = Context {
+ flag_elements,
+ is_test_mode,
+ is_read_write,
+ properties_set,
+ package_name: package.to_string(),
+ };
let mut template = TinyTemplate::new();
template.add_template("Flags.java", include_str!("../templates/Flags.java.template"))?;
template.add_template(
@@ -66,49 +74,62 @@
#[derive(Serialize)]
struct Context {
- pub class_elements: Vec<ClassElement>,
+ pub flag_elements: Vec<FlagElement>,
pub is_test_mode: bool,
pub is_read_write: bool,
+ pub properties_set: BTreeSet<String>,
pub package_name: String,
}
#[derive(Serialize)]
-struct ClassElement {
+struct FlagElement {
pub default_value: bool,
pub device_config_namespace: String,
pub device_config_flag: String,
pub flag_name_constant_suffix: String,
pub is_read_write: bool,
pub method_name: String,
+ pub properties: String,
}
-fn create_class_element(package: &str, pf: &ProtoParsedFlag) -> ClassElement {
+fn create_flag_element(package: &str, pf: &ProtoParsedFlag) -> FlagElement {
let device_config_flag = codegen::create_device_config_ident(package, pf.name())
.expect("values checked at flag parse time");
- ClassElement {
+ FlagElement {
default_value: pf.state() == ProtoFlagState::ENABLED,
device_config_namespace: pf.namespace().to_string(),
device_config_flag,
flag_name_constant_suffix: pf.name().to_ascii_uppercase(),
is_read_write: pf.permission() == ProtoFlagPermission::READ_WRITE,
method_name: format_java_method_name(pf.name()),
+ properties: format_property_name(pf.namespace()),
}
}
fn format_java_method_name(flag_name: &str) -> String {
- flag_name
- .split('_')
- .filter(|&word| !word.is_empty())
- .enumerate()
- .map(|(index, word)| {
- if index == 0 {
- word.to_ascii_lowercase()
- } else {
- word[0..1].to_ascii_uppercase() + &word[1..].to_ascii_lowercase()
- }
- })
- .collect::<Vec<String>>()
- .join("")
+ let splits: Vec<&str> = flag_name.split('_').filter(|&word| !word.is_empty()).collect();
+ if splits.len() == 1 {
+ let name = splits[0];
+ name[0..1].to_ascii_lowercase() + &name[1..]
+ } else {
+ splits
+ .iter()
+ .enumerate()
+ .map(|(index, word)| {
+ if index == 0 {
+ word.to_ascii_lowercase()
+ } else {
+ word[0..1].to_ascii_uppercase() + &word[1..].to_ascii_lowercase()
+ }
+ })
+ .collect::<Vec<String>>()
+ .join("")
+ }
+}
+
+fn format_property_name(property_name: &str) -> String {
+ let name = format_java_method_name(property_name);
+ format!("mProperties{}{}", &name[0..1].to_ascii_uppercase(), &name[1..])
}
#[cfg(test)]
@@ -265,8 +286,10 @@
// TODO(b/303773055): Remove the annotation after access issue is resolved.
import android.compat.annotation.UnsupportedAppUsage;
import android.provider.DeviceConfig;
+ import android.provider.DeviceConfig.Properties;
/** @hide */
public final class FeatureFlagsImpl implements FeatureFlags {
+ private Properties mPropertiesAconfigTest;
@Override
@UnsupportedAppUsage
public boolean disabledRo() {
@@ -275,11 +298,18 @@
@Override
@UnsupportedAppUsage
public boolean disabledRw() {
- return getValue(
- "aconfig_test",
- "com.android.aconfig.test.disabled_rw",
- false
- );
+ if (mPropertiesAconfigTest == null) {
+ mPropertiesAconfigTest =
+ getProperties(
+ "aconfig_test",
+ "com.android.aconfig.test.disabled_rw"
+ );
+ }
+ return mPropertiesAconfigTest
+ .getBoolean(
+ "com.android.aconfig.test.disabled_rw",
+ false
+ );
}
@Override
@UnsupportedAppUsage
@@ -294,32 +324,36 @@
@Override
@UnsupportedAppUsage
public boolean enabledRw() {
- return getValue(
- "aconfig_test",
- "com.android.aconfig.test.enabled_rw",
- true
- );
- }
- private boolean getValue(String nameSpace,
- String flagName, boolean defaultValue) {
- boolean value = defaultValue;
- try {
- value = DeviceConfig.getBoolean(
- nameSpace,
- flagName,
- defaultValue
+ if (mPropertiesAconfigTest == null) {
+ mPropertiesAconfigTest =
+ getProperties(
+ "aconfig_test",
+ "com.android.aconfig.test.enabled_rw"
+ );
+ }
+ return mPropertiesAconfigTest
+ .getBoolean(
+ "com.android.aconfig.test.enabled_rw",
+ true
);
+ }
+ private Properties getProperties(
+ String namespace,
+ String flagName) {
+ Properties properties = null;
+ try {
+ properties = DeviceConfig.getProperties(namespace);
} catch (NullPointerException e) {
throw new RuntimeException(
- "Cannot read value of flag " + flagName + " from DeviceConfig. " +
- "It could be that the code using flag executed " +
- "before SettingsProvider initialization. " +
- "Please use fixed read-only flag by adding " +
- "is_fixed_read_only: true in flag declaration.",
+ "Cannot read value of flag " + flagName + " from DeviceConfig. "
+ + "It could be that the code using flag executed "
+ + "before SettingsProvider initialization. "
+ + "Please use fixed read-only flag by adding "
+ + "is_fixed_read_only: true in flag declaration.",
e
);
}
- return value;
+ return properties;
}
}
"#;
@@ -441,9 +475,45 @@
#[test]
fn test_format_java_method_name() {
- let input = "____some_snake___name____";
let expected = "someSnakeName";
+ let input = "____some_snake___name____";
let formatted_name = format_java_method_name(input);
assert_eq!(expected, formatted_name);
+
+ let input = "someSnakeName";
+ let formatted_name = format_java_method_name(input);
+ assert_eq!(expected, formatted_name);
+
+ let input = "SomeSnakeName";
+ let formatted_name = format_java_method_name(input);
+ assert_eq!(expected, formatted_name);
+
+ let input = "SomeSnakeName_";
+ let formatted_name = format_java_method_name(input);
+ assert_eq!(expected, formatted_name);
+
+ let input = "_SomeSnakeName";
+ let formatted_name = format_java_method_name(input);
+ assert_eq!(expected, formatted_name);
+ }
+
+ #[test]
+ fn test_format_property_name() {
+ let expected = "mPropertiesSomeSnakeName";
+ let input = "____some_snake___name____";
+ let formatted_name = format_property_name(input);
+ assert_eq!(expected, formatted_name);
+
+ let input = "someSnakeName";
+ let formatted_name = format_property_name(input);
+ assert_eq!(expected, formatted_name);
+
+ let input = "SomeSnakeName";
+ let formatted_name = format_property_name(input);
+ assert_eq!(expected, formatted_name);
+
+ let input = "SomeSnakeName_";
+ let formatted_name = format_property_name(input);
+ assert_eq!(expected, formatted_name);
}
}
diff --git a/tools/aconfig/templates/FakeFeatureFlagsImpl.java.template b/tools/aconfig/templates/FakeFeatureFlagsImpl.java.template
index 72a896f..933d6a7 100644
--- a/tools/aconfig/templates/FakeFeatureFlagsImpl.java.template
+++ b/tools/aconfig/templates/FakeFeatureFlagsImpl.java.template
@@ -11,7 +11,7 @@
resetAll();
}
-{{ for item in class_elements}}
+{{ for item in flag_elements}}
@Override
@UnsupportedAppUsage
public boolean {item.method_name}() \{
@@ -41,7 +41,7 @@
private Map<String, Boolean> mFlagMap = new HashMap<>(
Map.ofEntries(
- {{-for item in class_elements}}
+ {{-for item in flag_elements}}
Map.entry(Flags.FLAG_{item.flag_name_constant_suffix}, false)
{{ -if not @last }},{{ endif }}
{{ -endfor }}
diff --git a/tools/aconfig/templates/FeatureFlags.java.template b/tools/aconfig/templates/FeatureFlags.java.template
index 02305e6..da850ae 100644
--- a/tools/aconfig/templates/FeatureFlags.java.template
+++ b/tools/aconfig/templates/FeatureFlags.java.template
@@ -4,7 +4,7 @@
/** @hide */
public interface FeatureFlags \{
-{{ for item in class_elements}}
+{{ for item in flag_elements }}
{{ -if not item.is_read_write }}
{{ -if item.default_value }}
@com.android.aconfig.annotations.AssumeTrueForR8
diff --git a/tools/aconfig/templates/FeatureFlagsImpl.java.template b/tools/aconfig/templates/FeatureFlagsImpl.java.template
index 1620dfe..ff089df 100644
--- a/tools/aconfig/templates/FeatureFlagsImpl.java.template
+++ b/tools/aconfig/templates/FeatureFlagsImpl.java.template
@@ -4,45 +4,58 @@
{{ if not is_test_mode }}
{{ if is_read_write- }}
import android.provider.DeviceConfig;
+import android.provider.DeviceConfig.Properties;
{{ endif }}
/** @hide */
public final class FeatureFlagsImpl implements FeatureFlags \{
-{{ for item in class_elements}}
+{{ if is_read_write- }}
+{{ for properties in properties_set }}
+ private Properties {properties};
+{{ endfor }}
+{{ endif- }}
+
+{{ for flag in flag_elements }}
@Override
@UnsupportedAppUsage
- public boolean {item.method_name}() \{
- {{ -if item.is_read_write }}
- return getValue(
- "{item.device_config_namespace}",
- "{item.device_config_flag}",
- {item.default_value}
- );
+ public boolean {flag.method_name}() \{
+ {{ -if flag.is_read_write }}
+ if ({flag.properties} == null) \{
+ {flag.properties} =
+ getProperties(
+ "{flag.device_config_namespace}",
+ "{flag.device_config_flag}"
+ );
+ }
+ return {flag.properties}
+ .getBoolean(
+ "{flag.device_config_flag}",
+ {flag.default_value}
+ );
{{ else }}
- return {item.default_value};
+ return {flag.default_value};
{{ endif- }}
}
{{ endfor }}
-{{ if is_read_write- }}
- private boolean getValue(String nameSpace,
- String flagName, boolean defaultValue) \{
- boolean value = defaultValue;
+
+{{ -if is_read_write }}
+ private Properties getProperties(
+ String namespace,
+ String flagName) \{
+ Properties properties = null;
try \{
- value = DeviceConfig.getBoolean(
- nameSpace,
- flagName,
- defaultValue
- );
+ properties = DeviceConfig.getProperties(namespace);
} catch (NullPointerException e) \{
throw new RuntimeException(
- "Cannot read value of flag " + flagName + " from DeviceConfig. " +
- "It could be that the code using flag executed " +
- "before SettingsProvider initialization. " +
- "Please use fixed read-only flag by adding " +
- "is_fixed_read_only: true in flag declaration.",
+ "Cannot read value of flag " + flagName + " from DeviceConfig. "
+ + "It could be that the code using flag executed "
+ + "before SettingsProvider initialization. "
+ + "Please use fixed read-only flag by adding "
+ + "is_fixed_read_only: true in flag declaration.",
e
);
}
- return value;
+
+ return properties;
}
{{ endif- }}
}
@@ -50,10 +63,10 @@
{#- Generate only stub if in test mode #}
/** @hide */
public final class FeatureFlagsImpl implements FeatureFlags \{
-{{ for item in class_elements}}
+{{ for flag in flag_elements }}
@Override
@UnsupportedAppUsage
- public boolean {item.method_name}() \{
+ public boolean {flag.method_name}() \{
throw new UnsupportedOperationException(
"Method is not implemented.");
}
diff --git a/tools/aconfig/templates/Flags.java.template b/tools/aconfig/templates/Flags.java.template
index 66c4c5a..cf6604c 100644
--- a/tools/aconfig/templates/Flags.java.template
+++ b/tools/aconfig/templates/Flags.java.template
@@ -5,11 +5,11 @@
/** @hide */
public final class Flags \{
-{{- for item in class_elements}}
+{{- for item in flag_elements}}
/** @hide */
public static final String FLAG_{item.flag_name_constant_suffix} = "{item.device_config_flag}";
{{- endfor }}
-{{ for item in class_elements}}
+{{ for item in flag_elements}}
{{ -if not item.is_read_write }}
{{ -if item.default_value }}
@com.android.aconfig.annotations.AssumeTrueForR8
diff --git a/tools/auto_gen_test_config_test.py b/tools/auto_gen_test_config_test.py
index ce97723..b7ef0b0 100644
--- a/tools/auto_gen_test_config_test.py
+++ b/tools/auto_gen_test_config_test.py
@@ -93,7 +93,7 @@
</target_preparer>
<test class="com.android.tradefed.testtype.AndroidJUnitTest" >
- {EXTRA_TEST_RUNNER_CONFIGS}<option name="package" value="com.android.my.tests.x" />
+ <option name="package" value="com.android.my.tests.x" />
<option name="runner" value="androidx.test.runner.AndroidJUnitRunner" />
</test>
</configuration>
@@ -125,7 +125,7 @@
</target_preparer>
<test class="com.android.tradefed.testtype.InstrumentationTest" >
- {EXTRA_TEST_RUNNER_CONFIGS}<option name="package" value="com.android.my.tests.x" />
+ <option name="package" value="com.android.my.tests.x" />
<option name="runner" value="android.test.InstrumentationTestRunner" />
</test>
</configuration>
@@ -178,7 +178,7 @@
</target_preparer>
<test class="com.android.tradefed.testtype.{TEST_TYPE}" >
- {EXTRA_TEST_RUNNER_CONFIGS}<option name="package" value="{PACKAGE}" />
+ <option name="package" value="{PACKAGE}" />
<option name="runner" value="{RUNNER}" />
</test>
</configuration>
diff --git a/tools/overrideflags.sh b/tools/overrideflags.sh
index ff311dd..b8605dc 100755
--- a/tools/overrideflags.sh
+++ b/tools/overrideflags.sh
@@ -14,7 +14,7 @@
# limitations under the License.
-source $(cd $(dirname $BASH_SOURCE) &> /dev/null && pwd)/../../make/shell_utils.sh
+source $(cd $(dirname $BASH_SOURCE) &> /dev/null && pwd)/../shell_utils.sh
require_top
function print_help() {
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index ee266b7..ad014af 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -483,8 +483,13 @@
defaults: ["releasetools_binary_defaults"],
srcs: [
"make_recovery_patch.py",
+ "non_ab_ota.py",
+ "edify_generator.py",
+ "check_target_files_vintf.py",
],
libs: [
+ "ota_utils_lib",
+ "ota_metadata_proto",
"releasetools_common",
],
}
diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py
index 33624f5..e7d3a18 100755
--- a/tools/releasetools/check_target_files_vintf.py
+++ b/tools/releasetools/check_target_files_vintf.py
@@ -31,7 +31,6 @@
import zipfile
import common
-from apex_manifest import ParseApexManifest
logger = logging.getLogger(__name__)
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 8ee983f..462c3bf 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -15,7 +15,6 @@
from __future__ import print_function
import base64
-import collections
import copy
import datetime
import errno
@@ -23,7 +22,6 @@
import getopt
import getpass
import gzip
-import imp
import json
import logging
import logging.config
@@ -36,17 +34,13 @@
import stat
import sys
import tempfile
-import threading
-import time
import zipfile
from dataclasses import dataclass
-from genericpath import isdir
from hashlib import sha1, sha256
import images
-import rangelib
import sparse_img
-from blockimgdiff import BlockImageDiff
+
logger = logging.getLogger(__name__)
@@ -155,35 +149,6 @@
self.partition, self.rollback_index_location, self.pubkey_path)
-class ErrorCode(object):
- """Define error_codes for failures that happen during the actual
- update package installation.
-
- Error codes 0-999 are reserved for failures before the package
- installation (i.e. low battery, package verification failure).
- Detailed code in 'bootable/recovery/error_code.h' """
-
- SYSTEM_VERIFICATION_FAILURE = 1000
- SYSTEM_UPDATE_FAILURE = 1001
- SYSTEM_UNEXPECTED_CONTENTS = 1002
- SYSTEM_NONZERO_CONTENTS = 1003
- SYSTEM_RECOVER_FAILURE = 1004
- VENDOR_VERIFICATION_FAILURE = 2000
- VENDOR_UPDATE_FAILURE = 2001
- VENDOR_UNEXPECTED_CONTENTS = 2002
- VENDOR_NONZERO_CONTENTS = 2003
- VENDOR_RECOVER_FAILURE = 2004
- OEM_PROP_MISMATCH = 3000
- FINGERPRINT_MISMATCH = 3001
- THUMBPRINT_MISMATCH = 3002
- OLDER_BUILD = 3003
- DEVICE_MISMATCH = 3004
- BAD_PATCH_FILE = 3005
- INSUFFICIENT_CACHE_SPACE = 3006
- TUNE_PARTITION_FAILURE = 3007
- APPLY_PATCH_FAILURE = 3008
-
-
class ExternalError(RuntimeError):
pass
@@ -210,7 +175,7 @@
'': {
'handlers': ['default'],
'propagate': True,
- 'level': 'WARNING',
+ 'level': 'NOTSET',
}
}
}
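For context on the level change above: with the root logger at WARNING, INFO and DEBUG records are dropped at the logger before any handler sees them, while NOTSET defers filtering entirely to the handlers' own levels. A small, self-contained illustration (the handler setup is simplified and is not the releasetools config):

    import logging
    import logging.config

    logging.config.dictConfig({
        "version": 1,
        "handlers": {
            "default": {"class": "logging.StreamHandler", "level": "DEBUG"},
        },
        # NOTSET on the root logger lets every record through to the handlers,
        # which then apply their own level filters.
        "root": {"handlers": ["default"], "level": "NOTSET"},
    })

    logging.getLogger(__name__).debug("emitted; a WARNING root level would have dropped this")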
@@ -1235,26 +1200,16 @@
system_root_image = info_dict.get('system_root_image') == 'true'
if info_dict.get('no_recovery') != 'true':
recovery_fstab_path = 'RECOVERY/RAMDISK/system/etc/recovery.fstab'
- if isinstance(input_file, zipfile.ZipFile):
- if recovery_fstab_path not in input_file.namelist():
- recovery_fstab_path = 'RECOVERY/RAMDISK/etc/recovery.fstab'
- else:
- path = os.path.join(input_file, *recovery_fstab_path.split('/'))
- if not os.path.exists(path):
- recovery_fstab_path = 'RECOVERY/RAMDISK/etc/recovery.fstab'
+ if not DoesInputFileContain(input_file, recovery_fstab_path):
+ recovery_fstab_path = 'RECOVERY/RAMDISK/etc/recovery.fstab'
return LoadRecoveryFSTab(
read_helper, info_dict['fstab_version'], recovery_fstab_path,
system_root_image)
if info_dict.get('recovery_as_boot') == 'true':
recovery_fstab_path = 'BOOT/RAMDISK/system/etc/recovery.fstab'
- if isinstance(input_file, zipfile.ZipFile):
- if recovery_fstab_path not in input_file.namelist():
- recovery_fstab_path = 'BOOT/RAMDISK/etc/recovery.fstab'
- else:
- path = os.path.join(input_file, *recovery_fstab_path.split('/'))
- if not os.path.exists(path):
- recovery_fstab_path = 'BOOT/RAMDISK/etc/recovery.fstab'
+ if not DoesInputFileContain(input_file, recovery_fstab_path):
+ recovery_fstab_path = 'BOOT/RAMDISK/etc/recovery.fstab'
return LoadRecoveryFSTab(
read_helper, info_dict['fstab_version'], recovery_fstab_path,
system_root_image)
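DoesInputFileContain itself is not shown in this diff; judging from the branches it replaces, it presumably accepts either an open target-files ZipFile or an extracted directory and checks whether the given entry exists. A hedged sketch of such a helper (name and signature assumed, not the actual releasetools implementation):

    import os
    import zipfile

    def does_input_file_contain(input_file, archive_path):
        # Mirror of the two removed branches: zip archives are checked via the
        # name list, extracted directories via the filesystem.
        if isinstance(input_file, zipfile.ZipFile):
            return archive_path in input_file.namelist()
        path = os.path.join(input_file, *archive_path.split("/"))
        return os.path.exists(path)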
@@ -1947,7 +1902,18 @@
cmd = [avbtool, "add_hash_footer", "--image", image_path,
"--partition_size", str(part_size), "--partition_name",
partition_name]
- AppendAVBSigningArgs(cmd, partition_name)
+ # Use sha256 of the kernel as salt for reproducible builds
+ with tempfile.TemporaryDirectory() as tmpdir:
+ RunAndCheckOutput(["unpack_bootimg", "--boot_img", image_path, "--out", tmpdir])
+ for filename in ["kernel", "ramdisk", "vendor_ramdisk00"]:
+ path = os.path.join(tmpdir, filename)
+ if os.path.exists(path) and os.path.getsize(path):
+ print("Using {} as salt for avb footer of {}".format(
+ filename, partition_name))
+ with open(path, "rb") as fp:
+ salt = sha256(fp.read()).hexdigest()
+ break
+ AppendAVBSigningArgs(cmd, partition_name, salt)
args = info_dict.get("avb_" + partition_name + "_add_hash_footer_args")
if args and args.strip():
split_args = ResolveAVBSigningPathArgs(shlex.split(args))
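The change above makes the hash footer reproducible by deriving avbtool's salt from the image contents rather than a run-dependent default. A sketch of just the salt derivation, assuming the boot image has already been unpacked with unpack_bootimg; the helper name is illustrative, and unlike the hunk it returns None explicitly when no candidate file is present:

    import hashlib
    import os

    def deterministic_salt(unpacked_dir,
                           candidates=("kernel", "ramdisk", "vendor_ramdisk00")):
        # Hash the first existing, non-empty candidate so identical inputs always
        # yield the same salt, and therefore the same AVB hash footer.
        for name in candidates:
            path = os.path.join(unpacked_dir, name)
            if os.path.exists(path) and os.path.getsize(path):
                with open(path, "rb") as fp:
                    return hashlib.sha256(fp.read()).hexdigest()
        return None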
@@ -2653,7 +2619,9 @@
device = p.device
if "/" in device:
device = device[device.rfind("/")+1:]
- limit = info_dict.get(device + "_size")
+ limit = info_dict.get(device + "_size", 0)
+ if isinstance(limit, str):
+ limit = int(limit, 0)
if not fs_type or not limit:
return
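int(limit, 0) parses the size string with the base inferred from its prefix, so decimal and hex values both work without special-casing. For illustration (the literal values here are made up):

    assert int("4096", 0) == 4096        # plain decimal string
    assert int("0x10000", 0) == 65536    # hex string with a 0x prefix
    assert int("0o17", 0) == 15          # octal prefix is accepted as well
    # Note: int("017", 0) raises ValueError; legacy leading-zero octals are rejected.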
@@ -3136,107 +3104,6 @@
zipfile.ZIP64_LIMIT = saved_zip64_limit
-class DeviceSpecificParams(object):
- module = None
-
- def __init__(self, **kwargs):
- """Keyword arguments to the constructor become attributes of this
- object, which is passed to all functions in the device-specific
- module."""
- for k, v in kwargs.items():
- setattr(self, k, v)
- self.extras = OPTIONS.extras
-
- if self.module is None:
- path = OPTIONS.device_specific
- if not path:
- return
- try:
- if os.path.isdir(path):
- info = imp.find_module("releasetools", [path])
- else:
- d, f = os.path.split(path)
- b, x = os.path.splitext(f)
- if x == ".py":
- f = b
- info = imp.find_module(f, [d])
- logger.info("loaded device-specific extensions from %s", path)
- self.module = imp.load_module("device_specific", *info)
- except ImportError:
- logger.info("unable to load device-specific module; assuming none")
-
- def _DoCall(self, function_name, *args, **kwargs):
- """Call the named function in the device-specific module, passing
- the given args and kwargs. The first argument to the call will be
- the DeviceSpecific object itself. If there is no module, or the
- module does not define the function, return the value of the
- 'default' kwarg (which itself defaults to None)."""
- if self.module is None or not hasattr(self.module, function_name):
- return kwargs.get("default")
- return getattr(self.module, function_name)(*((self,) + args), **kwargs)
-
- def FullOTA_Assertions(self):
- """Called after emitting the block of assertions at the top of a
- full OTA package. Implementations can add whatever additional
- assertions they like."""
- return self._DoCall("FullOTA_Assertions")
-
- def FullOTA_InstallBegin(self):
- """Called at the start of full OTA installation."""
- return self._DoCall("FullOTA_InstallBegin")
-
- def FullOTA_GetBlockDifferences(self):
- """Called during full OTA installation and verification.
- Implementation should return a list of BlockDifference objects describing
- the update on each additional partitions.
- """
- return self._DoCall("FullOTA_GetBlockDifferences")
-
- def FullOTA_InstallEnd(self):
- """Called at the end of full OTA installation; typically this is
- used to install the image for the device's baseband processor."""
- return self._DoCall("FullOTA_InstallEnd")
-
- def IncrementalOTA_Assertions(self):
- """Called after emitting the block of assertions at the top of an
- incremental OTA package. Implementations can add whatever
- additional assertions they like."""
- return self._DoCall("IncrementalOTA_Assertions")
-
- def IncrementalOTA_VerifyBegin(self):
- """Called at the start of the verification phase of incremental
- OTA installation; additional checks can be placed here to abort
- the script before any changes are made."""
- return self._DoCall("IncrementalOTA_VerifyBegin")
-
- def IncrementalOTA_VerifyEnd(self):
- """Called at the end of the verification phase of incremental OTA
- installation; additional checks can be placed here to abort the
- script before any changes are made."""
- return self._DoCall("IncrementalOTA_VerifyEnd")
-
- def IncrementalOTA_InstallBegin(self):
- """Called at the start of incremental OTA installation (after
- verification is complete)."""
- return self._DoCall("IncrementalOTA_InstallBegin")
-
- def IncrementalOTA_GetBlockDifferences(self):
- """Called during incremental OTA installation and verification.
- Implementation should return a list of BlockDifference objects describing
- the update on each additional partitions.
- """
- return self._DoCall("IncrementalOTA_GetBlockDifferences")
-
- def IncrementalOTA_InstallEnd(self):
- """Called at the end of incremental OTA installation; typically
- this is used to install the image for the device's baseband
- processor."""
- return self._DoCall("IncrementalOTA_InstallEnd")
-
- def VerifyOTA_Assertions(self):
- return self._DoCall("VerifyOTA_Assertions")
-
-
class File(object):
def __init__(self, name, data, compress_size=None):
self.name = name
@@ -3266,454 +3133,11 @@
ZipWriteStr(z, self.name, self.data, compress_type=compression)
-DIFF_PROGRAM_BY_EXT = {
- ".gz": "imgdiff",
- ".zip": ["imgdiff", "-z"],
- ".jar": ["imgdiff", "-z"],
- ".apk": ["imgdiff", "-z"],
- ".img": "imgdiff",
-}
-
-
-class Difference(object):
- def __init__(self, tf, sf, diff_program=None):
- self.tf = tf
- self.sf = sf
- self.patch = None
- self.diff_program = diff_program
-
- def ComputePatch(self):
- """Compute the patch (as a string of data) needed to turn sf into
- tf. Returns the same tuple as GetPatch()."""
-
- tf = self.tf
- sf = self.sf
-
- if self.diff_program:
- diff_program = self.diff_program
- else:
- ext = os.path.splitext(tf.name)[1]
- diff_program = DIFF_PROGRAM_BY_EXT.get(ext, "bsdiff")
-
- ttemp = tf.WriteToTemp()
- stemp = sf.WriteToTemp()
-
- ext = os.path.splitext(tf.name)[1]
-
- try:
- ptemp = tempfile.NamedTemporaryFile()
- if isinstance(diff_program, list):
- cmd = copy.copy(diff_program)
- else:
- cmd = [diff_program]
- cmd.append(stemp.name)
- cmd.append(ttemp.name)
- cmd.append(ptemp.name)
- p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- err = []
-
- def run():
- _, e = p.communicate()
- if e:
- err.append(e)
- th = threading.Thread(target=run)
- th.start()
- th.join(timeout=300) # 5 mins
- if th.is_alive():
- logger.warning("diff command timed out")
- p.terminate()
- th.join(5)
- if th.is_alive():
- p.kill()
- th.join()
-
- if p.returncode != 0:
- logger.warning("Failure running %s:\n%s\n", cmd, "".join(err))
- self.patch = None
- return None, None, None
- diff = ptemp.read()
- finally:
- ptemp.close()
- stemp.close()
- ttemp.close()
-
- self.patch = diff
- return self.tf, self.sf, self.patch
-
- def GetPatch(self):
- """Returns a tuple of (target_file, source_file, patch_data).
-
- patch_data may be None if ComputePatch hasn't been called, or if
- computing the patch failed.
- """
- return self.tf, self.sf, self.patch
-
-
-def ComputeDifferences(diffs):
- """Call ComputePatch on all the Difference objects in 'diffs'."""
- logger.info("%d diffs to compute", len(diffs))
-
- # Do the largest files first, to try and reduce the long-pole effect.
- by_size = [(i.tf.size, i) for i in diffs]
- by_size.sort(reverse=True)
- by_size = [i[1] for i in by_size]
-
- lock = threading.Lock()
- diff_iter = iter(by_size) # accessed under lock
-
- def worker():
- try:
- lock.acquire()
- for d in diff_iter:
- lock.release()
- start = time.time()
- d.ComputePatch()
- dur = time.time() - start
- lock.acquire()
-
- tf, sf, patch = d.GetPatch()
- if sf.name == tf.name:
- name = tf.name
- else:
- name = "%s (%s)" % (tf.name, sf.name)
- if patch is None:
- logger.error("patching failed! %40s", name)
- else:
- logger.info(
- "%8.2f sec %8d / %8d bytes (%6.2f%%) %s", dur, len(patch),
- tf.size, 100.0 * len(patch) / tf.size, name)
- lock.release()
- except Exception:
- logger.exception("Failed to compute diff from worker")
- raise
-
- # start worker threads; wait for them all to finish.
- threads = [threading.Thread(target=worker)
- for i in range(OPTIONS.worker_threads)]
- for th in threads:
- th.start()
- while threads:
- threads.pop().join()
-
-
-class BlockDifference(object):
- def __init__(self, partition, tgt, src=None, check_first_block=False,
- version=None, disable_imgdiff=False):
- self.tgt = tgt
- self.src = src
- self.partition = partition
- self.check_first_block = check_first_block
- self.disable_imgdiff = disable_imgdiff
-
- if version is None:
- version = max(
- int(i) for i in
- OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
- assert version >= 3
- self.version = version
-
- b = BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
- version=self.version,
- disable_imgdiff=self.disable_imgdiff)
- self.path = os.path.join(MakeTempDir(), partition)
- b.Compute(self.path)
- self._required_cache = b.max_stashed_size
- self.touched_src_ranges = b.touched_src_ranges
- self.touched_src_sha1 = b.touched_src_sha1
-
- # On devices with dynamic partitions, for new partitions,
- # src is None but OPTIONS.source_info_dict is not.
- if OPTIONS.source_info_dict is None:
- is_dynamic_build = OPTIONS.info_dict.get(
- "use_dynamic_partitions") == "true"
- is_dynamic_source = False
- else:
- is_dynamic_build = OPTIONS.source_info_dict.get(
- "use_dynamic_partitions") == "true"
- is_dynamic_source = partition in shlex.split(
- OPTIONS.source_info_dict.get("dynamic_partition_list", "").strip())
-
- is_dynamic_target = partition in shlex.split(
- OPTIONS.info_dict.get("dynamic_partition_list", "").strip())
-
- # For dynamic partitions builds, check partition list in both source
- # and target build because new partitions may be added, and existing
- # partitions may be removed.
- is_dynamic = is_dynamic_build and (is_dynamic_source or is_dynamic_target)
-
- if is_dynamic:
- self.device = 'map_partition("%s")' % partition
- else:
- if OPTIONS.source_info_dict is None:
- _, device_expr = GetTypeAndDeviceExpr("/" + partition,
- OPTIONS.info_dict)
- else:
- _, device_expr = GetTypeAndDeviceExpr("/" + partition,
- OPTIONS.source_info_dict)
- self.device = device_expr
-
- @property
- def required_cache(self):
- return self._required_cache
-
- def WriteScript(self, script, output_zip, progress=None,
- write_verify_script=False):
- if not self.src:
- # write the output unconditionally
- script.Print("Patching %s image unconditionally..." % (self.partition,))
- else:
- script.Print("Patching %s image after verification." % (self.partition,))
-
- if progress:
- script.ShowProgress(progress, 0)
- self._WriteUpdate(script, output_zip)
-
- if write_verify_script:
- self.WritePostInstallVerifyScript(script)
-
- def WriteStrictVerifyScript(self, script):
- """Verify all the blocks in the care_map, including clobbered blocks.
-
- This differs from the WriteVerifyScript() function: a) it prints different
- error messages; b) it doesn't allow half-way updated images to pass the
- verification."""
-
- partition = self.partition
- script.Print("Verifying %s..." % (partition,))
- ranges = self.tgt.care_map
- ranges_str = ranges.to_string_raw()
- script.AppendExtra(
- 'range_sha1(%s, "%s") == "%s" && ui_print(" Verified.") || '
- 'ui_print("%s has unexpected contents.");' % (
- self.device, ranges_str,
- self.tgt.TotalSha1(include_clobbered_blocks=True),
- self.partition))
- script.AppendExtra("")
-
- def WriteVerifyScript(self, script, touched_blocks_only=False):
- partition = self.partition
-
- # full OTA
- if not self.src:
- script.Print("Image %s will be patched unconditionally." % (partition,))
-
- # incremental OTA
- else:
- if touched_blocks_only:
- ranges = self.touched_src_ranges
- expected_sha1 = self.touched_src_sha1
- else:
- ranges = self.src.care_map.subtract(self.src.clobbered_blocks)
- expected_sha1 = self.src.TotalSha1()
-
- # No blocks to be checked, skipping.
- if not ranges:
- return
-
- ranges_str = ranges.to_string_raw()
- script.AppendExtra(
- 'if (range_sha1(%s, "%s") == "%s" || block_image_verify(%s, '
- 'package_extract_file("%s.transfer.list"), "%s.new.dat", '
- '"%s.patch.dat")) then' % (
- self.device, ranges_str, expected_sha1,
- self.device, partition, partition, partition))
- script.Print('Verified %s image...' % (partition,))
- script.AppendExtra('else')
-
- if self.version >= 4:
-
- # Bug: 21124327
- # When generating incrementals for the system and vendor partitions in
- # version 4 or newer, explicitly check the first block (which contains
- # the superblock) of the partition to see if it's what we expect. If
- # this check fails, give an explicit log message about the partition
- # having been remounted R/W (the most likely explanation).
- if self.check_first_block:
- script.AppendExtra('check_first_block(%s);' % (self.device,))
-
- # If version >= 4, try block recovery before abort update
- if partition == "system":
- code = ErrorCode.SYSTEM_RECOVER_FAILURE
- else:
- code = ErrorCode.VENDOR_RECOVER_FAILURE
- script.AppendExtra((
- 'ifelse (block_image_recover({device}, "{ranges}") && '
- 'block_image_verify({device}, '
- 'package_extract_file("{partition}.transfer.list"), '
- '"{partition}.new.dat", "{partition}.patch.dat"), '
- 'ui_print("{partition} recovered successfully."), '
- 'abort("E{code}: {partition} partition fails to recover"));\n'
- 'endif;').format(device=self.device, ranges=ranges_str,
- partition=partition, code=code))
-
- # Abort the OTA update. Note that the incremental OTA cannot be applied
- # even if it may match the checksum of the target partition.
- # a) If version < 3, operations like move and erase will make changes
- # unconditionally and damage the partition.
- # b) If version >= 3, it won't even reach here.
- else:
- if partition == "system":
- code = ErrorCode.SYSTEM_VERIFICATION_FAILURE
- else:
- code = ErrorCode.VENDOR_VERIFICATION_FAILURE
- script.AppendExtra((
- 'abort("E%d: %s partition has unexpected contents");\n'
- 'endif;') % (code, partition))
-
- def WritePostInstallVerifyScript(self, script):
- partition = self.partition
- script.Print('Verifying the updated %s image...' % (partition,))
- # Unlike pre-install verification, clobbered_blocks should not be ignored.
- ranges = self.tgt.care_map
- ranges_str = ranges.to_string_raw()
- script.AppendExtra(
- 'if range_sha1(%s, "%s") == "%s" then' % (
- self.device, ranges_str,
- self.tgt.TotalSha1(include_clobbered_blocks=True)))
-
- # Bug: 20881595
- # Verify that extended blocks are really zeroed out.
- if self.tgt.extended:
- ranges_str = self.tgt.extended.to_string_raw()
- script.AppendExtra(
- 'if range_sha1(%s, "%s") == "%s" then' % (
- self.device, ranges_str,
- self._HashZeroBlocks(self.tgt.extended.size())))
- script.Print('Verified the updated %s image.' % (partition,))
- if partition == "system":
- code = ErrorCode.SYSTEM_NONZERO_CONTENTS
- else:
- code = ErrorCode.VENDOR_NONZERO_CONTENTS
- script.AppendExtra(
- 'else\n'
- ' abort("E%d: %s partition has unexpected non-zero contents after '
- 'OTA update");\n'
- 'endif;' % (code, partition))
- else:
- script.Print('Verified the updated %s image.' % (partition,))
-
- if partition == "system":
- code = ErrorCode.SYSTEM_UNEXPECTED_CONTENTS
- else:
- code = ErrorCode.VENDOR_UNEXPECTED_CONTENTS
-
- script.AppendExtra(
- 'else\n'
- ' abort("E%d: %s partition has unexpected contents after OTA '
- 'update");\n'
- 'endif;' % (code, partition))
-
- def _WriteUpdate(self, script, output_zip):
- ZipWrite(output_zip,
- '{}.transfer.list'.format(self.path),
- '{}.transfer.list'.format(self.partition))
-
- # For full OTA, compress the new.dat with brotli with quality 6 to reduce
- # its size. Quailty 9 almost triples the compression time but doesn't
- # further reduce the size too much. For a typical 1.8G system.new.dat
- # zip | brotli(quality 6) | brotli(quality 9)
- # compressed_size: 942M | 869M (~8% reduced) | 854M
- # compression_time: 75s | 265s | 719s
- # decompression_time: 15s | 25s | 25s
-
- if not self.src:
- brotli_cmd = ['brotli', '--quality=6',
- '--output={}.new.dat.br'.format(self.path),
- '{}.new.dat'.format(self.path)]
- print("Compressing {}.new.dat with brotli".format(self.partition))
- RunAndCheckOutput(brotli_cmd)
-
- new_data_name = '{}.new.dat.br'.format(self.partition)
- ZipWrite(output_zip,
- '{}.new.dat.br'.format(self.path),
- new_data_name,
- compress_type=zipfile.ZIP_STORED)
- else:
- new_data_name = '{}.new.dat'.format(self.partition)
- ZipWrite(output_zip, '{}.new.dat'.format(self.path), new_data_name)
-
- ZipWrite(output_zip,
- '{}.patch.dat'.format(self.path),
- '{}.patch.dat'.format(self.partition),
- compress_type=zipfile.ZIP_STORED)
-
- if self.partition == "system":
- code = ErrorCode.SYSTEM_UPDATE_FAILURE
- else:
- code = ErrorCode.VENDOR_UPDATE_FAILURE
-
- call = ('block_image_update({device}, '
- 'package_extract_file("{partition}.transfer.list"), '
- '"{new_data_name}", "{partition}.patch.dat") ||\n'
- ' abort("E{code}: Failed to update {partition} image.");'.format(
- device=self.device, partition=self.partition,
- new_data_name=new_data_name, code=code))
- script.AppendExtra(script.WordWrap(call))
-
- def _HashBlocks(self, source, ranges): # pylint: disable=no-self-use
- data = source.ReadRangeSet(ranges)
- ctx = sha1()
-
- for p in data:
- ctx.update(p)
-
- return ctx.hexdigest()
-
- def _HashZeroBlocks(self, num_blocks): # pylint: disable=no-self-use
- """Return the hash value for all zero blocks."""
- zero_block = '\x00' * 4096
- ctx = sha1()
- for _ in range(num_blocks):
- ctx.update(zero_block)
-
- return ctx.hexdigest()
-
-
# Expose these two classes to support vendor-specific scripts
DataImage = images.DataImage
EmptyImage = images.EmptyImage
-# map recovery.fstab's fs_types to mount/format "partition types"
-PARTITION_TYPES = {
- "ext4": "EMMC",
- "emmc": "EMMC",
- "f2fs": "EMMC",
- "squashfs": "EMMC",
- "erofs": "EMMC"
-}
-
-
-def GetTypeAndDevice(mount_point, info, check_no_slot=True):
- """
- Use GetTypeAndDeviceExpr whenever possible. This function is kept for
- backwards compatibility. It aborts if the fstab entry has slotselect option
- (unless check_no_slot is explicitly set to False).
- """
- fstab = info["fstab"]
- if fstab:
- if check_no_slot:
- assert not fstab[mount_point].slotselect, \
- "Use GetTypeAndDeviceExpr instead"
- return (PARTITION_TYPES[fstab[mount_point].fs_type],
- fstab[mount_point].device)
- raise KeyError
-
-
-def GetTypeAndDeviceExpr(mount_point, info):
- """
- Return the filesystem of the partition, and an edify expression that evaluates
- to the device at runtime.
- """
- fstab = info["fstab"]
- if fstab:
- p = fstab[mount_point]
- device_expr = '"%s"' % fstab[mount_point].device
- if p.slotselect:
- device_expr = 'add_slot_suffix(%s)' % device_expr
- return (PARTITION_TYPES[fstab[mount_point].fs_type], device_expr)
- raise KeyError
-
def GetEntryForDevice(fstab, device):
"""
@@ -3789,349 +3213,6 @@
return output
-def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
- info_dict=None):
- """Generates the recovery-from-boot patch and writes the script to output.
-
- Most of the space in the boot and recovery images is just the kernel, which is
- identical for the two, so the resulting patch should be efficient. Add it to
- the output zip, along with a shell script that is run from init.rc on first
- boot to actually do the patching and install the new recovery image.
-
- Args:
- input_dir: The top-level input directory of the target-files.zip.
- output_sink: The callback function that writes the result.
- recovery_img: File object for the recovery image.
- boot_img: File objects for the boot image.
- info_dict: A dict returned by common.LoadInfoDict() on the input
- target_files. Will use OPTIONS.info_dict if None has been given.
- """
- if info_dict is None:
- info_dict = OPTIONS.info_dict
-
- full_recovery_image = info_dict.get("full_recovery_image") == "true"
- board_uses_vendorimage = info_dict.get("board_uses_vendorimage") == "true"
-
- if board_uses_vendorimage:
- # In this case, the output sink is rooted at VENDOR
- recovery_img_path = "etc/recovery.img"
- recovery_resource_dat_path = "VENDOR/etc/recovery-resource.dat"
- sh_dir = "bin"
- else:
- # In this case the output sink is rooted at SYSTEM
- recovery_img_path = "vendor/etc/recovery.img"
- recovery_resource_dat_path = "SYSTEM/vendor/etc/recovery-resource.dat"
- sh_dir = "vendor/bin"
-
- if full_recovery_image:
- output_sink(recovery_img_path, recovery_img.data)
-
- else:
- system_root_image = info_dict.get("system_root_image") == "true"
- include_recovery_dtbo = info_dict.get("include_recovery_dtbo") == "true"
- include_recovery_acpio = info_dict.get("include_recovery_acpio") == "true"
- path = os.path.join(input_dir, recovery_resource_dat_path)
- # With system-root-image, boot and recovery images will have mismatching
- # entries (only recovery has the ramdisk entry) (Bug: 72731506). Use bsdiff
- # to handle such a case.
- if system_root_image or include_recovery_dtbo or include_recovery_acpio:
- diff_program = ["bsdiff"]
- bonus_args = ""
- assert not os.path.exists(path)
- else:
- diff_program = ["imgdiff"]
- if os.path.exists(path):
- diff_program.append("-b")
- diff_program.append(path)
- bonus_args = "--bonus /vendor/etc/recovery-resource.dat"
- else:
- bonus_args = ""
-
- d = Difference(recovery_img, boot_img, diff_program=diff_program)
- _, _, patch = d.ComputePatch()
- output_sink("recovery-from-boot.p", patch)
-
- try:
- # The following GetTypeAndDevice()s need to use the path in the target
- # info_dict instead of source_info_dict.
- boot_type, boot_device = GetTypeAndDevice("/boot", info_dict,
- check_no_slot=False)
- recovery_type, recovery_device = GetTypeAndDevice("/recovery", info_dict,
- check_no_slot=False)
- except KeyError:
- return
-
- if full_recovery_image:
-
- # Note that we use /vendor to refer to the recovery resources. This will
- # work for a separate vendor partition mounted at /vendor or a
- # /system/vendor subdirectory on the system partition, for which init will
- # create a symlink from /vendor to /system/vendor.
-
- sh = """#!/vendor/bin/sh
-if ! applypatch --check %(type)s:%(device)s:%(size)d:%(sha1)s; then
- applypatch \\
- --flash /vendor/etc/recovery.img \\
- --target %(type)s:%(device)s:%(size)d:%(sha1)s && \\
- log -t recovery "Installing new recovery image: succeeded" || \\
- log -t recovery "Installing new recovery image: failed"
-else
- log -t recovery "Recovery image already installed"
-fi
-""" % {'type': recovery_type,
- 'device': recovery_device,
- 'sha1': recovery_img.sha1,
- 'size': recovery_img.size}
- else:
- sh = """#!/vendor/bin/sh
-if ! applypatch --check %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
- applypatch %(bonus_args)s \\
- --patch /vendor/recovery-from-boot.p \\
- --source %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s \\
- --target %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s && \\
- log -t recovery "Installing new recovery image: succeeded" || \\
- log -t recovery "Installing new recovery image: failed"
-else
- log -t recovery "Recovery image already installed"
-fi
-""" % {'boot_size': boot_img.size,
- 'boot_sha1': boot_img.sha1,
- 'recovery_size': recovery_img.size,
- 'recovery_sha1': recovery_img.sha1,
- 'boot_type': boot_type,
- 'boot_device': boot_device + '$(getprop ro.boot.slot_suffix)',
- 'recovery_type': recovery_type,
- 'recovery_device': recovery_device + '$(getprop ro.boot.slot_suffix)',
- 'bonus_args': bonus_args}
-
- # The install script location moved from /system/etc to /system/bin in the L
- # release. In the R release it is in VENDOR/bin or SYSTEM/vendor/bin.
- sh_location = os.path.join(sh_dir, "install-recovery.sh")
-
- logger.info("putting script in %s", sh_location)
-
- output_sink(sh_location, sh.encode())
-
-
-class DynamicPartitionUpdate(object):
- def __init__(self, src_group=None, tgt_group=None, progress=None,
- block_difference=None):
- self.src_group = src_group
- self.tgt_group = tgt_group
- self.progress = progress
- self.block_difference = block_difference
-
- @property
- def src_size(self):
- if not self.block_difference:
- return 0
- return DynamicPartitionUpdate._GetSparseImageSize(self.block_difference.src)
-
- @property
- def tgt_size(self):
- if not self.block_difference:
- return 0
- return DynamicPartitionUpdate._GetSparseImageSize(self.block_difference.tgt)
-
- @staticmethod
- def _GetSparseImageSize(img):
- if not img:
- return 0
- return img.blocksize * img.total_blocks
-
-
-class DynamicGroupUpdate(object):
- def __init__(self, src_size=None, tgt_size=None):
- # None: group does not exist. 0: no size limits.
- self.src_size = src_size
- self.tgt_size = tgt_size
-
-
-class DynamicPartitionsDifference(object):
- def __init__(self, info_dict, block_diffs, progress_dict=None,
- source_info_dict=None):
- if progress_dict is None:
- progress_dict = {}
-
- self._remove_all_before_apply = False
- if source_info_dict is None:
- self._remove_all_before_apply = True
- source_info_dict = {}
-
- block_diff_dict = collections.OrderedDict(
- [(e.partition, e) for e in block_diffs])
-
- assert len(block_diff_dict) == len(block_diffs), \
- "Duplicated BlockDifference object for {}".format(
- [partition for partition, count in
- collections.Counter(e.partition for e in block_diffs).items()
- if count > 1])
-
- self._partition_updates = collections.OrderedDict()
-
- for p, block_diff in block_diff_dict.items():
- self._partition_updates[p] = DynamicPartitionUpdate()
- self._partition_updates[p].block_difference = block_diff
-
- for p, progress in progress_dict.items():
- if p in self._partition_updates:
- self._partition_updates[p].progress = progress
-
- tgt_groups = shlex.split(info_dict.get(
- "super_partition_groups", "").strip())
- src_groups = shlex.split(source_info_dict.get(
- "super_partition_groups", "").strip())
-
- for g in tgt_groups:
- for p in shlex.split(info_dict.get(
- "super_%s_partition_list" % g, "").strip()):
- assert p in self._partition_updates, \
- "{} is in target super_{}_partition_list but no BlockDifference " \
- "object is provided.".format(p, g)
- self._partition_updates[p].tgt_group = g
-
- for g in src_groups:
- for p in shlex.split(source_info_dict.get(
- "super_%s_partition_list" % g, "").strip()):
- assert p in self._partition_updates, \
- "{} is in source super_{}_partition_list but no BlockDifference " \
- "object is provided.".format(p, g)
- self._partition_updates[p].src_group = g
-
- target_dynamic_partitions = set(shlex.split(info_dict.get(
- "dynamic_partition_list", "").strip()))
- block_diffs_with_target = set(p for p, u in self._partition_updates.items()
- if u.tgt_size)
- assert block_diffs_with_target == target_dynamic_partitions, \
- "Target Dynamic partitions: {}, BlockDifference with target: {}".format(
- list(target_dynamic_partitions), list(block_diffs_with_target))
-
- source_dynamic_partitions = set(shlex.split(source_info_dict.get(
- "dynamic_partition_list", "").strip()))
- block_diffs_with_source = set(p for p, u in self._partition_updates.items()
- if u.src_size)
- assert block_diffs_with_source == source_dynamic_partitions, \
- "Source Dynamic partitions: {}, BlockDifference with source: {}".format(
- list(source_dynamic_partitions), list(block_diffs_with_source))
-
- if self._partition_updates:
- logger.info("Updating dynamic partitions %s",
- self._partition_updates.keys())
-
- self._group_updates = collections.OrderedDict()
-
- for g in tgt_groups:
- self._group_updates[g] = DynamicGroupUpdate()
- self._group_updates[g].tgt_size = int(info_dict.get(
- "super_%s_group_size" % g, "0").strip())
-
- for g in src_groups:
- if g not in self._group_updates:
- self._group_updates[g] = DynamicGroupUpdate()
- self._group_updates[g].src_size = int(source_info_dict.get(
- "super_%s_group_size" % g, "0").strip())
-
- self._Compute()
-
- def WriteScript(self, script, output_zip, write_verify_script=False):
- script.Comment('--- Start patching dynamic partitions ---')
- for p, u in self._partition_updates.items():
- if u.src_size and u.tgt_size and u.src_size > u.tgt_size:
- script.Comment('Patch partition %s' % p)
- u.block_difference.WriteScript(script, output_zip, progress=u.progress,
- write_verify_script=False)
-
- op_list_path = MakeTempFile()
- with open(op_list_path, 'w') as f:
- for line in self._op_list:
- f.write('{}\n'.format(line))
-
- ZipWrite(output_zip, op_list_path, "dynamic_partitions_op_list")
-
- script.Comment('Update dynamic partition metadata')
- script.AppendExtra('assert(update_dynamic_partitions('
- 'package_extract_file("dynamic_partitions_op_list")));')
-
- if write_verify_script:
- for p, u in self._partition_updates.items():
- if u.src_size and u.tgt_size and u.src_size > u.tgt_size:
- u.block_difference.WritePostInstallVerifyScript(script)
- script.AppendExtra('unmap_partition("%s");' % p) # ignore errors
-
- for p, u in self._partition_updates.items():
- if u.tgt_size and u.src_size <= u.tgt_size:
- script.Comment('Patch partition %s' % p)
- u.block_difference.WriteScript(script, output_zip, progress=u.progress,
- write_verify_script=write_verify_script)
- if write_verify_script:
- script.AppendExtra('unmap_partition("%s");' % p) # ignore errors
-
- script.Comment('--- End patching dynamic partitions ---')
-
- def _Compute(self):
- self._op_list = list()
-
- def append(line):
- self._op_list.append(line)
-
- def comment(line):
- self._op_list.append("# %s" % line)
-
- if self._remove_all_before_apply:
- comment('Remove all existing dynamic partitions and groups before '
- 'applying full OTA')
- append('remove_all_groups')
-
- for p, u in self._partition_updates.items():
- if u.src_group and not u.tgt_group:
- append('remove %s' % p)
-
- for p, u in self._partition_updates.items():
- if u.src_group and u.tgt_group and u.src_group != u.tgt_group:
- comment('Move partition %s from %s to default' % (p, u.src_group))
- append('move %s default' % p)
-
- for p, u in self._partition_updates.items():
- if u.src_size and u.tgt_size and u.src_size > u.tgt_size:
- comment('Shrink partition %s from %d to %d' %
- (p, u.src_size, u.tgt_size))
- append('resize %s %s' % (p, u.tgt_size))
-
- for g, u in self._group_updates.items():
- if u.src_size is not None and u.tgt_size is None:
- append('remove_group %s' % g)
- if (u.src_size is not None and u.tgt_size is not None and
- u.src_size > u.tgt_size):
- comment('Shrink group %s from %d to %d' % (g, u.src_size, u.tgt_size))
- append('resize_group %s %d' % (g, u.tgt_size))
-
- for g, u in self._group_updates.items():
- if u.src_size is None and u.tgt_size is not None:
- comment('Add group %s with maximum size %d' % (g, u.tgt_size))
- append('add_group %s %d' % (g, u.tgt_size))
- if (u.src_size is not None and u.tgt_size is not None and
- u.src_size < u.tgt_size):
- comment('Grow group %s from %d to %d' % (g, u.src_size, u.tgt_size))
- append('resize_group %s %d' % (g, u.tgt_size))
-
- for p, u in self._partition_updates.items():
- if u.tgt_group and not u.src_group:
- comment('Add partition %s to group %s' % (p, u.tgt_group))
- append('add %s %s' % (p, u.tgt_group))
-
- for p, u in self._partition_updates.items():
- if u.tgt_size and u.src_size < u.tgt_size:
- comment('Grow partition %s from %d to %d' %
- (p, u.src_size, u.tgt_size))
- append('resize %s %d' % (p, u.tgt_size))
-
- for p, u in self._partition_updates.items():
- if u.src_group and u.tgt_group and u.src_group != u.tgt_group:
- comment('Move partition %s from default to %s' %
- (p, u.tgt_group))
- append('move %s %s' % (p, u.tgt_group))
-
-
def GetBootImageBuildProp(boot_img, ramdisk_format=RamdiskFormat.LZ4):
"""
Get build.prop from ramdisk within the boot image
diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py
index 033c02e..0a7653c 100644
--- a/tools/releasetools/edify_generator.py
+++ b/tools/releasetools/edify_generator.py
@@ -16,6 +16,45 @@
import common
+# map recovery.fstab's fs_types to mount/format "partition types"
+PARTITION_TYPES = {
+ "ext4": "EMMC",
+ "emmc": "EMMC",
+ "f2fs": "EMMC",
+ "squashfs": "EMMC",
+ "erofs": "EMMC"
+}
+
+
+class ErrorCode(object):
+ """Define error_codes for failures that happen during the actual
+ update package installation.
+
+ Error codes 0-999 are reserved for failures before the package
+ installation (i.e. low battery, package verification failure).
+ Detailed code in 'bootable/recovery/error_code.h' """
+
+ SYSTEM_VERIFICATION_FAILURE = 1000
+ SYSTEM_UPDATE_FAILURE = 1001
+ SYSTEM_UNEXPECTED_CONTENTS = 1002
+ SYSTEM_NONZERO_CONTENTS = 1003
+ SYSTEM_RECOVER_FAILURE = 1004
+ VENDOR_VERIFICATION_FAILURE = 2000
+ VENDOR_UPDATE_FAILURE = 2001
+ VENDOR_UNEXPECTED_CONTENTS = 2002
+ VENDOR_NONZERO_CONTENTS = 2003
+ VENDOR_RECOVER_FAILURE = 2004
+ OEM_PROP_MISMATCH = 3000
+ FINGERPRINT_MISMATCH = 3001
+ THUMBPRINT_MISMATCH = 3002
+ OLDER_BUILD = 3003
+ DEVICE_MISMATCH = 3004
+ BAD_PATCH_FILE = 3005
+ INSUFFICIENT_CACHE_SPACE = 3006
+ TUNE_PARTITION_FAILURE = 3007
+ APPLY_PATCH_FAILURE = 3008
+
+
class EdifyGenerator(object):
"""Class to generate scripts in the 'edify' recovery script language
used from donut onwards."""
@@ -88,7 +127,7 @@
'abort("E{code}: This package expects the value \\"{values}\\" for '
'\\"{name}\\"; this has value \\"" + '
'{get_prop_command} + "\\".");').format(
- code=common.ErrorCode.OEM_PROP_MISMATCH,
+ code=ErrorCode.OEM_PROP_MISMATCH,
get_prop_command=get_prop_command, name=name,
values='\\" or \\"'.join(values))
self.script.append(cmd)
@@ -101,7 +140,7 @@
for i in fp]) +
' ||\n abort("E%d: Package expects build fingerprint of %s; '
'this device has " + getprop("ro.build.fingerprint") + ".");') % (
- common.ErrorCode.FINGERPRINT_MISMATCH, " or ".join(fp))
+ ErrorCode.FINGERPRINT_MISMATCH, " or ".join(fp))
self.script.append(cmd)
def AssertSomeThumbprint(self, *fp):
@@ -112,7 +151,7 @@
for i in fp]) +
' ||\n abort("E%d: Package expects build thumbprint of %s; this '
'device has " + getprop("ro.build.thumbprint") + ".");') % (
- common.ErrorCode.THUMBPRINT_MISMATCH, " or ".join(fp))
+ ErrorCode.THUMBPRINT_MISMATCH, " or ".join(fp))
self.script.append(cmd)
def AssertFingerprintOrThumbprint(self, fp, tp):
@@ -133,14 +172,14 @@
('(!less_than_int(%s, getprop("ro.build.date.utc"))) || '
'abort("E%d: Can\'t install this package (%s) over newer '
'build (" + getprop("ro.build.date") + ").");') % (
- timestamp, common.ErrorCode.OLDER_BUILD, timestamp_text))
+ timestamp, ErrorCode.OLDER_BUILD, timestamp_text))
def AssertDevice(self, device):
"""Assert that the device identifier is the given string."""
cmd = ('getprop("ro.product.device") == "%s" || '
'abort("E%d: This package is for \\"%s\\" devices; '
'this is a \\"" + getprop("ro.product.device") + "\\".");') % (
- device, common.ErrorCode.DEVICE_MISMATCH, device)
+ device, ErrorCode.DEVICE_MISMATCH, device)
self.script.append(cmd)
def AssertSomeBootloader(self, *bootloaders):
@@ -207,7 +246,7 @@
'unexpected contents."));').format(
target=target_expr,
source=source_expr,
- code=common.ErrorCode.BAD_PATCH_FILE)))
+ code=ErrorCode.BAD_PATCH_FILE)))
def CacheFreeSpaceCheck(self, amount):
"""Check that there's at least 'amount' space that can be made
@@ -216,7 +255,7 @@
self.script.append(('apply_patch_space(%d) || abort("E%d: Not enough free '
'space on /cache to apply patches.");') % (
amount,
- common.ErrorCode.INSUFFICIENT_CACHE_SPACE))
+ ErrorCode.INSUFFICIENT_CACHE_SPACE))
def Mount(self, mount_point, mount_options_by_format=""):
"""Mount the partition with the given mount_point.
@@ -238,7 +277,7 @@
if p.context is not None:
mount_flags = p.context + ("," + mount_flags if mount_flags else "")
self.script.append('mount("%s", "%s", %s, "%s", "%s");' % (
- p.fs_type, common.PARTITION_TYPES[p.fs_type],
+ p.fs_type, PARTITION_TYPES[p.fs_type],
self._GetSlotSuffixDeviceForEntry(p),
p.mount_point, mount_flags))
self.mounts.add(p.mount_point)
@@ -264,7 +303,7 @@
'tune2fs(' + "".join(['"%s", ' % (i,) for i in options]) +
'%s) || abort("E%d: Failed to tune partition %s");' % (
self._GetSlotSuffixDeviceForEntry(p),
- common.ErrorCode.TUNE_PARTITION_FAILURE, partition))
+ ErrorCode.TUNE_PARTITION_FAILURE, partition))
def FormatPartition(self, partition):
"""Format the given partition, specified by its mount point (eg,
@@ -274,7 +313,7 @@
if fstab:
p = fstab[partition]
self.script.append('format("%s", "%s", %s, "%s", "%s");' %
- (p.fs_type, common.PARTITION_TYPES[p.fs_type],
+ (p.fs_type, PARTITION_TYPES[p.fs_type],
self._GetSlotSuffixDeviceForEntry(p),
p.length, p.mount_point))
@@ -354,7 +393,7 @@
target=target_expr,
source=source_expr,
patch=patch_expr,
- code=common.ErrorCode.APPLY_PATCH_FAILURE)))
+ code=ErrorCode.APPLY_PATCH_FAILURE)))
def _GetSlotSuffixDeviceForEntry(self, entry=None):
"""
@@ -388,7 +427,7 @@
fstab = self.fstab
if fstab:
p = fstab[mount_point]
- partition_type = common.PARTITION_TYPES[p.fs_type]
+ partition_type = PARTITION_TYPES[p.fs_type]
device = self._GetSlotSuffixDeviceForEntry(p)
args = {'device': device, 'fn': fn}
if partition_type == "EMMC":
diff --git a/tools/releasetools/make_recovery_patch.py b/tools/releasetools/make_recovery_patch.py
index 1497d69..397bf23 100644
--- a/tools/releasetools/make_recovery_patch.py
+++ b/tools/releasetools/make_recovery_patch.py
@@ -21,6 +21,7 @@
import sys
import common
+from non_ab_ota import MakeRecoveryPatch
if sys.hexversion < 0x02070000:
print("Python 2.7 or newer is required.", file=sys.stderr)
@@ -60,7 +61,7 @@
*fn.split("/")), "wb") as f:
f.write(data)
- common.MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img)
+ MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img)
if __name__ == '__main__':
diff --git a/tools/releasetools/non_ab_ota.py b/tools/releasetools/non_ab_ota.py
index 667891c..80c3083 100644
--- a/tools/releasetools/non_ab_ota.py
+++ b/tools/releasetools/non_ab_ota.py
@@ -13,17 +13,25 @@
# limitations under the License.
import collections
+import copy
+import imp
import logging
import os
+import time
+import threading
+import tempfile
import zipfile
+import subprocess
+import shlex
import common
import edify_generator
-import verity_utils
+from edify_generator import ErrorCode, PARTITION_TYPES
from check_target_files_vintf import CheckVintfIfTrebleEnabled, HasPartition
-from common import OPTIONS
+from common import OPTIONS, Run, MakeTempDir, RunAndCheckOutput, ZipWrite, MakeTempFile
from ota_utils import UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata, PropertyFiles
-import subprocess
+from blockimgdiff import BlockImageDiff
+from hashlib import sha1
logger = logging.getLogger(__name__)
@@ -51,10 +59,10 @@
check_first_block = partition_source_info.fs_type == "ext4"
# Disable imgdiff because it relies on zlib to produce stable output
# across different versions, which is often not the case.
- return common.BlockDifference(name, partition_tgt, partition_src,
- check_first_block,
- version=blockimgdiff_version,
- disable_imgdiff=True)
+ return BlockDifference(name, partition_tgt, partition_src,
+ check_first_block,
+ version=blockimgdiff_version,
+ disable_imgdiff=True)
if source_zip:
# See notes in common.GetUserImage()
@@ -76,8 +84,8 @@
tgt = common.GetUserImage(partition, OPTIONS.input_tmp, target_zip,
info_dict=target_info,
reset_file_map=True)
- block_diff_dict[partition] = common.BlockDifference(partition, tgt,
- src=None)
+ block_diff_dict[partition] = BlockDifference(partition, tgt,
+ src=None)
# Incremental OTA update.
else:
block_diff_dict[partition] = GetIncrementalBlockDifferenceForPartition(
@@ -95,7 +103,7 @@
function_name = "FullOTA_GetBlockDifferences"
if device_specific_diffs:
- assert all(isinstance(diff, common.BlockDifference)
+ assert all(isinstance(diff, BlockDifference)
for diff in device_specific_diffs), \
"{} is not returning a list of BlockDifference objects".format(
function_name)
@@ -131,7 +139,7 @@
output_zip = zipfile.ZipFile(
staging_file, "w", compression=zipfile.ZIP_DEFLATED)
- device_specific = common.DeviceSpecificParams(
+ device_specific = DeviceSpecificParams(
input_zip=input_zip,
input_version=target_api_version,
output_zip=output_zip,
@@ -217,7 +225,7 @@
if target_info.get('use_dynamic_partitions') == "true":
# Use empty source_info_dict to indicate that all partitions / groups must
# be re-added.
- dynamic_partitions_diff = common.DynamicPartitionsDifference(
+ dynamic_partitions_diff = DynamicPartitionsDifference(
info_dict=OPTIONS.info_dict,
block_diffs=block_diff_dict.values(),
progress_dict=progress_dict)
@@ -309,7 +317,7 @@
output_zip = zipfile.ZipFile(
staging_file, "w", compression=zipfile.ZIP_DEFLATED)
- device_specific = common.DeviceSpecificParams(
+ device_specific = DeviceSpecificParams(
source_zip=source_zip,
source_version=source_api_version,
source_tmp=OPTIONS.source_tmp,
@@ -404,9 +412,9 @@
required_cache_sizes = [diff.required_cache for diff in
block_diff_dict.values()]
if updating_boot:
- boot_type, boot_device_expr = common.GetTypeAndDeviceExpr("/boot",
- source_info)
- d = common.Difference(target_boot, source_boot, "bsdiff")
+ boot_type, boot_device_expr = GetTypeAndDeviceExpr("/boot",
+ source_info)
+ d = Difference(target_boot, source_boot, "bsdiff")
_, _, d = d.ComputePatch()
if d is None:
include_full_boot = True
@@ -461,7 +469,7 @@
if OPTIONS.target_info_dict.get("use_dynamic_partitions") != "true":
raise RuntimeError(
"can't generate incremental that disables dynamic partitions")
- dynamic_partitions_diff = common.DynamicPartitionsDifference(
+ dynamic_partitions_diff = DynamicPartitionsDifference(
info_dict=OPTIONS.target_info_dict,
source_info_dict=OPTIONS.source_info_dict,
block_diffs=block_diff_dict.values(),
@@ -687,3 +695,881 @@
namelist = target_files_zip.namelist()
return patch in namelist or img in namelist
+
+
+class DeviceSpecificParams(object):
+ module = None
+
+ def __init__(self, **kwargs):
+ """Keyword arguments to the constructor become attributes of this
+ object, which is passed to all functions in the device-specific
+ module."""
+ for k, v in kwargs.items():
+ setattr(self, k, v)
+ self.extras = OPTIONS.extras
+
+ if self.module is None:
+ path = OPTIONS.device_specific
+ if not path:
+ return
+ try:
+ if os.path.isdir(path):
+ info = imp.find_module("releasetools", [path])
+ else:
+ d, f = os.path.split(path)
+ b, x = os.path.splitext(f)
+ if x == ".py":
+ f = b
+ info = imp.find_module(f, [d])
+ logger.info("loaded device-specific extensions from %s", path)
+ self.module = imp.load_module("device_specific", *info)
+ except ImportError:
+ logger.info("unable to load device-specific module; assuming none")
+
+ def _DoCall(self, function_name, *args, **kwargs):
+ """Call the named function in the device-specific module, passing
+ the given args and kwargs. The first argument to the call will be
+ the DeviceSpecific object itself. If there is no module, or the
+ module does not define the function, return the value of the
+ 'default' kwarg (which itself defaults to None)."""
+ if self.module is None or not hasattr(self.module, function_name):
+ return kwargs.get("default")
+ return getattr(self.module, function_name)(*((self,) + args), **kwargs)
+
+ def FullOTA_Assertions(self):
+ """Called after emitting the block of assertions at the top of a
+ full OTA package. Implementations can add whatever additional
+ assertions they like."""
+ return self._DoCall("FullOTA_Assertions")
+
+ def FullOTA_InstallBegin(self):
+ """Called at the start of full OTA installation."""
+ return self._DoCall("FullOTA_InstallBegin")
+
+ def FullOTA_GetBlockDifferences(self):
+ """Called during full OTA installation and verification.
+ Implementations should return a list of BlockDifference objects describing
+ the update on each additional partition.
+ """
+ return self._DoCall("FullOTA_GetBlockDifferences")
+
+ def FullOTA_InstallEnd(self):
+ """Called at the end of full OTA installation; typically this is
+ used to install the image for the device's baseband processor."""
+ return self._DoCall("FullOTA_InstallEnd")
+
+ def IncrementalOTA_Assertions(self):
+ """Called after emitting the block of assertions at the top of an
+ incremental OTA package. Implementations can add whatever
+ additional assertions they like."""
+ return self._DoCall("IncrementalOTA_Assertions")
+
+ def IncrementalOTA_VerifyBegin(self):
+ """Called at the start of the verification phase of incremental
+ OTA installation; additional checks can be placed here to abort
+ the script before any changes are made."""
+ return self._DoCall("IncrementalOTA_VerifyBegin")
+
+ def IncrementalOTA_VerifyEnd(self):
+ """Called at the end of the verification phase of incremental OTA
+ installation; additional checks can be placed here to abort the
+ script before any changes are made."""
+ return self._DoCall("IncrementalOTA_VerifyEnd")
+
+ def IncrementalOTA_InstallBegin(self):
+ """Called at the start of incremental OTA installation (after
+ verification is complete)."""
+ return self._DoCall("IncrementalOTA_InstallBegin")
+
+ def IncrementalOTA_GetBlockDifferences(self):
+ """Called during incremental OTA installation and verification.
+ Implementations should return a list of BlockDifference objects describing
+ the update on each additional partition.
+ """
+ return self._DoCall("IncrementalOTA_GetBlockDifferences")
+
+ def IncrementalOTA_InstallEnd(self):
+ """Called at the end of incremental OTA installation; typically
+ this is used to install the image for the device's baseband
+ processor."""
+ return self._DoCall("IncrementalOTA_InstallEnd")
+
+ def VerifyOTA_Assertions(self):
+ return self._DoCall("VerifyOTA_Assertions")
+
+
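As a rough illustration of the hook mechanism described in the docstrings above, a device-specific releasetools module might look like the sketch below. The module path comes from OPTIONS.device_specific; the radio-flashing body is purely a hypothetical example, and WriteRawImage is assumed to be available on the edify script writer passed in through the constructor kwargs.

    # releasetools.py in the directory pointed to by OPTIONS.device_specific
    # (loaded above via imp.find_module("releasetools", [path])).

    def FullOTA_Assertions(info):
      # 'info' is the DeviceSpecificParams instance; attributes such as
      # info.script and info.output_zip come from the constructor kwargs.
      info.script.Print("Running device-specific assertions...")

    def FullOTA_InstallEnd(info):
      # Hypothetical example: flash an extra radio image carried in the package.
      info.script.Print("Flashing radio image...")
      info.script.WriteRawImage("/radio", "radio.img")  # assumes this edify helper exists

    def FullOTA_GetBlockDifferences(info):
      # Return extra BlockDifference objects for additional partitions, if any.
      return []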
+DIFF_PROGRAM_BY_EXT = {
+ ".gz": "imgdiff",
+ ".zip": ["imgdiff", "-z"],
+ ".jar": ["imgdiff", "-z"],
+ ".apk": ["imgdiff", "-z"],
+ ".img": "imgdiff",
+}
+
+
+class Difference(object):
+ def __init__(self, tf, sf, diff_program=None):
+ self.tf = tf
+ self.sf = sf
+ self.patch = None
+ self.diff_program = diff_program
+
+ def ComputePatch(self):
+ """Compute the patch (as a string of data) needed to turn sf into
+ tf. Returns the same tuple as GetPatch()."""
+
+ tf = self.tf
+ sf = self.sf
+
+ if self.diff_program:
+ diff_program = self.diff_program
+ else:
+ ext = os.path.splitext(tf.name)[1]
+ diff_program = DIFF_PROGRAM_BY_EXT.get(ext, "bsdiff")
+
+ ttemp = tf.WriteToTemp()
+ stemp = sf.WriteToTemp()
+
+ ext = os.path.splitext(tf.name)[1]
+
+ try:
+ ptemp = tempfile.NamedTemporaryFile()
+ if isinstance(diff_program, list):
+ cmd = copy.copy(diff_program)
+ else:
+ cmd = [diff_program]
+ cmd.append(stemp.name)
+ cmd.append(ttemp.name)
+ cmd.append(ptemp.name)
+ p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ err = []
+
+ def run():
+ _, e = p.communicate()
+ if e:
+ err.append(e)
+ th = threading.Thread(target=run)
+ th.start()
+ th.join(timeout=300) # 5 mins
+ if th.is_alive():
+ logger.warning("diff command timed out")
+ p.terminate()
+ th.join(5)
+ if th.is_alive():
+ p.kill()
+ th.join()
+
+ if p.returncode != 0:
+ logger.warning("Failure running %s:\n%s\n", cmd, "".join(err))
+ self.patch = None
+ return None, None, None
+ diff = ptemp.read()
+ finally:
+ ptemp.close()
+ stemp.close()
+ ttemp.close()
+
+ self.patch = diff
+ return self.tf, self.sf, self.patch
+
+ def GetPatch(self):
+ """Returns a tuple of (target_file, source_file, patch_data).
+
+ patch_data may be None if ComputePatch hasn't been called, or if
+ computing the patch failed.
+ """
+ return self.tf, self.sf, self.patch
+
+
+def ComputeDifferences(diffs):
+ """Call ComputePatch on all the Difference objects in 'diffs'."""
+ logger.info("%d diffs to compute", len(diffs))
+
+ # Do the largest files first, to try and reduce the long-pole effect.
+ by_size = [(i.tf.size, i) for i in diffs]
+ by_size.sort(reverse=True)
+ by_size = [i[1] for i in by_size]
+
+ lock = threading.Lock()
+ diff_iter = iter(by_size) # accessed under lock
+
+ def worker():
+ try:
+ lock.acquire()
+ for d in diff_iter:
+ lock.release()
+ start = time.time()
+ d.ComputePatch()
+ dur = time.time() - start
+ lock.acquire()
+
+ tf, sf, patch = d.GetPatch()
+ if sf.name == tf.name:
+ name = tf.name
+ else:
+ name = "%s (%s)" % (tf.name, sf.name)
+ if patch is None:
+ logger.error("patching failed! %40s", name)
+ else:
+ logger.info(
+ "%8.2f sec %8d / %8d bytes (%6.2f%%) %s", dur, len(patch),
+ tf.size, 100.0 * len(patch) / tf.size, name)
+ lock.release()
+ except Exception:
+ logger.exception("Failed to compute diff from worker")
+ raise
+
+ # start worker threads; wait for them all to finish.
+ threads = [threading.Thread(target=worker)
+ for i in range(OPTIONS.worker_threads)]
+ for th in threads:
+ th.start()
+ while threads:
+ threads.pop().join()
+
+
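A minimal usage sketch for the two pieces above, assuming common.File objects (name, data) as the target and source files, the way callers elsewhere in releasetools construct them; target_data and source_data are placeholders:

    tf = common.File("framework.jar", target_data)   # target contents (placeholder data)
    sf = common.File("framework.jar", source_data)   # source contents (placeholder data)

    diffs = [Difference(tf, sf)]   # .jar maps to ["imgdiff", "-z"] via DIFF_PROGRAM_BY_EXT
    ComputeDifferences(diffs)      # OPTIONS.worker_threads workers, largest targets first

    target_file, source_file, patch = diffs[0].GetPatch()
    if patch is None:
      logger.error("patch generation failed for %s", target_file.name)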
+class BlockDifference(object):
+ def __init__(self, partition, tgt, src=None, check_first_block=False,
+ version=None, disable_imgdiff=False):
+ self.tgt = tgt
+ self.src = src
+ self.partition = partition
+ self.check_first_block = check_first_block
+ self.disable_imgdiff = disable_imgdiff
+
+ if version is None:
+ version = max(
+ int(i) for i in
+ OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
+ assert version >= 3
+ self.version = version
+
+ b = BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
+ version=self.version,
+ disable_imgdiff=self.disable_imgdiff)
+ self.path = os.path.join(MakeTempDir(), partition)
+ b.Compute(self.path)
+ self._required_cache = b.max_stashed_size
+ self.touched_src_ranges = b.touched_src_ranges
+ self.touched_src_sha1 = b.touched_src_sha1
+
+ # On devices with dynamic partitions, for new partitions,
+ # src is None but OPTIONS.source_info_dict is not.
+ if OPTIONS.source_info_dict is None:
+ is_dynamic_build = OPTIONS.info_dict.get(
+ "use_dynamic_partitions") == "true"
+ is_dynamic_source = False
+ else:
+ is_dynamic_build = OPTIONS.source_info_dict.get(
+ "use_dynamic_partitions") == "true"
+ is_dynamic_source = partition in shlex.split(
+ OPTIONS.source_info_dict.get("dynamic_partition_list", "").strip())
+
+ is_dynamic_target = partition in shlex.split(
+ OPTIONS.info_dict.get("dynamic_partition_list", "").strip())
+
+ # For dynamic partitions builds, check partition list in both source
+ # and target build because new partitions may be added, and existing
+ # partitions may be removed.
+ is_dynamic = is_dynamic_build and (is_dynamic_source or is_dynamic_target)
+
+ if is_dynamic:
+ self.device = 'map_partition("%s")' % partition
+ else:
+ if OPTIONS.source_info_dict is None:
+ _, device_expr = GetTypeAndDeviceExpr("/" + partition,
+ OPTIONS.info_dict)
+ else:
+ _, device_expr = GetTypeAndDeviceExpr("/" + partition,
+ OPTIONS.source_info_dict)
+ self.device = device_expr
+
+ @property
+ def required_cache(self):
+ return self._required_cache
+
+ def WriteScript(self, script, output_zip, progress=None,
+ write_verify_script=False):
+ if not self.src:
+ # write the output unconditionally
+ script.Print("Patching %s image unconditionally..." % (self.partition,))
+ else:
+ script.Print("Patching %s image after verification." % (self.partition,))
+
+ if progress:
+ script.ShowProgress(progress, 0)
+ self._WriteUpdate(script, output_zip)
+
+ if write_verify_script:
+ self.WritePostInstallVerifyScript(script)
+
+ def WriteStrictVerifyScript(self, script):
+ """Verify all the blocks in the care_map, including clobbered blocks.
+
+ This differs from the WriteVerifyScript() function: a) it prints different
+ error messages; b) it doesn't allow half-way updated images to pass the
+ verification."""
+
+ partition = self.partition
+ script.Print("Verifying %s..." % (partition,))
+ ranges = self.tgt.care_map
+ ranges_str = ranges.to_string_raw()
+ script.AppendExtra(
+ 'range_sha1(%s, "%s") == "%s" && ui_print(" Verified.") || '
+ 'ui_print("%s has unexpected contents.");' % (
+ self.device, ranges_str,
+ self.tgt.TotalSha1(include_clobbered_blocks=True),
+ self.partition))
+ script.AppendExtra("")
+
+ def WriteVerifyScript(self, script, touched_blocks_only=False):
+ partition = self.partition
+
+ # full OTA
+ if not self.src:
+ script.Print("Image %s will be patched unconditionally." % (partition,))
+
+ # incremental OTA
+ else:
+ if touched_blocks_only:
+ ranges = self.touched_src_ranges
+ expected_sha1 = self.touched_src_sha1
+ else:
+ ranges = self.src.care_map.subtract(self.src.clobbered_blocks)
+ expected_sha1 = self.src.TotalSha1()
+
+ # No blocks to be checked, skipping.
+ if not ranges:
+ return
+
+ ranges_str = ranges.to_string_raw()
+ script.AppendExtra(
+ 'if (range_sha1(%s, "%s") == "%s" || block_image_verify(%s, '
+ 'package_extract_file("%s.transfer.list"), "%s.new.dat", '
+ '"%s.patch.dat")) then' % (
+ self.device, ranges_str, expected_sha1,
+ self.device, partition, partition, partition))
+ script.Print('Verified %s image...' % (partition,))
+ script.AppendExtra('else')
+
+ if self.version >= 4:
+
+ # Bug: 21124327
+ # When generating incrementals for the system and vendor partitions in
+ # version 4 or newer, explicitly check the first block (which contains
+ # the superblock) of the partition to see if it's what we expect. If
+ # this check fails, give an explicit log message about the partition
+ # having been remounted R/W (the most likely explanation).
+ if self.check_first_block:
+ script.AppendExtra('check_first_block(%s);' % (self.device,))
+
+ # If version >= 4, try block recovery before abort update
+ if partition == "system":
+ code = ErrorCode.SYSTEM_RECOVER_FAILURE
+ else:
+ code = ErrorCode.VENDOR_RECOVER_FAILURE
+ script.AppendExtra((
+ 'ifelse (block_image_recover({device}, "{ranges}") && '
+ 'block_image_verify({device}, '
+ 'package_extract_file("{partition}.transfer.list"), '
+ '"{partition}.new.dat", "{partition}.patch.dat"), '
+ 'ui_print("{partition} recovered successfully."), '
+ 'abort("E{code}: {partition} partition fails to recover"));\n'
+ 'endif;').format(device=self.device, ranges=ranges_str,
+ partition=partition, code=code))
+
+ # Abort the OTA update. Note that the incremental OTA cannot be applied
+ # even if it may match the checksum of the target partition.
+ # a) If version < 3, operations like move and erase will make changes
+ # unconditionally and damage the partition.
+ # b) If version >= 3, it won't even reach here.
+ else:
+ if partition == "system":
+ code = ErrorCode.SYSTEM_VERIFICATION_FAILURE
+ else:
+ code = ErrorCode.VENDOR_VERIFICATION_FAILURE
+ script.AppendExtra((
+ 'abort("E%d: %s partition has unexpected contents");\n'
+ 'endif;') % (code, partition))
+
+ def WritePostInstallVerifyScript(self, script):
+ partition = self.partition
+ script.Print('Verifying the updated %s image...' % (partition,))
+ # Unlike pre-install verification, clobbered_blocks should not be ignored.
+ ranges = self.tgt.care_map
+ ranges_str = ranges.to_string_raw()
+ script.AppendExtra(
+ 'if range_sha1(%s, "%s") == "%s" then' % (
+ self.device, ranges_str,
+ self.tgt.TotalSha1(include_clobbered_blocks=True)))
+
+ # Bug: 20881595
+ # Verify that extended blocks are really zeroed out.
+ if self.tgt.extended:
+ ranges_str = self.tgt.extended.to_string_raw()
+ script.AppendExtra(
+ 'if range_sha1(%s, "%s") == "%s" then' % (
+ self.device, ranges_str,
+ self._HashZeroBlocks(self.tgt.extended.size())))
+ script.Print('Verified the updated %s image.' % (partition,))
+ if partition == "system":
+ code = ErrorCode.SYSTEM_NONZERO_CONTENTS
+ else:
+ code = ErrorCode.VENDOR_NONZERO_CONTENTS
+ script.AppendExtra(
+ 'else\n'
+ ' abort("E%d: %s partition has unexpected non-zero contents after '
+ 'OTA update");\n'
+ 'endif;' % (code, partition))
+ else:
+ script.Print('Verified the updated %s image.' % (partition,))
+
+ if partition == "system":
+ code = ErrorCode.SYSTEM_UNEXPECTED_CONTENTS
+ else:
+ code = ErrorCode.VENDOR_UNEXPECTED_CONTENTS
+
+ script.AppendExtra(
+ 'else\n'
+ ' abort("E%d: %s partition has unexpected contents after OTA '
+ 'update");\n'
+ 'endif;' % (code, partition))
+
+ def _WriteUpdate(self, script, output_zip):
+ ZipWrite(output_zip,
+ '{}.transfer.list'.format(self.path),
+ '{}.transfer.list'.format(self.partition))
+
+ # For full OTA, compress the new.dat with brotli at quality 6 to reduce
+ # its size. Quality 9 almost triples the compression time but doesn't
+ # reduce the size much further. For a typical 1.8G system.new.dat
+ # zip | brotli(quality 6) | brotli(quality 9)
+ # compressed_size: 942M | 869M (~8% reduced) | 854M
+ # compression_time: 75s | 265s | 719s
+ # decompression_time: 15s | 25s | 25s
+
+ if not self.src:
+ brotli_cmd = ['brotli', '--quality=6',
+ '--output={}.new.dat.br'.format(self.path),
+ '{}.new.dat'.format(self.path)]
+ print("Compressing {}.new.dat with brotli".format(self.partition))
+ RunAndCheckOutput(brotli_cmd)
+
+ new_data_name = '{}.new.dat.br'.format(self.partition)
+ ZipWrite(output_zip,
+ '{}.new.dat.br'.format(self.path),
+ new_data_name,
+ compress_type=zipfile.ZIP_STORED)
+ else:
+ new_data_name = '{}.new.dat'.format(self.partition)
+ ZipWrite(output_zip, '{}.new.dat'.format(self.path), new_data_name)
+
+ ZipWrite(output_zip,
+ '{}.patch.dat'.format(self.path),
+ '{}.patch.dat'.format(self.partition),
+ compress_type=zipfile.ZIP_STORED)
+
+ if self.partition == "system":
+ code = ErrorCode.SYSTEM_UPDATE_FAILURE
+ else:
+ code = ErrorCode.VENDOR_UPDATE_FAILURE
+
+ call = ('block_image_update({device}, '
+ 'package_extract_file("{partition}.transfer.list"), '
+ '"{new_data_name}", "{partition}.patch.dat") ||\n'
+ ' abort("E{code}: Failed to update {partition} image.");'.format(
+ device=self.device, partition=self.partition,
+ new_data_name=new_data_name, code=code))
+ script.AppendExtra(script.WordWrap(call))
+
+ def _HashBlocks(self, source, ranges): # pylint: disable=no-self-use
+ data = source.ReadRangeSet(ranges)
+ ctx = sha1()
+
+ for p in data:
+ ctx.update(p)
+
+ return ctx.hexdigest()
+
+ def _HashZeroBlocks(self, num_blocks): # pylint: disable=no-self-use
+ """Return the hash value for all zero blocks."""
+ zero_block = b'\x00' * 4096
+ ctx = sha1()
+ for _ in range(num_blocks):
+ ctx.update(zero_block)
+
+ return ctx.hexdigest()
+
+
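Since the zero-block digest above is simply the SHA-1 over num_blocks * 4096 zero bytes, the same value can be reproduced directly with hashlib; a small sanity-check sketch:

    from hashlib import sha1

    def hash_zero_blocks(num_blocks, block_size=4096):
      # Matches BlockDifference._HashZeroBlocks(num_blocks) for the default block size.
      return sha1(b'\x00' * (block_size * num_blocks)).hexdigest()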
+def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
+ info_dict=None):
+ """Generates the recovery-from-boot patch and writes the script to output.
+
+ Most of the space in the boot and recovery images is just the kernel, which is
+ identical for the two, so the resulting patch should be efficient. Add it to
+ the output zip, along with a shell script that is run from init.rc on first
+ boot to actually do the patching and install the new recovery image.
+
+ Args:
+ input_dir: The top-level input directory of the target-files.zip.
+ output_sink: The callback function that writes the result.
+ recovery_img: File object for the recovery image.
+ boot_img: File object for the boot image.
+ info_dict: A dict returned by common.LoadInfoDict() on the input
+ target_files. Will use OPTIONS.info_dict if None has been given.
+ """
+ if info_dict is None:
+ info_dict = OPTIONS.info_dict
+
+ full_recovery_image = info_dict.get("full_recovery_image") == "true"
+ board_uses_vendorimage = info_dict.get("board_uses_vendorimage") == "true"
+
+ if board_uses_vendorimage:
+ # In this case, the output sink is rooted at VENDOR
+ recovery_img_path = "etc/recovery.img"
+ recovery_resource_dat_path = "VENDOR/etc/recovery-resource.dat"
+ sh_dir = "bin"
+ else:
+ # In this case the output sink is rooted at SYSTEM
+ recovery_img_path = "vendor/etc/recovery.img"
+ recovery_resource_dat_path = "SYSTEM/vendor/etc/recovery-resource.dat"
+ sh_dir = "vendor/bin"
+
+ if full_recovery_image:
+ output_sink(recovery_img_path, recovery_img.data)
+
+ else:
+ system_root_image = info_dict.get("system_root_image") == "true"
+ include_recovery_dtbo = info_dict.get("include_recovery_dtbo") == "true"
+ include_recovery_acpio = info_dict.get("include_recovery_acpio") == "true"
+ path = os.path.join(input_dir, recovery_resource_dat_path)
+ # With system-root-image, boot and recovery images will have mismatching
+ # entries (only recovery has the ramdisk entry) (Bug: 72731506). Use bsdiff
+ # to handle such a case.
+ if system_root_image or include_recovery_dtbo or include_recovery_acpio:
+ diff_program = ["bsdiff"]
+ bonus_args = ""
+ assert not os.path.exists(path)
+ else:
+ diff_program = ["imgdiff"]
+ if os.path.exists(path):
+ diff_program.append("-b")
+ diff_program.append(path)
+ bonus_args = "--bonus /vendor/etc/recovery-resource.dat"
+ else:
+ bonus_args = ""
+
+ d = Difference(recovery_img, boot_img, diff_program=diff_program)
+ _, _, patch = d.ComputePatch()
+ output_sink("recovery-from-boot.p", patch)
+
+ try:
+ # The following GetTypeAndDevice()s need to use the path in the target
+ # info_dict instead of source_info_dict.
+ boot_type, boot_device = GetTypeAndDevice("/boot", info_dict,
+ check_no_slot=False)
+ recovery_type, recovery_device = GetTypeAndDevice("/recovery", info_dict,
+ check_no_slot=False)
+ except KeyError:
+ return
+
+ if full_recovery_image:
+
+ # Note that we use /vendor to refer to the recovery resources. This will
+ # work for a separate vendor partition mounted at /vendor or a
+ # /system/vendor subdirectory on the system partition, for which init will
+ # create a symlink from /vendor to /system/vendor.
+
+ sh = """#!/vendor/bin/sh
+if ! applypatch --check %(type)s:%(device)s:%(size)d:%(sha1)s; then
+ applypatch \\
+ --flash /vendor/etc/recovery.img \\
+ --target %(type)s:%(device)s:%(size)d:%(sha1)s && \\
+ log -t recovery "Installing new recovery image: succeeded" || \\
+ log -t recovery "Installing new recovery image: failed"
+else
+ log -t recovery "Recovery image already installed"
+fi
+""" % {'type': recovery_type,
+ 'device': recovery_device,
+ 'sha1': recovery_img.sha1,
+ 'size': recovery_img.size}
+ else:
+ sh = """#!/vendor/bin/sh
+if ! applypatch --check %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
+ applypatch %(bonus_args)s \\
+ --patch /vendor/recovery-from-boot.p \\
+ --source %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s \\
+ --target %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s && \\
+ log -t recovery "Installing new recovery image: succeeded" || \\
+ log -t recovery "Installing new recovery image: failed"
+else
+ log -t recovery "Recovery image already installed"
+fi
+""" % {'boot_size': boot_img.size,
+ 'boot_sha1': boot_img.sha1,
+ 'recovery_size': recovery_img.size,
+ 'recovery_sha1': recovery_img.sha1,
+ 'boot_type': boot_type,
+ 'boot_device': boot_device + '$(getprop ro.boot.slot_suffix)',
+ 'recovery_type': recovery_type,
+ 'recovery_device': recovery_device + '$(getprop ro.boot.slot_suffix)',
+ 'bonus_args': bonus_args}
+
+ # The install script location moved from /system/etc to /system/bin in the L
+ # release. In the R release it is in VENDOR/bin or SYSTEM/vendor/bin.
+ sh_location = os.path.join(sh_dir, "install-recovery.sh")
+
+ logger.info("putting script in %s", sh_location)
+
+ output_sink(sh_location, sh.encode())
+
+
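For reference, a caller typically hands MakeRecoveryPatch a small callback that writes each (path, data) pair under the target-files tree, as the tests further down do with _out_tmp_sink. A hedged sketch follows; input_dir, recovery_data, boot_data and info_dict are placeholders, and the directory layout is assumed:

    def make_output_sink(base_dir, prefix="SYSTEM"):
      def output_sink(name, data):
        path = os.path.join(base_dir, prefix, name)
        os.makedirs(os.path.dirname(path), exist_ok=True)
        with open(path, "wb") as f:
          f.write(data)
      return output_sink

    recovery_img = common.File("recovery.img", recovery_data)  # placeholder image data
    boot_img = common.File("boot.img", boot_data)
    MakeRecoveryPatch(input_dir, make_output_sink(input_dir),
                      recovery_img, boot_img, info_dict)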
+class DynamicPartitionUpdate(object):
+ def __init__(self, src_group=None, tgt_group=None, progress=None,
+ block_difference=None):
+ self.src_group = src_group
+ self.tgt_group = tgt_group
+ self.progress = progress
+ self.block_difference = block_difference
+
+ @property
+ def src_size(self):
+ if not self.block_difference:
+ return 0
+ return DynamicPartitionUpdate._GetSparseImageSize(self.block_difference.src)
+
+ @property
+ def tgt_size(self):
+ if not self.block_difference:
+ return 0
+ return DynamicPartitionUpdate._GetSparseImageSize(self.block_difference.tgt)
+
+ @staticmethod
+ def _GetSparseImageSize(img):
+ if not img:
+ return 0
+ return img.blocksize * img.total_blocks
+
+
+class DynamicGroupUpdate(object):
+ def __init__(self, src_size=None, tgt_size=None):
+ # None: group does not exist. 0: no size limits.
+ self.src_size = src_size
+ self.tgt_size = tgt_size
+
+
+class DynamicPartitionsDifference(object):
+ def __init__(self, info_dict, block_diffs, progress_dict=None,
+ source_info_dict=None):
+ if progress_dict is None:
+ progress_dict = {}
+
+ self._remove_all_before_apply = False
+ if source_info_dict is None:
+ self._remove_all_before_apply = True
+ source_info_dict = {}
+
+ block_diff_dict = collections.OrderedDict(
+ [(e.partition, e) for e in block_diffs])
+
+ assert len(block_diff_dict) == len(block_diffs), \
+ "Duplicated BlockDifference object for {}".format(
+ [partition for partition, count in
+ collections.Counter(e.partition for e in block_diffs).items()
+ if count > 1])
+
+ self._partition_updates = collections.OrderedDict()
+
+ for p, block_diff in block_diff_dict.items():
+ self._partition_updates[p] = DynamicPartitionUpdate()
+ self._partition_updates[p].block_difference = block_diff
+
+ for p, progress in progress_dict.items():
+ if p in self._partition_updates:
+ self._partition_updates[p].progress = progress
+
+ tgt_groups = shlex.split(info_dict.get(
+ "super_partition_groups", "").strip())
+ src_groups = shlex.split(source_info_dict.get(
+ "super_partition_groups", "").strip())
+
+ for g in tgt_groups:
+ for p in shlex.split(info_dict.get(
+ "super_%s_partition_list" % g, "").strip()):
+ assert p in self._partition_updates, \
+ "{} is in target super_{}_partition_list but no BlockDifference " \
+ "object is provided.".format(p, g)
+ self._partition_updates[p].tgt_group = g
+
+ for g in src_groups:
+ for p in shlex.split(source_info_dict.get(
+ "super_%s_partition_list" % g, "").strip()):
+ assert p in self._partition_updates, \
+ "{} is in source super_{}_partition_list but no BlockDifference " \
+ "object is provided.".format(p, g)
+ self._partition_updates[p].src_group = g
+
+ target_dynamic_partitions = set(shlex.split(info_dict.get(
+ "dynamic_partition_list", "").strip()))
+ block_diffs_with_target = set(p for p, u in self._partition_updates.items()
+ if u.tgt_size)
+ assert block_diffs_with_target == target_dynamic_partitions, \
+ "Target Dynamic partitions: {}, BlockDifference with target: {}".format(
+ list(target_dynamic_partitions), list(block_diffs_with_target))
+
+ source_dynamic_partitions = set(shlex.split(source_info_dict.get(
+ "dynamic_partition_list", "").strip()))
+ block_diffs_with_source = set(p for p, u in self._partition_updates.items()
+ if u.src_size)
+ assert block_diffs_with_source == source_dynamic_partitions, \
+ "Source Dynamic partitions: {}, BlockDifference with source: {}".format(
+ list(source_dynamic_partitions), list(block_diffs_with_source))
+
+ if self._partition_updates:
+ logger.info("Updating dynamic partitions %s",
+ self._partition_updates.keys())
+
+ self._group_updates = collections.OrderedDict()
+
+ for g in tgt_groups:
+ self._group_updates[g] = DynamicGroupUpdate()
+ self._group_updates[g].tgt_size = int(info_dict.get(
+ "super_%s_group_size" % g, "0").strip())
+
+ for g in src_groups:
+ if g not in self._group_updates:
+ self._group_updates[g] = DynamicGroupUpdate()
+ self._group_updates[g].src_size = int(source_info_dict.get(
+ "super_%s_group_size" % g, "0").strip())
+
+ self._Compute()
+
+ def WriteScript(self, script, output_zip, write_verify_script=False):
+ script.Comment('--- Start patching dynamic partitions ---')
+ for p, u in self._partition_updates.items():
+ if u.src_size and u.tgt_size and u.src_size > u.tgt_size:
+ script.Comment('Patch partition %s' % p)
+ u.block_difference.WriteScript(script, output_zip, progress=u.progress,
+ write_verify_script=False)
+
+ op_list_path = MakeTempFile()
+ with open(op_list_path, 'w') as f:
+ for line in self._op_list:
+ f.write('{}\n'.format(line))
+
+ ZipWrite(output_zip, op_list_path, "dynamic_partitions_op_list")
+
+ script.Comment('Update dynamic partition metadata')
+ script.AppendExtra('assert(update_dynamic_partitions('
+ 'package_extract_file("dynamic_partitions_op_list")));')
+
+ if write_verify_script:
+ for p, u in self._partition_updates.items():
+ if u.src_size and u.tgt_size and u.src_size > u.tgt_size:
+ u.block_difference.WritePostInstallVerifyScript(script)
+ script.AppendExtra('unmap_partition("%s");' % p) # ignore errors
+
+ for p, u in self._partition_updates.items():
+ if u.tgt_size and u.src_size <= u.tgt_size:
+ script.Comment('Patch partition %s' % p)
+ u.block_difference.WriteScript(script, output_zip, progress=u.progress,
+ write_verify_script=write_verify_script)
+ if write_verify_script:
+ script.AppendExtra('unmap_partition("%s");' % p) # ignore errors
+
+ script.Comment('--- End patching dynamic partitions ---')
+
+ def _Compute(self):
+ self._op_list = list()
+
+ def append(line):
+ self._op_list.append(line)
+
+ def comment(line):
+ self._op_list.append("# %s" % line)
+
+ if self._remove_all_before_apply:
+ comment('Remove all existing dynamic partitions and groups before '
+ 'applying full OTA')
+ append('remove_all_groups')
+
+ for p, u in self._partition_updates.items():
+ if u.src_group and not u.tgt_group:
+ append('remove %s' % p)
+
+ for p, u in self._partition_updates.items():
+ if u.src_group and u.tgt_group and u.src_group != u.tgt_group:
+ comment('Move partition %s from %s to default' % (p, u.src_group))
+ append('move %s default' % p)
+
+ for p, u in self._partition_updates.items():
+ if u.src_size and u.tgt_size and u.src_size > u.tgt_size:
+ comment('Shrink partition %s from %d to %d' %
+ (p, u.src_size, u.tgt_size))
+ append('resize %s %s' % (p, u.tgt_size))
+
+ for g, u in self._group_updates.items():
+ if u.src_size is not None and u.tgt_size is None:
+ append('remove_group %s' % g)
+ if (u.src_size is not None and u.tgt_size is not None and
+ u.src_size > u.tgt_size):
+ comment('Shrink group %s from %d to %d' % (g, u.src_size, u.tgt_size))
+ append('resize_group %s %d' % (g, u.tgt_size))
+
+ for g, u in self._group_updates.items():
+ if u.src_size is None and u.tgt_size is not None:
+ comment('Add group %s with maximum size %d' % (g, u.tgt_size))
+ append('add_group %s %d' % (g, u.tgt_size))
+ if (u.src_size is not None and u.tgt_size is not None and
+ u.src_size < u.tgt_size):
+ comment('Grow group %s from %d to %d' % (g, u.src_size, u.tgt_size))
+ append('resize_group %s %d' % (g, u.tgt_size))
+
+ for p, u in self._partition_updates.items():
+ if u.tgt_group and not u.src_group:
+ comment('Add partition %s to group %s' % (p, u.tgt_group))
+ append('add %s %s' % (p, u.tgt_group))
+
+ for p, u in self._partition_updates.items():
+ if u.tgt_size and u.src_size < u.tgt_size:
+ comment('Grow partition %s from %d to %d' %
+ (p, u.src_size, u.tgt_size))
+ append('resize %s %d' % (p, u.tgt_size))
+
+ for p, u in self._partition_updates.items():
+ if u.src_group and u.tgt_group and u.src_group != u.tgt_group:
+ comment('Move partition %s from default to %s' %
+ (p, u.tgt_group))
+ append('move %s %s' % (p, u.tgt_group))
+
+
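To make the op ordering in _Compute() concrete: for a simplified incremental scenario where system_ext is dropped, vendor shrinks from 1 GiB to 512 MiB, group_foo shrinks from 4 GiB to 3 GiB and system grows from 1 GiB to 1.5 GiB (the same sizes the tests below exercise), the emitted dynamic_partitions_op_list would read roughly:

    remove system_ext
    # Shrink partition vendor from 1073741824 to 536870912
    resize vendor 536870912
    # Shrink group group_foo from 4294967296 to 3221225472
    resize_group group_foo 3221225472
    # Grow partition system from 1073741824 to 1610612736
    resize system 1610612736

Shrinks and removals always precede group resizes and grows, so the super partition is never overcommitted partway through the update.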
+def GetTypeAndDevice(mount_point, info, check_no_slot=True):
+ """
+ Use GetTypeAndDeviceExpr whenever possible. This function is kept for
+ backwards compatibility. It aborts if the fstab entry has the slotselect option
+ (unless check_no_slot is explicitly set to False).
+ """
+ fstab = info["fstab"]
+ if fstab:
+ if check_no_slot:
+ assert not fstab[mount_point].slotselect, \
+ "Use GetTypeAndDeviceExpr instead"
+ return (PARTITION_TYPES[fstab[mount_point].fs_type],
+ fstab[mount_point].device)
+ raise KeyError
+
+
+def GetTypeAndDeviceExpr(mount_point, info):
+ """
+ Return the partition type and an edify expression that evaluates to the device
+ at runtime.
+ """
+ fstab = info["fstab"]
+ if fstab:
+ p = fstab[mount_point]
+ device_expr = '"%s"' % fstab[mount_point].device
+ if p.slotselect:
+ device_expr = 'add_slot_suffix(%s)' % device_expr
+ return (PARTITION_TYPES[fstab[mount_point].fs_type], device_expr)
+ raise KeyError
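For a slot-aware /boot entry, the interesting part is the shape of the returned expression rather than the concrete device path (which is made up here); a sketch assuming PARTITION_TYPES maps emmc to EMMC:

    # fstab["/boot"].fs_type == "emmc", fstab["/boot"].slotselect is set:
    #   GetTypeAndDeviceExpr("/boot", info)
    #   -> ("EMMC", 'add_slot_suffix("/dev/block/by-name/boot")')
    # Without slotselect the second element is just '"/dev/block/by-name/boot"'.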
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 7be9876..de0e187 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -995,7 +995,7 @@
metadata = GetPackageMetadata(target_info, source_info)
# Generate payload.
payload = PayloadGenerator(
- wipe_user_data=OPTIONS.wipe_user_data, minor_version=OPTIONS.force_minor_version, is_partial_update=OPTIONS.partial)
+ wipe_user_data=OPTIONS.wipe_user_data, minor_version=OPTIONS.force_minor_version, is_partial_update=OPTIONS.partial, spl_downgrade=OPTIONS.spl_downgrade)
partition_timestamps_flags = []
# Enforce a max timestamp this payload can be applied on top of.
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 5c70223..6ca9d64 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -791,7 +791,7 @@
SECONDARY_PAYLOAD_BIN = 'secondary/payload.bin'
SECONDARY_PAYLOAD_PROPERTIES_TXT = 'secondary/payload_properties.txt'
- def __init__(self, secondary=False, wipe_user_data=False, minor_version=None, is_partial_update=False):
+ def __init__(self, secondary=False, wipe_user_data=False, minor_version=None, is_partial_update=False, spl_downgrade=False):
"""Initializes a Payload instance.
Args:
@@ -803,6 +803,7 @@
self.wipe_user_data = wipe_user_data
self.minor_version = minor_version
self.is_partial_update = is_partial_update
+ self.spl_downgrade = spl_downgrade
def _Run(self, cmd): # pylint: disable=no-self-use
# Don't pipe (buffer) the output if verbose is set. Let
@@ -912,13 +913,15 @@
"--properties_file=" + properties_file]
self._Run(cmd)
- if self.secondary:
- with open(properties_file, "a") as f:
- f.write("SWITCH_SLOT_ON_REBOOT=0\n")
- if self.wipe_user_data:
- with open(properties_file, "a") as f:
+ with open(properties_file, "a") as f:
+ if self.wipe_user_data:
f.write("POWERWASH=1\n")
+ if self.secondary:
+ f.write("SWITCH_SLOT_ON_REBOOT=0\n")
+ if self.spl_downgrade:
+ f.write("SPL_DOWNGRADE=1\n")
+
self.payload_properties = properties_file
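With the flag plumbed through, a secondary payload generated with wipe_user_data and spl_downgrade both set would end up with these extra lines appended to payload_properties.txt (in the order the code writes them):

    POWERWASH=1
    SWITCH_SLOT_ON_REBOOT=0
    SPL_DOWNGRADE=1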
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 14f0e88..8052821 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -26,7 +26,6 @@
import common
import test_utils
import validate_target_files
-from images import EmptyImage, DataImage
from rangelib import RangeSet
@@ -1671,292 +1670,6 @@
test_file.name, 'generic_kernel')
-class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
- """Checks the format of install-recovery.sh.
-
- Its format should match between common.py and validate_target_files.py.
- """
-
- def setUp(self):
- self._tempdir = common.MakeTempDir()
- # Create a fake dict that contains the fstab info for boot&recovery.
- self._info = {"fstab": {}}
- fake_fstab = [
- "/dev/soc.0/by-name/boot /boot emmc defaults defaults",
- "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
- self._info["fstab"] = common.LoadRecoveryFSTab("\n".join, 2, fake_fstab)
- # Construct the gzipped recovery.img and boot.img
- self.recovery_data = bytearray([
- 0x1f, 0x8b, 0x08, 0x00, 0x81, 0x11, 0x02, 0x5a, 0x00, 0x03, 0x2b, 0x4a,
- 0x4d, 0xce, 0x2f, 0x4b, 0x2d, 0xaa, 0x04, 0x00, 0xc9, 0x93, 0x43, 0xf3,
- 0x08, 0x00, 0x00, 0x00
- ])
- # echo -n "boot" | gzip -f | hd
- self.boot_data = bytearray([
- 0x1f, 0x8b, 0x08, 0x00, 0x8c, 0x12, 0x02, 0x5a, 0x00, 0x03, 0x4b, 0xca,
- 0xcf, 0x2f, 0x01, 0x00, 0xc4, 0xae, 0xed, 0x46, 0x04, 0x00, 0x00, 0x00
- ])
-
- def _out_tmp_sink(self, name, data, prefix="SYSTEM"):
- loc = os.path.join(self._tempdir, prefix, name)
- if not os.path.exists(os.path.dirname(loc)):
- os.makedirs(os.path.dirname(loc))
- with open(loc, "wb") as f:
- f.write(data)
-
- def test_full_recovery(self):
- recovery_image = common.File("recovery.img", self.recovery_data)
- boot_image = common.File("boot.img", self.boot_data)
- self._info["full_recovery_image"] = "true"
-
- common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
- recovery_image, boot_image, self._info)
- validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
- self._info)
-
- @test_utils.SkipIfExternalToolsUnavailable()
- def test_recovery_from_boot(self):
- recovery_image = common.File("recovery.img", self.recovery_data)
- self._out_tmp_sink("recovery.img", recovery_image.data, "IMAGES")
- boot_image = common.File("boot.img", self.boot_data)
- self._out_tmp_sink("boot.img", boot_image.data, "IMAGES")
-
- common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
- recovery_image, boot_image, self._info)
- validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
- self._info)
- # Validate 'recovery-from-boot' with bonus argument.
- self._out_tmp_sink("etc/recovery-resource.dat", b"bonus", "SYSTEM")
- common.MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
- recovery_image, boot_image, self._info)
- validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
- self._info)
-
-
-class MockBlockDifference(object):
-
- def __init__(self, partition, tgt, src=None):
- self.partition = partition
- self.tgt = tgt
- self.src = src
-
- def WriteScript(self, script, _, progress=None,
- write_verify_script=False):
- if progress:
- script.AppendExtra("progress({})".format(progress))
- script.AppendExtra("patch({});".format(self.partition))
- if write_verify_script:
- self.WritePostInstallVerifyScript(script)
-
- def WritePostInstallVerifyScript(self, script):
- script.AppendExtra("verify({});".format(self.partition))
-
-
-class FakeSparseImage(object):
-
- def __init__(self, size):
- self.blocksize = 4096
- self.total_blocks = size // 4096
- assert size % 4096 == 0, "{} is not a multiple of 4096".format(size)
-
-
-class DynamicPartitionsDifferenceTest(test_utils.ReleaseToolsTestCase):
-
- @staticmethod
- def get_op_list(output_path):
- with zipfile.ZipFile(output_path, allowZip64=True) as output_zip:
- with output_zip.open('dynamic_partitions_op_list') as op_list:
- return [line.decode().strip() for line in op_list.readlines()
- if not line.startswith(b'#')]
-
- def setUp(self):
- self.script = test_utils.MockScriptWriter()
- self.output_path = common.MakeTempFile(suffix='.zip')
-
- def test_full(self):
- target_info = common.LoadDictionaryFromLines("""
-dynamic_partition_list=system vendor
-super_partition_groups=group_foo
-super_group_foo_group_size={group_size}
-super_group_foo_partition_list=system vendor
-""".format(group_size=4 * GiB).split("\n"))
- block_diffs = [MockBlockDifference("system", FakeSparseImage(3 * GiB)),
- MockBlockDifference("vendor", FakeSparseImage(1 * GiB))]
-
- dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs)
- with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
- dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
-
- self.assertEqual(str(self.script).strip(), """
-assert(update_dynamic_partitions(package_extract_file("dynamic_partitions_op_list")));
-patch(system);
-verify(system);
-unmap_partition("system");
-patch(vendor);
-verify(vendor);
-unmap_partition("vendor");
-""".strip())
-
- lines = self.get_op_list(self.output_path)
-
- remove_all_groups = lines.index("remove_all_groups")
- add_group = lines.index("add_group group_foo 4294967296")
- add_vendor = lines.index("add vendor group_foo")
- add_system = lines.index("add system group_foo")
- resize_vendor = lines.index("resize vendor 1073741824")
- resize_system = lines.index("resize system 3221225472")
-
- self.assertLess(remove_all_groups, add_group,
- "Should add groups after removing all groups")
- self.assertLess(add_group, min(add_vendor, add_system),
- "Should add partitions after adding group")
- self.assertLess(add_system, resize_system,
- "Should resize system after adding it")
- self.assertLess(add_vendor, resize_vendor,
- "Should resize vendor after adding it")
-
- def test_inc_groups(self):
- source_info = common.LoadDictionaryFromLines("""
-super_partition_groups=group_foo group_bar group_baz
-super_group_foo_group_size={group_foo_size}
-super_group_bar_group_size={group_bar_size}
-""".format(group_foo_size=4 * GiB, group_bar_size=3 * GiB).split("\n"))
- target_info = common.LoadDictionaryFromLines("""
-super_partition_groups=group_foo group_baz group_qux
-super_group_foo_group_size={group_foo_size}
-super_group_baz_group_size={group_baz_size}
-super_group_qux_group_size={group_qux_size}
-""".format(group_foo_size=3 * GiB, group_baz_size=4 * GiB,
- group_qux_size=1 * GiB).split("\n"))
-
- dp_diff = common.DynamicPartitionsDifference(target_info,
- block_diffs=[],
- source_info_dict=source_info)
- with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
- dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
-
- lines = self.get_op_list(self.output_path)
-
- removed = lines.index("remove_group group_bar")
- shrunk = lines.index("resize_group group_foo 3221225472")
- grown = lines.index("resize_group group_baz 4294967296")
- added = lines.index("add_group group_qux 1073741824")
-
- self.assertLess(max(removed, shrunk),
- min(grown, added),
- "ops that remove / shrink partitions must precede ops that "
- "grow / add partitions")
-
- def test_incremental(self):
- source_info = common.LoadDictionaryFromLines("""
-dynamic_partition_list=system vendor product system_ext
-super_partition_groups=group_foo
-super_group_foo_group_size={group_foo_size}
-super_group_foo_partition_list=system vendor product system_ext
-""".format(group_foo_size=4 * GiB).split("\n"))
- target_info = common.LoadDictionaryFromLines("""
-dynamic_partition_list=system vendor product odm
-super_partition_groups=group_foo group_bar
-super_group_foo_group_size={group_foo_size}
-super_group_foo_partition_list=system vendor odm
-super_group_bar_group_size={group_bar_size}
-super_group_bar_partition_list=product
-""".format(group_foo_size=3 * GiB, group_bar_size=1 * GiB).split("\n"))
-
- block_diffs = [MockBlockDifference("system", FakeSparseImage(1536 * MiB),
- src=FakeSparseImage(1024 * MiB)),
- MockBlockDifference("vendor", FakeSparseImage(512 * MiB),
- src=FakeSparseImage(1024 * MiB)),
- MockBlockDifference("product", FakeSparseImage(1024 * MiB),
- src=FakeSparseImage(1024 * MiB)),
- MockBlockDifference("system_ext", None,
- src=FakeSparseImage(1024 * MiB)),
- MockBlockDifference("odm", FakeSparseImage(1024 * MiB),
- src=None)]
-
- dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs,
- source_info_dict=source_info)
- with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
- dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
-
- metadata_idx = self.script.lines.index(
- 'assert(update_dynamic_partitions(package_extract_file('
- '"dynamic_partitions_op_list")));')
- self.assertLess(self.script.lines.index('patch(vendor);'), metadata_idx)
- self.assertLess(metadata_idx, self.script.lines.index('verify(vendor);'))
- for p in ("product", "system", "odm"):
- patch_idx = self.script.lines.index("patch({});".format(p))
- verify_idx = self.script.lines.index("verify({});".format(p))
- self.assertLess(metadata_idx, patch_idx,
- "Should patch {} after updating metadata".format(p))
- self.assertLess(patch_idx, verify_idx,
- "Should verify {} after patching".format(p))
-
- self.assertNotIn("patch(system_ext);", self.script.lines)
-
- lines = self.get_op_list(self.output_path)
-
- remove = lines.index("remove system_ext")
- move_product_out = lines.index("move product default")
- shrink = lines.index("resize vendor 536870912")
- shrink_group = lines.index("resize_group group_foo 3221225472")
- add_group_bar = lines.index("add_group group_bar 1073741824")
- add_odm = lines.index("add odm group_foo")
- grow_existing = lines.index("resize system 1610612736")
- grow_added = lines.index("resize odm 1073741824")
- move_product_in = lines.index("move product group_bar")
-
- max_idx_move_partition_out_foo = max(remove, move_product_out, shrink)
- min_idx_move_partition_in_foo = min(add_odm, grow_existing, grow_added)
-
- self.assertLess(max_idx_move_partition_out_foo, shrink_group,
- "Must shrink group after partitions inside group are shrunk"
- " / removed")
-
- self.assertLess(add_group_bar, move_product_in,
- "Must add partitions to group after group is added")
-
- self.assertLess(max_idx_move_partition_out_foo,
- min_idx_move_partition_in_foo,
- "Must shrink partitions / remove partitions from group"
- "before adding / moving partitions into group")
-
- def test_remove_partition(self):
- source_info = common.LoadDictionaryFromLines("""
-blockimgdiff_versions=3,4
-use_dynamic_partitions=true
-dynamic_partition_list=foo
-super_partition_groups=group_foo
-super_group_foo_group_size={group_foo_size}
-super_group_foo_partition_list=foo
-""".format(group_foo_size=4 * GiB).split("\n"))
- target_info = common.LoadDictionaryFromLines("""
-blockimgdiff_versions=3,4
-use_dynamic_partitions=true
-super_partition_groups=group_foo
-super_group_foo_group_size={group_foo_size}
-""".format(group_foo_size=4 * GiB).split("\n"))
-
- common.OPTIONS.info_dict = target_info
- common.OPTIONS.target_info_dict = target_info
- common.OPTIONS.source_info_dict = source_info
- common.OPTIONS.cache_size = 4 * 4096
-
- block_diffs = [common.BlockDifference("foo", EmptyImage(),
- src=DataImage("source", pad=True))]
-
- dp_diff = common.DynamicPartitionsDifference(target_info, block_diffs,
- source_info_dict=source_info)
- with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
- dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
-
- self.assertNotIn("block_image_update", str(self.script),
- "Removed partition should not be patched.")
-
- lines = self.get_op_list(self.output_path)
- self.assertEqual(lines, ["remove foo"])
-
-
class PartitionBuildPropsTest(test_utils.ReleaseToolsTestCase):
def setUp(self):
self.odm_build_prop = [
diff --git a/tools/releasetools/test_non_ab_ota.py b/tools/releasetools/test_non_ab_ota.py
index 5207e2f..7a5ccd3 100644
--- a/tools/releasetools/test_non_ab_ota.py
+++ b/tools/releasetools/test_non_ab_ota.py
@@ -15,19 +15,24 @@
#
import copy
+import os
import zipfile
import common
import test_utils
+import validate_target_files
-from non_ab_ota import NonAbOtaPropertyFiles, WriteFingerprintAssertion
+from images import EmptyImage, DataImage
+from non_ab_ota import NonAbOtaPropertyFiles, WriteFingerprintAssertion, BlockDifference, DynamicPartitionsDifference, MakeRecoveryPatch
from test_utils import PropertyFilesTestCase
class NonAbOtaPropertyFilesTest(PropertyFilesTestCase):
"""Additional validity checks specialized for NonAbOtaPropertyFiles."""
+
def setUp(self):
- common.OPTIONS.no_signing = False
+ common.OPTIONS.no_signing = False
+
def test_init(self):
property_files = NonAbOtaPropertyFiles()
self.assertEqual('ota-property-files', property_files.name)
@@ -55,7 +60,8 @@
with zipfile.ZipFile(zip_file) as zip_fp:
raw_metadata = property_files.GetPropertyFilesString(
zip_fp, reserve_space=False)
- property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
+ property_files_string = property_files.Finalize(
+ zip_fp, len(raw_metadata))
tokens = self._parse_property_files_string(property_files_string)
self.assertEqual(2, len(tokens))
@@ -77,6 +83,7 @@
property_files.Verify(zip_fp, raw_metadata)
+
class NonAbOTATest(test_utils.ReleaseToolsTestCase):
TEST_TARGET_INFO_DICT = {
'build.prop': common.PartitionBuildProps.FromDictionary(
@@ -98,7 +105,7 @@
),
'vendor.build.prop': common.PartitionBuildProps.FromDictionary(
'vendor', {
- 'ro.vendor.build.fingerprint': 'vendor-build-fingerprint'}
+ 'ro.vendor.build.fingerprint': 'vendor-build-fingerprint'}
),
'property1': 'value1',
'property2': 4096,
@@ -118,6 +125,7 @@
'ro.product.device': 'device3',
},
]
+
def test_WriteFingerprintAssertion_without_oem_props(self):
target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
source_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
@@ -170,3 +178,296 @@
[('AssertSomeThumbprint', 'build-thumbprint',
'source-build-thumbprint')],
script_writer.lines)
+
+
+KiB = 1024
+MiB = 1024 * KiB
+GiB = 1024 * MiB
+
+
+class MockBlockDifference(object):
+
+ def __init__(self, partition, tgt, src=None):
+ self.partition = partition
+ self.tgt = tgt
+ self.src = src
+
+ def WriteScript(self, script, _, progress=None,
+ write_verify_script=False):
+ if progress:
+ script.AppendExtra("progress({})".format(progress))
+ script.AppendExtra("patch({});".format(self.partition))
+ if write_verify_script:
+ self.WritePostInstallVerifyScript(script)
+
+ def WritePostInstallVerifyScript(self, script):
+ script.AppendExtra("verify({});".format(self.partition))
+
+
+class FakeSparseImage(object):
+
+ def __init__(self, size):
+ self.blocksize = 4096
+ self.total_blocks = size // 4096
+ assert size % 4096 == 0, "{} is not a multiple of 4096".format(size)
+
+
+class DynamicPartitionsDifferenceTest(test_utils.ReleaseToolsTestCase):
+
+ @staticmethod
+ def get_op_list(output_path):
+ with zipfile.ZipFile(output_path, allowZip64=True) as output_zip:
+ with output_zip.open('dynamic_partitions_op_list') as op_list:
+ return [line.decode().strip() for line in op_list.readlines()
+ if not line.startswith(b'#')]
+
+ def setUp(self):
+ self.script = test_utils.MockScriptWriter()
+ self.output_path = common.MakeTempFile(suffix='.zip')
+
+ def test_full(self):
+ target_info = common.LoadDictionaryFromLines("""
+dynamic_partition_list=system vendor
+super_partition_groups=group_foo
+super_group_foo_group_size={group_size}
+super_group_foo_partition_list=system vendor
+""".format(group_size=4 * GiB).split("\n"))
+ block_diffs = [MockBlockDifference("system", FakeSparseImage(3 * GiB)),
+ MockBlockDifference("vendor", FakeSparseImage(1 * GiB))]
+
+ dp_diff = DynamicPartitionsDifference(target_info, block_diffs)
+ with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
+ dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
+
+ self.assertEqual(str(self.script).strip(), """
+assert(update_dynamic_partitions(package_extract_file("dynamic_partitions_op_list")));
+patch(system);
+verify(system);
+unmap_partition("system");
+patch(vendor);
+verify(vendor);
+unmap_partition("vendor");
+""".strip())
+
+ lines = self.get_op_list(self.output_path)
+
+ remove_all_groups = lines.index("remove_all_groups")
+ add_group = lines.index("add_group group_foo 4294967296")
+ add_vendor = lines.index("add vendor group_foo")
+ add_system = lines.index("add system group_foo")
+ resize_vendor = lines.index("resize vendor 1073741824")
+ resize_system = lines.index("resize system 3221225472")
+
+ self.assertLess(remove_all_groups, add_group,
+ "Should add groups after removing all groups")
+ self.assertLess(add_group, min(add_vendor, add_system),
+ "Should add partitions after adding group")
+ self.assertLess(add_system, resize_system,
+ "Should resize system after adding it")
+ self.assertLess(add_vendor, resize_vendor,
+ "Should resize vendor after adding it")
+
+ def test_inc_groups(self):
+ source_info = common.LoadDictionaryFromLines("""
+super_partition_groups=group_foo group_bar group_baz
+super_group_foo_group_size={group_foo_size}
+super_group_bar_group_size={group_bar_size}
+""".format(group_foo_size=4 * GiB, group_bar_size=3 * GiB).split("\n"))
+ target_info = common.LoadDictionaryFromLines("""
+super_partition_groups=group_foo group_baz group_qux
+super_group_foo_group_size={group_foo_size}
+super_group_baz_group_size={group_baz_size}
+super_group_qux_group_size={group_qux_size}
+""".format(group_foo_size=3 * GiB, group_baz_size=4 * GiB,
+ group_qux_size=1 * GiB).split("\n"))
+
+ dp_diff = DynamicPartitionsDifference(target_info,
+ block_diffs=[],
+ source_info_dict=source_info)
+ with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
+ dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
+
+ lines = self.get_op_list(self.output_path)
+
+ removed = lines.index("remove_group group_bar")
+ shrunk = lines.index("resize_group group_foo 3221225472")
+ grown = lines.index("resize_group group_baz 4294967296")
+ added = lines.index("add_group group_qux 1073741824")
+
+ self.assertLess(max(removed, shrunk),
+ min(grown, added),
+ "ops that remove / shrink partitions must precede ops that "
+ "grow / add partitions")
+
+ def test_incremental(self):
+ source_info = common.LoadDictionaryFromLines("""
+dynamic_partition_list=system vendor product system_ext
+super_partition_groups=group_foo
+super_group_foo_group_size={group_foo_size}
+super_group_foo_partition_list=system vendor product system_ext
+""".format(group_foo_size=4 * GiB).split("\n"))
+ target_info = common.LoadDictionaryFromLines("""
+dynamic_partition_list=system vendor product odm
+super_partition_groups=group_foo group_bar
+super_group_foo_group_size={group_foo_size}
+super_group_foo_partition_list=system vendor odm
+super_group_bar_group_size={group_bar_size}
+super_group_bar_partition_list=product
+""".format(group_foo_size=3 * GiB, group_bar_size=1 * GiB).split("\n"))
+
+ block_diffs = [MockBlockDifference("system", FakeSparseImage(1536 * MiB),
+ src=FakeSparseImage(1024 * MiB)),
+ MockBlockDifference("vendor", FakeSparseImage(512 * MiB),
+ src=FakeSparseImage(1024 * MiB)),
+ MockBlockDifference("product", FakeSparseImage(1024 * MiB),
+ src=FakeSparseImage(1024 * MiB)),
+ MockBlockDifference("system_ext", None,
+ src=FakeSparseImage(1024 * MiB)),
+ MockBlockDifference("odm", FakeSparseImage(1024 * MiB),
+ src=None)]
+
+ dp_diff = DynamicPartitionsDifference(target_info, block_diffs,
+ source_info_dict=source_info)
+ with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
+ dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
+
+ metadata_idx = self.script.lines.index(
+ 'assert(update_dynamic_partitions(package_extract_file('
+ '"dynamic_partitions_op_list")));')
+ self.assertLess(self.script.lines.index('patch(vendor);'), metadata_idx)
+ self.assertLess(metadata_idx, self.script.lines.index('verify(vendor);'))
+ for p in ("product", "system", "odm"):
+ patch_idx = self.script.lines.index("patch({});".format(p))
+ verify_idx = self.script.lines.index("verify({});".format(p))
+ self.assertLess(metadata_idx, patch_idx,
+ "Should patch {} after updating metadata".format(p))
+ self.assertLess(patch_idx, verify_idx,
+ "Should verify {} after patching".format(p))
+
+ self.assertNotIn("patch(system_ext);", self.script.lines)
+
+ lines = self.get_op_list(self.output_path)
+
+ remove = lines.index("remove system_ext")
+ move_product_out = lines.index("move product default")
+ shrink = lines.index("resize vendor 536870912")
+ shrink_group = lines.index("resize_group group_foo 3221225472")
+ add_group_bar = lines.index("add_group group_bar 1073741824")
+ add_odm = lines.index("add odm group_foo")
+ grow_existing = lines.index("resize system 1610612736")
+ grow_added = lines.index("resize odm 1073741824")
+ move_product_in = lines.index("move product group_bar")
+
+ max_idx_move_partition_out_foo = max(remove, move_product_out, shrink)
+ min_idx_move_partition_in_foo = min(add_odm, grow_existing, grow_added)
+
+ self.assertLess(max_idx_move_partition_out_foo, shrink_group,
+ "Must shrink group after partitions inside group are shrunk"
+ " / removed")
+
+ self.assertLess(add_group_bar, move_product_in,
+ "Must add partitions to group after group is added")
+
+ self.assertLess(max_idx_move_partition_out_foo,
+ min_idx_move_partition_in_foo,
+ "Must shrink partitions / remove partitions from group"
+ "before adding / moving partitions into group")
+
+ def test_remove_partition(self):
+ source_info = common.LoadDictionaryFromLines("""
+blockimgdiff_versions=3,4
+use_dynamic_partitions=true
+dynamic_partition_list=foo
+super_partition_groups=group_foo
+super_group_foo_group_size={group_foo_size}
+super_group_foo_partition_list=foo
+""".format(group_foo_size=4 * GiB).split("\n"))
+ target_info = common.LoadDictionaryFromLines("""
+blockimgdiff_versions=3,4
+use_dynamic_partitions=true
+super_partition_groups=group_foo
+super_group_foo_group_size={group_foo_size}
+""".format(group_foo_size=4 * GiB).split("\n"))
+
+ common.OPTIONS.info_dict = target_info
+ common.OPTIONS.target_info_dict = target_info
+ common.OPTIONS.source_info_dict = source_info
+ common.OPTIONS.cache_size = 4 * 4096
+
+ block_diffs = [BlockDifference("foo", EmptyImage(),
+ src=DataImage("source", pad=True))]
+
+ dp_diff = DynamicPartitionsDifference(target_info, block_diffs,
+ source_info_dict=source_info)
+ with zipfile.ZipFile(self.output_path, 'w', allowZip64=True) as output_zip:
+ dp_diff.WriteScript(self.script, output_zip, write_verify_script=True)
+
+ self.assertNotIn("block_image_update", str(self.script),
+ "Removed partition should not be patched.")
+
+ lines = self.get_op_list(self.output_path)
+ self.assertEqual(lines, ["remove foo"])
+
+
+class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
+ """Checks the format of install-recovery.sh.
+
+ Its format should match between non_ab_ota.py and validate_target_files.py.
+ """
+
+ def setUp(self):
+ self._tempdir = common.MakeTempDir()
+ # Create a fake dict that contains the fstab info for boot&recovery.
+ self._info = {"fstab": {}}
+ fake_fstab = [
+ "/dev/soc.0/by-name/boot /boot emmc defaults defaults",
+ "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
+ self._info["fstab"] = common.LoadRecoveryFSTab("\n".join, 2, fake_fstab)
+ # Construct the gzipped recovery.img and boot.img
+ self.recovery_data = bytearray([
+ 0x1f, 0x8b, 0x08, 0x00, 0x81, 0x11, 0x02, 0x5a, 0x00, 0x03, 0x2b, 0x4a,
+ 0x4d, 0xce, 0x2f, 0x4b, 0x2d, 0xaa, 0x04, 0x00, 0xc9, 0x93, 0x43, 0xf3,
+ 0x08, 0x00, 0x00, 0x00
+ ])
+ # echo -n "boot" | gzip -f | hd
+ self.boot_data = bytearray([
+ 0x1f, 0x8b, 0x08, 0x00, 0x8c, 0x12, 0x02, 0x5a, 0x00, 0x03, 0x4b, 0xca,
+ 0xcf, 0x2f, 0x01, 0x00, 0xc4, 0xae, 0xed, 0x46, 0x04, 0x00, 0x00, 0x00
+ ])
+
+ def _out_tmp_sink(self, name, data, prefix="SYSTEM"):
+ loc = os.path.join(self._tempdir, prefix, name)
+ if not os.path.exists(os.path.dirname(loc)):
+ os.makedirs(os.path.dirname(loc))
+ with open(loc, "wb") as f:
+ f.write(data)
+
+ def test_full_recovery(self):
+ recovery_image = common.File("recovery.img", self.recovery_data)
+ boot_image = common.File("boot.img", self.boot_data)
+ self._info["full_recovery_image"] = "true"
+
+ MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
+ recovery_image, boot_image, self._info)
+ validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
+ self._info)
+
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_recovery_from_boot(self):
+ recovery_image = common.File("recovery.img", self.recovery_data)
+ self._out_tmp_sink("recovery.img", recovery_image.data, "IMAGES")
+ boot_image = common.File("boot.img", self.boot_data)
+ self._out_tmp_sink("boot.img", boot_image.data, "IMAGES")
+
+ MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
+ recovery_image, boot_image, self._info)
+ validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
+ self._info)
+ # Validate 'recovery-from-boot' with bonus argument.
+ self._out_tmp_sink("etc/recovery-resource.dat", b"bonus", "SYSTEM")
+ MakeRecoveryPatch(self._tempdir, self._out_tmp_sink,
+ recovery_image, boot_image, self._info)
+ validate_target_files.ValidateInstallRecoveryScript(self._tempdir,
+ self._info)
+
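For reference, fixtures like recovery_data and boot_data above can be regenerated with a small Python sketch (assuming Python 3.8+ for the mtime and hex-separator keywords); the hard-coded arrays embed a fixed gzip mtime, so only those four timestamp bytes differ from a fresh run unless mtime is pinned.

    import gzip

    # Equivalent of `echo -n "recovery" | gzip -f | hd` and `echo -n "boot" | gzip -f | hd`,
    # with mtime pinned to 0 so the output is deterministic.
    recovery_data = gzip.compress(b"recovery", mtime=0)
    boot_data = gzip.compress(b"boot", mtime=0)
    print(recovery_data.hex(" "))
    print(boot_data.hex(" "))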
diff --git a/tools/sbom/Android.bp b/tools/sbom/Android.bp
index 519251e..2b2b573 100644
--- a/tools/sbom/Android.bp
+++ b/tools/sbom/Android.bp
@@ -77,3 +77,18 @@
},
test_suites: ["general-tests"],
}
+
+python_binary_host {
+ name: "generate-sbom-framework_res",
+ srcs: [
+ "generate-sbom-framework_res.py",
+ ],
+ version: {
+ py3: {
+ embedded_launcher: true,
+ },
+ },
+ libs: [
+ "sbom_lib",
+ ],
+}
\ No newline at end of file
diff --git a/tools/sbom/generate-sbom-framework_res.py b/tools/sbom/generate-sbom-framework_res.py
new file mode 100644
index 0000000..e637d53
--- /dev/null
+++ b/tools/sbom/generate-sbom-framework_res.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import hashlib
+import json
+import sbom_data
+import sbom_writers
+
+'''
+This script generates the SBOM of framework_res.jar from layoutlib, which is shipped with
+Android Studio.
+
+The generated SBOM contains placeholders that are meant to be substituted by
+release_layoutlib.sh: the document name, document namespace, organization, created timestamp
+and the SHA1 checksum of framework_res.jar.
+'''
+
+def get_args():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-v', '--verbose', action='store_true', default=False,
+ help='Print more information.')
+ parser.add_argument('--output_file', required=True,
+ help='The generated SBOM file in SPDX format.')
+ parser.add_argument('--layoutlib_sbom', required=True,
+ help='The file path of the SBOM of layoutlib.')
+
+ return parser.parse_args()
+
+
+def main():
+ global args
+ args = get_args()
+
+ doc = sbom_data.Document(name='<name>',
+ namespace='<namespace>',
+ creators=['Organization: <organization>'],
+ created='<created>')
+
+ filename = 'data/framework_res.jar'
+ file_id = f'SPDXRef-{sbom_data.encode_for_spdxid(filename)}'
+ file = sbom_data.File(id=file_id, name=filename, checksum='SHA1: <checksum>')
+ doc.files.append(file)
+ doc.describes = file_id
+
+ with open(args.layoutlib_sbom, 'r', encoding='utf-8') as f:
+ layoutlib_sbom = json.load(f)
+
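+  # hashlib.file_digest requires Python 3.11 or newer.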
+ with open(args.layoutlib_sbom, 'rb') as f:
+ sha1 = hashlib.file_digest(f, 'sha1')
+
+ layoutlib_sbom_namespace = layoutlib_sbom[sbom_writers.PropNames.DOCUMENT_NAMESPACE]
+ external_doc_ref = 'DocumentRef-layoutlib'
+ doc.external_refs = [
+ sbom_data.DocumentExternalReference(external_doc_ref, layoutlib_sbom_namespace,
+ f'SHA1: {sha1.hexdigest()}')]
+
+ resource_file_spdxids = []
+ for file in layoutlib_sbom[sbom_writers.PropNames.FILES]:
+ if file[sbom_writers.PropNames.FILE_NAME].startswith('data/res/'):
+ resource_file_spdxids.append(file[sbom_writers.PropNames.SPDXID])
+
+ doc.relationships = []
+ for spdxid in resource_file_spdxids:
+ doc.relationships.append(
+ sbom_data.Relationship(file_id, sbom_data.RelationshipType.GENERATED_FROM,
+ f'{external_doc_ref}:{spdxid}'))
+
+ # write sbom file
+ with open(args.output_file, 'w', encoding='utf-8') as f:
+ sbom_writers.JSONWriter.write(doc, f)
+
+
+if __name__ == '__main__':
+ main()
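As a rough illustration of the identifiers composed above (only 'data/framework_res.jar' comes from the script; the resource SPDXID below is hypothetical):

    import sbom_data

    # '_' and '/' are mapped to '-' by encode_for_spdxid, so the described file gets:
    file_id = f'SPDXRef-{sbom_data.encode_for_spdxid("data/framework_res.jar")}'
    assert file_id == 'SPDXRef-data-framework-res.jar'

    # Every 'data/res/...' file found in the layoutlib SBOM becomes the target of a
    # GENERATED_FROM relationship, qualified by the external document reference.
    relationship = sbom_data.Relationship(
        file_id,
        sbom_data.RelationshipType.GENERATED_FROM,
        'DocumentRef-layoutlib:SPDXRef-data-res-values-strings.xml')  # hypothetical resource SPDXID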
diff --git a/tools/sbom/generate-sbom.py b/tools/sbom/generate-sbom.py
index 0a8f10a..5eae262 100755
--- a/tools/sbom/generate-sbom.py
+++ b/tools/sbom/generate-sbom.py
@@ -130,6 +130,7 @@
parser.add_argument('--metadata', required=True, help='The SBOM metadata file path.')
parser.add_argument('--build_version', required=True, help='The build version.')
parser.add_argument('--product_mfr', required=True, help='The product manufacturer.')
+ parser.add_argument('--module_name', help='The module name. If specified, the generated SBOM is for the module.')
parser.add_argument('--json', action='store_true', default=False, help='Generated SBOM file in SPDX JSON format')
parser.add_argument('--unbundled_apk', action='store_true', default=False, help='Generate SBOM for unbundled APKs')
parser.add_argument('--unbundled_apex', action='store_true', default=False, help='Generate SBOM for unbundled APEXs')
@@ -143,26 +144,12 @@
print(i)
-def encode_for_spdxid(s):
- """Simple encode for string values used in SPDXID which uses the charset of A-Za-Z0-9.-"""
- result = ''
- for c in s:
- if c.isalnum() or c in '.-':
- result += c
- elif c in '_@/':
- result += '-'
- else:
- result += '0x' + c.encode('utf-8').hex()
-
- return result.lstrip('-')
-
-
def new_package_id(package_name, type):
- return f'SPDXRef-{type}-{encode_for_spdxid(package_name)}'
+ return f'SPDXRef-{type}-{sbom_data.encode_for_spdxid(package_name)}'
def new_file_id(file_path):
- return f'SPDXRef-{encode_for_spdxid(file_path)}'
+ return f'SPDXRef-{sbom_data.encode_for_spdxid(file_path)}'
def checksum(file_path):
@@ -497,16 +484,25 @@
global metadata_file_protos
metadata_file_protos = {}
- product_package = sbom_data.Package(id=sbom_data.SPDXID_PRODUCT,
- name=sbom_data.PACKAGE_NAME_PRODUCT,
+ product_package_id = sbom_data.SPDXID_PRODUCT
+ product_package_name = sbom_data.PACKAGE_NAME_PRODUCT
+ if args.module_name:
+    # Building the SBOM of a module, so use the module name instead of the product name.
+ product_package_id = f'SPDXRef-{sbom_data.encode_for_spdxid(args.module_name)}'
+ product_package_name = args.module_name
+ product_package = sbom_data.Package(id=product_package_id,
+ name=product_package_name,
download_location=sbom_data.VALUE_NONE,
version=args.build_version,
supplier='Organization: ' + args.product_mfr,
files_analyzed=True)
-
- doc = sbom_data.Document(name=args.build_version,
- namespace=f'https://www.google.com/sbom/spdx/android/{args.build_version}',
- creators=['Organization: ' + args.product_mfr])
+ doc_name = args.build_version
+ if args.module_name:
+ doc_name = f'{args.build_version}/{args.module_name}'
+ doc = sbom_data.Document(name=doc_name,
+ namespace=f'https://www.google.com/sbom/spdx/android/{doc_name}',
+ creators=['Organization: ' + args.product_mfr],
+ describes=product_package_id)
if not args.unbundled_apex:
doc.packages.append(product_package)
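A quick sketch of the document naming above, using hypothetical build version and module name values:

    # Hypothetical inputs; mirrors the doc_name/namespace derivation in the hunk above.
    build_version = 'UQ1A.240105.002'
    module_name = 'my_module'        # empty/None when building the whole-product SBOM

    doc_name = f'{build_version}/{module_name}' if module_name else build_version
    namespace = f'https://www.google.com/sbom/spdx/android/{doc_name}'
    # doc_name  -> 'UQ1A.240105.002/my_module'
    # namespace -> 'https://www.google.com/sbom/spdx/android/UQ1A.240105.002/my_module'
    # describes -> 'SPDXRef-my-module' ('_' is encoded to '-' by encode_for_spdxid)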
diff --git a/tools/sbom/sbom_data.py b/tools/sbom/sbom_data.py
index 71f8660..b5ac8a5 100644
--- a/tools/sbom/sbom_data.py
+++ b/tools/sbom/sbom_data.py
@@ -138,3 +138,16 @@
h = hashlib.sha1()
h.update(''.join(checksums).encode(encoding='utf-8'))
package.verification_code = h.hexdigest()
+
+def encode_for_spdxid(s):
+ """Simple encode for string values used in SPDXID which uses the charset of A-Za-Z0-9.-"""
+ result = ''
+ for c in s:
+ if c.isalnum() or c in '.-':
+ result += c
+ elif c in '_@/':
+ result += '-'
+ else:
+ result += '0x' + c.encode('utf-8').hex()
+
+ return result.lstrip('-')
\ No newline at end of file
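A few worked examples of the encoding (inputs are illustrative):

    from sbom_data import encode_for_spdxid

    # Alphanumerics, '.' and '-' pass through; '_', '@' and '/' map to '-';
    # anything else becomes '0x' plus its UTF-8 hex; leading '-' are stripped.
    assert encode_for_spdxid('data/framework_res.jar') == 'data-framework-res.jar'
    assert encode_for_spdxid('libc++.so') == 'libc0x2b0x2b.so'
    assert encode_for_spdxid('_vendor@1.0') == 'vendor-1.0'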