Merge "Avoid too much reserved_size for erofs image"
diff --git a/core/Makefile b/core/Makefile
index 63ec1a6..4b2a331 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -2430,6 +2430,7 @@
#
# Note: it's intentional to skip signing for boot-debug.img, because it
# can only be used if the device is unlocked with verification error.
+ifneq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
ifneq ($(INSTALLED_BOOTIMAGE_TARGET),)
ifneq ($(strip $(TARGET_NO_KERNEL)),true)
ifneq ($(strip $(BOARD_KERNEL_BINARIES)),)
@@ -2488,6 +2489,7 @@
endif # TARGET_NO_KERNEL
endif # INSTALLED_BOOTIMAGE_TARGET
+endif # BUILDING_VENDOR_BOOT_IMAGE is not true
ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
ifeq ($(BUILDING_RAMDISK_IMAGE),true)
@@ -2633,6 +2635,7 @@
#
# Note: it's intentional to skip signing for boot-test-harness.img, because it
# can only be used if the device is unlocked with verification error.
+ifneq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
ifneq ($(INSTALLED_BOOTIMAGE_TARGET),)
ifneq ($(strip $(TARGET_NO_KERNEL)),true)
@@ -2675,6 +2678,7 @@
endif # TARGET_NO_KERNEL
endif # INSTALLED_BOOTIMAGE_TARGET
+endif # BUILDING_VENDOR_BOOT_IMAGE is not true
endif # BOARD_BUILD_SYSTEM_ROOT_IMAGE is not true
ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
diff --git a/core/autogen_test_config.mk b/core/autogen_test_config.mk
index 137b118..798dd5f 100644
--- a/core/autogen_test_config.mk
+++ b/core/autogen_test_config.mk
@@ -22,8 +22,7 @@
# autogen_test_config_file: Path to the test config file generated.
autogen_test_config_file := $(dir $(LOCAL_BUILT_MODULE))$(LOCAL_MODULE).config
-# TODO: (b/167308193) Switch to /data/local/tests/unrestricted as the default install base.
-autogen_test_install_base := /data/local/tmp
+autogen_test_install_base := /data/local/tests/unrestricted
# Automatically setup test root for native test.
ifeq (true,$(is_native))
ifeq (true,$(LOCAL_VENDOR_MODULE))
diff --git a/core/native_benchmark_test_config_template.xml b/core/native_benchmark_test_config_template.xml
index d1f0199..8a89241 100644
--- a/core/native_benchmark_test_config_template.xml
+++ b/core/native_benchmark_test_config_template.xml
@@ -22,10 +22,10 @@
<target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
<option name="cleanup" value="true" />
- <option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
+ <option name="push" value="{MODULE}->/data/local/tests/unrestricted/{MODULE}" />
</target_preparer>
<test class="com.android.tradefed.testtype.GoogleBenchmarkTest" >
- <option name="native-benchmark-device-path" value="/data/local/tmp" />
+ <option name="native-benchmark-device-path" value="/data/local/tests/unrestricted" />
<option name="benchmark-module-name" value="{MODULE}" />
</test>
</configuration>
diff --git a/core/rbe.mk b/core/rbe.mk
index 19c0e42..370d4bd 100644
--- a/core/rbe.mk
+++ b/core/rbe.mk
@@ -22,6 +22,18 @@
rbe_dir := prebuilts/remoteexecution-client/live/
endif
+ ifdef RBE_CXX_POOL
+ cxx_pool := $(RBE_CXX_POOL)
+ else
+ cxx_pool := default
+ endif
+
+ ifdef RBE_JAVA_POOL
+ java_pool := $(RBE_JAVA_POOL)
+ else
+ java_pool := java16
+ endif
+
ifdef RBE_CXX_EXEC_STRATEGY
cxx_rbe_exec_strategy := $(RBE_CXX_EXEC_STRATEGY)
else
@@ -59,8 +71,8 @@
endif
platform := container-image=docker://gcr.io/androidbuild-re-dockerimage/android-build-remoteexec-image@sha256:582efb38f0c229ea39952fff9e132ccbe183e14869b39888010dacf56b360d62
- cxx_platform := $(platform),Pool=default
- java_r8_d8_platform := $(platform),Pool=java16
+ cxx_platform := $(platform),Pool=$(cxx_pool)
+ java_r8_d8_platform := $(platform),Pool=$(java_pool)
RBE_WRAPPER := $(rbe_dir)/rewrapper
RBE_CXX := --labels=type=compile,lang=cpp,compiler=clang --env_var_allowlist=PWD --exec_strategy=$(cxx_rbe_exec_strategy) --platform=$(cxx_platform) --compare=$(cxx_compare)
diff --git a/core/rust_device_benchmark_config_template.xml b/core/rust_device_benchmark_config_template.xml
index 2055df2..a117fc4 100644
--- a/core/rust_device_benchmark_config_template.xml
+++ b/core/rust_device_benchmark_config_template.xml
@@ -17,11 +17,11 @@
<configuration description="Config to run {MODULE} rust benchmark tests.">
<target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
<option name="cleanup" value="false" />
- <option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
+ <option name="push" value="{MODULE}->/data/local/tests/unrestricted/{MODULE}" />
</target_preparer>
<test class="com.android.tradefed.testtype.rust.RustBinaryTest" >
- <option name="test-device-path" value="/data/local/tmp" />
+ <option name="test-device-path" value="/data/local/tests/unrestricted" />
<option name="module-name" value="{MODULE}" />
<option name="is-benchmark" value="true" />
</test>
diff --git a/core/rust_device_test_config_template.xml b/core/rust_device_test_config_template.xml
index 9429d38..536f57e 100644
--- a/core/rust_device_test_config_template.xml
+++ b/core/rust_device_test_config_template.xml
@@ -17,11 +17,11 @@
<configuration description="Config to run {MODULE} device tests.">
<target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
<option name="cleanup" value="true" />
- <option name="push" value="{MODULE}->/data/local/tmp/{MODULE}" />
+ <option name="push" value="{MODULE}->/data/local/tests/unrestricted/{MODULE}" />
</target_preparer>
<test class="com.android.tradefed.testtype.rust.RustBinaryTest" >
- <option name="test-device-path" value="/data/local/tmp" />
+ <option name="test-device-path" value="/data/local/tests/unrestricted" />
<option name="module-name" value="{MODULE}" />
</test>
</configuration>
diff --git a/core/soong_config.mk b/core/soong_config.mk
index a39707e..9eb02b2 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -165,6 +165,7 @@
$(call add_json_list, VendorSnapshotDirsExcluded, $(VENDOR_SNAPSHOT_DIRS_EXCLUDED))
$(call add_json_list, RecoverySnapshotDirsIncluded, $(RECOVERY_SNAPSHOT_DIRS_INCLUDED))
$(call add_json_list, RecoverySnapshotDirsExcluded, $(RECOVERY_SNAPSHOT_DIRS_EXCLUDED))
+$(call add_json_bool, HostFakeSnapshotEnabled, $(HOST_FAKE_SNAPSHOT_ENABLE))
$(call add_json_bool, Treble_linker_namespaces, $(filter true,$(PRODUCT_TREBLE_LINKER_NAMESPACES)))
$(call add_json_bool, Enforce_vintf_manifest, $(filter true,$(PRODUCT_ENFORCE_VINTF_MANIFEST)))
diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk
index fdd9591..966ceb7 100644
--- a/core/tasks/cts.mk
+++ b/core/tasks/cts.mk
@@ -25,3 +25,57 @@
.PHONY: cts_v2
cts_v2: cts
+
+# platform version check (b/32056228)
+# ============================================================
+ifneq (,$(wildcard cts/))
+ cts_platform_version_path := cts/tests/tests/os/assets/platform_versions.txt
+ cts_platform_version_string := $(shell cat $(cts_platform_version_path))
+ cts_platform_release_path := cts/tests/tests/os/assets/platform_releases.txt
+ cts_platform_release_string := $(shell cat $(cts_platform_release_path))
+
+ ifeq (,$(findstring $(PLATFORM_VERSION),$(cts_platform_version_string)))
+ define error_msg
+ ============================================================
+ Could not find version "$(PLATFORM_VERSION)" in CTS platform version file:
+ $(cts_platform_version_path)
+ Most likely PLATFORM_VERSION in build/core/version_defaults.mk
+ has changed and a new version must be added to this CTS file.
+ ============================================================
+ endef
+ $(error $(error_msg))
+ endif
+ ifeq (,$(findstring $(PLATFORM_VERSION_LAST_STABLE),$(cts_platform_release_string)))
+ define error_msg
+ ============================================================
+ Could not find version "$(PLATFORM_VERSION_LAST_STABLE)" in CTS platform release file:
+ $(cts_platform_release_path)
+ Most likely PLATFORM_VERSION_LAST_STABLE in build/core/version_defaults.mk
+ has changed and a new version must be added to this CTS file.
+ ============================================================
+ endef
+ $(error $(error_msg))
+ endif
+endif
+
+# Creates a "cts-verifier" directory that will contain:
+#
+# 1. Out directory with a "android-cts-verifier" containing the CTS Verifier
+# and other binaries it needs.
+#
+# 2. Zipped version of the android-cts-verifier directory to be included with
+# the build distribution.
+##
+cts-dir := $(HOST_OUT)/cts-verifier
+verifier-dir-name := android-cts-verifier
+verifier-dir := $(cts-dir)/$(verifier-dir-name)
+verifier-zip-name := $(verifier-dir-name).zip
+verifier-zip := $(cts-dir)/$(verifier-zip-name)
+
+cts : $(verifier-zip)
+$(verifier-zip): PRIVATE_DIR := $(cts-dir)
+$(verifier-zip): $(SOONG_ANDROID_CTS_VERIFIER_ZIP)
+ rm -rf $(PRIVATE_DIR)
+ mkdir -p $(PRIVATE_DIR)
+ unzip -q -d $(PRIVATE_DIR) $<
+ $(copy-file-to-target)
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index c8fa49f..dbb1def 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -40,51 +40,10 @@
endif
DEFAULT_PLATFORM_VERSION := TP1A
+.KATI_READONLY := DEFAULT_PLATFORM_VERSION
MIN_PLATFORM_VERSION := TP1A
MAX_PLATFORM_VERSION := TP1A
-ALLOWED_VERSIONS := $(call allowed-platform-versions,\
- $(MIN_PLATFORM_VERSION),\
- $(MAX_PLATFORM_VERSION),\
- $(DEFAULT_PLATFORM_VERSION))
-
-ifndef TARGET_PLATFORM_VERSION
- TARGET_PLATFORM_VERSION := $(DEFAULT_PLATFORM_VERSION)
-endif
-
-ifeq (,$(filter $(ALLOWED_VERSIONS), $(TARGET_PLATFORM_VERSION)))
- $(warning Invalid TARGET_PLATFORM_VERSION '$(TARGET_PLATFORM_VERSION)', must be one of)
- $(error $(ALLOWED_VERSIONS))
-endif
-ALLOWED_VERSIONS :=
-MIN_PLATFORM_VERSION :=
-MAX_PLATFORM_VERSION :=
-
-.KATI_READONLY := \
- DEFAULT_PLATFORM_VERSION \
- TARGET_PLATFORM_VERSION
-
-# Default versions for each TARGET_PLATFORM_VERSION
-# TODO: PLATFORM_VERSION, PLATFORM_SDK_VERSION, etc. should be conditional
-# on this
-
-# This is the canonical definition of the platform version,
-# which is the version that we reveal to the end user.
-# Update this value when the platform version changes (rather
-# than overriding it somewhere else). Can be an arbitrary string.
-
-# When you change PLATFORM_VERSION for a given PLATFORM_SDK_VERSION
-# please add that PLATFORM_VERSION as well as clean up obsolete PLATFORM_VERSION's
-# in the following text file:
-# cts/tests/tests/os/assets/platform_versions.txt
-
-# Note that there should be one PLATFORM_VERSION and PLATFORM_VERSION_CODENAME
-# entry for each unreleased API level, regardless of
-# MIN_PLATFORM_VERSION/MAX_PLATFORM_VERSION. PLATFORM_VERSION is used to
-# generate the range of allowed SDK versions, so it must have an entry for every
-# unreleased API level targetable by this branch, not just those that are valid
-# lunch targets for this branch.
-
# The last stable version name of the platform that was released. During
# development, this stays at that previous version, while the codename indicates
# further work based on the previous version.
@@ -96,49 +55,6 @@
PLATFORM_VERSION_CODENAME.SP1A := S
PLATFORM_VERSION_CODENAME.TP1A := Tiramisu
-ifndef PLATFORM_VERSION_CODENAME
- PLATFORM_VERSION_CODENAME := $(PLATFORM_VERSION_CODENAME.$(TARGET_PLATFORM_VERSION))
- ifndef PLATFORM_VERSION_CODENAME
- # PLATFORM_VERSION_CODENAME falls back to TARGET_PLATFORM_VERSION
- PLATFORM_VERSION_CODENAME := $(TARGET_PLATFORM_VERSION)
- endif
-
- # This is all of the *active* development codenames.
- # This confusing name is needed because
- # all_codenames has been baked into build.prop for ages.
- #
- # Should be either the same as PLATFORM_VERSION_CODENAME or a comma-separated
- # list of additional codenames after PLATFORM_VERSION_CODENAME.
- PLATFORM_VERSION_ALL_CODENAMES :=
-
- # Build a list of all active code names. Avoid duplicates, and stop when we
- # reach a codename that matches PLATFORM_VERSION_CODENAME (anything beyond
- # that is not included in our build).
- _versions_in_target := \
- $(call find_and_earlier,$(ALL_VERSIONS),$(TARGET_PLATFORM_VERSION))
- $(foreach version,$(_versions_in_target),\
- $(eval _codename := $(PLATFORM_VERSION_CODENAME.$(version)))\
- $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_CODENAMES)),,\
- $(eval PLATFORM_VERSION_ALL_CODENAMES += $(_codename))))
-
- # And convert from space separated to comma separated.
- PLATFORM_VERSION_ALL_CODENAMES := \
- $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_CODENAMES)))
-
-endif
-.KATI_READONLY := \
- PLATFORM_VERSION_CODENAME \
- PLATFORM_VERSION_ALL_CODENAMES
-
-ifndef PLATFORM_VERSION
- ifeq (REL,$(PLATFORM_VERSION_CODENAME))
- PLATFORM_VERSION := $(PLATFORM_VERSION_LAST_STABLE)
- else
- PLATFORM_VERSION := $(PLATFORM_VERSION_CODENAME)
- endif
-endif
-.KATI_READONLY := PLATFORM_VERSION
-
ifndef PLATFORM_SDK_VERSION
# This is the canonical definition of the SDK version, which defines
# the set of APIs and functionality available in the platform. It
@@ -157,89 +73,13 @@
.KATI_READONLY := PLATFORM_SDK_VERSION
# This is the sdk extension version of this tree.
-PLATFORM_SDK_EXTENSION_VERSION :=$= 0
+PLATFORM_SDK_EXTENSION_VERSION := 0
+.KATI_READONLY := PLATFORM_SDK_EXTENSION_VERSION
+
# This is the sdk extension version that PLATFORM_SDK_VERSION ships with.
-PLATFORM_BASE_SDK_EXTENSION_VERSION :=$= 0
-
-ifeq (REL,$(PLATFORM_VERSION_CODENAME))
- PLATFORM_PREVIEW_SDK_VERSION := 0
-else
- ifndef PLATFORM_PREVIEW_SDK_VERSION
- # This is the definition of a preview SDK version over and above the current
- # platform SDK version. Unlike the platform SDK version, a higher value
- # for preview SDK version does NOT mean that all prior preview APIs are
- # included. Packages reading this value to determine compatibility with
- # known APIs should check that this value is precisely equal to the preview
- # SDK version the package was built for, otherwise it should fall back to
- # assuming the device can only support APIs as of the previous official
- # public release.
- # This value will always be forced to 0 for release builds by the logic
- # in the "ifeq" block above, so the value below will be used on any
- # non-release builds, and it should always be at least 1, to indicate that
- # APIs may have changed since the claimed PLATFORM_SDK_VERSION.
- PLATFORM_PREVIEW_SDK_VERSION := 1
- endif
-endif
-.KATI_READONLY := PLATFORM_PREVIEW_SDK_VERSION
-
-ifndef DEFAULT_APP_TARGET_SDK
- # This is the default minSdkVersion and targetSdkVersion to use for
- # all .apks created by the build system. It can be overridden by explicitly
- # setting these in the .apk's AndroidManifest.xml. It is either the code
- # name of the development build or, if this is a release build, the official
- # SDK version of this release.
- ifeq (REL,$(PLATFORM_VERSION_CODENAME))
- DEFAULT_APP_TARGET_SDK := $(PLATFORM_SDK_VERSION)
- else
- DEFAULT_APP_TARGET_SDK := $(PLATFORM_VERSION_CODENAME)
- endif
-endif
-.KATI_READONLY := DEFAULT_APP_TARGET_SDK
-
-ifndef PLATFORM_VNDK_VERSION
- # This is the definition of the VNDK version for the current VNDK libraries.
- # The version is only available when PLATFORM_VERSION_CODENAME == REL.
- # Otherwise, it will be set to a CODENAME version. The ABI is allowed to be
- # changed only before the Android version is released. Once
- # PLATFORM_VNDK_VERSION is set to actual version, the ABI for this version
- # will be frozon and emit build errors if any ABI for the VNDK libs are
- # changed.
- # After that the snapshot of the VNDK with this version will be generated.
- #
- # The VNDK version follows PLATFORM_SDK_VERSION.
- ifeq (REL,$(PLATFORM_VERSION_CODENAME))
- PLATFORM_VNDK_VERSION := $(PLATFORM_SDK_VERSION)
- else
- PLATFORM_VNDK_VERSION := $(PLATFORM_VERSION_CODENAME)
- endif
-endif
-.KATI_READONLY := PLATFORM_VNDK_VERSION
-
-ifndef PLATFORM_SYSTEMSDK_MIN_VERSION
- # This is the oldest version of system SDK that the platform supports. Contrary
- # to the public SDK where platform essentially supports all previous SDK versions,
- # platform supports only a few number of recent system SDK versions as some of
- # old system APIs are gradually deprecated, removed and then deleted.
- PLATFORM_SYSTEMSDK_MIN_VERSION := 28
-endif
-.KATI_READONLY := PLATFORM_SYSTEMSDK_MIN_VERSION
-
-# This is the list of system SDK versions that the current platform supports.
-PLATFORM_SYSTEMSDK_VERSIONS :=
-ifneq (,$(PLATFORM_SYSTEMSDK_MIN_VERSION))
- $(if $(call math_is_number,$(PLATFORM_SYSTEMSDK_MIN_VERSION)),,\
- $(error PLATFORM_SYSTEMSDK_MIN_VERSION must be a number, but was $(PLATFORM_SYSTEMSDK_MIN_VERSION)))
- PLATFORM_SYSTEMSDK_VERSIONS := $(call int_range_list,$(PLATFORM_SYSTEMSDK_MIN_VERSION),$(PLATFORM_SDK_VERSION))
-endif
-# Platform always supports the current version
-ifeq (REL,$(PLATFORM_VERSION_CODENAME))
- PLATFORM_SYSTEMSDK_VERSIONS += $(PLATFORM_SDK_VERSION)
-else
- PLATFORM_SYSTEMSDK_VERSIONS += $(subst $(comma),$(space),$(PLATFORM_VERSION_ALL_CODENAMES))
-endif
-PLATFORM_SYSTEMSDK_VERSIONS := $(strip $(sort $(PLATFORM_SYSTEMSDK_VERSIONS)))
-.KATI_READONLY := PLATFORM_SYSTEMSDK_VERSIONS
+PLATFORM_BASE_SDK_EXTENSION_VERSION := 0
+.KATI_READONLY := PLATFORM_BASE_SDK_EXTENSION_VERSION
ifndef PLATFORM_SECURITY_PATCH
# Used to indicate the security patch that has been applied to the device.
@@ -251,65 +91,5 @@
endif
.KATI_READONLY := PLATFORM_SECURITY_PATCH
-ifndef PLATFORM_SECURITY_PATCH_TIMESTAMP
- # Used to indicate the matching timestamp for the security patch string in PLATFORM_SECURITY_PATCH.
- PLATFORM_SECURITY_PATCH_TIMESTAMP := $(shell date -d 'TZ="GMT" $(PLATFORM_SECURITY_PATCH)' +%s)
-endif
-.KATI_READONLY := PLATFORM_SECURITY_PATCH_TIMESTAMP
+include $(BUILD_SYSTEM)/version_util.mk
-ifndef PLATFORM_BASE_OS
- # Used to indicate the base os applied to the device.
- # Can be an arbitrary string, but must be a single word.
- #
- # If there is no $PLATFORM_BASE_OS set, keep it empty.
- PLATFORM_BASE_OS :=
-endif
-.KATI_READONLY := PLATFORM_BASE_OS
-
-ifndef BUILD_ID
- # Used to signify special builds. E.g., branches and/or releases,
- # like "M5-RC7". Can be an arbitrary string, but must be a single
- # word and a valid file name.
- #
- # If there is no BUILD_ID set, make it obvious.
- BUILD_ID := UNKNOWN
-endif
-.KATI_READONLY := BUILD_ID
-
-ifndef BUILD_DATETIME
- # Used to reproduce builds by setting the same time. Must be the number
- # of seconds since the Epoch.
- BUILD_DATETIME := $(shell date +%s)
-endif
-
-DATE := date -d @$(BUILD_DATETIME)
-.KATI_READONLY := DATE
-
-# Everything should be using BUILD_DATETIME_FROM_FILE instead.
-# BUILD_DATETIME and DATE can be removed once BUILD_NUMBER moves
-# to soong_ui.
-$(KATI_obsolete_var BUILD_DATETIME,Use BUILD_DATETIME_FROM_FILE)
-
-HAS_BUILD_NUMBER := true
-ifndef BUILD_NUMBER
- # BUILD_NUMBER should be set to the source control value that
- # represents the current state of the source code. E.g., a
- # perforce changelist number or a git hash. Can be an arbitrary string
- # (to allow for source control that uses something other than numbers),
- # but must be a single word and a valid file name.
- #
- # If no BUILD_NUMBER is set, create a useful "I am an engineering build
- # from this date/time" value. Make it start with a non-digit so that
- # anyone trying to parse it as an integer will probably get "0".
- BUILD_NUMBER := eng.$(shell echo $${BUILD_USERNAME:0:6}).$(shell $(DATE) +%Y%m%d.%H%M%S)
- HAS_BUILD_NUMBER := false
-endif
-.KATI_READONLY := BUILD_NUMBER HAS_BUILD_NUMBER
-
-ifndef PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION
- # Used to set minimum supported target sdk version. Apps targeting sdk
- # version lower than the set value will result in a warning being shown
- # when any activity from the app is started.
- PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION := 23
-endif
-.KATI_READONLY := PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION
diff --git a/core/version_util.mk b/core/version_util.mk
new file mode 100644
index 0000000..b7c4e48
--- /dev/null
+++ b/core/version_util.mk
@@ -0,0 +1,245 @@
+#
+# Copyright (C) 2008 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+
+ALLOWED_VERSIONS := $(call allowed-platform-versions,\
+ $(MIN_PLATFORM_VERSION),\
+ $(MAX_PLATFORM_VERSION),\
+ $(DEFAULT_PLATFORM_VERSION))
+
+ifndef TARGET_PLATFORM_VERSION
+ TARGET_PLATFORM_VERSION := $(DEFAULT_PLATFORM_VERSION)
+endif
+
+ifeq (,$(filter $(ALLOWED_VERSIONS), $(TARGET_PLATFORM_VERSION)))
+ $(warning Invalid TARGET_PLATFORM_VERSION '$(TARGET_PLATFORM_VERSION)', must be one of)
+ $(error $(ALLOWED_VERSIONS))
+endif
+ALLOWED_VERSIONS :=
+MIN_PLATFORM_VERSION :=
+MAX_PLATFORM_VERSION :=
+
+.KATI_READONLY := TARGET_PLATFORM_VERSION
+
+# Default versions for each TARGET_PLATFORM_VERSION
+# TODO: PLATFORM_VERSION, PLATFORM_SDK_VERSION, etc. should be conditional
+# on this
+
+# This is the canonical definition of the platform version,
+# which is the version that we reveal to the end user.
+# Update this value when the platform version changes (rather
+# than overriding it somewhere else). Can be an arbitrary string.
+
+# When you change PLATFORM_VERSION for a given PLATFORM_SDK_VERSION
+# please add that PLATFORM_VERSION as well as clean up obsolete PLATFORM_VERSION's
+# in the following text file:
+# cts/tests/tests/os/assets/platform_versions.txt
+
+# Note that there should be one PLATFORM_VERSION and PLATFORM_VERSION_CODENAME
+# entry for each unreleased API level, regardless of
+# MIN_PLATFORM_VERSION/MAX_PLATFORM_VERSION. PLATFORM_VERSION is used to
+# generate the range of allowed SDK versions, so it must have an entry for every
+# unreleased API level targetable by this branch, not just those that are valid
+# lunch targets for this branch.
+
+ifndef PLATFORM_VERSION_CODENAME
+ PLATFORM_VERSION_CODENAME := $(PLATFORM_VERSION_CODENAME.$(TARGET_PLATFORM_VERSION))
+ ifndef PLATFORM_VERSION_CODENAME
+ # PLATFORM_VERSION_CODENAME falls back to TARGET_PLATFORM_VERSION
+ PLATFORM_VERSION_CODENAME := $(TARGET_PLATFORM_VERSION)
+ endif
+
+ # This is all of the *active* development codenames.
+ # This confusing name is needed because
+ # all_codenames has been baked into build.prop for ages.
+ #
+ # Should be either the same as PLATFORM_VERSION_CODENAME or a comma-separated
+ # list of additional codenames after PLATFORM_VERSION_CODENAME.
+ PLATFORM_VERSION_ALL_CODENAMES :=
+
+ # Build a list of all active code names. Avoid duplicates, and stop when we
+ # reach a codename that matches PLATFORM_VERSION_CODENAME (anything beyond
+ # that is not included in our build).
+ _versions_in_target := \
+ $(call find_and_earlier,$(ALL_VERSIONS),$(TARGET_PLATFORM_VERSION))
+ $(foreach version,$(_versions_in_target),\
+ $(eval _codename := $(PLATFORM_VERSION_CODENAME.$(version)))\
+ $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_CODENAMES)),,\
+ $(eval PLATFORM_VERSION_ALL_CODENAMES += $(_codename))))
+
+ # And convert from space separated to comma separated.
+ PLATFORM_VERSION_ALL_CODENAMES := \
+ $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_CODENAMES)))
+
+endif
+.KATI_READONLY := \
+ PLATFORM_VERSION_CODENAME \
+ PLATFORM_VERSION_ALL_CODENAMES
+
+ifndef PLATFORM_VERSION
+ ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+ PLATFORM_VERSION := $(PLATFORM_VERSION_LAST_STABLE)
+ else
+ PLATFORM_VERSION := $(PLATFORM_VERSION_CODENAME)
+ endif
+endif
+.KATI_READONLY := PLATFORM_VERSION
+
+
+ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+ PLATFORM_PREVIEW_SDK_VERSION := 0
+else
+ ifndef PLATFORM_PREVIEW_SDK_VERSION
+ # This is the definition of a preview SDK version over and above the current
+ # platform SDK version. Unlike the platform SDK version, a higher value
+ # for preview SDK version does NOT mean that all prior preview APIs are
+ # included. Packages reading this value to determine compatibility with
+ # known APIs should check that this value is precisely equal to the preview
+ # SDK version the package was built for, otherwise it should fall back to
+ # assuming the device can only support APIs as of the previous official
+ # public release.
+ # This value will always be forced to 0 for release builds by the logic
+ # in the "ifeq" block above, so the value below will be used on any
+ # non-release builds, and it should always be at least 1, to indicate that
+ # APIs may have changed since the claimed PLATFORM_SDK_VERSION.
+ PLATFORM_PREVIEW_SDK_VERSION := 1
+ endif
+endif
+.KATI_READONLY := PLATFORM_PREVIEW_SDK_VERSION
+
+ifndef DEFAULT_APP_TARGET_SDK
+ # This is the default minSdkVersion and targetSdkVersion to use for
+ # all .apks created by the build system. It can be overridden by explicitly
+ # setting these in the .apk's AndroidManifest.xml. It is either the code
+ # name of the development build or, if this is a release build, the official
+ # SDK version of this release.
+ ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+ DEFAULT_APP_TARGET_SDK := $(PLATFORM_SDK_VERSION)
+ else
+ DEFAULT_APP_TARGET_SDK := $(PLATFORM_VERSION_CODENAME)
+ endif
+endif
+.KATI_READONLY := DEFAULT_APP_TARGET_SDK
+
+ifndef PLATFORM_VNDK_VERSION
+ # This is the definition of the VNDK version for the current VNDK libraries.
+ # The version is only available when PLATFORM_VERSION_CODENAME == REL.
+ # Otherwise, it will be set to a CODENAME version. The ABI is allowed to be
+ # changed only before the Android version is released. Once
+ # PLATFORM_VNDK_VERSION is set to actual version, the ABI for this version
+ # will be frozon and emit build errors if any ABI for the VNDK libs are
+ # changed.
+ # After that the snapshot of the VNDK with this version will be generated.
+ #
+ # The VNDK version follows PLATFORM_SDK_VERSION.
+ ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+ PLATFORM_VNDK_VERSION := $(PLATFORM_SDK_VERSION)
+ else
+ PLATFORM_VNDK_VERSION := $(PLATFORM_VERSION_CODENAME)
+ endif
+endif
+.KATI_READONLY := PLATFORM_VNDK_VERSION
+
+ifndef PLATFORM_SYSTEMSDK_MIN_VERSION
+ # This is the oldest version of system SDK that the platform supports. Contrary
+ # to the public SDK where platform essentially supports all previous SDK versions,
+ # platform supports only a few number of recent system SDK versions as some of
+ # old system APIs are gradually deprecated, removed and then deleted.
+ PLATFORM_SYSTEMSDK_MIN_VERSION := 28
+endif
+.KATI_READONLY := PLATFORM_SYSTEMSDK_MIN_VERSION
+
+# This is the list of system SDK versions that the current platform supports.
+PLATFORM_SYSTEMSDK_VERSIONS :=
+ifneq (,$(PLATFORM_SYSTEMSDK_MIN_VERSION))
+ $(if $(call math_is_number,$(PLATFORM_SYSTEMSDK_MIN_VERSION)),,\
+ $(error PLATFORM_SYSTEMSDK_MIN_VERSION must be a number, but was $(PLATFORM_SYSTEMSDK_MIN_VERSION)))
+ PLATFORM_SYSTEMSDK_VERSIONS := $(call int_range_list,$(PLATFORM_SYSTEMSDK_MIN_VERSION),$(PLATFORM_SDK_VERSION))
+endif
+# Platform always supports the current version
+ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+ PLATFORM_SYSTEMSDK_VERSIONS += $(PLATFORM_SDK_VERSION)
+else
+ PLATFORM_SYSTEMSDK_VERSIONS += $(subst $(comma),$(space),$(PLATFORM_VERSION_ALL_CODENAMES))
+endif
+PLATFORM_SYSTEMSDK_VERSIONS := $(strip $(sort $(PLATFORM_SYSTEMSDK_VERSIONS)))
+.KATI_READONLY := PLATFORM_SYSTEMSDK_VERSIONS
+
+.KATI_READONLY := PLATFORM_SECURITY_PATCH
+
+ifndef PLATFORM_SECURITY_PATCH_TIMESTAMP
+ # Used to indicate the matching timestamp for the security patch string in PLATFORM_SECURITY_PATCH.
+ PLATFORM_SECURITY_PATCH_TIMESTAMP := $(shell date -d 'TZ="GMT" $(PLATFORM_SECURITY_PATCH)' +%s)
+endif
+.KATI_READONLY := PLATFORM_SECURITY_PATCH_TIMESTAMP
+
+ifndef PLATFORM_BASE_OS
+ # Used to indicate the base os applied to the device.
+ # Can be an arbitrary string, but must be a single word.
+ #
+ # If there is no $PLATFORM_BASE_OS set, keep it empty.
+ PLATFORM_BASE_OS :=
+endif
+.KATI_READONLY := PLATFORM_BASE_OS
+
+ifndef BUILD_ID
+ # Used to signify special builds. E.g., branches and/or releases,
+ # like "M5-RC7". Can be an arbitrary string, but must be a single
+ # word and a valid file name.
+ #
+ # If there is no BUILD_ID set, make it obvious.
+ BUILD_ID := UNKNOWN
+endif
+.KATI_READONLY := BUILD_ID
+
+ifndef BUILD_DATETIME
+ # Used to reproduce builds by setting the same time. Must be the number
+ # of seconds since the Epoch.
+ BUILD_DATETIME := $(shell date +%s)
+endif
+
+DATE := date -d @$(BUILD_DATETIME)
+.KATI_READONLY := DATE
+
+# Everything should be using BUILD_DATETIME_FROM_FILE instead.
+# BUILD_DATETIME and DATE can be removed once BUILD_NUMBER moves
+# to soong_ui.
+$(KATI_obsolete_var BUILD_DATETIME,Use BUILD_DATETIME_FROM_FILE)
+
+HAS_BUILD_NUMBER := true
+ifndef BUILD_NUMBER
+ # BUILD_NUMBER should be set to the source control value that
+ # represents the current state of the source code. E.g., a
+ # perforce changelist number or a git hash. Can be an arbitrary string
+ # (to allow for source control that uses something other than numbers),
+ # but must be a single word and a valid file name.
+ #
+ # If no BUILD_NUMBER is set, create a useful "I am an engineering build
+ # from this date/time" value. Make it start with a non-digit so that
+ # anyone trying to parse it as an integer will probably get "0".
+ BUILD_NUMBER := eng.$(shell echo $${BUILD_USERNAME:0:6}).$(shell $(DATE) +%Y%m%d.%H%M%S)
+ HAS_BUILD_NUMBER := false
+endif
+.KATI_READONLY := BUILD_NUMBER HAS_BUILD_NUMBER
+
+ifndef PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION
+ # Used to set minimum supported target sdk version. Apps targeting sdk
+ # version lower than the set value will result in a warning being shown
+ # when any activity from the app is started.
+ PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION := 23
+endif
+.KATI_READONLY := PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index 1133564..8d8555c 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -57,12 +57,6 @@
BOARD_KERNEL-4.19-GZ_BOOTIMAGE_PARTITION_SIZE := 47185920
BOARD_KERNEL-4.19-GZ-ALLSYMS_BOOTIMAGE_PARTITION_SIZE := 47185920
-BOARD_KERNEL-5.4_BOOTIMAGE_PARTITION_SIZE := 67108864
-BOARD_KERNEL-5.4-ALLSYMS_BOOTIMAGE_PARTITION_SIZE := 67108864
-BOARD_KERNEL-5.4-GZ_BOOTIMAGE_PARTITION_SIZE := 47185920
-BOARD_KERNEL-5.4-GZ-ALLSYMS_BOOTIMAGE_PARTITION_SIZE := 47185920
-BOARD_KERNEL-5.4-LZ4_BOOTIMAGE_PARTITION_SIZE := 53477376
-BOARD_KERNEL-5.4-LZ4-ALLSYMS_BOOTIMAGE_PARTITION_SIZE := 53477376
BOARD_KERNEL-5.10_BOOTIMAGE_PARTITION_SIZE := 67108864
BOARD_KERNEL-5.10-ALLSYMS_BOOTIMAGE_PARTITION_SIZE := 67108864
BOARD_KERNEL-5.10-GZ_BOOTIMAGE_PARTITION_SIZE := 47185920
@@ -77,14 +71,12 @@
BOARD_KERNEL_BINARIES := \
kernel-4.19-gz \
- kernel-5.4 kernel-5.4-gz kernel-5.4-lz4 \
kernel-5.10 kernel-5.10-gz kernel-5.10-lz4 \
kernel-mainline kernel-mainline-gz kernel-mainline-lz4 \
ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
BOARD_KERNEL_BINARIES += \
kernel-4.19-gz-allsyms \
- kernel-5.4-allsyms kernel-5.4-gz-allsyms kernel-5.4-lz4-allsyms \
kernel-5.10-allsyms kernel-5.10-gz-allsyms kernel-5.10-lz4-allsyms \
endif
diff --git a/target/board/generic_arm64/device.mk b/target/board/generic_arm64/device.mk
index 0064aec..fe56fd3 100644
--- a/target/board/generic_arm64/device.mk
+++ b/target/board/generic_arm64/device.mk
@@ -16,9 +16,6 @@
PRODUCT_COPY_FILES += \
kernel/prebuilts/4.19/arm64/kernel-4.19-gz:kernel-4.19-gz \
- kernel/prebuilts/5.4/arm64/kernel-5.4:kernel-5.4 \
- kernel/prebuilts/5.4/arm64/kernel-5.4-gz:kernel-5.4-gz \
- kernel/prebuilts/5.4/arm64/kernel-5.4-lz4:kernel-5.4-lz4 \
kernel/prebuilts/5.10/arm64/kernel-5.10:kernel-5.10 \
kernel/prebuilts/5.10/arm64/kernel-5.10-gz:kernel-5.10-gz \
kernel/prebuilts/5.10/arm64/kernel-5.10-lz4:kernel-5.10-lz4 \
@@ -27,16 +24,12 @@
kernel/prebuilts/mainline/arm64/kernel-mainline-lz4-allsyms:kernel-mainline-lz4 \
$(call dist-for-goals, dist_files, kernel/prebuilts/4.19/arm64/prebuilt-info.txt:kernel/4.19/prebuilt-info.txt)
-$(call dist-for-goals, dist_files, kernel/prebuilts/5.4/arm64/prebuilt-info.txt:kernel/5.4/prebuilt-info.txt)
$(call dist-for-goals, dist_files, kernel/prebuilts/5.10/arm64/prebuilt-info.txt:kernel/5.10/prebuilt-info.txt)
$(call dist-for-goals, dist_files, kernel/prebuilts/mainline/arm64/prebuilt-info.txt:kernel/mainline/prebuilt-info.txt)
ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
PRODUCT_COPY_FILES += \
kernel/prebuilts/4.19/arm64/kernel-4.19-gz-allsyms:kernel-4.19-gz-allsyms \
- kernel/prebuilts/5.4/arm64/kernel-5.4-allsyms:kernel-5.4-allsyms \
- kernel/prebuilts/5.4/arm64/kernel-5.4-gz-allsyms:kernel-5.4-gz-allsyms \
- kernel/prebuilts/5.4/arm64/kernel-5.4-lz4-allsyms:kernel-5.4-lz4-allsyms \
kernel/prebuilts/5.10/arm64/kernel-5.10-allsyms:kernel-5.10-allsyms \
kernel/prebuilts/5.10/arm64/kernel-5.10-gz-allsyms:kernel-5.10-gz-allsyms \
kernel/prebuilts/5.10/arm64/kernel-5.10-lz4-allsyms:kernel-5.10-lz4-allsyms \
diff --git a/target/board/generic_x86_64/BoardConfig.mk b/target/board/generic_x86_64/BoardConfig.mk
index bdc862e..640216c 100755
--- a/target/board/generic_x86_64/BoardConfig.mk
+++ b/target/board/generic_x86_64/BoardConfig.mk
@@ -28,19 +28,16 @@
include build/make/target/board/BoardConfigGkiCommon.mk
BOARD_KERNEL-5.4_BOOTIMAGE_PARTITION_SIZE := 67108864
-BOARD_KERNEL-5.4-ALLSYMS_BOOTIMAGE_PARTITION_SIZE := 67108864
BOARD_KERNEL-5.10_BOOTIMAGE_PARTITION_SIZE := 67108864
BOARD_KERNEL-5.10-ALLSYMS_BOOTIMAGE_PARTITION_SIZE := 67108864
BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
BOARD_KERNEL_BINARIES := \
- kernel-5.4 \
kernel-5.10 \
ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
BOARD_KERNEL_BINARIES += \
- kernel-5.4-allsyms \
kernel-5.10-allsyms \
endif
diff --git a/target/board/generic_x86_64/device.mk b/target/board/generic_x86_64/device.mk
index f31a491..d28ace7 100755
--- a/target/board/generic_x86_64/device.mk
+++ b/target/board/generic_x86_64/device.mk
@@ -15,15 +15,12 @@
#
PRODUCT_COPY_FILES += \
- kernel/prebuilts/5.4/x86_64/kernel-5.4:kernel-5.4 \
kernel/prebuilts/5.10/x86_64/kernel-5.10:kernel-5.10 \
-$(call dist-for-goals, dist_files, kernel/prebuilts/5.4/x86_64/prebuilt-info.txt:kernel/5.4/prebuilt-info.txt)
$(call dist-for-goals, dist_files, kernel/prebuilts/5.10/x86_64/prebuilt-info.txt:kernel/5.10/prebuilt-info.txt)
ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
PRODUCT_COPY_FILES += \
- kernel/prebuilts/5.4/x86_64/kernel-5.4-allsyms:kernel-5.4-allsyms \
kernel/prebuilts/5.10/x86_64/kernel-5.10-allsyms:kernel-5.10-allsyms \
endif
diff --git a/tools/generate-notice-files.py b/tools/generate-notice-files.py
index bf958fb..5e3010f 100755
--- a/tools/generate-notice-files.py
+++ b/tools/generate-notice-files.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#
# Copyright (C) 2012 The Android Open Source Project
#
@@ -30,20 +30,18 @@
import os
import os.path
import re
+import struct
import sys
MD5_BLOCKSIZE = 1024 * 1024
HTML_ESCAPE_TABLE = {
- "&": "&amp;",
- '"': "&quot;",
- "'": "&apos;",
- ">": "&gt;",
- "<": "&lt;",
+ b"&": b"&amp;",
+ b'"': b"&quot;",
+ b"'": b"&apos;",
+ b">": b"&gt;",
+ b"<": b"&lt;",
}
-def hexify(s):
- return ("%02x"*len(s)) % tuple(map(ord, s))
-
def md5sum(filename):
"""Calculate an MD5 of the file given by FILENAME,
and return hex digest as a string.
@@ -57,20 +55,26 @@
break
sum.update(block)
f.close()
- return hexify(sum.digest())
+ return sum.hexdigest()
def html_escape(text):
"""Produce entities within text."""
- return "".join(HTML_ESCAPE_TABLE.get(c,c) for c in text)
+ # Using for i in text doesn't work since i will be an int, not a byte.
+ # There are multiple ways to solve this, but the most performant way
+ # to iterate over a byte array is to use unpack. Using the
+ # for i in range(len(text)) and using that to get a byte using array
+ # slices is twice as slow as this method.
+ return b"".join(HTML_ESCAPE_TABLE.get(i,i) for i in struct.unpack(str(len(text)) + 'c', text))
-HTML_OUTPUT_CSS="""
+HTML_OUTPUT_CSS=b"""
<style type="text/css">
body { padding: 0; font-family: sans-serif; }
.same-license { background-color: #eeeeee; border-top: 20px solid white; padding: 10px; }
.label { font-weight: bold; }
.file-list { margin-left: 1em; color: blue; }
</style>
+
"""
def combine_notice_files_html(file_hash, input_dirs, output_filename):
@@ -90,13 +94,13 @@
# Open the output file, and output the header pieces
output_file = open(output_filename, "wb")
- print >> output_file, "<html><head>"
- print >> output_file, HTML_OUTPUT_CSS
- print >> output_file, '</head><body topmargin="0" leftmargin="0" rightmargin="0" bottommargin="0">'
+ output_file.write(b"<html><head>\n")
+ output_file.write(HTML_OUTPUT_CSS)
+ output_file.write(b'</head><body topmargin="0" leftmargin="0" rightmargin="0" bottommargin="0">\n')
# Output our table of contents
- print >> output_file, '<div class="toc">'
- print >> output_file, "<ul>"
+ output_file.write(b'<div class="toc">\n')
+ output_file.write(b"<ul>\n")
# Flatten the list of lists into a single list of filenames
sorted_filenames = sorted(itertools.chain.from_iterable(file_hash))
@@ -104,31 +108,29 @@
# Print out a nice table of contents
for filename in sorted_filenames:
stripped_filename = SRC_DIR_STRIP_RE.sub(r"\1", filename)
- print >> output_file, '<li><a href="#id%d">%s</a></li>' % (id_table.get(filename), stripped_filename)
+ output_file.write(('<li><a href="#id%d">%s</a></li>\n' % (id_table.get(filename), stripped_filename)).encode())
- print >> output_file, "</ul>"
- print >> output_file, "</div><!-- table of contents -->"
+ output_file.write(b"</ul>\n")
+ output_file.write(b"</div><!-- table of contents -->\n")
# Output the individual notice file lists
- print >>output_file, '<table cellpadding="0" cellspacing="0" border="0">'
+ output_file.write(b'<table cellpadding="0" cellspacing="0" border="0">\n')
for value in file_hash:
- print >> output_file, '<tr id="id%d"><td class="same-license">' % id_table.get(value[0])
- print >> output_file, '<div class="label">Notices for file(s):</div>'
- print >> output_file, '<div class="file-list">'
+ output_file.write(b'<tr id="id%d"><td class="same-license">\n' % id_table.get(value[0]))
+ output_file.write(b'<div class="label">Notices for file(s):</div>\n')
+ output_file.write(b'<div class="file-list">\n')
for filename in value:
- print >> output_file, "%s <br/>" % (SRC_DIR_STRIP_RE.sub(r"\1", filename))
- print >> output_file, "</div><!-- file-list -->"
- print >> output_file
- print >> output_file, '<pre class="license-text">'
- print >> output_file, html_escape(open(value[0]).read())
- print >> output_file, "</pre><!-- license-text -->"
- print >> output_file, "</td></tr><!-- same-license -->"
- print >> output_file
- print >> output_file
- print >> output_file
+ output_file.write(("%s <br/>\n" % SRC_DIR_STRIP_RE.sub(r"\1", filename)).encode())
+ output_file.write(b"</div><!-- file-list -->\n")
+ output_file.write(b"\n")
+ output_file.write(b'<pre class="license-text">\n')
+ with open(value[0], "rb") as notice_file:
+ output_file.write(html_escape(notice_file.read()))
+ output_file.write(b"\n</pre><!-- license-text -->\n")
+ output_file.write(b"</td></tr><!-- same-license -->\n\n\n\n")
# Finish off the file output
- print >> output_file, "</table>"
- print >> output_file, "</body></html>"
+ output_file.write(b"</table>\n")
+ output_file.write(b"</body></html>\n")
output_file.close()
def combine_notice_files_text(file_hash, input_dirs, output_filename, file_title):
@@ -136,14 +138,18 @@
SRC_DIR_STRIP_RE = re.compile("(?:" + "|".join(input_dirs) + ")(/.*).txt")
output_file = open(output_filename, "wb")
- print >> output_file, file_title
+ output_file.write(file_title.encode())
+ output_file.write(b"\n")
for value in file_hash:
- print >> output_file, "============================================================"
- print >> output_file, "Notices for file(s):"
- for filename in value:
- print >> output_file, SRC_DIR_STRIP_RE.sub(r"\1", filename)
- print >> output_file, "------------------------------------------------------------"
- print >> output_file, open(value[0]).read()
+ output_file.write(b"============================================================\n")
+ output_file.write(b"Notices for file(s):\n")
+ for filename in value:
+ output_file.write(SRC_DIR_STRIP_RE.sub(r"\1", filename).encode())
+ output_file.write(b"\n")
+ output_file.write(b"------------------------------------------------------------\n")
+ with open(value[0], "rb") as notice_file:
+ output_file.write(notice_file.read())
+ output_file.write(b"\n")
output_file.close()
def combine_notice_files_xml(files_with_same_hash, input_dirs, output_filename):
@@ -154,15 +160,15 @@
# Set up a filename to row id table (anchors inside tables don't work in
# most browsers, but href's to table row ids do)
id_table = {}
- for file_key in files_with_same_hash.keys():
- for filename in files_with_same_hash[file_key]:
+ for file_key, files in files_with_same_hash.items():
+ for filename in files:
id_table[filename] = file_key
# Open the output file, and output the header pieces
output_file = open(output_filename, "wb")
- print >> output_file, '<?xml version="1.0" encoding="utf-8"?>'
- print >> output_file, "<licenses>"
+ output_file.write(b'<?xml version="1.0" encoding="utf-8"?>\n')
+ output_file.write(b"<licenses>\n")
# Flatten the list of lists into a single list of filenames
sorted_filenames = sorted(id_table.keys())
@@ -170,10 +176,8 @@
# Print out a nice table of contents
for filename in sorted_filenames:
stripped_filename = SRC_DIR_STRIP_RE.sub(r"\1", filename)
- print >> output_file, '<file-name contentId="%s">%s</file-name>' % (id_table.get(filename), stripped_filename)
-
- print >> output_file
- print >> output_file
+ output_file.write(('<file-name contentId="%s">%s</file-name>\n' % (id_table.get(filename), stripped_filename)).encode())
+ output_file.write(b"\n\n")
processed_file_keys = []
# Output the individual notice file lists
@@ -183,11 +187,13 @@
continue
processed_file_keys.append(file_key)
- print >> output_file, '<file-content contentId="%s"><![CDATA[%s]]></file-content>' % (file_key, html_escape(open(filename).read()))
- print >> output_file
+ output_file.write(('<file-content contentId="%s"><![CDATA[' % file_key).encode())
+ with open(filename, "rb") as notice_file:
+ output_file.write(html_escape(notice_file.read()))
+ output_file.write(b"]]></file-content>\n\n")
# Finish off the file output
- print >> output_file, "</licenses>"
+ output_file.write(b"</licenses>\n")
output_file.close()
def get_args():
@@ -254,7 +260,7 @@
file_md5sum = md5sum(filename)
files_with_same_hash[file_md5sum].append(filename)
- filesets = [sorted(files_with_same_hash[md5]) for md5 in sorted(files_with_same_hash.keys())]
+ filesets = [sorted(files_with_same_hash[md5]) for md5 in sorted(list(files_with_same_hash))]
combine_notice_files_text(filesets, input_dirs, txt_output_file, file_title)
if html_output_file is not None:
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index f3b58f8..01cc233 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -589,7 +589,7 @@
AssertionError: If it can't find an image.
"""
for partition in ab_partitions:
- img_name = partition.strip() + ".img"
+ img_name = partition + ".img"
# Assert that the image is present under IMAGES/ now.
if output_zip:
@@ -856,39 +856,23 @@
if output_zip:
recovery_two_step_image.AddToZip(output_zip)
- if has_system:
- banner("system")
- partitions['system'] = AddSystem(
- output_zip, recovery_img=recovery_image, boot_img=boot_image)
+ def add_partition(partition, has_partition, add_func, add_args):
+ if has_partition:
+ banner(partition)
+ partitions[partition] = add_func(output_zip, *add_args)
- if has_vendor:
- banner("vendor")
- partitions['vendor'] = AddVendor(
- output_zip, recovery_img=recovery_image, boot_img=boot_image)
-
- if has_product:
- banner("product")
- partitions['product'] = AddProduct(output_zip)
-
- if has_system_ext:
- banner("system_ext")
- partitions['system_ext'] = AddSystemExt(output_zip)
-
- if has_odm:
- banner("odm")
- partitions['odm'] = AddOdm(output_zip)
-
- if has_vendor_dlkm:
- banner("vendor_dlkm")
- partitions['vendor_dlkm'] = AddVendorDlkm(output_zip)
-
- if has_odm_dlkm:
- banner("odm_dlkm")
- partitions['odm_dlkm'] = AddOdmDlkm(output_zip)
-
- if has_system_other:
- banner("system_other")
- AddSystemOther(output_zip)
+ add_partition_calls = (
+ ("system", has_system, AddSystem, [recovery_image, boot_image]),
+ ("vendor", has_vendor, AddVendor, [recovery_image, boot_image]),
+ ("product", has_product, AddProduct, []),
+ ("system_ext", has_system_ext, AddSystemExt, []),
+ ("odm", has_odm, AddOdm, []),
+ ("vendor_dlkm", has_vendor_dlkm, AddVendorDlkm, []),
+ ("odm_dlkm", has_odm_dlkm, AddOdmDlkm, []),
+ ("system_other", has_system_other, AddSystemOther, []),
+ )
+ for call in add_partition_calls:
+ add_partition(*call)
AddApexInfo(output_zip)
@@ -902,13 +886,10 @@
banner("partition-table")
AddPartitionTable(output_zip)
- if OPTIONS.info_dict.get("has_dtbo") == "true":
- banner("dtbo")
- partitions['dtbo'] = AddDtbo(output_zip)
-
- if OPTIONS.info_dict.get("has_pvmfw") == "true":
- banner("pvmfw")
- partitions['pvmfw'] = AddPvmfw(output_zip)
+ add_partition("dtbo",
+ OPTIONS.info_dict.get("has_dtbo") == "true", AddDtbo, [])
+ add_partition("pvmfw",
+ OPTIONS.info_dict.get("has_pvmfw") == "true", AddPvmfw, [])
# Custom images.
custom_partitions = OPTIONS.info_dict.get(
@@ -965,7 +946,7 @@
"ab_partitions.txt")
if os.path.exists(ab_partitions_txt):
with open(ab_partitions_txt) as f:
- ab_partitions = f.readlines()
+ ab_partitions = f.read().splitlines()
# For devices using A/B update, make sure we have all the needed images
# ready under IMAGES/ or RADIO/.
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index c6800e8..5affa32 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -1925,14 +1925,14 @@
RunAndCheckOutput(cmd)
-def UnzipTemp(filename, pattern=None):
+def UnzipTemp(filename, patterns=None):
"""Unzips the given archive into a temporary directory and returns the name.
Args:
filename: If filename is of the form "foo.zip+bar.zip", unzip foo.zip into
a temp dir, then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
- pattern: Files to unzip from the archive. If omitted, will unzip the entire
+ patterns: Files to unzip from the archive. If omitted, will unzip the entire
archive.
Returns:
@@ -1942,11 +1942,11 @@
tmp = MakeTempDir(prefix="targetfiles-")
m = re.match(r"^(.*[.]zip)\+(.*[.]zip)$", filename, re.IGNORECASE)
if m:
- UnzipToDir(m.group(1), tmp, pattern)
- UnzipToDir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES"), pattern)
+ UnzipToDir(m.group(1), tmp, patterns)
+ UnzipToDir(m.group(2), os.path.join(tmp, "BOOTABLE_IMAGES"), patterns)
filename = m.group(1)
else:
- UnzipToDir(filename, tmp, pattern)
+ UnzipToDir(filename, tmp, patterns)
return tmp
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 0842af9..d219ed6 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -189,6 +189,8 @@
OPTIONS.gki_signing_algorithm = None
OPTIONS.gki_signing_extra_args = None
OPTIONS.android_jar_path = None
+OPTIONS.vendor_partitions = set()
+OPTIONS.vendor_otatools = None
AVB_FOOTER_ARGS_BY_PARTITION = {
@@ -216,6 +218,10 @@
if partition not in AVB_FOOTER_ARGS_BY_PARTITION:
raise RuntimeError("Missing {} in AVB_FOOTER_ARGS".format(partition))
+# Partitions that can be regenerated after signing using a separate
+# vendor otatools package.
+ALLOWED_VENDOR_PARTITIONS = set(["vendor", "odm"])
+
def IsApexFile(filename):
return filename.endswith(".apex") or filename.endswith(".capex")
@@ -1180,6 +1186,63 @@
return keys
+def BuildVendorPartitions(output_zip_path):
+ """Builds OPTIONS.vendor_partitions using OPTIONS.vendor_otatools."""
+ if OPTIONS.vendor_partitions.difference(ALLOWED_VENDOR_PARTITIONS):
+ logger.warning("Allowed --vendor_partitions: %s",
+ ",".join(ALLOWED_VENDOR_PARTITIONS))
+ OPTIONS.vendor_partitions = ALLOWED_VENDOR_PARTITIONS.intersection(
+ OPTIONS.vendor_partitions)
+
+ logger.info("Building vendor partitions using vendor otatools.")
+ vendor_tempdir = common.UnzipTemp(output_zip_path, [
+ "META/*",
+ ] + ["{}/*".format(p.upper()) for p in OPTIONS.vendor_partitions])
+
+ # Disable various partitions that build based on misc_info fields.
+ # Only partitions in ALLOWED_VENDOR_PARTITIONS can be rebuilt using
+ # vendor otatools. These other partitions will be rebuilt using the main
+ # otatools if necessary.
+ vendor_misc_info_path = os.path.join(vendor_tempdir, "META/misc_info.txt")
+ vendor_misc_info = common.LoadDictionaryFromFile(vendor_misc_info_path)
+ vendor_misc_info["no_boot"] = "true" # boot
+ vendor_misc_info["vendor_boot"] = "false" # vendor_boot
+ vendor_misc_info["no_recovery"] = "true" # recovery
+ vendor_misc_info["board_bpt_enable"] = "false" # partition-table
+ vendor_misc_info["has_dtbo"] = "false" # dtbo
+ vendor_misc_info["has_pvmfw"] = "false" # pvmfw
+ vendor_misc_info["avb_custom_images_partition_list"] = "" # custom images
+ vendor_misc_info["avb_enable"] = "false" # vbmeta
+ vendor_misc_info["use_dynamic_partitions"] = "false" # super_empty
+ vendor_misc_info["build_super_partition"] = "false" # super split
+ with open(vendor_misc_info_path, "w") as output:
+ for key in sorted(vendor_misc_info):
+ output.write("{}={}\n".format(key, vendor_misc_info[key]))
+
+ # Disable care_map.pb as not all ab_partitions are available when
+ # vendor otatools regenerates vendor images.
+ os.remove(os.path.join(vendor_tempdir, "META/ab_partitions.txt"))
+
+ # Build vendor images using vendor otatools.
+ vendor_otatools_dir = common.MakeTempDir(prefix="vendor_otatools_")
+ common.UnzipToDir(OPTIONS.vendor_otatools, vendor_otatools_dir)
+ cmd = [
+ os.path.join(vendor_otatools_dir, "bin", "add_img_to_target_files"),
+ "--is_signing",
+ "--verbose",
+ vendor_tempdir,
+ ]
+ common.RunAndCheckOutput(cmd, verbose=True)
+
+ logger.info("Writing vendor partitions to output archive.")
+ with zipfile.ZipFile(
+ output_zip_path, "a", compression=zipfile.ZIP_DEFLATED,
+ allowZip64=True) as output_zip:
+ for p in OPTIONS.vendor_partitions:
+ path = "IMAGES/{}.img".format(p)
+ common.ZipWrite(output_zip, os.path.join(vendor_tempdir, path), path)
+
+
def main(argv):
key_mapping_options = []
@@ -1289,6 +1352,10 @@
OPTIONS.gki_signing_algorithm = a
elif o == "--gki_signing_extra_args":
OPTIONS.gki_signing_extra_args = a
+ elif o == "--vendor_otatools":
+ OPTIONS.vendor_otatools = a
+ elif o == "--vendor_partitions":
+ OPTIONS.vendor_partitions = set(a.split(","))
else:
return False
return True
@@ -1339,6 +1406,8 @@
"gki_signing_key=",
"gki_signing_algorithm=",
"gki_signing_extra_args=",
+ "vendor_partitions=",
+ "vendor_otatools=",
],
extra_option_handler=option_handler)
@@ -1384,8 +1453,11 @@
common.ZipClose(input_zip)
common.ZipClose(output_zip)
+ if OPTIONS.vendor_partitions and OPTIONS.vendor_otatools:
+ BuildVendorPartitions(args[1])
+
# Skip building userdata.img and cache.img when signing the target files.
- new_args = ["--is_signing"]
+ new_args = ["--is_signing", "--add_missing", "--verbose"]
# add_img_to_target_files builds the system image from scratch, so the
# recovery patch is guaranteed to be regenerated there.
if OPTIONS.rebuild_recovery:
diff --git a/tools/zipalign/ZipFile.cpp b/tools/zipalign/ZipFile.cpp
index 1e3c413..f2f65a6 100644
--- a/tools/zipalign/ZipFile.cpp
+++ b/tools/zipalign/ZipFile.cpp
@@ -530,7 +530,7 @@
// If the alignment is not what was requested, add some padding in the extra
// so the payload ends up where is requested.
uint64_t alignDiff = alignTo - (expectedPayloadOffset % alignTo);
- if (alignDiff == 0)
+ if (alignDiff == alignTo)
return OK;
return pEntry->addPadding(alignDiff);
@@ -654,7 +654,7 @@
{
ZipEntry* pEntry = NULL;
status_t result;
- long lfhPosn, startPosn, endPosn, uncompressedLen;
+ long lfhPosn, uncompressedLen;
if (mReadOnly)
return INVALID_OPERATION;
@@ -690,7 +690,6 @@
*/
lfhPosn = ftell(mZipFp);
pEntry->mLFH.write(mZipFp);
- startPosn = ftell(mZipFp);
/*
* Copy the data over.
@@ -741,18 +740,13 @@
}
/*
- * Update file offsets.
- */
- endPosn = ftell(mZipFp);
-
- /*
* Success! Fill out new values.
*/
pEntry->setLFHOffset(lfhPosn);
mEOCD.mNumEntries++;
mEOCD.mTotalNumEntries++;
mEOCD.mCentralDirSize = 0; // mark invalid; set by flush()
- mEOCD.mCentralDirOffset = endPosn;
+ mEOCD.mCentralDirOffset = ftell(mZipFp);
/*
* Go back and write the LFH.
diff --git a/tools/zipalign/tests/src/align_test.cpp b/tools/zipalign/tests/src/align_test.cpp
index c79e791..96d4f73 100644
--- a/tools/zipalign/tests/src/align_test.cpp
+++ b/tools/zipalign/tests/src/align_test.cpp
@@ -9,6 +9,7 @@
#include <android-base/file.h>
using namespace android;
+using namespace base;
static std::string GetTestPath(const std::string& filename) {
static std::string test_data_dir = android::base::GetExecutableDirectory() + "/tests/data/";
@@ -26,6 +27,34 @@
ASSERT_EQ(0, verified);
}
+TEST(Align, DoubleAligment) {
+ const std::string src = GetTestPath("unaligned.zip");
+ const std::string tmp = GetTestPath("da_aligned.zip");
+ const std::string dst = GetTestPath("da_d_aligner.zip");
+
+ int processed = process(src.c_str(), tmp.c_str(), 4, true, false, 4096);
+ ASSERT_EQ(0, processed);
+
+ int verified = verify(tmp.c_str(), 4, true, false);
+ ASSERT_EQ(0, verified);
+
+ // Align the result of the previous run. Essentially double aligning.
+ processed = process(tmp.c_str(), dst.c_str(), 4, true, false, 4096);
+ ASSERT_EQ(0, processed);
+
+ verified = verify(dst.c_str(), 4, true, false);
+ ASSERT_EQ(0, verified);
+
+ // Nothing should have changed between tmp and dst.
+ std::string tmp_content;
+ ASSERT_EQ(true, ReadFileToString(tmp, &tmp_content));
+
+ std::string dst_content;
+ ASSERT_EQ(true, ReadFileToString(dst, &dst_content));
+
+ ASSERT_EQ(tmp_content, dst_content);
+}
+
// Align a zip featuring a hole at the beginning. The
// hole in the archive is a delete entry in the Central
// Directory.