Merge "Revert^2 "Set PRODUCT_SHIPPING_API_LEVEL in gsi_release.mk"""
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000..ab2564e
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,49 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+ default_applicable_licenses: ["build_make_license"],
+}
+
+// Added automatically by a large-scale-change that took the approach of
+// 'apply every license found to every target'. While this makes sure we respect
+// every license restriction, it may not be entirely correct.
+//
+// e.g. GPL in an MIT project might only apply to the contrib/ directory.
+//
+// Please consider splitting the single license below into multiple licenses,
+// taking care not to lose any license_kind information, and overriding the
+// default license using the 'licenses: [...]' property on targets as needed.
+//
+// For unused files, consider creating a 'fileGroup' with "//visibility:private"
+// to attach the license to, and including a comment whether the files may be
+// used in the current project.
+// See: http://go/android-license-faq
+license {
+ name: "build_make_license",
+ visibility: [":__subpackages__"],
+ license_kinds: [
+ "SPDX-license-identifier-Apache-2.0",
+ "SPDX-license-identifier-BSD",
+ "SPDX-license-identifier-CC-BY",
+ "SPDX-license-identifier-GPL",
+ "SPDX-license-identifier-GPL-2.0",
+ "SPDX-license-identifier-LGPL",
+ "SPDX-license-identifier-MIT",
+ "legacy_not_a_contribution",
+ "legacy_restricted",
+ ],
+ // large-scale-change unable to identify any license_text files
+}
diff --git a/Changes.md b/Changes.md
index 84c8d95..1ab005f 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,49 @@
# Build System Changes for Android.mk Writers
+## Dexpreopt starts enforcing `<uses-library>` checks (for Java modules)
+
+In order to construct correct class loader context for dexpreopt, build system
+needs to know about the shared library dependencies of Java modules listed in
+the `<uses-library>` tags in the manifest. Since the build system does not have
+access to the manifest contents, that information must be present in the build
+files. In simple cases Soong is able to infer it from its knowledge of Java SDK
+libraries and the `libs` property in Android.bp, but in more complex cases it is
+necessary to add the missing information in Android.bp/Android.mk manually.
+
+To specify a list of libraries for a given module, use:
+
+* Android.bp properties: `uses_libs`, `optional_uses_libs`
+* Android.mk variables: `LOCAL_USES_LIBRARIES`, `LOCAL_OPTIONAL_USES_LIBRARIES`
+
+If a library is in `libs`, it usually should *not* be added to the above
+properties, and Soong should be able to infer the `<uses-library>` tag. But
+sometimes a library also needs additional information in its
+Android.bp/Android.mk file (e.g. when it is a `java_library` rather than a
+`java_sdk_library`, or when the library name is different from its module name,
+or when the module is defined in Android.mk rather than Android.bp). In such
+cases it is possible to tell the build system that the library provides a
+`<uses-library>` with a given name (however, this is discouraged and will be
+deprecated in the future, and it is recommended to fix the underlying problem):
+
+* Android.bp property: `provides_uses_lib`
+* Android.mk variable: `LOCAL_PROVIDES_USES_LIBRARY`
+
+It is possible to disable the check on a per-module basis. When doing that it is
+also recommended to disable dexpreopt, as disabling a failed check will result
+in incorrect class loader context recorded in the .odex file, which will cause
+class loader context mismatch and dexopt at first boot.
+
+* Android.bp property: `enforce_uses_lib`
+* Android.mk variable: `LOCAL_ENFORCE_USES_LIBRARIES`
+
+Finally, it is possible to globally disable the check:
+
+* For a given product: `PRODUCT_BROKEN_VERIFY_USES_LIBRARIES := true`
+* On the command line: `RELAX_USES_LIBRARY_CHECK=true`
+
+The environment variable overrides the product variable, so it is possible to
+disable the check for a product, but quickly re-enable it for a local build.
+
## `LOCAL_REQUIRED_MODULES` requires listed modules to exist {#BUILD_BROKEN_MISSING_REQUIRED_MODULES}
Modules listed in `LOCAL_REQUIRED_MODULES`, `LOCAL_HOST_REQUIRED_MODULES` and
@@ -17,9 +61,9 @@
System properties for each of the partition is supposed to be set via following
product config variables.
-For system partititon,
+For system partition,
-* `PRODUCT_SYSTEM_PROPERITES`
+* `PRODUCT_SYSTEM_PROPERTIES`
* `PRODUCT_SYSTEM_DEFAULT_PROPERTIES` is highly discouraged. Will be deprecated.
For vendor partition,
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 41defb2..3beadff 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -754,6 +754,8 @@
# Workaround for Soong not being able to rebuild the host binary if its
# JNI dependencies change: b/170389375
$(call add-clean-step, rm -rf $(OUT_DIR)/soong/host/*/lib*/libconscrypt_openjdk_jni.so)
+# vendor-ramdisk renamed to vendor_ramdisk
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor-ramdisk)
# ************************************************
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..814cb00
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,8 @@
+third_party {
+ # would be NOTICE save for GPL in:
+ # core/LINUX_KERNEL_COPYING
+ # tools/droiddoc/templates-pdk/assets/jquery-1.6.2.min.js
+ # tools/droiddoc/templates-pdk/assets/jquery-history.js
+ # tools/droiddoc/templates-pdk/assets/jquery-resizable.min.js
+ license_type: RESTRICTED
+}
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
new file mode 100644
index 0000000..ce75150
--- /dev/null
+++ b/PREUPLOAD.cfg
@@ -0,0 +1,2 @@
+[Hook Scripts]
+do_not_use_DO_NOT_MERGE = ${REPO_ROOT}/build/soong/scripts/check_do_not_merge.sh ${PREUPLOAD_COMMIT}
diff --git a/common/json.mk b/common/json.mk
index ba8ffa7..e376aab 100644
--- a/common/json.mk
+++ b/common/json.mk
@@ -24,7 +24,10 @@
add_json_csv =$= $(call add_json_val,$(1),$(call csv_to_json_list,$(strip $(2))))
add_json_bool =$= $(call add_json_val,$(1),$(if $(strip $(2)),true,false))
add_json_map =$= $(eval _json_contents := $$(_json_contents)$$(_json_indent)"$$(strip $$(1))": {$$(newline))$(json_increase_indent)
+add_json_map_anon =$= $(eval _json_contents := $$(_json_contents)$$(_json_indent){$$(newline))$(json_increase_indent)
end_json_map =$= $(json_decrease_indent)$(eval _json_contents := $$(_json_contents)$$(if $$(filter %$$(comma),$$(lastword $$(_json_contents))),__SV_END)$$(_json_indent)},$$(newline))
+add_json_array =$= $(eval _json_contents := $$(_json_contents)$$(_json_indent)"$$(strip $$(1))": [$$(newline))$(json_increase_indent)
+end_json_array =$= $(json_decrease_indent)$(eval _json_contents := $$(_json_contents)$$(if $$(filter %$$(comma),$$(lastword $$(_json_contents))),__SV_END)$$(_json_indent)],$$(newline))
# Clears _json_contents to start a new json file
json_start =$= $(eval _json_contents := {$$(newline))$(eval _json_indent := $$(4space))
diff --git a/core/Makefile b/core/Makefile
index 95bac40..dca5011 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -211,6 +211,44 @@
$(hide) mv $@.tmp $@
# -----------------------------------------------------------------
+# Declare vendor ramdisk fragments
+INTERNAL_VENDOR_RAMDISK_FRAGMENTS :=
+
+# Validation check and assign default --ramdisk_type.
+$(foreach vendor_ramdisk_fragment,$(BOARD_VENDOR_RAMDISK_FRAGMENTS), \
+ $(if $(and $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).KERNEL_MODULE_DIRS), \
+ $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).PREBUILT)), \
+ $(error Must not specify KERNEL_MODULE_DIRS for prebuilt vendor ramdisk fragment "$(vendor_ramdisk_fragment)": $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).KERNEL_MODULE_DIRS))) \
+ $(eval VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).STAGING_DIR := $(call intermediates-dir-for,PACKAGING,vendor_ramdisk_fragment-stage-$(vendor_ramdisk_fragment))) \
+ $(eval VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).FILES :=) \
+ $(if $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).KERNEL_MODULE_DIRS), \
+ $(if $(filter --ramdisk_type,$(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS)),, \
+ $(eval BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS += --ramdisk_type DLKM))) \
+)
+
+# Create the "kernel module directory" to "vendor ramdisk fragment" inverse mapping.
+$(foreach vendor_ramdisk_fragment,$(BOARD_VENDOR_RAMDISK_FRAGMENTS), \
+ $(foreach kmd,$(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).KERNEL_MODULE_DIRS), \
+ $(eval kmd_vrf := KERNEL_MODULE_DIR_VENDOR_RAMDISK_FRAGMENT_$(kmd)) \
+ $(if $($(kmd_vrf)),$(error Kernel module directory "$(kmd)" belongs to multiple vendor ramdisk fragments: "$($(kmd_vrf))" "$(vendor_ramdisk_fragment)", each kernel module directory should belong to exactly one or none vendor ramdisk fragment)) \
+ $(eval $(kmd_vrf) := $(vendor_ramdisk_fragment)) \
+ ) \
+)
+INTERNAL_VENDOR_RAMDISK_FRAGMENTS += $(BOARD_VENDOR_RAMDISK_FRAGMENTS)
+
+# Strip the list in case of any whitespace.
+INTERNAL_VENDOR_RAMDISK_FRAGMENTS := \
+ $(strip $(INTERNAL_VENDOR_RAMDISK_FRAGMENTS))
+
+# Assign --ramdisk_name for each vendor ramdisk fragment.
+$(foreach vendor_ramdisk_fragment,$(INTERNAL_VENDOR_RAMDISK_FRAGMENTS), \
+ $(if $(filter --ramdisk_name,$(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS)), \
+ $(error Must not specify --ramdisk_name for vendor ramdisk fragment: $(vendor_ramdisk_fragment))) \
+ $(eval BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS += --ramdisk_name $(vendor_ramdisk_fragment)) \
+ $(eval .KATI_READONLY := BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS) \
+)
+
+# -----------------------------------------------------------------
# kernel modules
# Depmod requires a well-formed kernel version so 0.0 is used as a placeholder.
@@ -230,7 +268,7 @@
# $(7): module archive
# $(8): staging dir for stripped modules
# $(9): module directory name
-# Returns the a list of src:dest pairs to install the modules using copy-many-files.
+# Returns a list of src:dest pairs to install the modules using copy-many-files.
define build-image-kernel-modules
$(if $(9), \
$(eval _dir := $(9)/), \
@@ -315,6 +353,26 @@
@echo '$$(strip $$(notdir $$(PRIVATE_LOAD_MODULES)))' | tr ' ' '\n' > $$(@)
endef
+# $(1): source blocklist file
+# $(2): destination pathname
+# Returns a build rule that checks the syntax of and installs a kernel modules
+# blocklist file. Strip and squeeze any extra space in the blocklist.
+# For use via $(eval).
+define build-image-kernel-modules-blocklist-file
+$(2): $(1)
+ @echo "modprobe blocklist $$(@)"
+ $(hide) mkdir -p "$$(dir $$@)"
+ $(hide) rm -f "$$@"
+ $(hide) awk <"$$<" >"$$@" \
+ '/^#/ { print; next } \
+ NF == 0 { next } \
+ NF != 2 || $$$$1 != "blocklist" \
+ { print "Invalid blocklist line " FNR ": " $$$$0 >"/dev/stderr"; \
+ exit_status = 1; next } \
+ { $$$$1 = $$$$1; print } \
+ END { exit exit_status }'
+endef
+
# $(1): image name
# $(2): build output directory (TARGET_OUT_VENDOR, TARGET_RECOVERY_ROOT_OUT, etc)
# $(3): mount point
@@ -331,7 +389,12 @@
$(if $(strip $(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver))$(BOARD_$(1)_KERNEL_MODULES_ARCHIVE$(_sep)$(_kver))),\
$(if $(BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver)),,\
$(eval BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver) := $(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)))) \
- $(call copy-many-files,$(call build-image-kernel-modules,$(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)),$(2),$(3),$(call intermediates-dir-for,PACKAGING,depmod_$(1)$(_sep)$(_kver)),$(BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver)),$(4),$(BOARD_$(1)_KERNEL_MODULES_ARCHIVE$(_sep)$(_kver)),$(_stripped_staging_dir),$(_kver))))
+ $(call copy-many-files,$(call build-image-kernel-modules,$(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)),$(2),$(3),$(call intermediates-dir-for,PACKAGING,depmod_$(1)$(_sep)$(_kver)),$(BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver)),$(4),$(BOARD_$(1)_KERNEL_MODULES_ARCHIVE$(_sep)$(_kver)),$(_stripped_staging_dir),$(_kver)))) \
+$(if $(BOARD_$(1)_KERNEL_MODULES_BLOCKLIST_FILE$(_sep)$(_kver)), \
+ $(eval $(call build-image-kernel-modules-blocklist-file, \
+ $(BOARD_$(1)_KERNEL_MODULES_BLOCKLIST_FILE$(_sep)$(_kver)), \
+ $(2)/lib/modules/modules.blocklist)) \
+ $(2)/lib/modules/modules.blocklist)
endef
# $(1): kernel module directory name (top is an out of band value for no directory)
@@ -390,38 +453,24 @@
VENDOR_RAMDISK_STRIPPED_MODULE_STAGING_DIR :=
endif
-# Create the "kernel module directory" to "vendor ramdisk fragment" inverse mapping.
-$(foreach vendor_ramdisk_fragment,$(BOARD_VENDOR_RAMDISK_FRAGMENTS), \
- $(if $(and $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).KERNEL_MODULE_DIRS), \
- $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).PREBUILT)), \
- $(error Must not specify KERNEL_MODULE_DIRS for prebuilt vendor ramdisk fragment "$(vendor_ramdisk_fragment)": $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).KERNEL_MODULE_DIRS))) \
- $(eval VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).STAGING_DIR := $(call intermediates-dir-for,PACKAGING,vendor_ramdisk_fragment-dlkm-$(vendor_ramdisk_fragment))) \
- $(eval VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).FILES :=) \
- $(foreach dir,$(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).KERNEL_MODULE_DIRS), \
- $(eval kmd_vrf := KERNEL_MODULE_DIR_VENDOR_RAMDISK_FRAGMENT_$(dir)) \
- $(if $($(kmd_vrf)),$(error Kernel module directory "$(dir)" belongs to multiple vendor ramdisk fragments: "$($(kmd_vrf))" "$(vendor_ramdisk_fragment)", each kernel module directory should belong to exactly one or none vendor ramdisk fragment)) \
- $(eval $(kmd_vrf) := $(vendor_ramdisk_fragment)) \
- ) \
-)
-
BOARD_KERNEL_MODULE_DIRS += top
-$(foreach dir,$(BOARD_KERNEL_MODULE_DIRS), \
- $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,RECOVERY,$(TARGET_RECOVERY_ROOT_OUT),,modules.load.recovery,,$(dir))) \
- $(eval vendor_ramdisk_fragment := $(KERNEL_MODULE_DIR_VENDOR_RAMDISK_FRAGMENT_$(dir))) \
+$(foreach kmd,$(BOARD_KERNEL_MODULE_DIRS), \
+ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,RECOVERY,$(TARGET_RECOVERY_ROOT_OUT),,modules.load.recovery,,$(kmd))) \
+ $(eval vendor_ramdisk_fragment := $(KERNEL_MODULE_DIR_VENDOR_RAMDISK_FRAGMENT_$(kmd))) \
$(if $(vendor_ramdisk_fragment), \
$(eval output_dir := $(VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).STAGING_DIR)) \
$(eval result_var := VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).FILES) \
$(eval ### else ###), \
$(eval output_dir := $(TARGET_VENDOR_RAMDISK_OUT)) \
$(eval result_var := ALL_DEFAULT_INSTALLED_MODULES)) \
- $(eval $(result_var) += $(call build-image-kernel-modules-dir,VENDOR_RAMDISK,$(output_dir),,modules.load,$(VENDOR_RAMDISK_STRIPPED_MODULE_STAGING_DIR),$(dir))) \
- $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-ramdisk-recovery-load,$(dir))) \
- $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,VENDOR,$(if $(filter true,$(BOARD_USES_VENDOR_DLKMIMAGE)),$(TARGET_OUT_VENDOR_DLKM),$(TARGET_OUT_VENDOR)),vendor,modules.load,$(VENDOR_STRIPPED_MODULE_STAGING_DIR),$(dir))) \
- $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-charger-load,$(dir))) \
- $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,ODM,$(if $(filter true,$(BOARD_USES_ODM_DLKMIMAGE)),$(TARGET_OUT_ODM_DLKM),$(TARGET_OUT_ODM)),odm,modules.load,,$(dir))) \
+ $(eval $(result_var) += $(call build-image-kernel-modules-dir,VENDOR_RAMDISK,$(output_dir),,modules.load,$(VENDOR_RAMDISK_STRIPPED_MODULE_STAGING_DIR),$(kmd))) \
+ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-ramdisk-recovery-load,$(kmd))) \
+ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,VENDOR,$(if $(filter true,$(BOARD_USES_VENDOR_DLKMIMAGE)),$(TARGET_OUT_VENDOR_DLKM),$(TARGET_OUT_VENDOR)),vendor,modules.load,$(VENDOR_STRIPPED_MODULE_STAGING_DIR),$(kmd))) \
+ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-charger-load,$(kmd))) \
+ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,ODM,$(if $(filter true,$(BOARD_USES_ODM_DLKMIMAGE)),$(TARGET_OUT_ODM_DLKM),$(TARGET_OUT_ODM)),odm,modules.load,,$(kmd))) \
$(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
- $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-recovery-as-boot-load,$(dir))),\
- $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,GENERIC_RAMDISK,$(TARGET_RAMDISK_OUT),,modules.load,,$(dir)))))
+ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-recovery-as-boot-load,$(kmd))),\
+ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,GENERIC_RAMDISK,$(TARGET_RAMDISK_OUT),,modules.load,,$(kmd)))))
# -----------------------------------------------------------------
# Cert-to-package mapping. Used by the post-build signing tools.
@@ -669,10 +718,10 @@
$(INSTALLED_FILES_FILE_ROOT): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_ROOT)
$(INSTALLED_FILES_FILE_ROOT) : $(INTERNAL_ROOT_FILES) $(FILESLIST) $(FILESLIST_UTIL)
@echo Installed file list: $@
- @mkdir -p $(dir $@)
- @rm -f $@
- $(hide) $(FILESLIST) $(TARGET_ROOT_OUT) > $(@:.txt=.json)
- $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+ mkdir -p $(dir $@)
+ rm -f $@
+ $(FILESLIST) $(TARGET_ROOT_OUT) > $(@:.txt=.json)
+ $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
$(call dist-for-goals, sdk win_sdk sdk_addon, $(INSTALLED_FILES_FILE_ROOT))
@@ -698,11 +747,11 @@
$(INSTALLED_FILES_FILE_RAMDISK): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_RAMDISK)
$(INSTALLED_FILES_FILE_RAMDISK) : $(INTERNAL_RAMDISK_FILES) $(FILESLIST) $(FILESLIST_UTIL)
@echo Installed file list: $@
- @mkdir -p $(TARGET_RAMDISK_OUT)
- @mkdir -p $(dir $@)
- @rm -f $@
- $(hide) $(FILESLIST) $(TARGET_RAMDISK_OUT) > $(@:.txt=.json)
- $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+ mkdir -p $(TARGET_RAMDISK_OUT)
+ mkdir -p $(dir $@)
+ rm -f $@
+ $(FILESLIST) $(TARGET_RAMDISK_OUT) > $(@:.txt=.json)
+ $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
$(call dist-for-goals, sdk win_sdk sdk_addon, $(INSTALLED_FILES_FILE_RAMDISK))
BUILT_RAMDISK_TARGET := $(PRODUCT_OUT)/ramdisk.img
@@ -721,7 +770,7 @@
# We just build this directly to the install location.
INSTALLED_RAMDISK_TARGET := $(BUILT_RAMDISK_TARGET)
$(INSTALLED_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_RAMDISK_FILES) $(INSTALLED_FILES_FILE_RAMDISK) | $(COMPRESSION_COMMAND_DEPS)
- $(call pretty,"Target ram disk: $@")
+ $(call pretty,"Target ramdisk: $@")
$(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_RAMDISK_OUT) | $(COMPRESSION_COMMAND) > $@
.PHONY: ramdisk-nodeps
@@ -775,7 +824,7 @@
endif
# $1: boot image file name
-# $2: boot image variant (boot, boot-debug)
+# $2: boot image variant (boot, boot-debug, boot-test-harness)
define get-bootimage-partition-size
$(BOARD_$(call to-upper,$(subst .img,,$(subst $(2),kernel,$(notdir $(1)))))_BOOTIMAGE_PARTITION_SIZE)
endef
@@ -831,6 +880,23 @@
--os_version $(PLATFORM_VERSION_LAST_STABLE) \
--os_patch_level $(PLATFORM_SECURITY_PATCH)
+ifdef BOARD_GKI_SIGNING_KEY_PATH
+ifndef BOARD_GKI_SIGNING_ALGORITHM
+$(error BOARD_GKI_SIGNING_ALGORITHM should be defined with BOARD_GKI_SIGNING_KEY_PATH)
+endif
+INTERNAL_MKBOOTIMG_GKI_SINGING_ARGS := \
+ --gki_signing_key $(BOARD_GKI_SIGNING_KEY_PATH) \
+ --gki_signing_algorithm $(BOARD_GKI_SIGNING_ALGORITHM) \
+ --gki_signing_avbtool_path $(AVBTOOL)
+endif
+
+# Using double quote to pass BOARD_GKI_SIGNING_SIGNATURE_ARGS as a single string
+# to MKBOOTIMG, although it may contain multiple args.
+ifdef BOARD_GKI_SIGNING_SIGNATURE_ARGS
+INTERNAL_MKBOOTIMG_GKI_SINGING_ARGS += \
+ --gki_signing_signature_args "$(BOARD_GKI_SIGNING_SIGNATURE_ARGS)"
+endif
+
# Define these only if we are building boot
ifdef BUILDING_BOOT_IMAGE
INSTALLED_BOOTIMAGE_TARGET := $(BUILT_BOOTIMAGE_TARGET)
@@ -845,7 +911,8 @@
# $1: boot image target
define build_boot_board_avb_enabled
- $(MKBOOTIMG) --kernel $(call bootimage-to-kernel,$(1)) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1)
+ $(MKBOOTIMG) --kernel $(call bootimage-to-kernel,$(1)) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) \
+ $(INTERNAL_MKBOOTIMG_GKI_SINGING_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1)
$(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(call get-bootimage-partition-size,$(1),boot)))
$(AVBTOOL) add_hash_footer \
--image $(1) \
@@ -854,12 +921,12 @@
$(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
endef
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(AVBTOOL) $(INTERNAL_BOOTIMAGE_FILES) $(BOARD_AVB_BOOT_KEY_PATH)
+$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(AVBTOOL) $(INTERNAL_BOOTIMAGE_FILES) $(BOARD_AVB_BOOT_KEY_PATH) $(BOARD_GKI_SIGNING_KEY_PATH)
$(call pretty,"Target boot image: $@")
$(call build_boot_board_avb_enabled,$@)
.PHONY: bootimage-nodeps
-bootimage-nodeps: $(MKBOOTIMG) $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH)
+bootimage-nodeps: $(MKBOOTIMG) $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH) $(BOARD_GKI_SIGNING_KEY_PATH)
@echo "make $@: ignoring dependencies"
$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_board_avb_enabled,$(b)))
@@ -955,7 +1022,7 @@
$(ALL_GENERATED_SOURCES) \
$(ALL_DEFAULT_INSTALLED_MODULES))
-INTERNAL_VENDOR_RAMDISK_TARGET := $(call intermediates-dir-for,PACKAGING,vendor-boot)/vendor-ramdisk.cpio$(RAMDISK_EXT)
+INTERNAL_VENDOR_RAMDISK_TARGET := $(call intermediates-dir-for,PACKAGING,vendor_boot)/vendor_ramdisk.cpio$(RAMDISK_EXT)
ifeq (true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
$(INTERNAL_VENDOR_RAMDISK_TARGET): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP)
@@ -965,16 +1032,23 @@
$(INTERNAL_VENDOR_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_VENDOR_RAMDISK_FILES) | $(COMPRESSION_COMMAND_DEPS)
$(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_VENDOR_RAMDISK_OUT) $(PRIVATE_ADDITIONAL_DIR) | $(COMPRESSION_COMMAND) > $@
+ifeq (true,$(BOARD_BUILD_VENDOR_RAMDISK_IMAGE))
+INSTALLED_VENDOR_RAMDISK_TARGET := $(PRODUCT_OUT)/vendor_ramdisk.img
+$(INSTALLED_VENDOR_RAMDISK_TARGET): $(INTERNAL_VENDOR_RAMDISK_TARGET)
+ $(call pretty,"Target vendor ramdisk: $@")
+ $(copy-file-to-target)
+endif
+
INSTALLED_FILES_FILE_VENDOR_RAMDISK := $(PRODUCT_OUT)/installed-files-vendor-ramdisk.txt
INSTALLED_FILES_JSON_VENDOR_RAMDISK := $(INSTALLED_FILES_FILE_VENDOR_RAMDISK:.txt=.json)
$(INSTALLED_FILES_FILE_VENDOR_RAMDISK): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR_RAMDISK)
$(INSTALLED_FILES_FILE_VENDOR_RAMDISK): $(INTERNAL_VENDOR_RAMDISK_TARGET)
$(INSTALLED_FILES_FILE_VENDOR_RAMDISK): $(INTERNAL_VENDOR_RAMDISK_FILES) $(FILESLIST) $(FILESLIST_UTIL)
- echo Installed file list: $@
+ @echo Installed file list: $@
mkdir -p $(dir $@)
rm -f $@
- $(hide) $(FILESLIST) $(TARGET_VENDOR_RAMDISK_OUT) > $(@:.txt=.json)
- $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+ $(FILESLIST) $(TARGET_VENDOR_RAMDISK_OUT) > $(@:.txt=.json)
+ $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
ifdef BOARD_INCLUDE_DTB_IN_BOOTIMG
INTERNAL_VENDOR_BOOTIMAGE_ARGS += --dtb $(INSTALLED_DTBIMAGE_TARGET)
@@ -989,6 +1063,20 @@
INTERNAL_VENDOR_BOOTIMAGE_ARGS += --vendor_cmdline "$(INTERNAL_KERNEL_CMDLINE)"
endif
+ifdef INTERNAL_BOOTCONFIG
+ ifneq (,$(findstring androidboot.hardware=, $(INTERNAL_BOOTCONFIG)))
+ $(error "androidboot.hardware" BOOTCONFIG parameter is not supported due \
+ to bootconfig limitations. Use "hardware" instead. INTERNAL_BOOTCONFIG: \
+ $(INTERNAL_BOOTCONFIG))
+ endif
+ INTERNAL_VENDOR_BOOTCONFIG_TARGET := $(PRODUCT_OUT)/vendor-bootconfig.img
+ $(INTERNAL_VENDOR_BOOTCONFIG_TARGET):
+ rm -f $@
+ $(foreach param,$(INTERNAL_BOOTCONFIG), \
+ printf "%s\n" $(param) >> $@;)
+ INTERNAL_VENDOR_BOOTIMAGE_ARGS += --vendor_bootconfig $(INTERNAL_VENDOR_BOOTCONFIG_TARGET)
+endif
+
# $(1): Build target name
# $(2): Staging dir to be compressed
# $(3): Build dependencies
@@ -1019,24 +1107,19 @@
INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS :=
INTERNAL_VENDOR_RAMDISK_FRAGMENT_ARGS :=
-$(foreach vendor_ramdisk_fragment,$(BOARD_VENDOR_RAMDISK_FRAGMENTS), \
+$(foreach vendor_ramdisk_fragment,$(INTERNAL_VENDOR_RAMDISK_FRAGMENTS), \
$(eval prebuilt_vendor_ramdisk_fragment_file := $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).PREBUILT)) \
$(if $(prebuilt_vendor_ramdisk_fragment_file), \
$(eval vendor_ramdisk_fragment_target := $(call build-prebuilt-vendor-ramdisk-fragment,$(vendor_ramdisk_fragment),$(prebuilt_vendor_ramdisk_fragment_file))) \
$(eval ### else ###), \
- $(eval vendor_ramdisk_fragment_target := $(call build-vendor-ramdisk-fragment,$(vendor_ramdisk_fragment))) \
- $(if $(filter --ramdisk_type,$(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS)),, \
- $(eval BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS += --ramdisk_type DLKM))) \
- $(if $(filter --ramdisk_name,$(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS)), \
- $(error Must not specify --ramdisk_name for vendor ramdisk fragment: $(vendor_ramdisk_fragment))) \
- $(eval BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS += --ramdisk_name $(vendor_ramdisk_fragment)) \
+ $(eval vendor_ramdisk_fragment_target := $(call build-vendor-ramdisk-fragment,$(vendor_ramdisk_fragment)))) \
$(eval INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS += $(vendor_ramdisk_fragment_target)) \
$(eval INTERNAL_VENDOR_RAMDISK_FRAGMENT_ARGS += $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS) --vendor_ramdisk_fragment $(vendor_ramdisk_fragment_target)) \
)
INSTALLED_VENDOR_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/vendor_boot.img
$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_VENDOR_RAMDISK_TARGET) $(INSTALLED_DTBIMAGE_TARGET)
-$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
+$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS) $(INTERNAL_VENDOR_BOOTCONFIG_TARGET)
ifeq ($(BOARD_AVB_ENABLE),true)
$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(AVBTOOL) $(BOARD_AVB_VENDOR_BOOTIMAGE_KEY_PATH)
$(call pretty,"Target vendor_boot image: $@")
@@ -1223,7 +1306,7 @@
license_modules_rehomed += $(filter $(PRODUCT_OUT)/data/%,$(license_modules_rest))
license_modules_rehomed += $(filter $(PRODUCT_OUT)/ramdisk/%,$(license_modules_rest))
license_modules_rehomed += $(filter $(PRODUCT_OUT)/debug_ramdisk/%,$(license_modules_rest))
-license_modules_rehomed += $(filter $(PRODUCT_OUT)/vendor-ramdisk/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/vendor_ramdisk/%,$(license_modules_rest))
license_modules_rehomed += $(filter $(PRODUCT_OUT)/persist/%,$(license_modules_rest))
license_modules_rehomed += $(filter $(PRODUCT_OUT)/persist.img,$(license_modules_rest))
license_modules_rehomed += $(filter $(PRODUCT_OUT)/system_other/%,$(license_modules_rest))
@@ -1758,10 +1841,10 @@
$(INSTALLED_FILES_FILE_RECOVERY): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_RECOVERY)
$(INSTALLED_FILES_FILE_RECOVERY): $(INTERNAL_RECOVERYIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
@echo Installed file list: $@
- @mkdir -p $(dir $@)
- @rm -f $@
- $(hide) $(FILESLIST) $(TARGET_RECOVERY_ROOT_OUT) > $(@:.txt=.json)
- $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+ mkdir -p $(dir $@)
+ rm -f $@
+ $(FILESLIST) $(TARGET_RECOVERY_ROOT_OUT) > $(@:.txt=.json)
+ $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
recovery_sepolicy := \
$(TARGET_RECOVERY_ROOT_OUT)/sepolicy \
@@ -2103,8 +2186,8 @@
$(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_RECOVERY_MKBOOTIMG_ARGS) \
--output $(1).unsigned, \
$(MKBOOTIMG) $(if $(strip $(2)),--kernel $(strip $(2))) $(INTERNAL_RECOVERYIMAGE_ARGS) \
- $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_RECOVERY_MKBOOTIMG_ARGS) \
- --output $(1))
+ $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(INTERNAL_MKBOOTIMG_GKI_SINGING_ARGS) \
+ $(BOARD_RECOVERY_MKBOOTIMG_ARGS) --output $(1))
$(if $(filter true,$(PRODUCT_SUPPORTS_BOOT_SIGNER)),\
$(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
$(BOOT_SIGNER) /boot $(1) $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1),\
@@ -2132,6 +2215,9 @@
ifeq (true,$(BOARD_AVB_ENABLE))
recoveryimage-deps += $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH)
endif
+ifdef BOARD_GKI_SIGNING_KEY_PATH
+ recoveryimage-deps += $(BOARD_GKI_SIGNING_KEY_PATH) $(AVBTOOL)
+endif
ifdef BOARD_INCLUDE_RECOVERY_DTBO
ifdef BOARD_PREBUILT_RECOVERY_DTBOIMAGE
recoveryimage-deps += $(BOARD_PREBUILT_RECOVERY_DTBOIMAGE)
@@ -2215,7 +2301,7 @@
# Need to depend on the built ramdisk-debug.img, to get a complete list of the installed files.
$(INSTALLED_FILES_FILE_DEBUG_RAMDISK) : $(INSTALLED_DEBUG_RAMDISK_TARGET)
$(INSTALLED_FILES_FILE_DEBUG_RAMDISK) : $(INTERNAL_DEBUG_RAMDISK_FILES) $(FILESLIST) $(FILESLIST_UTIL)
- echo Installed file list: $@
+ @echo Installed file list: $@
mkdir -p $(dir $@)
rm -f $@
$(FILESLIST) $(DEBUG_RAMDISK_ROOT_DIR) > $(@:.txt=.json)
@@ -2240,7 +2326,7 @@
$(INSTALLED_DEBUG_RAMDISK_TARGET): $(INSTALLED_RAMDISK_TARGET)
endif # BOARD_USES_RECOVERY_AS_BOOT
$(INSTALLED_DEBUG_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_DEBUG_RAMDISK_FILES) | $(COMPRESSION_COMMAND_DEPS)
- $(call pretty,"Target debug ram disk: $@")
+ $(call pretty,"Target debug ramdisk: $@")
mkdir -p $(TARGET_DEBUG_RAMDISK_OUT)
touch $(TARGET_DEBUG_RAMDISK_OUT)/force_debuggable
rsync -a $(DEBUG_RAMDISK_SYNC_DIR)/ $(DEBUG_RAMDISK_ROOT_DIR)
@@ -2293,30 +2379,32 @@
BOARD_AVB_BOOT_TEST_KEY_PATH := external/avb/test/data/testkey_rsa2048.pem
INTERNAL_AVB_BOOT_TEST_SIGNING_ARGS := --algorithm SHA256_RSA2048 --key $(BOARD_AVB_BOOT_TEST_KEY_PATH)
# $(1): the bootimage to sign
+# $(2): boot image variant (boot, boot-debug, boot-test-harness)
define test-key-sign-bootimage
-$(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(call get-bootimage-partition-size,$(1),boot-debug)))
+$(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(call get-bootimage-partition-size,$(1),$(2))))
$(AVBTOOL) add_hash_footer \
--image $(1) \
- --partition_size $(call get-bootimage-partition-size,$(1),boot-debug)\
+ --partition_size $(call get-bootimage-partition-size,$(1),$(2))\
--partition_name boot $(INTERNAL_AVB_BOOT_TEST_SIGNING_ARGS) \
$(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
-$(call assert-max-image-size,$(1),$(call get-bootimage-partition-size,$(1),boot-debug))
+$(call assert-max-image-size,$(1),$(call get-bootimage-partition-size,$(1),$(2)))
endef
# $(1): output file
define build-debug-bootimage-target
$(MKBOOTIMG) --kernel $(PRODUCT_OUT)/$(subst .img,,$(subst boot-debug,kernel,$(notdir $(1)))) \
- $(INTERNAL_DEBUG_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $1
- $(if $(BOARD_AVB_BOOT_KEY_PATH),$(call test-key-sign-bootimage,$1))
+ $(INTERNAL_DEBUG_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) \
+ $(INTERNAL_MKBOOTIMG_GKI_SINGING_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $1
+ $(if $(BOARD_AVB_BOOT_KEY_PATH),$(call test-key-sign-bootimage,$1,boot-debug))
endef
# Depends on original boot.img and ramdisk-debug.img, to build the new boot-debug.img
-$(INSTALLED_DEBUG_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_DEBUG_RAMDISK_TARGET)
+$(INSTALLED_DEBUG_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_DEBUG_RAMDISK_TARGET) $(BOARD_GKI_SIGNING_KEY_PATH) $(AVBTOOL)
$(call pretty,"Target boot debug image: $@")
$(call build-debug-bootimage-target, $@)
.PHONY: bootimage_debug-nodeps
-bootimage_debug-nodeps: $(MKBOOTIMG)
+bootimage_debug-nodeps: $(MKBOOTIMG) $(BOARD_GKI_SIGNING_KEY_PATH) $(AVBTOOL)
echo "make $@: ignoring dependencies"
$(foreach b,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),$(call build-debug-bootimage-target,$b))
@@ -2328,7 +2416,7 @@
# -----------------------------------------------------------------
# vendor debug ramdisk
# Combines vendor ramdisk files and debug ramdisk files to build the vendor debug ramdisk.
-INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET := $(call intermediates-dir-for,PACKAGING,vendor_boot-debug)/vendor-ramdisk-debug.cpio$(RAMDISK_EXT)
+INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET := $(call intermediates-dir-for,PACKAGING,vendor_boot-debug)/vendor_ramdisk-debug.cpio$(RAMDISK_EXT)
$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): DEBUG_RAMDISK_FILES := $(INTERNAL_DEBUG_RAMDISK_FILES)
$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): VENDOR_RAMDISK_DIR := $(TARGET_VENDOR_RAMDISK_OUT)
@@ -2348,13 +2436,19 @@
$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): VENDOR_DEBUG_RAMDISK_DIR := $(PRODUCT_OUT)/vendor_debug_ramdisk
$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): $(INTERNAL_VENDOR_RAMDISK_TARGET) $(INSTALLED_DEBUG_RAMDISK_TARGET)
$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_VENDOR_DEBUG_RAMDISK_FILES) | $(COMPRESSION_COMMAND_DEPS)
- $(call pretty,"Target vendor debug ram disk: $@")
mkdir -p $(TARGET_VENDOR_DEBUG_RAMDISK_OUT)
touch $(TARGET_VENDOR_DEBUG_RAMDISK_OUT)/force_debuggable
$(foreach debug_file,$(DEBUG_RAMDISK_FILES), \
cp -f $(debug_file) $(patsubst $(DEBUG_RAMDISK_DIR)/%,$(VENDOR_DEBUG_RAMDISK_DIR)/%,$(debug_file)) &&) true
$(MKBOOTFS) -d $(TARGET_OUT) $(VENDOR_RAMDISK_DIR) $(VENDOR_DEBUG_RAMDISK_DIR) $(PRIVATE_ADDITIONAL_DIR) | $(COMPRESSION_COMMAND) > $@
+ifeq (true,$(BOARD_BUILD_VENDOR_RAMDISK_IMAGE))
+INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET := $(PRODUCT_OUT)/vendor_ramdisk-debug.img
+$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): $(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET)
+ $(call pretty,"Target vendor debug ramdisk: $@")
+ $(copy-file-to-target)
+endif
+
INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK := $(PRODUCT_OUT)/installed-files-vendor-ramdisk-debug.txt
INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK := $(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK:.txt=.json)
$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK)
@@ -2364,7 +2458,7 @@
# so we have to wait for the vendor debug ramdisk to be built before generating the installed file list.
$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): $(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET)
$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): $(INTERNAL_VENDOR_DEBUG_RAMDISK_FILES) $(FILESLIST) $(FILESLIST_UTIL)
- echo Installed file list: $@
+ @echo Installed file list: $@
mkdir -p $(dir $@)
rm -f $@
$(FILESLIST) $(VENDOR_DEBUG_RAMDISK_DIR) > $(@:.txt=.json)
@@ -2433,8 +2527,8 @@
$(INSTALLED_TEST_HARNESS_RAMDISK_TARGET): $(INSTALLED_DEBUG_RAMDISK_TARGET)
$(INSTALLED_TEST_HARNESS_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_TEST_HARNESS_RAMDISK_FILES) | $(COMPRESSION_COMMAND_DEPS)
- $(call pretty,"Target test harness ram disk: $@")
- rsync -a $(TEST_HARNESS_RAMDISK_SYNC_DIR)/ $(TEST_HARNESS_RAMDISK_ROOT_DIR)
+ $(call pretty,"Target test harness ramdisk: $@")
+ rsync --chmod=u+w -a $(TEST_HARNESS_RAMDISK_SYNC_DIR)/ $(TEST_HARNESS_RAMDISK_ROOT_DIR)
$(call append-test-harness-props,$(ADDITIONAL_TEST_HARNESS_PROPERTIES),$(TEST_HARNESS_PROP_TARGET))
$(MKBOOTFS) -d $(TARGET_OUT) $(TEST_HARNESS_RAMDISK_ROOT_DIR) | $(COMPRESSION_COMMAND) > $@
@@ -2452,6 +2546,7 @@
#
# Note: it's intentional to skip signing for boot-test-harness.img, because it
# can only be used if the device is unlocked with verification error.
+ifneq ($(INSTALLED_BOOTIMAGE_TARGET),)
ifneq ($(strip $(TARGET_NO_KERNEL)),true)
ifneq ($(strip $(BOARD_KERNEL_BINARIES)),)
@@ -2475,21 +2570,24 @@
# $(1): output file
define build-boot-test-harness-target
$(MKBOOTIMG) --kernel $(PRODUCT_OUT)/$(subst .img,,$(subst boot-test-harness,kernel,$(notdir $(1)))) \
- $(INTERNAL_TEST_HARNESS_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
- $(if $(BOARD_AVB_BOOT_KEY_PATH),$(call test-key-sign-bootimage,$@))
+ $(INTERNAL_TEST_HARNESS_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) \
+ $(INTERNAL_MKBOOTIMG_GKI_SINGING_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
+ $(if $(BOARD_AVB_BOOT_KEY_PATH),$(call test-key-sign-bootimage,$@,boot-test-harness))
endef
# Build the new boot-test-harness.img, based on boot-debug.img and ramdisk-test-harness.img.
-$(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_DEBUG_BOOTIMAGE_TARGET) $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET)
+$(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_DEBUG_BOOTIMAGE_TARGET) $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET) \
+$(BOARD_GKI_SIGNING_KEY_PATH) $(AVBTOOL)
$(call pretty,"Target boot test harness image: $@")
$(call build-boot-test-harness-target,$@)
.PHONY: bootimage_test_harness-nodeps
-bootimage_test_harness-nodeps: $(MKBOOTIMG)
+bootimage_test_harness-nodeps: $(MKBOOTIMG) $(BOARD_GKI_SIGNING_KEY_PATH) $(AVBTOOL)
echo "make $@: ignoring dependencies"
$(foreach b,$(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET),$(call build-boot-test-harness-target,$b))
endif # TARGET_NO_KERNEL
+endif # INSTALLED_BOOTIMAGE_TARGET
endif # BOARD_BUILD_SYSTEM_ROOT_IMAGE is not true
# Creates a compatibility symlink between two partitions, e.g. /system/vendor to /vendor
@@ -2569,10 +2667,10 @@
$(INSTALLED_FILES_FILE): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON)
$(INSTALLED_FILES_FILE): $(FULL_SYSTEMIMAGE_DEPS) $(FILESLIST) $(FILESLIST_UTIL)
@echo Installed file list: $@
- @mkdir -p $(dir $@)
- @rm -f $@
- $(hide) $(FILESLIST) $(TARGET_OUT) > $(@:.txt=.json)
- $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+ mkdir -p $(dir $@)
+ rm -f $@
+ $(FILESLIST) $(TARGET_OUT) > $(@:.txt=.json)
+ $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
.PHONY: installed-file-list
installed-file-list: $(INSTALLED_FILES_FILE)
@@ -2813,10 +2911,10 @@
$(INSTALLED_FILES_FILE_SYSTEMOTHER): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_SYSTEMOTHER)
$(INSTALLED_FILES_FILE_SYSTEMOTHER) : $(INTERNAL_SYSTEMOTHERIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
@echo Installed file list: $@
- @mkdir -p $(dir $@)
- @rm -f $@
- $(hide) $(FILESLIST) $(TARGET_OUT_SYSTEM_OTHER) > $(@:.txt=.json)
- $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+ mkdir -p $(dir $@)
+ rm -f $@
+ $(FILESLIST) $(TARGET_OUT_SYSTEM_OTHER) > $(@:.txt=.json)
+ $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
# Determines partition size for system_other.img.
ifeq ($(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS),true)
@@ -2897,10 +2995,10 @@
$(INSTALLED_FILES_FILE_VENDOR): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR)
$(INSTALLED_FILES_FILE_VENDOR) : $(INTERNAL_VENDORIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
@echo Installed file list: $@
- @mkdir -p $(dir $@)
- @rm -f $@
- $(hide) $(FILESLIST) $(TARGET_OUT_VENDOR) > $(@:.txt=.json)
- $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+ mkdir -p $(dir $@)
+ rm -f $@
+ $(FILESLIST) $(TARGET_OUT_VENDOR) > $(@:.txt=.json)
+ $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
vendorimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,vendor)
@@ -2949,10 +3047,10 @@
$(INSTALLED_FILES_FILE_PRODUCT): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_PRODUCT)
$(INSTALLED_FILES_FILE_PRODUCT) : $(INTERNAL_PRODUCTIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
@echo Installed file list: $@
- @mkdir -p $(dir $@)
- @rm -f $@
- $(hide) $(FILESLIST) $(TARGET_OUT_PRODUCT) > $(@:.txt=.json)
- $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+ mkdir -p $(dir $@)
+ rm -f $@
+ $(FILESLIST) $(TARGET_OUT_PRODUCT) > $(@:.txt=.json)
+ $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
productimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,product)
@@ -3000,10 +3098,10 @@
$(INSTALLED_FILES_FILE_SYSTEM_EXT): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_SYSTEM_EXT)
$(INSTALLED_FILES_FILE_SYSTEM_EXT) : $(INTERNAL_SYSTEM_EXTIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
@echo Installed file list: $@
- @mkdir -p $(dir $@)
- @rm -f $@
- $(hide) $(FILESLIST) $(TARGET_OUT_SYSTEM_EXT) > $(@:.txt=.json)
- $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+ mkdir -p $(dir $@)
+ rm -f $@
+ $(FILESLIST) $(TARGET_OUT_SYSTEM_EXT) > $(@:.txt=.json)
+ $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
system_extimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,system_ext)
@@ -3071,10 +3169,10 @@
$(INSTALLED_FILES_FILE_ODM): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_ODM)
$(INSTALLED_FILES_FILE_ODM) : $(INTERNAL_ODMIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
@echo Installed file list: $@
- @mkdir -p $(dir $@)
- @rm -f $@
- $(hide) $(FILESLIST) $(TARGET_OUT_ODM) > $(@:.txt=.json)
- $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+ mkdir -p $(dir $@)
+ rm -f $@
+ $(FILESLIST) $(TARGET_OUT_ODM) > $(@:.txt=.json)
+ $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
odmimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,odm)
@@ -3122,10 +3220,10 @@
$(INSTALLED_FILES_FILE_VENDOR_DLKM): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR_DLKM)
$(INSTALLED_FILES_FILE_VENDOR_DLKM) : $(INTERNAL_VENDOR_DLKMIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
@echo Installed file list: $@
- @mkdir -p $(dir $@)
- @rm -f $@
- $(hide) $(FILESLIST) $(TARGET_OUT_VENDOR_DLKM) > $(@:.txt=.json)
- $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+ mkdir -p $(dir $@)
+ rm -f $@
+ $(FILESLIST) $(TARGET_OUT_VENDOR_DLKM) > $(@:.txt=.json)
+ $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
vendor_dlkmimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,vendor_dlkm)
@@ -3173,10 +3271,10 @@
$(INSTALLED_FILES_FILE_ODM_DLKM): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_ODM_DLKM)
$(INSTALLED_FILES_FILE_ODM_DLKM) : $(INTERNAL_ODM_DLKMIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
@echo Installed file list: $@
- @mkdir -p $(dir $@)
- @rm -f $@
- $(hide) $(FILESLIST) $(TARGET_OUT_ODM_DLKM) > $(@:.txt=.json)
- $(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+ mkdir -p $(dir $@)
+ rm -f $@
+ $(FILESLIST) $(TARGET_OUT_ODM_DLKM) > $(@:.txt=.json)
+ $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
odm_dlkmimage_intermediates := \
$(call intermediates-dir-for,PACKAGING,odm_dlkm)
@@ -3793,13 +3891,13 @@
ifneq ($(check_vintf_system_deps),)
check_vintf_has_system := true
-check_vintf_system_log := $(intermediates)/check_vintf_system_log
+check_vintf_system_log := $(intermediates)/check_vintf_system.log
check_vintf_all_deps += $(check_vintf_system_log)
$(check_vintf_system_log): $(HOST_OUT_EXECUTABLES)/checkvintf $(check_vintf_system_deps)
@( $< --check-one --dirmap /system:$(TARGET_OUT) > $@ 2>&1 ) || ( cat $@ && exit 1 )
check_vintf_system_log :=
-vintffm_log := $(intermediates)/vintffm_log
+vintffm_log := $(intermediates)/vintffm.log
check_vintf_all_deps += $(vintffm_log)
$(vintffm_log): $(HOST_OUT_EXECUTABLES)/vintffm $(check_vintf_system_deps)
@( $< --check --dirmap /system:$(TARGET_OUT) \
@@ -3812,7 +3910,7 @@
check_vintf_vendor_deps := $(filter $(TARGET_OUT_VENDOR)/etc/vintf/%, $(check_vintf_common_srcs))
ifneq ($(check_vintf_vendor_deps),)
check_vintf_has_vendor := true
-check_vintf_vendor_log := $(intermediates)/check_vintf_vendor_log
+check_vintf_vendor_log := $(intermediates)/check_vintf_vendor.log
check_vintf_all_deps += $(check_vintf_vendor_log)
# Check vendor SKU=(empty) case when:
# - DEVICE_MANIFEST_FILE is not empty; OR
@@ -3830,24 +3928,6 @@
endif # check_vintf_vendor_deps
check_vintf_vendor_deps :=
-# -- Check VINTF compatibility of build.
-# Skip partial builds; only check full builds. Only check if:
-# - PRODUCT_ENFORCE_VINTF_MANIFEST is true
-# - system / vendor VINTF metadata exists
-# - Building product / system_ext / odm images if board has product / system_ext / odm images
-ifeq ($(PRODUCT_ENFORCE_VINTF_MANIFEST),true)
-ifeq ($(check_vintf_has_system),true)
-ifeq ($(check_vintf_has_vendor),true)
-ifeq ($(filter true,$(BUILDING_ODM_IMAGE)),$(filter true,$(BOARD_USES_ODMIMAGE)))
-ifeq ($(filter true,$(BUILDING_PRODUCT_IMAGE)),$(filter true,$(BOARD_USES_PRODUCTIMAGE)))
-ifeq ($(filter true,$(BUILDING_SYSTEM_EXT_IMAGE)),$(filter true,$(BOARD_USES_SYSTEM_EXTIMAGE)))
-
-check_vintf_compatible_log := $(intermediates)/check_vintf_compatible_log
-check_vintf_all_deps += $(check_vintf_compatible_log)
-
-check_vintf_compatible_args :=
-check_vintf_compatible_deps := $(check_vintf_common_srcs)
-
# -- Kernel version and configurations.
ifeq ($(PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS),true)
@@ -3878,6 +3958,9 @@
or (2) extracting kernel configuration and defining BOARD_KERNEL_CONFIG_FILE and \
BOARD_KERNEL_VERSION manually; or (3) unsetting PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS \
manually.)
+# Clear their values to indicate that these two files do not exist.
+BUILT_KERNEL_CONFIGS_FILE :=
+BUILT_KERNEL_VERSION_FILE :=
else
# Tools for decompression that is not in PATH.
@@ -3901,9 +3984,31 @@
endif # INSTALLED_KERNEL_TARGET
+endif # PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS
+
+# -- Check VINTF compatibility of build.
+# Skip partial builds; only check full builds. Only check if:
+# - PRODUCT_ENFORCE_VINTF_MANIFEST is true
+# - system / vendor VINTF metadata exists
+# - Building product / system_ext / odm images if board has product / system_ext / odm images
+ifeq ($(PRODUCT_ENFORCE_VINTF_MANIFEST),true)
+ifeq ($(check_vintf_has_system),true)
+ifeq ($(check_vintf_has_vendor),true)
+ifeq ($(filter true,$(BUILDING_ODM_IMAGE)),$(filter true,$(BOARD_USES_ODMIMAGE)))
+ifeq ($(filter true,$(BUILDING_PRODUCT_IMAGE)),$(filter true,$(BOARD_USES_PRODUCTIMAGE)))
+ifeq ($(filter true,$(BUILDING_SYSTEM_EXT_IMAGE)),$(filter true,$(BOARD_USES_SYSTEM_EXTIMAGE)))
+
+check_vintf_compatible_log := $(intermediates)/check_vintf_compatible.log
+check_vintf_all_deps += $(check_vintf_compatible_log)
+
+check_vintf_compatible_args :=
+check_vintf_compatible_deps := $(check_vintf_common_srcs)
+
+ifeq ($(PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS),true)
+ifneq (,$(BUILT_KERNEL_VERSION_FILE)$(BUILT_KERNEL_CONFIGS_FILE))
check_vintf_compatible_args += --kernel $(BUILT_KERNEL_VERSION_FILE):$(BUILT_KERNEL_CONFIGS_FILE)
check_vintf_compatible_deps += $(BUILT_KERNEL_CONFIGS_FILE) $(BUILT_KERNEL_VERSION_FILE)
-
+endif # BUILT_KERNEL_VERSION_FILE != "" || BUILT_KERNEL_CONFIGS_FILE != ""
endif # PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS
check_vintf_compatible_args += \
@@ -3934,7 +4039,8 @@
$(if $(DEVICE_MANIFEST_SKUS),,EMPTY_VENDOR_SKU_PLACEHOLDER)) \
$(DEVICE_MANIFEST_SKUS)
$(check_vintf_compatible_log): $(HOST_OUT_EXECUTABLES)/checkvintf $(check_vintf_compatible_deps)
- @echo -n -e 'Deps: \n ' > $@
+ @echo "PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS=$(PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS)" > $@
+ @echo -n -e 'Deps: \n ' >> $@
@sed 's/ /\n /g' <<< "$(PRIVATE_CHECK_VINTF_DEPS)" >> $@
@echo -n -e 'Args: \n ' >> $@
@cat <<< "$(PRIVATE_CHECK_VINTF_ARGS)" >> $@
@@ -3990,7 +4096,7 @@
$(CHECK_PARTITION_SIZES) $(if $(2),--logfile $(2),-v) $(1)
endef
-check_all_partition_sizes_log := $(call intermediates-dir-for,PACKAGING,check-all-partition-sizes)/check_all_partition_sizes_log
+check_all_partition_sizes_log := $(call intermediates-dir-for,PACKAGING,check-all-partition-sizes)/check_all_partition_sizes.log
droid_targets: $(check_all_partition_sizes_log)
$(call dist-for-goals, droid_targets, $(check_all_partition_sizes_log))
@@ -4110,6 +4216,8 @@
mksquashfsimage.sh \
mkuserimg_mke2fs \
ota_from_target_files \
+ repack_bootimg \
+ secilc \
sefcontext_compile \
sgdisk \
shflags \
@@ -4268,6 +4376,13 @@
$(hide) echo 'mkbootimg_args=$(BOARD_MKBOOTIMG_ARGS)' >> $@
$(hide) echo 'recovery_mkbootimg_args=$(BOARD_RECOVERY_MKBOOTIMG_ARGS)' >> $@
$(hide) echo 'mkbootimg_version_args=$(INTERNAL_MKBOOTIMG_VERSION_ARGS)' >> $@
+ifdef BOARD_GKI_SIGNING_KEY_PATH
+ $(hide) echo 'gki_signing_key_path=$(BOARD_GKI_SIGNING_KEY_PATH)' >> $@
+ $(hide) echo 'gki_signing_algorithm=$(BOARD_GKI_SIGNING_ALGORITHM)' >> $@
+endif
+ifdef BOARD_GKI_SIGNING_SIGNATURE_ARGS
+ $(hide) echo 'gki_signing_signature_args=$(BOARD_GKI_SIGNING_SIGNATURE_ARGS)' >> $@
+endif
$(hide) echo "multistage_support=1" >> $@
$(hide) echo "blockimgdiff_versions=3,4" >> $@
ifeq ($(PRODUCT_BUILD_GENERIC_OTA_PACKAGE),true)
@@ -4499,11 +4614,26 @@
(cd $(1); find . -type d | sed 's,$$,/,'; find . \! -type d) | cut -c 3- | sort | sed 's,^,$(2),' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC) -R "$(2)"
endef
-# Filter out vendor from the list for AOSP targets.
-# $(1): list
define filter-out-missing-vendor
$(if $(INSTALLED_VENDORIMAGE_TARGET),$(1),$(filter-out vendor,$(1)))
endef
+define filter-out-missing-vendor_dlkm
+$(if $(INSTALLED_VENDOR_DLKMIMAGE_TARGET),$(1),$(filter-out vendor_dlkm,$(1)))
+endef
+define filter-out-missing-odm
+$(if $(INSTALLED_ODMIMAGE_TARGET),$(1),$(filter-out odm,$(1)))
+endef
+define filter-out-missing-odm_dlkm
+$(if $(INSTALLED_ODM_DLKMIMAGE_TARGET),$(1),$(filter-out odm_dlkm,$(1)))
+endef
+# Filter out vendor, vendor_dlkm, odm, and odm_dlkm from the list for AOSP targets.
+# $(1): list
+define filter-out-missing-partitions
+$(call filter-out-missing-vendor,\
+ $(call filter-out-missing-vendor_dlkm,\
+ $(call filter-out-missing-odm,\
+ $(call filter-out-missing-odm_dlkm,$(1)))))
+endef
# Information related to dynamic partitions and virtual A/B. This information
# is needed for building the super image (see dump-super-image-info) and
@@ -4517,6 +4647,8 @@
echo "lpmake=$(notdir $(LPMAKE))" >> $(1)
$(if $(filter true,$(PRODUCT_BUILD_SUPER_PARTITION)), $(if $(BOARD_SUPER_PARTITION_SIZE), \
echo "build_super_partition=true" >> $(1)))
+ $(if $(BUILDING_SUPER_EMPTY_IMAGE), \
+ echo "build_super_empty_partition=true" >> $(1))
$(if $(filter true,$(BOARD_BUILD_RETROFIT_DYNAMIC_PARTITIONS_OTA_PACKAGE)), \
echo "build_retrofit_dynamic_partitions_ota_package=true" >> $(1))
echo "super_metadata_device=$(BOARD_SUPER_PARTITION_METADATA_DEVICE)" >> $(1)
@@ -4525,13 +4657,13 @@
$(foreach device,$(BOARD_SUPER_PARTITION_BLOCK_DEVICES), \
echo "super_$(device)_device_size=$(BOARD_SUPER_PARTITION_$(call to-upper,$(device))_DEVICE_SIZE)" >> $(1);)
$(if $(BOARD_SUPER_PARTITION_PARTITION_LIST), \
- echo "dynamic_partition_list=$(call filter-out-missing-vendor,$(BOARD_SUPER_PARTITION_PARTITION_LIST))" >> $(1))
+ echo "dynamic_partition_list=$(call filter-out-missing-partitions,$(BOARD_SUPER_PARTITION_PARTITION_LIST))" >> $(1))
$(if $(BOARD_SUPER_PARTITION_GROUPS),
echo "super_partition_groups=$(BOARD_SUPER_PARTITION_GROUPS)" >> $(1))
$(foreach group,$(BOARD_SUPER_PARTITION_GROUPS), \
echo "super_$(group)_group_size=$(BOARD_$(call to-upper,$(group))_SIZE)" >> $(1); \
$(if $(BOARD_$(call to-upper,$(group))_PARTITION_LIST), \
- echo "super_$(group)_partition_list=$(call filter-out-missing-vendor,$(BOARD_$(call to-upper,$(group))_PARTITION_LIST))" >> $(1);))
+ echo "super_$(group)_partition_list=$(call filter-out-missing-partitions,$(BOARD_$(call to-upper,$(group))_PARTITION_LIST))" >> $(1);))
$(if $(filter true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED)), \
echo "build_non_sparse_super_partition=true" >> $(1))
$(if $(filter true,$(TARGET_USERIMAGES_SPARSE_F2FS_DISABLED)), \
@@ -4572,11 +4704,15 @@
ifdef BUILDING_VENDOR_BOOT_IMAGE
$(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FILES)
$(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
+ # The vendor ramdisk may be built from the recovery ramdisk.
+ ifeq (true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
+ $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP)
+ endif
endif
ifdef BUILDING_RECOVERY_IMAGE
# TODO(b/30414428): Can't depend on INTERNAL_RECOVERYIMAGE_FILES alone like other
- # BUILD_TARGET_FILES_PACKAGE dependencies because currently there're cp/rsync/rm
+ # BUILT_TARGET_FILES_PACKAGE dependencies because currently there're cp/rsync/rm
# commands in build-recoveryimage-target, which would touch the files under
# TARGET_RECOVERY_OUT and race with packaging target-files.zip.
ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
@@ -4624,13 +4760,13 @@
ifdef BUILDING_VENDOR_DLKM_IMAGE
$(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
-else ifdef BOARD_PREBUILT_VENDOR_DLKIMMAGE
+else ifdef BOARD_PREBUILT_VENDOR_DLKMIMAGE
$(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_VENDOR_DLKMIMAGE_TARGET)
endif
ifdef BUILDING_ODM_DLKM_IMAGE
$(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_ODM_DLKMIMAGE_FILES)
-else ifdef BOARD_ODM_VENDOR_DLKIMMAGE
+else ifdef BOARD_PREBUILT_ODM_DLKMIMAGE
$(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_ODM_DLKMIMAGE_TARGET)
endif
@@ -4767,6 +4903,9 @@
ifdef INSTALLED_DTBIMAGE_TARGET
cp $(INSTALLED_DTBIMAGE_TARGET) $(zip_root)/VENDOR_BOOT/dtb
endif
+ifdef INTERNAL_VENDOR_BOOTCONFIG_TARGET
+ cp $(INTERNAL_VENDOR_BOOTCONFIG_TARGET) $(zip_root)/VENDOR_BOOT/vendor_bootconfig
+endif
ifdef BOARD_KERNEL_BASE
echo "$(BOARD_KERNEL_BASE)" > $(zip_root)/VENDOR_BOOT/base
endif
@@ -4774,9 +4913,9 @@
echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/VENDOR_BOOT/pagesize
endif
echo "$(INTERNAL_KERNEL_CMDLINE)" > $(zip_root)/VENDOR_BOOT/vendor_cmdline
-ifdef BOARD_VENDOR_RAMDISK_FRAGMENTS
- echo "$(BOARD_VENDOR_RAMDISK_FRAGMENTS)" > "$(zip_root)/VENDOR_BOOT/vendor_ramdisk_fragments"
- $(foreach vendor_ramdisk_fragment,$(BOARD_VENDOR_RAMDISK_FRAGMENTS), \
+ifdef INTERNAL_VENDOR_RAMDISK_FRAGMENTS
+ echo "$(INTERNAL_VENDOR_RAMDISK_FRAGMENTS)" > "$(zip_root)/VENDOR_BOOT/vendor_ramdisk_fragments"
+ $(foreach vendor_ramdisk_fragment,$(INTERNAL_VENDOR_RAMDISK_FRAGMENTS), \
mkdir -p $(zip_root)/VENDOR_BOOT/RAMDISK_FRAGMENTS/$(vendor_ramdisk_fragment); \
echo "$(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS)" > "$(zip_root)/VENDOR_BOOT/RAMDISK_FRAGMENTS/$(vendor_ramdisk_fragment)/mkbootimg_args"; \
$(eval prebuilt_ramdisk := $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).PREBUILT)) \
@@ -4786,7 +4925,7 @@
$(VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).STAGING_DIR), \
$(zip_root)/VENDOR_BOOT/RAMDISK_FRAGMENTS/$(vendor_ramdisk_fragment)/RAMDISK); \
))
-endif # BOARD_VENDOR_RAMDISK_FRAGMENTS != ""
+endif # INTERNAL_VENDOR_RAMDISK_FRAGMENTS != ""
endif # INSTALLED_VENDOR_BOOTIMAGE_TARGET
ifdef BUILDING_SYSTEM_IMAGE
@# Contents of the system image
@@ -5028,7 +5167,7 @@
@# help early validation of the .zip file while uploading it.
$(hide) find $(zip_root)/META | sort >$@.list
$(hide) find $(zip_root) -path $(zip_root)/META -prune -o -print | sort >>$@.list
- $(hide) $(SOONG_ZIP) -d -o $@ -C $(zip_root) -l $@.list
+ $(hide) $(SOONG_ZIP) -d -o $@ -C $(zip_root) -r $@.list
.PHONY: target-files-package
target-files-package: $(BUILT_TARGET_FILES_PACKAGE)
@@ -5218,10 +5357,18 @@
# Any dependencies are set up later in build/make/core/main.mk.
JACOCO_REPORT_CLASSES_ALL := $(PRODUCT_OUT)/jacoco-report-classes-all.jar
+$(JACOCO_REPORT_CLASSES_ALL): PRIVATE_TARGET_JACOCO_DIR := $(call intermediates-dir-for,PACKAGING,jacoco)
+$(JACOCO_REPORT_CLASSES_ALL): PRIVATE_HOST_JACOCO_DIR := $(call intermediates-dir-for,PACKAGING,jacoco,HOST)
+$(JACOCO_REPORT_CLASSES_ALL): PRIVATE_TARGET_PROGUARD_USAGE_DIR := $(call intermediates-dir-for,PACKAGING,proguard_usage)
+$(JACOCO_REPORT_CLASSES_ALL): PRIVATE_HOST_PROGUARD_USAGE_DIR := $(call intermediates-dir-for,PACKAGING,proguard_usage,HOST)
$(JACOCO_REPORT_CLASSES_ALL) :
@echo "Collecting uninstrumented classes"
- find $(TARGET_COMMON_OUT_ROOT) $(HOST_COMMON_OUT_ROOT) -name "jacoco-report-classes.jar" -o -name "proguard_usage.zip" 2>/dev/null | sort > $@.list
- $(SOONG_ZIP) -o $@ -L 0 -C $(OUT_DIR) -P out -l $@.list
+ mkdir -p $(PRIVATE_TARGET_JACOCO_DIR) $(PRIVATE_HOST_JACOCO_DIR) $(PRIVATE_TARGET_PROGUARD_USAGE_DIR) $(PRIVATE_HOST_PROGUARD_USAGE_DIR)
+ $(SOONG_ZIP) -o $@ -L 0 \
+ -C $(PRIVATE_TARGET_JACOCO_DIR) -P out/target/common/obj -D $(PRIVATE_TARGET_JACOCO_DIR) \
+ -C $(PRIVATE_HOST_JACOCO_DIR) -P out/target/common/obj -D $(PRIVATE_HOST_JACOCO_DIR) \
+ -C $(PRIVATE_TARGET_PROGUARD_USAGE_DIR) -P out/target/common/obj -D $(PRIVATE_TARGET_PROGUARD_USAGE_DIR) \
+ -C $(PRIVATE_HOST_PROGUARD_USAGE_DIR) -P out/target/common/obj -D $(PRIVATE_HOST_PROGUARD_USAGE_DIR)
ifeq (,$(TARGET_BUILD_UNBUNDLED))
$(JACOCO_REPORT_CLASSES_ALL): $(INTERNAL_ALLIMAGES_FILES)
@@ -5237,13 +5384,11 @@
ifeq (,$(TARGET_BUILD_UNBUNDLED))
$(PROGUARD_DICT_ZIP): $(INTERNAL_ALLIMAGES_FILES) $(updater_dep)
endif
-$(PROGUARD_DICT_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,proguard)/filelist
+$(PROGUARD_DICT_ZIP): PRIVATE_PACKAGING_DIR := $(call intermediates-dir-for,PACKAGING,proguard_dictionary)
$(PROGUARD_DICT_ZIP): $(SOONG_ZIP)
@echo "Packaging Proguard obfuscation dictionary files."
- mkdir -p $(dir $@) $(TARGET_OUT_COMMON_INTERMEDIATES)/APPS $(dir $(PRIVATE_LIST_FILE))
- find $(TARGET_OUT_COMMON_INTERMEDIATES)/APPS -name proguard_dictionary | \
- sed -e 's/\(.*\)\/proguard_dictionary/\0\n\1\/classes.jar/' > $(PRIVATE_LIST_FILE)
- $(SOONG_ZIP) --ignore_missing_files -d -o $@ -C $(OUT_DIR)/.. -l $(PRIVATE_LIST_FILE)
+ mkdir -p $(dir $@) $(PRIVATE_PACKAGING_DIR)
+ $(SOONG_ZIP) --ignore_missing_files -d -o $@ -C $(PRIVATE_PACKAGING_DIR) -P out/target/common/obj -D $(PRIVATE_PACKAGING_DIR)
#------------------------------------------------------------------
# A zip of Proguard usage files.
@@ -5264,11 +5409,12 @@
$(INSTALLED_ODM_DLKMIMAGE_TARGET) \
$(updater_dep)
endif
-$(PROGUARD_USAGE_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,proguard_usage)/filelist
+$(PROGUARD_USAGE_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,proguard_usage.zip)/filelist
+$(PROGUARD_USAGE_ZIP): PRIVATE_PACKAGING_DIR := $(call intermediates-dir-for,PACKAGING,proguard_usage)
$(PROGUARD_USAGE_ZIP): $(MERGE_ZIPS)
@echo "Packaging Proguard usage files."
- mkdir -p $(dir $@) $(TARGET_OUT_COMMON_INTERMEDIATES)/APPS $(dir $(PRIVATE_LIST_FILE))
- find $(TARGET_OUT_COMMON_INTERMEDIATES)/APPS -name proguard_usage.zip > $(PRIVATE_LIST_FILE)
+ mkdir -p $(dir $@) $(PRIVATE_PACKAGING_DIR) $(dir $(PRIVATE_LIST_FILE))
+ find $(PRIVATE_PACKAGING_DIR) -name proguard_usage.zip > $(PRIVATE_LIST_FILE)
$(MERGE_ZIPS) $@ @$(PRIVATE_LIST_FILE)
ifeq (true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))
@@ -5379,9 +5525,7 @@
# -----------------------------------------------------------------
# super empty image
-
-ifeq (true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))
-ifneq ($(BOARD_SUPER_PARTITION_SIZE),)
+ifdef BUILDING_SUPER_EMPTY_IMAGE
INSTALLED_SUPERIMAGE_EMPTY_TARGET := $(PRODUCT_OUT)/super_empty.img
$(INSTALLED_SUPERIMAGE_EMPTY_TARGET): intermediates := $(call intermediates-dir-for,PACKAGING,super_empty)
@@ -5395,8 +5539,7 @@
$(call dist-for-goals,dist_files,$(INSTALLED_SUPERIMAGE_EMPTY_TARGET))
-endif # BOARD_SUPER_PARTITION_SIZE != ""
-endif # PRODUCT_USE_DYNAMIC_PARTITIONS == "true"
+endif # BUILDING_SUPER_EMPTY_IMAGE
# -----------------------------------------------------------------
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 3a0c0f1..c9fcf47 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -33,7 +33,27 @@
$(call add_soong_config_namespace,art_module)
SOONG_CONFIG_art_module += source_build
endif
-SOONG_CONFIG_art_module_source_build ?= true
+ifneq (,$(findstring .android.art,$(TARGET_BUILD_APPS)))
+ # Build ART modules from source if they are listed in TARGET_BUILD_APPS.
+ SOONG_CONFIG_art_module_source_build := true
+else ifneq (,$(filter true,$(NATIVE_COVERAGE) $(CLANG_COVERAGE)))
+ # Always build ART APEXes from source in coverage builds since the prebuilts
+ # aren't built with instrumentation.
+ # TODO(b/172480617): Find another solution for this.
+ SOONG_CONFIG_art_module_source_build := true
+else ifneq (,$(SANITIZE_TARGET)$(SANITIZE_HOST))
+ # Prebuilts aren't built with sanitizers either.
+ SOONG_CONFIG_art_module_source_build := true
+else ifneq (,$(PRODUCT_FUCHSIA))
+ # Fuchsia picks out ART internal packages that aren't available in the
+ # prebuilt.
+ SOONG_CONFIG_art_module_source_build := true
+else
+ # This sets the default for building ART APEXes from source rather than
+ # prebuilts (in packages/modules/ArtPrebuilt and prebuilt/module_sdk/art) in
+ # all other platform builds.
+ SOONG_CONFIG_art_module_source_build ?= true
+endif
# Apex build mode variables
ifdef APEX_BUILD_FOR_PRE_S_DEVICES
diff --git a/core/app_prebuilt_internal.mk b/core/app_prebuilt_internal.mk
index 5767996..fe04b84 100644
--- a/core/app_prebuilt_internal.mk
+++ b/core/app_prebuilt_internal.mk
@@ -92,34 +92,6 @@
endif
endif
-# Verify LOCAL_USES_LIBRARIES/LOCAL_OPTIONAL_USES_LIBRARIES
-# If LOCAL_ENFORCE_USES_LIBRARIES is not set, default to true if either of LOCAL_USES_LIBRARIES or
-# LOCAL_OPTIONAL_USES_LIBRARIES are specified.
-# Will change the default to true unconditionally in the future.
-ifndef LOCAL_ENFORCE_USES_LIBRARIES
- ifneq (,$(strip $(LOCAL_USES_LIBRARIES)$(LOCAL_OPTIONAL_USES_LIBRARIES)))
- LOCAL_ENFORCE_USES_LIBRARIES := true
- endif
-endif
-
-my_enforced_uses_libraries :=
-ifdef LOCAL_ENFORCE_USES_LIBRARIES
- my_enforced_uses_libraries := $(intermediates.COMMON)/enforce_uses_libraries.timestamp
- $(my_enforced_uses_libraries): PRIVATE_USES_LIBRARIES := $(LOCAL_USES_LIBRARIES)
- $(my_enforced_uses_libraries): PRIVATE_OPTIONAL_USES_LIBRARIES := $(LOCAL_OPTIONAL_USES_LIBRARIES)
- $(my_enforced_uses_libraries): $(BUILD_SYSTEM)/verify_uses_libraries.sh $(AAPT)
- $(my_enforced_uses_libraries): $(my_prebuilt_src_file)
- @echo Verifying uses-libraries: $<
- aapt_binary=$(AAPT) \
- uses_library_names="$(strip $(PRIVATE_USES_LIBRARIES))" \
- optional_uses_library_names="$(strip $(PRIVATE_OPTIONAL_USES_LIBRARIES))" \
- $(BUILD_SYSTEM)/verify_uses_libraries.sh $<
- touch $@
- $(built_module) : $(my_enforced_uses_libraries)
-endif
-
-dex_preopt_profile_src_file := $(my_prebuilt_src_file)
-
rs_compatibility_jni_libs :=
include $(BUILD_SYSTEM)/install_jni_libs.mk
@@ -197,6 +169,8 @@
endif
my_dex_jar := $(my_prebuilt_src_file)
+my_manifest_or_apk := $(my_prebuilt_src_file)
+dex_preopt_profile_src_file := $(my_prebuilt_src_file)
#######################################
# defines built_odex along with rule to install odex
@@ -237,6 +211,7 @@
$(built_module) : $(my_prebuilt_src_file) | $(ZIPALIGN) $(ZIP2ZIP) $(SIGNAPK_JAR)
$(transform-prebuilt-to-target)
$(uncompress-prebuilt-embedded-jni-libs)
+ $(remove-unwanted-prebuilt-embedded-jni-libs)
ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
$(uncompress-dexs)
endif # LOCAL_UNCOMPRESS_DEX
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 68f880f..c973997 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -533,13 +533,17 @@
ifndef LOCAL_IS_HOST_MODULE
# Rule to install the module's companion init.rc.
-my_init_rc := $(LOCAL_INIT_RC_$(my_32_64_bit_suffix)) $(LOCAL_INIT_RC)
+ifneq ($(strip $(LOCAL_FULL_INIT_RC)),)
+my_init_rc := $(LOCAL_FULL_INIT_RC)
+else
+my_init_rc := $(foreach rc,$(LOCAL_INIT_RC_$(my_32_64_bit_suffix)) $(LOCAL_INIT_RC),$(LOCAL_PATH)/$(rc))
+endif
ifneq ($(strip $(my_init_rc)),)
# Make doesn't support recovery as an output partition, but some Soong modules installed in recovery
# have init.rc files that need to be installed alongside them. Manually handle the case where the
# output file is in the recovery partition.
my_init_rc_path := $(if $(filter $(TARGET_RECOVERY_ROOT_OUT)/%,$(my_module_path)),$(TARGET_RECOVERY_ROOT_OUT)/system/etc,$(TARGET_OUT$(partition_tag)_ETC))
-my_init_rc_pairs := $(foreach rc,$(my_init_rc),$(LOCAL_PATH)/$(rc):$(my_init_rc_path)/init/$(notdir $(rc)))
+my_init_rc_pairs := $(foreach rc,$(my_init_rc),$(rc):$(my_init_rc_path)/init/$(notdir $(rc)))
my_init_rc_installed := $(foreach rc,$(my_init_rc_pairs),$(call word-colon,2,$(rc)))
# Make sure we only set up the copy rules once, even if another arch variant
diff --git a/core/binary.mk b/core/binary.mk
index fa36d64..cf47374 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -266,10 +266,7 @@
endif
endif
- ifneq (,$(filter armeabi armeabi-v7a,$(my_cpu_variant)))
- my_ndk_stl_static_lib += $(my_libcxx_libdir)/libunwind.a
- endif
-
+ my_ndk_stl_static_lib += $(my_libcxx_libdir)/libunwind.a
my_ldlibs += -ldl
else # LOCAL_NDK_STL_VARIANT must be none
# Do nothing.
@@ -311,6 +308,15 @@
my_api_level := $(call codename-or-sdk-to-sdk,$(BOARD_VNDK_VERSION))
endif
my_cflags += -D__ANDROID_VNDK__
+ ifneq ($(LOCAL_USE_VNDK_VENDOR),)
+ # Vendor modules have LOCAL_USE_VNDK_VENDOR when
+ # BOARD_VNDK_VERSION is defined.
+ my_cflags += -D__ANDROID_VENDOR__
+ else ifneq ($(LOCAL_USE_VNDK_PRODUCT),)
+ # Product modules have LOCAL_USE_VNDK_PRODUCT when
+ # PRODUCT_PRODUCT_VNDK_VERSION is defined.
+ my_cflags += -D__ANDROID_PRODUCT__
+ endif
endif
ifndef LOCAL_IS_HOST_MODULE
@@ -465,27 +471,6 @@
my_soong_problems += dotdot_incs
endif
-####################################################
-## Add FDO flags if FDO is turned on and supported
-## Please note that we will do option filtering during FDO build.
-## i.e. Os->O2, remove -fno-early-inline and -finline-limit.
-##################################################################
-my_fdo_build :=
-ifneq ($(filter true always, $(LOCAL_FDO_SUPPORT)),)
- ifeq ($(BUILD_FDO_INSTRUMENT),true)
- my_cflags += $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_FDO_INSTRUMENT_CFLAGS)
- my_ldflags += $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_FDO_INSTRUMENT_LDFLAGS)
- my_fdo_build := true
- else ifneq ($(filter true,$(BUILD_FDO_OPTIMIZE))$(filter always,$(LOCAL_FDO_SUPPORT)),)
- my_cflags += $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_FDO_OPTIMIZE_CFLAGS)
- my_fdo_build := true
- endif
- # Disable ccache (or other compiler wrapper) except gomacc, which
- # can handle -fprofile-use properly.
- my_cc_wrapper := $(filter $(GOMA_CC) $(RBE_WRAPPER),$(my_cc_wrapper))
- my_cxx_wrapper := $(filter $(GOMA_CC) $(RBE_WRAPPER),$(my_cxx_wrapper))
-endif
-
###########################################################
## Explicitly declare assembly-only __ASSEMBLY__ macro for
## assembly source
@@ -1473,12 +1458,6 @@
my_asflags := $(call convert-to-clang-flags,$(my_asflags))
my_ldflags := $(call convert-to-clang-flags,$(my_ldflags))
-ifeq ($(my_fdo_build), true)
- my_cflags := $(patsubst -Os,-O2,$(my_cflags))
- fdo_incompatible_flags := -fno-early-inlining -finline-limit=%
- my_cflags := $(filter-out $(fdo_incompatible_flags),$(my_cflags))
-endif
-
# No one should ever use this flag. On GCC it's mere presence will disable all
# warnings, even those that are specified after it (contrary to typical warning
# flag behavior). This circumvents CFLAGS_NO_OVERRIDE from forcibly enabling the
diff --git a/core/board_config.mk b/core/board_config.mk
index 725c0a5..9ae597e 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -25,6 +25,8 @@
_board_strip_readonly_list += BOARD_HAVE_BLUETOOTH
_board_strip_readonly_list += BOARD_INSTALLER_CMDLINE
_board_strip_readonly_list += BOARD_KERNEL_CMDLINE
+_board_strip_readonly_list += BOARD_BOOT_HEADER_VERSION
+_board_strip_readonly_list += BOARD_BOOTCONFIG
_board_strip_readonly_list += BOARD_KERNEL_BASE
_board_strip_readonly_list += BOARD_USES_GENERIC_AUDIO
_board_strip_readonly_list += BOARD_USES_RECOVERY_AS_BOOT
@@ -129,6 +131,7 @@
BUILD_BROKEN_PREBUILT_ELF_FILES \
BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW \
BUILD_BROKEN_USES_NETWORK \
+ BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE \
BUILD_BROKEN_VINTF_PRODUCT_COPY_FILES \
_build_broken_var_list += \
@@ -221,6 +224,10 @@
.KATI_READONLY := $(_board_strip_readonly_list)
INTERNAL_KERNEL_CMDLINE := $(BOARD_KERNEL_CMDLINE)
+ifneq (,$(BOARD_BOOTCONFIG))
+ INTERNAL_KERNEL_CMDLINE += bootconfig
+ INTERNAL_BOOTCONFIG := $(BOARD_BOOTCONFIG)
+endif
ifneq ($(filter %64,$(TARGET_ARCH)),)
TARGET_IS_64_BIT := true
@@ -323,7 +330,8 @@
###########################################
# Now we can substitute with the real value of TARGET_COPY_OUT_DEBUG_RAMDISK
ifneq (,$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT) \
- $(BOARD_GKI_NONAB_COMPAT) $(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT)))
+ $(BOARD_GKI_NONAB_COMPAT) $(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT) \
+ $(BOARD_USES_GENERIC_KERNEL_IMAGE)))
TARGET_COPY_OUT_DEBUG_RAMDISK := debug_ramdisk/first_stage_ramdisk
TARGET_COPY_OUT_VENDOR_DEBUG_RAMDISK := vendor_debug_ramdisk/first_stage_ramdisk
TARGET_COPY_OUT_TEST_HARNESS_RAMDISK := test_harness_ramdisk/first_stage_ramdisk
@@ -454,6 +462,25 @@
endif
.KATI_READONLY := BUILDING_VBMETA_IMAGE
+# Are we building a super_empty image
+BUILDING_SUPER_EMPTY_IMAGE :=
+ifeq ($(PRODUCT_BUILD_SUPER_EMPTY_IMAGE),)
+ ifeq (true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))
+ ifneq ($(BOARD_SUPER_PARTITION_SIZE),)
+ BUILDING_SUPER_EMPTY_IMAGE := true
+ endif
+ endif
+else ifeq ($(PRODUCT_BUILD_SUPER_EMPTY_IMAGE),true)
+ ifneq (true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))
+ $(error PRODUCT_BUILD_SUPER_EMPTY_IMAGE set to true, but PRODUCT_USE_DYNAMIC_PARTITIONS is not true)
+ endif
+ ifeq ($(BOARD_SUPER_PARTITION_SIZE),)
+ $(error PRODUCT_BUILD_SUPER_EMPTY_IMAGE set to true, but BOARD_SUPER_PARTITION_SIZE is not defined)
+ endif
+ BUILDING_SUPER_EMPTY_IMAGE := true
+endif
+.KATI_READONLY := BUILDING_SUPER_EMPTY_IMAGE
+
###########################################
# Now we can substitute with the real value of TARGET_COPY_OUT_VENDOR
ifeq ($(TARGET_COPY_OUT_VENDOR),$(_vendor_path_placeholder))
@@ -802,7 +829,14 @@
ifdef BOARD_VENDOR_RAMDISK_FRAGMENTS
$(error Should not set BOARD_VENDOR_RAMDISK_FRAGMENTS if not building vendor_boot image)
endif
-endif
+else # BUILDING_VENDOR_BOOT_IMAGE
+ ifneq (,$(call math_lt,$(BOARD_BOOT_HEADER_VERSION),4))
+ ifdef BOARD_VENDOR_RAMDISK_FRAGMENTS
+ $(error Should not set BOARD_VENDOR_RAMDISK_FRAGMENTS if \
+ BOARD_BOOT_HEADER_VERSION is less than 4)
+ endif
+ endif
+endif # BUILDING_VENDOR_BOOT_IMAGE
ifneq ($(words $(BOARD_VENDOR_RAMDISK_FRAGMENTS)),$(words $(sort $(BOARD_VENDOR_RAMDISK_FRAGMENTS))))
$(error BOARD_VENDOR_RAMDISK_FRAGMENTS has duplicate entries: $(BOARD_VENDOR_RAMDISK_FRAGMENTS))
diff --git a/core/build_id.rbc b/core/build_id.rbc
new file mode 100644
index 0000000..4f33833
--- /dev/null
+++ b/core/build_id.rbc
@@ -0,0 +1,21 @@
+
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file has been manually converted from build_id.mk
+def init(g):
+
+ # BUILD_ID is usually used to specify the branch name (like "MAIN") or a branch name and a release candidate
+ # (like "CRB01"). It must be a single word, and is capitalized by convention.
+ g["BUILD_ID"]="AOSP.MASTER"
\ No newline at end of file
diff --git a/core/clang/tidy.mk b/core/clang/tidy.mk
index 868f7bc..8a40878 100644
--- a/core/clang/tidy.mk
+++ b/core/clang/tidy.mk
@@ -36,7 +36,7 @@
)
endef
-# Default filter contains current directory $1 and DEFAULT_TIDY_HEADER_DIRS.
+# Default filter contains current directory $1 and optional DEFAULT_TIDY_HEADER_DIRS.
define default_tidy_header_filter
- -header-filter="($(subst $(space),,$1|$(DEFAULT_TIDY_HEADER_DIRS)))"
+ -header-filter=$(if $(DEFAULT_TIDY_HEADER_DIRS),"($1/|$(DEFAULT_TIDY_HEADER_DIRS))",$1/)
endef
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 019892e..faca97a 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -100,12 +100,12 @@
LOCAL_EXTRA_FULL_TEST_CONFIGS:=
LOCAL_EXTRACT_APK:=
LOCAL_EXTRACT_DPI_APK:=
-LOCAL_FDO_SUPPORT:=
LOCAL_FILE_CONTEXTS:=
LOCAL_FINDBUGS_FLAGS:=
LOCAL_FORCE_STATIC_EXECUTABLE:=
LOCAL_FULL_CLASSES_JACOCO_JAR:=
LOCAL_FULL_CLASSES_PRE_JACOCO_JAR:=
+LOCAL_FULL_INIT_RC:=
LOCAL_FULL_LIBS_MANIFEST_FILES:=
LOCAL_FULL_MANIFEST_FILE:=
LOCAL_FULL_TEST_CONFIG:=
diff --git a/core/combo/TARGET_linux-arm.mk b/core/combo/TARGET_linux-arm.mk
index e45c1a6..11c1944 100644
--- a/core/combo/TARGET_linux-arm.mk
+++ b/core/combo/TARGET_linux-arm.mk
@@ -64,7 +64,6 @@
endif
include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
-include $(BUILD_SYSTEM)/combo/fdo.mk
define $(combo_var_prefix)transform-shared-lib-to-toc
$(call _gen_toc_command_for_elf,$(1),$(2))
diff --git a/core/combo/TARGET_linux-arm64.mk b/core/combo/TARGET_linux-arm64.mk
index a3f59a7..5d481cb 100644
--- a/core/combo/TARGET_linux-arm64.mk
+++ b/core/combo/TARGET_linux-arm64.mk
@@ -39,7 +39,6 @@
endif
include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
-include $(BUILD_SYSTEM)/combo/fdo.mk
define $(combo_var_prefix)transform-shared-lib-to-toc
$(call _gen_toc_command_for_elf,$(1),$(2))
diff --git a/core/combo/TARGET_linux-x86.mk b/core/combo/TARGET_linux-x86.mk
index 2c4614b..acbae51 100644
--- a/core/combo/TARGET_linux-x86.mk
+++ b/core/combo/TARGET_linux-x86.mk
@@ -32,7 +32,6 @@
endif
include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
-include $(BUILD_SYSTEM)/combo/fdo.mk
define $(combo_var_prefix)transform-shared-lib-to-toc
$(call _gen_toc_command_for_elf,$(1),$(2))
diff --git a/core/combo/TARGET_linux-x86_64.mk b/core/combo/TARGET_linux-x86_64.mk
index d2172d6..9e7e363 100644
--- a/core/combo/TARGET_linux-x86_64.mk
+++ b/core/combo/TARGET_linux-x86_64.mk
@@ -32,7 +32,6 @@
endif
include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
-include $(BUILD_SYSTEM)/combo/fdo.mk
define $(combo_var_prefix)transform-shared-lib-to-toc
$(call _gen_toc_command_for_elf,$(1),$(2))
diff --git a/core/combo/fdo.mk b/core/combo/fdo.mk
deleted file mode 100644
index 8fb8fd3..0000000
--- a/core/combo/fdo.mk
+++ /dev/null
@@ -1,33 +0,0 @@
-#
-# Copyright (C) 2006 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Setup FDO related flags.
-
-$(combo_2nd_arch_prefix)TARGET_FDO_CFLAGS:=
-
-# Set BUILD_FDO_INSTRUMENT=true to turn on FDO instrumentation.
-# The profile will be generated on /sdcard/fdo_profile on the device.
-$(combo_2nd_arch_prefix)TARGET_FDO_INSTRUMENT_CFLAGS := -fprofile-generate=/sdcard/fdo_profile -DANDROID_FDO
-$(combo_2nd_arch_prefix)TARGET_FDO_INSTRUMENT_LDFLAGS := -lgcov -lgcc
-
-# Set TARGET_FDO_PROFILE_PATH to set a custom profile directory for your build.
-ifeq ($(strip $($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH)),)
- $(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH := vendor/google_data/fdo_profile
-endif
-
-$(combo_2nd_arch_prefix)TARGET_FDO_OPTIMIZE_CFLAGS := \
- -fprofile-use=$($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH) \
- -DANDROID_FDO -fprofile-correction -Wcoverage-mismatch -Wno-error
diff --git a/core/config.mk b/core/config.mk
index ed6429a..3c493df 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -149,15 +149,16 @@
2ND_TARGET_PROJECT_SYSTEM_INCLUDES \
,Project include variables have been removed)
$(KATI_obsolete_var TARGET_PREFER_32_BIT TARGET_PREFER_32_BIT_APPS TARGET_PREFER_32_BIT_EXECUTABLES)
-$(KATI_obsolete_var PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_WHITELIST,Use PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_ALLOW_LIST.)
-$(KATI_obsolete_var PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST,Use PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST.)
+$(KATI_obsolete_var PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_WHITELIST,Use PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_ALLOW_LIST)
+$(KATI_obsolete_var PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST,Use PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST)
$(KATI_obsolete_var COVERAGE_PATHS,Use NATIVE_COVERAGE_PATHS instead)
$(KATI_obsolete_var COVERAGE_EXCLUDE_PATHS,Use NATIVE_COVERAGE_EXCLUDE_PATHS instead)
-$(KATI_obsolete_var BOARD_VNDK_RUNTIME_DISABLE,VNDK-Lite is no longer supported.)
-$(KATI_obsolete_var LOCAL_SANITIZE_BLACKLIST,Use LOCAL_SANITIZE_BLOCKLIST instead.)
-$(KATI_deprecated_var BOARD_PLAT_PUBLIC_SEPOLICY_DIR,Use SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS instead.)
-$(KATI_deprecated_var BOARD_PLAT_PRIVATE_SEPOLICY_DIR,Use SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS instead.)
+$(KATI_obsolete_var BOARD_VNDK_RUNTIME_DISABLE,VNDK-Lite is no longer supported)
+$(KATI_obsolete_var LOCAL_SANITIZE_BLACKLIST,Use LOCAL_SANITIZE_BLOCKLIST instead)
+$(KATI_deprecated_var BOARD_PLAT_PUBLIC_SEPOLICY_DIR,Use SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS instead)
+$(KATI_deprecated_var BOARD_PLAT_PRIVATE_SEPOLICY_DIR,Use SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS instead)
$(KATI_obsolete_var TARGET_NO_VENDOR_BOOT,Use PRODUCT_BUILD_VENDOR_BOOT_IMAGE instead)
+$(KATI_obsolete_var PRODUCT_CHECK_ELF_FILES,Use BUILD_BROKEN_PREBUILT_ELF_FILES instead)
# Used to force goals to build. Only use for conditionally defined goals.
.PHONY: FORCE
@@ -1152,8 +1153,11 @@
dont_bother_goals := out \
product-graph dump-products
-ifeq ($(CALLED_FROM_SETUP),true)
+# Make ANDROID Soong config variables visible to Android.mk files, for
+# consistency with those defined in BoardConfig.mk files.
include $(BUILD_SYSTEM)/android_soong_config_vars.mk
+
+ifeq ($(CALLED_FROM_SETUP),true)
include $(BUILD_SYSTEM)/ninja_config.mk
include $(BUILD_SYSTEM)/soong_config.mk
endif
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index f39b84a..e0b8fde 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -153,12 +153,6 @@
my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
endif
-# Disable CFI for arm32 (b/35157333).
-ifneq ($(filter arm,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
- my_sanitize := $(filter-out cfi,$(my_sanitize))
- my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
-endif
-
# Also disable CFI if ASAN is enabled.
ifneq ($(filter address,$(my_sanitize)),)
my_sanitize := $(filter-out cfi,$(my_sanitize))
@@ -217,10 +211,12 @@
ifneq ($(filter memtag_heap,$(my_sanitize)),)
# Add memtag ELF note.
- ifneq ($(filter memtag_heap,$(my_sanitize_diag)),)
- my_whole_static_libraries += note_memtag_heap_sync
- else
- my_whole_static_libraries += note_memtag_heap_async
+ ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+ ifneq ($(filter memtag_heap,$(my_sanitize_diag)),)
+ my_whole_static_libraries += note_memtag_heap_sync
+ else
+ my_whole_static_libraries += note_memtag_heap_async
+ endif
endif
# This is all that memtag_heap does - it is not an actual -fsanitize argument.
# Remove it from the list.
diff --git a/core/cxx_stl_setup.mk b/core/cxx_stl_setup.mk
index f71ef72..0d557c7 100644
--- a/core/cxx_stl_setup.mk
+++ b/core/cxx_stl_setup.mk
@@ -82,15 +82,7 @@
endif
endif
else ifeq ($(my_cxx_stl),ndk)
- # Using an NDK STL. Handled in binary.mk, except for the unwinder.
- # TODO: Switch the NDK over to the LLVM unwinder for non-arm32 architectures.
- ifeq (arm,$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
- my_static_libraries += libunwind_llvm
- my_ldflags += -Wl,--exclude-libs,libunwind_llvm.a
- else
- my_static_libraries += libgcc_stripped
- my_ldflags += -Wl,--exclude-libs,libgcc_stripped.a
- endif
+ # Using an NDK STL. Handled in binary.mk.
else ifeq ($(my_cxx_stl),libstdc++)
$(error $(LOCAL_PATH): $(LOCAL_MODULE): libstdc++ is not supported)
else ifeq ($(my_cxx_stl),none)
diff --git a/core/definitions.mk b/core/definitions.mk
index 033ab30..b15ce84 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -556,7 +556,7 @@
$(foreach m, $(ALL_MODULES), \
$(eval ALL_MODULES.$(m).NOTICE_DEPS := \
$(sort \
- $(foreach d,$(ALL_MODULES.$(m).NOTICE_DEPS), \
+ $(foreach d,$(sort $(ALL_MODULES.$(m).NOTICE_DEPS)), \
$(_lookup.$(d)) \
) \
) \
@@ -578,7 +578,9 @@
define license-metadata-rule
$(strip $(eval _dir := $(call license-metadata-dir)))
$(strip $(eval _deps := $(sort $(filter-out $(_dir)/$(1).meta_lic,$(foreach d,$(ALL_MODULES.$(1).NOTICE_DEPS), $(_dir)/$(d).meta_lic)))))
-$(foreach b,$(sort $(ALL_MODULES.$(1).BUILT) $(ALL_MODULES.$(1).INSTALLED)),
+$(strip $(eval _notices := $(sort $(ALL_MODULES.$(1).NOTICES))))
+$(strip $(eval _tgts := $(sort $(ALL_MODULES.$(1).BUILT) $(ALL_MODULES.$(1).INSTALLED))))
+$(foreach b,$(_tgts),
$(_dir)/$(b).meta_module ::
mkdir -p $$(dir $$@)
echo $(_dir)/$(1).meta_lic >> $$@
@@ -587,31 +589,46 @@
)
$(_dir)/$(1).meta_lic: PRIVATE_KINDS := $(sort $(ALL_MODULES.$(1).LICENSE_KINDS))
$(_dir)/$(1).meta_lic: PRIVATE_CONDITIONS := $(sort $(ALL_MODULES.$(1).LICENSE_CONDITIONS))
-$(_dir)/$(1).meta_lic: PRIVATE_NOTICES := $(sort $(ALL_MODULES.$(1).NOTICES))
+$(_dir)/$(1).meta_lic: PRIVATE_NOTICES := $(_notices)
$(_dir)/$(1).meta_lic: PRIVATE_NOTICE_DEPS := $(_deps)
-$(_dir)/$(1).meta_lic: PRIVATE_TARGETS := $(sort $(ALL_MODULES.$(1).BUILT) $(ALL_MODULES.$(1).INSTALLED))
-$(_dir)/$(1).meta_lic: PRIVATE_IS_CONTAINER := $(sort $(ALL_MODULES.$(1).IS_CONTAINER))
-$(_dir)/$(1).meta_lic: PRIVATE_PACKAGE_NAME := $(ALL_MODULES.$(1).LICENSE_PACKAGE_NAME)
+$(_dir)/$(1).meta_lic: PRIVATE_TARGETS := $(_tgts)
+$(_dir)/$(1).meta_lic: PRIVATE_IS_CONTAINER := $(ALL_MODULES.$(1).IS_CONTAINER)
+$(_dir)/$(1).meta_lic: PRIVATE_PACKAGE_NAME := $(strip $(ALL_MODULES.$(1).LICENSE_PACKAGE_NAME))
$(_dir)/$(1).meta_lic: PRIVATE_INSTALL_MAP := $(sort $(ALL_MODULES.$(1).LICENSE_INSTALL_MAP))
-$(_dir)/$(1).meta_lic : $(_deps) $(ALL_MODULES.$(1).NOTICES) $(foreach b,$(sort $(ALL_MODULES.$(1).BUILT) $(ALL_MODULES.$(1).INSTALLED)), $(_dir)/$(b).meta_module) build/make/tools/build-license-metadata.sh
+$(_dir)/$(1).meta_lic : $(_deps) $(_notices) $(foreach b,$(_tgts), $(_dir)/$(b).meta_module) build/make/tools/build-license-metadata.sh
rm -f $$@
mkdir -p $$(dir $$@)
- build/make/tools/build-license-metadata.sh -k $$(PRIVATE_KINDS) -c $$(PRIVATE_CONDITIONS) -n $$(PRIVATE_NOTICES) -d $$(PRIVATE_NOTICE_DEPS) -m $$(PRIVATE_INSTALL_MAP) -t $$(PRIVATE_TARGETS) $$(if $$(filter-out false,$$(PRIVATE_IS_CONTAINER)),-is_container) -p $$(PRIVATE_PACKAGE_NAME) -o $$@
-
-$(1) : $(_dir)/$(1).meta_lic
-
-$(if $(ALL_MODULES.$(1).INSTALLED_NOTICE_FILE),$(ALL_MODULES.$(1).INSTALLED_NOTICE_FILE) : $(_dir)/$(1).meta_lic)
+ build/make/tools/build-license-metadata.sh -k $$(PRIVATE_KINDS) -c $$(PRIVATE_CONDITIONS) -n $$(PRIVATE_NOTICES) -d $$(PRIVATE_NOTICE_DEPS) -m $$(PRIVATE_INSTALL_MAP) -t $$(PRIVATE_TARGETS) $$(if $$(PRIVATE_IS_CONTAINER),-is_container) -p $$(PRIVATE_PACKAGE_NAME) -o $$@
.PHONY: $(1).meta_lic
$(1).meta_lic : $(_dir)/$(1).meta_lic
+$(strip $(eval _mifs := $(sort $(ALL_MODULES.$(1).MODULE_INSTALLED_FILENAMES))))
+$(strip $(eval _infs := $(sort $(ALL_MODULES.$(1).INSTALLED_NOTICE_FILE))))
+
+# Emit each installed notice file rule if it references the current module
+$(if $(_infs),$(foreach inf,$(_infs),
+$(if $(strip $(filter $(1),$(INSTALLED_NOTICE_FILES.$(inf).MODULE))),
+$(strip $(eval _mif := $(firstword $(foreach m,$(_mifs),$(if $(filter %/src/$(m).txt,$(inf)),$(m))))))
+
+$(inf) : $(_dir)/$(1).meta_lic
+$(inf): PRIVATE_INSTALLED_MODULE := $(_mif)
+$(inf) : PRIVATE_NOTICES := $(_notices)
+
+$(inf): $(_notices)
+ @echo Notice file: $$< -- $$@
+ mkdir -p $$(dir $$@)
+ awk 'FNR==1 && NR > 1 {print "\n"} {print}' $$(PRIVATE_NOTICES) > $$@
+
+)))
+
endef
###########################################################
## Declares a license metadata build rule for ALL_MODULES
###########################################################
define build-license-metadata
-$(foreach m,$(ALL_MODULES),$(eval $(call license-metadata-rule,$(m))))
+$(foreach m,$(sort $(ALL_MODULES)),$(eval $(call license-metadata-rule,$(m))))
endef
###########################################################
@@ -728,6 +745,42 @@
endef
###########################################################
+## The packaging directory for a module. Similar to intermediates, but
+## in a location that will be wiped by an m installclean.
+###########################################################
+
+# $(1): subdir in PACKAGING
+# $(2): target class, like "APPS"
+# $(3): target name, like "NotePad"
+# $(4): { HOST, HOST_CROSS, <empty (TARGET)>, <other non-empty (HOST)> }
+define packaging-dir-for
+$(strip \
+ $(eval _pdfClass := $(strip $(2))) \
+ $(if $(_pdfClass),, \
+ $(error $(LOCAL_PATH): Class not defined in call to generated-sources-dir-for)) \
+ $(eval _pdfName := $(strip $(3))) \
+ $(if $(_pdfName),, \
+ $(error $(LOCAL_PATH): Name not defined in call to generated-sources-dir-for)) \
+ $(call intermediates-dir-for,PACKAGING,$(1),$(4))/$(_pdfClass)/$(_pdfName)_intermediates \
+)
+endef
+
+# Uses LOCAL_MODULE_CLASS, LOCAL_MODULE, and LOCAL_IS_HOST_MODULE
+# to determine the packaging directory.
+#
+# $(1): subdir in PACKAGING
+define local-packaging-dir
+$(strip \
+ $(if $(strip $(LOCAL_MODULE_CLASS)),, \
+ $(error $(LOCAL_PATH): LOCAL_MODULE_CLASS not defined before call to local-generated-sources-dir)) \
+ $(if $(strip $(LOCAL_MODULE)),, \
+ $(error $(LOCAL_PATH): LOCAL_MODULE not defined before call to local-generated-sources-dir)) \
+ $(call packaging-dir-for,$(1),$(LOCAL_MODULE_CLASS),$(LOCAL_MODULE),$(if $(strip $(LOCAL_IS_HOST_MODULE)),HOST)) \
+)
+endef
+
+
+###########################################################
## Convert a list of short module names (e.g., "framework", "Browser")
## into the list of files that are built for those modules.
## NOTE: this won't return reliable results until after all
@@ -1695,7 +1748,6 @@
$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--start-group) \
$(PRIVATE_ALL_STATIC_LIBRARIES) \
$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
- $(if $(filter true,$(NATIVE_COVERAGE)),-lgcov) \
$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_HOST_LIBPROFILE_RT)) \
$(PRIVATE_ALL_SHARED_LIBRARIES) \
-o $@ \
@@ -1735,7 +1787,6 @@
$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_COVERAGE_LIB)) \
$(PRIVATE_TARGET_LIBCRT_BUILTINS) \
- $(PRIVATE_TARGET_LIBATOMIC) \
$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
$(PRIVATE_LDFLAGS) \
$(PRIVATE_ALL_SHARED_LIBRARIES) \
@@ -1770,7 +1821,6 @@
$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_COVERAGE_LIB)) \
$(PRIVATE_TARGET_LIBCRT_BUILTINS) \
- $(PRIVATE_TARGET_LIBATOMIC) \
$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
$(PRIVATE_LDFLAGS) \
$(PRIVATE_ALL_SHARED_LIBRARIES) \
@@ -1814,7 +1864,6 @@
$(filter %libc.a %libc.hwasan.a,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
$(filter %libc_nomalloc.a %libc_nomalloc.hwasan.a,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_COVERAGE_LIB)) \
- $(PRIVATE_TARGET_LIBATOMIC) \
$(filter %libcompiler_rt.a %libcompiler_rt.hwasan.a,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
$(PRIVATE_TARGET_LIBCRT_BUILTINS) \
-Wl,--end-group \
@@ -1842,7 +1891,6 @@
$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--start-group) \
$(PRIVATE_ALL_STATIC_LIBRARIES) \
$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
- $(if $(filter true,$(NATIVE_COVERAGE)),-lgcov) \
$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_HOST_LIBPROFILE_RT)) \
$(PRIVATE_ALL_SHARED_LIBRARIES) \
$(foreach path,$(PRIVATE_RPATHS), \
@@ -2138,6 +2186,17 @@
$(hide) $(call commit-change-for-toc,$@)
endef
+# Runs jarjar on an input file. Jarjar doesn't exit with a nonzero return code
+# when there is a syntax error in a rules file and doesn't write the output
+# file, so remove the output file before running jarjar and check that it
+# exists after running jarjar.
+define transform-jarjar
+echo $($(PRIVATE_PREFIX)DISPLAY) JarJar: $@
+rm -f $@
+$(JAVA) -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
+[ -e $@ ] || (echo "Missing output file"; exit 1)
+endef
+
# Moves $1.tmp to $1 if necessary. This is designed to be used with
# .KATI_RESTAT. For kati, this function doesn't update the timestamp
# of $1 when $1.tmp is identical to $1 so that ninja won't rebuild
@@ -2385,14 +2444,19 @@
#
define uncompress-prebuilt-embedded-jni-libs
if (zipinfo $@ 'lib/*.so' 2>/dev/null | grep -v ' stor ' >/dev/null) ; then \
- $(ZIP2ZIP) -i $@ -o $@.tmp -0 'lib/**/*.so' \
- $(if $(PRIVATE_EMBEDDED_JNI_LIBS), \
- -x 'lib/**/*.so' \
- $(addprefix -X ,$(PRIVATE_EMBEDDED_JNI_LIBS))) && \
- mv -f $@.tmp $@ ; \
+ $(ZIP2ZIP) -i $@ -o $@.tmp -0 'lib/**/*.so' && mv -f $@.tmp $@ ; \
fi
endef
+# Remove unwanted shared JNI libraries embedded in an apk.
+#
+define remove-unwanted-prebuilt-embedded-jni-libs
+ $(if $(PRIVATE_EMBEDDED_JNI_LIBS), \
+ $(ZIP2ZIP) -i $@ -o $@.tmp \
+ -x 'lib/**/*.so' $(addprefix -X ,$(PRIVATE_EMBEDDED_JNI_LIBS)) && \
+ mv -f $@.tmp $@)
+endef
+
# TODO(joeo): If we can ever upgrade to post 3.81 make and get the
# new prebuilt rules to work, we should change this to copy the
# resources to the out directory and then copy the resources.
@@ -2754,7 +2818,8 @@
$(R8_DEBUG_MODE) \
$(PRIVATE_PROGUARD_FLAGS) \
$(addprefix -injars , $(PRIVATE_EXTRA_INPUT_JAR)) \
- $(PRIVATE_DX_FLAGS)
+ $(PRIVATE_DX_FLAGS) \
+ -ignorewarnings
$(hide) touch $(PRIVATE_PROGUARD_DICTIONARY)
endef
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index 06e2fb7..2762b44 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -20,6 +20,22 @@
# The default value for LOCAL_DEX_PREOPT
DEX_PREOPT_DEFAULT ?= $(ENABLE_PREOPT)
+# Whether to fail immediately if verify_uses_libraries check fails, or to keep
+# going and restrict dexpreopt to not compile any code for the failed module.
+#
+# The intended use case for this flag is to have a smoother migration path for
+# the Java modules that need to add <uses-library> information in their build
+# files. The flag allows you to quickly silence build errors. This flag should be
+# used with caution and only as a temporary measure, as it masks real errors
+# and affects performance.
+ifndef RELAX_USES_LIBRARY_CHECK
+ RELAX_USES_LIBRARY_CHECK := $(if \
+ $(filter true,$(PRODUCT_BROKEN_VERIFY_USES_LIBRARIES)),true,false)
+else
+ # Let the environment variable override PRODUCT_BROKEN_VERIFY_USES_LIBRARIES.
+endif
+.KATI_READONLY := RELAX_USES_LIBRARY_CHECK
+
# The default filter for which files go into the system_other image (if it is
# being used). Note that each pattern p here matches both '/<p>' and /system/<p>'.
# To bundle everything one should set this to '%'.
@@ -31,6 +47,9 @@
product/app/% \
product/priv-app/% \
+# Global switch to control if updatable boot jars are included in dexpreopt.
+DEX_PREOPT_WITH_UPDATABLE_BCP := true
+
# Conditional to building on linux, as dex2oat currently does not work on darwin.
ifeq ($(HOST_OS),linux)
ifeq (eng,$(TARGET_BUILD_VARIANT))
@@ -76,6 +95,7 @@
$(call add_json_bool, DisablePreoptBootImages, $(call invert_bool,$(ENABLE_PREOPT_BOOT_IMAGES)))
$(call add_json_list, DisablePreoptModules, $(DEXPREOPT_DISABLED_MODULES))
$(call add_json_bool, OnlyPreoptBootImageAndSystemServer, $(filter true,$(WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY)))
+ $(call add_json_bool, PreoptWithUpdatableBcp, $(filter true,$(DEX_PREOPT_WITH_UPDATABLE_BCP)))
$(call add_json_bool, UseArtImage, $(filter true,$(DEXPREOPT_USE_ART_IMAGE)))
$(call add_json_bool, DontUncompressPrivAppsDex, $(filter true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS)))
$(call add_json_list, ModulesLoadedByPrivilegedModules, $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES))
@@ -105,6 +125,7 @@
$(call add_json_bool, IsEng, $(filter eng,$(TARGET_BUILD_VARIANT)))
$(call add_json_bool, SanitizeLite, $(SANITIZE_LITE))
$(call add_json_bool, DefaultAppImages, $(WITH_DEX_PREOPT_APP_IMAGE))
+ $(call add_json_bool, RelaxUsesLibraryCheck, $(filter true,$(RELAX_USES_LIBRARY_CHECK)))
$(call add_json_str, Dex2oatXmx, $(DEX2OAT_XMX))
$(call add_json_str, Dex2oatXms, $(DEX2OAT_XMS))
$(call add_json_str, EmptyDirectory, $(OUT_DIR)/empty)
diff --git a/core/dex_preopt_config_merger.py b/core/dex_preopt_config_merger.py
index ebb99e1..4efcc17 100755
--- a/core/dex_preopt_config_merger.py
+++ b/core/dex_preopt_config_merger.py
@@ -67,26 +67,25 @@
# the loop in case this changes in the future.
for sdk_ver in clc_map:
clcs = clc_map[sdk_ver]
- clcs2 = OrderedDict()
- for lib in clcs:
- clc = clcs[lib]
+ clcs2 = []
+ for clc in clcs:
+ lib = clc['Name']
if lib in uses_libs:
ulib = uses_libs[lib]
- # On-host (build) path to the dependency DEX jar file.
- clc['Host'] = ulib['BuildPath']
+ # The real <uses-library> name (may be different from the module name).
+ clc['Name'] = ulib['ProvidesUsesLibrary']
# On-device (install) path to the dependency DEX jar file.
clc['Device'] = ulib['DexLocation']
# CLC of the dependency becomes a subcontext. We only need sub-CLC for
# 'any' version because all other versions are for compatibility
# libraries, which exist only for apps and not for libraries.
clc['Subcontexts'] = ulib['ClassLoaderContexts'].get('any')
- # Patch the library name in the CLC as well.
- clcs2[ulib['ProvidesUsesLibrary']] = clc
else:
# dexpreopt.config for this <uses-library> is not among the script
# arguments, which may be the case with compatibility libraries that
# don't need patching anyway. Just use the original CLC.
- clcs2[lib] = clc
+ pass
+ clcs2.append(clc)
clc_map2[sdk_ver] = clcs2
# Overwrite the original class loader context with the patched one.
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 6f6ac28..e0f94bd 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -1,5 +1,6 @@
# dexpreopt_odex_install.mk is used to define odex creation rules for JARs and APKs
# This file depends on variables set in base_rules.mk
+# Input variables: my_manifest_or_apk
# Output variables: LOCAL_DEX_PREOPT, LOCAL_UNCOMPRESS_DEX
ifeq (true,$(LOCAL_USE_EMBEDDED_DEX))
@@ -30,8 +31,9 @@
LOCAL_DEX_PREOPT :=
endif
-# Only enable preopt for non tests.
+# Disable <uses-library> checks and preopt for tests.
ifneq (,$(filter $(LOCAL_MODULE_TAGS),tests))
+ LOCAL_ENFORCE_USES_LIBRARIES := false
LOCAL_DEX_PREOPT :=
endif
@@ -50,11 +52,25 @@
LOCAL_DEX_PREOPT :=
endif
+# Disable <uses-library> checks if dexpreopt is globally disabled.
+# Without dexpreopt the check is not necessary, and although it is good to have,
+# it is difficult to maintain on non-linux build platforms where dexpreopt is
+# generally disabled (the check may fail due to various unrelated reasons, such
+# as a failure to get manifest from an APK).
+ifneq (true,$(WITH_DEXPREOPT))
+ LOCAL_ENFORCE_USES_LIBRARIES := false
+endif
+ifeq (true,$(WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY))
+ LOCAL_ENFORCE_USES_LIBRARIES := false
+endif
+
ifdef LOCAL_UNINSTALLABLE_MODULE
LOCAL_DEX_PREOPT :=
endif
-ifeq (,$(strip $(built_dex)$(my_prebuilt_src_file)$(LOCAL_SOONG_DEX_JAR))) # contains no java code
+# Disable <uses-library> checks and preopt if the app contains no java code.
+ifeq (,$(strip $(built_dex)$(my_prebuilt_src_file)$(LOCAL_SOONG_DEX_JAR)))
+ LOCAL_ENFORCE_USES_LIBRARIES := false
LOCAL_DEX_PREOPT :=
endif
@@ -108,6 +124,137 @@
endif
endif
+################################################################################
+# Local module variables and functions used in dexpreopt and manifest_check.
+################################################################################
+
+my_filtered_optional_uses_libraries := $(filter-out $(INTERNAL_PLATFORM_MISSING_USES_LIBRARIES), \
+ $(LOCAL_OPTIONAL_USES_LIBRARIES))
+
+# TODO(b/132357300): This may filter out too much, as PRODUCT_PACKAGES doesn't
+# include all packages (the full list is unknown until reading all Android.mk
+# makefiles). As a consequence, a library may be present but not included in
+# dexpreopt, which will result in class loader context mismatch and a failure
+# to load dexpreopt code on device. We should fix this, either by deferring
+# dependency computation until the full list of product packages is known, or
+# by adding product-specific lists of missing libraries.
+my_filtered_optional_uses_libraries := $(filter $(PRODUCT_PACKAGES), \
+ $(my_filtered_optional_uses_libraries))
+
+ifeq ($(LOCAL_MODULE_CLASS),APPS)
+ # compatibility libraries are added to class loader context of an app only if
+ # targetSdkVersion in the app's manifest is lower than the given SDK version
+
+ my_dexpreopt_libs_compat_28 := \
+ org.apache.http.legacy
+
+ my_dexpreopt_libs_compat_29 := \
+ android.hidl.manager-V1.0-java \
+ android.hidl.base-V1.0-java
+
+ my_dexpreopt_libs_compat_30 := \
+ android.test.base \
+ android.test.mock
+
+ my_dexpreopt_libs_compat := \
+ $(my_dexpreopt_libs_compat_28) \
+ $(my_dexpreopt_libs_compat_29) \
+ $(my_dexpreopt_libs_compat_30)
+else
+ my_dexpreopt_libs_compat :=
+endif
+
+my_dexpreopt_libs := \
+ $(LOCAL_USES_LIBRARIES) \
+ $(my_filtered_optional_uses_libraries)
+
+# Module dexpreopt.config depends on dexpreopt.config files of each
+# <uses-library> dependency, because these libraries may be processed after
+# the current module by Make (there's no topological order), so the dependency
+# information (paths, class loader context) may not be ready yet by the time
+# this dexpreopt.config is generated. So it's necessary to add file-level
+# dependencies between dexpreopt.config files.
+my_dexpreopt_dep_configs := $(foreach lib, \
+ $(filter-out $(my_dexpreopt_libs_compat),$(LOCAL_USES_LIBRARIES) $(my_filtered_optional_uses_libraries)), \
+ $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,)/dexpreopt.config)
+
+# 1: SDK version
+# 2: list of libraries
+#
+# Make does not process modules in topological order wrt. <uses-library>
+# dependencies, therefore we cannot rely on variables to get the information
+# about dependencies (in particular, their on-device path and class loader
+# context). This information is communicated via dexpreopt.config files: each
+# config depends on configs for <uses-library> dependencies of this module,
+# and the dex_preopt_config_merger.py script reads all configs and inserts the
+# missing bits from dependency configs into the module config.
+#
+# By default on-device path is /system/framework/*.jar, and class loader
+# subcontext is empty. These values are correct for compatibility libraries,
+# which are special and not handled by dex_preopt_config_merger.py.
+#
+add_json_class_loader_context = \
+ $(call add_json_array, $(1)) \
+ $(foreach lib, $(2),\
+ $(call add_json_map_anon) \
+ $(call add_json_str, Name, $(lib)) \
+ $(call add_json_str, Host, $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar) \
+ $(call add_json_str, Device, /system/framework/$(lib).jar) \
+ $(call add_json_val, Subcontexts, null) \
+ $(call end_json_map)) \
+ $(call end_json_array)
+
+################################################################################
+# Verify <uses-library> coherence between the build system and the manifest.
+################################################################################
+
+# Verify LOCAL_USES_LIBRARIES/LOCAL_OPTIONAL_USES_LIBRARIES
+# If LOCAL_ENFORCE_USES_LIBRARIES is not set, default to true if either of LOCAL_USES_LIBRARIES or
+# LOCAL_OPTIONAL_USES_LIBRARIES are specified.
+# Will change the default to true unconditionally in the future.
+ifndef LOCAL_ENFORCE_USES_LIBRARIES
+ ifneq (,$(strip $(LOCAL_USES_LIBRARIES)$(LOCAL_OPTIONAL_USES_LIBRARIES)))
+ LOCAL_ENFORCE_USES_LIBRARIES := true
+ endif
+endif
+
+my_enforced_uses_libraries :=
+ifeq (true,$(LOCAL_ENFORCE_USES_LIBRARIES))
+ my_verify_script := build/soong/scripts/manifest_check.py
+ my_uses_libs_args := $(patsubst %,--uses-library %,$(LOCAL_USES_LIBRARIES))
+ my_optional_uses_libs_args := $(patsubst %,--optional-uses-library %, \
+ $(LOCAL_OPTIONAL_USES_LIBRARIES))
+ my_relax_check_arg := $(if $(filter true,$(RELAX_USES_LIBRARY_CHECK)), \
+ --enforce-uses-libraries-relax,)
+ my_dexpreopt_config_args := $(patsubst %,--dexpreopt-config %,$(my_dexpreopt_dep_configs))
+
+ my_enforced_uses_libraries := $(intermediates.COMMON)/enforce_uses_libraries.status
+ $(my_enforced_uses_libraries): PRIVATE_USES_LIBRARIES := $(my_uses_libs_args)
+ $(my_enforced_uses_libraries): PRIVATE_OPTIONAL_USES_LIBRARIES := $(my_optional_uses_libs_args)
+ $(my_enforced_uses_libraries): PRIVATE_DEXPREOPT_CONFIGS := $(my_dexpreopt_config_args)
+ $(my_enforced_uses_libraries): PRIVATE_RELAX_CHECK := $(my_relax_check_arg)
+ $(my_enforced_uses_libraries): $(AAPT)
+ $(my_enforced_uses_libraries): $(my_verify_script)
+ $(my_enforced_uses_libraries): $(my_dexpreopt_dep_configs)
+ $(my_enforced_uses_libraries): $(my_manifest_or_apk)
+ @echo Verifying uses-libraries: $<
+ rm -f $@
+ $(my_verify_script) \
+ --enforce-uses-libraries \
+ --enforce-uses-libraries-status $@ \
+ --aapt $(AAPT) \
+ $(PRIVATE_USES_LIBRARIES) \
+ $(PRIVATE_OPTIONAL_USES_LIBRARIES) \
+ $(PRIVATE_DEXPREOPT_CONFIGS) \
+ $(PRIVATE_RELAX_CHECK) \
+ $<
+ $(built_module) : $(my_enforced_uses_libraries)
+endif
+
+################################################################################
+# Dexpreopt command.
+################################################################################
+
my_dexpreopt_archs :=
my_dexpreopt_images :=
my_dexpreopt_images_deps :=
@@ -186,72 +333,6 @@
my_dexpreopt_image_locations += $(DEXPREOPT_IMAGE_LOCATIONS_$(my_dexpreopt_infix))
- my_filtered_optional_uses_libraries := $(filter-out $(INTERNAL_PLATFORM_MISSING_USES_LIBRARIES), \
- $(LOCAL_OPTIONAL_USES_LIBRARIES))
-
- # TODO(b/132357300): This may filter out too much, as PRODUCT_PACKAGES doesn't
- # include all packages (the full list is unknown until reading all Android.mk
- # makefiles). As a consequence, a library may be present but not included in
- # dexpreopt, which will result in class loader context mismatch and a failure
- # to load dexpreopt code on device. We should fix this, either by deferring
- # dependency computation until the full list of product packages is known, or
- # by adding product-specific lists of missing libraries.
- my_filtered_optional_uses_libraries := $(filter $(my_filtered_optional_uses_libraries), \
- $(PRODUCT_PACKAGES))
-
- ifeq ($(LOCAL_MODULE_CLASS),APPS)
- # compatibility libraries are added to class loader context of an app only if
- # targetSdkVersion in the app's manifest is lower than the given SDK version
-
- my_dexpreopt_libs_compat_28 := \
- org.apache.http.legacy
-
- my_dexpreopt_libs_compat_29 := \
- android.hidl.base-V1.0-java \
- android.hidl.manager-V1.0-java
-
- my_dexpreopt_libs_compat_30 := \
- android.test.base \
- android.test.mock
-
- my_dexpreopt_libs_compat := \
- $(my_dexpreopt_libs_compat_28) \
- $(my_dexpreopt_libs_compat_29) \
- $(my_dexpreopt_libs_compat_30)
- else
- my_dexpreopt_libs_compat :=
- endif
-
- my_dexpreopt_libs := $(sort \
- $(LOCAL_USES_LIBRARIES) \
- $(my_filtered_optional_uses_libraries) \
- )
-
- # 1: SDK version
- # 2: list of libraries
- #
- # Make does not process modules in topological order wrt. <uses-library>
- # dependencies, therefore we cannot rely on variables to get the information
- # about dependencies (in particular, their on-device path and class loader
- # context). This information is communicated via dexpreopt.config files: each
- # config depends on configs for <uses-library> dependencies of this module,
- # and the dex_preopt_config_merger.py script reads all configs and inserts the
- # missing bits from dependency configs into the module config.
- #
- # By default on-device path is /system/framework/*.jar, and class loader
- # subcontext is empty. These values are correct for compatibility libraries,
- # which are special and not handled by dex_preopt_config_merger.py.
- #
- add_json_class_loader_context = \
- $(call add_json_map, $(1)) \
- $(foreach lib, $(2),\
- $(call add_json_map, $(lib)) \
- $(call add_json_str, Host, $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar) \
- $(call add_json_str, Device, /system/framework/$(lib).jar) \
- $(call add_json_map, Subcontexts, ${$}) $(call end_json_map) \
- $(call end_json_map)) \
- $(call end_json_map)
-
# Record dex-preopt config.
DEXPREOPT.$(LOCAL_MODULE).DEX_PREOPT := $(LOCAL_DEX_PREOPT)
DEXPREOPT.$(LOCAL_MODULE).MULTILIB := $(LOCAL_MULTILIB)
@@ -278,6 +359,7 @@
$(call add_json_list, PreoptFlags, $(LOCAL_DEX_PREOPT_FLAGS))
$(call add_json_str, ProfileClassListing, $(if $(my_process_profile),$(LOCAL_DEX_PREOPT_PROFILE)))
$(call add_json_bool, ProfileIsTextListing, $(my_profile_is_text_listing))
+ $(call add_json_str, EnforceUsesLibrariesStatusFile, $(my_enforced_uses_libraries))
$(call add_json_bool, EnforceUsesLibraries, $(LOCAL_ENFORCE_USES_LIBRARIES))
$(call add_json_str, ProvidesUsesLibrary, $(firstword $(LOCAL_PROVIDES_USES_LIBRARY) $(LOCAL_MODULE)))
$(call add_json_map, ClassLoaderContexts)
@@ -303,16 +385,6 @@
my_dexpreopt_zip := $(intermediates)/dexpreopt.zip
my_dexpreopt_config_merger := $(BUILD_SYSTEM)/dex_preopt_config_merger.py
- # Module dexpreopt.config depends on dexpreopt.config files of each
- # <uses-library> dependency, because these libraries may be processed after
- # the current module by Make (there's no topological order), so the dependency
- # information (paths, class loader context) may not be ready yet by the time
- # this dexpreopt.config is generated. So it's necessary to add file-level
- # dependencies between dexpreopt.config files.
- my_dexpreopt_dep_configs := $(foreach lib, \
- $(filter-out $(my_dexpreopt_libs_compat),$(LOCAL_USES_LIBRARIES) $(my_filtered_optional_uses_libraries)), \
- $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,)/dexpreopt.config)
-
$(my_dexpreopt_config): $(my_dexpreopt_dep_configs) $(my_dexpreopt_config_merger)
$(my_dexpreopt_config): PRIVATE_MODULE := $(LOCAL_MODULE)
$(my_dexpreopt_config): PRIVATE_CONTENTS := $(json_contents)
@@ -345,6 +417,9 @@
$(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar)
my_dexpreopt_deps += $(my_dexpreopt_images_deps)
my_dexpreopt_deps += $(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)
+ ifeq ($(LOCAL_ENFORCE_USES_LIBRARIES),true)
+ my_dexpreopt_deps += $(intermediates.COMMON)/enforce_uses_libraries.status
+ endif
$(my_dexpreopt_zip): PRIVATE_MODULE := $(LOCAL_MODULE)
$(my_dexpreopt_zip): $(my_dexpreopt_deps)
diff --git a/core/dumpconfig.mk b/core/dumpconfig.mk
new file mode 100644
index 0000000..9b1f2c2
--- /dev/null
+++ b/core/dumpconfig.mk
@@ -0,0 +1,144 @@
+# Read and dump the product configuration.
+
+# Called from the product-config tool, not from the main build system.
+
+#
+# Ensure we are being called correctly
+#
+ifndef KATI
+ $(warning Kati must be used to call dumpconfig.mk, not make.)
+ $(error stopping)
+endif
+
+ifdef DEFAULT_GOAL
+ $(warning Calling dumpconfig.mk from inside the make build system is not)
+ $(warning supported. It is only meant to be called via kati by product-config.)
+ $(error stopping)
+endif
+
+ifndef TARGET_PRODUCT
+ $(warning dumpconfig.mk requires TARGET_PRODUCT to be set)
+ $(error stopping)
+endif
+
+ifndef TARGET_BUILD_VARIANT
+ $(warning dumpconfig.mk requires TARGET_BUILD_VARIANT to be set)
+ $(error stopping)
+endif
+
+ifneq (build/make/core/config.mk,$(wildcard build/make/core/config.mk))
+ $(warning dumpconfig must be called from the root of the source tree)
+ $(error stopping)
+endif
+
+ifeq (,$(DUMPCONFIG_FILE))
+ $(warning dumpconfig requires DUMPCONFIG_FILE to be set)
+ $(error stopping)
+endif
+
+# Skip the second inclusion of all of the product config files, because
+# we will do these checks in the product_config tool.
+SKIP_ARTIFACT_PATH_REQUIREMENT_PRODUCTS_CHECK := true
+
+# Before we do anything else output the format version.
+$(file > $(DUMPCONFIG_FILE),dumpconfig_version,1)
+$(file >> $(DUMPCONFIG_FILE),dumpconfig_file,$(DUMPCONFIG_FILE))
+
+# Default goal for dumpconfig
+dumpconfig:
+ $(file >> $(DUMPCONFIG_FILE),***DONE***)
+ @echo ***DONE***
+
+# TODO(Remove): These need to be set externally
+OUT_DIR := out
+TMPDIR = /tmp/build-temp
+BUILD_DATETIME_FILE := $(OUT_DIR)/build_date.txt
+
+# Escape quotation marks for CSV, and wraps in quotation marks.
+define escape-for-csv
+"$(subst ","",$1)"
+endef
+
+# Args:
+# $(1): include stack
+define dump-import-start
+$(eval $(file >> $(DUMPCONFIG_FILE),import,$(strip $(1))))
+endef
+
+# Args:
+# $(1): include stack
+define dump-import-done
+$(eval $(file >> $(DUMPCONFIG_FILE),imported,$(strip $(1))))
+endef
+
+# Args:
+# $(1): Current file
+# $(2): Inherited file
+define dump-inherit
+$(eval $(file >> $(DUMPCONFIG_FILE),inherit,$(strip $(1)),$(strip $(2))))
+endef
+
+# Args:
+# $(1): Config phase (PRODUCT, EXPAND, or DEVICE)
+# $(2): Root nodes to import
+# $(3): All variable names
+# $(4): Single-value variables
+# $(5): Makefile being processed
+define dump-phase-start
+$(eval $(file >> $(DUMPCONFIG_FILE),phase,$(strip $(1)),$(strip $(2)))) \
+$(foreach var,$(3), \
+ $(eval $(file >> $(DUMPCONFIG_FILE),var,$(if $(filter $(4),$(var)),single,list),$(var))) \
+) \
+$(call dump-config-vals,$(strip $(5)),initial)
+endef
+
+# Args:
+# $(1): Makefile being processed
+define dump-phase-end
+$(call dump-config-vals,$(strip $(1)),final)
+endef
+
+define dump-debug
+$(eval $(file >> $(DUMPCONFIG_FILE),debug,$(1)))
+endef
+
+# Skip these when dumping. They're not used and they cause a lot of noise in the dump.
+DUMPCONFIG_SKIP_VARS := \
+ .VARIABLES \
+ .KATI_SYMBOLS \
+ 1 \
+ 2 \
+ 3 \
+ 4 \
+ 5 \
+ 6 \
+ 7 \
+ 8 \
+ 9 \
+ LOCAL_PATH \
+ MAKEFILE_LIST \
+ PARENT_PRODUCT_FILES \
+ current_mk \
+ _eiv_ev \
+ _eiv_i \
+ _eiv_sv \
+ _eiv_tv \
+ inherit_var \
+ np \
+ _node_import_context \
+ _included \
+ _include_stack \
+ _in \
+ _nic.%
+
+# Args:
+# $(1): Makefile that was included
+# $(2): block (before,import,after,initial,final)
+define dump-config-vals
+$(foreach var,$(filter-out $(DUMPCONFIG_SKIP_VARS),$(.KATI_SYMBOLS)),\
+ $(eval $(file >> $(DUMPCONFIG_FILE),val,$(call escape-for-csv,$(1)),$(2),$(call escape-for-csv,$(var)),$(call escape-for-csv,$($(var))),$(call escape-for-csv,$(KATI_variable_location $(var))))) \
+)
+endef
+
+include build/make/core/config.mk
+
diff --git a/core/envsetup.mk b/core/envsetup.mk
index a5571ae..8c25086 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -275,7 +275,7 @@
_vendor_dlkm_path_placeholder := ||VENDOR_DLKM-PATH-PH||
_odm_dlkm_path_placeholder := ||ODM_DLKM-PATH-PH||
TARGET_COPY_OUT_VENDOR := $(_vendor_path_placeholder)
-TARGET_COPY_OUT_VENDOR_RAMDISK := vendor-ramdisk
+TARGET_COPY_OUT_VENDOR_RAMDISK := vendor_ramdisk
TARGET_COPY_OUT_PRODUCT := $(_product_path_placeholder)
# TODO(b/135957588) TARGET_COPY_OUT_PRODUCT_SERVICES will copy the target to
# product
@@ -301,6 +301,10 @@
com.android.art:okhttp \
com.android.art:bouncycastle \
com.android.art:apache-xml
+# With EMMA_INSTRUMENT_FRAMEWORK=true the Core libraries depend on jacoco.
+ifeq (true,$(EMMA_INSTRUMENT_FRAMEWORK))
+ ART_APEX_JARS += com.android.art:jacocoagent
+endif
#################################################################
# Read the product specs so we can get TARGET_DEVICE and other
diff --git a/core/envsetup.rbc b/core/envsetup.rbc
new file mode 100644
index 0000000..451623b
--- /dev/null
+++ b/core/envsetup.rbc
@@ -0,0 +1,207 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(":build_id.rbc|init", _build_id_init = "init")
+
+def _all_versions():
+ """Returns all known versions."""
+ versions = ["OPR1", "OPD1", "OPD2", "OPM1", "OPM2", "PPR1", "PPD1", "PPD2", "PPM1", "PPM2", "QPR1"]
+ for v in ("Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"):
+ for e in ("P1A", "P1B", "P2A", "P2B", "D1A", "D1B", "D2A", "D2B", "Q1A", "Q1B", "Q2A", "Q2B", "Q3A", "Q3B"):
+ versions.append(v + e)
+ return versions
+
+def _allowed_versions(all_versions, min_version, max_version, default_version):
+ """Checks that the version range and default version are valid; returns all versions in range."""
+ for v in (min_version, max_version, default_version):
+ if v not in all_versions:
+ fail("%s is invalid" % v)
+
+ min_i = all_versions.index(min_version)
+ max_i = all_versions.index(max_version)
+ def_i = all_versions.index(default_version)
+ if min_i > max_i:
+ fail("%s should come before %s in the version list" % (min_version, max_version))
+ if def_i < min_i or def_i > max_i:
+ fail("%s should come between %s and %s" % (default_version, min_version, max_version))
+ return all_versions[min_i:max_i + 1]
+
+# This function is a manual conversion of the version_defaults.mk
+def _versions_default(g, all_versions):
+ """Handle various build version information.
+
+ Guarantees that the following are defined:
+ PLATFORM_VERSION
+ PLATFORM_SDK_VERSION
+ PLATFORM_VERSION_CODENAME
+ DEFAULT_APP_TARGET_SDK
+ BUILD_ID
+ BUILD_NUMBER
+ PLATFORM_SECURITY_PATCH
+ PLATFORM_VNDK_VERSION
+ PLATFORM_SYSTEMSDK_VERSIONS
+ """
+
+ # If build_id.rbc exists, it may override some of the defaults.
+ # Note that build.prop target also wants INTERNAL_BUILD_ID_MAKEFILE to be set if the file exists.
+ if _build_id_init != None:
+ _build_id_init(g)
+ g["INTERNAL_BUILD_ID_MAKEFILE"] = "build/make/core/build_id"
+
+ allowed_versions = _allowed_versions(all_versions, v_min, v_max, v_default)
+ g.setdefault("TARGET_PLATFORM_VERSION", v_default)
+ if g["TARGET_PLATFORM_VERSION"] not in allowed_versions:
+ fail("%s is not valid, must be one of %s" % (g["TARGET_PLATFORM_VERSION"], allowed_versions))
+
+ g["DEFAULT_PLATFORM_VERSION"] = v_default
+ g["PLATFORM_VERSION_LAST_STABLE"] = 11
+ g.setdefault("PLATFORM_VERSION_CODENAME", g["TARGET_PLATFORM_VERSION"])
+ # TODO(asmundak): set PLATFORM_VERSION_ALL_CODENAMES
+
+ g.setdefault("PLATFORM_SDK_VERSION", 30)
+ version_codename = g["PLATFORM_VERSION_CODENAME"]
+ if version_codename == "REL":
+ g.setdefault("PLATFORM_VERSION", g["PLATFORM_VERSION_LAST_STABLE"])
+ g["PLATFORM_PREVIEW_SDK_VERSION"] = 0
+ g.setdefault("DEFAULT_APP_TARGET_SDK", g["PLATFORM_SDK_VERSION"])
+ g.setdefault("PLATFORM_VNDK_VERSION", g["PLATFORM_SDK_VERSION"])
+ else:
+ g.setdefault("PLATFORM_VERSION", version_codename)
+ g.setdefault("PLATFORM_PREVIEW_SDK_VERSION", 1)
+ g.setdefault("DEFAULT_APP_TARGET_SDK", version_codename)
+ g.setdefault("PLATFORM_VNDK_VERSION", version_codename)
+
+ g.setdefault("PLATFORM_SYSTEMSDK_MIN_VERSION", 28)
+ versions = [str(i) for i in range(g["PLATFORM_SYSTEMSDK_MIN_VERSION"], g["PLATFORM_SDK_VERSION"] + 1)]
+ versions.append(version_codename)
+ g["PLATFORM_SYSTEMSDK_VERSIONS"] = sorted(versions)
+
+ # Used to indicate the security patch that has been applied to the device.
+ # It must signify that the build includes all security patches issued up through the designated Android Public Security Bulletin.
+ # It must be of the form "YYYY-MM-DD" on production devices.
+ # It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
+ # If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
+ g.setdefault("PLATFORM_SECURITY_PATCH", "2021-03-05")
+ dt = 'TZ="GMT" %s' % g["PLATFORM_SECURITY_PATCH"]
+ g.setdefault("PLATFORM_SECURITY_PATCH_TIMESTAMP", rblf_shell("date -d '%s' +%%s" % dt))
+
+ # Used to indicate the base os applied to the device. Can be an arbitrary string, but must be a single word.
+ # If there is no $PLATFORM_BASE_OS set, keep it empty.
+ g.setdefault("PLATFORM_BASE_OS", "")
+
+ # Used to signify special builds. E.g., branches and/or releases, like "M5-RC7". Can be an arbitrary string, but
+ # must be a single word and a valid file name. If there is no BUILD_ID set, make it obvious.
+ g.setdefault("BUILD_ID", "UNKNOWN")
+
+ # BUILD_NUMBER should be set to the source control value that represents the current state of the source code.
+ # E.g., a perforce changelist number or a git hash. Can be an arbitrary string (to allow for source control that
+ # uses something other than numbers), but must be a single word and a valid file name.
+ #
+ # If no BUILD_NUMBER is set, create a useful "I am an engineering build from this date/time" value. Make it start
+ # with a non-digit so that anyone trying to parse it as an integer will probably get "0".
+ g.setdefault("BUILD_NUMBER", "eng.%s.%s" % (g["USER"], "TIMESTAMP"))
+
+ # Used to set minimum supported target sdk version. Apps targeting SDK version lower than the set value will result
+ # in a warning being shown when any activity from the app is started.
+ g.setdefault("PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION", 23)
+
+def init(g):
+ """Initializes globals.
+
+ The code is the Starlark counterpart of the contents of the
+ envsetup.mk file.
+ Args:
+ g: globals dictionary
+ """
+ all_versions = _all_versions()
+ _versions_default(g, all_versions)
+ for v in all_versions:
+ g["IS_AT_LEAST" + v] = True
+ if v == g["TARGET_PLATFORM_VERSION"]:
+ break
+
+ # ---------------------------------------------------------------
+ # If you update the build system such that the environment setup or buildspec.mk need to be updated,
+ # increment this number, and people who haven't re-run those will have to do so before they can build.
+ # Make sure to also update the corresponding value in buildspec.mk.default and envsetup.sh.
+ g["CORRECT_BUILD_ENV_SEQUENCE_NUMBER"] = 13
+
+ g.setdefault("TARGET_PRODUCT", "aosp_arm")
+ g.setdefault("TARGET_BUILD_VARIANT", "eng")
+
+ g.setdefault("TARGET_BUILD_APPS", [])
+ g["TARGET_BUILD_UNBUNDLED"] = (g["TARGET_BUILD_APPS"] != []) or (getattr(g, "TARGET_BUILD_UNBUNDLED_IMAGE", "") != "")
+
+ # ---------------------------------------------------------------
+ # Set up configuration for host machine. We don't do cross-compiles except for arm, so the HOST
+ # is whatever we are running on.
+ host = rblf_shell("uname -sm")
+ if host.find("Linux") >= 0:
+ g["HOST_OS"] = "linux"
+ elif host.find("Darwin") >= 0:
+ g["HOST_OS"] = "darwin"
+ else:
+ fail("Cannot run on %s OS" % host)
+
+ # TODO(asmundak): set g.HOST_OS_EXTRA
+
+ g["BUILD_OS"] = g["HOST_OS"]
+
+ # TODO(asmundak): check cross-OS build
+
+ if host.find("x86_64") >= 0:
+ g["HOST_ARCH"] = "x86_64"
+ g["HOST_2ND_ARCH"] = "x86"
+ g["HOST_IS_64_BIT"] = True
+ elif host.find("i686") >= 0 or host.find("x86") >= 0:
+ fail("Building on a 32-bit x86 host is not supported: %s" % host)
+ elif g["HOST_OS"] == "darwin":
+ g["HOST_2ND_ARCH"] = ""
+
+ g["HOST_2ND_ARCH_VAR_PREFIX"] = "2ND_"
+ g["HOST_2ND_ARCH_MODULE_SUFFIX"] = "_32"
+ g["HOST_CROSS_2ND_ARCH_VAR_PREFIX"] = "2ND_"
+ g["HOST_CROSS_2ND_ARCH_MODULE_SUFFIX"] = "_64"
+ g["TARGET_2ND_ARCH_VAR_PREFIX"] = "2ND_"
+
+ # TODO(asmundak): envsetup.mk lines 216-226:
+ # convert combo-related stuff from combo/select.mk
+
+ # on windows, the tools have .exe at the end, and we depend on the
+ # host config stuff being done first
+ g["BUILD_ARCH"] = g["HOST_ARCH"]
+ g["BUILD_2ND_ARCH"] = g["HOST_2ND_ARCH"]
+
+ # the host build defaults to release, and it must be release or debug
+ g.setdefault("HOST_BUILD_TYPE", "release")
+ if g["HOST_BUILD_TYPE"] not in ["release", "debug"]:
+ fail("HOST_BUILD_TYPE must be either release or debug, not '%s'" % g["HOST_BUILD_TYPE"])
+
+ # TODO(asmundak): there is more stuff in envsetup.mk lines 249-292, but
+ # it does not seem to affect product configuration. Revisit this.
+
+ g["ART_APEX_JARS"] = [
+ "com.android.art:core-oj",
+ "com.android.art:core-libart",
+ "com.android.art:okhttp",
+ "com.android.art:bouncycastle",
+ "com.android.art:apache-xml",
+ ]
+
+ if g.get("TARGET_BUILD_TYPE", "") != "debug":
+ g["TARGET_BUILD_TYPE"] = "release"
+
+v_default = "SP1A"
+v_min = "SP1A"
+v_max = "SP1A"
diff --git a/core/executable_internal.mk b/core/executable_internal.mk
index c6a8faf..fb14cce 100644
--- a/core/executable_internal.mk
+++ b/core/executable_internal.mk
@@ -41,7 +41,6 @@
else
my_target_libcrt_builtins := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)LIBCRT_BUILTINS)
endif
-my_target_libatomic := $(call intermediates-dir-for,STATIC_LIBRARIES,libatomic,,,$(LOCAL_2ND_ARCH_VAR_PREFIX))/libatomic.a
ifeq ($(LOCAL_NO_CRT),true)
my_target_crtbegin_dynamic_o :=
my_target_crtbegin_static_o :=
@@ -61,18 +60,17 @@
my_target_crtend_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtend_android.sdk.$(my_ndk_crt_version))
endif
$(linked_module): PRIVATE_TARGET_LIBCRT_BUILTINS := $(my_target_libcrt_builtins)
-$(linked_module): PRIVATE_TARGET_LIBATOMIC := $(my_target_libatomic)
$(linked_module): PRIVATE_TARGET_CRTBEGIN_DYNAMIC_O := $(my_target_crtbegin_dynamic_o)
$(linked_module): PRIVATE_TARGET_CRTBEGIN_STATIC_O := $(my_target_crtbegin_static_o)
$(linked_module): PRIVATE_TARGET_CRTEND_O := $(my_target_crtend_o)
$(linked_module): PRIVATE_POST_LINK_CMD := $(LOCAL_POST_LINK_CMD)
ifeq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
-$(linked_module): $(my_target_crtbegin_static_o) $(all_objects) $(all_libraries) $(my_target_crtend_o) $(my_target_libcrt_builtins) $(my_target_libatomic) $(CLANG_CXX)
+$(linked_module): $(my_target_crtbegin_static_o) $(all_objects) $(all_libraries) $(my_target_crtend_o) $(my_target_libcrt_builtins) $(CLANG_CXX)
$(transform-o-to-static-executable)
$(PRIVATE_POST_LINK_CMD)
else
-$(linked_module): $(my_target_crtbegin_dynamic_o) $(all_objects) $(all_libraries) $(my_target_crtend_o) $(my_target_libcrt_builtins) $(my_target_libatomic) $(CLANG_CXX)
+$(linked_module): $(my_target_crtbegin_dynamic_o) $(all_objects) $(all_libraries) $(my_target_crtend_o) $(my_target_libcrt_builtins) $(CLANG_CXX)
$(transform-o-to-executable)
$(PRIVATE_POST_LINK_CMD)
endif
diff --git a/core/fuzz_test.mk b/core/fuzz_test.mk
index 4a0fcfa..8a4b8c3 100644
--- a/core/fuzz_test.mk
+++ b/core/fuzz_test.mk
@@ -19,35 +19,6 @@
ifeq ($(my_fuzzer),libFuzzer)
LOCAL_STATIC_LIBRARIES += libFuzzer
-else ifeq ($(my_fuzzer),honggfuzz)
-LOCAL_STATIC_LIBRARIES += honggfuzz_libhfuzz
-LOCAL_REQUIRED_MODULES += honggfuzz
-LOCAL_LDFLAGS += \
- "-Wl,--wrap=strcmp" \
- "-Wl,--wrap=strcasecmp" \
- "-Wl,--wrap=strncmp" \
- "-Wl,--wrap=strncasecmp" \
- "-Wl,--wrap=strstr" \
- "-Wl,--wrap=strcasestr" \
- "-Wl,--wrap=memcmp" \
- "-Wl,--wrap=bcmp" \
- "-Wl,--wrap=memmem" \
- "-Wl,--wrap=ap_cstr_casecmp" \
- "-Wl,--wrap=ap_cstr_casecmpn" \
- "-Wl,--wrap=ap_strcasestr" \
- "-Wl,--wrap=apr_cstr_casecmp" \
- "-Wl,--wrap=apr_cstr_casecmpn" \
- "-Wl,--wrap=CRYPTO_memcmp" \
- "-Wl,--wrap=OPENSSL_memcmp" \
- "-Wl,--wrap=OPENSSL_strcasecmp" \
- "-Wl,--wrap=OPENSSL_strncasecmp" \
- "-Wl,--wrap=xmlStrncmp" \
- "-Wl,--wrap=xmlStrcmp" \
- "-Wl,--wrap=xmlStrEqual" \
- "-Wl,--wrap=xmlStrcasecmp" \
- "-Wl,--wrap=xmlStrncasecmp" \
- "-Wl,--wrap=xmlStrstr" \
- "-Wl,--wrap=xmlStrcasestr"
else
$(call pretty-error, Unknown fuzz engine $(my_fuzzer))
endif
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index da32978..5eeb8ac 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -125,8 +125,7 @@
ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
$(full_classes_header_jarjar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
$(full_classes_header_jarjar): $(full_classes_turbine_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
- @echo Header JarJar: $@
- $(hide) $(JAVA) -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
+ $(call transform-jarjar)
else
full_classes_header_jarjar := $(full_classes_turbine_jar)
endif
@@ -149,8 +148,7 @@
ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
$(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
$(full_classes_jarjar_jar): $(full_classes_combined_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
- @echo JarJar: $@
- $(hide) $(JAVA) -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
+ $(call transform-jarjar)
else
full_classes_jarjar_jar := $(full_classes_combined_jar)
endif
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index f9abe9b..0f95202 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -113,8 +113,7 @@
ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
$(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
$(full_classes_jarjar_jar): $(full_classes_combined_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
- @echo JarJar: $@
- $(hide) $(JAVA) -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
+ $(call transform-jarjar)
else
full_classes_jarjar_jar := $(full_classes_combined_jar)
endif
diff --git a/core/jacoco.mk b/core/jacoco.mk
index e8fb89b..e8c74ee 100644
--- a/core/jacoco.mk
+++ b/core/jacoco.mk
@@ -71,7 +71,11 @@
zip -q $@ \
-r $(PRIVATE_UNZIPPED_PATH)
-
+# Make a rule to copy the jacoco-report-classes.jar to a packaging directory.
+$(eval $(call copy-one-file,$(my_classes_to_report_on_path),\
+ $(call local-packaging-dir,jacoco)/jacoco-report-classes.jar))
+$(call add-dependency,$(LOCAL_BUILT_MODULE),\
+ $(call local-packaging-dir,jacoco)/jacoco-report-classes.jar)
# make a task that invokes instrumentation
my_instrumented_path := $(my_files)/work/instrumented/classes
diff --git a/core/java.mk b/core/java.mk
index 5fe8da5..123cbe8 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -176,7 +176,9 @@
#######################################
# defines built_odex along with rule to install odex
+my_manifest_or_apk := $(full_android_manifest)
include $(BUILD_SYSTEM)/dex_preopt_odex_install.mk
+my_manifest_or_apk :=
#######################################
# Make sure there's something to build.
@@ -253,8 +255,7 @@
ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
$(full_classes_header_jarjar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
$(full_classes_header_jarjar): $(full_classes_turbine_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
- @echo Header JarJar: $@
- $(hide) $(JAVA) -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
+ $(call transform-jarjar)
else
full_classes_header_jarjar := $(full_classes_turbine_jar)
endif
@@ -334,8 +335,7 @@
ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
$(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
$(full_classes_jarjar_jar): $(full_classes_processed_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
- @echo JarJar: $@
- $(hide) $(JAVA) -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
+ $(call transform-jarjar)
else
full_classes_jarjar_jar := $(full_classes_processed_jar)
endif
@@ -470,6 +470,17 @@
ifneq ($(filter obfuscation,$(LOCAL_PROGUARD_ENABLED)),)
$(built_dex_intermediate): .KATI_IMPLICIT_OUTPUTS := $(proguard_dictionary) $(proguard_configuration)
+
+ # Make a rule to copy the proguard_dictionary to a packaging directory.
+ $(eval $(call copy-one-file,$(proguard_dictionary),\
+ $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary))
+ $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+ $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary)
+
+ $(eval $(call copy-one-file,$(full_classes_pre_proguard_jar),\
+ $(call local-packaging-dir,proguard_dictionary)/classes.jar))
+ $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+ $(call local-packaging-dir,proguard_dictionary)/classes.jar)
endif
endif # LOCAL_PROGUARD_ENABLED defined
diff --git a/core/java_host_unit_test_config_template.xml b/core/java_host_unit_test_config_template.xml
index ff300da..d8795f9 100644
--- a/core/java_host_unit_test_config_template.xml
+++ b/core/java_host_unit_test_config_template.xml
@@ -17,6 +17,7 @@
<configuration description="Runs {MODULE}">
<option name="test-suite-tag" value="apct" />
<option name="test-suite-tag" value="apct-unit-tests" />
+ <option name="config-descriptor:metadata" key="component" value="{MODULE}" />
{EXTRA_CONFIGS}
diff --git a/core/java_prebuilt_internal.mk b/core/java_prebuilt_internal.mk
index 279b0e4..990b7d4 100644
--- a/core/java_prebuilt_internal.mk
+++ b/core/java_prebuilt_internal.mk
@@ -33,6 +33,7 @@
ifeq ($(prebuilt_module_is_dex_javalib),true)
my_dex_jar := $(my_prebuilt_src_file)
+my_manifest_or_apk := $(my_prebuilt_src_file)
# This is a target shared library, i.e. a jar with classes.dex.
$(foreach pair,$(PRODUCT_BOOT_JARS), \
@@ -43,7 +44,9 @@
#######################################
# defines built_odex along with rule to install odex
+my_manifest_or_apk := $(my_prebuilt_src_file)
include $(BUILD_SYSTEM)/dex_preopt_odex_install.mk
+my_manifest_or_apk :=
#######################################
$(built_module) : $(my_prebuilt_src_file)
$(call copy-file-to-target)
diff --git a/core/local_vndk.mk b/core/local_vndk.mk
index b1bd3e6..befbc59 100644
--- a/core/local_vndk.mk
+++ b/core/local_vndk.mk
@@ -5,6 +5,7 @@
ifndef LOCAL_SDK_VERSION
ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_OEM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
LOCAL_USE_VNDK:=true
+ LOCAL_USE_VNDK_VENDOR:=true
# Note: no need to check LOCAL_MODULE_PATH* since LOCAL_[VENDOR|ODM|OEM]_MODULE is already
# set correctly before this is included.
endif
@@ -40,6 +41,7 @@
# If we're not using the VNDK, drop all restrictions
ifndef BOARD_VNDK_VERSION
LOCAL_USE_VNDK:=
+ LOCAL_USE_VNDK_VENDOR:=
LOCAL_USE_VNDK_PRODUCT:=
endif
endif
diff --git a/core/main.mk b/core/main.mk
index 5ea95c8..1e9a95f 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -41,7 +41,12 @@
# without changing the command line every time. Avoids rebuilds
# when using ninja.
$(shell mkdir -p $(SOONG_OUT_DIR) && \
- echo -n $(BUILD_NUMBER) > $(SOONG_OUT_DIR)/build_number.txt)
+ echo -n $(BUILD_NUMBER) > $(SOONG_OUT_DIR)/build_number.tmp; \
+ if ! cmp -s $(SOONG_OUT_DIR)/build_number.tmp $(SOONG_OUT_DIR)/build_number.txt; then \
+ mv $(SOONG_OUT_DIR)/build_number.tmp $(SOONG_OUT_DIR)/build_number.txt; \
+ else \
+ rm $(SOONG_OUT_DIR)/build_number.tmp; \
+ fi)
BUILD_NUMBER_FILE := $(SOONG_OUT_DIR)/build_number.txt
.KATI_READONLY := BUILD_NUMBER_FILE
$(KATI_obsolete_var BUILD_NUMBER,See https://android.googlesource.com/platform/build/+/master/Changes.md#BUILD_NUMBER)
@@ -83,6 +88,8 @@
-include test/vts/tools/vts-core-tradefed/build/config.mk
# CSUITE-specific config.
-include test/app_compat/csuite/tools/build/config.mk
+# CTS-Root-specific config.
+-include test/cts-root/tools/build/config.mk
# Clean rules
.PHONY: clean-dex-files
@@ -283,6 +290,27 @@
ro.product.first_api_level=$(PRODUCT_SHIPPING_API_LEVEL)
endif
+ifneq ($(TARGET_BUILD_VARIANT),user)
+ ifdef PRODUCT_SET_DEBUGFS_RESTRICTIONS
+ ADDITIONAL_VENDOR_PROPERTIES += \
+ ro.product.enforce_debugfs_restrictions=$(PRODUCT_SET_DEBUGFS_RESTRICTIONS)
+ endif
+endif
+
+# Vendors with GRF must define BOARD_SHIPPING_API_LEVEL for the vendor API level.
+# This must not be defined for the non-GRF devices.
+ifdef BOARD_SHIPPING_API_LEVEL
+ADDITIONAL_VENDOR_PROPERTIES += \
+ ro.board.first_api_level=$(BOARD_SHIPPING_API_LEVEL)
+
+# To manually set the vendor API level of the vendor modules, BOARD_API_LEVEL can be used.
+# The values of the GRF properties will be verified by post_process_props.py
+ifdef BOARD_API_LEVEL
+ADDITIONAL_VENDOR_PROPERTIES += \
+ ro.board.api_level=$(BOARD_API_LEVEL)
+endif
+endif
+
ADDITIONAL_VENDOR_PROPERTIES += \
ro.vendor.build.security_patch=$(VENDOR_SECURITY_PATCH) \
ro.product.board=$(TARGET_BOOTLOADER_BOARD_NAME) \
@@ -1256,8 +1284,10 @@
$(if $(or $(ALL_MODULES.$(m).PATH),$(call get-modules-for-2nd-arch,TARGET,$(m))),,$(m)))
$(call maybe-print-list-and-error,$(filter-out $(_allow_list),$(_nonexistent_modules)),\
$(INTERNAL_PRODUCT) includes non-existent modules in PRODUCT_PACKAGES)
- $(call maybe-print-list-and-error,$(filter-out $(_nonexistent_modules),$(_allow_list)),\
- $(INTERNAL_PRODUCT) includes redundant allow list entries for non-existent PRODUCT_PACKAGES)
+ # TODO(b/182105280): Consider re-enabling this check when the ART modules
+ # have been cleaned up from the allowed_list in target/product/generic.mk.
+ #$(call maybe-print-list-and-error,$(filter-out $(_nonexistent_modules),$(_allow_list)),\
+ # $(INTERNAL_PRODUCT) includes redundant allow list entries for non-existent PRODUCT_PACKAGES)
endif
# Check to ensure that all modules in PRODUCT_HOST_PACKAGES exist
@@ -1485,6 +1515,12 @@
.PHONY: vendorbootimage_debug
vendorbootimage_debug: $(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET)
+.PHONY: vendorramdisk
+vendorramdisk: $(INSTALLED_VENDOR_RAMDISK_TARGET)
+
+.PHONY: vendorramdisk_debug
+vendorramdisk_debug: $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET)
+
.PHONY: productimage
productimage: $(INSTALLED_PRODUCTIMAGE_TARGET)
@@ -1543,12 +1579,16 @@
$(INSTALLED_VENDORIMAGE_TARGET) \
$(INSTALLED_VENDOR_BOOTIMAGE_TARGET) \
$(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET) \
+ $(INSTALLED_VENDOR_RAMDISK_TARGET) \
+ $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) \
$(INSTALLED_ODMIMAGE_TARGET) \
$(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
$(INSTALLED_ODM_DLKMIMAGE_TARGET) \
$(INSTALLED_SUPERIMAGE_EMPTY_TARGET) \
$(INSTALLED_PRODUCTIMAGE_TARGET) \
$(INSTALLED_SYSTEMOTHERIMAGE_TARGET) \
+ $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET) \
+ $(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET) \
$(INSTALLED_FILES_FILE) \
$(INSTALLED_FILES_JSON) \
$(INSTALLED_FILES_FILE_VENDOR) \
@@ -1726,11 +1766,11 @@
$(INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK) \
$(INSTALLED_DEBUG_RAMDISK_TARGET) \
$(INSTALLED_DEBUG_BOOTIMAGE_TARGET) \
- $(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET) \
- )
- $(call dist-for-goals, bootimage_test_harness, \
$(INSTALLED_TEST_HARNESS_RAMDISK_TARGET) \
$(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET) \
+ $(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET) \
+ $(INSTALLED_VENDOR_RAMDISK_TARGET) \
+ $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) \
)
endif
@@ -1844,7 +1884,7 @@
ndk: $(SOONG_OUT_DIR)/ndk.timestamp
.PHONY: ndk
-# Checks that build/soong/apex/allowed_deps.txt remains up to date
+# Checks that allowed_deps.txt remains up to date
ifneq ($(UNSAFE_DISABLE_APEX_ALLOWED_DEPS_CHECK),true)
droidcore: ${APEX_ALLOWED_DEPS_CHECK}
endif
diff --git a/core/node_fns.mk b/core/node_fns.mk
index b81d60c..8d20160 100644
--- a/core/node_fns.mk
+++ b/core/node_fns.mk
@@ -195,7 +195,11 @@
$(call clear-var-list, $(3))
$(eval LOCAL_PATH := $(patsubst %/,%,$(dir $(2))))
$(eval MAKEFILE_LIST :=)
+ $(call dump-import-start,$(_include_stack))
+ $(call dump-config-vals,$(2),before)
$(eval include $(2))
+ $(call dump-import-done,$(_include_stack))
+ $(call dump-config-vals,$(2),after)
$(eval _included := $(filter-out $(2),$(MAKEFILE_LIST)))
$(eval MAKEFILE_LIST :=)
$(eval LOCAL_PATH :=)
@@ -250,6 +254,7 @@
# of the default list semantics
#
define import-nodes
+$(call dump-phase-start,$(1),$(2),$(3),$(4),build/make/core/node_fns.mk) \
$(if \
$(foreach _in,$(2), \
$(eval _node_import_context := _nic.$(1).[[$(_in)]]) \
@@ -263,5 +268,6 @@
$(if $(_include_stack),$(eval $(error ASSERTION FAILED: _include_stack \
should be empty here: $(_include_stack))),) \
) \
-,)
+,) \
+$(call dump-phase-end,build/make/core/node_fns.mk)
endef
diff --git a/core/notice_files.mk b/core/notice_files.mk
index 89f822b..9678380 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -80,7 +80,7 @@
ifeq (true,$(is_container))
# Include shared libraries' notices for "container" types, but not for binaries etc.
notice_deps := \
- $(sort \
+ $(strip \
$(LOCAL_REQUIRED_MODULES) \
$(LOCAL_STATIC_LIBRARIES) \
$(LOCAL_WHOLE_STATIC_LIBRARIES) \
@@ -95,7 +95,7 @@
)
else
notice_deps := \
- $(sort \
+ $(strip \
$(LOCAL_REQUIRED_MODULES) \
$(LOCAL_STATIC_LIBRARIES) \
$(LOCAL_WHOLE_STATIC_LIBRARIES) \
@@ -106,24 +106,24 @@
)
endif
ifeq ($(LOCAL_IS_HOST_MODULE),true)
-notice_deps := $(sort $(notice_deps) $(LOCAL_HOST_REQUIRED_MODULES))
+notice_deps := $(strip $(notice_deps) $(LOCAL_HOST_REQUIRED_MODULES))
else
-notice_deps := $(sort $(notice_deps) $(LOCAL_TARGET_REQUIRED_MODULES))
+notice_deps := $(strip $(notice_deps) $(LOCAL_TARGET_REQUIRED_MODULES))
endif
ifdef my_register_name
ALL_MODULES.$(my_register_name).LICENSE_PACKAGE_NAME := $(strip $(license_package_name))
-ALL_MODULES.$(my_register_name).LICENSE_KINDS := $(sort $(ALL_MODULES.$(my_register_name).LICENSE_KINDS) $(license_kinds))
-ALL_MODULES.$(my_register_name).LICENSE_CONDITIONS := $(sort $(ALL_MODULES.$(my_register_name).LICENSE_CONDITIONS) $(license_conditions))
-ALL_MODULES.$(my_register_name).LICENSE_INSTALL_MAP := $(sort $(ALL_MODULES.$(my_register_name).LICENSE_INSTALL_MAP) $(install_map))
-ALL_MODULES.$(my_register_name).NOTICE_DEPS := $(sort $(ALL_MODULES.$(my_register_name).NOTICE_DEPS) $(notice_deps))
-ALL_MODULES.$(my_register_name).IS_CONTAINER := $(sort $(ALL_MODULES.$(my_register_name).IS_CONTAINER) $(is_container))
+ALL_MODULES.$(my_register_name).LICENSE_KINDS := $(ALL_MODULES.$(my_register_name).LICENSE_KINDS) $(license_kinds)
+ALL_MODULES.$(my_register_name).LICENSE_CONDITIONS := $(ALL_MODULES.$(my_register_name).LICENSE_CONDITIONS) $(license_conditions)
+ALL_MODULES.$(my_register_name).LICENSE_INSTALL_MAP := $(ALL_MODULES.$(my_register_name).LICENSE_INSTALL_MAP) $(install_map)
+ALL_MODULES.$(my_register_name).NOTICE_DEPS := $(ALL_MODULES.$(my_register_name).NOTICE_DEPS) $(notice_deps)
+ALL_MODULES.$(my_register_name).IS_CONTAINER := $(strip $(filter-out false,$(ALL_MODULES.$(my_register_name).IS_CONTAINER) $(is_container)))
endif
ifdef notice_file
ifdef my_register_name
-ALL_MODULES.$(my_register_name).NOTICES := $(sort $(ALL_MODULES.$(my_register_name).NOTICES) $(notice_file))
+ALL_MODULES.$(my_register_name).NOTICES := $(ALL_MODULES.$(my_register_name).NOTICES) $(notice_file)
endif
# This relies on the name of the directory in PRODUCT_OUT matching where
@@ -180,9 +180,10 @@
installed_notice_file := $($(my_prefix)OUT_NOTICE_FILES)/src/$(module_installed_filename).txt
ifdef my_register_name
-ALL_MODULES.$(my_register_name).INSTALLED_NOTICE_FILE := $(installed_notice_file)
-endif
-
+ALL_MODULES.$(my_register_name).INSTALLED_NOTICE_FILE := $(ALL_MODULES.$(my_register_name).INSTALLED_NOTICE_FILE) $(installed_notice_file)
+ALL_MODULES.$(my_register_name).MODULE_INSTALLED_FILENAMES := $(ALL_MODULES.$(my_register_name).MODULE_INSTALLED_FILENAMES) $(module_installed_filename)
+INSTALLED_NOTICE_FILES.$(installed_notice_file).MODULE := $(my_register_name)
+else
$(installed_notice_file): PRIVATE_INSTALLED_MODULE := $(module_installed_filename)
$(installed_notice_file) : PRIVATE_NOTICES := $(notice_file)
@@ -190,6 +191,7 @@
@echo Notice file: $< -- $@
$(hide) mkdir -p $(dir $@)
$(hide) awk 'FNR==1 && NR > 1 {print "\n"} {print}' $(PRIVATE_NOTICES) > $@
+endif
ifdef LOCAL_INSTALLED_MODULE
# Make LOCAL_INSTALLED_MODULE depend on NOTICE files if they exist
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 1b40624..9f5a599 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -101,7 +101,6 @@
enforce_rro_enabled :=
ifneq (,$(filter *, $(PRODUCT_ENFORCE_RRO_TARGETS)))
# * means all system and system_ext APKs, so enable conditionally based on module path.
- # Note that modules in PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS are excluded even if it is '*'
# Note that base_rules.mk has not yet been included, so it's likely that only
# one of LOCAL_MODULE_PATH and the LOCAL_X_MODULE flags has been set.
@@ -120,12 +119,6 @@
enforce_rro_enabled := true
endif
-# TODO(b/150820813) Some modules depend on static overlay, remove this after eliminating the dependency.
-ifneq (,$(filter $(LOCAL_PACKAGE_NAME), $(PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS)))
- enforce_rro_enabled :=
-endif
-
-
product_package_overlays := $(strip \
$(wildcard $(foreach dir, $(PRODUCT_PACKAGE_OVERLAYS), \
$(addprefix $(dir)/, $(LOCAL_RESOURCE_DIR)))))
@@ -479,31 +472,6 @@
# Set a actual_partition_tag (calculated in base_rules.mk) for the package.
PACKAGES.$(LOCAL_PACKAGE_NAME).PARTITION := $(actual_partition_tag)
-# Verify LOCAL_USES_LIBRARIES/LOCAL_OPTIONAL_USES_LIBRARIES
-# If LOCAL_ENFORCE_USES_LIBRARIES is not set, default to true if either of LOCAL_USES_LIBRARIES or
-# LOCAL_OPTIONAL_USES_LIBRARIES are specified.
-# Will change the default to true unconditionally in the future.
-ifndef LOCAL_ENFORCE_USES_LIBRARIES
- ifneq (,$(strip $(LOCAL_USES_LIBRARIES)$(LOCAL_OPTIONAL_USES_LIBRARIES)))
- LOCAL_ENFORCE_USES_LIBRARIES := true
- endif
-endif
-
-my_enforced_uses_libraries :=
-ifdef LOCAL_ENFORCE_USES_LIBRARIES
- my_manifest_check := $(intermediates.COMMON)/manifest/AndroidManifest.xml.check
- $(my_manifest_check): $(MANIFEST_CHECK)
- $(my_manifest_check): PRIVATE_USES_LIBRARIES := $(LOCAL_USES_LIBRARIES)
- $(my_manifest_check): PRIVATE_OPTIONAL_USES_LIBRARIES := $(LOCAL_OPTIONAL_USES_LIBRARIES)
- $(my_manifest_check): $(full_android_manifest)
- @echo Checking manifest: $<
- $(MANIFEST_CHECK) --enforce-uses-libraries \
- $(addprefix --uses-library ,$(PRIVATE_USES_LIBRARIES)) \
- $(addprefix --optional-uses-library ,$(PRIVATE_OPTIONAL_USES_LIBRARIES)) \
- $< -o $@
- $(LOCAL_BUILT_MODULE): $(my_manifest_check)
-endif
-
# Define the rule to build the actual package.
# PRIVATE_JNI_SHARED_LIBRARIES is a list of <abi>:<path_of_built_lib>.
$(LOCAL_BUILT_MODULE): PRIVATE_JNI_SHARED_LIBRARIES := $(jni_shared_libraries_with_abis)
diff --git a/core/product.mk b/core/product.mk
index 2ab4b06..015fe44 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -195,9 +195,6 @@
# Package list to apply enforcing RRO.
_product_list_vars += PRODUCT_ENFORCE_RRO_TARGETS
-# Packages to skip auto-generating RROs for when PRODUCT_ENFORCE_RRO_TARGETS is set to *.
-_product_list_vars += PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS
-
_product_list_vars += PRODUCT_SDK_ATREE_FILES
_product_list_vars += PRODUCT_SDK_ADDON_NAME
_product_list_vars += PRODUCT_SDK_ADDON_COPY_FILES
@@ -224,10 +221,10 @@
# instead of PRODUCT_BOOT_JARS, so that device-specific jars go after common jars.
_product_list_vars += PRODUCT_BOOT_JARS_EXTRA
-_product_list_vars += PRODUCT_SUPPORTS_BOOT_SIGNER
-_product_list_vars += PRODUCT_SUPPORTS_VBOOT
-_product_list_vars += PRODUCT_SUPPORTS_VERITY
-_product_list_vars += PRODUCT_SUPPORTS_VERITY_FEC
+_product_single_value_vars += PRODUCT_SUPPORTS_BOOT_SIGNER
+_product_single_value_vars += PRODUCT_SUPPORTS_VBOOT
+_product_single_value_vars += PRODUCT_SUPPORTS_VERITY
+_product_single_value_vars += PRODUCT_SUPPORTS_VERITY_FEC
_product_list_vars += PRODUCT_SYSTEM_SERVER_APPS
_product_list_vars += PRODUCT_SYSTEM_SERVER_JARS
# List of system_server jars delivered via apex. Format = <apex name>:<jar name>.
@@ -239,6 +236,9 @@
# This is necessary to avoid jars reordering due to makefile inheritance order.
_product_list_vars += PRODUCT_SYSTEM_SERVER_JARS_EXTRA
+# Set to true to disable <uses-library> checks for a product.
+_product_list_vars += PRODUCT_BROKEN_VERIFY_USES_LIBRARIES
+
# All of the apps that we force preopt, this overrides WITH_DEXPREOPT.
_product_list_vars += PRODUCT_ALWAYS_PREOPT_EXTRACTED_APK
_product_list_vars += PRODUCT_DEXPREOPT_SPEED_APPS
@@ -341,6 +341,9 @@
# This flag implies PRODUCT_USE_DYNAMIC_PARTITIONS.
_product_single_value_vars += PRODUCT_RETROFIT_DYNAMIC_PARTITIONS
+# When this is true, various build time as well as runtime debugfs restrictions are enabled.
+_product_single_value_vars += PRODUCT_SET_DEBUGFS_RESTRICTIONS
+
# Other dynamic partition feature flags.PRODUCT_USE_DYNAMIC_PARTITION_SIZE and
# PRODUCT_BUILD_SUPER_PARTITION default to the value of PRODUCT_USE_DYNAMIC_PARTITIONS.
_product_single_value_vars += \
@@ -363,6 +366,11 @@
_product_list_vars += PRODUCT_PACKAGE_NAME_OVERRIDES
_product_list_vars += PRODUCT_CERTIFICATE_OVERRIDES
+# A list of <overridden-apex>:<override-apex> pairs that specifies APEX module
+# overrides to be applied to the APEX names in the boot jar variables
+# (PRODUCT_BOOT_JARS, PRODUCT_UPDATABLE_BOOT_JARS etc).
+_product_list_vars += PRODUCT_BOOT_JAR_MODULE_OVERRIDES
+
# Controls for whether different partitions are built for the current product.
_product_single_value_vars += PRODUCT_BUILD_SYSTEM_IMAGE
_product_single_value_vars += PRODUCT_BUILD_SYSTEM_OTHER_IMAGE
@@ -379,13 +387,11 @@
_product_single_value_vars += PRODUCT_BUILD_BOOT_IMAGE
_product_single_value_vars += PRODUCT_BUILD_VENDOR_BOOT_IMAGE
_product_single_value_vars += PRODUCT_BUILD_VBMETA_IMAGE
+_product_single_value_vars += PRODUCT_BUILD_SUPER_EMPTY_IMAGE
# List of boot jars delivered via apex
_product_list_vars += PRODUCT_UPDATABLE_BOOT_JARS
-# Whether the product would like to check prebuilt ELF files.
-_product_single_value_vars += PRODUCT_CHECK_ELF_FILES
-
# If set, device uses virtual A/B.
_product_single_value_vars += PRODUCT_VIRTUAL_AB_OTA
@@ -460,7 +466,9 @@
$(eval current_mk := $(strip $(word 1,$(_include_stack)))) \
$(eval inherit_var := PRODUCTS.$(current_mk).INHERITS_FROM) \
$(eval $(inherit_var) := $(sort $($(inherit_var)) $(np))) \
- $(eval PARENT_PRODUCT_FILES := $(sort $(PARENT_PRODUCT_FILES) $(current_mk)))
+ $(eval PARENT_PRODUCT_FILES := $(sort $(PARENT_PRODUCT_FILES) $(current_mk))) \
+ $(call dump-inherit,$(strip $(word 1,$(_include_stack))),$(1)) \
+ $(call dump-config-vals,$(current_mk),inherit)
endef
# Specifies a number of path prefixes, relative to PRODUCT_OUT, where the
@@ -607,6 +615,8 @@
# to a shorthand that is more convenient to read from elsewhere.
#
define strip-product-vars
+$(call dump-phase-start,PRODUCT-EXPAND,,$(_product_var_list),$(_product_single_value_vars), \
+ build/make/core/product.mk) \
$(foreach v,\
$(_product_var_list) \
PRODUCT_ENFORCE_PACKAGES_EXIST \
@@ -614,7 +624,8 @@
$(eval $(v) := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).$(v)))) \
$(eval get-product-var = $$(if $$(filter $$(1),$$(INTERNAL_PRODUCT)),$$($$(2)),$$(PRODUCTS.$$(strip $$(1)).$$(2)))) \
$(KATI_obsolete_var PRODUCTS.$(INTERNAL_PRODUCT).$(v),Use $(v) instead) \
-)
+) \
+$(call dump-phase-end,build/make/core/product.mk)
endef
define add-to-product-copy-files-if-exists
diff --git a/core/product_config.mk b/core/product_config.mk
index 7b72b5e..d703ee3 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -146,6 +146,11 @@
endif
endif
+ifneq ($(ALLOW_RULES_IN_PRODUCT_CONFIG),)
+_product_config_saved_KATI_ALLOW_RULES := $(.KATI_ALLOW_RULES)
+.KATI_ALLOW_RULES := $(ALLOW_RULES_IN_PRODUCT_CONFIG)
+endif
+
ifeq ($(load_all_product_makefiles),true)
# Import all product makefiles.
$(call import-products, $(all_product_makefiles))
@@ -163,12 +168,19 @@
# Quick check
$(check-all-products)
+ifeq ($(SKIP_ARTIFACT_PATH_REQUIREMENT_PRODUCTS_CHECK),)
# Import all the products that have made artifact path requirements, so that we can verify
# the artifacts they produce.
# These are imported after check-all-products because some of them might not be real products.
$(foreach makefile,$(ARTIFACT_PATH_REQUIREMENT_PRODUCTS),\
$(if $(filter-out $(makefile),$(PRODUCTS)),$(eval $(call import-products,$(makefile))))\
)
+endif
+
+ifneq ($(ALLOW_RULES_IN_PRODUCT_CONFIG),)
+.KATI_ALLOW_RULES := $(_product_config_saved_KATI_ALLOW_RULES)
+_product_config_saved_KATI_ALLOW_RULES :=
+endif
ifneq ($(filter dump-products, $(MAKECMDGOALS)),)
$(dump-products)
@@ -181,14 +193,16 @@
ifneq ($(current_product_makefile),$(INTERNAL_PRODUCT))
$(error PRODUCT_NAME inconsistent in $(current_product_makefile) and $(INTERNAL_PRODUCT))
endif
-current_product_makefile :=
-all_product_makefiles :=
-all_product_configs :=
+
############################################################################
# Strip and assign the PRODUCT_ variables.
$(call strip-product-vars)
+current_product_makefile :=
+all_product_makefiles :=
+all_product_configs :=
+
#############################################################################
# Quick check and assign default values
@@ -224,6 +238,19 @@
PRODUCT_BOOT_JARS := $(foreach pair,$(PRODUCT_BOOT_JARS), \
$(if $(findstring :,$(pair)),,platform:)$(pair))
+# Replaces references to overridden boot jar modules in a boot jars variable.
+# $(1): Name of a boot jars variable with <apex>:<jar> pairs.
+define replace-boot-jar-module-overrides
+ $(foreach pair,$(PRODUCT_BOOT_JAR_MODULE_OVERRIDES),\
+ $(eval _rbjmo_from := $(call word-colon,1,$(pair)))\
+ $(eval _rbjmo_to := $(call word-colon,2,$(pair)))\
+ $(eval $(1) := $(patsubst $(_rbjmo_from):%,$(_rbjmo_to):%,$($(1)))))
+endef
+
+$(call replace-boot-jar-module-overrides,PRODUCT_BOOT_JARS)
+$(call replace-boot-jar-module-overrides,PRODUCT_UPDATABLE_BOOT_JARS)
+$(call replace-boot-jar-module-overrides,ART_APEX_JARS)
+
# The extra system server jars must be appended at the end after common system server jars.
PRODUCT_SYSTEM_SERVER_JARS += $(PRODUCT_SYSTEM_SERVER_JARS_EXTRA)
@@ -401,6 +428,11 @@
$(KATI_obsolete_var PRODUCT_USE_PRODUCT_VNDK,Use PRODUCT_PRODUCT_VNDK_VERSION instead)
$(KATI_obsolete_var PRODUCT_USE_PRODUCT_VNDK_OVERRIDE,Use PRODUCT_PRODUCT_VNDK_VERSION instead)
+ifdef PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS
+ $(error PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS is deprecated, consider using RRO for \
+ $(PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS))
+endif
+
define product-overrides-config
$$(foreach rule,$$(PRODUCT_$(1)_OVERRIDES),\
$$(if $$(filter 2,$$(words $$(subst :,$$(space),$$(rule)))),,\
diff --git a/core/product_config.rbc b/core/product_config.rbc
new file mode 100644
index 0000000..111e759
--- /dev/null
+++ b/core/product_config.rbc
@@ -0,0 +1,489 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:envsetup.rbc", _envsetup_init = "init")
+
+"""Runtime functions."""
+
+def _global_init():
+ """Returns dict created from the runtime environment."""
+ globals = dict()
+
+ # Environment variables
+ for k in dir(rblf_env):
+ globals[k] = getattr(rblf_env, k)
+
+ # Variables set as var=value command line arguments
+ for k in dir(rblf_cli):
+ globals[k] = getattr(rblf_cli, k)
+
+ globals.setdefault("PRODUCT_SOONG_NAMESPACES", [])
+ _envsetup_init(globals)
+
+ # Variables that should be defined.
+ mandatory_vars = [
+ "PLATFORM_VERSION_CODENAME",
+ "PLATFORM_VERSION",
+ "PRODUCT_SOONG_NAMESPACES",
+ # TODO(asmundak): do we need TARGET_ARCH? AOSP does not reference it
+ "TARGET_BUILD_TYPE",
+ "TARGET_BUILD_VARIANT",
+ "TARGET_PRODUCT",
+ ]
+ for bv in mandatory_vars:
+ if not bv in globals:
+ fail(bv, " is not defined")
+
+ return globals
+
+_globals_base = _global_init()
+
+def __print_attr(attr, value):
+ if not value:
+ return
+ if type(value) == "list":
+ if _options.rearrange:
+ value = __printvars_rearrange_list(value)
+ if _options.format == "pretty":
+ print(attr, "=", repr(value))
+ elif _options.format == "make":
+ print(attr, ":=", " ".join(value))
+ elif _options.format == "pretty":
+ print(attr, "=", repr(value))
+ elif _options.format == "make":
+ print(attr, ":=", value)
+ else:
+ fail("bad output format", _options.format)
+
+def _printvars(globals, cfg):
+ """Prints known configuration variables."""
+ for attr, val in sorted(cfg.items()):
+ __print_attr(attr, val)
+ if _options.print_globals:
+ print()
+ for attr, val in sorted(globals.items()):
+ if attr not in _globals_base:
+ __print_attr(attr, val)
+
+def __printvars_rearrange_list(value_list):
+ """Rearrange value list: return only distinct elements, maybe sorted."""
+ seen = {item: 0 for item in value_list}
+ return sorted(seen.keys()) if _options.rearrange == "sort" else seen.keys()
+
+def _product_configuration(top_pcm_name, top_pcm):
+ """Creates configuration."""
+
+ # Product configuration is created by traversing product's inheritance
+ # tree. It is traversed twice.
+ # First, beginning with top-level module we execute a module and find
+ # its ancestors, repeating this recursively. At the end of this phase
+ # we get the full inheritance tree.
+ # Second, we traverse the tree in the postfix order (i.e., visiting a
+ # node after its ancestors) to calculate the product configuration.
+ #
+ # PCM means "Product Configuration Module", i.e., a Starlark file
+ # whose body consists of a single init function.
+
+ globals = dict(**_globals_base)
+
+ config_postfix = [] # Configs in postfix order
+
+ # Each PCM is represented by a quadruple of function, config, children names
+ # and readiness (that is, the configurations from inherited PCMs have been
+ # substituted).
+ configs = {top_pcm_name: (top_pcm, None, [], False)} # All known PCMs
+
+ stash = [] # Configs to push once their descendants are done
+
+ # Stack containing PCMs to be processed. An item in the stack
+ # is a pair of PCMs name and its height in the product inheritance tree.
+ pcm_stack = [(top_pcm_name, 0)]
+ pcm_count = 0
+
+ # Run it until pcm_stack is exhausted, but no more than N times
+ for n in range(1000):
+ if not pcm_stack:
+ break
+ (name, height) = pcm_stack.pop()
+ pcm, cfg, c, _ = configs[name]
+
+ # cfg is set only after PCM has been called, leverage this
+ # to prevent calling the same PCM twice
+ if cfg != None:
+ continue
+
+ # Push ancestors until we reach this node's height
+ config_postfix.extend([stash.pop() for i in range(len(stash) - height)])
+
+ # Run this one, obtaining its configuration and child PCMs.
+ if _options.trace_modules:
+ print("%d:" % n)
+
+ # Run PCM.
+ handle = __h_new()
+ pcm(globals, handle)
+
+ # Now we know everything about this PCM, record it in 'configs'.
+ children = __h_inherited_modules(handle)
+ if _options.trace_modules:
+ print(" ", " ".join(children.keys()))
+ configs[name] = (pcm, __h_cfg(handle), children.keys(), False)
+ pcm_count = pcm_count + 1
+
+ if len(children) == 0:
+ # Leaf PCM goes straight to the config_postfix
+ config_postfix.append(name)
+ continue
+
+ # Stash this PCM, process children in the sorted order
+ stash.append(name)
+ for child_name in sorted(children, reverse = True):
+ if child_name not in configs:
+ configs[child_name] = (children[child_name], None, [], False)
+ pcm_stack.append((child_name, len(stash)))
+ if pcm_stack:
+ fail("Inheritance processing took too many iterations")
+
+ # Flush the stash
+ config_postfix.extend([stash.pop() for i in range(len(stash))])
+ if len(config_postfix) != pcm_count:
+ fail("Ran %d modules but postfix tree has only %d entries" % (pcm_count, len(config_postfix)))
+
+ if _options.trace_modules:
+ print("\n---Postfix---")
+ for x in config_postfix:
+ print(" ", x)
+
+ # Traverse the tree from the bottom, evaluating inherited values
+ for pcm_name in config_postfix:
+ pcm, cfg, children_names, ready = configs[pcm_name]
+
+ # Should run
+ if cfg == None:
+ fail("%s: has not been run" % pcm_name)
+
+ # Ready once
+ if ready:
+ continue
+
+ # Children should be ready
+ for child_name in children_names:
+ if not configs[child_name][3]:
+ fail("%s: child is not ready" % child_name)
+
+ _substitute_inherited(configs, pcm_name, cfg)
+ _percolate_inherited(configs, pcm_name, cfg, children_names)
+ configs[pcm_name] = pcm, cfg, children_names, True
+
+ return globals, configs[top_pcm_name][1]
+
+def _substitute_inherited(configs, pcm_name, cfg):
+ """Substitutes inherited values in all the attributes.
+
+ When a value of an attribute is a list, some of its items may be
+ references to a value of a same attribute in an inherited product,
+ e.g., for a given module PRODUCT_PACKAGES can be
+ ["foo", (submodule), "bar"]
+ and for 'submodule' PRODUCT_PACKAGES may be ["baz"]
+ (we use a tuple to distinguish submodule references).
+ After the substitution the value of PRODUCT_PACKAGES for the module
+ will become ["foo", "baz", "bar"]
+ """
+ for attr, val in cfg.items():
+ # TODO(asmundak): should we handle single vars?
+ if type(val) != "list":
+ continue
+
+ if attr not in _options.trace_variables:
+ cfg[attr] = _value_expand(configs, attr, val)
+ else:
+ old_val = val
+ new_val = _value_expand(configs, attr, val)
+ if new_val != old_val:
+ print("%s(i): %s=%s (was %s)" % (pcm_name, attr, new_val, old_val))
+ cfg[attr] = new_val
+
+def _value_expand(configs, attr, values_list):
+ """Expands references to inherited values in a given list."""
+ result = []
+ expanded = {}
+ for item in values_list:
+ # Inherited values are 1-tuples
+ if type(item) != "tuple":
+ result.append(item)
+ continue
+ child_name = item[0]
+ if child_name in expanded:
+ continue
+ expanded[child_name] = True
+ child = configs[child_name]
+ if not child[3]:
+ fail("%s should be ready" % child_name)
+ __move_items(result, child[1], attr)
+
+ return result
+
+def _percolate_inherited(configs, cfg_name, cfg, children_names):
+ """Percolates the settings that are present only in children."""
+ percolated_attrs = {}
+ for child_name in children_names:
+ child_cfg = configs[child_name][1]
+ for attr, value in child_cfg.items():
+ if type(value) != "list":
+ if attr in percolated_attrs or not attr in cfg:
+ cfg[attr] = value
+ percolated_attrs[attr] = True
+ continue
+ if attr in percolated_attrs:
+ # We already are percolating this one, just add this list
+ __move_items(cfg[attr], child_cfg, attr)
+ elif not attr in cfg:
+ percolated_attrs[attr] = True
+ cfg[attr] = []
+ __move_items(cfg[attr], child_cfg, attr)
+
+ for attr in _options.trace_variables:
+ if attr in percolated_attrs:
+ print("%s: %s^=%s" % (cfg_name, attr, cfg[attr]))
+
+def __move_items(to_list, from_cfg, attr):
+ value = from_cfg.get(attr, [])
+ if value:
+ to_list.extend(value)
+ from_cfg[attr] = []
+
+def _indirect(pcm_name):
+ """Returns configuration item for the inherited module."""
+ return (pcm_name,)
+
+def _addprefix(prefix, string_or_list):
+ """Adds prefix and returns a list.
+
+ If string_or_list is a list, prepends prefix to each element.
+ Otherwise, string_or_list is considered to be a string which
+ is split into words and then prefix is prepended to each one.
+
+ Args:
+ prefix
+ string_or_list
+
+ """
+ return [prefix + x for x in __words(string_or_list)]
+
+def _addsuffix(suffix, string_or_list):
+ """Adds suffix and returns a list.
+
+ If string_or_list is a list, appends suffix to each element.
+ Otherwise, string_or_list is considered to be a string which
+ is split into words and then suffix is appended to each one.
+
+ Args:
+ suffix
+ string_or_list
+ """
+ return [x + suffix for x in __words(string_or_list)]
+
+def __words(string_or_list):
+ if type(string_or_list) == "list":
+ return string_or_list
+ return string_or_list.split()
+
+# Handle manipulation functions.
+# A handle passed to a PCM consists of:
+# product attributes dict ("cfg")
+# inherited modules dict (maps module name to PCM)
+# default value list (initially empty, modified by inheriting)
+def __h_new():
+ """Constructs a handle which is passed to PCM."""
+ return (dict(), dict(), list())
+
+def __h_inherited_modules(handle):
+ """Returns PCM's inherited modules dict."""
+ return handle[1]
+
+def __h_cfg(handle):
+ """Returns PCM's product configuration attributes dict.
+
+ This function is also exported as rblf.cfg, and every PCM
+ calls it at the beginning.
+ """
+ return handle[0]
+
+def _setdefault(handle, attr):
+ """If attribute has not been set, assigns default value to it.
+
+ This function is exported as rblf.setdefault().
+ Only list attributes are initialized this way. The default
+ value is kept in the PCM's handle. Calling inherit() updates it.
+ """
+ cfg = handle[0]
+ if cfg.get(attr) == None:
+ cfg[attr] = list(handle[2])
+ return cfg[attr]
+
+def _inherit(handle, pcm_name, pcm):
+ """Records inheritance.
+
+ This function is exported as rblf.inherit, PCM calls it when
+ a module is inherited.
+ """
+ cfg, inherited, default_lv = handle
+ inherited[pcm_name] = pcm
+ default_lv.append(_indirect(pcm_name))
+
+ # Add inherited module reference to all configuration values
+ for attr, val in cfg.items():
+ if type(val) == "list":
+ val.append(_indirect(pcm_name))
+
+def _copy_if_exists(path_pair):
+ """If from file exists, returns [from:to] pair."""
+ value = path_pair.split(":", 2)
+
+ # Check that l[0] exists
+ return [":".join(value)] if rblf_file_exists(value[0]) else []
+
+def _enforce_product_packages_exist(pkg_string_or_list):
+ """Makes including non-existent modules in PRODUCT_PACKAGES an error."""
+
+ #TODO(asmundak)
+ pass
+
+def _file_wildcard_exists(file_pattern):
+ """Return True if there are files matching given bash pattern."""
+ return len(rblf_wildcard(file_pattern)) > 0
+
+def _find_and_copy(pattern, from_dir, to_dir):
+ """Return a copy list for the files matching the pattern."""
+ return ["%s/%s:%s/%s" % (from_dir, f, to_dir, f) for f in rblf_wildcard(pattern, from_dir)]
+
+def _filter_out(pattern, text):
+ """Return all the words from `text' that do not match any word in `pattern'.
+
+ Args:
+ pattern: string or list of words. '%' stands for wildcard (in regex terms, '.*')
+ text: string or list of words
+ Return:
+ list of words
+ """
+ rex = __mk2regex(__words(pattern))
+ res = []
+ for w in __words(text):
+ if not _regex_match(rex, w):
+ res.append(w)
+ return res
+
+def _filter(pattern, text):
+ """Return all the words in `text` that match `pattern`.
+
+ Args:
+ pattern: strings of words or a list. A word can contain '%',
+ which stands for any sequence of characters.
+ text: string or list of words.
+ """
+ rex = __mk2regex(__words(pattern))
+ res = []
+ for w in __words(text):
+ if _regex_match(rex, w):
+ res.append(w)
+ return res
+
+def __mk2regex(words):
+ """Returns regular expression equivalent to Make pattern."""
+
+ # TODO(asmundak): this will mishandle '\%'
+ return "^(" + "|".join([w.replace("%", ".*", 1) for w in words]) + ")"
+
+def _regex_match(regex, w):
+ return rblf_regex(regex, w)
+
+def _require_artifacts_in_path(paths, allowed_paths):
+ """TODO."""
+ pass
+
+def _require_artifacts_in_path_relaxed(paths, allowed_paths):
+ """TODO."""
+ pass
+
+def _expand_wildcard(pattern):
+ """Expands shell wildcard pattern."""
+ return rblf_wildcard(pattern)
+
+def _mkerror(file, message = ""):
+ """Prints error and stops."""
+ fail("%s: %s. Stop" % (file, message))
+
+def _mkwarning(file, message = ""):
+ """Prints warning."""
+ print("%s: warning: %s" % (file, message))
+
+def _mkinfo(file, message = ""):
+ """Prints info."""
+ print(message)
+
+def __get_options():
+ """Returns struct containing runtime global settings."""
+ settings = dict(
+ format = "pretty",
+ print_globals = False,
+ rearrange = "",
+ trace_modules = False,
+ trace_variables = [],
+ )
+ for x in getattr(rblf_cli, "RBC_OUT", "").split(","):
+ if x == "sort" or x == "unique":
+ if settings["rearrange"]:
+ fail("RBC_OUT: either sort or unique is allowed (and sort implies unique)")
+ settings["rearrange"] = x
+ elif x == "pretty" or x == "make":
+ settings["format"] = x
+ elif x == "global":
+ settings["print_globals"] = True
+ elif x != "":
+ fail("RBC_OUT: got %s, should be one of: [pretty|make] [sort|unique]" % x)
+ for x in getattr(rblf_cli, "RBC_DEBUG", "").split(","):
+ if x == "!trace":
+ settings["trace_modules"] = True
+ elif x != "":
+ settings["trace_variables"].append(x)
+ return struct(**settings)
+
+# Settings used during debugging.
+_options = __get_options()
+rblf = struct(
+ addprefix = _addprefix,
+ addsuffix = _addsuffix,
+ copy_if_exists = _copy_if_exists,
+ cfg = __h_cfg,
+ enforce_product_packages_exist = _enforce_product_packages_exist,
+ expand_wildcard = _expand_wildcard,
+ file_exists = rblf_file_exists,
+ file_wildcard_exists = _file_wildcard_exists,
+ filter = _filter,
+ filter_out = _filter_out,
+ find_and_copy = _find_and_copy,
+ global_init = _global_init,
+ inherit = _inherit,
+ indirect = _indirect,
+ mkinfo = _mkinfo,
+ mkerror = _mkerror,
+ mkwarning = _mkwarning,
+ printvars = _printvars,
+ product_configuration = _product_configuration,
+ require_artifacts_in_path = _require_artifacts_in_path,
+ require_artifacts_in_path_relaxed = _require_artifacts_in_path_relaxed,
+ setdefault = _setdefault,
+ shell = rblf_shell,
+ warning = _mkwarning,
+)
diff --git a/core/rbe.mk b/core/rbe.mk
index 91606d4..19c0e42 100644
--- a/core/rbe.mk
+++ b/core/rbe.mk
@@ -34,6 +34,12 @@
cxx_compare := false
endif
+ ifdef RBE_CXX_COMPARE
+ cxx_compare := $(RBE_CXX_COMPARE)
+ else
+ cxx_compare := "false"
+ endif
+
ifdef RBE_JAVAC_EXEC_STRATEGY
javac_exec_strategy := $(RBE_JAVAC_EXEC_STRATEGY)
else
diff --git a/core/shared_library_internal.mk b/core/shared_library_internal.mk
index 12b7f44..139de10 100644
--- a/core/shared_library_internal.mk
+++ b/core/shared_library_internal.mk
@@ -39,7 +39,6 @@
else
my_target_libcrt_builtins := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)LIBCRT_BUILTINS)
endif
-my_target_libatomic := $(call intermediates-dir-for,STATIC_LIBRARIES,libatomic,,,$(LOCAL_2ND_ARCH_VAR_PREFIX))/libatomic.a
ifeq ($(LOCAL_NO_CRT),true)
my_target_crtbegin_so_o :=
my_target_crtend_so_o :=
@@ -55,7 +54,6 @@
my_target_crtend_so_o := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJECT_crtend_so.sdk.$(my_ndk_crt_version))
endif
$(linked_module): PRIVATE_TARGET_LIBCRT_BUILTINS := $(my_target_libcrt_builtins)
-$(linked_module): PRIVATE_TARGET_LIBATOMIC := $(my_target_libatomic)
$(linked_module): PRIVATE_TARGET_CRTBEGIN_SO_O := $(my_target_crtbegin_so_o)
$(linked_module): PRIVATE_TARGET_CRTEND_SO_O := $(my_target_crtend_so_o)
@@ -65,7 +63,6 @@
$(my_target_crtbegin_so_o) \
$(my_target_crtend_so_o) \
$(my_target_libcrt_builtins) \
- $(my_target_libatomic) \
$(LOCAL_ADDITIONAL_DEPENDENCIES) $(CLANG_CXX)
$(transform-o-to-shared-lib)
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index 50ac93a..ce7b142 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -7,7 +7,7 @@
# LOCAL_SOONG_HEADER_JAR
# LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
# LOCAL_SOONG_PROGUARD_DICT
-# LOCAL_SOONG_PROGUARD_USAGE
+# LOCAL_SOONG_PROGUARD_USAGE_ZIP
# LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
# LOCAL_SOONG_RRO_DIRS
# LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH)
@@ -74,23 +74,31 @@
ifdef LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
$(eval $(call copy-one-file,$(LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR),\
- $(intermediates.COMMON)/jacoco-report-classes.jar))
+ $(call local-packaging-dir,jacoco)/jacoco-report-classes.jar))
$(call add-dependency,$(LOCAL_BUILT_MODULE),\
- $(intermediates.COMMON)/jacoco-report-classes.jar)
+ $(call local-packaging-dir,jacoco)/jacoco-report-classes.jar)
endif
ifdef LOCAL_SOONG_PROGUARD_DICT
$(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
$(intermediates.COMMON)/proguard_dictionary))
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
+ $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary))
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),\
+ $(call local-packaging-dir,proguard_dictionary)/classes.jar))
$(call add-dependency,$(LOCAL_BUILT_MODULE),\
$(intermediates.COMMON)/proguard_dictionary)
+ $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+ $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary)
+ $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+ $(call local-packaging-dir,proguard_dictionary)/classes.jar)
endif
ifdef LOCAL_SOONG_PROGUARD_USAGE_ZIP
$(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_USAGE_ZIP),\
- $(intermediates.COMMON)/proguard_usage.zip))
+ $(call local-packaging-dir,proguard_usage)/proguard_usage.zip))
$(call add-dependency,$(LOCAL_BUILT_MODULE),\
- $(intermediates.COMMON)/proguard_usage.zip)
+ $(call local-packaging-dir,proguard_usage)/proguard_usage.zip)
endif
ifdef LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
diff --git a/core/soong_config.mk b/core/soong_config.mk
index fde5832..17176df 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -34,7 +34,7 @@
$(call add_json_str, Platform_min_supported_target_sdk_version, $(PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION))
-$(call add_json_bool, Allow_missing_dependencies, $(ALLOW_MISSING_DEPENDENCIES))
+$(call add_json_bool, Allow_missing_dependencies, $(filter true,$(ALLOW_MISSING_DEPENDENCIES)))
$(call add_json_bool, Unbundled_build, $(TARGET_BUILD_UNBUNDLED))
$(call add_json_bool, Unbundled_build_apps, $(TARGET_BUILD_APPS))
$(call add_json_bool, Always_use_prebuilt_sdks, $(TARGET_BUILD_USE_PREBUILT_SDKS))
@@ -80,7 +80,6 @@
$(call add_json_list, DeviceResourceOverlays, $(DEVICE_PACKAGE_OVERLAYS))
$(call add_json_list, ProductResourceOverlays, $(PRODUCT_PACKAGE_OVERLAYS))
$(call add_json_list, EnforceRROTargets, $(PRODUCT_ENFORCE_RRO_TARGETS))
-$(call add_json_list, EnforceRROExemptedTargets, $(PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS))
$(call add_json_list, EnforceRROExcludedOverlays, $(PRODUCT_ENFORCE_RRO_EXCLUDED_OVERLAYS))
$(call add_json_str, AAPTCharacteristics, $(TARGET_AAPT_CHARACTERISTICS))
@@ -153,13 +152,21 @@
$(call add_json_bool,$(module),true))
$(call end_json_map)
+$(call add_json_bool, DirectedRecoverySnapshot, $(DIRECTED_RECOVERY_SNAPSHOT))
+$(call add_json_map, RecoverySnapshotModules)
+$(foreach module,$(RECOVERY_SNAPSHOT_MODULES),\
+ $(call add_json_bool,$(module),true))
+$(call end_json_map)
+
+$(call add_json_list, VendorSnapshotDirsIncluded, $(VENDOR_SNAPSHOT_DIRS_INCLUDED))
+$(call add_json_list, VendorSnapshotDirsExcluded, $(VENDOR_SNAPSHOT_DIRS_EXCLUDED))
+$(call add_json_list, RecoverySnapshotDirsIncluded, $(RECOVERY_SNAPSHOT_DIRS_INCLUDED))
+$(call add_json_list, RecoverySnapshotDirsExcluded, $(RECOVERY_SNAPSHOT_DIRS_EXCLUDED))
+
$(call add_json_bool, Treble_linker_namespaces, $(filter true,$(PRODUCT_TREBLE_LINKER_NAMESPACES)))
$(call add_json_bool, Enforce_vintf_manifest, $(filter true,$(PRODUCT_ENFORCE_VINTF_MANIFEST)))
-$(call add_json_bool, Check_elf_files, $(filter true,$(PRODUCT_CHECK_ELF_FILES)))
-
$(call add_json_bool, Uml, $(filter true,$(TARGET_USER_MODE_LINUX)))
-$(call add_json_bool, Use_lmkd_stats_log, $(filter true,$(TARGET_LMKD_STATS_LOG)))
$(call add_json_str, VendorPath, $(TARGET_COPY_OUT_VENDOR))
$(call add_json_str, OdmPath, $(TARGET_COPY_OUT_ODM))
$(call add_json_str, VendorDlkmPath, $(TARGET_COPY_OUT_VENDOR_DLKM))
@@ -190,16 +197,20 @@
$(call add_json_list, BoardSepolicyM4Defs, $(BOARD_SEPOLICY_M4DEFS))
$(call add_json_str, BoardSepolicyVers, $(BOARD_SEPOLICY_VERS))
+$(call add_json_str, PlatformSepolicyVersion, $(PLATFORM_SEPOLICY_VERSION))
+
$(call add_json_bool, Flatten_apex, $(filter true,$(TARGET_FLATTEN_APEX)))
$(call add_json_bool, ForceApexSymlinkOptimization, $(filter true,$(TARGET_FORCE_APEX_SYMLINK_OPTIMIZATION)))
$(call add_json_str, DexpreoptGlobalConfig, $(DEX_PREOPT_CONFIG))
+$(call add_json_bool, WithDexpreopt, $(filter true,$(WITH_DEXPREOPT)))
+
$(call add_json_list, ManifestPackageNameOverrides, $(PRODUCT_MANIFEST_PACKAGE_NAME_OVERRIDES))
$(call add_json_list, PackageNameOverrides, $(PRODUCT_PACKAGE_NAME_OVERRIDES))
$(call add_json_list, CertificateOverrides, $(PRODUCT_CERTIFICATE_OVERRIDES))
-$(call add_json_bool, EnforceSystemCertificate, $(ENFORCE_SYSTEM_CERTIFICATE))
+$(call add_json_bool, EnforceSystemCertificate, $(filter true,$(ENFORCE_SYSTEM_CERTIFICATE)))
$(call add_json_list, EnforceSystemCertificateAllowList, $(ENFORCE_SYSTEM_CERTIFICATE_ALLOW_LIST))
$(call add_json_list, ProductHiddenAPIStubs, $(PRODUCT_HIDDENAPI_STUBS))
@@ -221,24 +232,38 @@
$(call end_json_map))
$(call end_json_map)
-$(call add_json_bool, EnforceProductPartitionInterface, $(PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE))
+$(call add_json_bool, EnforceProductPartitionInterface, $(filter true,$(PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE)))
$(call add_json_str, DeviceCurrentApiLevelForVendorModules, $(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES))
-$(call add_json_bool, EnforceInterPartitionJavaSdkLibrary, $(PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY))
+$(call add_json_bool, EnforceInterPartitionJavaSdkLibrary, $(filter true,$(PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY)))
$(call add_json_list, InterPartitionJavaLibraryAllowList, $(PRODUCT_INTER_PARTITION_JAVA_LIBRARY_ALLOWLIST))
$(call add_json_bool, InstallExtraFlattenedApexes, $(PRODUCT_INSTALL_EXTRA_FLATTENED_APEXES))
$(call add_json_bool, CompressedApex, $(PRODUCT_COMPRESSED_APEX))
-$(call add_json_bool, BoardUsesRecoveryAsBoot, $(BOARD_USES_RECOVERY_AS_BOOT))
+$(call add_json_bool, BoardUsesRecoveryAsBoot, $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
$(call add_json_list, BoardKernelBinaries, $(BOARD_KERNEL_BINARIES))
$(call add_json_list, BoardKernelModuleInterfaceVersions, $(BOARD_KERNEL_MODULE_INTERFACE_VERSIONS))
-$(call add_json_bool, BoardMoveRecoveryResourcesToVendorBoot, $(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
+$(call add_json_bool, BoardMoveRecoveryResourcesToVendorBoot, $(filter true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT)))
$(call add_json_str, PrebuiltHiddenApiDir, $(BOARD_PREBUILT_HIDDENAPI_DIR))
+$(call add_json_str, ShippingApiLevel, $(PRODUCT_SHIPPING_API_LEVEL))
+
+$(call add_json_bool, BuildBrokenEnforceSyspropOwner, $(filter true,$(BUILD_BROKEN_ENFORCE_SYSPROP_OWNER)))
+$(call add_json_bool, BuildBrokenTrebleSyspropNeverallow, $(filter true,$(BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW)))
+$(call add_json_bool, BuildBrokenVendorPropertyNamespace, $(filter true,$(BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE)))
+
+$(call add_json_bool, BuildDebugfsRestrictionsEnabled, $(filter true,$(PRODUCT_SET_DEBUGFS_RESTRICTIONS)))
+
+$(call add_json_bool, RequiresInsecureExecmemForSwiftshader, $(filter true,$(PRODUCT_REQUIRES_INSECURE_EXECMEM_FOR_SWIFTSHADER)))
+
+$(call add_json_bool, SelinuxIgnoreNeverallows, $(filter true,$(SELINUX_IGNORE_NEVERALLOWS)))
+
+$(call add_json_bool, SepolicySplit, $(filter true,$(PRODUCT_SEPOLICY_SPLIT)))
+
$(call json_end)
$(file >$(SOONG_VARIABLES).tmp,$(json_contents))
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index c600178..0922def 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -47,23 +47,31 @@
ifdef LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
$(eval $(call copy-one-file,$(LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR),\
- $(intermediates.COMMON)/jacoco-report-classes.jar))
+ $(call local-packaging-dir,jacoco)/jacoco-report-classes.jar))
$(call add-dependency,$(common_javalib.jar),\
- $(intermediates.COMMON)/jacoco-report-classes.jar)
+ $(call local-packaging-dir,jacoco)/jacoco-report-classes.jar)
endif
ifdef LOCAL_SOONG_PROGUARD_DICT
$(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
$(intermediates.COMMON)/proguard_dictionary))
- $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
+ $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary))
+ $(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),\
+ $(call local-packaging-dir,proguard_dictionary)/classes.jar))
+ $(call add-dependency,$(common_javalib.jar),\
$(intermediates.COMMON)/proguard_dictionary)
+ $(call add-dependency,$(common_javalib.jar),\
+ $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary)
+ $(call add-dependency,$(common_javalib.jar),\
+ $(call local-packaging-dir,proguard_dictionary)/classes.jar)
endif
-ifdef LOCAL_SOONG_PROGUARD_USAGE
+ifdef LOCAL_SOONG_PROGUARD_USAGE_ZIP
$(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_USAGE_ZIP),\
- $(intermediates.COMMON)/proguard_usage.zip))
- $(call add-dependency,$(LOCAL_BUILT_MODULE),\
- $(intermediates.COMMON)/proguard_usage.zip)
+ $(call local-packaging-dir,proguard_usage)/proguard_usage.zip))
+ $(call add-dependency,$(common_javalib.jar),\
+ $(call local-packaging-dir,proguard_usage)/proguard_usage.zip)
endif
@@ -120,9 +128,11 @@
$(eval $(call copy-one-file,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
$(eval $(call add-dependency,$(LOCAL_BUILT_MODULE),$(common_javalib.jar)))
- $(eval $(call add-dependency,$(common_javalib.jar),$(full_classes_jar)))
- ifneq ($(TURBINE_ENABLED),false)
- $(eval $(call add-dependency,$(common_javalib.jar),$(full_classes_header_jar)))
+ ifdef LOCAL_SOONG_CLASSES_JAR
+ $(eval $(call add-dependency,$(common_javalib.jar),$(full_classes_jar)))
+ ifneq ($(TURBINE_ENABLED),false)
+ $(eval $(call add-dependency,$(common_javalib.jar),$(full_classes_header_jar)))
+ endif
endif
endif
@@ -153,8 +163,10 @@
$(eval $(call copy-one-file,$(LOCAL_SOONG_DEXPREOPT_CONFIG), $(call local-intermediates-dir,)/dexpreopt.config))
endif
+ifdef LOCAL_SOONG_CLASSES_JAR
javac-check : $(full_classes_jar)
javac-check-$(LOCAL_MODULE) : $(full_classes_jar)
+endif
.PHONY: javac-check-$(LOCAL_MODULE)
ifndef LOCAL_IS_HOST_MODULE
diff --git a/core/sysprop.mk b/core/sysprop.mk
index df27067..daebdd3 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -122,7 +122,7 @@
echo "$$(line)" >> $$@;\
)\
)
- $(hide) $(POST_PROCESS_PROPS) $$(_option) $$@ $(5)
+ $(hide) $(POST_PROCESS_PROPS) $$(_option) --sdk-version $(PLATFORM_SDK_VERSION) $$@ $(5)
$(hide) $(foreach file,$(strip $(6)),\
if [ -f "$(file)" ]; then\
cat $(file) >> $$@;\
@@ -331,7 +331,7 @@
$(android_info_prop): $(INSTALLED_ANDROID_INFO_TXT_TARGET)
cat $< | grep 'require version-' | sed -e 's/require version-/ro.build.expect./g' > $@
-_prop_files_ += $(android_info_pro)
+_prop_files_ += $(android_info_prop)
ifdef property_overrides_split_enabled
# Order matters here. When there are duplicates, the last one wins.
diff --git a/core/target_test_internal.mk b/core/target_test_internal.mk
index 40b2ba8..5745451 100644
--- a/core/target_test_internal.mk
+++ b/core/target_test_internal.mk
@@ -39,3 +39,9 @@
LOCAL_MODULE_RELATIVE_PATH := $(LOCAL_MODULE)
endif
endif
+
+# Implicitly run this test under MTE SYNC for aarch64 binaries. This is a no-op
+# on non-MTE hardware.
+ifneq (,$(filter arm64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)))
+ LOCAL_WHOLE_STATIC_LIBRARIES += note_memtag_heap_sync
+endif
diff --git a/core/tasks/cts_root.mk b/core/tasks/cts_root.mk
new file mode 100644
index 0000000..b618121
--- /dev/null
+++ b/core/tasks/cts_root.mk
@@ -0,0 +1,25 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+ifneq ($(wildcard test/cts-root/README.md),)
+test_suite_name := cts_root
+test_suite_tradefed := cts-root-tradefed
+test_suite_readme := test/cts-root/README.md
+
+include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
+
+.PHONY: cts_root
+cts_root: $(compatibility_zip)
+$(call dist-for-goals, cts_root, $(compatibility_zip))
+endif
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index 4bbfd39..c866259 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -29,3 +29,4 @@
droidcore: $(MODULE_INFO_JSON)
$(call dist-for-goals, general-tests, $(MODULE_INFO_JSON))
+$(call dist-for-goals, droidcore, $(MODULE_INFO_JSON))
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index 2b43f0f..20a1694 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -19,6 +19,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := $(my_package_name)
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
LOCAL_MODULE_CLASS := PACKAGING
LOCAL_MODULE_STEM := $(my_package_name).zip
LOCAL_UNINSTALLABLE_MODULE := true
diff --git a/core/verify_uses_libraries.sh b/core/verify_uses_libraries.sh
deleted file mode 100755
index dde0447..0000000
--- a/core/verify_uses_libraries.sh
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/bin/bash
-#
-# Copyright (C) 2018 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-# apt_binary is $(AAPT) in the build.
-
-# Parse sdk, targetSdk, and uses librares in the APK, then cross reference against build specified ones.
-
-set -e
-local_apk=$1
-badging=$(${aapt_binary} dump badging "${local_apk}")
-export sdk_version=$(echo "${badging}" | grep "sdkVersion" | sed -n "s/sdkVersion:'\(.*\)'/\1/p")
-# Export target_sdk_version to the caller.
-export target_sdk_version=$(echo "${badging}" | grep "targetSdkVersion" | sed -n "s/targetSdkVersion:'\(.*\)'/\1/p")
-uses_libraries=$(echo "${badging}" | grep "uses-library" | sed -n "s/uses-library:'\(.*\)'/\1/p")
-optional_uses_libraries=$(echo "${badging}" | grep "uses-library-not-required" | sed -n "s/uses-library-not-required:'\(.*\)'/\1/p")
-
-# Verify that the uses libraries match exactly.
-# Currently we validate the ordering of the libraries since it matters for resolution.
-single_line_libs=$(echo "${uses_libraries}" | tr '\n' ' ' | awk '{$1=$1}1')
-if [[ "${single_line_libs}" != "${uses_library_names}" ]]; then
- echo "LOCAL_USES_LIBRARIES (${uses_library_names})" \
- "do not match (${single_line_libs}) in manifest for ${local_apk}"
- exit 1
-fi
-
-# Verify that the optional uses libraries match exactly.
-single_line_optional_libs=$(echo "${optional_uses_libraries}" | tr '\n' ' ' | awk '{$1=$1}1')
-if [[ "${single_line_optional_libs}" != "${optional_uses_library_names}" ]]; then
- echo "LOCAL_OPTIONAL_USES_LIBRARIES (${optional_uses_library_names}) " \
- "do not match (${single_line_optional_libs}) in manifest for ${local_apk}"
- exit 1
-fi
-
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 0c91a14..c9e3e80 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -240,7 +240,7 @@
# It must be of the form "YYYY-MM-DD" on production devices.
# It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
# If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
- PLATFORM_SECURITY_PATCH := 2021-02-05
+ PLATFORM_SECURITY_PATCH := 2021-03-05
endif
.KATI_READONLY := PLATFORM_SECURITY_PATCH
diff --git a/envsetup.sh b/envsetup.sh
index c03e2cb..344a01a 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -23,10 +23,12 @@
- ggrep: Greps on all local Gradle files.
- gogrep: Greps on all local Go files.
- jgrep: Greps on all local Java files.
+- ktgrep: Greps on all local Kotlin files.
- resgrep: Greps on all local res/*.xml files.
- mangrep: Greps on all local AndroidManifest.xml files.
- mgrep: Greps on all local Makefiles and *.bp files.
- owngrep: Greps on all local OWNERS files.
+- rsgrep: Greps on all local Rust files.
- sepgrep: Greps on all local sepolicy files.
- sgrep: Greps on all local source files.
- godir: Go to the directory containing a file.
@@ -34,6 +36,7 @@
- gomod: Go to the directory containing a module.
- pathmod: Get the directory containing a module.
- outmod: Gets the location of a module's installed outputs with a certain extension.
+- dirmods: Gets the modules defined in a given directory.
- installmod: Adb installs a module's built APK.
- refreshmod: Refresh list of modules for allmod/gomod/pathmod/outmod/installmod.
- syswrite: Remount partitions (e.g. system.img) as writable, rebooting if necessary.
@@ -1000,7 +1003,7 @@
Darwin)
function sgrep()
{
- find -E . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.(c|h|cc|cpp|hpp|S|java|xml|sh|mk|aidl|vts|proto)' \
+ find -E . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.(c|h|cc|cpp|hpp|S|java|kt|xml|sh|mk|aidl|vts|proto)' \
-exec grep --color -n "$@" {} +
}
@@ -1008,7 +1011,7 @@
*)
function sgrep()
{
- find . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.\(c\|h\|cc\|cpp\|hpp\|S\|java\|xml\|sh\|mk\|aidl\|vts\|proto\)' \
+ find . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.\(c\|h\|cc\|cpp\|hpp\|S\|java\|kt\|xml\|sh\|mk\|aidl\|vts\|proto\)' \
-exec grep --color -n "$@" {} +
}
;;
@@ -1037,6 +1040,18 @@
-exec grep --color -n "$@" {} +
}
+function rsgrep()
+{
+ find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.rs" \
+ -exec grep --color -n "$@" {} +
+}
+
+function ktgrep()
+{
+ find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.kt" \
+ -exec grep --color -n "$@" {} +
+}
+
function cgrep()
{
find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f \( -name '*.c' -o -name '*.cc' -o -name '*.cpp' -o -name '*.h' -o -name '*.hpp' \) \
@@ -1085,7 +1100,7 @@
function treegrep()
{
- find -E . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.(c|h|cpp|hpp|S|java|xml)' \
+ find -E . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.(c|h|cpp|hpp|S|java|kt|xml)' \
-exec grep --color -n -i "$@" {} +
}
@@ -1099,7 +1114,7 @@
function treegrep()
{
- find . -name .repo -prune -o -name .git -prune -o -regextype posix-egrep -iregex '.*\.(c|h|cpp|hpp|S|java|xml)' -type f \
+ find . -name .repo -prune -o -name .git -prune -o -regextype posix-egrep -iregex '.*\.(c|h|cpp|hpp|S|java|kt|xml)' -type f \
-exec grep --color -n -i "$@" {} +
}
@@ -1404,8 +1419,9 @@
python -c "import json; print('\n'.join(sorted(json.load(open('$ANDROID_PRODUCT_OUT/module-info.json')).keys())))"
}
-# Get the path of a specific module in the android tree, as cached in module-info.json. If any build change
-# is made, and it should be reflected in the output, you should run 'refreshmod' first.
+# Get the path of a specific module in the android tree, as cached in module-info.json.
+# If any build change is made, and it should be reflected in the output, you should run
+# 'refreshmod' first. Note: This is the inverse of dirmods.
function pathmod() {
if [[ $# -ne 1 ]]; then
echo "usage: pathmod <module>" >&2
@@ -1429,6 +1445,36 @@
fi
}
+# Get the path of a specific module in the android tree, as cached in module-info.json.
+# If any build change is made, and it should be reflected in the output, you should run
+# 'refreshmod' first. Note: This is the inverse of pathmod.
+function dirmods() {
+ if [[ $# -ne 1 ]]; then
+ echo "usage: dirmods <path>" >&2
+ return 1
+ fi
+
+ verifymodinfo || return 1
+
+ python -c "import json, os
+dir = '$1'
+while dir.endswith('/'):
+ dir = dir[:-1]
+prefix = dir + '/'
+module_info = json.load(open('$ANDROID_PRODUCT_OUT/module-info.json'))
+results = set()
+for m in module_info.values():
+ for path in m.get(u'path', []):
+ if path == dir or path.startswith(prefix):
+ name = m.get(u'module_name')
+ if name:
+ results.add(name)
+for name in sorted(results):
+ print(name)
+"
+}
+
+
# Go to a specific module in the android tree, as cached in module-info.json. If any build change
# is made, and it should be reflected in the output, you should run 'refreshmod' first.
function gomod() {
diff --git a/help.sh b/help.sh
index 4af5154..06a9056 100755
--- a/help.sh
+++ b/help.sh
@@ -12,11 +12,15 @@
source build/envsetup.sh # Add "lunch" (and other utilities and variables)
# to the shell environment.
lunch [<product>-<variant>] # Choose the device to target.
-m -j [<goals>] # Execute the configured build.
+m [<goals>] # Execute the configured build.
Usage of "m" imitates usage of the program "make".
See '"${SCRIPT_DIR}"'/Usage.txt for more info about build usage and concepts.
+The parallelism of the build can be set with a -jN argument to "m". If you
+don'\''t provide a -j argument, the build system automatically selects a parallel
+task count that it thinks is optimal for your system.
+
Common goals are:
clean (aka clobber) equivalent to rm -rf out/
diff --git a/target/board/Android.mk b/target/board/Android.mk
index 9edc85c..4dd6b17 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -35,6 +35,8 @@
# $(DEVICE_MANIFEST_FILE) can be a list of files
include $(CLEAR_VARS)
LOCAL_MODULE := vendor_manifest.xml
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 legacy_not_a_contribution
+LOCAL_LICENSE_CONDITIONS := by_exception_only not_allowed notice
LOCAL_MODULE_STEM := manifest.xml
LOCAL_MODULE_CLASS := ETC
LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc/vintf
@@ -65,6 +67,8 @@
my_fragment_files := $$($$(my_fragment_files_var))
include $$(CLEAR_VARS)
LOCAL_MODULE := vendor_manifest_$(1).xml
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 legacy_not_a_contribution
+LOCAL_LICENSE_CONDITIONS := by_exception_only not_allowed notice
LOCAL_MODULE_STEM := manifest_$(1).xml
LOCAL_MODULE_CLASS := ETC
LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc/vintf
@@ -94,6 +98,8 @@
# ODM_MANIFEST_FILES is a list of files that is combined and installed as the default ODM manifest.
include $(CLEAR_VARS)
LOCAL_MODULE := odm_manifest.xml
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 legacy_not_a_contribution
+LOCAL_LICENSE_CONDITIONS := by_exception_only not_allowed notice
LOCAL_MODULE_STEM := manifest.xml
LOCAL_MODULE_CLASS := ETC
LOCAL_MODULE_RELATIVE_PATH := vintf
@@ -124,6 +130,8 @@
my_fragment_files := $$($$(my_fragment_files_var))
include $$(CLEAR_VARS)
LOCAL_MODULE := odm_manifest_$(1).xml
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 legacy_not_a_contribution
+LOCAL_LICENSE_CONDITIONS := by_exception_only not_allowed notice
LOCAL_MODULE_STEM := manifest_$(1).xml
LOCAL_MODULE_CLASS := ETC
LOCAL_MODULE_RELATIVE_PATH := vintf
diff --git a/target/board/BoardConfigMainlineCommon.mk b/target/board/BoardConfigMainlineCommon.mk
index bf015e5..00f6e5b 100644
--- a/target/board/BoardConfigMainlineCommon.mk
+++ b/target/board/BoardConfigMainlineCommon.mk
@@ -19,7 +19,8 @@
# the devices with metadata parition
BOARD_USES_METADATA_PARTITION := true
-BOARD_VNDK_VERSION := current
+# Default is current, but allow devices to override vndk version if needed.
+BOARD_VNDK_VERSION ?= current
# Required flag for non-64 bit devices from P.
TARGET_USES_64_BIT_BINDER := true
diff --git a/target/board/generic_arm64/device.mk b/target/board/generic_arm64/device.mk
index 37c0f25..27dc158 100644
--- a/target/board/generic_arm64/device.mk
+++ b/target/board/generic_arm64/device.mk
@@ -24,7 +24,12 @@
kernel/prebuilts/5.10/arm64/kernel-5.10-lz4:kernel-5.10-lz4 \
kernel/prebuilts/mainline/arm64/kernel-mainline-allsyms:kernel-mainline \
kernel/prebuilts/mainline/arm64/kernel-mainline-gz-allsyms:kernel-mainline-gz \
- kernel/prebuilts/mainline/arm64/kernel-mainline-lz4-allsyms:kernel-mainline-lz4
+ kernel/prebuilts/mainline/arm64/kernel-mainline-lz4-allsyms:kernel-mainline-lz4 \
+
+$(call dist-for-goals, dist_files, kernel/prebuilts/4.19/arm64/prebuilt-info.txt:kernel/4.19/prebuilt-info.txt)
+$(call dist-for-goals, dist_files, kernel/prebuilts/5.4/arm64/prebuilt-info.txt:kernel/5.4/prebuilt-info.txt)
+$(call dist-for-goals, dist_files, kernel/prebuilts/5.10/arm64/prebuilt-info.txt:kernel/5.10/prebuilt-info.txt)
+$(call dist-for-goals, dist_files, kernel/prebuilts/mainline/arm64/prebuilt-info.txt:kernel/mainline/prebuilt-info.txt)
ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
PRODUCT_COPY_FILES += \
diff --git a/target/product/OWNERS b/target/product/OWNERS
index 259c8f4..82e6e88 100644
--- a/target/product/OWNERS
+++ b/target/product/OWNERS
@@ -1 +1,5 @@
per-file runtime_libart.mk = calin@google.com, mast@google.com, ngeoffray@google.com, oth@google.com, rpl@google.com, vmarko@google.com
+
+# GSI
+per-file gsi_release.mk = file:/target/product/gsi/OWNERS
+per-file gsi_keys.mk = file:/target/product/gsi/OWNERS
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index c7ae1f0..8562d4f 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -116,7 +116,6 @@
iptables \
ip-up-vpn \
javax.obex \
- keystore \
keystore2 \
credstore \
ld.mc \
@@ -214,6 +213,7 @@
ndc \
netd \
NetworkStackNext \
+ odsign \
org.apache.http.legacy \
otacerts \
PackageInstaller \
@@ -384,11 +384,6 @@
SettingsProvider \
WallpaperBackup
-# Packages included only for eng/userdebug builds, when building with SANITIZE_TARGET=address
-PRODUCT_PACKAGES_DEBUG_ASAN := \
- fuzz \
- honggfuzz
-
PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE := \
libdumpcoverage
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 131ba31..bb17dda 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -36,17 +36,8 @@
com.android.permission:framework-permission \
com.android.sdkext:framework-sdkextensions \
com.android.wifi:framework-wifi \
- com.android.tethering:framework-tethering
-
-# Add the compatibility library that is needed when android.test.base
-# is removed from the bootclasspath.
-# Default to excluding android.test.base from the bootclasspath.
-ifneq ($(REMOVE_ATB_FROM_BCP),false)
- PRODUCT_PACKAGES += framework-atb-backward-compatibility
- PRODUCT_BOOT_JARS += framework-atb-backward-compatibility
-else
- PRODUCT_BOOT_JARS += android.test.base
-endif
+ com.android.tethering:framework-tethering \
+ com.android.ipsec:android.net.ipsec.ike
# Minimal configuration for running dex2oat (default argument values).
# PRODUCT_USES_DEFAULT_ART_CONFIG must be true to enable boot image compilation.
diff --git a/target/product/generic.mk b/target/product/generic.mk
index d3f81b1..fb5b727 100644
--- a/target/product/generic.mk
+++ b/target/product/generic.mk
@@ -29,4 +29,10 @@
PRODUCT_NAME := generic
allowed_list := product_manifest.xml
+
+# TODO(b/182105280): When ART prebuilts are used in this product, Soong doesn't
+# produce any Android.mk entries for them. Exclude them until that problem is
+# fixed.
+allowed_list += com.android.art com.android.art.debug
+
$(call enforce-product-packages-exist,$(allowed_list))
diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk
index 9580ade..1f310c9 100644
--- a/target/product/generic_system.mk
+++ b/target/product/generic_system.mk
@@ -32,8 +32,6 @@
PRODUCT_PACKAGES += \
LiveWallpapersPicker \
PartnerBookmarksProvider \
- PresencePolling \
- RcsService \
Stk \
Tag \
TimeZoneUpdater \
diff --git a/target/product/gsi/Android.bp b/target/product/gsi/Android.bp
index b7ce86e..88472eb 100644
--- a/target/product/gsi/Android.bp
+++ b/target/product/gsi/Android.bp
@@ -12,6 +12,15 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "build_make_license"
+ // to get the below license kinds:
+ // legacy_restricted
+ default_applicable_licenses: ["build_make_license"],
+}
+
filegroup {
name: "vndk_lib_lists",
srcs: [
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index f21fe16..ecce01a 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -63,6 +63,8 @@
# Script to update the latest VNDK lib list
include $(CLEAR_VARS)
LOCAL_MODULE := update-vndk-list.sh
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := EXECUTABLES
LOCAL_MODULE_STEM := $(LOCAL_MODULE)
LOCAL_IS_HOST_MODULE := true
@@ -146,6 +148,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := vndk_package
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
# Filter LLNDK libs moved to APEX to avoid pulling them into /system/LIB
LOCAL_REQUIRED_MODULES := \
$(filter-out $(LLNDK_MOVED_TO_APEX_LIBRARIES),$(LLNDK_LIBRARIES))
@@ -169,6 +173,8 @@
_vndk_versions += $(BOARD_VNDK_VERSION)
endif
LOCAL_MODULE := vndk_apex_snapshot_package
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_REQUIRED_MODULES := $(foreach vndk_ver,$(_vndk_versions),com.android.vndk.v$(vndk_ver))
include $(BUILD_PHONY_PACKAGE)
@@ -181,6 +187,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := gsi_skip_mount.cfg
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_STEM := skip_mount.cfg
LOCAL_SRC_FILES := $(LOCAL_MODULE)
LOCAL_MODULE_CLASS := ETC
@@ -204,6 +212,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := init.gsi.rc
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_SRC_FILES := $(LOCAL_MODULE)
LOCAL_MODULE_CLASS := ETC
LOCAL_SYSTEM_EXT_MODULE := true
@@ -214,6 +224,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := init.vndk-nodef.rc
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_SRC_FILES := $(LOCAL_MODULE)
LOCAL_MODULE_CLASS := ETC
LOCAL_SYSTEM_EXT_MODULE := true
diff --git a/target/product/gsi/OWNERS b/target/product/gsi/OWNERS
index 3fdd5af..39f97de 100644
--- a/target/product/gsi/OWNERS
+++ b/target/product/gsi/OWNERS
@@ -1,3 +1,6 @@
+bowgotsai@google.com
jiyong@google.com
justinyun@google.com
smoreland@google.com
+szuweilin@google.com
+yochiang@google.com
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index 2ca6687..550ae7c 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -14,17 +14,14 @@
LLNDK: libmediandk.so
LLNDK: libnativewindow.so
LLNDK: libneuralnetworks.so
+LLNDK: libneuralnetworks_shim.so
LLNDK: libselinux.so
LLNDK: libsync.so
LLNDK: libvndksupport.so
LLNDK: libvulkan.so
VNDK-SP: android.hardware.common-V2-ndk_platform.so
-VNDK-SP: android.hardware.common-unstable-ndk_platform.so
VNDK-SP: android.hardware.common.fmq-V1-ndk_platform.so
-VNDK-SP: android.hardware.common.fmq-ndk_platform.so
-VNDK-SP: android.hardware.common.fmq-unstable-ndk_platform.so
VNDK-SP: android.hardware.graphics.common-V2-ndk_platform.so
-VNDK-SP: android.hardware.graphics.common-unstable-ndk_platform.so
VNDK-SP: android.hardware.graphics.common@1.0.so
VNDK-SP: android.hardware.graphics.common@1.1.so
VNDK-SP: android.hardware.graphics.common@1.2.so
@@ -62,10 +59,7 @@
VNDK-SP: libz.so
VNDK-core: android.hardware.audio.common@2.0.so
VNDK-core: android.hardware.authsecret-V1-ndk_platform.so
-VNDK-core: android.hardware.authsecret-ndk_platform.so
-VNDK-core: android.hardware.authsecret-unstable-ndk_platform.so
VNDK-core: android.hardware.automotive.occupant_awareness-V1-ndk_platform.so
-VNDK-core: android.hardware.automotive.occupant_awareness-ndk_platform.so
VNDK-core: android.hardware.configstore-utils.so
VNDK-core: android.hardware.configstore@1.0.so
VNDK-core: android.hardware.configstore@1.1.so
@@ -76,49 +70,28 @@
VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
VNDK-core: android.hardware.health.storage-V1-ndk_platform.so
-VNDK-core: android.hardware.health.storage-ndk_platform.so
-VNDK-core: android.hardware.health.storage-unstable-ndk_platform.so
VNDK-core: android.hardware.identity-V2-ndk_platform.so
-VNDK-core: android.hardware.identity-ndk_platform.so
VNDK-core: android.hardware.keymaster-V2-ndk_platform.so
-VNDK-core: android.hardware.keymaster-ndk_platform.so
VNDK-core: android.hardware.light-V1-ndk_platform.so
-VNDK-core: android.hardware.light-ndk_platform.so
VNDK-core: android.hardware.media.bufferpool@2.0.so
VNDK-core: android.hardware.media.omx@1.0.so
VNDK-core: android.hardware.media@1.0.so
VNDK-core: android.hardware.memtrack-V1-ndk_platform.so
-VNDK-core: android.hardware.memtrack-ndk_platform.so
-VNDK-core: android.hardware.memtrack-unstable-ndk_platform.so
VNDK-core: android.hardware.memtrack@1.0.so
VNDK-core: android.hardware.oemlock-V1-ndk_platform.so
-VNDK-core: android.hardware.oemlock-ndk_platform.so
-VNDK-core: android.hardware.oemlock-unstable-ndk_platform.so
VNDK-core: android.hardware.power-V1-ndk_platform.so
-VNDK-core: android.hardware.power-ndk_platform.so
+VNDK-core: android.hardware.power.stats-V1-ndk_platform.so
VNDK-core: android.hardware.rebootescrow-V1-ndk_platform.so
-VNDK-core: android.hardware.rebootescrow-ndk_platform.so
VNDK-core: android.hardware.security.keymint-V1-ndk_platform.so
-VNDK-core: android.hardware.security.keymint-ndk_platform.so
-VNDK-core: android.hardware.security.keymint-unstable-ndk_platform.so
VNDK-core: android.hardware.security.secureclock-V1-ndk_platform.so
-VNDK-core: android.hardware.security.secureclock-ndk_platform.so
-VNDK-core: android.hardware.security.secureclock-unstable-ndk_platform.so
VNDK-core: android.hardware.security.sharedsecret-V1-ndk_platform.so
-VNDK-core: android.hardware.security.sharedsecret-ndk_platform.so
-VNDK-core: android.hardware.security.sharedsecret-unstable-ndk_platform.so
VNDK-core: android.hardware.soundtrigger@2.0-core.so
VNDK-core: android.hardware.soundtrigger@2.0.so
VNDK-core: android.hardware.vibrator-V1-ndk_platform.so
-VNDK-core: android.hardware.vibrator-ndk_platform.so
VNDK-core: android.hardware.weaver-V1-ndk_platform.so
-VNDK-core: android.hardware.weaver-ndk_platform.so
-VNDK-core: android.hardware.weaver-unstable-ndk_platform.so
VNDK-core: android.hidl.token@1.0-utils.so
VNDK-core: android.hidl.token@1.0.so
VNDK-core: android.system.keystore2-V1-ndk_platform.so
-VNDK-core: android.system.keystore2-ndk_platform.so
-VNDK-core: android.system.keystore2-unstable-ndk_platform.so
VNDK-core: android.system.suspend@1.0.so
VNDK-core: libaudioroute.so
VNDK-core: libaudioutils.so
diff --git a/target/product/gsi/gsi_skip_mount.cfg b/target/product/gsi/gsi_skip_mount.cfg
index ad3c7d9..28f4349 100644
--- a/target/product/gsi/gsi_skip_mount.cfg
+++ b/target/product/gsi/gsi_skip_mount.cfg
@@ -1,3 +1,9 @@
+# Skip "system" mountpoints.
/oem
/product
/system_ext
+# Skip sub-mountpoints of system mountpoints.
+/oem/*
+/product/*
+/system_ext/*
+/system/*
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 34821b6..25fa68b 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -36,6 +36,9 @@
# default configs are applied.
PRODUCT_SHIPPING_API_LEVEL := 30
+# Enable dynamic partitions to facilitate mixing onto Cuttlefish
+PRODUCT_USE_DYNAMIC_PARTITIONS := true
+
# Enable dynamic partition size
PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
@@ -58,3 +61,10 @@
# Support additional P, Q and R VNDK packages
PRODUCT_EXTRA_VNDK_VERSIONS := 28 29 30
+
+# Do not build non-GSI partition images.
+PRODUCT_BUILD_CACHE_IMAGE := false
+PRODUCT_BUILD_USERDATA_IMAGE := false
+PRODUCT_BUILD_VENDOR_IMAGE := false
+PRODUCT_BUILD_SUPER_PARTITION := false
+PRODUCT_BUILD_SUPER_EMPTY_IMAGE := false
diff --git a/target/product/media_system.mk b/target/product/media_system.mk
index 4ebec51..c7ac907 100644
--- a/target/product/media_system.mk
+++ b/target/product/media_system.mk
@@ -57,8 +57,8 @@
# system server jars which are updated via apex modules.
# The values should be of the format <apex name>:<jar name>
PRODUCT_UPDATABLE_SYSTEM_SERVER_JARS := \
+ com.android.art:service-art \
com.android.permission:service-permission \
- com.android.ipsec:android.net.ipsec.ike \
PRODUCT_COPY_FILES += \
system/core/rootdir/etc/public.libraries.android.txt:system/etc/public.libraries.txt
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index e655d51..4f14ddd 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -30,10 +30,43 @@
PRODUCT_PACKAGES += com.android.runtime
# ART APEX module.
-# Note that this package includes the minimal boot classpath JARs (listed in
-# ART_APEX_JARS), which should no longer be added directly to PRODUCT_PACKAGES.
-PRODUCT_PACKAGES += com.android.art-autoselect
-PRODUCT_HOST_PACKAGES += com.android.art-autoselect
+#
+# Select either release (com.android.art) or debug (com.android.art.debug)
+# variant of the ART APEX. By default, "user" build variants contain the release
+# module, while the "eng" build variant contain the debug module. However, if
+# `PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD` is defined, it overrides the previous
+# logic:
+# - if `PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD` is set to `false`, the
+# build will include the release module (whatever the build
+# variant);
+# - if `PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD` is set to `true`, the
+# build will include the debug module (whatever the build variant).
+#
+# Note that the ART APEX package includes the minimal boot classpath JARs
+# (listed in ART_APEX_JARS), which should no longer be added directly to
+# PRODUCT_PACKAGES.
+
+art_target_include_debug_build := $(PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD)
+ifneq (false,$(art_target_include_debug_build))
+ ifneq (,$(filter eng,$(TARGET_BUILD_VARIANT)))
+ art_target_include_debug_build := true
+ endif
+endif
+
+ifeq (true,$(art_target_include_debug_build))
+ PRODUCT_PACKAGES += com.android.art.debug
+ apex_test_module := art-check-debug-apex-gen-fakebin
+else
+ PRODUCT_PACKAGES += com.android.art
+ apex_test_module := art-check-release-apex-gen-fakebin
+endif
+
+ifeq (true,$(SOONG_CONFIG_art_module_source_build))
+ PRODUCT_HOST_PACKAGES += $(apex_test_module)
+endif
+
+art_target_include_debug_build :=
+apex_test_module :=
# Certificates.
PRODUCT_PACKAGES += \
@@ -56,17 +89,18 @@
ifeq (eng,$(TARGET_BUILD_VARIANT))
PRODUCT_SYSTEM_PROPERTIES += \
pm.dexopt.first-boot?=extract \
- pm.dexopt.boot?=extract
+ pm.dexopt.boot-after-ota?=extract
else
PRODUCT_SYSTEM_PROPERTIES += \
- pm.dexopt.first-boot?=quicken \
- pm.dexopt.boot?=verify
+ pm.dexopt.first-boot?=verify \
+ pm.dexopt.boot-after-ota?=verify
endif
# The install filter is speed-profile in order to enable the use of
# profiles from the dex metadata files. Note that if a profile is not provided
# or if it is empty speed-profile is equivalent to (quicken + empty app image).
PRODUCT_SYSTEM_PROPERTIES += \
+ pm.dexopt.post-boot?=extract \
pm.dexopt.install?=speed-profile \
pm.dexopt.install-fast?=skip \
pm.dexopt.install-bulk?=speed-profile \
@@ -106,3 +140,12 @@
PRODUCT_SYSTEM_PROPERTIES += \
ro.iorapd.enable?=true
+# Enable Madvising of the whole art, odex and vdex files to MADV_WILLNEED.
+# The size specified here is the size limit of how much of the file
+# (in bytes) is madvised.
+# We madvise the whole .art file to MADV_WILLNEED with UINT_MAX limit.
+# For odex and vdex files, we limit madvising to 100MB.
+PRODUCT_SYSTEM_PROPERTIES += \
+ dalvik.vm.madvise.vdexfile.size=104857600 \
+ dalvik.vm.madvise.odexfile.size=104857600 \
+ dalvik.vm.madvise.artfile.size=4294967295
diff --git a/target/product/sdk_phone_arm64.mk b/target/product/sdk_phone_arm64.mk
index 761de05..0831b54 100644
--- a/target/product/sdk_phone_arm64.mk
+++ b/target/product/sdk_phone_arm64.mk
@@ -63,5 +63,9 @@
PRODUCT_NAME := sdk_phone_arm64
PRODUCT_DEVICE := emulator_arm64
PRODUCT_MODEL := Android SDK built for arm64
+# Disable <uses-library> checks for SDK product. It lacks some libraries (e.g.
+# RadioConfigLib), which makes it impossible to translate their module names to
+# library name, so the check fails.
+PRODUCT_BROKEN_VERIFY_USES_LIBRARIES := true
diff --git a/target/product/sdk_phone_armv7.mk b/target/product/sdk_phone_armv7.mk
index 5081a87..f649980 100644
--- a/target/product/sdk_phone_armv7.mk
+++ b/target/product/sdk_phone_armv7.mk
@@ -63,3 +63,7 @@
PRODUCT_NAME := sdk_phone_armv7
PRODUCT_DEVICE := emulator_arm
PRODUCT_MODEL := Android SDK built for arm
+# Disable <uses-library> checks for SDK product. It lacks some libraries (e.g.
+# RadioConfigLib), which makes it impossible to translate their module names to
+# library name, so the check fails.
+PRODUCT_BROKEN_VERIFY_USES_LIBRARIES := true
diff --git a/target/product/sdk_phone_x86.mk b/target/product/sdk_phone_x86.mk
index 9096ff3..0e1bca4 100644
--- a/target/product/sdk_phone_x86.mk
+++ b/target/product/sdk_phone_x86.mk
@@ -58,3 +58,7 @@
PRODUCT_NAME := sdk_phone_x86
PRODUCT_DEVICE := emulator_x86
PRODUCT_MODEL := Android SDK built for x86
+# Disable <uses-library> checks for SDK product. It lacks some libraries (e.g.
+# RadioConfigLib), which makes it impossible to translate their module names to
+# library name, so the check fails.
+PRODUCT_BROKEN_VERIFY_USES_LIBRARIES := true
diff --git a/target/product/sdk_phone_x86_64.mk b/target/product/sdk_phone_x86_64.mk
index 161043b..fffac04 100644
--- a/target/product/sdk_phone_x86_64.mk
+++ b/target/product/sdk_phone_x86_64.mk
@@ -59,3 +59,7 @@
PRODUCT_NAME := sdk_phone_x86_64
PRODUCT_DEVICE := emulator_x86_64
PRODUCT_MODEL := Android SDK built for x86_64
+# Disable <uses-library> checks for SDK product. It lacks some libraries (e.g.
+# RadioConfigLib), which makes it impossible to translate their module names to
+# library name, so the check fails.
+PRODUCT_BROKEN_VERIFY_USES_LIBRARIES := true
diff --git a/target/product/security/Android.bp b/target/product/security/Android.bp
index 5f4f82b..98698c5 100644
--- a/target/product/security/Android.bp
+++ b/target/product/security/Android.bp
@@ -1,4 +1,13 @@
// AOSP test certificate
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "build_make_license"
+ // to get the below license kinds:
+ // legacy_restricted
+ default_applicable_licenses: ["build_make_license"],
+}
+
android_app_certificate {
name: "aosp-testkey",
certificate: "testkey",
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
index d6a8b53..cedad5b 100644
--- a/target/product/security/Android.mk
+++ b/target/product/security/Android.mk
@@ -5,6 +5,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := verity_key
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_SRC_FILES := $(LOCAL_MODULE)
LOCAL_MODULE_CLASS := ETC
LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT)
@@ -24,6 +26,8 @@
ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
include $(CLEAR_VARS)
LOCAL_MODULE := verity_key_ramdisk
+ LOCAL_LICENSE_KINDS := legacy_restricted
+ LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_SRC_FILES := verity_key
LOCAL_MODULE_STEM := verity_key
@@ -37,6 +41,8 @@
ifneq ($(filter eng userdebug,$(TARGET_BUILD_VARIANT)),)
include $(CLEAR_VARS)
LOCAL_MODULE := adb_keys
+ LOCAL_LICENSE_KINDS := legacy_restricted
+ LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT)
LOCAL_PREBUILT_MODULE_FILE := $(PRODUCT_ADB_KEYS)
@@ -51,13 +57,15 @@
include $(CLEAR_VARS)
LOCAL_MODULE := otacerts
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_MODULE_STEM := otacerts.zip
LOCAL_MODULE_PATH := $(TARGET_OUT_ETC)/security
include $(BUILD_SYSTEM)/base_rules.mk
$(LOCAL_BUILT_MODULE): PRIVATE_CERT := $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
$(LOCAL_BUILT_MODULE): $(SOONG_ZIP) $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem
- $(SOONG_ZIP) -o $@ -j -f $(PRIVATE_CERT)
+ $(SOONG_ZIP) -o $@ -j -symlinks=false -f $(PRIVATE_CERT)
#######################################
@@ -65,6 +73,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := otacerts.recovery
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_MODULE_STEM := otacerts.zip
LOCAL_MODULE_PATH := $(TARGET_RECOVERY_ROOT_OUT)/system/etc/security
@@ -78,5 +88,5 @@
$(SOONG_ZIP) \
$(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem \
$(extra_recovery_keys)
- $(SOONG_ZIP) -o $@ -j \
+ $(SOONG_ZIP) -o $@ -j -symlinks=false \
$(foreach key_file, $(PRIVATE_CERT) $(PRIVATE_EXTRA_RECOVERY_KEYS), -f $(key_file))
diff --git a/target/product/sysconfig/Android.bp b/target/product/sysconfig/Android.bp
index 5632d17..29122e4 100644
--- a/target/product/sysconfig/Android.bp
+++ b/target/product/sysconfig/Android.bp
@@ -12,6 +12,10 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
prebuilt_etc {
name: "preinstalled-packages-platform-aosp-product.xml",
product_specific: true,
@@ -30,4 +34,4 @@
product_specific: true,
sub_dir: "sysconfig",
src: "preinstalled-packages-platform-handheld-product.xml",
-}
\ No newline at end of file
+}
diff --git a/tests/device.rbc b/tests/device.rbc
new file mode 100644
index 0000000..5d4e70c
--- /dev/null
+++ b/tests/device.rbc
@@ -0,0 +1,42 @@
+
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Top-level test configuration.
+# Converted from the following makefile
+### PRODUCT_PACKAGES += dev
+### PRODUCT_HOST_PACKAGES += host
+### $(call inherit-product, $(LOCAL_PATH)/part1.mk)
+### PRODUCT_COPY_FILES += device_from:device_to
+### include $(LOCAL_PATH)/include1.mk
+### PRODUCT_PACKAGES += dev_after
+### PRODUCT_COPY_FILES += $(call find-copy-subdir-files,audio_platform_info*.xml,device/google/redfin/audio,$(TARGET_COPY_OUT_VENDOR)/etc) xyz
+
+load("//build/make/core:product_config.rbc", "rblf")
+load(":part1.rbc", _part1_init = "init")
+load(":include1.rbc", _include1_init = "init")
+
+def init(g, handle):
+ cfg = rblf.cfg(handle)
+ rblf.setdefault(handle, "PRODUCT_PACKAGES")
+ cfg["PRODUCT_PACKAGES"] += ["dev"]
+ rblf.setdefault(handle, "PRODUCT_HOST_PACKAGES")
+ cfg["PRODUCT_HOST_PACKAGES"] += ["host"]
+ rblf.inherit(handle, "test/part1", _part1_init)
+ rblf.setdefault(handle, "PRODUCT_COPY_FILES")
+ cfg["PRODUCT_COPY_FILES"] += ["device_from:device_to"]
+ _include1_init(g, handle)
+ cfg["PRODUCT_PACKAGES"] += ["dev_after"]
+ cfg["PRODUCT_COPY_FILES"] += (rblf.find_and_copy("audio_platform_info*.xml", "device/google/redfin/audio", "||VENDOR-PATH-PH||/etc") +
+ ["xyz"])
diff --git a/tests/include1.rbc b/tests/include1.rbc
new file mode 100644
index 0000000..c0c9b3b
--- /dev/null
+++ b/tests/include1.rbc
@@ -0,0 +1,25 @@
+
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Included file (not inherited)
+# Converted from makefile
+### PRODUCT_PACKAGES += inc
+
+load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+ cfg = rblf.cfg(handle)
+ rblf.setdefault(handle, "PRODUCT_PACKAGES")
+ cfg["PRODUCT_PACKAGES"] += ["inc"]
diff --git a/tests/part1.rbc b/tests/part1.rbc
new file mode 100644
index 0000000..3e50751
--- /dev/null
+++ b/tests/part1.rbc
@@ -0,0 +1,28 @@
+
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Part configuration
+# Converted from
+### PRODUCT_COPY_FILES += part_from:part_to
+### PRODUCT_PRODUCT_PROPERTIES += part_properties
+
+load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+ cfg = rblf.cfg(handle)
+ rblf.setdefault(handle, "PRODUCT_COPY_FILES")
+ cfg["PRODUCT_COPY_FILES"] += ["part_from:part_to"]
+ rblf.setdefault(handle, "PRODUCT_PRODUCT_PROPERTIES")
+ cfg["PRODUCT_PRODUCT_PROPERTIES"] += ["part_properties"]
diff --git a/tests/run.rbc b/tests/run.rbc
new file mode 100644
index 0000000..b13f835
--- /dev/null
+++ b/tests/run.rbc
@@ -0,0 +1,50 @@
+
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Run test configuration and verify its result.
+# The main configuration file is device.rbc.
+# It inherits part1.rbc and also includes include1.rbc
+# TODO(asmundak): more tests are needed to verify that:
+# * multi-level inheritance works as expected
+# * all runtime functions (wildcard, regex, etc.) work
+
+load("//build/make/core:product_config.rbc", "rblf")
+load(":device.rbc", "init")
+
+def assert_eq(expected, actual):
+ if expected != actual:
+ fail("Expected %s, got %s" % (expected, actual))
+
+
+globals, config = rblf.product_configuration("test/device", init)
+assert_eq(
+ {
+ "PRODUCT_COPY_FILES": [
+ "part_from:part_to",
+ "device_from:device_to",
+ "device/google/redfin/audio/audio_platform_info_noextcodec_snd.xml:||VENDOR-PATH-PH||/etc/audio_platform_info_noextcodec_snd.xml",
+ "xyz"
+ ],
+ "PRODUCT_HOST_PACKAGES": ["host"],
+ "PRODUCT_PACKAGES": [
+ "dev",
+ "inc",
+ "dev_after"
+ ],
+ "PRODUCT_PRODUCT_PROPERTIES": ["part_properties"]
+ },
+ { k:v for k, v in sorted(config.items()) }
+)
diff --git a/tools/Android.bp b/tools/Android.bp
index e0f3739..269e610 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -12,6 +12,19 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "build_make_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ // SPDX-license-identifier-BSD
+ // SPDX-license-identifier-CC-BY
+ // SPDX-license-identifier-GPL
+ // SPDX-license-identifier-MIT
+ default_applicable_licenses: ["build_make_license"],
+}
+
python_binary_host {
name: "generate-self-extracting-archive",
srcs: ["generate-self-extracting-archive.py"],
@@ -62,10 +75,10 @@
srcs: ["extract_kernel.py"],
version: {
py2: {
- enabled: true,
+ enabled: false,
},
py3: {
- enabled: false,
+ enabled: true,
},
},
}
diff --git a/tools/acp/Android.bp b/tools/acp/Android.bp
index 64f5a10..78738b0 100644
--- a/tools/acp/Android.bp
+++ b/tools/acp/Android.bp
@@ -2,6 +2,15 @@
//
// Custom version of cp.
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "build_make_license"
+ // to get the below license kinds:
+ // legacy_restricted
+ default_applicable_licenses: ["build_make_license"],
+}
+
cc_binary_host {
srcs: ["acp.c"],
diff --git a/tools/apicheck/Android.bp b/tools/apicheck/Android.bp
index 8fe20e9..f58042f 100644
--- a/tools/apicheck/Android.bp
+++ b/tools/apicheck/Android.bp
@@ -12,6 +12,10 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
java_binary_host {
name: "apicheck",
wrapper: "etc/apicheck",
diff --git a/tools/atree/Android.bp b/tools/atree/Android.bp
index 5fbe042..7906d8b 100644
--- a/tools/atree/Android.bp
+++ b/tools/atree/Android.bp
@@ -2,6 +2,15 @@
//
// Copies files into the directory structure described by a manifest
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "build_make_license"
+ // to get the below license kinds:
+ // legacy_restricted
+ default_applicable_licenses: ["build_make_license"],
+}
+
cc_binary_host {
name: "atree",
srcs: [
diff --git a/tools/compare_builds.py b/tools/compare_builds.py
new file mode 100755
index 0000000..838a628
--- /dev/null
+++ b/tools/compare_builds.py
@@ -0,0 +1,661 @@
+#!/usr/bin/env -S python3 -u
+
+"""
+This script helps find various build behaviors that make builds less hermetic
+and repeatable. Depending on the flags, it runs a sequence of builds and looks
+for files that have changed or have been improperly regenerated, updating
+their timestamps incorrectly. It also looks for changes that the build has
+done to the source tree, and for files whose contents are dependent on the
+location of the out directory.
+
+This utility has two major modes, full and incremental. By default, this tool
+runs in full mode. To run in incremental mode, pass the --incremental flag.
+
+
+FULL MODE
+
+In full mode, this tool helps verify BUILD CORRECTNESS by examining its
+REPEATABILITY. In full mode, this tool runs two complete builds in different
+directories and compares the CONTENTS of the two directories. Lists of any
+files that are added, removed or changed are printed, sorted by the timestamp
+of that file, to aid finding which dependencies trigger the rebuilding of
+other files.
+
+
+INCREMENTAL MODE
+
+In incremental mode, this tool helps verify the SPEED of the build. It runs two
+builds and looks at the TIMESTAMPS of the generated files, and reports files
+that were changed by the second build. In theory, an incremental build with no
+source files touched should not have any generated targets changed. As in full
+builds, the file list is returned sorted by timestamp.
+
+
+OTHER CHECKS
+
+In both full and incremental mode, this tool looks at the timestamps of all
+source files in the tree, and reports on files that have been touched. In the
+output, these are labeled with the header "Source files touched after start of
+build."
+
+In addition, by default, this tool sets the OUT_DIR environment variable to
+something other than "out" in order to find build rules that are not respecting
+the OUT_DIR. If you see these, you should fix them, but if your build can not
+complete for some reason because of this, you can pass the --no-check-out-dir
+flag to suppress this check.
+
+
+OTHER FLAGS
+
+In full mode, the --detect-embedded-paths flag does the two builds in different
+directories, to help in finding rules that embed the out directory path into
+the targets.
+
+The --hide-build-output flag hides the output of successful builds, to make
+script output cleaner. The output of builds that fail is still shown.
+
+The --no-build flag is useful if you have already done a build and would
+just like to re-run the analysis.
+
+The --target flag lets you specify a build target other than the default
+full build (droid). You can pass "nothing" as in the example below, or a
+specific target, to reduce the scope of the checks performed.
+
+The --touch flag lets you specify a list of source files to touch between
+the builds, to examine the consequences of editing a particular file.
+
+
+EXAMPLE COMMANDLINES
+
+Please run build/make/tools/compare_builds.py --help for a full listing
+of the commandline flags. Here are a sampling of useful combinations.
+
+ 1. Find files changed during an incremental build that doesn't build
+ any targets.
+
+ build/make/tools/compare_builds.py --incremental --target nothing
+
+ Long incremental build times, or consecutive builds that re-run build actions
+ are usually caused by files being touched as part of loading the makefiles.
+
+ The nothing build (m nothing) loads the make and blueprint files, generates
+ the dependency graph, but then doesn't actually build any targets. Checking
+ against this build is the fastest and easiest way to find files that are
+ modified while makefiles are read, for example with $(shell) invocations.
+
+ 2. Find packaging targets that are different, ignoring intermediate files.
+
+ build/make/tools/compare_builds.py --subdirs --detect-embedded-paths
+
+ These flags will compare the final staging directories for partitions,
+ as well as the APKs, apexes, testcases, and the like (the full directory
+ list is in the DEFAULT_DIRS variable below). Since these are the files
+ that are ultimately released, it is more important that these files be
+ replicable, even if the intermediates that went into them are not (for
+ example, when debugging symbols are stripped).
+
+ 3. Check that all targets are repeatable.
+
+ build/make/tools/compare_builds.py --detect-embedded-paths
+
+ This check will list all of the differences in built targets that it can
+ find. Be aware that the AOSP tree still has quite a few targets that
+ are flagged by this check, so OEM changes might be lost in that list.
+ That said, each file shown here is a potential blocker for a repeatable
+ build.
+
+ 4. See what targets are rebuilt when a file is touched between builds.
+
+ build/make/tools/compare_builds.py --incremental \
+ --touch frameworks/base/core/java/android/app/Activity.java
+
+ This check simulates the common engineer workflow of touching a single
+ file and rebuilding the whole system. To see a restricted view, consider
+ also passing a --target option for a common use case. For example:
+
+ build/make/tools/compare_builds.py --incremental --target framework \
+ --touch frameworks/base/core/java/android/app/Activity.java
+"""
+
+import argparse
+import itertools
+import os
+import shutil
+import stat
+import subprocess
+import sys
+
+
+# Soong
+SOONG_UI = "build/soong/soong_ui.bash"
+
+
+# Which directories to use if no --subdirs is supplied without explicit directories.
+DEFAULT_DIRS = (
+ "apex",
+ "data",
+ "product",
+ "ramdisk",
+ "recovery",
+ "root",
+ "system",
+ "system_ext",
+ "system_other",
+ "testcases",
+ "vendor",
+)
+
+
+# Files to skip for incremental timestamp checking
+BUILD_INTERNALS_PREFIX_SKIP = (
+ "soong/.glob/",
+ ".path/",
+)
+
+
+BUILD_INTERNALS_SUFFIX_SKIP = (
+ "/soong/soong_build_metrics.pb",
+ "/.installable_test_files",
+ "/files.db",
+ "/.blueprint.bootstrap",
+ "/build_number.txt",
+ "/build.ninja",
+ "/.out-dir",
+ "/build_fingerprint.txt",
+ "/build_thumbprint.txt",
+ "/.copied_headers_list",
+ "/.installable_files",
+)
+
+
+class DiffType(object):
+ def __init__(self, code, message):
+ self.code = code
+ self.message = message
+
+DIFF_NONE = DiffType("DIFF_NONE", "Files are the same")
+DIFF_MODE = DiffType("DIFF_MODE", "Stat mode bits differ")
+DIFF_SIZE = DiffType("DIFF_SIZE", "File size differs")
+DIFF_SYMLINK = DiffType("DIFF_SYMLINK", "Symlinks point to different locations")
+DIFF_CONTENTS = DiffType("DIFF_CONTENTS", "File contents differ")
+
+
+def main():
+ argparser = argparse.ArgumentParser(description="Diff build outputs from two builds.",
+ epilog="Run this command from the root of the tree."
+ + " Before running this command, the build environment"
+ + " must be set up, including sourcing build/envsetup.sh"
+ + " and running lunch.")
+ argparser.add_argument("--detect-embedded-paths", action="store_true",
+ help="Use unique out dirs to detect paths embedded in binaries.")
+ argparser.add_argument("--incremental", action="store_true",
+ help="Compare which files are touched in two consecutive builds without a clean in between.")
+ argparser.add_argument("--hide-build-output", action="store_true",
+ help="Don't print the build output for successful builds")
+ argparser.add_argument("--no-build", dest="run_build", action="store_false",
+ help="Don't build or clean, but do everything else.")
+ argparser.add_argument("--no-check-out-dir", dest="check_out_dir", action="store_false",
+ help="Don't check for rules not honoring movable out directories.")
+ argparser.add_argument("--subdirs", nargs="*",
+ help="Only scan these subdirs of $PRODUCT_OUT instead of the whole out directory."
+ + " The --subdirs argument with no listed directories will give a default list.")
+ argparser.add_argument("--target", default="droid",
+ help="Make target to run. The default is droid")
+ argparser.add_argument("--touch", nargs="+", default=[],
+ help="Files to touch between builds. Must pair with --incremental.")
+ args = argparser.parse_args(sys.argv[1:])
+
+ if args.detect_embedded_paths and args.incremental:
+ sys.stderr.write("Can't pass --detect-embedded-paths and --incremental together.\n")
+ sys.exit(1)
+ if args.detect_embedded_paths and not args.check_out_dir:
+ sys.stderr.write("Can't pass --detect-embedded-paths and --no-check-out-dir together.\n")
+ sys.exit(1)
+ if args.touch and not args.incremental:
+    sys.stderr.write("The --incremental flag is required if the --touch flag is passed.\n")
+ sys.exit(1)
+
+ AssertAtTop()
+ RequireEnvVar("TARGET_PRODUCT")
+ RequireEnvVar("TARGET_BUILD_VARIANT")
+
+ # Out dir file names:
+ # - dir_prefix - The directory we'll put everything in (except for maybe the top level
+ # out/ dir).
+ # - *work_dir - The directory that we will build directly into. This is in dir_prefix
+ # unless --no-check-out-dir is set.
+ # - *out_dir - After building, if work_dir is different from out_dir, we move the out
+  #                  directory to here so we can do the comparisons.
+ # - timestamp_* - Files we touch so we know the various phases between the builds, so we
+ # can compare timestamps of files.
+ if args.incremental:
+ dir_prefix = "out_incremental"
+ if args.check_out_dir:
+ first_work_dir = first_out_dir = dir_prefix + "/out"
+ second_work_dir = second_out_dir = dir_prefix + "/out"
+ else:
+ first_work_dir = first_out_dir = "out"
+ second_work_dir = second_out_dir = "out"
+ else:
+ dir_prefix = "out_full"
+ first_out_dir = dir_prefix + "/out_1"
+ second_out_dir = dir_prefix + "/out_2"
+ if not args.check_out_dir:
+ first_work_dir = second_work_dir = "out"
+ elif args.detect_embedded_paths:
+ first_work_dir = first_out_dir
+ second_work_dir = second_out_dir
+ else:
+ first_work_dir = dir_prefix + "/work"
+ second_work_dir = dir_prefix + "/work"
+ timestamp_start = dir_prefix + "/timestamp_start"
+ timestamp_between = dir_prefix + "/timestamp_between"
+ timestamp_end = dir_prefix + "/timestamp_end"
+
+ if args.run_build:
+ # Initial clean, if necessary
+ print("Cleaning " + dir_prefix + "/")
+ Clean(dir_prefix)
+ print("Cleaning out/")
+ Clean("out")
+ CreateEmptyFile(timestamp_start)
+ print("Running the first build in " + first_work_dir)
+ RunBuild(first_work_dir, first_out_dir, args.target, args.hide_build_output)
+ for f in args.touch:
+ print("Touching " + f)
+ TouchFile(f)
+ CreateEmptyFile(timestamp_between)
+ print("Running the second build in " + second_work_dir)
+ RunBuild(second_work_dir, second_out_dir, args.target, args.hide_build_output)
+ CreateEmptyFile(timestamp_end)
+ print("Done building")
+ print()
+
+ # Which out directories to scan
+ if args.subdirs is not None:
+ if args.subdirs:
+ subdirs = args.subdirs
+ else:
+ subdirs = DEFAULT_DIRS
+ first_files = ProductFiles(RequireBuildVar(first_out_dir, "PRODUCT_OUT"), subdirs)
+ second_files = ProductFiles(RequireBuildVar(second_out_dir, "PRODUCT_OUT"), subdirs)
+ else:
+ first_files = OutFiles(first_out_dir)
+ second_files = OutFiles(second_out_dir)
+
+ printer = Printer()
+
+ if args.incremental:
+ # Find files that were rebuilt unnecessarily
+ touched_incrementally = FindOutFilesTouchedAfter(first_files,
+ GetFileTimestamp(timestamp_between))
+ printer.PrintList("Touched in incremental build", touched_incrementally)
+ else:
+ # Compare the two out dirs
+ added, removed, changed = DiffFileList(first_files, second_files)
+ printer.PrintList("Added", added)
+ printer.PrintList("Removed", removed)
+ printer.PrintList("Changed", changed, "%s %s")
+
+ # Find files in the source tree that were touched
+ touched_during = FindSourceFilesTouchedAfter(GetFileTimestamp(timestamp_start))
+ printer.PrintList("Source files touched after start of build", touched_during)
+
+ # Find files and dirs that were output to "out" and didn't respect $OUT_DIR
+ if args.check_out_dir:
+ bad_out_dir_contents = FindFilesAndDirectories("out")
+ printer.PrintList("Files and directories created by rules that didn't respect $OUT_DIR",
+ bad_out_dir_contents)
+
+ # If we didn't find anything, print success message
+ if not printer.printed_anything:
+ print("No bad behaviors found.")
+
+
+def AssertAtTop():
+ """If the current directory is not the top of an android source tree, print an error
+ message and exit."""
+ if not os.access(SOONG_UI, os.X_OK):
+ sys.stderr.write("FAILED: Please run from the root of the tree.\n")
+ sys.exit(1)
+
+
+def RequireEnvVar(name):
+ """Gets an environment variable. If that fails, then print an error message and exit."""
+ result = os.environ.get(name)
+ if not result:
+ sys.stderr.write("error: Can't determine %s. Please run lunch first.\n" % name)
+ sys.exit(1)
+ return result
+
+
+def RunSoong(out_dir, args, capture_output):
+ env = dict(os.environ)
+ env["OUT_DIR"] = out_dir
+ args = [SOONG_UI,] + args
+ if capture_output:
+ proc = subprocess.Popen(args, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ combined_output, none = proc.communicate()
+ return proc.returncode, combined_output
+ else:
+ result = subprocess.run(args, env=env)
+ return result.returncode, None
+
+
+def GetBuildVar(out_dir, name):
+ """Gets a variable from the build system."""
+ returncode, output = RunSoong(out_dir, ["--dumpvar-mode", name], True)
+ if returncode != 0:
+ return None
+ else:
+ return output.decode("utf-8").strip()
+
+
+def RequireBuildVar(out_dir, name):
+  """Gets a variable from the build system. If that fails, then print an error
+ message and exit."""
+ value = GetBuildVar(out_dir, name)
+ if not value:
+ sys.stderr.write("error: Can't determine %s. Please run lunch first.\n" % name)
+ sys.exit(1)
+ return value
+
+
+def Clean(directory):
+  """Deletes the supplied directory."""
+ try:
+ shutil.rmtree(directory)
+ except FileNotFoundError:
+ pass
+
+
+def RunBuild(work_dir, out_dir, target, hide_build_output):
+ """Runs a build. If the build fails, prints a message and exits."""
+ returncode, output = RunSoong(work_dir,
+ ["--build-mode", "--all-modules", "--dir=" + os.getcwd(), target],
+ hide_build_output)
+ if work_dir != out_dir:
+ os.replace(work_dir, out_dir)
+ if returncode != 0:
+ if hide_build_output:
+ # The build output was hidden, so print it now for debugging
+ sys.stderr.buffer.write(output)
+ sys.stderr.write("FAILED: Build failed. Stopping.\n")
+ sys.exit(1)
+
+
+def DiffFileList(first_files, second_files):
+ """Examines the files.
+
+ Returns:
+ Filenames of files in first_filelist but not second_filelist (added files)
+ Filenames of files in second_filelist but not first_filelist (removed files)
+ 2-Tuple of filenames for the files that are in both but are different (changed files)
+ """
+ # List of files, relative to their respective PRODUCT_OUT directories
+ first_filelist = sorted([x for x in first_files], key=lambda x: x[1])
+ second_filelist = sorted([x for x in second_files], key=lambda x: x[1])
+
+ added = []
+ removed = []
+ changed = []
+
+ first_index = 0
+ second_index = 0
+
+ while first_index < len(first_filelist) and second_index < len(second_filelist):
+ # Path relative to source root and path relative to PRODUCT_OUT
+ first_full_filename, first_relative_filename = first_filelist[first_index]
+ second_full_filename, second_relative_filename = second_filelist[second_index]
+
+ if first_relative_filename < second_relative_filename:
+ # Removed
+ removed.append(first_full_filename)
+ first_index += 1
+ elif first_relative_filename > second_relative_filename:
+ # Added
+ added.append(second_full_filename)
+ second_index += 1
+ else:
+ # Both present
+ diff_type = DiffFiles(first_full_filename, second_full_filename)
+ if diff_type != DIFF_NONE:
+ changed.append((first_full_filename, second_full_filename))
+ first_index += 1
+ second_index += 1
+
+ while first_index < len(first_filelist):
+ first_full_filename, first_relative_filename = first_filelist[first_index]
+ removed.append(first_full_filename)
+ first_index += 1
+
+ while second_index < len(second_filelist):
+ second_full_filename, second_relative_filename = second_filelist[second_index]
+ added.append(second_full_filename)
+ second_index += 1
+
+ return (SortByTimestamp(added),
+ SortByTimestamp(removed),
+ SortByTimestamp(changed, key=lambda item: item[1]))
+
+
+def FindOutFilesTouchedAfter(files, timestamp):
+ """Find files in the given file iterator that were touched after timestamp."""
+ result = []
+ for full, relative in files:
+ ts = GetFileTimestamp(full)
+ if ts > timestamp:
+ result.append(TouchedFile(full, ts))
+ return [f.filename for f in sorted(result, key=lambda f: f.timestamp)]
+
+
+def GetFileTimestamp(filename):
+ """Get timestamp for a file (just wraps stat)."""
+ st = os.stat(filename, follow_symlinks=False)
+ return st.st_mtime
+
+
+def SortByTimestamp(items, key=lambda item: item):
+ """Sort the list by timestamp of files.
+ Args:
+ items - the list of items to sort
+ key - a function to extract a filename from each element in items
+ """
+ return [x[0] for x in sorted([(item, GetFileTimestamp(key(item))) for item in items],
+ key=lambda y: y[1])]
+
+
+def FindSourceFilesTouchedAfter(timestamp):
+ """Find files in the source tree that have changed after timestamp. Ignores
+ the out directory."""
+ result = []
+ for root, dirs, files in os.walk(".", followlinks=False):
+ if root == ".":
+ RemoveItemsFromList(dirs, (".repo", "out", "out_full", "out_incremental"))
+ for f in files:
+ full = os.path.sep.join((root, f))[2:]
+ ts = GetFileTimestamp(full)
+ if ts > timestamp:
+ result.append(TouchedFile(full, ts))
+ return [f.filename for f in sorted(result, key=lambda f: f.timestamp)]
+
+
+def FindFilesAndDirectories(directory):
+ """Finds all files and directories inside a directory."""
+ result = []
+ for root, dirs, files in os.walk(directory, followlinks=False):
+ result += [os.path.sep.join((root, x, "")) for x in dirs]
+ result += [os.path.sep.join((root, x)) for x in files]
+ return result
+
+
+def CreateEmptyFile(filename):
+ """Create an empty file with now as the timestamp at filename."""
+ try:
+ os.makedirs(os.path.dirname(filename))
+ except FileExistsError:
+ pass
+ open(filename, "w").close()
+ os.utime(filename)
+
+
+def TouchFile(filename):
+ os.utime(filename)
+
+
+def DiffFiles(first_filename, second_filename):
+ def AreFileContentsSame(remaining, first_filename, second_filename):
+ """Compare the file contents. They must be known to be the same size."""
+ CHUNK_SIZE = 32*1024
+ with open(first_filename, "rb") as first_file:
+ with open(second_filename, "rb") as second_file:
+ while remaining > 0:
+ size = min(CHUNK_SIZE, remaining)
+ if first_file.read(CHUNK_SIZE) != second_file.read(CHUNK_SIZE):
+ return False
+ remaining -= size
+ return True
+
+ first_stat = os.stat(first_filename, follow_symlinks=False)
+  second_stat = os.stat(second_filename, follow_symlinks=False)
+
+ # Mode bits
+ if first_stat.st_mode != second_stat.st_mode:
+ return DIFF_MODE
+
+ # File size
+ if first_stat.st_size != second_stat.st_size:
+ return DIFF_SIZE
+
+ # Contents
+ if stat.S_ISLNK(first_stat.st_mode):
+ if os.readlink(first_filename) != os.readlink(second_filename):
+ return DIFF_SYMLINK
+ elif stat.S_ISREG(first_stat.st_mode):
+ if not AreFileContentsSame(first_stat.st_size, first_filename, second_filename):
+ return DIFF_CONTENTS
+
+ return DIFF_NONE
+
+
+class FileIterator(object):
+ """Object that produces an iterator containing all files in a given directory.
+
+ Each iteration yields a tuple containing:
+
+ [0] (full) Path to file relative to source tree.
+ [1] (relative) Path to the file relative to the base directory given in the
+ constructor.
+ """
+
+ def __init__(self, base_dir):
+ self._base_dir = base_dir
+
+ def __iter__(self):
+ return self._Iterator(self, self._base_dir)
+
+ def ShouldIncludeFile(self, root, path):
+ return False
+
+ class _Iterator(object):
+ def __init__(self, parent, base_dir):
+ self._parent = parent
+ self._base_dir = base_dir
+ self._walker = os.walk(base_dir, followlinks=False)
+ self._current_index = 0
+ self._current_dir = []
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ # os.walk's iterator will eventually terminate by raising StopIteration
+ while True:
+ if self._current_index >= len(self._current_dir):
+ root, dirs, files = self._walker.__next__()
+ full_paths = [os.path.sep.join((root, f)) for f in files]
+ pairs = [(f, f[len(self._base_dir)+1:]) for f in full_paths]
+ self._current_dir = [(full, relative) for full, relative in pairs
+ if self._parent.ShouldIncludeFile(root, relative)]
+ self._current_index = 0
+ if not self._current_dir:
+ continue
+ index = self._current_index
+ self._current_index += 1
+ return self._current_dir[index]
+
+
+class OutFiles(FileIterator):
+ """Object that produces an iterator containing all files in a given out directory,
+ except for files which are known to be touched as part of build setup.
+ """
+ def __init__(self, out_dir):
+ super().__init__(out_dir)
+ self._out_dir = out_dir
+
+ def ShouldIncludeFile(self, root, relative):
+ # Skip files in root, although note that this could actually skip
+ # files that are sadly generated directly into that directory.
+ if root == self._out_dir:
+ return False
+ # Skiplist
+ for skip in BUILD_INTERNALS_PREFIX_SKIP:
+ if relative.startswith(skip):
+ return False
+ for skip in BUILD_INTERNALS_SUFFIX_SKIP:
+ if relative.endswith(skip):
+ return False
+ return True
+
+
+class ProductFiles(FileIterator):
+ """Object that produces an iterator containing files in listed subdirectories of $PRODUCT_OUT.
+ """
+ def __init__(self, product_out, subdirs):
+ super().__init__(product_out)
+ self._subdirs = subdirs
+
+ def ShouldIncludeFile(self, root, relative):
+ for subdir in self._subdirs:
+ if relative.startswith(subdir):
+ return True
+ return False
+
+
+class TouchedFile(object):
+ """A file in the out directory with a timestamp."""
+ def __init__(self, filename, timestamp):
+ self.filename = filename
+ self.timestamp = timestamp
+
+
+def RemoveItemsFromList(haystack, needles):
+ for needle in needles:
+ try:
+ haystack.remove(needle)
+ except ValueError:
+ pass
+
+
+class Printer(object):
+ def __init__(self):
+ self.printed_anything = False
+
+ def PrintList(self, title, items, fmt="%s"):
+ if items:
+ if self.printed_anything:
+ sys.stdout.write("\n")
+ sys.stdout.write("%s:\n" % title)
+ for item in items:
+ sys.stdout.write(" %s\n" % fmt % item)
+ self.printed_anything = True
+
+
+if __name__ == "__main__":
+ try:
+ main()
+ except KeyboardInterrupt:
+ pass
+
+
+# vim: ts=2 sw=2 sts=2 nocindent
diff --git a/tools/droiddoc/Android.bp b/tools/droiddoc/Android.bp
index 0428068..efd30c1 100644
--- a/tools/droiddoc/Android.bp
+++ b/tools/droiddoc/Android.bp
@@ -12,6 +12,19 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "build_make_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ // SPDX-license-identifier-BSD
+ // SPDX-license-identifier-CC-BY
+ // SPDX-license-identifier-GPL
+ // SPDX-license-identifier-MIT
+ default_applicable_licenses: ["build_make_license"],
+}
+
droiddoc_exported_dir {
name: "droiddoc-templates-pdk",
path: "templates-pdk",
diff --git a/tools/exercise_compare_builds b/tools/exercise_compare_builds
new file mode 100755
index 0000000..38e8405
--- /dev/null
+++ b/tools/exercise_compare_builds
@@ -0,0 +1,66 @@
+#!/bin/bash
+# Tests for compare_builds.py
+# usage (from root of source tree):
+# build/make/tools/exercise_compare_builds
+
+HIDE_BUILD_OUTPUT=--hide-build-output
+
+function run()
+{
+ echo
+ echo
+ echo ============================================================
+ echo $1
+ shift
+ echo ./build/make/tools/compare_builds.py $HIDE_BUILD_OUTPUT --target incidentd $@
+ echo ============================================================
+ time ./build/make/tools/compare_builds.py $HIDE_BUILD_OUTPUT --target incidentd $@
+}
+
+function run_tests()
+{
+ # These should error out
+
+ run "Incremental build, Separate work dirs (invalid flag combo, should error out)" \
+ --incremental --detect-embedded-paths
+ run "Use out/ as work dir, Separate work dirs (invalid flag combo, should error out)" \
+ --no-check-out-dir --detect-embedded-paths
+
+ # Each grouping starts with a build, and the following ones use --no-build to save time
+
+ run "REBUILD: Full builds, Same work dir, Whole out dir"
+ run "Full builds, Same work dir, Default subdirs" \
+ --no-build --subdirs
+ run "Full builds, Same work dir, Only $PRODUCT_OUT/system" \
+ --no-build --subdirs system
+
+ run "REBUILD: Full builds, Use out/ as work dir, Whole out dir" \
+ --no-check-out-dir
+ run "Full builds, Use out/ as work dir, Default subdirs" \
+ --no-build --no-check-out-dir --subdirs
+ run "Full builds, Use out/ as work dir, Only $PRODUCT_OUT/system" \
+ --no-build --no-check-out-dir --subdirs system
+
+ run "REBUILD: Full builds, Separate work dirs, Whole out dir" \
+ --detect-embedded-paths
+ run "Full builds, Separate work dirs, Default subdirs" \
+ --no-build --detect-embedded-paths --subdirs
+ run "Full builds, Separate work dirs, Only $PRODUCT_OUT/system" \
+ --no-build --detect-embedded-paths --subdirs system
+
+ run "REBUILD: Incremental build, Same work dir, Whole out dir" \
+ --incremental
+ run "Incremental build, Same work dir, Default subdirs" \
+ --no-build --incremental --subdirs
+ run "Incremental build, Same work dir, Only $PRODUCT_OUT/system" \
+ --no-build --incremental --subdirs system
+
+ run "REBUILD: Incremental build, Use out/ as work dir, Whole out dir" \
+ --incremental --no-check-out-dir
+ run "Incremental build, Use out/ as work dir, Default subdirs" \
+ --no-build --incremental --no-check-out-dir --subdirs
+ run "Incremental build, Use out/ as work dir, Only $PRODUCT_OUT/system" \
+ --no-build --incremental --no-check-out-dir --subdirs system
+}
+
+time run_tests 2>&1 | tee exercise_compare_builds.txt
diff --git a/tools/extract_kernel.py b/tools/extract_kernel.py
index 0046b38..44fbcdf 100755
--- a/tools/extract_kernel.py
+++ b/tools/extract_kernel.py
@@ -39,12 +39,12 @@
# "Linux version " UTS_RELEASE " (" LINUX_COMPILE_BY "@"
# LINUX_COMPILE_HOST ") (" LINUX_COMPILER ") " UTS_VERSION "\n";
LINUX_BANNER_PREFIX = b'Linux version '
-LINUX_BANNER_REGEX = LINUX_BANNER_PREFIX + \
+LINUX_BANNER_REGEX = LINUX_BANNER_PREFIX.decode() + \
r'(?P<release>(?P<version>[0-9]+[.][0-9]+[.][0-9]+).*) \(.*@.*\) \((?P<compiler>.*)\) .*\n'
def get_from_release(input_bytes, start_idx, key):
- null_idx = input_bytes.find('\x00', start_idx)
+ null_idx = input_bytes.find(b'\x00', start_idx)
if null_idx < 0:
return None
try:
@@ -69,7 +69,7 @@
value = get_from_release(input_bytes, idx, key)
if value:
- return value
+ return value.encode()
idx += len(LINUX_BANNER_PREFIX)
@@ -140,7 +140,7 @@
while True:
idx = input_bytes.find(search_bytes, idx)
if idx < 0:
- raise StopIteration()
+ return
yield try_decompress_bytes(cmd, input_bytes[idx:])
idx += 1
@@ -183,6 +183,11 @@
return False
return True
+def to_bytes_io(b):
+ """
+ Make b, which is either sys.stdout or sys.stdin, receive bytes as arguments.
+ """
+ return b.buffer if sys.version_info.major == 3 else b
def main():
parser = argparse.ArgumentParser(
@@ -194,35 +199,35 @@
help='Input kernel image. If not specified, use stdin',
metavar='FILE',
type=argparse.FileType('rb'),
- default=sys.stdin)
+ default=to_bytes_io(sys.stdin))
parser.add_argument('--output-configs',
help='If specified, write configs. Use stdout if no file '
'is specified.',
metavar='FILE',
nargs='?',
type=argparse.FileType('wb'),
- const=sys.stdout)
+ const=to_bytes_io(sys.stdout))
parser.add_argument('--output-version',
help='If specified, write version. Use stdout if no file '
'is specified.',
metavar='FILE',
nargs='?',
type=argparse.FileType('wb'),
- const=sys.stdout)
+ const=to_bytes_io(sys.stdout))
parser.add_argument('--output-release',
help='If specified, write kernel release. Use stdout if '
'no file is specified.',
metavar='FILE',
nargs='?',
type=argparse.FileType('wb'),
- const=sys.stdout)
+ const=to_bytes_io(sys.stdout))
parser.add_argument('--output-compiler',
help='If specified, write the compiler information. Use stdout if no file '
'is specified.',
metavar='FILE',
nargs='?',
type=argparse.FileType('wb'),
- const=sys.stdout)
+ const=to_bytes_io(sys.stdout))
parser.add_argument('--tools',
help='Decompression tools to use. If not specified, PATH '
'is searched.',
diff --git a/tools/fs_config/Android.bp b/tools/fs_config/Android.bp
index 1dd5e4a..4544e07 100644
--- a/tools/fs_config/Android.bp
+++ b/tools/fs_config/Android.bp
@@ -12,6 +12,15 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "build_make_license"
+ // to get the below license kinds:
+ // legacy_restricted
+ default_applicable_licenses: ["build_make_license"],
+}
+
bootstrap_go_package {
name: "soong-fs_config",
pkgPath: "android/soong/fs_config",
diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk
index c338462..10d25e0 100644
--- a/tools/fs_config/Android.mk
+++ b/tools/fs_config/Android.mk
@@ -42,6 +42,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := fs_config_dirs
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_REQUIRED_MODULES := \
fs_config_dirs_system \
fs_config_dirs_system_ext \
@@ -55,6 +57,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := fs_config_files
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_REQUIRED_MODULES := \
fs_config_files_system \
fs_config_files_system_ext \
@@ -69,6 +73,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := fs_config_dirs_system_ext
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_SYSTEM_EXTIMAGE)$(BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE),_fs_config_dirs_system_ext)
include $(BUILD_PHONY_PACKAGE)
@@ -79,6 +85,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := fs_config_files_system_ext
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_SYSTEM_EXTIMAGE)$(BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE),_fs_config_files_system_ext)
include $(BUILD_PHONY_PACKAGE)
@@ -89,6 +97,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := fs_config_dirs_product
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_PRODUCTIMAGE)$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),_fs_config_dirs_product)
include $(BUILD_PHONY_PACKAGE)
@@ -99,6 +109,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := fs_config_files_product
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_PRODUCTIMAGE)$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),_fs_config_files_product)
include $(BUILD_PHONY_PACKAGE)
@@ -109,6 +121,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := fs_config_dirs_nonsystem
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),_fs_config_dirs_$(t))
include $(BUILD_PHONY_PACKAGE)
@@ -119,6 +133,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := fs_config_files_nonsystem
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),_fs_config_files_$(t))
include $(BUILD_PHONY_PACKAGE)
@@ -129,6 +145,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := fs_config_dirs_system
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
include $(BUILD_SYSTEM)/base_rules.mk
@@ -154,6 +172,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := fs_config_files_system
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
include $(BUILD_SYSTEM)/base_rules.mk
@@ -180,6 +200,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := _fs_config_dirs_vendor
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
@@ -204,6 +226,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := _fs_config_files_vendor
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
@@ -231,6 +255,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := _fs_config_dirs_oem
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
@@ -255,6 +281,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := _fs_config_files_oem
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
@@ -282,6 +310,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := _fs_config_dirs_odm
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
@@ -306,6 +336,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := _fs_config_files_odm
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
@@ -333,6 +365,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := _fs_config_dirs_vendor_dlkm
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_DLKM)/etc
@@ -357,6 +391,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := _fs_config_files_vendor_dlkm
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_DLKM)/etc
@@ -384,6 +420,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := _fs_config_dirs_odm_dlkm
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_ODM_DLKM)/etc
@@ -408,6 +446,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := _fs_config_files_odm_dlkm
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_ODM_DLKM)/etc
@@ -435,6 +475,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := _fs_config_dirs_product
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT)/etc
@@ -459,6 +501,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := _fs_config_files_product
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT)/etc
@@ -485,6 +529,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := _fs_config_dirs_system_ext
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_EXT)/etc
@@ -509,6 +555,8 @@
include $(CLEAR_VARS)
LOCAL_MODULE := _fs_config_files_system_ext
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
LOCAL_MODULE_CLASS := ETC
LOCAL_INSTALLED_MODULE_STEM := fs_config_files
LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_EXT)/etc
diff --git a/tools/fs_get_stats/Android.bp b/tools/fs_get_stats/Android.bp
index 67742b8..9457de4 100644
--- a/tools/fs_get_stats/Android.bp
+++ b/tools/fs_get_stats/Android.bp
@@ -1,3 +1,12 @@
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "build_make_license"
+ // to get the below license kinds:
+ // legacy_restricted
+ default_applicable_licenses: ["build_make_license"],
+}
+
cc_binary_host {
name: "fs_get_stats",
srcs: ["fs_get_stats.c"],
diff --git a/tools/libhost/Android.bp b/tools/libhost/Android.bp
index 4c9100f..a83f2e7 100644
--- a/tools/libhost/Android.bp
+++ b/tools/libhost/Android.bp
@@ -1,3 +1,12 @@
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "build_make_license"
+ // to get the below license kinds:
+ // legacy_restricted
+ default_applicable_licenses: ["build_make_license"],
+}
+
cc_library_host_static {
srcs: ["CopyFile.c"],
diff --git a/tools/post_process_props.py b/tools/post_process_props.py
index d8c9cb1..46bae29 100755
--- a/tools/post_process_props.py
+++ b/tools/post_process_props.py
@@ -42,7 +42,59 @@
# default to "adb". That might not the right policy there, but it's better
# to be explicit.
if not prop_list.get_value("persist.sys.usb.config"):
- prop_list.put("persist.sys.usb.config", "none");
+ prop_list.put("persist.sys.usb.config", "none")
+
+def validate_and_add_grf_props(prop_list, sdk_version):
+ """Validate GRF properties if exist.
+
+ If ro.board.first_api_level is defined, check if its value is valid for the
+ sdk version.
+ Also, validate the value of ro.board.api_level if defined. If the
+ ro.board.api_level property is not defined, define it with the required
+ vendor API level for the GRF policy.
+
+ Returns:
+ True if the GRF properties are valid.
+ """
+ grf_api_level = prop_list.get_value("ro.board.first_api_level")
+ board_api_level = prop_list.get_value("ro.board.api_level")
+
+ if not grf_api_level:
+ if board_api_level:
+ sys.stderr.write("error: non-GRF device must not define "
+ "ro.board.api_level\n")
+ return False
+ # non-GRF device skips the GRF validation test
+ return True
+
+ grf_api_level = int(grf_api_level)
+ if grf_api_level > sdk_version:
+ sys.stderr.write("error: ro.board.first_api_level(%d) must be less than "
+ "or equal to ro.build.version.sdk(%d)\n"
+ % (grf_api_level, sdk_version))
+ return False
+
+ grf_window = 4
+ grf_required_api_level = (grf_api_level
+ + grf_window * ((sdk_version - grf_api_level) // grf_window))
+
+ if board_api_level:
+ board_api_level = int(board_api_level)
+ if board_api_level < grf_api_level or board_api_level > sdk_version:
+ sys.stderr.write("error: ro.board.api_level(%d) must be neither less "
+ "than ro.board.first_api_level(%d) nor greater than "
+ "ro.build.version.sdk(%d)\n"
+ % (board_api_level, grf_api_level, sdk_version))
+ return False
+ if board_api_level < grf_required_api_level:
+ sys.stderr.write("error: ro.board.api_level(%d) must be greater than or "
+ "equal to %d based on GRF policy\n"
+ % (board_api_level, grf_required_api_level))
+ return False
+ else:
+ prop_list.put("ro.board.api_level", str(grf_required_api_level))
+
+ return True
def validate(prop_list):
"""Validate the properties.
@@ -215,6 +267,7 @@
default=False)
parser.add_argument("filename")
parser.add_argument("disallowed_keys", metavar="KEY", type=str, nargs="*")
+ parser.add_argument("--sdk-version", type=int, required=True)
args = parser.parse_args()
if not args.filename.endswith("/build.prop"):
@@ -225,6 +278,8 @@
mangle_build_prop(props)
if not override_optional_props(props, args.allow_dup):
sys.exit(1)
+ if not validate_and_add_grf_props(props, args.sdk_version):
+ sys.exit(1)
if not validate(props):
sys.exit(1)
diff --git a/tools/product_config/Android.bp b/tools/product_config/Android.bp
index 287ed5a..5fdbcf0 100644
--- a/tools/product_config/Android.bp
+++ b/tools/product_config/Android.bp
@@ -1,3 +1,7 @@
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
java_defaults {
name: "product-config-defaults",
srcs: ["src/**/*.java"],
@@ -18,6 +22,6 @@
static_libs: [
"junit"
],
+ manifest: "TEST_MANIFEST.MF",
test_suites: ["general-tests"]
}
-
diff --git a/tools/product_config/TEST_MANIFEST.MF b/tools/product_config/TEST_MANIFEST.MF
new file mode 100644
index 0000000..287a77f
--- /dev/null
+++ b/tools/product_config/TEST_MANIFEST.MF
@@ -0,0 +1,2 @@
+Manifest-Version: 1.0
+Main-Class: com.android.build.config.TestRunner
diff --git a/tools/product_config/inherit_tree.py b/tools/product_config/inherit_tree.py
new file mode 100755
index 0000000..ae8a275
--- /dev/null
+++ b/tools/product_config/inherit_tree.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python3
+
+#
+# Run from the root of the tree, after product-config has been run to see
+# the product inheritance hierarchy for the current lunch target.
+#
+
+import csv
+import sys
+
+def PrintNodes(graph, node, prefix):
+ sys.stdout.write("%s%s" % (prefix, node))
+ children = graph.get(node, [])
+ if children:
+ sys.stdout.write(" {\n")
+ for child in sorted(graph.get(node, [])):
+ PrintNodes(graph, child, prefix + " ")
+ sys.stdout.write("%s}\n" % prefix);
+ else:
+ sys.stdout.write("\n")
+
+def main(argv):
+ if len(argv) != 2:
+ print("usage: inherit_tree.py out/$TARGET_PRODUCT-$TARGET_BUILD_VARIANT/dumpconfig.csv")
+ sys.exit(1)
+
+ root = None
+ graph = {}
+ with open(argv[1], newline='') as csvfile:
+ for line in csv.reader(csvfile):
+ if not root:
+ # Look for PRODUCTS
+ if len(line) < 3 or line[0] != "phase" or line[1] != "PRODUCTS":
+ continue
+ root = line[2]
+ else:
+ # Everything else
+ if len(line) < 3 or line[0] != "inherit":
+ continue
+ graph.setdefault(line[1], list()).append(line[2])
+
+ PrintNodes(graph, root, "")
+
+
+if __name__ == "__main__":
+ main(sys.argv)
+
+# vim: set expandtab ts=2 sw=2 sts=2:
+
diff --git a/tools/product_config/src/com/android/build/config/CommandException.java b/tools/product_config/src/com/android/build/config/CommandException.java
new file mode 100644
index 0000000..f1a2c39
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/CommandException.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+/**
+ * Exception to indicate that a fatal error has occurred. Throwing this
+ * will cause errors to be printed, cleanup to occur, and the command to
+ * exit with a failure code.
+ *
+ * These are user errors. Throwing other exceptions will result in
+ * the stack trace being shown.
+ */
+public class CommandException extends RuntimeException {
+ public CommandException() {
+ super();
+ }
+
+ public CommandException(String message) {
+ super(message);
+ }
+
+ public CommandException(String message, Throwable chain) {
+ super(message, chain);
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/ConfigBase.java b/tools/product_config/src/com/android/build/config/ConfigBase.java
new file mode 100644
index 0000000..9a81011
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/ConfigBase.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Common parts between MakeConfig and the to-be-added GenericConfig, BazelConfig and SoongConfig.
+ */
+public class ConfigBase {
+ protected String mPhase;
+ protected List<String> mRootNodes;
+
+ /**
+ * State of the make variable environment from before the first config file.
+ */
+ protected Map<String, Str> mInitialVariables = new HashMap();
+
+ /**
+ * State of the make variable environment from after the first config file.
+ */
+ protected Map<String, Str> mFinalVariables = new HashMap();
+
+
+ /**
+ * The variables that are handled specially.
+ */
+ protected final TreeMap<String, VarType> mProductVars = new TreeMap();
+
+ public void setPhase(String phase) {
+ mPhase = phase;
+ }
+
+ public String getPhase() {
+ return mPhase;
+ }
+
+ public void setRootNodes(List<String> filenames) {
+ mRootNodes = new ArrayList(filenames);
+ }
+
+ public List<String> getRootNodes() {
+ return mRootNodes;
+ }
+
+ public void addProductVar(String name, VarType type) {
+ mProductVars.put(name, type);
+ }
+
+ public TreeMap<String, VarType> getProductVars() {
+ return mProductVars;
+ }
+
+ public VarType getVarType(String name) {
+ final VarType t = mProductVars.get(name);
+ if (t != null) {
+ return t;
+ } else {
+ return VarType.UNKNOWN;
+ }
+ }
+
+ public boolean isProductVar(String name) {
+ return mProductVars.get(name) != null;
+ }
+
+ /**
+ * Return the state of the make variable environment from before the first config file.
+ */
+ public Map<String, Str> getInitialVariables() {
+ return mInitialVariables;
+ }
+
+ /**
+ * Return the state of the make variable environment from after the first config file.
+ */
+ public Map<String, Str> getFinalVariables() {
+ return mFinalVariables;
+ }
+
+ /**
+ * Copy common base class fields from that to this.
+ */
+ public void copyFrom(ConfigBase that) {
+ setPhase(that.getPhase());
+ setRootNodes(that.getRootNodes());
+ for (Map.Entry<String, VarType> entry: that.getProductVars().entrySet()) {
+ addProductVar(entry.getKey(), entry.getValue());
+ }
+ mInitialVariables = new HashMap(that.getInitialVariables());
+ mFinalVariables = new HashMap(that.getFinalVariables());
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/ConvertMakeToGenericConfig.java b/tools/product_config/src/com/android/build/config/ConvertMakeToGenericConfig.java
new file mode 100644
index 0000000..39bd5df
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/ConvertMakeToGenericConfig.java
@@ -0,0 +1,235 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Converts a MakeConfig into a Generic config by applying heuristics about
+ * the types of variable assignments that we do.
+ */
+public class ConvertMakeToGenericConfig {
+ private final Errors mErrors;
+
+ public ConvertMakeToGenericConfig(Errors errors) {
+ mErrors = errors;
+ }
+
+ public GenericConfig convert(Map<String, MakeConfig> make) {
+ final GenericConfig result = new GenericConfig();
+
+ final MakeConfig products = make.get("PRODUCTS");
+ if (products == null) {
+ mErrors.ERROR_DUMPCONFIG.add("Could not find PRODUCTS phase in dumpconfig output.");
+ return null;
+ }
+
+ // Base class fields
+ result.copyFrom(products);
+
+ // Each file
+ for (MakeConfig.ConfigFile f: products.getConfigFiles()) {
+ final GenericConfig.ConfigFile genericFile
+ = new GenericConfig.ConfigFile(f.getFilename());
+ result.addConfigFile(genericFile);
+
+ final List<MakeConfig.Block> blocks = f.getBlocks();
+
+ // Some assertions:
+ // TODO: Include better context for these errors.
+ // There should always be at least a BEGIN and an AFTER, so assert this.
+ if (blocks.size() < 2) {
+ throw new RuntimeException("expected at least blocks.size() >= 2. Actual size: "
+ + blocks.size());
+ }
+ if (blocks.get(0).getBlockType() != MakeConfig.BlockType.BEFORE) {
+ throw new RuntimeException("expected first block to be BEFORE");
+ }
+ if (blocks.get(blocks.size() - 1).getBlockType() != MakeConfig.BlockType.AFTER) {
+ throw new RuntimeException("expected last block to be AFTER");
+ }
+ // Everything in between should be an INHERIT block.
+ for (int index = 1; index < blocks.size() - 1; index++) {
+ if (blocks.get(index).getBlockType() != MakeConfig.BlockType.INHERIT) {
+ throw new RuntimeException("expected INHERIT at block " + index);
+ }
+ }
+
+ // Each block represents a snapshot of the interpreter variable state (minus a few big
+ // sets of variables which we don't export because they're used in the internals
+ // of node_fns.mk, so we know they're not necessary here). The first (BEFORE) one
+ // is everything that is set before the file is included, so it forms the base
+ // for everything else.
+ MakeConfig.Block prevBlock = blocks.get(0);
+
+ for (int index = 1; index < blocks.size(); index++) {
+ final MakeConfig.Block block = blocks.get(index);
+ for (final Map.Entry<String, Str> entry: block.getVars().entrySet()) {
+ final String varName = entry.getKey();
+ final GenericConfig.Assign assign = convertAssignment(block.getBlockType(),
+ block.getInheritedFile(), products.getVarType(varName), varName,
+ entry.getValue(), prevBlock.getVar(varName));
+ if (assign != null) {
+ genericFile.addStatement(assign);
+ }
+ }
+ // Handle variables that are in prevBlock but not block -- they were
+ // deleted. Is this even possible, or do they show up as ""? We will
+ // treat them as positive assignments to empty string
+ for (String prevName: prevBlock.getVars().keySet()) {
+ if (!block.getVars().containsKey(prevName)) {
+ genericFile.addStatement(
+ new GenericConfig.Assign(prevName, new Str("")));
+ }
+ }
+ if (block.getBlockType() == MakeConfig.BlockType.INHERIT) {
+ genericFile.addStatement(
+ new GenericConfig.Inherit(block.getInheritedFile()));
+ }
+ // For next iteration
+ prevBlock = block;
+ }
+ }
+
+ // Overwrite the final variables with the ones that come from the PRODUCT-EXPAND phase.
+ // Drop the ones that were newly defined between the two phases, but leave values
+ // that were modified between. We do need to reproduce that logic in this tool.
+ final MakeConfig expand = make.get("PRODUCT-EXPAND");
+ if (expand == null) {
+ mErrors.ERROR_DUMPCONFIG.add("Could not find PRODUCT-EXPAND phase in dumpconfig"
+ + " output.");
+ return null;
+ }
+ final Map<String, Str> productsFinal = products.getFinalVariables();
+ final Map<String, Str> expandInitial = expand.getInitialVariables();
+ final Map<String, Str> expandFinal = expand.getFinalVariables();
+ final Map<String, Str> finalFinal = result.getFinalVariables();
+ finalFinal.clear();
+ for (Map.Entry<String, Str> var: expandFinal.entrySet()) {
+ final String varName = var.getKey();
+ if (expandInitial.containsKey(varName) && !productsFinal.containsKey(varName)) {
+ continue;
+ }
+ finalFinal.put(varName, var.getValue());
+ }
+
+ return result;
+ }
+
+ /**
+ * Converts one variable from a MakeConfig Block into a GenericConfig Assignment.
+ */
+ GenericConfig.Assign convertAssignment(MakeConfig.BlockType blockType, Str inheritedFile,
+ VarType varType, String varName, Str varVal, Str prevVal) {
+ if (prevVal == null) {
+ // New variable.
+ return new GenericConfig.Assign(varName, varVal);
+ } else if (!varVal.equals(prevVal)) {
+ // The value changed from the last block.
+ if (varVal.length() == 0) {
+ // It was set to empty
+ return new GenericConfig.Assign(varName, varVal);
+ } else {
+ // Product vars have the @inherit processing. Other vars we
+ // will just ignore and put in one section at the end, based
+ // on the difference between the BEFORE and AFTER blocks.
+ if (varType == VarType.UNKNOWN) {
+ if (blockType == MakeConfig.BlockType.AFTER) {
+ // For UNKNOWN variables, we don't worry about the
+ // intermediate steps, just take the final value.
+ return new GenericConfig.Assign(varName, varVal);
+ } else {
+ return null;
+ }
+ } else {
+ return convertInheritedVar(blockType, inheritedFile,
+ varName, varVal, prevVal);
+ }
+ }
+ } else {
+ // Variable not touched
+ return null;
+ }
+ }
+
+ /**
+ * Handle the special inherited values, where the inherit-product puts in the
+ * @inherit:... markers, adding Statements to the ConfigFile.
+ */
+ GenericConfig.Assign convertInheritedVar(MakeConfig.BlockType blockType, Str inheritedFile,
+ String varName, Str varVal, Str prevVal) {
+ String varText = varVal.toString();
+ String prevText = prevVal.toString().trim();
+ if (blockType == MakeConfig.BlockType.INHERIT) {
+ // inherit-product appends @inherit:... so drop that.
+ final String marker = "@inherit:" + inheritedFile;
+ if (varText.endsWith(marker)) {
+ varText = varText.substring(0, varText.length() - marker.length()).trim();
+ } else {
+ mErrors.ERROR_IMPROPER_PRODUCT_VAR_MARKER.add(varVal.getPosition(),
+ "Variable didn't end with marker \"" + marker + "\": " + varText);
+ }
+ }
+
+ if (!varText.equals(prevText)) {
+ // If the variable value was actually changed.
+ final ArrayList<String> words = split(varText, prevText);
+ if (words.size() == 0) {
+ // Pure Assignment, none of the previous value is present.
+ return new GenericConfig.Assign(varName, new Str(varVal.getPosition(), varText));
+ } else {
+ // Self referential value (prepend, append, both).
+ if (words.size() > 2) {
+ // This is indicative of a construction that might not be quite
+ // what we want. The above code will do something that works if it was
+ // of the form "VAR := a $(VAR) b $(VAR) c", but if the original code
+ // was something else this won't work. This doesn't happen in AOSP, but
+ // it's a theoretical possibility, so someone might do it.
+ mErrors.WARNING_VARIABLE_RECURSION.add(varVal.getPosition(),
+ "Possible unsupported variable recursion: "
+ + varName + " = " + varVal + " (prev=" + prevVal + ")");
+ }
+ return new GenericConfig.Assign(varName, Str.toList(varVal.getPosition(), words));
+ }
+ } else {
+ // Variable not touched
+ return null;
+ }
+ }
+
+ /**
+ * Split 'haystack' on occurrences of 'needle'. Trims each string of whitespace
+ * to preserve make list semantics.
+ */
+ private static ArrayList<String> split(String haystack, String needle) {
+ final ArrayList<String> result = new ArrayList();
+ final int needleLen = needle.length();
+ if (needleLen == 0) {
+ return result;
+ }
+ int start = 0;
+ int end;
+ while ((end = haystack.indexOf(needle, start)) >= 0) {
+ result.add(haystack.substring(start, end).trim());
+ start = end + needleLen;
+ }
+ result.add(haystack.substring(start).trim());
+ return result;
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/CsvParser.java b/tools/product_config/src/com/android/build/config/CsvParser.java
new file mode 100644
index 0000000..1c8b9c3
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/CsvParser.java
@@ -0,0 +1,242 @@
+
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package com.android.build.config;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A CSV parser.
+ */
+public class CsvParser {
+ /**
+ * Internal string buffer grows by this amount.
+ */
+ private static final int CHUNK_SIZE = 64 * 1024;
+
+ /**
+ * Error parsing.
+ */
+ public static class ParseException extends Exception {
+ private int mLine;
+ private int mColumn;
+
+ public ParseException(int line, int column, String message) {
+ super(message);
+ mLine = line;
+ mColumn = column;
+ }
+
+ /**
+ * Line number in source file.
+ */
+ public int getLine() {
+ return mLine;
+ }
+
+ /**
+ * Column in source file.
+ */
+ public int getColumn() {
+ return mColumn;
+ }
+ }
+
+ public static class Line {
+ private final int mLineNumber;
+ private final List<String> mFields;
+
+ Line(int lineno, List<String> fields) {
+ mLineNumber = lineno;
+ mFields = fields;
+ }
+
+ public int getLine() {
+ return mLineNumber;
+ }
+
+ public List<String> getFields() {
+ return mFields;
+ }
+ }
+
+ // Parser States
+ private static final int STATE_START_LINE = 0;
+ private static final int STATE_START_FIELD = 1;
+ private static final int STATE_INSIDE_QUOTED_FIELD = 2;
+ private static final int STATE_FIRST_QUOTATION_MARK = 3;
+ private static final int STATE_INSIDE_UNQUOTED_FIELD = 4;
+ private static final int STATE_DONE = 5;
+
+ // Parser Actions
+ private static final int ACTION_APPEND_CHAR = 1;
+ private static final int ACTION_FIELD_COMPLETE = 2;
+ private static final int ACTION_LINE_COMPLETE = 4;
+
+ /**
+ * Constructor.
+ */
+ private CsvParser() {
+ }
+
+ /**
+ * Reads CSV and returns a list of Line objects.
+ *
+ * Handles newlines inside fields quoted with double quotes (").
+ *
+ * Doesn't report blank lines, but does include empty fields.
+ */
+ public static List<Line> parse(Reader reader)
+ throws ParseException, IOException {
+ ArrayList<Line> result = new ArrayList();
+ int line = 1;
+ int column = 1;
+ int pos = 0;
+ char[] buf = new char[CHUNK_SIZE];
+ HashMap<String,String> stringPool = new HashMap();
+ ArrayList<String> fields = new ArrayList();
+
+ int state = STATE_START_LINE;
+ while (state != STATE_DONE) {
+ int c = reader.read();
+ int action = 0;
+
+ if (state == STATE_START_LINE) {
+ if (c <= 0) {
+ // No data, skip ACTION_LINE_COMPLETE.
+ state = STATE_DONE;
+ } else if (c == '"') {
+ state = STATE_INSIDE_QUOTED_FIELD;
+ } else if (c == ',') {
+ action = ACTION_FIELD_COMPLETE;
+ state = STATE_START_FIELD;
+ } else if (c == '\n') {
+ // Consume the newline, state stays STATE_START_LINE.
+ } else {
+ action = ACTION_APPEND_CHAR;
+ state = STATE_INSIDE_UNQUOTED_FIELD;
+ }
+ } else if (state == STATE_START_FIELD) {
+ if (c <= 0) {
+ // Field will be empty
+ action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+ state = STATE_DONE;
+ } else if (c == '"') {
+ state = STATE_INSIDE_QUOTED_FIELD;
+ } else if (c == ',') {
+ action = ACTION_FIELD_COMPLETE;
+ state = STATE_START_FIELD;
+ } else if (c == '\n') {
+ action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+ state = STATE_START_LINE;
+ } else {
+ action = ACTION_APPEND_CHAR;
+ state = STATE_INSIDE_UNQUOTED_FIELD;
+ }
+ } else if (state == STATE_INSIDE_QUOTED_FIELD) {
+ if (c <= 0) {
+ throw new ParseException(line, column,
+ "Bad input: End of input inside quoted field.");
+ } else if (c == '"') {
+ state = STATE_FIRST_QUOTATION_MARK;
+ } else {
+ action = ACTION_APPEND_CHAR;
+ }
+ } else if (state == STATE_FIRST_QUOTATION_MARK) {
+ if (c <= 0) {
+ action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+ state = STATE_DONE;
+ } else if (c == '"') {
+ action = ACTION_APPEND_CHAR;
+ state = STATE_INSIDE_QUOTED_FIELD;
+ } else if (c == ',') {
+ action = ACTION_FIELD_COMPLETE;
+ state = STATE_START_FIELD;
+ } else if (c == '\n') {
+ action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+ state = STATE_START_LINE;
+ } else {
+ throw new ParseException(line, column,
+ "Bad input: Character after field ended or unquoted '\"'.");
+ }
+ } else if (state == STATE_INSIDE_UNQUOTED_FIELD) {
+ if (c <= 0) {
+ action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+ state = STATE_DONE;
+ } else if (c == ',') {
+ action = ACTION_FIELD_COMPLETE;
+ state = STATE_START_FIELD;
+ } else if (c == '\n') {
+ action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+ state = STATE_START_LINE;
+ } else {
+ action = ACTION_APPEND_CHAR;
+ }
+ }
+
+ if ((action & ACTION_APPEND_CHAR) != 0) {
+ // Reallocate buffer if necessary. Hopefully not often because CHUNK_SIZE is big.
+ if (pos >= buf.length) {
+ char[] old = buf;
+ buf = new char[old.length + CHUNK_SIZE];
+ System.arraycopy(old, 0, buf, 0, old.length);
+ }
+ // Store the character
+ buf[pos] = (char)c;
+ pos++;
+ }
+ if ((action & ACTION_FIELD_COMPLETE) != 0) {
+ // A lot of the strings are duplicated, so pool them to reduce peak memory
+ // usage. This could be made slightly better by having a custom key class
+ // that does the lookup without making a new String that gets immediately
+ // thrown away.
+ String field = new String(buf, 0, pos);
+ final String cached = stringPool.get(field);
+ if (cached == null) {
+ stringPool.put(field, field);
+ } else {
+ field = cached;
+ }
+ fields.add(field);
+ pos = 0;
+ }
+ if ((action & ACTION_LINE_COMPLETE) != 0) {
+ // Only report lines with any contents
+ if (fields.size() > 0) {
+ result.add(new Line(line, fields));
+ fields = new ArrayList();
+ }
+ }
+
+ if (c == '\n') {
+ line++;
+ column = 1;
+ } else {
+ column++;
+ }
+ }
+
+ return result;
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/DumpConfigParser.java b/tools/product_config/src/com/android/build/config/DumpConfigParser.java
new file mode 100644
index 0000000..c4cd963
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/DumpConfigParser.java
@@ -0,0 +1,322 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package com.android.build.config;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map;
+import java.util.regex.Pattern;
+
+/**
+ * Parses the output of ckati building build/make/core/dumpconfig.mk.
+ *
+ * The format is as follows:
+ * - All processed lines are colon (':') separated fields.
+ * - Lines before the dumpconfig_version line are dropped for forward compatibility
+ * - Lines where the first field is config_var describe variables declared in makefiles
+ * (implemented by the dump-config-vals macro)
+ * Field Description
+ * 0 "config_var" row type
+ * 1 Product makefile being processed
+ * 2 The variable name
+ * 3 The value of the variable
+ * 4 The location of the variable, as best tracked by kati
+ */
+public class DumpConfigParser {
+ private static final boolean DEBUG = false;
+
+ private final Errors mErrors;
+ private final String mFilename;
+ private final Reader mReader;
+
+ private final Map<String,MakeConfig> mResults = new HashMap();
+
+ private static final Pattern LIST_SEPARATOR = Pattern.compile("\\s+");
+
+ /**
+ * Constructor.
+ */
+ private DumpConfigParser(Errors errors, String filename, Reader reader) {
+ mErrors = errors;
+ mFilename = filename;
+ mReader = reader;
+ }
+
+ /**
+ * Parse the text into a map of the phase names to MakeConfig objects.
+ */
+ public static Map<String,MakeConfig> parse(Errors errors, String filename, Reader reader)
+ throws CsvParser.ParseException, IOException {
+ DumpConfigParser parser = new DumpConfigParser(errors, filename, reader);
+ parser.parseImpl();
+ return parser.mResults;
+ }
+
+ /**
+ * Parse the input.
+ */
+ private void parseImpl() throws CsvParser.ParseException, IOException {
+ final List<CsvParser.Line> lines = CsvParser.parse(mReader);
+ final int lineCount = lines.size();
+ int index = 0;
+
+ int dumpconfigVersion = 0;
+
+        // Ignore lines until we get a dumpconfig_version line for forward compatibility.
+ // In a previous life, this loop parsed from all of kati's stdout, not just the file
+ // that dumpconfig.mk writes, but it's harmless to leave this loop in. It gives us a
+ // little bit of flexibility which we probably won't need anyway, this tool probably
+ // won't diverge from dumpconfig.mk anyway.
+ for (; index < lineCount; index++) {
+ final CsvParser.Line line = lines.get(index);
+ final List<String> fields = line.getFields();
+
+ if (matchLineType(line, "dumpconfig_version", 1)) {
+ try {
+ dumpconfigVersion = Integer.parseInt(fields.get(1));
+ } catch (NumberFormatException ex) {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "Couldn't parse dumpconfig_version: " + fields.get(1));
+ }
+ break;
+ }
+ }
+
+ // If we never saw dumpconfig_version, there's a problem with the command, so stop.
+ if (dumpconfigVersion == 0) {
+ mErrors.ERROR_DUMPCONFIG.fatal(
+ new Position(mFilename),
+ "Never saw a valid dumpconfig_version line.");
+ }
+
+ // Any lines before the start signal will be dropped. We create garbage objects
+ // here to avoid having to check for null everywhere.
+ MakeConfig makeConfig = new MakeConfig();
+ MakeConfig.ConfigFile configFile = new MakeConfig.ConfigFile("<ignored>");
+ MakeConfig.Block block = new MakeConfig.Block(MakeConfig.BlockType.UNSET);
+ Map<String, Str> initialVariables = new HashMap();
+ Map<String, Str> finalVariables = new HashMap();
+
+ // Number of "phases" we've seen so far.
+ for (; index < lineCount; index++) {
+ final CsvParser.Line line = lines.get(index);
+ final List<String> fields = line.getFields();
+ final String lineType = fields.get(0);
+
+ if (matchLineType(line, "phase", 2)) {
+ // Start the new one
+ makeConfig = new MakeConfig();
+ makeConfig.setPhase(fields.get(1));
+ makeConfig.setRootNodes(splitList(fields.get(2)));
+ // If there is a duplicate phase of the same name, continue parsing, but
+ // don't add it. Emit a warning.
+ if (!mResults.containsKey(makeConfig.getPhase())) {
+ mResults.put(makeConfig.getPhase(), makeConfig);
+ } else {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "Duplicate phase: " + makeConfig.getPhase()
+ + ". This one will be dropped.");
+ }
+ initialVariables = makeConfig.getInitialVariables();
+ finalVariables = makeConfig.getFinalVariables();
+
+ if (DEBUG) {
+ System.out.println("PHASE:");
+ System.out.println(" " + makeConfig.getPhase());
+ System.out.println(" " + makeConfig.getRootNodes());
+ }
+ } else if (matchLineType(line, "var", 2)) {
+ final VarType type = "list".equals(fields.get(1)) ? VarType.LIST : VarType.SINGLE;
+ makeConfig.addProductVar(fields.get(2), type);
+
+ if (DEBUG) {
+ System.out.println(" VAR: " + type + " " + fields.get(2));
+ }
+ } else if (matchLineType(line, "import", 1)) {
+ final List<String> importStack = splitList(fields.get(1));
+ if (importStack.size() == 0) {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "'import' line with empty include stack.");
+ continue;
+ }
+
+ // The beginning of importing a new file.
+ configFile = new MakeConfig.ConfigFile(importStack.get(0));
+ if (makeConfig.addConfigFile(configFile) != null) {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "Duplicate file imported in section: " + configFile.getFilename());
+ }
+ // We expect a Variable block next.
+ block = new MakeConfig.Block(MakeConfig.BlockType.BEFORE);
+ configFile.addBlock(block);
+
+ if (DEBUG) {
+ System.out.println(" IMPORT: " + configFile.getFilename());
+ }
+ } else if (matchLineType(line, "inherit", 2)) {
+ final String currentFile = fields.get(1);
+ final String inheritedFile = fields.get(2);
+ if (!configFile.getFilename().equals(currentFile)) {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "Unexpected current file in 'inherit' line '" + currentFile
+ + "' while processing '" + configFile.getFilename() + "'");
+ continue;
+ }
+
+ // There is already a file in progress, so add another var block to that.
+ block = new MakeConfig.Block(MakeConfig.BlockType.INHERIT);
+ // TODO: Make dumpconfig.mk also output a Position for inherit-product
+ block.setInheritedFile(new Str(inheritedFile));
+ configFile.addBlock(block);
+
+ if (DEBUG) {
+ System.out.println(" INHERIT: " + inheritedFile);
+ }
+ } else if (matchLineType(line, "imported", 1)) {
+ final List<String> importStack = splitList(fields.get(1));
+ if (importStack.size() == 0) {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "'imported' line with empty include stack.");
+ continue;
+ }
+ final String currentFile = importStack.get(0);
+ if (!configFile.getFilename().equals(currentFile)) {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "Unexpected current file in 'imported' line '" + currentFile
+ + "' while processing '" + configFile.getFilename() + "'");
+ continue;
+ }
+
+ // There is already a file in progress, so add another var block to that.
+ // This will be the last one, but will check that after parsing.
+ block = new MakeConfig.Block(MakeConfig.BlockType.AFTER);
+ configFile.addBlock(block);
+
+ if (DEBUG) {
+ System.out.println(" AFTER: " + currentFile);
+ }
+ } else if (matchLineType(line, "val", 5)) {
+ final String productMakefile = fields.get(1);
+ final String blockTypeString = fields.get(2);
+ final String varName = fields.get(3);
+ final String varValue = fields.get(4);
+ final Position pos = Position.parse(fields.get(5));
+ final Str str = new Str(pos, varValue);
+
+ if (blockTypeString.equals("initial")) {
+ initialVariables.put(varName, str);
+ } else if (blockTypeString.equals("final")) {
+ finalVariables.put(varName, str);
+ } else {
+ if (!productMakefile.equals(configFile.getFilename())) {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "Mismatched 'val' product makefile."
+ + " Expected: " + configFile.getFilename()
+ + " Saw: " + productMakefile);
+ continue;
+ }
+
+ final MakeConfig.BlockType blockType = parseBlockType(line, blockTypeString);
+ if (blockType == null) {
+ continue;
+ }
+ if (blockType != block.getBlockType()) {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "Mismatched 'val' block type."
+ + " Expected: " + block.getBlockType()
+ + " Saw: " + blockType);
+ }
+
+ // Add the variable to the block in progress
+ block.addVar(varName, str);
+ }
+ } else {
+ if (DEBUG) {
+ System.out.print("# ");
+ for (int d = 0; d < fields.size(); d++) {
+ System.out.print(fields.get(d));
+ if (d != fields.size() - 1) {
+ System.out.print(",");
+ }
+ }
+ System.out.println();
+ }
+ }
+ }
+ }
+
+ /**
+ * Return true if the line type matches 'lineType' and there are at least 'fieldCount'
+ * fields (not including the first field which is the line type).
+ */
+ private boolean matchLineType(CsvParser.Line line, String lineType, int fieldCount) {
+ final List<String> fields = line.getFields();
+ if (!lineType.equals(fields.get(0))) {
+ return false;
+ }
+ if (fields.size() < (fieldCount + 1)) {
+ mErrors.WARNING_DUMPCONFIG.add(new Position(mFilename, line.getLine()),
+ fields.get(0) + " line has " + fields.size() + " fields. Expected at least "
+ + (fieldCount + 1) + " fields.");
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Split a string with space separated items (i.e. the make list format) into a List<String>.
+ */
+ private static List<String> splitList(String text) {
+ // Arrays.asList returns a fixed-length List, so we copy it into an ArrayList to not
+ // propagate that surprise detail downstream.
+ return new ArrayList(Arrays.asList(LIST_SEPARATOR.split(text.trim())));
+ }
+
+ /**
+     * Parse a BlockType or issue a warning if it can't be parsed.
+ */
+ private MakeConfig.BlockType parseBlockType(CsvParser.Line line, String text) {
+ if ("before".equals(text)) {
+ return MakeConfig.BlockType.BEFORE;
+ } else if ("inherit".equals(text)) {
+ return MakeConfig.BlockType.INHERIT;
+ } else if ("after".equals(text)) {
+ return MakeConfig.BlockType.AFTER;
+ } else {
+ mErrors.WARNING_DUMPCONFIG.add(
+ new Position(mFilename, line.getLine()),
+ "Invalid block type: " + text);
+ return null;
+ }
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/ErrorReporter.java b/tools/product_config/src/com/android/build/config/ErrorReporter.java
index f382b4e..0a0c9f4 100644
--- a/tools/product_config/src/com/android/build/config/ErrorReporter.java
+++ b/tools/product_config/src/com/android/build/config/ErrorReporter.java
@@ -49,6 +49,16 @@
*/
private boolean mHadError;
+ public static class FatalException extends RuntimeException {
+ FatalException(String message) {
+ super(message);
+ }
+
+ FatalException(String message, Throwable chain) {
+ super(message, chain);
+ }
+ }
+
/**
* Whether errors are errors, warnings or hidden.
*/
@@ -127,12 +137,41 @@
public String getHelp() {
return mHelp;
}
+
+ /**
+ * Add an error with no source position.
+ */
+ public void add(String message) {
+ ErrorReporter.this.add(this, false, new Position(), message);
+ }
+
+ /**
+ * Add an error.
+ */
+ public void add(Position pos, String message) {
+ ErrorReporter.this.add(this, false, pos, message);
+ }
+
+ /**
+ * Add an error with no source position, and throw a FatalException, stopping processing
+ * immediately.
+ */
+ public void fatal(String message) {
+ ErrorReporter.this.add(this, true, new Position(), message);
+ }
+
+ /**
+ * Add an error, and throw a FatalException, stopping processing immediately.
+ */
+ public void fatal(Position pos, String message) {
+ ErrorReporter.this.add(this, true, pos, message);
+ }
}
/**
* An instance of an error happening.
*/
- public class Entry {
+ public static class Entry {
private final Category mCategory;
private final Position mPosition;
private final String mMessage;
@@ -154,6 +193,13 @@
public String getMessage() {
return mMessage;
}
+
+ @Override
+ public String toString() {
+ return mPosition
+ + "[" + mCategory.getLevel().getLabel() + " " + mCategory.getCode() + "] "
+ + mMessage;
+ }
}
private void initLocked() {
@@ -191,22 +237,16 @@
}
/**
- * Add an error with no source position.
- */
- public void add(Category category, String message) {
- add(category, new Position(), message);
- }
-
- /**
* Add an error.
*/
- public void add(Category category, Position pos, String message) {
+ private void add(Category category, boolean fatal, Position pos, String message) {
synchronized (mEntries) {
initLocked();
if (mCategories.get(category.getCode()) != category) {
throw new RuntimeException("Errors.Category used from the wrong Errors object.");
}
- mEntries.add(new Entry(category, pos, message));
+ final Entry entry = new Entry(category, pos, message);
+ mEntries.add(entry);
final Level level = category.getLevel();
if (level == Level.WARNING || level == Level.ERROR) {
mHadWarningOrError = true;
@@ -214,6 +254,9 @@
if (level == Level.ERROR) {
mHadError = true;
}
+ if (fatal) {
+ throw new FatalException(entry.toString());
+ }
}
}
@@ -250,13 +293,10 @@
public void printErrors(PrintStream out) {
synchronized (mEntries) {
for (Entry entry: mEntries) {
- final Category category = entry.getCategory();
- final Level level = category.getLevel();
- if (level == Level.HIDDEN) {
+ if (entry.getCategory().getLevel() == Level.HIDDEN) {
continue;
}
- out.println(entry.getPosition() + "[" + level.getLabel() + " "
- + category.getCode() + "] " + entry.getMessage());
+ out.println(entry.toString());
}
}
}
diff --git a/tools/product_config/src/com/android/build/config/Errors.java b/tools/product_config/src/com/android/build/config/Errors.java
index 63792c8..b333e78 100644
--- a/tools/product_config/src/com/android/build/config/Errors.java
+++ b/tools/product_config/src/com/android/build/config/Errors.java
@@ -30,7 +30,7 @@
* <b>Naming Convention:</b>
* <ul>
* <li>ERROR_ for Categories with isLevelSettable false and Level.ERROR
- * <li>WARNING_ for Categories with isLevelSettable false and default WARNING or HIDDEN
+ * <li>WARNING_ for Categories with isLevelSettable true and default WARNING or HIDDEN
* <li>Don't have isLevelSettable true and not ERROR. (The constructor asserts this).
* </ul>
*/
@@ -42,4 +42,33 @@
public final Category WARNING_UNKNOWN_COMMAND_LINE_ERROR = new Category(2, true, Level.HIDDEN,
"Passing unknown errors on the command line. Hidden by default for\n"
+ "forward compatibility.");
+
+ public final Category ERROR_KATI = new Category(3, false, Level.ERROR,
+ "Error executing or reading from Kati.");
+
+ public final Category WARNING_DUMPCONFIG = new Category(4, true, Level.WARNING,
+ "Anomaly parsing the output of kati and dumpconfig.mk.");
+
+ public final Category ERROR_DUMPCONFIG = new Category(5, false, Level.ERROR,
+ "Error parsing the output of kati and dumpconfig.mk.");
+
+ public final Category WARNING_VARIABLE_RECURSION = new Category(6, true, Level.WARNING,
+ "Possible unsupported variable recursion.");
+
+ // This could be a warning, but it's very likely that the data is corrupted somehow
+ // if we're seeing this.
+ public final Category ERROR_IMPROPER_PRODUCT_VAR_MARKER = new Category(7, true, Level.ERROR,
+ "Bad input from dumpvars causing corrupted product variables.");
+
+ public final Category ERROR_MISSING_CONFIG_FILE = new Category(8, true, Level.ERROR,
+ "Unable to find config file.");
+
+ public final Category ERROR_INFINITE_RECURSION = new Category(9, true, Level.ERROR,
+ "A file tries to inherit-product from itself or its own inherited products.");
+
+ // TODO: This will become obsolete when it is possible to have starlark-based product
+ // config files.
+ public final Category WARNING_DIFFERENT_FROM_KATI = new Category(1000, true, Level.WARNING,
+ "The cross-check with the original kati implementation failed.");
+
}
diff --git a/tools/product_config/src/com/android/build/config/FlatConfig.java b/tools/product_config/src/com/android/build/config/FlatConfig.java
new file mode 100644
index 0000000..6f277fe
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/FlatConfig.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Flattened configuration -- set of variables after all assignments and inherits have
+ * been executed.
+ */
+public class FlatConfig extends ConfigBase {
+
+ private final TreeMap<String, Value> mValues = new TreeMap();
+
+ public TreeMap<String, Value> getValues() {
+ return mValues;
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/FlattenConfig.java b/tools/product_config/src/com/android/build/config/FlattenConfig.java
new file mode 100644
index 0000000..a19802b
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/FlattenConfig.java
@@ -0,0 +1,474 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import java.util.regex.Pattern;
+
+public class FlattenConfig {
+ private static final Pattern RE_SPACE = Pattern.compile("\\p{Space}+");
+ private static final String PRODUCTS_PREFIX = "PRODUCTS";
+
+ private final Errors mErrors;
+ private final GenericConfig mGenericConfig;
+ private final Map<String, GenericConfig.ConfigFile> mGenericConfigs;
+ private final FlatConfig mResult = new FlatConfig();
+ private final Map<String, Value> mVariables;
+ /**
+ * Files that have been visited, to prevent infinite recursion. There are no
+ * conditionals at this point in the processing, so we don't need a stack, just
+ * a single set.
+ */
+ private final Set<Str> mStack = new HashSet();
+
+
+ private FlattenConfig(Errors errors, GenericConfig genericConfig) {
+ mErrors = errors;
+ mGenericConfig = genericConfig;
+ mGenericConfigs = genericConfig.getFiles();
+ mVariables = mResult.getValues();
+
+ // Base class fields
+ mResult.copyFrom(genericConfig);
+ }
+
+ /**
+ * Flatten a GenericConfig to a FlatConfig.
+ *
+ * Makes three passes through the genericConfig, one to flatten the single variables,
+ * one to flatten the list variables, and one to flatten the unknown variables. Each
+ * has a slightly different algorithm.
+ */
+ public static FlatConfig flatten(Errors errors, GenericConfig genericConfig) {
+ final FlattenConfig flattener = new FlattenConfig(errors, genericConfig);
+ return flattener.flattenImpl();
+ }
+
+ private FlatConfig flattenImpl() {
+ final List<String> rootNodes = mGenericConfig.getRootNodes();
+ if (rootNodes.size() == 0) {
+ mErrors.ERROR_DUMPCONFIG.add("No root nodes in PRODUCTS phase.");
+ return null;
+ } else if (rootNodes.size() != 1) {
+ final StringBuilder msg = new StringBuilder(
+ "Ignoring extra root nodes in PRODUCTS phase. All nodes are:");
+ for (final String rn: rootNodes) {
+ msg.append(' ');
+ msg.append(rn);
+ }
+ mErrors.WARNING_DUMPCONFIG.add(msg.toString());
+ }
+ final String root = rootNodes.get(0);
+
+        // TODO: Do we need to worry about the initial state of variables? Anything
+        // that comes from the product config?
+
+ flattenListVars(root);
+ flattenSingleVars(root);
+ flattenUnknownVars(root);
+ flattenInheritsFrom(root);
+
+ setDefaultKnownVars();
+
+ // TODO: This only supports the single product mode of import-nodes, which is all the
+ // real build does. m product-graph and friends will have to be rewritten.
+ mVariables.put("PRODUCTS", new Value(VarType.UNKNOWN, new Str(root)));
+
+ return mResult;
+ }
+
+ interface AssignCallback {
+ void onAssignStatement(GenericConfig.Assign assign);
+ }
+
+ interface InheritCallback {
+ void onInheritStatement(GenericConfig.Inherit assign);
+ }
+
+ /**
+ * Do a bunch of validity checks, and then iterate through each of the statements
+ * in the given file. For Assignments, the callback is only called for variables
+ * matching varType.
+ *
+ * Adds makefiles which have been traversed to the 'seen' set, and will not traverse
+ * into an inherit statement if its makefile has already been seen.
+ */
+ private void forEachStatement(Str filename, VarType varType, Set<String> seen,
+ AssignCallback assigner, InheritCallback inheriter) {
+ if (mStack.contains(filename)) {
+ mErrors.ERROR_INFINITE_RECURSION.add(filename.getPosition(),
+ "File is already in the inherit-product stack: " + filename);
+ return;
+ }
+
+ mStack.add(filename);
+ try {
+ final GenericConfig.ConfigFile genericFile = mGenericConfigs.get(filename.toString());
+
+ if (genericFile == null) {
+ mErrors.ERROR_MISSING_CONFIG_FILE.add(filename.getPosition(),
+ "Unable to find config file: " + filename);
+ return;
+ }
+
+ for (final GenericConfig.Statement statement: genericFile.getStatements()) {
+ if (statement instanceof GenericConfig.Assign) {
+ if (assigner != null) {
+ final GenericConfig.Assign assign = (GenericConfig.Assign)statement;
+ final String varName = assign.getName();
+
+ // Assert that we're not stomping on another variable, which
+ // really should be impossible at this point.
+ assertVarType(filename, varName);
+
+ if (mGenericConfig.getVarType(varName) == varType) {
+ assigner.onAssignStatement(assign);
+ }
+ }
+ } else if (statement instanceof GenericConfig.Inherit) {
+ if (inheriter != null) {
+ final GenericConfig.Inherit inherit = (GenericConfig.Inherit)statement;
+ if (seen != null) {
+ if (seen.contains(inherit.getFilename().toString())) {
+ continue;
+ }
+ seen.add(inherit.getFilename().toString());
+ }
+ inheriter.onInheritStatement(inherit);
+ }
+ }
+ }
+ } finally {
+ // Also executes after return statements, so we always remove this.
+ mStack.remove(filename);
+ }
+ }
+
+ /**
+ * Call 'inheriter' for each child of 'filename' in alphabetical order.
+ */
+ private void forEachInheritAlpha(final Str filename, VarType varType, Set<String> seen,
+ InheritCallback inheriter) {
+ final TreeMap<Str, GenericConfig.Inherit> alpha = new TreeMap();
+ forEachStatement(filename, varType, null, null,
+ (inherit) -> {
+ alpha.put(inherit.getFilename(), inherit);
+ });
+ for (final GenericConfig.Inherit inherit: alpha.values()) {
+            // Handle 'seen' here where we actually call back, not before, so that
+ // the proper traversal order is preserved.
+ if (seen != null) {
+ if (seen.contains(inherit.getFilename().toString())) {
+ continue;
+ }
+ seen.add(inherit.getFilename().toString());
+ }
+ inheriter.onInheritStatement(inherit);
+ }
+ }
+
+ /**
+ * Traverse the inheritance hierarchy, setting list-value product config variables.
+ */
+ private void flattenListVars(final String filename) {
+ Map<String, Value> vars = flattenListVars(new Str(filename), new HashSet());
+ // Add the result of the recursion to mVariables. We know there will be
+ // no collisions because this function only handles list variables.
+ for (Map.Entry<String, Value> entry: vars.entrySet()) {
+ mVariables.put(entry.getKey(), entry.getValue());
+ }
+ }
+
+ /**
+ * Return the variables defined, recursively, by 'filename.' The 'seen' set
+ * accumulates which nodes have been visited, as each is only done once.
+ *
+ * This convoluted algorithm isn't ideal, but it matches what is in node_fns.mk.
+ */
+ private Map<String, Value> flattenListVars(final Str filename, Set<String> seen) {
+ Map<String, Value> result = new HashMap();
+
+ // Recurse into our children first in alphabetical order, building a map of
+ // that filename to its flattened values. The order matters here because
+ // we will only look at each child once, and when a file appears multiple
+ // times, its variables must have the right set, based on whether it's been
+ // seen before. This preserves the order from node_fns.mk.
+
+ // Child filename --> { varname --> value }
+ final Map<Str, Map<String, Value>> children = new HashMap();
+ forEachInheritAlpha(filename, VarType.LIST, seen,
+ (inherit) -> {
+ final Str child = inherit.getFilename();
+ children.put(child, flattenListVars(child, seen));
+ });
+
+ // Now, traverse the values again in the original source order to concatenate the values.
+        // Note that the concatenation order is *different* from the inherit order above.
+ forEachStatement(filename, VarType.LIST, null,
+ (assign) -> {
+ assignToListVar(result, assign.getName(), assign.getValue());
+ },
+ (inherit) -> {
+ final Map<String, Value> child = children.get(inherit.getFilename());
+ // child == null happens if this node has been visited before.
+ if (child != null) {
+ for (Map.Entry<String, Value> entry: child.entrySet()) {
+ final String varName = entry.getKey();
+ final Value varVal = entry.getValue();
+ appendToListVar(result, varName, varVal.getList());
+ }
+ }
+ });
+
+ return result;
+ }
+
+ /**
+ * Traverse the inheritance hierarchy, setting single-value product config variables.
+ */
+ private void flattenSingleVars(final String filename) {
+ flattenSingleVars(new Str(filename), new HashSet(), new HashSet());
+ }
+
+ private void flattenSingleVars(final Str filename, Set<String> seen1, Set<String> seen2) {
+ // flattenSingleVars has two loops. The first sets all variables that are
+ // defined for *this* file. The second traverses through the inheritance,
+ // to fill in values that weren't defined in this file. The first appearance of
+ // the variable is the one that wins.
+
+ forEachStatement(filename, VarType.SINGLE, seen1,
+ (assign) -> {
+ final String varName = assign.getName();
+ Value v = mVariables.get(varName);
+ // Only take the first value that we see for single variables.
+ Value value = mVariables.get(varName);
+ if (!mVariables.containsKey(varName)) {
+ final List<Str> valueList = assign.getValue();
+ // There should never be more than one item in this list, because
+ // SINGLE values should never be appended to.
+ if (valueList.size() != 1) {
+ final StringBuilder positions = new StringBuilder("[");
+ for (Str s: valueList) {
+ positions.append(s.getPosition());
+ }
+ positions.append(" ]");
+ throw new RuntimeException("Value list found for SINGLE variable "
+ + varName + " size=" + valueList.size()
+ + "positions=" + positions.toString());
+ }
+ mVariables.put(varName,
+ new Value(VarType.SINGLE,
+ valueList.get(0)));
+ }
+ }, null);
+
+ forEachInheritAlpha(filename, VarType.SINGLE, seen2,
+ (inherit) -> {
+ flattenSingleVars(inherit.getFilename(), seen1, seen2);
+ });
+ }
+
+ /**
+ * Traverse the inheritance hierarchy and flatten the values
+ */
+ private void flattenUnknownVars(String filename) {
+ flattenUnknownVars(new Str(filename), new HashSet());
+ }
+
+ private void flattenUnknownVars(final Str filename, Set<String> seen) {
+ // flattenUnknownVars has two loops: First to attempt to set the variable from
+ // this file, and then a second loop to handle the inheritance. This is odd
+ // but it matches the order the files are included in node_fns.mk. The last appearance
+ // of the value is the one that wins.
+
+ forEachStatement(filename, VarType.UNKNOWN, null,
+ (assign) -> {
+ // Overwrite the current value with whatever is now in the file.
+ mVariables.put(assign.getName(),
+ new Value(VarType.UNKNOWN,
+ flattenAssignList(assign, new Str(""))));
+ }, null);
+
+ forEachInheritAlpha(filename, VarType.UNKNOWN, seen,
+ (inherit) -> {
+ flattenUnknownVars(inherit.getFilename(), seen);
+ });
+ }
+
+ String prefix = "";
+
+ /**
+ * Sets the PRODUCTS.<filename>.INHERITS_FROM variables.
+ */
+ private void flattenInheritsFrom(final String filename) {
+ flattenInheritsFrom(new Str(filename));
+ }
+
+ /**
+ * This flatten function, unlike the others visits all of the nodes regardless
+ * of whether they have been seen before, because that's what the make code does.
+ */
+ private void flattenInheritsFrom(final Str filename) {
+        // Recurse, and gather the list of our children
+ final TreeSet<Str> children = new TreeSet();
+ forEachStatement(filename, VarType.LIST, null, null,
+ (inherit) -> {
+ children.add(inherit.getFilename());
+ flattenInheritsFrom(inherit.getFilename());
+ });
+
+ final String varName = "PRODUCTS." + filename + ".INHERITS_FROM";
+ if (children.size() > 0) {
+ // Build the space separated list.
+ boolean first = true;
+ final StringBuilder val = new StringBuilder();
+ for (Str child: children) {
+ if (first) {
+ first = false;
+ } else {
+ val.append(' ');
+ }
+ val.append(child);
+ }
+ mVariables.put(varName, new Value(VarType.UNKNOWN, new Str(val.toString())));
+ } else {
+ // Clear whatever flattenUnknownVars happened to have put in.
+ mVariables.remove(varName);
+ }
+ }
+
+ /**
+ * Throw an exception if there's an existing variable with a different type.
+ */
+ private void assertVarType(Str filename, String varName) {
+ if (mGenericConfig.getVarType(varName) == VarType.UNKNOWN) {
+ final Value prevValue = mVariables.get(varName);
+ if (prevValue != null
+ && prevValue.getVarType() != VarType.UNKNOWN) {
+ throw new RuntimeException("Mismatched var types:"
+ + " filename=" + filename
+ + " varType=" + mGenericConfig.getVarType(varName)
+ + " varName=" + varName
+ + " prevValue=" + Value.debugString(prevValue));
+ }
+ }
+ }
+
+ /**
+ * Depending on whether the assignment is prepending, appending, setting, etc.,
+ * update the value. We can infer which of those operations it is by the length
+ * and contents of the values. Each value in the list was originally separated
+ * by the previous value.
+ */
+ private void assignToListVar(Map<String, Value> vars, String varName, List<Str> items) {
+ final Value value = vars.get(varName);
+ final List<Str> orig = value == null ? new ArrayList() : value.getList();
+ final List<Str> result = new ArrayList();
+ if (items.size() > 0) {
+ for (int i = 0; i < items.size(); i++) {
+ if (i != 0) {
+ result.addAll(orig);
+ }
+ final Str item = items.get(i);
+ addWords(result, item);
+ }
+ }
+ vars.put(varName, new Value(result));
+ }
+
+ /**
+     * Appends all of the words in 'items' to an entry in vars keyed by 'varName',
+ * creating one if necessary.
+ */
+ private static void appendToListVar(Map<String, Value> vars, String varName, List<Str> items) {
+ Value value = vars.get(varName);
+ if (value == null) {
+ value = new Value(new ArrayList());
+ vars.put(varName, value);
+ }
+ final List<Str> out = value.getList();
+ for (Str item: items) {
+ addWords(out, item);
+ }
+ }
+
+ /**
+ * Split 'item' on spaces, and add each of them as a word to 'out'.
+ */
+ private static void addWords(List<Str> out, Str item) {
+ for (String word: RE_SPACE.split(item.toString().trim())) {
+ if (word.length() > 0) {
+ out.add(new Str(item.getPosition(), word));
+ }
+ }
+ }
+
+ /**
+ * Flatten the list of strings in an Assign statement, using the previous value
+ * as a separator.
+ */
+ private Str flattenAssignList(GenericConfig.Assign assign, Str previous) {
+ final StringBuilder result = new StringBuilder();
+ Position position = previous.getPosition();
+ final List<Str> list = assign.getValue();
+ final int size = list.size();
+ for (int i = 0; i < size; i++) {
+ final Str item = list.get(i);
+ result.append(item.toString());
+ if (i != size - 1) {
+ result.append(previous);
+ }
+ final Position pos = item.getPosition();
+ if (pos != null && pos.getFile() != null) {
+ position = pos;
+ }
+ }
+ return new Str(position, result.toString());
+ }
+
+ /**
+ * Make sure that each of the product config variables has a default value.
+ */
+ private void setDefaultKnownVars() {
+ for (Map.Entry<String, VarType> entry: mGenericConfig.getProductVars().entrySet()) {
+ final String varName = entry.getKey();
+ final VarType varType = entry.getValue();
+
+ final Value val = mVariables.get(varName);
+ if (val == null) {
+ mVariables.put(varName, new Value(varType));
+ }
+ }
+
+
+ // TODO: These two for now as well, until we can rewrite the enforce packages exist
+ // handling.
+ if (!mVariables.containsKey("PRODUCT_ENFORCE_PACKAGES_EXIST")) {
+ mVariables.put("PRODUCT_ENFORCE_PACKAGES_EXIST", new Value(VarType.UNKNOWN));
+ }
+ if (!mVariables.containsKey("PRODUCT_ENFORCE_PACKAGES_EXIST_ALLOW_LIST")) {
+ mVariables.put("PRODUCT_ENFORCE_PACKAGES_EXIST_ALLOW_LIST", new Value(VarType.UNKNOWN));
+ }
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/GenericConfig.java b/tools/product_config/src/com/android/build/config/GenericConfig.java
new file mode 100644
index 0000000..2ee2735
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/GenericConfig.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Language-agnostic representation of a configuration statement.
+ *
+ * Holds the set of config files seen during one config pass, each file
+ * being an ordered list of Assign and Inherit statements.
+ */
+public class GenericConfig extends ConfigBase {
+ /**
+ * The config files that were imported in this config pass.
+ */
+ protected final TreeMap<String, ConfigFile> mConfigFiles = new TreeMap();
+
+ /**
+ * A configuration file.
+ */
+ public static class ConfigFile {
+ /**
+ * The name of the file, relative to the tree root.
+ */
+ private final String mFilename;
+
+ /**
+ * Sections of variable definitions and import statements. Product config
+ * files will always have at least one block.
+ */
+ private final ArrayList<Statement> mStatements = new ArrayList();
+
+ public ConfigFile(String filename) {
+ mFilename = filename;
+ }
+
+ public String getFilename() {
+ return mFilename;
+ }
+
+ /**
+ * Appends a statement; order of addition is the order in the file.
+ */
+ public void addStatement(Statement statement) {
+ mStatements.add(statement);
+ }
+
+ /**
+ * Returns the live (mutable) statement list, in file order.
+ */
+ public ArrayList<Statement> getStatements() {
+ return mStatements;
+ }
+ }
+
+ /**
+ * Base class for statements that appear in config files.
+ */
+ public static class Statement {
+ }
+
+ /**
+ * A variable assignment.
+ */
+ public static class Assign extends Statement {
+ private final String mVarName;
+ private final List<Str> mValue;
+
+ /**
+ * Assignment of a single value
+ */
+ public Assign(String varName, Str value) {
+ mVarName = varName;
+ mValue = new ArrayList();
+ mValue.add(value);
+ }
+
+ /**
+ * Assignment referencing a previous value.
+ * VAR := $(1) $(VAR) $(2) $(VAR) $(3)
+ */
+ public Assign(String varName, List<Str> value) {
+ mVarName = varName;
+ mValue = value;
+ }
+
+ public String getName() {
+ return mVarName;
+ }
+
+ /**
+ * Returns the value segments; a single-element list is a plain ":=",
+ * multiple elements are joined with the variable's previous value.
+ */
+ public List<Str> getValue() {
+ return mValue;
+ }
+ }
+
+ /**
+ * An $(inherit-product FILENAME) statement
+ */
+ public static class Inherit extends Statement {
+ private final Str mFilename;
+
+ public Inherit(Str filename) {
+ mFilename = filename;
+ }
+
+ public Str getFilename() {
+ return mFilename;
+ }
+ }
+
+ /**
+ * Adds the given config file. Returns any one previously added, or null.
+ */
+ public ConfigFile addConfigFile(ConfigFile file) {
+ return mConfigFiles.put(file.getFilename(), file);
+ }
+
+ /**
+ * Returns all config files, keyed and sorted by filename.
+ */
+ public TreeMap<String, ConfigFile> getFiles() {
+ return mConfigFiles;
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/Kati.java b/tools/product_config/src/com/android/build/config/Kati.java
new file mode 100644
index 0000000..4fa2297
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Kati.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.Map;
+
+/**
+ * Wrapper for invoking kati.
+ */
+public interface Kati {
+ /**
+ * Runs kati's config dump and returns the resulting MakeConfigs,
+ * keyed by a name produced by the dump parser (presumably the config
+ * phase -- TODO confirm against DumpConfigParser). Implementations
+ * may return null after reporting errors.
+ */
+ public Map<String, MakeConfig> loadProductConfig();
+}
diff --git a/tools/product_config/src/com/android/build/config/KatiCommand.java b/tools/product_config/src/com/android/build/config/KatiCommand.java
new file mode 100644
index 0000000..f3c71d2
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/KatiCommand.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Abstraction over running the kati binary, so tests can substitute a fake.
+ */
+public interface KatiCommand {
+ /**
+ * Thrown when a kati invocation fails; carries the captured stderr.
+ */
+ public static class KatiException extends Exception {
+ private String mStderr;
+
+ /**
+ * @param cmd the full command line that was run (used in the message)
+ * @param stderr the text kati wrote to stderr
+ */
+ public KatiException(List<String> cmd, String stderr) {
+ super("Error running kati: " + Arrays.toString(cmd.toArray()));
+ mStderr = stderr;
+ }
+
+ public String getStderr() {
+ return mStderr;
+ }
+ }
+
+ /**
+ * Run kati directly. Returns stdout data.
+ *
+ * @throws KatiException if there is an error. KatiException will contain
+ * the stderr from the kati invocation.
+ */
+ public String run(String[] args) throws KatiException;
+}
diff --git a/tools/product_config/src/com/android/build/config/KatiCommandImpl.java b/tools/product_config/src/com/android/build/config/KatiCommandImpl.java
new file mode 100644
index 0000000..53480d4
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/KatiCommandImpl.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.nio.charset.StandardCharsets;
+
+/**
+ * Runs the ckati binary as a subprocess and captures its stdout/stderr.
+ */
+public class KatiCommandImpl implements KatiCommand {
+ final Errors mErrors;
+ final Options mOptions;
+
+ /**
+ * Runnable that consumes all of an InputStream until EOF, writes the contents
+ * into a StringBuilder, and then closes the stream.
+ */
+ class OutputReader implements Runnable {
+ private final InputStream mStream;
+ private final StringBuilder mOutput;
+
+ OutputReader(InputStream stream, StringBuilder output) {
+ mStream = stream;
+ mOutput = output;
+ }
+
+ @Override
+ public void run() {
+ final char[] buf = new char[16*1024];
+ // Kati output is decoded as UTF-8 regardless of platform default.
+ final InputStreamReader reader = new InputStreamReader(mStream, StandardCharsets.UTF_8);
+ try {
+ int amt;
+ while ((amt = reader.read(buf, 0, buf.length)) >= 0) {
+ mOutput.append(buf, 0, amt);
+ }
+ } catch (IOException ex) {
+ mErrors.ERROR_KATI.add("Error reading from kati: " + ex.getMessage());
+ } finally {
+ try {
+ reader.close();
+ } catch (IOException ex) {
+ // Nothing useful to do if close fails; output was already read.
+ }
+ }
+ }
+ }
+
+ public KatiCommandImpl(Errors errors, Options options) {
+ mErrors = errors;
+ mOptions = options;
+ }
+
+ /**
+ * Run kati directly. Returns stdout data.
+ *
+ * @throws KatiException if there is an error. KatiException will contain
+ * the stderr from the kati invocation.
+ */
+ public String run(String[] args) throws KatiException {
+ final ArrayList<String> cmd = new ArrayList();
+ cmd.add(mOptions.getCKatiBin());
+ for (String arg: args) {
+ cmd.add(arg);
+ }
+
+ final ProcessBuilder builder = new ProcessBuilder(cmd);
+ builder.redirectOutput(ProcessBuilder.Redirect.PIPE);
+ builder.redirectError(ProcessBuilder.Redirect.PIPE);
+
+ Process process = null;
+
+ try {
+ process = builder.start();
+ } catch (IOException ex) {
+ throw new KatiException(cmd, "IOException running process: " + ex.getMessage());
+ }
+
+ // Drain stdout and stderr on separate threads so neither pipe can
+ // fill up and block the child while we wait for it to exit.
+ final StringBuilder stdout = new StringBuilder();
+ final Thread stdoutThread = new Thread(new OutputReader(process.getInputStream(), stdout),
+ "kati_stdout_reader");
+ stdoutThread.start();
+
+ final StringBuilder stderr = new StringBuilder();
+ final Thread stderrThread = new Thread(new OutputReader(process.getErrorStream(), stderr),
+ "kati_stderr_reader");
+ stderrThread.start();
+
+ int returnCode = waitForProcess(process);
+ // Join the readers so both buffers are complete before we use them.
+ joinThread(stdoutThread);
+ joinThread(stderrThread);
+
+ if (returnCode != 0) {
+ throw new KatiException(cmd, stderr.toString());
+ }
+
+ return stdout.toString();
+ }
+
+ /**
+ * Wrap Process.waitFor() because it throws InterruptedException.
+ */
+ private static int waitForProcess(Process proc) {
+ while (true) {
+ try {
+ return proc.waitFor();
+ } catch (InterruptedException ex) {
+ // Retry; we have no cancellation protocol here.
+ }
+ }
+ }
+
+ /**
+ * Wrap Thread.join() because it throws InterruptedException.
+ */
+ private static void joinThread(Thread thread) {
+ while (true) {
+ try {
+ thread.join();
+ return;
+ } catch (InterruptedException ex) {
+ // Retry until the reader thread finishes.
+ }
+ }
+ }
+}
+
diff --git a/tools/product_config/src/com/android/build/config/KatiImpl.java b/tools/product_config/src/com/android/build/config/KatiImpl.java
new file mode 100644
index 0000000..de11f36
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/KatiImpl.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Kati implementation that runs dumpconfig.mk through ckati and parses
+ * the resulting csv into MakeConfig objects.
+ */
+public class KatiImpl implements Kati {
+ // Subdirectory inside out for config stuff.
+ private static final String CONFIG_SUBDIR = "config";
+
+ private final Errors mErrors;
+ private final Options mOptions;
+ private final KatiCommand mCommand;
+
+ // TODO: Do we need to consider the whole or a greater subset of the
+ // environment (or a hash of it?). In theory product-variant is enough, but we know
+ // people use stuff from the environment, even though we're trying to get rid of that.
+ /**
+ * Per product-variant work directory under $OUT_DIR/config.
+ */
+ private String getWorkDirPath() {
+ return Paths.get(mOptions.getOutDir(), CONFIG_SUBDIR,
+ mOptions.getProduct() + '-' + mOptions.getVariant()).toString();
+ }
+
+ /**
+ * Path of the csv file that dumpconfig.mk writes into the work directory.
+ */
+ private String getDumpConfigCsvPath() {
+ return Paths.get(getWorkDirPath(), "dumpconfig.csv").toString();
+ }
+
+ public KatiImpl(Errors errors, Options options) {
+ this(errors, options, new KatiCommandImpl(errors, options));
+ }
+
+ // VisibleForTesting
+ public KatiImpl(Errors errors, Options options, KatiCommand command) {
+ mErrors = errors;
+ mOptions = options;
+ mCommand = command;
+ }
+
+ /**
+ * Runs kati on build/make/core/dumpconfig.mk and parses the csv it
+ * writes. Returns null (after reporting via ERROR_KATI) on any failure.
+ */
+ @Override
+ public Map<String, MakeConfig> loadProductConfig() {
+ final String csvPath = getDumpConfigCsvPath();
+ try {
+ File workDir = new File(getWorkDirPath());
+
+ // Fail only if the path is not a directory and we also can't
+ // create it. Note File.mkdirs() returns false when the directory
+ // already exists, so its result alone must not be treated as an
+ // error -- doing so broke every run after the first.
+ if (!workDir.isDirectory() && !workDir.mkdirs()) {
+ mErrors.ERROR_KATI.add("Unable to create directory: " + workDir);
+ return null; // TODO: throw exception?
+ }
+
+ String out = mCommand.run(new String[] {
+ "-f", "build/make/core/dumpconfig.mk",
+ "DUMPCONFIG_FILE=" + csvPath
+ });
+
+ // dumpconfig.mk prints ***DONE*** when it completes; a zero exit
+ // without that marker means the dump did not finish.
+ if (!out.contains("***DONE***")) {
+ mErrors.ERROR_KATI.add(
+ "Unknown error with kati, but it didn't print ***DONE*** message");
+ return null; // TODO: throw exception?
+ }
+ // TODO: Check that output was good.
+ } catch (KatiCommand.KatiException ex) {
+ mErrors.ERROR_KATI.add("Error running kati:\n" + ex.getStderr());
+ return null;
+ }
+
+ if (!(new File(csvPath)).canRead()) {
+ mErrors.ERROR_KATI.add("Kati ran but did not create " + csvPath);
+ return null;
+ }
+
+ try (FileReader reader = new FileReader(csvPath)) {
+ Map<String, MakeConfig> makeConfigs = DumpConfigParser.parse(mErrors, csvPath, reader);
+
+ if (makeConfigs.size() == 0) {
+ // TODO: Issue error?
+ return null;
+ }
+
+ return makeConfigs;
+ } catch (CsvParser.ParseException ex) {
+ mErrors.ERROR_KATI.add(new Position(csvPath, ex.getLine()),
+ "Unable to parse output of dumpconfig.mk: " + ex.getMessage());
+ return null; // TODO: throw exception?
+ } catch (IOException ex) {
+ mErrors.ERROR_KATI.add("Unable to read " + csvPath + ": " + ex.getMessage());
+ return null; // TODO: throw exception?
+ }
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/Main.java b/tools/product_config/src/com/android/build/config/Main.java
index 7669742..5cec55e 100644
--- a/tools/product_config/src/com/android/build/config/Main.java
+++ b/tools/product_config/src/com/android/build/config/Main.java
@@ -16,6 +16,11 @@
package com.android.build.config;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
public class Main {
private final Errors mErrors;
private final Options mOptions;
@@ -26,11 +31,44 @@
}
void run() {
- System.out.println("Hello World");
-
// TODO: Check the build environment to make sure we're running in a real
// build environment, e.g. actually inside a source tree, with TARGET_PRODUCT
// and TARGET_BUILD_VARIANT defined, etc.
+ Kati kati = new KatiImpl(mErrors, mOptions);
+ Map<String, MakeConfig> makeConfigs = kati.loadProductConfig();
+ if (makeConfigs == null || mErrors.hadError()) {
+ return;
+ }
+ if (false) {
+ for (MakeConfig makeConfig: (new TreeMap<String, MakeConfig>(makeConfigs)).values()) {
+ System.out.println();
+ System.out.println("=======================================");
+ System.out.println("PRODUCT CONFIG FILES : " + makeConfig.getPhase());
+ System.out.println("=======================================");
+ makeConfig.printToStream(System.out);
+ }
+ }
+
+ ConvertMakeToGenericConfig m2g = new ConvertMakeToGenericConfig(mErrors);
+ GenericConfig generic = m2g.convert(makeConfigs);
+ if (false) {
+ System.out.println("======================");
+ System.out.println("REGENERATED MAKE FILES");
+ System.out.println("======================");
+ MakeWriter.write(System.out, generic, 0);
+ }
+
+ // TODO: Lookup shortened name as used in PRODUCT_NAME / TARGET_PRODUCT
+ FlatConfig flat = FlattenConfig.flatten(mErrors, generic);
+ if (false) {
+ System.out.println("=======================");
+ System.out.println("FLATTENED VARIABLE LIST");
+ System.out.println("=======================");
+ MakeWriter.write(System.out, flat, 0);
+ }
+
+ OutputChecker checker = new OutputChecker(flat);
+ checker.reportErrors(mErrors);
// TODO: Run kati and extract the variables and convert all that into starlark files.
@@ -42,23 +80,42 @@
public static void main(String[] args) {
Errors errors = new Errors();
+ int exitCode = 0;
- Options options = Options.parse(errors, args);
- if (errors.hadError()) {
- Options.printHelp(System.err);
+ try {
+ Options options = Options.parse(errors, args, System.getenv());
+ if (errors.hadError()) {
+ Options.printHelp(System.err);
+ System.err.println();
+ throw new CommandException();
+ }
+
+ switch (options.getAction()) {
+ case DEFAULT:
+ (new Main(errors, options)).run();
+ return;
+ case HELP:
+ Options.printHelp(System.out);
+ return;
+ }
+ } catch (CommandException | Errors.FatalException ex) {
+ // These are user errors, so don't show a stack trace
+ exitCode = 1;
+ } catch (Throwable ex) {
+ // These are programming errors in the code of this tool, so print the exception.
+ // We'll try to print this. If it's something unrecoverable, then we'll hope
+ // for the best. We will still print the errors below, because they can be useful
+ // for debugging.
+ ex.printStackTrace(System.err);
System.err.println();
+ exitCode = 1;
+ } finally {
+ // Print errors and warnings
errors.printErrors(System.err);
- System.exit(1);
- }
-
- switch (options.getAction()) {
- case DEFAULT:
- (new Main(errors, options)).run();
- errors.printErrors(System.err);
- return;
- case HELP:
- Options.printHelp(System.out);
- return;
+ if (errors.hadError()) {
+ exitCode = 1;
+ }
+ System.exit(exitCode);
}
}
}
diff --git a/tools/product_config/src/com/android/build/config/MakeConfig.java b/tools/product_config/src/com/android/build/config/MakeConfig.java
new file mode 100644
index 0000000..dda0db9
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/MakeConfig.java
@@ -0,0 +1,170 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Make-based representation of one config pass: the imported config files,
+ * each holding an ordered list of Blocks of variable values.
+ */
+public class MakeConfig extends ConfigBase {
+ /**
+ * The config files that were imported in this config pass.
+ */
+ protected final ArrayList<ConfigFile> mConfigFiles = new ArrayList();
+
+ // Kind of a Block. INHERIT blocks carry the inherited filename;
+ // BEFORE/AFTER presumably bracket inherit statements -- TODO confirm
+ // against the producer of these blocks.
+ public enum BlockType {
+ UNSET,
+ BEFORE,
+ INHERIT,
+ AFTER
+ }
+
+ public static class ConfigFile {
+ /**
+ * The name of the file, relative to the tree root.
+ */
+ private final String mFilename;
+
+ /**
+ * Sections of variable definitions and import statements. Product config
+ * files will always have at least one block.
+ */
+ private final ArrayList<Block> mBlocks = new ArrayList();
+
+ public ConfigFile(String filename) {
+ mFilename = filename;
+ }
+
+ public String getFilename() {
+ return mFilename;
+ }
+
+ public void addBlock(Block block) {
+ mBlocks.add(block);
+ }
+
+ public ArrayList<Block> getBlocks() {
+ return mBlocks;
+ }
+ }
+
+ /**
+ * A set of variables that were defined.
+ */
+ public static class Block {
+ private final BlockType mBlockType;
+ private final TreeMap<String, Str> mValues = new TreeMap();
+ // Set only for INHERIT blocks; null otherwise.
+ private Str mInheritedFile;
+
+ public Block(BlockType blockType) {
+ mBlockType = blockType;
+ }
+
+ public BlockType getBlockType() {
+ return mBlockType;
+ }
+
+ public void addVar(String varName, Str varValue) {
+ mValues.put(varName, varValue);
+ }
+
+ public Str getVar(String varName) {
+ return mValues.get(varName);
+ }
+
+ public TreeMap<String, Str> getVars() {
+ return mValues;
+ }
+
+ public void setInheritedFile(Str filename) {
+ mInheritedFile = filename;
+ }
+
+ public Str getInheritedFile() {
+ return mInheritedFile;
+ }
+ }
+
+ /**
+ * Adds the given config file. Returns any one previously added, or null.
+ */
+ public ConfigFile addConfigFile(ConfigFile file) {
+ // Linear scan: unlike GenericConfig, files here keep insertion order.
+ ConfigFile prev = null;
+ for (ConfigFile f: mConfigFiles) {
+ if (f.getFilename().equals(file.getFilename())) {
+ prev = f;
+ break;
+ }
+ }
+ mConfigFiles.add(file);
+ return prev;
+ }
+
+ public List<ConfigFile> getConfigFiles() {
+ return mConfigFiles;
+ }
+
+ /**
+ * Debug dump of this MakeConfig in a loosely JSON-like layout.
+ */
+ public void printToStream(PrintStream out) {
+ out.println("MakeConfig {");
+ out.println(" phase: " + mPhase);
+ out.println(" rootNodes: " + mRootNodes);
+ out.print(" singleVars: [ ");
+ for (Map.Entry<String,VarType> entry: mProductVars.entrySet()) {
+ if (entry.getValue() == VarType.SINGLE) {
+ out.print(entry.getKey());
+ out.print(" ");
+ }
+ }
+ out.println("]");
+ out.print(" listVars: [ ");
+ for (Map.Entry<String,VarType> entry: mProductVars.entrySet()) {
+ if (entry.getValue() == VarType.LIST) {
+ out.print(entry.getKey());
+ out.print(" ");
+ }
+ }
+ out.println("]");
+ out.println(" configFiles: [");
+ for (final ConfigFile configFile: mConfigFiles) {
+ out.println(" ConfigFile {");
+ out.println(" filename: " + configFile.getFilename());
+ out.println(" blocks: [");
+ for (Block block: configFile.getBlocks()) {
+ out.println(" Block {");
+ out.println(" type: " + block.getBlockType());
+ if (block.getBlockType() == BlockType.INHERIT) {
+ out.println(" inherited: " + block.getInheritedFile());
+ }
+ out.println(" values: {");
+ for (Map.Entry<String,Str> var: block.getVars().entrySet()) {
+ // NOTE(review): debug output is filtered to PRODUCT_PACKAGES
+ // only; remove this check to dump every variable.
+ if (!var.getKey().equals("PRODUCT_PACKAGES")) {
+ continue;
+ }
+ out.println(" " + var.getKey() + ": " + var.getValue());
+ }
+ out.println(" }");
+ out.println(" }");
+ }
+ out.println(" ]");
+ out.println(" }");
+ }
+ out.println(" ] // configFiles");
+ out.println("} // MakeConfig");
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/MakeWriter.java b/tools/product_config/src/com/android/build/config/MakeWriter.java
new file mode 100644
index 0000000..15fd095
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/MakeWriter.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Renders configs back out as makefile-style text, mainly for debugging
+ * and comparison against the original make-based configuration.
+ */
+public class MakeWriter {
+ // Emit a "generated by" header comment at the top of each file.
+ public static final int FLAG_WRITE_HEADER = 1;
+ // Append "# <VarType> <Position>" annotations to each statement.
+ public static final int FLAG_WRITE_ANNOTATIONS = 1 << 1;
+
+ private final boolean mWriteHeader;
+ private final boolean mWriteAnnotations;
+
+ public static void write(PrintStream out, GenericConfig config, int flags) {
+ (new MakeWriter(flags)).writeGeneric(out, config);
+ }
+
+ public static void write(PrintStream out, FlatConfig config, int flags) {
+ (new MakeWriter(flags)).writeFlat(out, config);
+ }
+
+
+ private MakeWriter(int flags) {
+ mWriteHeader = (flags & FLAG_WRITE_HEADER) != 0;
+ mWriteAnnotations = (flags & FLAG_WRITE_ANNOTATIONS) != 0;
+ }
+
+ /**
+ * Writes every config file, then the variables the make-based config
+ * modified (initial vs final values).
+ */
+ private void writeGeneric(PrintStream out, GenericConfig config) {
+ for (GenericConfig.ConfigFile file: config.getFiles().values()) {
+ out.println("---------------------------------------------------------");
+ out.println("FILE: " + file.getFilename());
+ out.println("---------------------------------------------------------");
+ writeFile(out, config, file);
+ out.println();
+ }
+ out.println("---------------------------------------------------------");
+ out.println("VARIABLES TOUCHED BY MAKE BASED CONFIG:");
+ out.println("---------------------------------------------------------");
+ writeStrVars(out, OutputChecker.getModifiedVars(config.getInitialVariables(),
+ config.getFinalVariables()), config);
+ }
+
+ private void writeFile(PrintStream out, GenericConfig config, GenericConfig.ConfigFile file) {
+ if (mWriteHeader) {
+ out.println("# This file is generated by the product_config tool");
+ }
+ for (GenericConfig.Statement statement: file.getStatements()) {
+ if (statement instanceof GenericConfig.Assign) {
+ writeAssign(out, config, (GenericConfig.Assign)statement);
+ } else if (statement instanceof GenericConfig.Inherit) {
+ writeInherit(out, (GenericConfig.Inherit)statement);
+ } else {
+ throw new RuntimeException("Unexpected Statement: " + statement);
+ }
+ }
+ }
+
+ /**
+ * Writes one assignment in the shortest equivalent make form:
+ * plain ":=", "+=", or the long form that re-references $(VAR).
+ */
+ private void writeAssign(PrintStream out, GenericConfig config,
+ GenericConfig.Assign statement) {
+ final List<Str> values = statement.getValue();
+ final int size = values.size();
+ final String varName = statement.getName();
+ Position pos = null;
+ if (size == 0) {
+ return;
+ } else if (size == 1) {
+ // Plain :=
+ final Str value = values.get(0);
+ out.print(varName + " := " + value);
+ pos = value.getPosition();
+ } else if (size == 2 && values.get(0).toString().length() == 0) {
+ // Plain +=
+ final Str value = values.get(1);
+ out.print(varName + " += " + value);
+ pos = value.getPosition();
+ } else {
+ // Write it out the long way
+ // NOTE(review): assumes each value carries its own spacing;
+ // otherwise value and "$(VAR)" run together -- confirm.
+ out.print(varName + " := " + values.get(0));
+ for (int i = 1; i < size; i++) {
+ out.print("$(" + varName + ") " + values.get(i));
+ pos = values.get(i).getPosition();
+ }
+ }
+ if (mWriteAnnotations) {
+ out.print(" # " + config.getVarType(varName) + " " + pos);
+ }
+ out.println();
+ }
+
+ private void writeInherit(PrintStream out, GenericConfig.Inherit statement) {
+ final Str filename = statement.getFilename();
+ // NOTE(review): real make syntax is "$(call inherit-product, FILE)";
+ // the comma is omitted here -- fine for debug output, confirm before
+ // feeding this back to make.
+ out.print("$(call inherit-product " + filename + ")");
+ if (mWriteAnnotations) {
+ out.print(" # " + filename.getPosition());
+ }
+ out.println();
+ }
+
+ // Pairs a variable name with its value for sorting in writeStrVars.
+ private static class Var {
+ Var(String name, Str val) {
+ this.name = name;
+ this.val = val;
+ }
+ final String name;
+ final Str val;
+ }
+
+ private static void writeStrVars(PrintStream out, Map<String, Str> vars, ConfigBase config) {
+ // Sort by file name and var name
+ TreeMap<String, Var> sorted = new TreeMap();
+ for (Map.Entry<String, Str> entry: vars.entrySet()) {
+ sorted.put(entry.getValue().getPosition().toString() + " " + entry.getKey(),
+ new Var(entry.getKey(), entry.getValue()));
+ }
+ // Print it
+ for (Var var: sorted.values()) {
+ // NOTE(review): assumes Position.toString() ends with a separator;
+ // otherwise position and name run together -- confirm.
+ out.println(var.val.getPosition() + var.name + " := " + var.val);
+ }
+ }
+
+ private void writeFlat(PrintStream out, FlatConfig config) {
+ // TODO: Print positions.
+ for (Map.Entry<String, Value> entry: config.getValues().entrySet()) {
+ out.print(entry.getKey());
+ out.print(" := ");
+
+ final Value value = entry.getValue();
+ if (value.getVarType() == VarType.LIST) {
+ // Lists are wrapped with make line continuations, one item per line.
+ final List<Str> list = value.getList();
+ final int size = list.size();
+ for (int i = 0; i < size; i++) {
+ out.print(list.get(i).toString());
+ if (i != size - 1) {
+ out.print(" \\\n ");
+ }
+ }
+ } else {
+ out.print(value.getStr().toString());
+ }
+ out.println();
+ }
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/Options.java b/tools/product_config/src/com/android/build/config/Options.java
index 494b947..ed544dc 100644
--- a/tools/product_config/src/com/android/build/config/Options.java
+++ b/tools/product_config/src/com/android/build/config/Options.java
@@ -17,6 +17,7 @@
package com.android.build.config;
import java.io.PrintStream;
+import java.util.Map;
import java.util.TreeMap;
public class Options {
@@ -27,19 +28,50 @@
private Action mAction = Action.DEFAULT;
+ private String mProduct;
+ private String mVariant;
+ private String mOutDir;
+ private String mCKatiBin;
+
public Action getAction() {
return mAction;
}
+ public String getProduct() {
+ return mProduct;
+ }
+
+ public String getVariant() {
+ return mVariant;
+ }
+
+ public String getOutDir() {
+ return mOutDir != null ? mOutDir : "out";
+ }
+
+ public String getCKatiBin() {
+ return mCKatiBin;
+ }
+
public static void printHelp(PrintStream out) {
out.println("usage: product_config");
out.println();
- out.println("OPTIONS");
+ out.println("REQUIRED FLAGS");
+ out.println(" --ckati_bin CKATI Kati binary to use.");
+ out.println();
+ out.println("OPTIONAL FLAGS");
out.println(" --hide ERROR_ID Suppress this error.");
out.println(" --error ERROR_ID Make this ERROR_ID a fatal error.");
out.println(" --help -h This message.");
out.println(" --warning ERROR_ID Make this ERROR_ID a warning.");
out.println();
+ out.println("REQUIRED ENVIRONMENT");
+ out.println(" TARGET_PRODUCT Product to build from lunch command.");
+ out.println(" TARGET_BUILD_VARIANT Build variant from lunch command.");
+ out.println();
+ out.println("OPTIONAL ENVIRONMENT");
+ out.println(" OUT_DIR Build output directory. Defaults to \"out\".");
+ out.println();
out.println("ERRORS");
out.println(" The following are the errors that can be controlled on the");
out.println(" commandline with the --hide --warning --error flags.");
@@ -55,7 +87,7 @@
}
static class Parser {
- private class ParseException extends Exception {
+ private static class ParseException extends Exception {
public ParseException(String message) {
super(message);
}
@@ -63,20 +95,26 @@
private Errors mErrors;
private String[] mArgs;
+ private Map<String,String> mEnv;
private Options mResult = new Options();
private int mIndex;
+ private boolean mSkipRequiredArgValidation;
- public Parser(Errors errors, String[] args) {
+ public Parser(Errors errors, String[] args, Map<String,String> env) {
mErrors = errors;
mArgs = args;
+ mEnv = env;
}
public Options parse() {
+ // Args
try {
while (mIndex < mArgs.length) {
final String arg = mArgs[mIndex];
- if ("--hide".equals(arg)) {
+ if ("--ckati_bin".equals(arg)) {
+ mResult.mCKatiBin = requireNextStringArg(arg);
+ } else if ("--hide".equals(arg)) {
handleErrorCode(arg, Errors.Level.HIDDEN);
} else if ("--error".equals(arg)) {
handleErrorCode(arg, Errors.Level.ERROR);
@@ -96,14 +134,48 @@
mIndex++;
}
} catch (ParseException ex) {
- mErrors.add(mErrors.ERROR_COMMAND_LINE, ex.getMessage());
+ mErrors.ERROR_COMMAND_LINE.add(ex.getMessage());
}
+ // Environment
+ mResult.mProduct = mEnv.get("TARGET_PRODUCT");
+ mResult.mVariant = mEnv.get("TARGET_BUILD_VARIANT");
+ mResult.mOutDir = mEnv.get("OUT_DIR");
+
+ validateArgs();
+
return mResult;
}
- private void addWarning(Errors.Category category, String message) {
- mErrors.add(category, message);
+ /**
+ * For testing; don't generate errors about missing arguments
+ */
+ public void setSkipRequiredArgValidation() {
+ mSkipRequiredArgValidation = true;
+ }
+
+ private void validateArgs() {
+ if (!mSkipRequiredArgValidation) {
+ if (mResult.mCKatiBin == null || "".equals(mResult.mCKatiBin)) {
+ addMissingArgError("--ckati_bin");
+ }
+ if (mResult.mProduct == null) {
+ addMissingEnvError("TARGET_PRODUCT");
+ }
+ if (mResult.mVariant == null) {
+ addMissingEnvError("TARGET_BUILD_VARIANT");
+ }
+ }
+ }
+
+ private void addMissingArgError(String argName) {
+ mErrors.ERROR_COMMAND_LINE.add("Required command line argument missing: "
+ + argName);
+ }
+
+ private void addMissingEnvError(String envName) {
+ mErrors.ERROR_COMMAND_LINE.add("Required environment variable missing: "
+ + envName);
}
private String getNextNonFlagArg() {
@@ -117,6 +189,14 @@
return mArgs[mIndex];
}
+ private String requireNextStringArg(String arg) throws ParseException {
+ final String val = getNextNonFlagArg();
+ if (val == null) {
+ throw new ParseException(arg + " requires a string argument.");
+ }
+ return val;
+ }
+
private int requireNextNumberArg(String arg) throws ParseException {
final String val = getNextNonFlagArg();
if (val == null) {
@@ -133,12 +213,11 @@
final int code = requireNextNumberArg(arg);
final Errors.Category category = mErrors.getCategories().get(code);
if (category == null) {
- mErrors.add(mErrors.WARNING_UNKNOWN_COMMAND_LINE_ERROR,
- "Unknown error code: " + code);
+ mErrors.WARNING_UNKNOWN_COMMAND_LINE_ERROR.add("Unknown error code: " + code);
return;
}
if (!category.isLevelSettable()) {
- mErrors.add(mErrors.ERROR_COMMAND_LINE, "Can't set level for error " + code);
+ mErrors.ERROR_COMMAND_LINE.add("Can't set level for error " + code);
return;
}
category.setLevel(level);
@@ -152,7 +231,7 @@
* <p>
* Adds errors encountered to Errors object.
*/
- public static Options parse(Errors errors, String[] args) {
- return (new Parser(errors, args)).parse();
+ public static Options parse(Errors errors, String[] args, Map<String, String> env) {
+ return (new Parser(errors, args, env)).parse();
}
}
diff --git a/tools/product_config/src/com/android/build/config/OutputChecker.java b/tools/product_config/src/com/android/build/config/OutputChecker.java
new file mode 100644
index 0000000..d982dba
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/OutputChecker.java
@@ -0,0 +1,190 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.Arrays;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Compares the make-based configuration as reported by dumpconfig.mk
+ * with what was computed from the new tool.
+ */
+public class OutputChecker {
+ // Differences that we know about, either known issues to be fixed or intentional.
+ private static final RegexSet IGNORED_VARIABLES = new RegexSet(
+ // TODO: Rewrite the enforce packages exist logic into this tool.
+ "PRODUCT_ENFORCE_PACKAGES_EXIST",
+ "PRODUCT_ENFORCE_PACKAGES_EXIST_ALLOW_LIST",
+ "PRODUCTS\\..*\\.PRODUCT_ENFORCE_PACKAGES_EXIST",
+ "PRODUCTS\\..*\\.PRODUCT_ENFORCE_PACKAGES_EXIST_ALLOW_LIST",
+
+ // This is generated by this tool, but comes later in the make build system.
+ "INTERNAL_PRODUCT",
+
+ // This can be set temporarily by product_config.mk
+ ".KATI_ALLOW_RULES"
+ );
+
+ private final FlatConfig mConfig;
+ private final TreeMap<String, Variable> mVariables;
+
+ /**
+ * Represents the before and after state of a variable.
+ */
+ public static class Variable {
+ public final String name;
+ public final VarType type;
+ public final Str original;
+ public final Value updated;
+
+ public Variable(String name, VarType type, Str original) {
+ this(name, type, original, null);
+ }
+
+ public Variable(String name, VarType type, Str original, Value updated) {
+ this.name = name;
+ this.type = type;
+ this.original = original;
+ this.updated = updated;
+ }
+
+ /**
+ * Return copy of this Variable with the updated field also set.
+ */
+ public Variable addUpdated(Value updated) {
+ return new Variable(name, type, original, updated);
+ }
+
+ /**
+ * Return whether normalizedOriginal and normalizedUpdate are equal.
+ */
+ public boolean isSame() {
+ final Str normalizedOriginal = Value.normalize(original);
+ final Str normalizedUpdated = Value.normalize(updated);
+ if (normalizedOriginal == null && normalizedUpdated == null) {
+ return true;
+ } else if (normalizedOriginal != null) {
+ return normalizedOriginal.equals(normalizedUpdated);
+ } else {
+ return false;
+ }
+ }
+ }
+
+ /**
+ * Construct OutputChecker with the config it will check.
+ */
+ public OutputChecker(FlatConfig config) {
+ mConfig = config;
+ mVariables = getVariables(config);
+ }
+
+ /**
+ * Add a WARNING_DIFFERENT_FROM_KATI for each of the variables which have changed.
+ */
+ public void reportErrors(Errors errors) {
+ for (Variable var: getDifferences()) {
+ if (IGNORED_VARIABLES.matches(var.name)) {
+ continue;
+ }
+ errors.WARNING_DIFFERENT_FROM_KATI.add("product_config processing differs from"
+ + " kati processing for " + var.type + " variable " + var.name + ".\n"
+ + "original: "
+ + Value.oneLinePerWord(var.original, "<null>") + "\n"
+ + "updated: "
+ + Value.oneLinePerWord(var.updated, "<null>"));
+ }
+ }
+
+ /**
+ * Get the Variables that are different between the normalized form of the original
+ * and updated. If one is null and the other is not, even if one is an empty string,
+ * the values are considered different.
+ */
+ public List<Variable> getDifferences() {
+ final ArrayList<Variable> result = new ArrayList();
+ for (Variable var: mVariables.values()) {
+ if (!var.isSame()) {
+ result.add(var);
+ }
+ }
+ return result;
+ }
+
+ /**
+ * Get all of the variables for this config.
+ *
+ * VisibleForTesting
+ */
+ static TreeMap<String, Variable> getVariables(FlatConfig config) {
+ final TreeMap<String, Variable> result = new TreeMap();
+
+ // Add the original values to the result map
+ for (Map.Entry<String, Str> entry: getModifiedVars(config.getInitialVariables(),
+ config.getFinalVariables()).entrySet()) {
+ final String name = entry.getKey();
+ result.put(name, new Variable(name, config.getVarType(name), entry.getValue()));
+ }
+
+ // Add the updated values to the result map
+ for (Map.Entry<String, Value> entry: config.getValues().entrySet()) {
+ final String name = entry.getKey();
+ final Value value = entry.getValue();
+ Variable var = result.get(name);
+ if (var == null) {
+ result.put(name, new Variable(name, config.getVarType(name), null, value));
+ } else {
+ result.put(name, var.addUpdated(value));
+ }
+ }
+
+ return result;
+ }
+
+ /**
+ * Get the entries that are different in the two maps.
+ */
+ public static Map<String, Str> getModifiedVars(Map<String, Str> before,
+ Map<String, Str> after) {
+ final HashMap<String, Str> result = new HashMap();
+
+ // Entries that were added or changed.
+ for (Map.Entry<String, Str> afterEntry: after.entrySet()) {
+ final String varName = afterEntry.getKey();
+ final Str afterValue = afterEntry.getValue();
+ final Str beforeValue = before.get(varName);
+ if (beforeValue == null || !beforeValue.equals(afterValue)) {
+ result.put(varName, afterValue);
+ }
+ }
+
+ // Entries that were removed; we just treat them as empty string.
+ for (Map.Entry<String, Str> beforeEntry: before.entrySet()) {
+ final String varName = beforeEntry.getKey();
+ if (!after.containsKey(varName)) {
+ result.put(varName, new Str(""));
+ }
+ }
+
+ return result;
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/Position.java b/tools/product_config/src/com/android/build/config/Position.java
index 7953942..266021d 100644
--- a/tools/product_config/src/com/android/build/config/Position.java
+++ b/tools/product_config/src/com/android/build/config/Position.java
@@ -16,6 +16,9 @@
package com.android.build.config;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
/**
* Position in a source file.
*/
@@ -25,6 +28,9 @@
*/
public static final int NO_LINE = -1;
+ private static final Pattern REGEX = Pattern.compile("([^:]*)(?::(\\d)*)?:?\\s*");
+ public static final String UNKNOWN = "<unknown>";
+
private final String mFile;
private final int mLine;
@@ -63,12 +69,39 @@
return mLine;
}
+ /**
+ * Return a Position object from a string containing {@code filename:line}, or the default
+ * Position(null, NO_LINE) if the string can't be parsed.
+ */
+ public static Position parse(String str) {
+ final Matcher m = REGEX.matcher(str);
+ if (!m.matches()) {
+ return new Position();
+ }
+ String filename = m.group(1);
+ if (filename.length() == 0 || UNKNOWN.equals(filename)) {
+ filename = null;
+ }
+ String lineString = m.group(2);
+ int line;
+ if (lineString == null || lineString.length() == 0) {
+ line = NO_LINE;
+ } else {
+ try {
+ line = Integer.parseInt(lineString);
+ } catch (NumberFormatException ex) {
+ line = NO_LINE;
+ }
+ }
+ return new Position(filename, line);
+ }
+
@Override
public String toString() {
if (mFile == null && mLine == NO_LINE) {
return "";
} else if (mFile == null && mLine != NO_LINE) {
- return "<unknown>:" + mLine + ": ";
+ return UNKNOWN + ":" + mLine + ": ";
} else if (mFile != null && mLine == NO_LINE) {
return mFile + ": ";
} else { // if (mFile != null && mLine != NO_LINE)
diff --git a/tools/product_config/src/com/android/build/config/RegexSet.java b/tools/product_config/src/com/android/build/config/RegexSet.java
new file mode 100644
index 0000000..70fcd29
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/RegexSet.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.regex.Pattern;
+
+/**
+ * Returns whether a string matches one of a set of presupplied regexes.
+ */
+public class RegexSet {
+ private final Pattern[] mPatterns;
+
+ public RegexSet(String... patterns) {
+ mPatterns = new Pattern[patterns.length];
+ for (int i = 0; i < patterns.length; i++) {
+ mPatterns[i] = Pattern.compile(patterns[i]);
+ }
+ }
+
+ public boolean matches(String s) {
+ for (Pattern p: mPatterns) {
+ if (p.matcher(s).matches()) {
+ return true;
+ }
+ }
+ return false;
+ }
+}
+
diff --git a/tools/product_config/src/com/android/build/config/Str.java b/tools/product_config/src/com/android/build/config/Str.java
new file mode 100644
index 0000000..2516b76
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Str.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A String and a Position, where it came from in source code.
+ */
+public class Str implements Comparable<Str> {
+ private String mValue;
+ private Position mPosition;
+
+ public Str(String s) {
+ mValue = s;
+ mPosition = new Position();
+ }
+
+ public Str(Position pos, String s) {
+ mValue = s;
+ mPosition = pos;
+ }
+
+ public int length() {
+ return mValue.length();
+ }
+
+ @Override
+ public String toString() {
+ return mValue;
+ }
+
+ public Position getPosition() {
+ return mPosition;
+ }
+
+ /**
+ * Str is equal if the string value is equal, regardless of whether the position
+ * is the same.
+ */
+ @Override
+ public boolean equals(Object o) {
+ if (!(o instanceof Str)) {
+ return false;
+ }
+ final Str that = (Str)o;
+ return mValue.equals(that.mValue);
+ }
+
+ @Override
+ public int hashCode() {
+ return mValue.hashCode();
+ }
+
+ @Override
+ public int compareTo(Str that) {
+ return this.mValue.compareTo(that.mValue);
+ }
+
+ public static ArrayList<Str> toList(Position pos, List<String> list) {
+ final ArrayList<Str> result = new ArrayList(list.size());
+ for (String s: list) {
+ result.add(new Str(pos, s));
+ }
+ return result;
+ }
+}
diff --git a/tools/product_config/src/com/android/build/config/Value.java b/tools/product_config/src/com/android/build/config/Value.java
new file mode 100644
index 0000000..9bd6401
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Value.java
@@ -0,0 +1,218 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+/**
+ * Class to hold the two types of variables we support, strings and lists of strings.
+ */
+public class Value {
+ private static final Pattern SPACES = Pattern.compile("\\s+");
+
+ private final VarType mVarType;
+ private final Str mStr;
+ private final ArrayList<Str> mList;
+
+ /**
+ * Construct an appropriately typed empty value.
+ */
+ public Value(VarType varType) {
+ mVarType = varType;
+ if (varType == VarType.LIST) {
+ mStr = null;
+ mList = new ArrayList();
+ mList.add(new Str(""));
+ } else {
+ mStr = new Str("");
+ mList = null;
+ }
+ }
+
+ public Value(VarType varType, Str str) {
+ mVarType = varType;
+ mStr = str;
+ mList = null;
+ }
+
+ public Value(List<Str> list) {
+ mVarType = VarType.LIST;
+ mStr = null;
+ mList = new ArrayList(list);
+ }
+
+ public VarType getVarType() {
+ return mVarType;
+ }
+
+ public Str getStr() {
+ return mStr;
+ }
+
+ public List<Str> getList() {
+ return mList;
+ }
+
+ /**
+ * Normalize a string that is behaving as a list.
+ */
+ public static String normalize(String str) {
+ if (str == null) {
+ return null;
+ }
+ return SPACES.matcher(str.trim()).replaceAll(" ").trim();
+ }
+
+ /**
+ * Normalize a string that is behaving as a list.
+ */
+ public static Str normalize(Str str) {
+ if (str == null) {
+ return null;
+ }
+ return new Str(str.getPosition(), normalize(str.toString()));
+ }
+
+ /**
+ * Normalize this Value into the same format as normalize(Str).
+ */
+ public static Str normalize(Value val) {
+ if (val == null) {
+ return null;
+ }
+ if (val.mStr != null) {
+ return normalize(val.mStr);
+ }
+
+ if (val.mList.size() == 0) {
+ return new Str("");
+ }
+
+ StringBuilder result = new StringBuilder();
+ final int size = val.mList.size();
+ boolean first = true;
+ for (int i = 0; i < size; i++) {
+ String s = val.mList.get(i).toString().trim();
+ if (s.length() > 0) {
+ if (!first) {
+ result.append(" ");
+ } else {
+ first = false;
+ }
+ result.append(s);
+ }
+ }
+
+ // Just use the first item's position.
+ return new Str(val.mList.get(0).getPosition(), result.toString());
+ }
+
+ /**
+ * Put each word in 'val' on its own line in make format. If 'val' is null,
+ * 'nullValue' is returned.
+ */
+ public static String oneLinePerWord(Value val, String nullValue) {
+ if (val == null) {
+ return nullValue;
+ }
+ final String s = normalize(val).toString();
+ final Matcher m = SPACES.matcher(s);
+ final StringBuilder result = new StringBuilder();
+ if (s.length() > 0 && (val.mVarType == VarType.LIST || m.find())) {
+ result.append("\\\n ");
+ }
+ result.append(m.replaceAll(" \\\\\n "));
+ return result.toString();
+ }
+
+ /**
+ * Put each word in 'str' on its own line in make format. If 'str' is null,
+ * nullValue is returned.
+ */
+ public static String oneLinePerWord(Str str, String nullValue) {
+ if (str == null) {
+ return nullValue;
+ }
+ final Matcher m = SPACES.matcher(normalize(str.toString()));
+ final StringBuilder result = new StringBuilder();
+ if (m.find()) {
+ result.append("\\\n ");
+ }
+ result.append(m.replaceAll(" \\\\\n "));
+ return result.toString();
+ }
+
+ /**
+ * Return a string representing this value with detailed debugging information.
+ */
+ public static String debugString(Value val) {
+ if (val == null) {
+ return "null";
+ }
+
+ final StringBuilder str = new StringBuilder("Value(");
+ if (val.mStr != null) {
+ str.append("mStr=");
+ str.append("\"");
+ str.append(val.mStr.toString());
+ str.append("\"");
+ if (false) {
+ str.append(" (");
+ str.append(val.mStr.getPosition().toString());
+ str.append(")");
+ }
+ }
+ if (val.mList != null) {
+ str.append("mList=");
+ str.append("[");
+ for (Str s: val.mList) {
+ str.append(" \"");
+ str.append(s.toString());
+ if (false) {
+ str.append("\" (");
+ str.append(s.getPosition().toString());
+ str.append(")");
+ } else {
+ str.append("\"");
+ }
+ }
+ str.append(" ]");
+ }
+ str.append(")");
+ return str.toString();
+ }
+
+ /**
+ * Get the Positions of all of the parts of this Value.
+ */
+ public List<Position> getPositions() {
+ List<Position> result = new ArrayList();
+ if (mStr != null) {
+ result.add(mStr.getPosition());
+ }
+ if (mList != null) {
+ for (Str str: mList) {
+ result.add(str.getPosition());
+ }
+ }
+ return result;
+ }
+}
+
diff --git a/tools/product_config/src/com/android/build/config/VarType.java b/tools/product_config/src/com/android/build/config/VarType.java
new file mode 100644
index 0000000..43e9366
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/VarType.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+/**
+ * Whether a product config variable is a list or single-value variable.
+ */
+public enum VarType {
+ /**
+ * A product config variable that is a list of space separated strings.
+ * These are defined by _product_list_vars in product.mk.
+ */
+ LIST,
+
+ /**
+ * A product config variable that is a single string.
+ * These are defined by _product_single_value_vars in product.mk.
+ */
+ SINGLE,
+
+ /**
+ * A variable that is given the special product config handling but is
+ * nonetheless defined by product config makefiles.
+ */
+ UNKNOWN
+}
+
diff --git a/tools/product_config/test.sh b/tools/product_config/test.sh
new file mode 100755
index 0000000..ee9ed5c
--- /dev/null
+++ b/tools/product_config/test.sh
@@ -0,0 +1,120 @@
+#!/bin/bash
+
+#
+# This script runs the full set of tests for product config:
+# 1. Build the product-config tool.
+# 2. Run the unit tests.
+# 3. Run the product config for every product available in the current
+# source tree, for each of user, userdebug and eng.
+# - To restrict which products or variants are run, set the
+# PRODUCTS or VARIANTS environment variables.
+# - Products for which the make based product config fails are
+# skipped.
+#
+
+# The PRODUCTS variable is used by the build, and setting it in the environment
+# interferes with that, so unset it. (That should probably be fixed)
+products=$PRODUCTS
+variants=$VARIANTS
+unset PRODUCTS
+unset VARIANTS
+
+# Don't use lunch from the user's shell
+unset TARGET_PRODUCT
+unset TARGET_BUILD_VARIANT
+
+function die() {
+ format=$1
+ shift
+ printf "$format\nStopping...\n" $@ >&2
+ exit 1;
+}
+
+[[ -f build/make/envsetup.sh ]] || die "Run this script from the root of the tree."
+: ${products:=$(build/soong/soong_ui.bash --dumpvar-mode all_named_products | sed -e "s/ /\n/g" | sort -u )}
+: ${variants:="user userdebug eng"}
+: ${CKATI_BIN:=prebuilts/build-tools/$(build/soong/soong_ui.bash --dumpvar-mode HOST_PREBUILT_TAG)/bin/ckati}
+
+function if_signal_exit() {
+ [[ $1 -lt 128 ]] || exit $1
+}
+
+build/soong/soong_ui.bash --build-mode --all-modules --dir="$(pwd)" product-config-test product-config \
+ || die "Build failed."
+
+echo
+echo Running unit tests
+java -jar out/host/linux-x86/testcases/product-config-test/product-config-test.jar
+unit_tests=$?
+if_signal_exit $unit_tests
+
+failed_baseline_checks=
+for product in $products ; do
+ for variant in $variants ; do
+ echo
+ echo "Checking: lunch $product-$variant"
+
+ TARGET_PRODUCT=$product \
+ TARGET_BUILD_VARIANT=$variant \
+ build/soong/soong_ui.bash --dumpvar-mode TARGET_PRODUCT &> /dev/null
+ exit_status=$?
+ if_signal_exit $exit_status
+ if [ $exit_status -ne 0 ] ; then
+ echo "*** Combo fails with make, skipping product-config test run for $product-$variant"
+ else
+ rm -rf out/config/$product-$variant
+ TARGET_PRODUCT=$product TARGET_BUILD_VARIANT=$variant product-config \
+ --ckati_bin $CKATI_BIN \
+ --error 1000
+ exit_status=$?
+ if_signal_exit $exit_status
+ if [ $exit_status -ne 0 ] ; then
+ failed_baseline_checks="$failed_baseline_checks $product-$variant"
+ fi
+ if [ "$CHECK_FOR_RULES" != "" ] ; then
+ # This is a little bit of sleight of hand for good output formatting at the
+ # expense of speed. We've already run the command once without
+ # ALLOW_RULES_IN_PRODUCT_CONFIG, so we know it passes there. We run it again
+ # with ALLOW_RULES_IN_PRODUCT_CONFIG=error to see if it fails, but that will
+ # cause it to only print the first error. But we want to see all of them,
+ # so if it fails we run it a third time with ALLOW_RULES_IN_PRODUCT_CONFIG=warning,
+ # so we can see all the warnings.
+ TARGET_PRODUCT=$product \
+ TARGET_BUILD_VARIANT=$variant \
+ ALLOW_RULES_IN_PRODUCT_CONFIG=error \
+ build/soong/soong_ui.bash --dumpvar-mode TARGET_PRODUCT &> /dev/null
+ exit_status=$?
+ if_signal_exit $exit_status
+ if [ $exit_status -ne 0 ] ; then
+ TARGET_PRODUCT=$product \
+ TARGET_BUILD_VARIANT=$variant \
+ ALLOW_RULES_IN_PRODUCT_CONFIG=warning \
+ build/soong/soong_ui.bash --dumpvar-mode TARGET_PRODUCT > /dev/null
+ failed_rule_checks="$failed_rule_checks $product-$variant"
+ fi
+ fi
+ fi
+ done
+done
+
+echo
+echo
+echo "------------------------------"
+echo SUMMARY
+echo "------------------------------"
+
+echo -n "Unit tests "
+if [ $unit_tests -eq 0 ] ; then echo PASSED ; else echo FAILED ; fi
+
+echo -n "Baseline checks "
+if [ "$failed_baseline_checks" = "" ] ; then echo PASSED ; else echo FAILED ; fi
+for combo in $failed_baseline_checks ; do
+ echo " ... $combo"
+done
+
+echo -n "Rules checks "
+if [ "$failed_rule_checks" = "" ] ; then echo PASSED ; else echo FAILED ; fi
+for combo in $failed_rule_checks ; do
+ echo " ... $combo"
+done
+
diff --git a/tools/product_config/test/com/android/build/config/CsvParserTest.java b/tools/product_config/test/com/android/build/config/CsvParserTest.java
new file mode 100644
index 0000000..6f38d68
--- /dev/null
+++ b/tools/product_config/test/com/android/build/config/CsvParserTest.java
@@ -0,0 +1,148 @@
+
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package com.android.build.config;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.StringReader;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Test for CSV parser class.
+ */
+public class CsvParserTest {
+ public String listsToStrings(String[] expected, List<String> actual) {
+ return "expected=" + Arrays.toString(expected)
+ + " actual=" + Arrays.toString(actual.toArray());
+ }
+
+ public void assertLineEquals(CsvParser.Line actual, int lineno, String... fields) {
+ if (actual.getLine() != lineno) {
+ throw new RuntimeException("lineno mismatch: expected=" + lineno
+ + " actual=" + actual.getLine());
+ }
+ if (fields.length != actual.getFields().size()) {
+ throw new RuntimeException("getFields().size() mismatch: expected=" + fields.length
+ + " actual=" + actual.getFields().size()
+ + " values: " + listsToStrings(fields, actual.getFields()));
+ }
+ for (int i = 0; i < fields.length; i++) {
+ if (!fields[i].equals(actual.getFields().get(i))) {
+ throw new RuntimeException("getFields().get(" + i + ") mismatch: expected="
+ + fields[i] + " actual=" + actual.getFields().get(i)
+ + " values: " + listsToStrings(fields, actual.getFields()));
+
+ }
+ }
+ }
+
+ @Test
+ public void testEmptyString() throws Exception {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ ""));
+
+ Assert.assertEquals(0, lines.size());
+ }
+
+ @Test
+ public void testLexerOneCharacter() throws Exception {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ "a"));
+
+ Assert.assertEquals(1, lines.size());
+ assertLineEquals(lines.get(0), 1, "a");
+ }
+
+ @Test
+ public void testLexerTwoFieldsNoNewline() throws Exception {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ "a,b"));
+
+ Assert.assertEquals(1, lines.size());
+ assertLineEquals(lines.get(0), 1, "a", "b");
+ }
+
+ @Test
+ public void testLexerTwoFieldsNewline() throws Exception {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ "a,b\n"));
+
+ Assert.assertEquals(1, lines.size());
+ assertLineEquals(lines.get(0), 1, "a", "b");
+ }
+
+ @Test
+ public void testEndsWithTwoNewlines() throws Exception {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ "a,b\n\n"));
+
+ Assert.assertEquals(1, lines.size());
+ assertLineEquals(lines.get(0), 1, "a", "b");
+ }
+
+ @Test
+ public void testOnlyNewlines() throws Exception {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ "\n\n\n\n"));
+
+ Assert.assertEquals(0, lines.size());
+ }
+
+
+ @Test
+ public void testLexerComplex() throws Exception {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ ",\"ab\"\"\nc\",,de\n"
+ + "fg,\n"
+ + "\n"
+ + ",\n"
+ + "hijk"));
+
+ Assert.assertEquals(4, lines.size());
+ assertLineEquals(lines.get(0), 2, "", "ab\"\nc", "", "de");
+ assertLineEquals(lines.get(1), 3, "fg", "");
+ assertLineEquals(lines.get(2), 5, "", "");
+ assertLineEquals(lines.get(3), 6, "hijk");
+ }
+
+ @Test
+ public void testEndInsideQuoted() throws Exception {
+ try {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ "\"asd"));
+ throw new RuntimeException("Didn't throw ParseException");
+ } catch (CsvParser.ParseException ex) {
+ System.out.println("Caught: " + ex);
+ }
+ }
+
+ @Test
+ public void testCharacterAfterQuotedField() throws Exception {
+ try {
+ List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+ "\"\"a"));
+ throw new RuntimeException("Didn't throw ParseException");
+ } catch (CsvParser.ParseException ex) {
+ System.out.println("Caught: " + ex);
+ }
+ }
+}
+
diff --git a/tools/product_config/test/com/android/build/config/ErrorReporterTest.java b/tools/product_config/test/com/android/build/config/ErrorReporterTest.java
index 2cde476..b9b25b4 100644
--- a/tools/product_config/test/com/android/build/config/ErrorReporterTest.java
+++ b/tools/product_config/test/com/android/build/config/ErrorReporterTest.java
@@ -30,7 +30,7 @@
public void testAdding() {
TestErrors errors = new TestErrors();
- errors.add(errors.ERROR, new Position("a", 12), "Errrororrrr");
+ errors.ERROR.add(new Position("a", 12), "Errrororrrr");
Assert.assertTrue(errors.hadWarningOrError());
Assert.assertTrue(errors.hadError());
@@ -66,7 +66,7 @@
public void testWarning() {
TestErrors errors = new TestErrors();
- errors.add(errors.WARNING, "Waaaaarninggggg");
+ errors.WARNING.add("Waaaaarninggggg");
Assert.assertTrue(errors.hadWarningOrError());
Assert.assertFalse(errors.hadError());
@@ -80,7 +80,7 @@
public void testHidden() {
TestErrors errors = new TestErrors();
- errors.add(errors.HIDDEN, "Hidddeennn");
+ errors.HIDDEN.add("Hidddeennn");
Assert.assertFalse(errors.hadWarningOrError());
Assert.assertFalse(errors.hadError());
diff --git a/tools/product_config/test/com/android/build/config/OptionsTest.java b/tools/product_config/test/com/android/build/config/OptionsTest.java
index 2c36322..459efa5 100644
--- a/tools/product_config/test/com/android/build/config/OptionsTest.java
+++ b/tools/product_config/test/com/android/build/config/OptionsTest.java
@@ -19,12 +19,24 @@
import org.junit.Assert;
import org.junit.Test;
+import java.util.HashMap;
+
public class OptionsTest {
+
+ private Options parse(Errors errors, String[] args) {
+ final HashMap<String, String> env = new HashMap();
+ env.put("TARGET_PRODUCT", "test_product");
+ env.put("TARGET_BUILD_VARIANT", "user");
+ final Options.Parser parser = new Options.Parser(errors, args, env);
+ parser.setSkipRequiredArgValidation();
+ return parser.parse();
+ }
+
@Test
public void testErrorMissingLast() {
final Errors errors = new Errors();
- final Options options = Options.parse(errors, new String[] {
+ final Options options = parse(errors, new String[] {
"--error"
});
@@ -37,7 +49,7 @@
public void testErrorMissingNotLast() {
final Errors errors = new Errors();
- final Options options = Options.parse(errors, new String[] {
+ final Options options = parse(errors, new String[] {
"--error", "--warning", "2"
});
@@ -50,7 +62,7 @@
public void testErrorNotNumeric() {
final Errors errors = new Errors();
- final Options options = Options.parse(errors, new String[] {
+ final Options options = parse(errors, new String[] {
"--error", "notgood"
});
@@ -63,7 +75,7 @@
public void testErrorInvalidError() {
final Errors errors = new Errors();
- final Options options = Options.parse(errors, new String[] {
+ final Options options = parse(errors, new String[] {
"--error", "50000"
});
@@ -76,7 +88,7 @@
public void testErrorOne() {
final Errors errors = new Errors();
- final Options options = Options.parse(errors, new String[] {
+ final Options options = parse(errors, new String[] {
"--error", "2"
});
@@ -89,7 +101,7 @@
public void testWarningOne() {
final Errors errors = new Errors();
- final Options options = Options.parse(errors, new String[] {
+ final Options options = parse(errors, new String[] {
"--warning", "2"
});
@@ -102,7 +114,7 @@
public void testHideOne() {
final Errors errors = new Errors();
- final Options options = Options.parse(errors, new String[] {
+ final Options options = parse(errors, new String[] {
"--hide", "2"
});
@@ -110,5 +122,16 @@
Assert.assertEquals(Options.Action.DEFAULT, options.getAction());
Assert.assertFalse(errors.hadWarningOrError());
}
+
+ @Test
+ public void testEnv() {
+ final Errors errors = new Errors();
+
+ final Options options = parse(errors, new String[0]);
+
+ Assert.assertEquals("test_product", options.getProduct());
+ Assert.assertEquals("user", options.getVariant());
+ Assert.assertFalse(errors.hadWarningOrError());
+ }
}
diff --git a/tools/product_config/test/com/android/build/config/PositionTest.java b/tools/product_config/test/com/android/build/config/PositionTest.java
new file mode 100644
index 0000000..82b5dd4
--- /dev/null
+++ b/tools/product_config/test/com/android/build/config/PositionTest.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.HashMap;
+
+public class PositionTest {
+
+ @Test
+ public void testParseEmpty() {
+ final Position pos = Position.parse("");
+
+ Assert.assertEquals(null, pos.getFile());
+ Assert.assertEquals(Position.NO_LINE, pos.getLine());
+ }
+
+ @Test
+ public void testParseOnlyFile() {
+ final Position pos = Position.parse("asdf");
+
+ Assert.assertEquals("asdf", pos.getFile());
+ Assert.assertEquals(Position.NO_LINE, pos.getLine());
+ }
+
+ @Test
+ public void testParseBoth() {
+ final Position pos = Position.parse("asdf:1");
+
+ Assert.assertEquals("asdf", pos.getFile());
+ Assert.assertEquals(1, pos.getLine());
+ }
+
+ @Test
+ public void testParseEndsWithColon() {
+ final Position pos = Position.parse("asdf:");
+
+ Assert.assertEquals("asdf", pos.getFile());
+ Assert.assertEquals(Position.NO_LINE, pos.getLine());
+ }
+
+ @Test
+ public void testParseEndsWithSpace() {
+ final Position pos = Position.parse("asdf: ");
+
+ Assert.assertEquals("asdf", pos.getFile());
+ Assert.assertEquals(Position.NO_LINE, pos.getLine());
+ }
+
+
+}
+
diff --git a/tools/product_config/test/com/android/build/config/TestRunner.java b/tools/product_config/test/com/android/build/config/TestRunner.java
new file mode 100644
index 0000000..546518f
--- /dev/null
+++ b/tools/product_config/test/com/android/build/config/TestRunner.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import org.junit.runner.Description;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+import org.junit.runner.notification.RunListener;
+
+public class TestRunner {
+ public static void main(String[] args) {
+ JUnitCore junit = new JUnitCore();
+
+ junit.addListener(new RunListener() {
+ @Override
+ public void testStarted(Description description) {
+ System.out.println("\nSTARTING: " + description.getDisplayName());
+ }
+
+ @Override
+ public void testFailure(Failure failure) {
+ System.out.println("FAILED: "
+ + failure.getDescription().getDisplayName());
+ System.out.println(failure.getTrace());
+ }
+ });
+ Result result = junit.run(CsvParserTest.class,
+ ErrorReporterTest.class,
+ OptionsTest.class,
+ PositionTest.class);
+ if (!result.wasSuccessful()) {
+ System.out.println("\n*** FAILED ***");
+ }
+ }
+}
+
diff --git a/tools/rbcrun/Android.bp b/tools/rbcrun/Android.bp
new file mode 100644
index 0000000..90173ac
--- /dev/null
+++ b/tools/rbcrun/Android.bp
@@ -0,0 +1,40 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+blueprint_go_binary {
+ name: "rbcrun",
+ srcs: ["cmd/rbcrun.go"],
+ deps: ["rbcrun-module"],
+}
+
+bootstrap_go_package {
+ name: "rbcrun-module",
+ srcs: [
+ "host.go",
+ ],
+ testSrcs: [
+ "host_test.go",
+ ],
+ pkgPath: "rbcrun",
+ deps: [
+ "go-starlark-starlark",
+ "go-starlark-starlarkstruct",
+ "go-starlark-starlarktest",
+ ],
+}
diff --git a/tools/rbcrun/README.md b/tools/rbcrun/README.md
new file mode 100644
index 0000000..fb58c89
--- /dev/null
+++ b/tools/rbcrun/README.md
@@ -0,0 +1,84 @@
+# Roboleaf configuration files interpreter
+
+Reads and executes Roboleaf product configuration files.
+
+## Usage
+
+`rbcrun` *options* *VAR=value*... [ *file* ]
+
+A Roboleaf configuration file is a Starlark script. Usually it is read from *file*. The option `-c` allows you to
+provide a script directly on the command line. The option `-f` exists so that a script file whose name contains `=`
+can still be run (i.e., `my=file.rbc` sets `my` to `file.rbc`, whereas `-f my=file.rbc` runs the script from `my=file.rbc`).
+
+### Options
+
+`-d` *dir*\
+Root directory for load("//path",...)
+
+`-c` *text*\
+Read script from *text*
+
+`--perf` *file*\
+Gather performance statistics and save them to *file*. Use \
+` go tool pprof -top `*file*\
+to show the top CPU users
+
+`-f` *file*\
+File to run.
+
+## Extensions
+
+The runner allows Starlark scripts to use the following features that Bazel's Starlark interpreter does not support:
+
+### Load statement URI
+
+Starlark does not define the format of the load statement's first argument.
+The Roboleaf configuration interpreter supports the format that Bazel uses
+(`":file"` or `"//path:file"`). In addition, it allows the URI to end with
+`"|symbol"` which defines a single variable `symbol` with `None` value if a
+module does not exist. Thus,
+
+```
+load(":mymodule.rbc|init", mymodule_init="init")
+```
+
+will load the module `mymodule.rbc` and export a symbol `init` in it as
+`mymodule_init` if `mymodule.rbc` exists. If `mymodule.rbc` is missing,
+`mymodule_init` will be set to `None`.
+
+### Predefined Symbols
+
+#### rblf_env
+
+A `struct` containing environment variables. E.g., `rblf_env.USER` is the username when running on Unix.
+
+#### rblf_cli
+
+A `struct` containing the variable set by the interpreter's command line. That is, running
+
+```
+rbcrun FOO=bar myfile.rbc
+```
+
+will have the value of `rblf_cli.FOO` be `"bar"`
+
+### Predefined Functions
+
+#### rblf_file_exists(*file*)
+
+Returns `True` if *file* exists
+
+#### rblf_wildcard(*glob*, *top* = None)
+
+Expands *glob*. If *top* is supplied, expands "*top*/*glob*", then removes
+"*top*/" prefix from the matching file names.
+
+#### rblf_regex(*pattern*, *text*)
+
+Returns `True` if *text* matches *pattern*.
+
+#### rblf_shell(*command*)
+
+Runs `sh -c "`*command*`"`, reads its output, converts all newlines into spaces, chops the trailing newline, and
+returns the resulting string. This is equivalent to Make's
+`shell` builtin function. *This function will be eventually removed*.
diff --git a/tools/rbcrun/cmd/rbcrun.go b/tools/rbcrun/cmd/rbcrun.go
new file mode 100644
index 0000000..7848562
--- /dev/null
+++ b/tools/rbcrun/cmd/rbcrun.go
@@ -0,0 +1,98 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "flag"
+ "fmt"
+ "go.starlark.net/starlark"
+ "os"
+ "rbcrun"
+ "strings"
+)
+
+var (
+ execprog = flag.String("c", "", "execute program `prog`")
+ rootdir = flag.String("d", ".", "the value of // for load paths")
+ file = flag.String("f", "", "file to execute")
+ perfFile = flag.String("perf", "", "save performance data")
+)
+
+func main() {
+ flag.Parse()
+ filename := *file
+ var src interface{}
+ var env []string
+
+ rc := 0
+ for _, arg := range flag.Args() {
+ if strings.Contains(arg, "=") {
+ env = append(env, arg)
+ } else if filename == "" {
+ filename = arg
+ } else {
+ quit("only one file can be executed\n")
+ }
+ }
+ if *execprog != "" {
+ if filename != "" {
+ quit("either -c or file name should be present\n")
+ }
+ filename = "<cmdline>"
+ src = *execprog
+ }
+ if filename == "" {
+ if len(env) > 0 {
+ fmt.Fprintln(os.Stderr,
+ "no file to run -- if your file's name contains '=', use -f to specify it")
+ }
+ flag.Usage()
+ os.Exit(1)
+ }
+ if stat, err := os.Stat(*rootdir); os.IsNotExist(err) || !stat.IsDir() {
+ quit("%s is not a directory\n", *rootdir)
+ }
+ if *perfFile != "" {
+ pprof, err := os.Create(*perfFile)
+ if err != nil {
+ quit("%s: err", *perfFile)
+ }
+ defer pprof.Close()
+ if err := starlark.StartProfile(pprof); err != nil {
+ quit("%s\n", err)
+ }
+ }
+ rbcrun.LoadPathRoot = *rootdir
+ err := rbcrun.Run(filename, src, env)
+ if *perfFile != "" {
+ if err2 := starlark.StopProfile(); err2 != nil {
+ fmt.Fprintln(os.Stderr, err2)
+ rc = 1
+ }
+ }
+ if err != nil {
+ if evalErr, ok := err.(*starlark.EvalError); ok {
+ quit("%s\n", evalErr.Backtrace())
+ } else {
+ quit("%s\n", err)
+ }
+ }
+ os.Exit(rc)
+}
+
+func quit(format string, s ...interface{}) {
+ fmt.Fprintln(os.Stderr, format, s)
+ os.Exit(2)
+}
diff --git a/tools/rbcrun/go.mod b/tools/rbcrun/go.mod
new file mode 100644
index 0000000..a029eb4
--- /dev/null
+++ b/tools/rbcrun/go.mod
@@ -0,0 +1,10 @@
+module rbcrun
+
+require (
+ github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d // indirect
+ go.starlark.net v0.0.0-20201006213952-227f4aabceb5
+)
+
+replace go.starlark.net => ../../../../external/starlark-go
+
+go 1.15
diff --git a/tools/rbcrun/go.sum b/tools/rbcrun/go.sum
new file mode 100644
index 0000000..db4d51e
--- /dev/null
+++ b/tools/rbcrun/go.sum
@@ -0,0 +1,75 @@
+cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
+github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
+github.com/chzyer/logex v1.1.10 h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE=
+github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
+github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e h1:fY5BOSpyZCqRo5OhCuC+XN+r/bBCmeuuJtjz+bCNIf8=
+github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
+github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1 h1:q763qf9huN11kDQavWsoZXJNW3xEE4JJyHa5Q25/sd8=
+github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
+github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
+github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
+github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
+github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
+github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
+github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
+github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
+github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
+github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
+github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
+github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
+github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
+github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d h1:AREM5mwr4u1ORQBMvzfzBgpsctsbQikCVpvC+tX285E=
+github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d/go.mod h1:o96djdrsSGy3AWPyBgZMAGfxZNfgntdJG+11KU4QvbU=
+github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
+golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
+golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
+golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae h1:Ih9Yo4hSPImZOpfGuA4bR/ORKTAbhZo2AbWNRCnevdo=
+golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f h1:+Nyd8tzPX9R7BWHguqsrbFdRx3WQ/1ib8I44HXV5yTA=
+golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
+golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
+google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
+google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
+google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
+google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
+google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
+google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
+google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
+google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
+google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
+google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
+google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
+google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
+honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
diff --git a/tools/rbcrun/host.go b/tools/rbcrun/host.go
new file mode 100644
index 0000000..1e43334
--- /dev/null
+++ b/tools/rbcrun/host.go
@@ -0,0 +1,267 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package rbcrun
+
+import (
+ "fmt"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "regexp"
+ "strings"
+
+ "go.starlark.net/starlark"
+ "go.starlark.net/starlarkstruct"
+)
+
+const callerDirKey = "callerDir"
+
+var LoadPathRoot = "."
+var shellPath string
+
+type modentry struct {
+ globals starlark.StringDict
+ err error
+}
+
+var moduleCache = make(map[string]*modentry)
+
+var builtins starlark.StringDict
+
+func moduleName2AbsPath(moduleName string, callerDir string) (string, error) {
+ path := moduleName
+ if ix := strings.LastIndex(path, ":"); ix >= 0 {
+ path = path[0:ix] + string(os.PathSeparator) + path[ix+1:]
+ }
+ if strings.HasPrefix(path, "//") {
+ return filepath.Abs(filepath.Join(LoadPathRoot, path[2:]))
+ } else if strings.HasPrefix(moduleName, ":") {
+ return filepath.Abs(filepath.Join(callerDir, path[1:]))
+ } else {
+ return filepath.Abs(path)
+ }
+}
+
+// loader implements load statement. The format of the loaded module URI is
+// [//path]:base[|symbol]
+// The file path is $ROOT/path/base if path is present, <caller_dir>/base otherwise.
+// The presence of `|symbol` indicates that the loader should return a single 'symbol'
+// bound to None if file is missing.
+func loader(thread *starlark.Thread, module string) (starlark.StringDict, error) {
+ pipePos := strings.LastIndex(module, "|")
+ mustLoad := pipePos < 0
+ var defaultSymbol string
+ if !mustLoad {
+ defaultSymbol = module[pipePos+1:]
+ module = module[:pipePos]
+ }
+ modulePath, err := moduleName2AbsPath(module, thread.Local(callerDirKey).(string))
+ if err != nil {
+ return nil, err
+ }
+ e, ok := moduleCache[modulePath]
+ if e == nil {
+ if ok {
+ return nil, fmt.Errorf("cycle in load graph")
+ }
+
+ // Add a placeholder to indicate "load in progress".
+ moduleCache[modulePath] = nil
+
+ // Decide if we should load.
+ if !mustLoad {
+ if _, err := os.Stat(modulePath); err == nil {
+ mustLoad = true
+ }
+ }
+
+ // Load or return default
+ if mustLoad {
+ childThread := &starlark.Thread{Name: "exec " + module, Load: thread.Load}
+ // Cheating for the sake of testing:
+ // propagate starlarktest's Reporter key, otherwise testing
+ // the load function may cause panic in starlarktest code.
+ const testReporterKey = "Reporter"
+ if v := thread.Local(testReporterKey); v != nil {
+ childThread.SetLocal(testReporterKey, v)
+ }
+
+ childThread.SetLocal(callerDirKey, filepath.Dir(modulePath))
+ globals, err := starlark.ExecFile(childThread, modulePath, nil, builtins)
+ e = &modentry{globals, err}
+ } else {
+ e = &modentry{starlark.StringDict{defaultSymbol: starlark.None}, nil}
+ }
+
+ // Update the cache.
+ moduleCache[modulePath] = e
+ }
+ return e.globals, e.err
+}
+
+// fileExists returns True if file with given name exists.
+func fileExists(_ *starlark.Thread, b *starlark.Builtin, args starlark.Tuple,
+ kwargs []starlark.Tuple) (starlark.Value, error) {
+ var path string
+ if err := starlark.UnpackPositionalArgs(b.Name(), args, kwargs, 1, &path); err != nil {
+ return starlark.None, err
+ }
+ if stat, err := os.Stat(path); err != nil || stat.IsDir() {
+ return starlark.False, nil
+ }
+ return starlark.True, nil
+}
+
+// regexMatch(pattern, s) returns True if s matches pattern (a regex)
+func regexMatch(_ *starlark.Thread, b *starlark.Builtin, args starlark.Tuple,
+ kwargs []starlark.Tuple) (starlark.Value, error) {
+ var pattern, s string
+ if err := starlark.UnpackPositionalArgs(b.Name(), args, kwargs, 2, &pattern, &s); err != nil {
+ return starlark.None, err
+ }
+ match, err := regexp.MatchString(pattern, s)
+ if err != nil {
+ return starlark.None, err
+ }
+ if match {
+ return starlark.True, nil
+ }
+ return starlark.False, nil
+}
+
+// wildcard(pattern, top=None) expands shell's glob pattern. If 'top' is present,
+// the 'top/pattern' is globbed and then 'top/' prefix is removed.
+func wildcard(_ *starlark.Thread, b *starlark.Builtin, args starlark.Tuple,
+ kwargs []starlark.Tuple) (starlark.Value, error) {
+ var pattern string
+ var top string
+
+ if err := starlark.UnpackPositionalArgs(b.Name(), args, kwargs, 1, &pattern, &top); err != nil {
+ return starlark.None, err
+ }
+
+ var files []string
+ var err error
+ if top == "" {
+ if files, err = filepath.Glob(pattern); err != nil {
+ return starlark.None, err
+ }
+ } else {
+ prefix := top + string(filepath.Separator)
+ if files, err = filepath.Glob(prefix + pattern); err != nil {
+ return starlark.None, err
+ }
+ for i := range files {
+ files[i] = strings.TrimPrefix(files[i], prefix)
+ }
+ }
+ return makeStringList(files), nil
+}
+
+// shell(command) runs OS shell with given command and returns back
+// its output the same way as Make's $(shell ) function. The end-of-lines
+// ("\n" or "\r\n") are replaced with " " in the result, and the trailing
+// end-of-line is removed.
+func shell(_ *starlark.Thread, b *starlark.Builtin, args starlark.Tuple,
+ kwargs []starlark.Tuple) (starlark.Value, error) {
+ var command string
+ if err := starlark.UnpackPositionalArgs(b.Name(), args, kwargs, 1, &command); err != nil {
+ return starlark.None, err
+ }
+ if shellPath == "" {
+ return starlark.None,
+ fmt.Errorf("cannot run shell, /bin/sh is missing (running on Windows?)")
+ }
+ cmd := exec.Command(shellPath, "-c", command)
+ // We ignore command's status
+ bytes, _ := cmd.Output()
+ output := string(bytes)
+ if strings.HasSuffix(output, "\n") {
+ output = strings.TrimSuffix(output, "\n")
+ } else {
+ output = strings.TrimSuffix(output, "\r\n")
+ }
+
+ return starlark.String(
+ strings.ReplaceAll(
+ strings.ReplaceAll(output, "\r\n", " "),
+ "\n", " ")), nil
+}
+
+func makeStringList(items []string) *starlark.List {
+ elems := make([]starlark.Value, len(items))
+ for i, item := range items {
+ elems[i] = starlark.String(item)
+ }
+ return starlark.NewList(elems)
+}
+
+// propsetFromEnv constructs a propset from the array of KEY=value strings
+func structFromEnv(env []string) *starlarkstruct.Struct {
+ sd := make(map[string]starlark.Value, len(env))
+ for _, x := range env {
+ kv := strings.SplitN(x, "=", 2)
+ sd[kv[0]] = starlark.String(kv[1])
+ }
+ return starlarkstruct.FromStringDict(starlarkstruct.Default, sd)
+}
+
+func setup(env []string) {
+ // Create the symbols that aid makefile conversion. See README.md
+ builtins = starlark.StringDict{
+ "struct": starlark.NewBuiltin("struct", starlarkstruct.Make),
+ "rblf_cli": structFromEnv(env),
+ "rblf_env": structFromEnv(os.Environ()),
+ // To convert makefile's $(wildcard foo)
+ "rblf_file_exists": starlark.NewBuiltin("rblf_file_exists", fileExists),
+ // To convert makefile's $(filter ...)/$(filter-out)
+ "rblf_regex": starlark.NewBuiltin("rblf_regex", regexMatch),
+ // To convert makefile's $(shell cmd)
+ "rblf_shell": starlark.NewBuiltin("rblf_shell", shell),
+ // To convert makefile's $(wildcard foo*)
+ "rblf_wildcard": starlark.NewBuiltin("rblf_wildcard", wildcard),
+ }
+
+ // NOTE(asmundak): OS-specific. Behave similar to Linux `system` call,
+ // which always uses /bin/sh to run the command
+ shellPath = "/bin/sh"
+ if _, err := os.Stat(shellPath); err != nil {
+ shellPath = ""
+ }
+}
+
+// Parses, resolves, and executes a Starlark file.
+// filename and src parameters are as for starlark.ExecFile:
+// * filename is the name of the file to execute,
+// and the name that appears in error messages;
+// * src is an optional source of bytes to use instead of filename
+// (it can be a string, or a byte array, or an io.Reader instance)
+// * commandVars is an array of "VAR=value" items. They are accessible from
+// the starlark script as members of the `rblf_cli` propset.
+func Run(filename string, src interface{}, commandVars []string) error {
+ setup(commandVars)
+
+ mainThread := &starlark.Thread{
+ Name: "main",
+ Print: func(_ *starlark.Thread, msg string) { fmt.Println(msg) },
+ Load: loader,
+ }
+ absPath, err := filepath.Abs(filename)
+ if err == nil {
+ mainThread.SetLocal(callerDirKey, filepath.Dir(absPath))
+ _, err = starlark.ExecFile(mainThread, absPath, src, builtins)
+ }
+ return err
+}
diff --git a/tools/rbcrun/host_test.go b/tools/rbcrun/host_test.go
new file mode 100644
index 0000000..3be5ee6
--- /dev/null
+++ b/tools/rbcrun/host_test.go
@@ -0,0 +1,159 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package rbcrun
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "runtime"
+ "testing"
+
+ "go.starlark.net/resolve"
+ "go.starlark.net/starlark"
+ "go.starlark.net/starlarktest"
+)
+
+// In order to use "assert.star" from go/starlark.net/starlarktest in the tests,
+// provide:
+// * load function that handles "assert.star"
+// * starlarktest.DataFile function that finds its location
+
+func init() {
+ starlarktestSetup()
+}
+
+func starlarktestSetup() {
+ resolve.AllowLambda = true
+ starlarktest.DataFile = func(pkgdir, filename string) string {
+ // The caller expects this function to return the path to the
+ // data file. The implementation assumes that the source file
+ // containing the caller and the data file are in the same
+ // directory. It's ugly. Not sure what's the better way.
+ // TODO(asmundak): handle Bazel case
+ _, starlarktestSrcFile, _, _ := runtime.Caller(1)
+ if filepath.Base(starlarktestSrcFile) != "starlarktest.go" {
+ panic(fmt.Errorf("this function should be called from starlarktest.go, got %s",
+ starlarktestSrcFile))
+ }
+ return filepath.Join(filepath.Dir(starlarktestSrcFile), filename)
+ }
+}
+
+// Common setup for the tests: create thread, change to the test directory
+func testSetup(t *testing.T, env []string) *starlark.Thread {
+ setup(env)
+ thread := &starlark.Thread{
+ Load: func(thread *starlark.Thread, module string) (starlark.StringDict, error) {
+ if module == "assert.star" {
+ return starlarktest.LoadAssertModule()
+ }
+ return nil, fmt.Errorf("load not implemented")
+ }}
+ starlarktest.SetReporter(thread, t)
+ if err := os.Chdir(dataDir()); err != nil {
+ t.Fatal(err)
+ }
+ return thread
+}
+
+func dataDir() string {
+ _, thisSrcFile, _, _ := runtime.Caller(0)
+ return filepath.Join(filepath.Dir(thisSrcFile), "testdata")
+
+}
+
+func exerciseStarlarkTestFile(t *testing.T, starFile string) {
+ // In order to use "assert.star" from go/starlark.net/starlarktest in the tests, provide:
+ // * load function that handles "assert.star"
+ // * starlarktest.DataFile function that finds its location
+ setup(nil)
+ thread := &starlark.Thread{
+ Load: func(thread *starlark.Thread, module string) (starlark.StringDict, error) {
+ if module == "assert.star" {
+ return starlarktest.LoadAssertModule()
+ }
+ return nil, fmt.Errorf("load not implemented")
+ }}
+ starlarktest.SetReporter(thread, t)
+ _, thisSrcFile, _, _ := runtime.Caller(0)
+ filename := filepath.Join(filepath.Dir(thisSrcFile), starFile)
+ if _, err := starlark.ExecFile(thread, filename, nil, builtins); err != nil {
+ if err, ok := err.(*starlark.EvalError); ok {
+ t.Fatal(err.Backtrace())
+ }
+ t.Fatal(err)
+ }
+}
+
+func TestCliAndEnv(t *testing.T) {
+ // TODO(asmundak): convert this to use exerciseStarlarkTestFile
+ if err := os.Setenv("TEST_ENVIRONMENT_FOO", "test_environment_foo"); err != nil {
+ t.Fatal(err)
+ }
+ thread := testSetup(t, []string{"CLI_FOO=foo"})
+ if _, err := starlark.ExecFile(thread, "cli_and_env.star", nil, builtins); err != nil {
+ if err, ok := err.(*starlark.EvalError); ok {
+ t.Fatal(err.Backtrace())
+ }
+ t.Fatal(err)
+ }
+}
+
+func TestFileOps(t *testing.T) {
+ // TODO(asmundak): convert this to use exerciseStarlarkTestFile
+ if err := os.Setenv("TEST_DATA_DIR", dataDir()); err != nil {
+ t.Fatal(err)
+ }
+ thread := testSetup(t, nil)
+ if _, err := starlark.ExecFile(thread, "file_ops.star", nil, builtins); err != nil {
+ if err, ok := err.(*starlark.EvalError); ok {
+ t.Fatal(err.Backtrace())
+ }
+ t.Fatal(err)
+ }
+}
+
+func TestLoad(t *testing.T) {
+ // TODO(asmundak): convert this to use exerciseStarlarkTestFile
+ thread := testSetup(t, nil)
+ thread.Load = func(thread *starlark.Thread, module string) (starlark.StringDict, error) {
+ if module == "assert.star" {
+ return starlarktest.LoadAssertModule()
+ } else {
+ return loader(thread, module)
+ }
+ }
+ dir := dataDir()
+ thread.SetLocal(callerDirKey, dir)
+ LoadPathRoot = filepath.Dir(dir)
+ if _, err := starlark.ExecFile(thread, "load.star", nil, builtins); err != nil {
+ if err, ok := err.(*starlark.EvalError); ok {
+ t.Fatal(err.Backtrace())
+ }
+ t.Fatal(err)
+ }
+}
+
+func TestRegex(t *testing.T) {
+ exerciseStarlarkTestFile(t, "testdata/regex.star")
+}
+
+func TestShell(t *testing.T) {
+ if err := os.Setenv("TEST_DATA_DIR", dataDir()); err != nil {
+ t.Fatal(err)
+ }
+ exerciseStarlarkTestFile(t, "testdata/shell.star")
+}
diff --git a/tools/rbcrun/testdata/cli_and_env.star b/tools/rbcrun/testdata/cli_and_env.star
new file mode 100644
index 0000000..d6f464a
--- /dev/null
+++ b/tools/rbcrun/testdata/cli_and_env.star
@@ -0,0 +1,11 @@
+# Tests rblf_env access
+load("assert.star", "assert")
+
+
+def test():
+ assert.eq(rblf_env.TEST_ENVIRONMENT_FOO, "test_environment_foo")
+ assert.fails(lambda: rblf_env.FOO_BAR_BAZ, ".*struct has no .FOO_BAR_BAZ attribute$")
+ assert.eq(rblf_cli.CLI_FOO, "foo")
+
+
+test()
diff --git a/tools/rbcrun/testdata/file_ops.star b/tools/rbcrun/testdata/file_ops.star
new file mode 100644
index 0000000..e1f1ac2
--- /dev/null
+++ b/tools/rbcrun/testdata/file_ops.star
@@ -0,0 +1,18 @@
+# Tests file ops builtins
+load("assert.star", "assert")
+
+
+def test():
+ myname = "file_ops.star"
+ assert.true(rblf_file_exists(myname), "the file %s does exist" % myname)
+ assert.true(not rblf_file_exists("no_such_file"), "the file no_such_file does not exist")
+ files = rblf_wildcard("*.star")
+ assert.true(myname in files, "expected %s in %s" % (myname, files))
+ # TEST_DATA_DIR is set by the caller to the path where this file resides
+ files = rblf_wildcard("*.star", rblf_env.TEST_DATA_DIR)
+ assert.true(myname in files, "expected %s in %s" % (myname, files))
+ files = rblf_wildcard("*.xxx")
+ assert.true(len(files) == 0, "expansion should be empty but contains %s" % files)
+
+
+test()
diff --git a/tools/rbcrun/testdata/load.star b/tools/rbcrun/testdata/load.star
new file mode 100644
index 0000000..b14f2bb
--- /dev/null
+++ b/tools/rbcrun/testdata/load.star
@@ -0,0 +1,14 @@
+# Test load, simple and conditional
+load("assert.star", "assert")
+load(":module1.star", test1="test")
+load("//testdata:module2.star", test2="test")
+load(":module3|test", test3="test")
+
+
+def test():
+ assert.eq(test1, "module1")
+ assert.eq(test2, "module2")
+ assert.eq(test3, None)
+
+
+test()
diff --git a/tools/rbcrun/testdata/module1.star b/tools/rbcrun/testdata/module1.star
new file mode 100644
index 0000000..913fb7d
--- /dev/null
+++ b/tools/rbcrun/testdata/module1.star
@@ -0,0 +1,7 @@
+# Module loaded by load.star
+load("assert.star", "assert")
+
+# Make sure that builtins are defined for the loaded module, too
+assert.true(rblf_file_exists("module1.star"))
+assert.true(not rblf_file_exists("no_such file"))
+test = "module1"
diff --git a/tools/rbcrun/testdata/module2.star b/tools/rbcrun/testdata/module2.star
new file mode 100644
index 0000000..f6818a2
--- /dev/null
+++ b/tools/rbcrun/testdata/module2.star
@@ -0,0 +1,2 @@
+# Module loaded by load.star
+test = "module2"
diff --git a/tools/rbcrun/testdata/regex.star b/tools/rbcrun/testdata/regex.star
new file mode 100644
index 0000000..04e1d42
--- /dev/null
+++ b/tools/rbcrun/testdata/regex.star
@@ -0,0 +1,13 @@
+# Tests rblf_regex
+load("assert.star", "assert")
+
+
+def test():
+ pattern = "^(foo.*bar|abc.*d|1.*)$"
+ for w in ("foobar", "fooxbar", "abcxd", "123"):
+ assert.true(rblf_regex(pattern, w), "%s should match %s" % (w, pattern))
+ for w in ("afoobar", "abcde"):
+ assert.true(not rblf_regex(pattern, w), "%s should not match %s" % (w, pattern))
+
+
+test()
diff --git a/tools/rbcrun/testdata/shell.star b/tools/rbcrun/testdata/shell.star
new file mode 100644
index 0000000..ad10697
--- /dev/null
+++ b/tools/rbcrun/testdata/shell.star
@@ -0,0 +1,5 @@
+# Tests the rblf_shell builtin
+load("assert.star", "assert")
+
+assert.eq("load.star shell.star", rblf_shell("cd %s && ls -1 shell.star load.star 2>&1" % rblf_env.TEST_DATA_DIR))
+assert.eq("shell.star", rblf_shell("cd %s && echo shell.sta*" % rblf_env.TEST_DATA_DIR))
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 0c84d4f..65c035e 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -22,6 +22,10 @@
// `releasetools_X_defaults` in their defaults.
//
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
python_defaults {
name: "releasetools_add_img_to_target_files_defaults",
srcs: [
@@ -129,6 +133,7 @@
required: [
"brillo_update_payload",
"checkvintf",
+ "minigzip",
"lz4",
"toybox",
"unpack_bootimg",
@@ -447,6 +452,7 @@
required: [
"checkvintf",
"host_init_verifier",
+ "secilc",
],
target: {
darwin: {
@@ -518,6 +524,23 @@
],
}
+python_binary_host {
+ name: "verity_utils",
+ defaults: ["releasetools_binary_defaults"],
+ srcs: [
+ "verity_utils.py",
+ ],
+ libs: [
+ "releasetools_common",
+ ],
+ required: [
+ "append2simg",
+ "build_verity_metadata",
+ "build_verity_tree",
+ "fec",
+ ],
+}
+
//
// Tests.
//
@@ -569,7 +592,6 @@
name: "releasetools_test",
defaults: ["releasetools_test_defaults"],
main: "test_utils.py",
- test_suites: ["general-tests"],
version: {
py2: {
enabled: true,
diff --git a/tools/releasetools/TEST_MAPPING b/tools/releasetools/TEST_MAPPING
deleted file mode 100644
index 3d57960..0000000
--- a/tools/releasetools/TEST_MAPPING
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "presubmit": [
- {
- "name": "releasetools_test",
- "host" : true
- }
- ]
-}
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 7839b47..4fe10c6 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -64,7 +64,7 @@
import verity_utils
import ota_metadata_pb2
-from apex_utils import GetApexInfoFromTargetFiles
+from apex_utils import GetSystemApexInfoFromTargetFiles
if sys.hexversion < 0x02070000:
print("Python 2.7 or newer is required.", file=sys.stderr)
@@ -350,6 +350,41 @@
img.Write()
return img.name
+def AddPvmfw(output_zip):
+ """Adds the pvmfw image.
+
+ Uses the image under IMAGES/ if it already exists. Otherwise looks for the
+ image under PREBUILT_IMAGES/, signs it as needed, and returns the image name.
+ """
+ img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "pvmfw.img")
+ if os.path.exists(img.name):
+ logger.info("pvmfw.img already exists; no need to rebuild...")
+ return img.name
+
+ pvmfw_prebuilt_path = os.path.join(
+ OPTIONS.input_tmp, "PREBUILT_IMAGES", "pvmfw.img")
+ assert os.path.exists(pvmfw_prebuilt_path)
+ shutil.copy(pvmfw_prebuilt_path, img.name)
+
+ # AVB-sign the image as needed.
+ if OPTIONS.info_dict.get("avb_enable") == "true":
+ # Signing requires +w
+ os.chmod(img.name, os.stat(img.name).st_mode | stat.S_IWUSR)
+
+ avbtool = OPTIONS.info_dict["avb_avbtool"]
+ part_size = OPTIONS.info_dict["pvmfw_size"]
+ # The AVB hash footer will be replaced if already present.
+ cmd = [avbtool, "add_hash_footer", "--image", img.name,
+ "--partition_size", str(part_size), "--partition_name", "pvmfw"]
+ common.AppendAVBSigningArgs(cmd, "pvmfw")
+ args = OPTIONS.info_dict.get("avb_pvmfw_add_hash_footer_args")
+ if args and args.strip():
+ cmd.extend(shlex.split(args))
+ common.RunAndCheckOutput(cmd)
+
+ img.Write()
+ return img.name
+
def AddCustomImages(output_zip, partition_name):
"""Adds and signs custom images in IMAGES/.
@@ -757,7 +792,7 @@
"{}.img".format(partition_name))))
def AddApexInfo(output_zip):
- apex_infos = GetApexInfoFromTargetFiles(OPTIONS.input_tmp)
+ apex_infos = GetSystemApexInfoFromTargetFiles(OPTIONS.input_tmp)
apex_metadata_proto = ota_metadata_pb2.ApexMetadata()
apex_metadata_proto.apex_info.extend(apex_infos)
apex_info_bytes = apex_metadata_proto.SerializeToString()
@@ -948,6 +983,10 @@
banner("dtbo")
partitions['dtbo'] = AddDtbo(output_zip)
+ if OPTIONS.info_dict.get("has_pvmfw") == "true":
+ banner("pvmfw")
+ partitions['pvmfw'] = AddPvmfw(output_zip)
+
# Custom images.
custom_partitions = OPTIONS.info_dict.get(
"avb_custom_images_partition_list", "").strip().split()
@@ -988,8 +1027,9 @@
AddVBMeta(output_zip, partitions, "vbmeta", vbmeta_partitions)
if OPTIONS.info_dict.get("use_dynamic_partitions") == "true":
- banner("super_empty")
- AddSuperEmpty(output_zip)
+ if OPTIONS.info_dict.get("build_super_empty_partition") == "true":
+ banner("super_empty")
+ AddSuperEmpty(output_zip)
if OPTIONS.info_dict.get("build_super_partition") == "true":
if OPTIONS.info_dict.get(
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 644b92a..1c88053 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -516,7 +516,7 @@
raise ApexInfoError(
'Failed to get type for {}:\n{}'.format(apex_file, e))
-def GetApexInfoFromTargetFiles(input_file):
+def GetSystemApexInfoFromTargetFiles(input_file):
"""
Get information about system APEX stored in the input_file zip
@@ -538,6 +538,11 @@
tmp_dir = UnzipTemp(input_file, ["SYSTEM/apex/*"])
target_dir = os.path.join(tmp_dir, "SYSTEM/apex/")
+ # Partial target-files packages for vendor-only builds may not contain
+ # a system apex directory.
+ if not os.path.exists(target_dir):
+ return []
+
apex_infos = []
debugfs_path = "debugfs"
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 820c128..301d0da 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -73,9 +73,9 @@
"""
cmd = ["find", path, "-print"]
output = common.RunAndCheckOutput(cmd, verbose=False)
- # increase by > 4% as number of files and directories is not whole picture.
+ # increase by > 6% as number of files and directories is not whole picture.
inodes = output.count('\n')
- spare_inodes = inodes * 4 // 100
+ spare_inodes = inodes * 6 // 100
min_spare_inodes = 12
if spare_inodes < min_spare_inodes:
spare_inodes = min_spare_inodes
@@ -308,6 +308,10 @@
build_command.extend(["-C", fs_config])
if "selinux_fc" in prop_dict:
build_command.extend(["-c", prop_dict["selinux_fc"]])
+ if "timestamp" in prop_dict:
+ build_command.extend(["-T", str(prop_dict["timestamp"])])
+ if "uuid" in prop_dict:
+ build_command.extend(["-U", prop_dict["uuid"]])
elif fs_type.startswith("squash"):
build_command = ["mksquashfsimage.sh"]
build_command.extend([in_dir, out_file])
diff --git a/tools/releasetools/check_partition_sizes.py b/tools/releasetools/check_partition_sizes.py
index 745c136..3047ddb 100644
--- a/tools/releasetools/check_partition_sizes.py
+++ b/tools/releasetools/check_partition_sizes.py
@@ -40,6 +40,7 @@
logger = logging.getLogger(__name__)
+
class Expression(object):
def __init__(self, desc, expr, value=None):
# Human-readable description
@@ -62,6 +63,20 @@
else:
logger.log(level, msg)
+ def CheckLt(self, other, level=logging.ERROR):
+ format_args = (self.desc, other.desc, self.expr, self.value,
+ other.expr, other.value)
+ if self.value < other.value:
+ logger.info("%s is less than %s:\n%s == %d < %s == %d",
+ *format_args)
+ else:
+ msg = "{} is greater than or equal to {}:\n{} == {} >= {} == {}".format(
+ *format_args)
+ if level == logging.ERROR:
+ raise RuntimeError(msg)
+ else:
+ logger.log(level, msg)
+
def CheckEq(self, other):
format_args = (self.desc, other.desc, self.expr, self.value,
other.expr, other.value)
@@ -116,7 +131,6 @@
int(info_dict["super_partition_size"])
self.info_dict = info_dict
-
def _ReadSizeOfPartition(self, name):
# Tests uses *_image_size instead (to avoid creating empty sparse images
# on disk)
@@ -124,7 +138,6 @@
return int(self.info_dict[name + "_image_size"])
return sparse_img.GetImagePartitionSize(self.info_dict[name + "_image"])
-
# Round result to BOARD_SUPER_PARTITION_ALIGNMENT
def _RoundPartitionSize(self, size):
alignment = self.info_dict.get("super_partition_alignment")
@@ -132,7 +145,6 @@
return size
return (size + alignment - 1) // alignment * alignment
-
def _CheckSuperPartitionSize(self):
info_dict = self.info_dict
super_block_devices = \
@@ -239,7 +251,20 @@
max_size = Expression(
"BOARD_SUPER_PARTITION_SIZE{}".format(size_limit_suffix),
int(info_dict["super_partition_size"]) // num_slots)
- sum_size.CheckLe(max_size)
+ # Retrofit DAP will build metadata as part of super image.
+ if Dap.Get(info_dict) == Dap.RDAP:
+ sum_size.CheckLe(max_size)
+ return
+
+ sum_size.CheckLt(max_size)
+ # Display a warning if group size + 1M >= super size
+ minimal_metadata_size = 1024 * 1024 # 1MiB
+ sum_size_plus_metadata = Expression(
+ "sum of sizes of {} plus 1M metadata".format(groups),
+ "+".join(str(size) for size in
+ group_size_list + [minimal_metadata_size]),
+ sum(group_size_list) + minimal_metadata_size)
+ sum_size_plus_metadata.CheckLe(max_size, level=logging.WARNING)
def Run(self):
self._CheckAllPartitionSizes()
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 26c4ae8..b6ed8a4 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -109,10 +109,12 @@
# The partitions allowed to be signed by AVB (Android Verified Boot 2.0). Note
# that system_other is not in the list because we don't want to include its
-# descriptor into vbmeta.img.
-AVB_PARTITIONS = ('boot', 'dtbo', 'odm', 'product', 'recovery', 'system',
- 'system_ext', 'vendor', 'vendor_boot', 'vendor_dlkm',
- 'odm_dlkm')
+# descriptor into vbmeta.img. When adding a new entry here, the
+# AVB_FOOTER_ARGS_BY_PARTITION in sign_target_files_apks need to be updated
+# accordingly.
+AVB_PARTITIONS = ('boot', 'dtbo', 'odm', 'product', 'pvmfw', 'recovery',
+ 'system', 'system_ext', 'vendor', 'vendor_boot',
+ 'vendor_dlkm', 'odm_dlkm')
# Chained VBMeta partitions.
AVB_VBMETA_PARTITIONS = ('vbmeta_system', 'vbmeta_vendor')
@@ -276,29 +278,6 @@
return subprocess.Popen(args, **kwargs)
-def RunAndWait(args, verbose=None, **kwargs):
- """Runs the given command waiting for it to complete.
-
- Args:
- args: The command represented as a list of strings.
- verbose: Whether the commands should be shown. Default to the global
- verbosity if unspecified.
- kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
- stdin, etc. stdout and stderr will default to subprocess.PIPE and
- subprocess.STDOUT respectively unless caller specifies any of them.
-
- Raises:
- ExternalError: On non-zero exit from the command.
- """
- proc = Run(args, verbose=verbose, **kwargs)
- proc.wait()
-
- if proc.returncode != 0:
- raise ExternalError(
- "Failed to run command '{}' (exit code {})".format(
- args, proc.returncode))
-
-
def RunAndCheckOutput(args, verbose=None, **kwargs):
"""Runs the given command and returns the output.
@@ -663,7 +642,7 @@
"""Extracts the contents of fn from input zipfile or directory into a file."""
if isinstance(input_file, zipfile.ZipFile):
tmp_file = MakeTempFile(os.path.basename(fn))
- with open(tmp_file, 'w') as f:
+ with open(tmp_file, 'wb') as f:
f.write(input_file.read(fn))
return tmp_file
else:
@@ -672,6 +651,16 @@
raise KeyError(fn)
return file
+class RamdiskFormat(object):
+ LZ4 = 1
+ GZ = 2
+
+def _GetRamdiskFormat(info_dict):
+ if info_dict.get('lz4_ramdisks') == 'true':
+ ramdisk_format = RamdiskFormat.LZ4
+ else:
+ ramdisk_format = RamdiskFormat.GZ
+ return ramdisk_format
def LoadInfoDict(input_file, repacking=False):
"""Loads the key/value pairs from the given input target_files.
@@ -774,13 +763,14 @@
# Load recovery fstab if applicable.
d["fstab"] = _FindAndLoadRecoveryFstab(d, input_file, read_helper)
+ ramdisk_format = _GetRamdiskFormat(d)
# Tries to load the build props for all partitions with care_map, including
# system and vendor.
for partition in PARTITIONS_WITH_BUILD_PROP:
partition_prop = "{}.build.prop".format(partition)
d[partition_prop] = PartitionBuildProps.FromInputFile(
- input_file, partition)
+ input_file, partition, ramdisk_format=ramdisk_format)
d["build.prop"] = d["system.build.prop"]
# Set up the salt (based on fingerprint) that will be used when adding AVB
@@ -839,6 +829,9 @@
placeholder_values: A dict of runtime variables' values to replace the
placeholders in the build.prop file. We expect exactly one value for
each of the variables.
+ ramdisk_format: If name is "boot", the format of ramdisk inside the
+ boot image. Otherwise, its value is ignored.
+ Use lz4 to decompress by default. If its value is gzip, use minigzip.
"""
def __init__(self, input_file, name, placeholder_values=None):
@@ -861,11 +854,11 @@
return props
@staticmethod
- def FromInputFile(input_file, name, placeholder_values=None):
+ def FromInputFile(input_file, name, placeholder_values=None, ramdisk_format=RamdiskFormat.LZ4):
"""Loads the build.prop file and builds the attributes."""
if name == "boot":
- data = PartitionBuildProps._ReadBootPropFile(input_file)
+ data = PartitionBuildProps._ReadBootPropFile(input_file, ramdisk_format=ramdisk_format)
else:
data = PartitionBuildProps._ReadPartitionPropFile(input_file, name)
@@ -874,7 +867,7 @@
return props
@staticmethod
- def _ReadBootPropFile(input_file):
+ def _ReadBootPropFile(input_file, ramdisk_format):
"""
Read build.prop for boot image from input_file.
Return empty string if not found.
@@ -884,11 +877,11 @@
except KeyError:
logger.warning('Failed to read IMAGES/boot.img')
return ''
- prop_file = GetBootImageBuildProp(boot_img)
+ prop_file = GetBootImageBuildProp(boot_img, ramdisk_format=ramdisk_format)
if prop_file is None:
return ''
- with open(prop_file) as f:
- return f.read().decode()
+ with open(prop_file, "r") as f:
+ return f.read()
@staticmethod
def _ReadPartitionPropFile(input_file, name):
@@ -1360,6 +1353,35 @@
RunAndCheckOutput(verify_cmd)
+def AppendGkiSigningArgs(cmd):
+ """Append GKI signing arguments for mkbootimg."""
+ # e.g., --gki_signing_key path/to/signing_key
+ # --gki_signing_algorithm SHA256_RSA4096"
+
+ key_path = OPTIONS.info_dict.get("gki_signing_key_path")
+ # It's fine that a non-GKI boot.img has no gki_signing_key_path.
+ if not key_path:
+ return
+
+ if not os.path.exists(key_path) and OPTIONS.search_path:
+ new_key_path = os.path.join(OPTIONS.search_path, key_path)
+ if os.path.exists(new_key_path):
+ key_path = new_key_path
+
+ # Checks key_path exists, before appending --gki_signing_* args.
+ if not os.path.exists(key_path):
+ raise ExternalError('gki_signing_key_path: "{}" not found'.format(key_path))
+
+ algorithm = OPTIONS.info_dict.get("gki_signing_algorithm")
+ if key_path and algorithm:
+ cmd.extend(["--gki_signing_key", key_path,
+ "--gki_signing_algorithm", algorithm])
+
+ signature_args = OPTIONS.info_dict.get("gki_signing_signature_args")
+ if signature_args:
+ cmd.extend(["--gki_signing_signature_args", signature_args])
+
+
def BuildVBMeta(image_path, partitions, name, needed_partitions):
"""Creates a VBMeta image.
@@ -1428,7 +1450,8 @@
AddAftlInclusionProof(image_path)
-def _MakeRamdisk(sourcedir, fs_config_file=None, lz4_ramdisks=False):
+def _MakeRamdisk(sourcedir, fs_config_file=None,
+ ramdisk_format=RamdiskFormat.GZ):
ramdisk_img = tempfile.NamedTemporaryFile()
if fs_config_file is not None and os.access(fs_config_file, os.F_OK):
@@ -1437,11 +1460,13 @@
else:
cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
p1 = Run(cmd, stdout=subprocess.PIPE)
- if lz4_ramdisks:
+ if ramdisk_format == RamdiskFormat.LZ4:
p2 = Run(["lz4", "-l", "-12", "--favor-decSpeed"], stdin=p1.stdout,
stdout=ramdisk_img.file.fileno())
- else:
+ elif ramdisk_format == RamdiskFormat.GZ:
p2 = Run(["minigzip"], stdin=p1.stdout, stdout=ramdisk_img.file.fileno())
+ else:
+ raise ValueError("Only support lz4 or minigzip ramdisk format.")
p2.wait()
p1.wait()
@@ -1488,8 +1513,9 @@
img = tempfile.NamedTemporaryFile()
if has_ramdisk:
- use_lz4 = info_dict.get("lz4_ramdisks") == 'true'
- ramdisk_img = _MakeRamdisk(sourcedir, fs_config_file, lz4_ramdisks=use_lz4)
+ ramdisk_format = _GetRamdiskFormat(info_dict)
+ ramdisk_img = _MakeRamdisk(sourcedir, fs_config_file,
+ ramdisk_format=ramdisk_format)
# use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"
@@ -1541,6 +1567,8 @@
if has_ramdisk:
cmd.extend(["--ramdisk", ramdisk_img.name])
+ AppendGkiSigningArgs(cmd)
+
img_unsigned = None
if info_dict.get("vboot"):
img_unsigned = tempfile.NamedTemporaryFile()
@@ -1675,8 +1703,8 @@
img = tempfile.NamedTemporaryFile()
- use_lz4 = info_dict.get("lz4_ramdisks") == 'true'
- ramdisk_img = _MakeRamdisk(sourcedir, lz4_ramdisks=use_lz4)
+ ramdisk_format = _GetRamdiskFormat(info_dict)
+ ramdisk_img = _MakeRamdisk(sourcedir, ramdisk_format=ramdisk_format)
# use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"
@@ -1714,6 +1742,11 @@
cmd.extend(["--vendor_ramdisk", ramdisk_img.name])
cmd.extend(["--vendor_boot", img.name])
+ fn = os.path.join(sourcedir, "vendor_bootconfig")
+ if os.access(fn, os.F_OK):
+ cmd.append("--vendor_bootconfig")
+ cmd.append(fn)
+
ramdisk_fragment_imgs = []
fn = os.path.join(sourcedir, "vendor_ramdisk_fragments")
if os.access(fn, os.F_OK):
@@ -1727,7 +1760,8 @@
ramdisk_fragment_pathname = fn
else:
ramdisk_fragment_root = os.path.join(sourcedir, "RAMDISK_FRAGMENTS", ramdisk_fragment)
- ramdisk_fragment_img = _MakeRamdisk(ramdisk_fragment_root, lz4_ramdisks=use_lz4)
+ ramdisk_fragment_img = _MakeRamdisk(ramdisk_fragment_root,
+ ramdisk_format=ramdisk_format)
ramdisk_fragment_imgs.append(ramdisk_fragment_img)
ramdisk_fragment_pathname = ramdisk_fragment_img.name
cmd.extend(["--vendor_ramdisk_fragment", ramdisk_fragment_pathname])
@@ -1957,12 +1991,13 @@
# filename listed in system.map may contain an additional leading slash
# (i.e. "//system/framework/am.jar"). Using lstrip to get consistent
# results.
- arcname = entry.replace(which, which.upper(), 1).lstrip('/')
-
- # Special handling another case, where files not under /system
+ # And handle another special case, where files not under /system
# (e.g. "/sbin/charger") are packed under ROOT/ in a target_files.zip.
- if which == 'system' and not arcname.startswith('SYSTEM'):
+ arcname = entry.lstrip('/')
+ if which == 'system' and not arcname.startswith('system'):
arcname = 'ROOT/' + arcname
+ else:
+ arcname = arcname.replace(which, which.upper(), 1)
assert arcname in input_zip.namelist(), \
"Failed to find the ZIP entry for {}".format(entry)
@@ -3645,12 +3680,12 @@
append('move %s %s' % (p, u.tgt_group))
-def GetBootImageBuildProp(boot_img):
+def GetBootImageBuildProp(boot_img, ramdisk_format=RamdiskFormat.LZ4):
"""
Get build.prop from ramdisk within the boot image
Args:
- boot_img: the boot image file. Ramdisk must be compressed with lz4 format.
+ boot_img: the boot image file. Ramdisk must be compressed with lz4 or minigzip format.
Return:
An extracted file that stores properties in the boot image.
@@ -3663,7 +3698,16 @@
logger.warning('Unable to get boot image timestamp: no ramdisk in boot')
return None
uncompressed_ramdisk = os.path.join(tmp_dir, 'uncompressed_ramdisk')
- RunAndCheckOutput(['lz4', '-d', ramdisk, uncompressed_ramdisk])
+ if ramdisk_format == RamdiskFormat.LZ4:
+ RunAndCheckOutput(['lz4', '-d', ramdisk, uncompressed_ramdisk])
+ elif ramdisk_format == RamdiskFormat.GZ:
+ with open(ramdisk, 'rb') as input_stream:
+ with open(uncompressed_ramdisk, 'wb') as output_stream:
+ p2 = Run(['minigzip', '-d'], stdin=input_stream.fileno(), stdout=output_stream.fileno())
+ p2.wait()
+ else:
+ logger.error('Only support lz4 or minigzip ramdisk format.')
+ return None
abs_uncompressed_ramdisk = os.path.abspath(uncompressed_ramdisk)
extracted_ramdisk = MakeTempDir('extracted_ramdisk')
diff --git a/tools/releasetools/merge_target_files.py b/tools/releasetools/merge_target_files.py
index 9360d7b..16cab4f 100755
--- a/tools/releasetools/merge_target_files.py
+++ b/tools/releasetools/merge_target_files.py
@@ -93,6 +93,7 @@
import subprocess
import sys
import zipfile
+from xml.etree import ElementTree
import add_img_to_target_files
import build_super_image
@@ -658,6 +659,80 @@
os.path.join(output_target_files_dir, 'META', 'vendor_file_contexts.bin'))
+def compile_split_sepolicy(product_out, partition_map, output_policy):
+ """Uses secilc to compile a split sepolicy file.
+
+ Depends on various */etc/selinux/* and */etc/vintf/* files within partitions.
+
+ Args:
+ product_out: PRODUCT_OUT directory, containing partition directories.
+ partition_map: A map of partition name -> relative path within product_out.
+ output_policy: The name of the output policy created by secilc.
+
+ Returns:
+ A command list that can be executed to create the compiled sepolicy.
+ """
+
+ def get_file(partition, path):
+ if partition not in partition_map:
+ logger.warning('Cannot load SEPolicy files for missing partition %s',
+ partition)
+ return None
+ return os.path.join(product_out, partition_map[partition], path)
+
+ # Load the kernel sepolicy version from the FCM. This is normally provided
+ # directly to selinux.cpp as a build flag, but is also available in this file.
+ fcm_file = get_file('system', 'etc/vintf/compatibility_matrix.device.xml')
+ if not fcm_file or not os.path.exists(fcm_file):
+ raise ExternalError('Missing required file for loading sepolicy: %s', fcm)
+ kernel_sepolicy_version = ElementTree.parse(fcm_file).getroot().find(
+ 'sepolicy/kernel-sepolicy-version').text
+
+ # Load the vendor's plat sepolicy version. This is the version used for
+ # locating sepolicy mapping files.
+ vendor_plat_version_file = get_file('vendor',
+ 'etc/selinux/plat_sepolicy_vers.txt')
+ if not vendor_plat_version_file or not os.path.exists(
+ vendor_plat_version_file):
+ raise ExternalError('Missing required sepolicy file %s',
+ vendor_plat_version_file)
+ with open(vendor_plat_version_file) as f:
+ vendor_plat_version = f.read().strip()
+
+ # Use the same flags and arguments as selinux.cpp OpenSplitPolicy().
+ cmd = ['secilc', '-m', '-M', 'true', '-G', '-N']
+ cmd.extend(['-c', kernel_sepolicy_version])
+ cmd.extend(['-o', output_policy])
+ cmd.extend(['-f', '/dev/null'])
+
+ required_policy_files = (
+ ('system', 'etc/selinux/plat_sepolicy.cil'),
+ ('system', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
+ ('vendor', 'etc/selinux/vendor_sepolicy.cil'),
+ ('vendor', 'etc/selinux/plat_pub_versioned.cil'),
+ )
+ for policy in (map(lambda partition_and_path: get_file(*partition_and_path),
+ required_policy_files)):
+ if not policy or not os.path.exists(policy):
+ raise ExternalError('Missing required sepolicy file %s', policy)
+ cmd.append(policy)
+
+ optional_policy_files = (
+ ('system', 'etc/selinux/mapping/%s.compat.cil' % vendor_plat_version),
+ ('system_ext', 'etc/selinux/system_ext_sepolicy.cil'),
+ ('system_ext', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
+ ('product', 'etc/selinux/product_sepolicy.cil'),
+ ('product', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
+ ('odm', 'etc/selinux/odm_sepolicy.cil'),
+ )
+ for policy in (map(lambda partition_and_path: get_file(*partition_and_path),
+ optional_policy_files)):
+ if policy and os.path.exists(policy):
+ cmd.append(policy)
+
+ return cmd
+
+
def process_special_cases(framework_target_files_temp_dir,
vendor_target_files_temp_dir,
output_target_files_temp_dir,
@@ -887,12 +962,12 @@
output_zip,
'-C',
source_dir,
- '-l',
+ '-r',
output_target_files_list,
]
logger.info('creating %s', output_file)
- common.RunAndWait(command, verbose=True)
+ common.RunAndCheckOutput(command, verbose=True)
logger.info('finished creating %s', output_file)
return output_zip
@@ -977,17 +1052,28 @@
raise ValueError('sharedUserId APK error. See %s' %
shareduid_violation_modules)
- # Run host_init_verifier on the combined init rc files.
+ # host_init_verifier and secilc check only the following partitions:
filtered_partitions = {
partition: path
for partition, path in partition_map.items()
- # host_init_verifier checks only the following partitions:
if partition in ['system', 'system_ext', 'product', 'vendor', 'odm']
}
+
+ # Run host_init_verifier on the combined init rc files.
common.RunHostInitVerifier(
product_out=output_target_files_temp_dir,
partition_map=filtered_partitions)
+ # Check that the split sepolicy from the multiple builds can compile.
+ split_sepolicy_cmd = compile_split_sepolicy(
+ product_out=output_target_files_temp_dir,
+ partition_map=filtered_partitions,
+ output_policy=os.path.join(output_target_files_temp_dir,
+ 'META/combined.policy'))
+ logger.info('Compiling split sepolicy: %s', ' '.join(split_sepolicy_cmd))
+ common.RunAndCheckOutput(split_sepolicy_cmd)
+ # TODO(b/178864050): Run tests on the combined.policy file.
+
generate_images(output_target_files_temp_dir, rebuild_recovery)
generate_super_empty_image(output_target_files_temp_dir, output_super_empty)
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 41644d8..02b2b4d 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -211,6 +211,10 @@
Use the specified custom_image to update custom_partition when generating
an A/B OTA package. e.g. "--custom_image oem=oem.img --custom_image
cus=cus_test.img"
+
+ --disable_vabc
+ Disable Virtual A/B Compression, for builds that have compression enabled
+ by default.
"""
from __future__ import print_function
@@ -230,7 +234,7 @@
import common
import ota_utils
from ota_utils import (UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata,
- PropertyFiles)
+ PropertyFiles, SECURITY_PATCH_LEVEL_PROP_NAME)
import target_files_diff
from check_target_files_vintf import CheckVintfIfTrebleEnabled
from non_ab_ota import GenerateNonAbOtaPackage
@@ -272,6 +276,8 @@
OPTIONS.disable_verity_computation = False
OPTIONS.partial = None
OPTIONS.custom_images = {}
+OPTIONS.disable_vabc = False
+OPTIONS.spl_downgrade = False
POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
@@ -757,10 +763,12 @@
common.ZipDelete(target_file, POSTINSTALL_CONFIG)
return target_file
+
def ParseInfoDict(target_file_path):
with zipfile.ZipFile(target_file_path, 'r', allowZip64=True) as zfp:
return common.LoadInfoDict(zfp)
+
def GetTargetFilesZipForPartialUpdates(input_file, ab_partitions):
"""Returns a target-files.zip for partial ota update package generation.
@@ -881,7 +889,7 @@
with open(new_ab_partitions, 'w') as f:
for partition in ab_partitions:
if (partition in dynamic_partition_list and
- partition not in super_block_devices):
+ partition not in super_block_devices):
logger.info("Dropping %s from ab_partitions.txt", partition)
continue
f.write(partition + "\n")
@@ -955,32 +963,37 @@
return target_file
+
def GeneratePartitionTimestampFlags(partition_state):
partition_timestamps = [
part.partition_name + ":" + part.version
for part in partition_state]
return ["--partition_timestamps", ",".join(partition_timestamps)]
+
def GeneratePartitionTimestampFlagsDowngrade(
- pre_partition_state, post_partition_state):
+ pre_partition_state, post_partition_state):
assert pre_partition_state is not None
partition_timestamps = {}
for part in pre_partition_state:
partition_timestamps[part.partition_name] = part.version
for part in post_partition_state:
partition_timestamps[part.partition_name] = \
- max(part.version, partition_timestamps[part.partition_name])
+ max(part.version, partition_timestamps[part.partition_name])
return [
"--partition_timestamps",
- ",".join([key + ":" + val for (key, val) in partition_timestamps.items()])
+ ",".join([key + ":" + val for (key, val)
+ in partition_timestamps.items()])
]
+
def IsSparseImage(filepath):
with open(filepath, 'rb') as fp:
# Magic for android sparse image format
# https://source.android.com/devices/bootloader/images
return fp.read(4) == b'\x3A\xFF\x26\xED'
+
def SupportsMainlineGkiUpdates(target_file):
"""Return True if the build supports MainlineGKIUpdates.
@@ -1019,6 +1032,7 @@
pattern = re.compile(r"com\.android\.gki\..*\.apex")
return pattern.search(output) is not None
+
def GenerateAbOtaPackage(target_file, output_file, source_file=None):
"""Generates an Android OTA package that has A/B update payload."""
# Stage the output zip package for package signing.
@@ -1038,8 +1052,10 @@
target_info = common.BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
source_info = common.BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
vendor_prop = source_info.info_dict.get("vendor.build.prop")
- if vendor_prop and \
- vendor_prop.GetProp("ro.virtual_ab.compression.enabled") == "true":
+ vabc_used = vendor_prop and \
+ vendor_prop.GetProp("ro.virtual_ab.compression.enabled") == "true" and \
+ not OPTIONS.disable_vabc
+ if vabc_used:
# TODO(zhangkelvin) Remove this once FEC on VABC is supported
logger.info("Virtual AB Compression enabled, disabling FEC")
OPTIONS.disable_fec_computation = True
@@ -1090,10 +1106,13 @@
partition_timestamps_flags = GeneratePartitionTimestampFlags(
metadata.postcondition.partition_state)
+ if OPTIONS.disable_vabc:
+ additional_args += ["--disable_vabc", "true"]
additional_args += ["--max_timestamp", max_timestamp]
if SupportsMainlineGkiUpdates(source_file):
- logger.warning("Detected build with mainline GKI, include full boot image.")
+ logger.warning(
+ "Detected build with mainline GKI, include full boot image.")
additional_args.extend(["--full_boot", "true"])
payload.Generate(
@@ -1127,7 +1146,7 @@
# into A/B OTA package.
target_zip = zipfile.ZipFile(target_file, "r", allowZip64=True)
if (target_info.get("verity") == "true" or
- target_info.get("avb_enable") == "true"):
+ target_info.get("avb_enable") == "true"):
care_map_list = [x for x in ["care_map.pb", "care_map.txt"] if
"META/" + x in target_zip.namelist()]
@@ -1147,7 +1166,7 @@
apex_info_entry = target_zip.getinfo("META/apex_info.pb")
with target_zip.open(apex_info_entry, "r") as zfp:
common.ZipWriteStr(output_zip, "apex_info.pb", zfp.read(),
- compress_type=zipfile.ZIP_STORED)
+ compress_type=zipfile.ZIP_STORED)
except KeyError:
logger.warning("target_file doesn't contain apex_info.pb %s", target_file)
@@ -1257,6 +1276,11 @@
elif o == "--custom_image":
custom_partition, custom_image = a.split("=")
OPTIONS.custom_images[custom_partition] = custom_image
+ elif o == "--disable_vabc":
+ OPTIONS.disable_vabc = True
+ elif o == "--spl_downgrade":
+ OPTIONS.spl_downgrade = True
+ OPTIONS.wipe_user_data = True
else:
return False
return True
@@ -1298,6 +1322,8 @@
"boot_variable_file=",
"partial=",
"custom_image=",
+ "disable_vabc",
+ "spl_downgrade"
], extra_option_handler=option_handler)
if len(args) != 2:
@@ -1325,7 +1351,6 @@
if OPTIONS.incremental_source is None:
raise ValueError("Cannot generate downgradable full OTAs")
-
# TODO(xunchang) for retrofit and partial updates, maybe we should rebuild the
# target-file and reload the info_dict. So the info will be consistent with
# the modified target-file.
@@ -1333,7 +1358,6 @@
logger.info("--- target info ---")
common.DumpInfoDict(OPTIONS.info_dict)
-
# Load the source build dict if applicable.
if OPTIONS.incremental_source is not None:
OPTIONS.target_info_dict = OPTIONS.info_dict
@@ -1344,15 +1368,15 @@
if OPTIONS.partial:
OPTIONS.info_dict['ab_partitions'] = \
- list(
- set(OPTIONS.info_dict['ab_partitions']) & set(OPTIONS.partial)
- )
+ list(
+ set(OPTIONS.info_dict['ab_partitions']) & set(OPTIONS.partial)
+ )
if OPTIONS.source_info_dict:
OPTIONS.source_info_dict['ab_partitions'] = \
- list(
- set(OPTIONS.source_info_dict['ab_partitions']) &
- set(OPTIONS.partial)
- )
+ list(
+ set(OPTIONS.source_info_dict['ab_partitions']) &
+ set(OPTIONS.partial)
+ )
# Load OEM dicts if provided.
OPTIONS.oem_dicts = _LoadOemDicts(OPTIONS.oem_source)
@@ -1361,7 +1385,7 @@
# use_dynamic_partitions but target build does.
if (OPTIONS.source_info_dict and
OPTIONS.source_info_dict.get("use_dynamic_partitions") != "true" and
- OPTIONS.target_info_dict.get("use_dynamic_partitions") == "true"):
+ OPTIONS.target_info_dict.get("use_dynamic_partitions") == "true"):
if OPTIONS.target_info_dict.get("dynamic_partition_retrofit") != "true":
raise common.ExternalError(
"Expect to generate incremental OTA for retrofitting dynamic "
@@ -1378,7 +1402,7 @@
allow_non_ab = OPTIONS.info_dict.get("allow_non_ab") == "true"
if OPTIONS.force_non_ab:
assert allow_non_ab,\
- "--force_non_ab only allowed on devices that supports non-A/B"
+ "--force_non_ab only allowed on devices that support non-A/B"
assert ab_update, "--force_non_ab only allowed on A/B devices"
generate_ab = not OPTIONS.force_non_ab and ab_update
@@ -1393,7 +1417,33 @@
"build/make/target/product/security/testkey")
# Get signing keys
OPTIONS.key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
+ private_key_path = OPTIONS.package_key + OPTIONS.private_key_suffix
+ if not os.path.exists(private_key_path):
+ raise common.ExternalError(
+ "Private key {} doesn't exist. Make sure you passed the"
+ " correct key path through the -k option".format(
+ private_key_path)
+ )
+ if OPTIONS.source_info_dict:
+ source_build_prop = OPTIONS.source_info_dict["build.prop"]
+ target_build_prop = OPTIONS.target_info_dict["build.prop"]
+ source_spl = source_build_prop.GetProp(SECURITY_PATCH_LEVEL_PROP_NAME)
+ target_spl = target_build_prop.GetProp(SECURITY_PATCH_LEVEL_PROP_NAME)
+ is_spl_downgrade = target_spl < source_spl
+ if is_spl_downgrade and not OPTIONS.spl_downgrade and not OPTIONS.downgrade:
+ raise common.ExternalError(
+ "Target security patch level {} is older than source SPL {}; applying "
+ "such an OTA will likely cause the device to fail to boot. Pass "
+ "--spl_downgrade to override this check. This script expects the "
+ "security patch level to be in the format yyyy-mm-dd (e.g. 2021-02-05). "
+ "It's possible to use separators other than -, as long as they are "
+ "used consistently across all SPL dates".format(target_spl, source_spl))
+ elif not is_spl_downgrade and OPTIONS.spl_downgrade:
+ raise ValueError("--spl_downgrade specified but no actual SPL downgrade"
+ " detected. Please only pass in this flag if you want an"
+ " SPL downgrade. Target SPL: {} Source SPL: {}"
+ .format(target_spl, source_spl))
if generate_ab:
GenerateAbOtaPackage(
target_file=args[0],
diff --git a/tools/releasetools/ota_metadata.proto b/tools/releasetools/ota_metadata.proto
index 5da8b84..7aaca6f 100644
--- a/tools/releasetools/ota_metadata.proto
+++ b/tools/releasetools/ota_metadata.proto
@@ -105,4 +105,7 @@
bool retrofit_dynamic_partitions = 7;
// The required size of the cache partition, only valid for non-A/B update.
int64 required_cache = 8;
+
+ // True iff security patch level downgrade is permitted on this OTA.
+ bool spl_downgrade = 9;
}
diff --git a/tools/releasetools/ota_metadata_pb2.py b/tools/releasetools/ota_metadata_pb2.py
index 27cc930..2552464 100644
--- a/tools/releasetools/ota_metadata_pb2.py
+++ b/tools/releasetools/ota_metadata_pb2.py
@@ -1,7 +1,9 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: ota_metadata.proto
-"""Generated protocol buffer code."""
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -17,9 +19,8 @@
name='ota_metadata.proto',
package='build.tools.releasetools',
syntax='proto3',
- serialized_options=b'H\003',
- create_key=_descriptor._internal_create_key,
- serialized_pb=b'\n\x12ota_metadata.proto\x12\x18\x62uild.tools.releasetools\"X\n\x0ePartitionState\x12\x16\n\x0epartition_name\x18\x01 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x02 \x03(\t\x12\r\n\x05\x62uild\x18\x03 \x03(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"\xce\x01\n\x0b\x44\x65viceState\x12\x0e\n\x06\x64\x65vice\x18\x01 \x03(\t\x12\r\n\x05\x62uild\x18\x02 \x03(\t\x12\x19\n\x11\x62uild_incremental\x18\x03 \x01(\t\x12\x11\n\ttimestamp\x18\x04 \x01(\x03\x12\x11\n\tsdk_level\x18\x05 \x01(\t\x12\x1c\n\x14security_patch_level\x18\x06 \x01(\t\x12\x41\n\x0fpartition_state\x18\x07 \x03(\x0b\x32(.build.tools.releasetools.PartitionState\"c\n\x08\x41pexInfo\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x03\x12\x15\n\ris_compressed\x18\x03 \x01(\x08\x12\x19\n\x11\x64\x65\x63ompressed_size\x18\x04 \x01(\x03\"E\n\x0c\x41pexMetadata\x12\x35\n\tapex_info\x18\x01 \x03(\x0b\x32\".build.tools.releasetools.ApexInfo\"\x98\x04\n\x0bOtaMetadata\x12;\n\x04type\x18\x01 \x01(\x0e\x32-.build.tools.releasetools.OtaMetadata.OtaType\x12\x0c\n\x04wipe\x18\x02 \x01(\x08\x12\x11\n\tdowngrade\x18\x03 \x01(\x08\x12P\n\x0eproperty_files\x18\x04 \x03(\x0b\x32\x38.build.tools.releasetools.OtaMetadata.PropertyFilesEntry\x12;\n\x0cprecondition\x18\x05 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12<\n\rpostcondition\x18\x06 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12#\n\x1bretrofit_dynamic_partitions\x18\x07 \x01(\x08\x12\x16\n\x0erequired_cache\x18\x08 \x01(\x03\x12\x35\n\tapex_info\x18\t \x03(\x0b\x32\".build.tools.releasetools.ApexInfo\x1a\x34\n\x12PropertyFilesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"4\n\x07OtaType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02\x41\x42\x10\x01\x12\t\n\x05\x42LOCK\x10\x02\x12\t\n\x05\x42RICK\x10\x03\x42\x02H\x03\x62\x06proto3'
+ serialized_options=_b('H\003'),
+ serialized_pb=_b('\n\x12ota_metadata.proto\x12\x18\x62uild.tools.releasetools\"X\n\x0ePartitionState\x12\x16\n\x0epartition_name\x18\x01 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x02 \x03(\t\x12\r\n\x05\x62uild\x18\x03 \x03(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"\xce\x01\n\x0b\x44\x65viceState\x12\x0e\n\x06\x64\x65vice\x18\x01 \x03(\t\x12\r\n\x05\x62uild\x18\x02 \x03(\t\x12\x19\n\x11\x62uild_incremental\x18\x03 \x01(\t\x12\x11\n\ttimestamp\x18\x04 \x01(\x03\x12\x11\n\tsdk_level\x18\x05 \x01(\t\x12\x1c\n\x14security_patch_level\x18\x06 \x01(\t\x12\x41\n\x0fpartition_state\x18\x07 \x03(\x0b\x32(.build.tools.releasetools.PartitionState\"c\n\x08\x41pexInfo\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x03\x12\x15\n\ris_compressed\x18\x03 \x01(\x08\x12\x19\n\x11\x64\x65\x63ompressed_size\x18\x04 \x01(\x03\"E\n\x0c\x41pexMetadata\x12\x35\n\tapex_info\x18\x01 \x03(\x0b\x32\".build.tools.releasetools.ApexInfo\"\xf8\x03\n\x0bOtaMetadata\x12;\n\x04type\x18\x01 \x01(\x0e\x32-.build.tools.releasetools.OtaMetadata.OtaType\x12\x0c\n\x04wipe\x18\x02 \x01(\x08\x12\x11\n\tdowngrade\x18\x03 \x01(\x08\x12P\n\x0eproperty_files\x18\x04 \x03(\x0b\x32\x38.build.tools.releasetools.OtaMetadata.PropertyFilesEntry\x12;\n\x0cprecondition\x18\x05 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12<\n\rpostcondition\x18\x06 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12#\n\x1bretrofit_dynamic_partitions\x18\x07 \x01(\x08\x12\x16\n\x0erequired_cache\x18\x08 \x01(\x03\x12\x15\n\rspl_downgrade\x18\t \x01(\x08\x1a\x34\n\x12PropertyFilesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"4\n\x07OtaType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02\x41\x42\x10\x01\x12\t\n\x05\x42LOCK\x10\x02\x12\t\n\x05\x42RICK\x10\x03\x42\x02H\x03\x62\x06proto3')
)
@@ -29,33 +30,28 @@
full_name='build.tools.releasetools.OtaMetadata.OtaType',
filename=None,
file=DESCRIPTOR,
- create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=0, number=0,
serialized_options=None,
- type=None,
- create_key=_descriptor._internal_create_key),
+ type=None),
_descriptor.EnumValueDescriptor(
name='AB', index=1, number=1,
serialized_options=None,
- type=None,
- create_key=_descriptor._internal_create_key),
+ type=None),
_descriptor.EnumValueDescriptor(
name='BLOCK', index=2, number=2,
serialized_options=None,
- type=None,
- create_key=_descriptor._internal_create_key),
+ type=None),
_descriptor.EnumValueDescriptor(
name='BRICK', index=3, number=3,
serialized_options=None,
- type=None,
- create_key=_descriptor._internal_create_key),
+ type=None),
],
containing_type=None,
serialized_options=None,
- serialized_start=1004,
- serialized_end=1056,
+ serialized_start=972,
+ serialized_end=1024,
)
_sym_db.RegisterEnumDescriptor(_OTAMETADATA_OTATYPE)
@@ -66,36 +62,35 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
- create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='partition_name', full_name='build.tools.releasetools.PartitionState.partition_name', index=0,
number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='device', full_name='build.tools.releasetools.PartitionState.device', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='build', full_name='build.tools.releasetools.PartitionState.build', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version', full_name='build.tools.releasetools.PartitionState.version', index=3,
number=4, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -119,7 +114,6 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
- create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='device', full_name='build.tools.releasetools.DeviceState.device', index=0,
@@ -127,49 +121,49 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='build', full_name='build.tools.releasetools.DeviceState.build', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='build_incremental', full_name='build.tools.releasetools.DeviceState.build_incremental', index=2,
number=3, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='timestamp', full_name='build.tools.releasetools.DeviceState.timestamp', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sdk_level', full_name='build.tools.releasetools.DeviceState.sdk_level', index=4,
number=5, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='security_patch_level', full_name='build.tools.releasetools.DeviceState.security_patch_level', index=5,
number=6, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='partition_state', full_name='build.tools.releasetools.DeviceState.partition_state', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -193,36 +187,35 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
- create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='package_name', full_name='build.tools.releasetools.ApexInfo.package_name', index=0,
number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version', full_name='build.tools.releasetools.ApexInfo.version', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='is_compressed', full_name='build.tools.releasetools.ApexInfo.is_compressed', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='decompressed_size', full_name='build.tools.releasetools.ApexInfo.decompressed_size', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -246,7 +239,6 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
- create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='apex_info', full_name='build.tools.releasetools.ApexMetadata.apex_info', index=0,
@@ -254,7 +246,7 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -278,36 +270,35 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
- create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='build.tools.releasetools.OtaMetadata.PropertyFilesEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='build.tools.releasetools.OtaMetadata.PropertyFilesEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
- serialized_options=b'8\001',
+ serialized_options=_b('8\001'),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
- serialized_start=950,
- serialized_end=1002,
+ serialized_start=918,
+ serialized_end=970,
)
_OTAMETADATA = _descriptor.Descriptor(
@@ -316,7 +307,6 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
- create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='build.tools.releasetools.OtaMetadata.type', index=0,
@@ -324,63 +314,63 @@
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='wipe', full_name='build.tools.releasetools.OtaMetadata.wipe', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='downgrade', full_name='build.tools.releasetools.OtaMetadata.downgrade', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='property_files', full_name='build.tools.releasetools.OtaMetadata.property_files', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='precondition', full_name='build.tools.releasetools.OtaMetadata.precondition', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='postcondition', full_name='build.tools.releasetools.OtaMetadata.postcondition', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='retrofit_dynamic_partitions', full_name='build.tools.releasetools.OtaMetadata.retrofit_dynamic_partitions', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='required_cache', full_name='build.tools.releasetools.OtaMetadata.required_cache', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='apex_info', full_name='build.tools.releasetools.OtaMetadata.apex_info', index=8,
- number=9, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
+ name='spl_downgrade', full_name='build.tools.releasetools.OtaMetadata.spl_downgrade', index=8,
+ number=9, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -395,7 +385,7 @@
oneofs=[
],
serialized_start=520,
- serialized_end=1056,
+ serialized_end=1024,
)
_DEVICESTATE.fields_by_name['partition_state'].message_type = _PARTITIONSTATE
@@ -405,7 +395,6 @@
_OTAMETADATA.fields_by_name['property_files'].message_type = _OTAMETADATA_PROPERTYFILESENTRY
_OTAMETADATA.fields_by_name['precondition'].message_type = _DEVICESTATE
_OTAMETADATA.fields_by_name['postcondition'].message_type = _DEVICESTATE
-_OTAMETADATA.fields_by_name['apex_info'].message_type = _APEXINFO
_OTAMETADATA_OTATYPE.containing_type = _OTAMETADATA
DESCRIPTOR.message_types_by_name['PartitionState'] = _PARTITIONSTATE
DESCRIPTOR.message_types_by_name['DeviceState'] = _DEVICESTATE
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 6bbcc92..104f02f 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -39,6 +39,8 @@
METADATA_NAME = 'META-INF/com/android/metadata'
METADATA_PROTO_NAME = 'META-INF/com/android/metadata.pb'
UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*']
+SECURITY_PATCH_LEVEL_PROP_NAME = "ro.build.version.security_patch"
+
def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
"""Finalizes the metadata and signs an A/B OTA package.
@@ -168,7 +170,7 @@
build_info_set = ComputeRuntimeBuildInfos(build_info,
boot_variable_values)
assert "ab_partitions" in build_info.info_dict,\
- "ab_partitions property required for ab update."
+ "ab_partitions property required for ab update."
ab_partitions = set(build_info.info_dict.get("ab_partitions"))
# delta_generator will error out on unused timestamps,
@@ -317,6 +319,8 @@
metadata_dict['pre-build'] = separator.join(pre_build.build)
metadata_dict['pre-build-incremental'] = pre_build.build_incremental
+ if metadata_proto.spl_downgrade:
+ metadata_dict['spl-downgrade'] = 'yes'
metadata_dict.update(metadata_proto.property_files)
return metadata_dict
@@ -330,6 +334,9 @@
pre_timestamp = source_info.GetBuildProp("ro.build.date.utc")
is_downgrade = int(post_timestamp) < int(pre_timestamp)
+ if OPTIONS.spl_downgrade:
+ metadata_proto.spl_downgrade = True
+
if OPTIONS.downgrade:
if not is_downgrade:
raise RuntimeError(
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 176e258..3db5559 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -123,6 +123,17 @@
mounted on the partition (e.g. "--signing_helper /path/to/helper"). The
args will be appended to the existing ones in info dict.
+ --gki_signing_algorithm <algorithm>
+ --gki_signing_key <key>
+ Use the specified algorithm (e.g. SHA256_RSA4096) and the key to generate
+ 'boot signature' in a v4 boot.img. Otherwise it uses the existing values
+ in info dict.
+
+ --gki_signing_extra_args <args>
+ Specify any additional args that are needed to generate 'boot signature'
+ (e.g. --prop foo:bar). The args will be appended to the existing ones
+ in info dict.
+
--android_jar_path <path>
Path to the android.jar to repack the apex file.
"""
@@ -174,23 +185,38 @@
OPTIONS.avb_keys = {}
OPTIONS.avb_algorithms = {}
OPTIONS.avb_extra_args = {}
+OPTIONS.gki_signing_key = None
+OPTIONS.gki_signing_algorithm = None
+OPTIONS.gki_signing_extra_args = None
OPTIONS.android_jar_path = None
AVB_FOOTER_ARGS_BY_PARTITION = {
- 'boot' : 'avb_boot_add_hash_footer_args',
- 'dtbo' : 'avb_dtbo_add_hash_footer_args',
- 'recovery' : 'avb_recovery_add_hash_footer_args',
- 'system' : 'avb_system_add_hashtree_footer_args',
- 'system_other' : 'avb_system_other_add_hashtree_footer_args',
- 'vendor' : 'avb_vendor_add_hashtree_footer_args',
- 'vendor_boot' : 'avb_vendor_boot_add_hash_footer_args',
- 'vbmeta' : 'avb_vbmeta_args',
- 'vbmeta_system' : 'avb_vbmeta_system_args',
- 'vbmeta_vendor' : 'avb_vbmeta_vendor_args',
+ 'boot': 'avb_boot_add_hash_footer_args',
+ 'dtbo': 'avb_dtbo_add_hash_footer_args',
+ 'product': 'avb_product_add_hashtree_footer_args',
+ 'recovery': 'avb_recovery_add_hash_footer_args',
+ 'system': 'avb_system_add_hashtree_footer_args',
+ 'system_ext': 'avb_system_ext_add_hashtree_footer_args',
+ 'system_other': 'avb_system_other_add_hashtree_footer_args',
+ 'odm': 'avb_odm_add_hashtree_footer_args',
+ 'odm_dlkm': 'avb_odm_dlkm_add_hashtree_footer_args',
+ 'pvmfw': 'avb_pvmfw_add_hash_footer_args',
+ 'vendor': 'avb_vendor_add_hashtree_footer_args',
+ 'vendor_boot': 'avb_vendor_boot_add_hash_footer_args',
+ 'vendor_dlkm': "avb_vendor_dlkm_add_hashtree_footer_args",
+ 'vbmeta': 'avb_vbmeta_args',
+ 'vbmeta_system': 'avb_vbmeta_system_args',
+ 'vbmeta_vendor': 'avb_vbmeta_vendor_args',
}
+# Check that AVB_FOOTER_ARGS_BY_PARTITION is in sync with AVB_PARTITIONS.
+for partition in common.AVB_PARTITIONS:
+ if partition not in AVB_FOOTER_ARGS_BY_PARTITION:
+ raise RuntimeError("Missing {} in AVB_FOOTER_ARGS".format(partition))
+
+
def GetApkCerts(certmap):
# apply the key remapping to the contents of the file
for apk, cert in certmap.items():
@@ -329,9 +355,8 @@
"""
unknown_files = []
for info in input_tf_zip.infolist():
- # Handle APEXes first, e.g. SYSTEM/apex/com.android.tzdata.apex.
- if (info.filename.startswith('SYSTEM/apex') and
- info.filename.endswith('.apex')):
+ # Handle APEXes on all partitions
+ if info.filename.endswith('.apex'):
name = os.path.basename(info.filename)
if name not in known_keys:
unknown_files.append(name)
@@ -363,8 +388,7 @@
invalid_apexes = []
for info in input_tf_zip.infolist():
- if (not info.filename.startswith('SYSTEM/apex') or
- not info.filename.endswith('.apex')):
+ if not info.filename.endswith('.apex'):
continue
name = os.path.basename(info.filename)
@@ -445,6 +469,25 @@
return data
+def IsBuildPropFile(filename):
+ return filename in (
+ "SYSTEM/etc/prop.default",
+ "BOOT/RAMDISK/prop.default",
+ "RECOVERY/RAMDISK/prop.default",
+
+ "VENDOR_BOOT/RAMDISK/default.prop",
+ "VENDOR_BOOT/RAMDISK/prop.default",
+
+ # ROOT/default.prop is a legacy path, but may still exist for upgrading
+ # devices that don't support `property_overrides_split_enabled`.
+ "ROOT/default.prop",
+
+ # RECOVERY/RAMDISK/default.prop is a legacy path, but will always exist
+ # as a symlink in the current code. So it's a no-op here. Keeping the
+ # path here for clarity.
+ "RECOVERY/RAMDISK/default.prop") or filename.endswith("build.prop")
+
+
def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
apk_keys, apex_keys, key_passwords,
platform_api_level, codename_to_api_level_map,
@@ -497,8 +540,8 @@
" (skipped due to special cert string)" % (name,))
common.ZipWriteStr(output_tf_zip, out_info, data)
- # Sign bundled APEX files.
- elif filename.startswith("SYSTEM/apex") and filename.endswith(".apex"):
+ # Sign bundled APEX files on all partitions
+ elif filename.endswith(".apex"):
name = os.path.basename(filename)
payload_key, container_key = apex_keys[name]
@@ -528,39 +571,8 @@
" (skipped due to special cert string)" % (name,))
common.ZipWriteStr(output_tf_zip, out_info, data)
- # AVB public keys for the installed APEXes, which will be updated later.
- elif (os.path.dirname(filename) == 'SYSTEM/etc/security/apex' and
- filename != 'SYSTEM/etc/security/apex/'):
- continue
-
# System properties.
- elif filename in (
- "SYSTEM/build.prop",
-
- "VENDOR/build.prop",
- "SYSTEM/vendor/build.prop",
-
- "ODM/etc/build.prop",
- "VENDOR/odm/etc/build.prop",
-
- "PRODUCT/build.prop",
- "SYSTEM/product/build.prop",
-
- "SYSTEM_EXT/build.prop",
- "SYSTEM/system_ext/build.prop",
-
- "SYSTEM/etc/prop.default",
- "BOOT/RAMDISK/prop.default",
- "RECOVERY/RAMDISK/prop.default",
-
- # ROOT/default.prop is a legacy path, but may still exist for upgrading
- # devices that don't support `property_overrides_split_enabled`.
- "ROOT/default.prop",
-
- # RECOVERY/RAMDISK/default.prop is a legacy path, but will always exist
- # as a symlink in the current code. So it's a no-op here. Keeping the
- # path here for clarity.
- "RECOVERY/RAMDISK/default.prop"):
+ elif IsBuildPropFile(filename):
print("Rewriting %s:" % (filename,))
if stat.S_ISLNK(info.external_attr >> 16):
new_data = data
@@ -588,12 +600,7 @@
# Don't copy OTA certs if we're replacing them.
# Replacement of update-payload-key.pub.pem was removed in b/116660991.
- elif (
- OPTIONS.replace_ota_keys and
- filename in (
- "BOOT/RAMDISK/system/etc/security/otacerts.zip",
- "RECOVERY/RAMDISK/system/etc/security/otacerts.zip",
- "SYSTEM/etc/security/otacerts.zip")):
+ elif OPTIONS.replace_ota_keys and filename.endswith("/otacerts.zip"):
pass
# Skip META/misc_info.txt since we will write back the new values later.
@@ -684,6 +691,9 @@
if misc_info.get('avb_enable') == 'true':
RewriteAvbProps(misc_info)
+ # Replace the GKI signing key for boot.img, if any.
+ ReplaceGkiSigningKey(misc_info)
+
# Write back misc_info with the latest values.
ReplaceMiscInfoTxt(input_tf_zip, output_tf_zip, misc_info)
@@ -861,21 +871,12 @@
print("META/otakeys.txt has no keys; using %s for OTA package"
" verification." % (mapped_keys[0],))
- # recovery now uses the same x509.pem version of the keys.
- # extra_recovery_keys are used only in recovery.
- if misc_info.get("recovery_as_boot") == "true":
- recovery_keys_location = "BOOT/RAMDISK/system/etc/security/otacerts.zip"
- else:
- recovery_keys_location = "RECOVERY/RAMDISK/system/etc/security/otacerts.zip"
-
- WriteOtacerts(output_tf_zip, recovery_keys_location,
- mapped_keys + extra_recovery_keys)
-
- # SystemUpdateActivity uses the x509.pem version of the keys, but
- # put into a zipfile system/etc/security/otacerts.zip.
- # We DO NOT include the extra_recovery_keys (if any) here.
- WriteOtacerts(output_tf_zip, "SYSTEM/etc/security/otacerts.zip", mapped_keys)
-
+ otacerts = [info
+ for info in input_tf_zip.infolist()
+ if info.filename.endswith("/otacerts.zip")]
+ for info in otacerts:
+ print("Rewriting OTA key:", info.filename, mapped_keys)
+ WriteOtacerts(output_tf_zip, info.filename, mapped_keys)
def ReplaceVerityPublicKey(output_zip, filename, key_path):
@@ -1011,6 +1012,28 @@
misc_info[args_key] = result
+def ReplaceGkiSigningKey(misc_info):
+ """Replaces the GKI signing key."""
+
+ key = OPTIONS.gki_signing_key
+ if not key:
+ return
+
+ algorithm = OPTIONS.gki_signing_algorithm
+ if not algorithm:
+ raise ValueError("Missing --gki_signing_algorithm")
+
+ print('Replacing GKI signing key with "%s" (%s)' % (key, algorithm))
+ misc_info["gki_signing_algorithm"] = algorithm
+ misc_info["gki_signing_key_path"] = key
+
+ extra_args = OPTIONS.gki_signing_extra_args
+ if extra_args:
+ print('Setting extra GKI signing args: "%s"' % (extra_args))
+ misc_info["gki_signing_signature_args"] = (
+ misc_info.get("gki_signing_signature_args", '') + ' ' + extra_args)
+
+
def BuildKeyMap(misc_info, key_mapping_options):
for s, d in key_mapping_options:
if s is None: # -d option
@@ -1242,6 +1265,12 @@
# 'oem=--signing_helper_with_files=/tmp/avbsigner.sh'.
partition, extra_args = a.split("=", 1)
OPTIONS.avb_extra_args[partition] = extra_args
+ elif o == "--gki_signing_key":
+ OPTIONS.gki_signing_key = a
+ elif o == "--gki_signing_algorithm":
+ OPTIONS.gki_signing_algorithm = a
+ elif o == "--gki_signing_extra_args":
+ OPTIONS.gki_signing_extra_args = a
else:
return False
return True
@@ -1289,6 +1318,9 @@
"avb_extra_custom_image_key=",
"avb_extra_custom_image_algorithm=",
"avb_extra_custom_image_extra_args=",
+ "gki_signing_key=",
+ "gki_signing_algorithm=",
+ "gki_signing_extra_args=",
],
extra_option_handler=option_handler)
diff --git a/tools/releasetools/test_check_partition_sizes.py b/tools/releasetools/test_check_partition_sizes.py
index ed20873..073d229 100644
--- a/tools/releasetools/test_check_partition_sizes.py
+++ b/tools/releasetools/test_check_partition_sizes.py
@@ -27,8 +27,8 @@
dynamic_partition_list=system vendor product
super_partition_groups=group
super_group_partition_list=system vendor product
- super_partition_size=200
- super_super_device_size=200
+ super_partition_size=202
+ super_super_device_size=202
super_group_group_size=100
system_image_size=50
vendor_image_size=20
@@ -41,8 +41,8 @@
def test_non_ab(self):
self.info_dict.update(common.LoadDictionaryFromLines("""
ab_update=false
- super_partition_size=100
- super_super_device_size=100
+ super_partition_size=101
+ super_super_device_size=101
""".split("\n")))
CheckPartitionSizes(self.info_dict)
@@ -112,8 +112,8 @@
def test_vab(self):
self.info_dict.update(common.LoadDictionaryFromLines("""
virtual_ab=true
- super_partition_size=100
- super_super_device_size=100
+ super_partition_size=101
+ super_super_device_size=101
""".split("\n")))
CheckPartitionSizes(self.info_dict)
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index ecd759c..a516366 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -1670,6 +1670,127 @@
common.OPTIONS.aftl_key_path]
common.RunAndCheckOutput(verify_cmd)
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_AppendGkiSigningArgs_NoSigningKeyPath(self):
+ # A non-GKI boot.img has no gki_signing_key_path.
+ common.OPTIONS.info_dict = {
+ # 'gki_signing_key_path': pubkey,
+ 'gki_signing_algorithm': 'SHA256_RSA4096',
+ 'gki_signing_signature_args': '--prop foo:bar',
+ }
+
+ # Tests no --gki_signing_* args are appended if there is no
+ # gki_signing_key_path.
+ cmd = ['mkbootimg', '--header_version', '4']
+ expected_cmd = ['mkbootimg', '--header_version', '4']
+ common.AppendGkiSigningArgs(cmd)
+ self.assertEqual(cmd, expected_cmd)
+
+ def test_AppendGkiSigningArgs_NoSigningAlgorithm(self):
+ pubkey = os.path.join(self.testdata_dir, 'testkey_gki.pem')
+ with open(pubkey, 'wb') as f:
+ f.write(b'\x00' * 100)
+ self.assertTrue(os.path.exists(pubkey))
+
+ # Tests no --gki_signing_* args are appended if there is no
+ # gki_signing_algorithm.
+ common.OPTIONS.info_dict = {
+ 'gki_signing_key_path': pubkey,
+ # 'gki_signing_algorithm': 'SHA256_RSA4096',
+ 'gki_signing_signature_args': '--prop foo:bar',
+ }
+
+ cmd = ['mkbootimg', '--header_version', '4']
+ expected_cmd = ['mkbootimg', '--header_version', '4']
+ common.AppendGkiSigningArgs(cmd)
+ self.assertEqual(cmd, expected_cmd)
+
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_AppendGkiSigningArgs(self):
+ pubkey = os.path.join(self.testdata_dir, 'testkey_gki.pem')
+ with open(pubkey, 'wb') as f:
+ f.write(b'\x00' * 100)
+ self.assertTrue(os.path.exists(pubkey))
+
+ common.OPTIONS.info_dict = {
+ 'gki_signing_key_path': pubkey,
+ 'gki_signing_algorithm': 'SHA256_RSA4096',
+ 'gki_signing_signature_args': '--prop foo:bar',
+ }
+ cmd = ['mkbootimg', '--header_version', '4']
+ common.AppendGkiSigningArgs(cmd)
+
+ expected_cmd = [
+ 'mkbootimg', '--header_version', '4',
+ '--gki_signing_key', pubkey,
+ '--gki_signing_algorithm', 'SHA256_RSA4096',
+ '--gki_signing_signature_args', '--prop foo:bar'
+ ]
+ self.assertEqual(cmd, expected_cmd)
+
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_AppendGkiSigningArgs_KeyPathNotFound(self):
+ pubkey = os.path.join(self.testdata_dir, 'no_testkey_gki.pem')
+ self.assertFalse(os.path.exists(pubkey))
+
+ common.OPTIONS.info_dict = {
+ 'gki_signing_key_path': pubkey,
+ 'gki_signing_algorithm': 'SHA256_RSA4096',
+ 'gki_signing_signature_args': '--prop foo:bar',
+ }
+ cmd = ['mkbootimg', '--header_version', '4']
+ self.assertRaises(common.ExternalError, common.AppendGkiSigningArgs, cmd)
+
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_AppendGkiSigningArgs_SearchKeyPath(self):
+ pubkey = 'testkey_gki.pem'
+ self.assertFalse(os.path.exists(pubkey))
+
+ # Tests it should replace the pubkey with an existed key under
+ # OPTIONS.search_path, i.e., os.path.join(OPTIONS.search_path, pubkey).
+ search_path_dir = common.MakeTempDir()
+ search_pubkey = os.path.join(search_path_dir, pubkey)
+ with open(search_pubkey, 'wb') as f:
+ f.write(b'\x00' * 100)
+ self.assertTrue(os.path.exists(search_pubkey))
+
+ common.OPTIONS.search_path = search_path_dir
+ common.OPTIONS.info_dict = {
+ 'gki_signing_key_path': pubkey,
+ 'gki_signing_algorithm': 'SHA256_RSA4096',
+ 'gki_signing_signature_args': '--prop foo:bar',
+ }
+ cmd = ['mkbootimg', '--header_version', '4']
+ common.AppendGkiSigningArgs(cmd)
+
+ expected_cmd = [
+ 'mkbootimg', '--header_version', '4',
+ '--gki_signing_key', search_pubkey,
+ '--gki_signing_algorithm', 'SHA256_RSA4096',
+ '--gki_signing_signature_args', '--prop foo:bar'
+ ]
+ self.assertEqual(cmd, expected_cmd)
+
+ @test_utils.SkipIfExternalToolsUnavailable()
+ def test_AppendGkiSigningArgs_SearchKeyPathNotFound(self):
+ pubkey = 'no_testkey_gki.pem'
+ self.assertFalse(os.path.exists(pubkey))
+
+ # Tests it should raise ExternalError if no key found under
+ # OPTIONS.search_path.
+ search_path_dir = common.MakeTempDir()
+ search_pubkey = os.path.join(search_path_dir, pubkey)
+ self.assertFalse(os.path.exists(search_pubkey))
+
+ common.OPTIONS.search_path = search_path_dir
+ common.OPTIONS.info_dict = {
+ 'gki_signing_key_path': pubkey,
+ 'gki_signing_algorithm': 'SHA256_RSA4096',
+ 'gki_signing_signature_args': '--prop foo:bar',
+ }
+ cmd = ['mkbootimg', '--header_version', '4']
+ self.assertRaises(common.ExternalError, common.AppendGkiSigningArgs, cmd)
+
class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
"""Checks the format of install-recovery.sh.
diff --git a/tools/releasetools/test_merge_target_files.py b/tools/releasetools/test_merge_target_files.py
index 7ea7f96..072bb01 100644
--- a/tools/releasetools/test_merge_target_files.py
+++ b/tools/releasetools/test_merge_target_files.py
@@ -18,12 +18,11 @@
import common
import test_utils
-from merge_target_files import (validate_config_lists,
- DEFAULT_FRAMEWORK_ITEM_LIST,
- DEFAULT_VENDOR_ITEM_LIST,
- DEFAULT_FRAMEWORK_MISC_INFO_KEYS, copy_items,
- item_list_to_partition_set,
- process_apex_keys_apk_certs_common)
+from merge_target_files import (
+ validate_config_lists, DEFAULT_FRAMEWORK_ITEM_LIST,
+ DEFAULT_VENDOR_ITEM_LIST, DEFAULT_FRAMEWORK_MISC_INFO_KEYS, copy_items,
+ item_list_to_partition_set, process_apex_keys_apk_certs_common,
+ compile_split_sepolicy)
class MergeTargetFilesTest(test_utils.ReleaseToolsTestCase):
@@ -235,3 +234,43 @@
]
partition_set = item_list_to_partition_set(item_list)
self.assertEqual(set(['product', 'system', 'system_ext']), partition_set)
+
+ def test_compile_split_sepolicy(self):
+ product_out_dir = common.MakeTempDir()
+
+ def write_temp_file(path, data=''):
+ full_path = os.path.join(product_out_dir, path)
+ if not os.path.exists(os.path.dirname(full_path)):
+ os.makedirs(os.path.dirname(full_path))
+ with open(full_path, 'w') as f:
+ f.write(data)
+
+ write_temp_file(
+ 'system/etc/vintf/compatibility_matrix.device.xml', """
+ <compatibility-matrix>
+ <sepolicy>
+ <kernel-sepolicy-version>30</kernel-sepolicy-version>
+ </sepolicy>
+ </compatibility-matrix>""")
+ write_temp_file('vendor/etc/selinux/plat_sepolicy_vers.txt', '30.0')
+
+ write_temp_file('system/etc/selinux/plat_sepolicy.cil')
+ write_temp_file('system/etc/selinux/mapping/30.0.cil')
+ write_temp_file('product/etc/selinux/mapping/30.0.cil')
+ write_temp_file('vendor/etc/selinux/vendor_sepolicy.cil')
+ write_temp_file('vendor/etc/selinux/plat_pub_versioned.cil')
+
+ cmd = compile_split_sepolicy(product_out_dir, {
+ 'system': 'system',
+ 'product': 'product',
+ 'vendor': 'vendor',
+ }, os.path.join(product_out_dir, 'policy'))
+ self.assertEqual(' '.join(cmd),
+ ('secilc -m -M true -G -N -c 30 '
+ '-o {OTP}/policy -f /dev/null '
+ '{OTP}/system/etc/selinux/plat_sepolicy.cil '
+ '{OTP}/system/etc/selinux/mapping/30.0.cil '
+ '{OTP}/vendor/etc/selinux/vendor_sepolicy.cil '
+ '{OTP}/vendor/etc/selinux/plat_pub_versioned.cil '
+ '{OTP}/product/etc/selinux/mapping/30.0.cil').format(
+ OTP=product_out_dir))
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index b556b3a..9f64849 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -33,7 +33,7 @@
GetTargetFilesZipWithoutPostinstallConfig,
Payload, PayloadSigner, POSTINSTALL_CONFIG,
StreamingPropertyFiles, AB_PARTITIONS)
-from apex_utils import GetApexInfoFromTargetFiles
+from apex_utils import GetSystemApexInfoFromTargetFiles
from test_utils import PropertyFilesTestCase
@@ -281,20 +281,20 @@
metadata)
@test_utils.SkipIfExternalToolsUnavailable()
- def test_GetApexInfoFromTargetFiles(self):
+ def test_GetSystemApexInfoFromTargetFiles(self):
target_files = construct_target_files(compressedApex=True)
- apex_infos = GetApexInfoFromTargetFiles(target_files)
+ apex_infos = GetSystemApexInfoFromTargetFiles(target_files)
self.assertEqual(len(apex_infos), 1)
self.assertEqual(apex_infos[0].package_name, "com.android.apex.compressed")
self.assertEqual(apex_infos[0].version, 1)
self.assertEqual(apex_infos[0].is_compressed, True)
# Compare the decompressed APEX size with the original uncompressed APEX
original_apex_name = 'com.android.apex.compressed.v1_original.apex'
- original_apex_filepath = os.path.join(test_utils.get_current_dir(), original_apex_name)
+ original_apex_filepath = os.path.join(
+ test_utils.get_current_dir(), original_apex_name)
uncompressed_apex_size = os.path.getsize(original_apex_filepath)
self.assertEqual(apex_infos[0].decompressed_size, uncompressed_apex_size)
-
def test_GetPackageMetadata_retrofitDynamicPartitions(self):
target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
common.OPTIONS.retrofit_dynamic_partitions = True
@@ -343,7 +343,10 @@
common.OPTIONS.incremental_source = ''
common.OPTIONS.downgrade = True
common.OPTIONS.wipe_user_data = True
+ common.OPTIONS.spl_downgrade = True
metadata = self.GetLegacyOtaMetadata(target_info, source_info)
+ # Reset spl_downgrade so other tests are unaffected
+ common.OPTIONS.spl_downgrade = False
self.assertDictEqual(
{
@@ -359,6 +362,7 @@
'pre-device': 'product-device',
'pre-build': 'build-fingerprint-source',
'pre-build-incremental': 'build-version-incremental-source',
+ 'spl-downgrade': 'yes',
},
metadata)
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 18e4858..64e27a2 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -23,8 +23,8 @@
import test_utils
from sign_target_files_apks import (
CheckApkAndApexKeysAvailable, EditTags, GetApkFileInfo, ReadApexKeysInfo,
- ReplaceCerts, ReplaceVerityKeyId, RewriteAvbProps, RewriteProps,
- WriteOtacerts)
+ ReplaceCerts, ReplaceGkiSigningKey, ReplaceVerityKeyId, RewriteAvbProps,
+ RewriteProps, WriteOtacerts)
class SignTargetFilesApksTest(test_utils.ReleaseToolsTestCase):
@@ -588,3 +588,52 @@
'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
'build/make/target/product/security/testkey'),
}, keys_info)
+
+ def test_ReplaceGkiSigningKey(self):
+ common.OPTIONS.gki_signing_key = 'release_gki_key'
+ common.OPTIONS.gki_signing_algorithm = 'release_gki_algorithm'
+ common.OPTIONS.gki_signing_extra_args = 'release_gki_signature_extra_args'
+
+ misc_info = {
+ 'gki_signing_key_path': 'default_gki_key',
+ 'gki_signing_algorithm': 'default_gki_algorithm',
+ 'gki_signing_signature_args': 'default_gki_signature_args',
+ }
+ expected_dict = {
+ 'gki_signing_key_path': 'release_gki_key',
+ 'gki_signing_algorithm': 'release_gki_algorithm',
+ 'gki_signing_signature_args': 'default_gki_signature_args release_gki_signature_extra_args',
+ }
+ ReplaceGkiSigningKey(misc_info)
+ self.assertDictEqual(expected_dict, misc_info)
+
+ def test_ReplaceGkiSigningKey_MissingSigningAlgorithm(self):
+ common.OPTIONS.gki_signing_key = 'release_gki_key'
+ common.OPTIONS.gki_signing_algorithm = None
+ common.OPTIONS.gki_signing_extra_args = 'release_gki_signature_extra_args'
+
+ misc_info = {
+ 'gki_signing_key_path': 'default_gki_key',
+ 'gki_signing_algorithm': 'default_gki_algorithm',
+ 'gki_signing_signature_args': 'default_gki_signature_args',
+ }
+ self.assertRaises(ValueError, ReplaceGkiSigningKey, misc_info)
+
+ def test_ReplaceGkiSigningKey_MissingSigningKeyNop(self):
+ common.OPTIONS.gki_signing_key = None
+ common.OPTIONS.gki_signing_algorithm = 'release_gki_algorithm'
+ common.OPTIONS.gki_signing_extra_args = 'release_gki_signature_extra_args'
+
+ # No change to misc_info if common.OPTIONS.gki_signing_key is missing.
+ misc_info = {
+ 'gki_signing_key_path': 'default_gki_key',
+ 'gki_signing_algorithm': 'default_gki_algorithm',
+ 'gki_signing_signature_args': 'default_gki_signature_args',
+ }
+ expected_dict = {
+ 'gki_signing_key_path': 'default_gki_key',
+ 'gki_signing_algorithm': 'default_gki_algorithm',
+ 'gki_signing_signature_args': 'default_gki_signature_args',
+ }
+ ReplaceGkiSigningKey(misc_info)
+ self.assertDictEqual(expected_dict, misc_info)
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index fc83689..8faa2d1 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -14,6 +14,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+"""
+Signs a given image using avbtool
+
+Usage: verity_utils properties_file output_image
+"""
+
from __future__ import print_function
import logging
@@ -31,6 +37,9 @@
BLOCK_SIZE = common.BLOCK_SIZE
FIXED_SALT = "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
+# From external/avb/avbtool.py
+MAX_VBMETA_SIZE = 64 * 1024
+MAX_FOOTER_SIZE = 4096
class BuildVerityImageError(Exception):
"""An Exception raised during verity image building."""
@@ -714,3 +723,55 @@
signing_args)
return builder
+
+
+def GetDiskUsage(path):
+ """Returns the number of bytes that "path" occupies on host.
+
+ Args:
+ path: The directory or file to calculate size on.
+
+ Returns:
+ The number of bytes based on a 1K block_size.
+ """
+ cmd = ["du", "-b", "-k", "-s", path]
+ output = common.RunAndCheckOutput(cmd, verbose=False)
+ return int(output.split()[0]) * 1024
+
+
+def main(argv):
+ if len(argv) != 2:
+ print(__doc__)
+ sys.exit(1)
+
+ common.InitLogging()
+
+ dict_file = argv[0]
+ out_file = argv[1]
+
+ prop_dict = {}
+ with open(dict_file, 'r') as f:
+ for line in f:
+ line = line.strip()
+ if not line or line.startswith("#"):
+ continue
+ k, v = line.split("=", 1)
+ prop_dict[k] = v
+
+ builder = CreateVerityImageBuilder(prop_dict)
+
+ if "partition_size" not in prop_dict:
+ image_size = GetDiskUsage(out_file)
+ # make sure that the image is big enough to hold vbmeta and footer
+ image_size = image_size + (MAX_VBMETA_SIZE + MAX_FOOTER_SIZE)
+ size = builder.CalculateDynamicPartitionSize(image_size)
+ prop_dict["partition_size"] = size
+
+ builder.Build(out_file)
+
+
+if __name__ == '__main__':
+ try:
+ main(sys.argv[1:])
+ finally:
+ common.Cleanup()
diff --git a/tools/signapk/Android.bp b/tools/signapk/Android.bp
index b90f010..bee6a6f 100644
--- a/tools/signapk/Android.bp
+++ b/tools/signapk/Android.bp
@@ -16,6 +16,10 @@
// the signapk tool (a .jar application used to sign packages)
// ============================================================
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
java_binary_host {
name: "signapk",
srcs: ["src/**/*.java"],
diff --git a/tools/signtos/Android.bp b/tools/signtos/Android.bp
index 688e7b8..cd41acc 100644
--- a/tools/signtos/Android.bp
+++ b/tools/signtos/Android.bp
@@ -16,6 +16,10 @@
// the signtos tool - signs Trusty images
// ============================================================
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
java_library_host {
name: "signtos",
srcs: ["SignTos.java"],
diff --git a/tools/test_extract_kernel.py b/tools/test_extract_kernel.py
index 1a1cfcb..002e387 100644
--- a/tools/test_extract_kernel.py
+++ b/tools/test_extract_kernel.py
@@ -15,16 +15,16 @@
# limitations under the License.
import unittest
-from extract_kernel import get_version, dump_version
+from extract_kernel import dump_version
class ExtractKernelTest(unittest.TestCase):
def test_extract_version(self):
- self.assertEqual("4.9.100", get_version(
- b'Linux version 4.9.100-a123 (a@a) (a) a\n\x00', 0))
- self.assertEqual("4.9.123", get_version(
- b'Linux version 4.9.123 (@) () \n\x00', 0))
+ self.assertEqual("4.9.100", dump_version(
+ b'Linux version 4.9.100-a123 (a@a) (a) a\n\x00'))
+ self.assertEqual("4.9.123", dump_version(
+ b'Linux version 4.9.123 (@) () \n\x00'))
def test_dump_self(self):
self.assertEqual("4.9.1", dump_version(
b"trash\x00Linux version 4.8.8\x00trash\x00"
- "other trash Linux version 4.9.1-g3 (2@s) (2) a\n\x00"))
+ b"other trash Linux version 4.9.1-g3 (2@s) (2) a\n\x00"))
diff --git a/tools/test_post_process_props.py b/tools/test_post_process_props.py
index 12d52e5..dd5f8ec 100644
--- a/tools/test_post_process_props.py
+++ b/tools/test_post_process_props.py
@@ -53,7 +53,7 @@
p.make_as_comment()
self.assertTrue(p.is_comment())
- self.assertTrue("# a comment\n#a=b", str(p))
+ self.assertEqual("# a comment\n#a=b", str(p))
class PropListTestcase(unittest.TestCase):
def setUp(self):
@@ -251,5 +251,37 @@
# because it's explicitly allowed
self.assertTrue(override_optional_props(props, allow_dup=True))
+ def test_validateGrfProps(self):
+ stderr_redirect = io.StringIO()
+ with contextlib.redirect_stderr(stderr_redirect):
+ props = PropList("hello")
+ props.put("ro.board.first_api_level","25")
+
+ # ro.board.first_api_level must be less than or equal to the sdk version
+ self.assertFalse(validate_and_add_grf_props(props, 20))
+ self.assertTrue(validate_and_add_grf_props(props, 26))
+ # ro.board.api_level is automatically set
+ self.assertEqual(props.get_value("ro.board.api_level"), "25")
+
+ props.get_all_props()[-1].make_as_comment()
+ self.assertTrue(validate_and_add_grf_props(props, 35))
+ # ro.board.api_level is automatically set to the required GRF version
+ self.assertEqual(props.get_value("ro.board.api_level"), "33")
+
+ props.get_all_props()[-1].make_as_comment()
+ # manually set ro.board.api_level to an invalid value
+ props.put("ro.board.api_level","20")
+ self.assertFalse(validate_and_add_grf_props(props, 26))
+
+ props.get_all_props()[-1].make_as_comment()
+ # manually set ro.board.api_level to a valid value
+ props.put("ro.board.api_level","26")
+ self.assertTrue(validate_and_add_grf_props(props, 26))
+ # ro.board.api_level must be less than or equal to the sdk version
+ self.assertFalse(validate_and_add_grf_props(props, 25))
+ # ro.board.api_level must be greater than or equal to the required GRF
+ # version
+ self.assertFalse(validate_and_add_grf_props(props, 30))
+
if __name__ == '__main__':
unittest.main(verbosity=2)
diff --git a/tools/zipalign/Android.bp b/tools/zipalign/Android.bp
index 1ebf4eb..8cab04c 100644
--- a/tools/zipalign/Android.bp
+++ b/tools/zipalign/Android.bp
@@ -4,6 +4,10 @@
// Zip alignment tool
//
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
cc_defaults {
name: "zipalign_defaults",
target: {
diff --git a/tools/zipalign/ZipAlignMain.cpp b/tools/zipalign/ZipAlignMain.cpp
index 49be916..47ebd12 100644
--- a/tools/zipalign/ZipAlignMain.cpp
+++ b/tools/zipalign/ZipAlignMain.cpp
@@ -39,7 +39,7 @@
" <align>: alignment in bytes, e.g. '4' provides 32-bit alignment\n");
fprintf(stderr, " -c: check alignment only (does not modify file)\n");
fprintf(stderr, " -f: overwrite existing outfile.zip\n");
- fprintf(stderr, " -p: memory page alignment for stored shared object files\n");
+ fprintf(stderr, " -p: page-align uncompressed .so files\n");
fprintf(stderr, " -v: verbose output\n");
fprintf(stderr, " -z: recompress using Zopfli\n");
}
diff --git a/tools/ziptime/Android.bp b/tools/ziptime/Android.bp
index 5ef45ed..fa46b30 100644
--- a/tools/ziptime/Android.bp
+++ b/tools/ziptime/Android.bp
@@ -18,6 +18,10 @@
// Zip timestamp removal tool
//
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
cc_binary_host {
srcs: [