Merge "mgrep: add output to environment commands"
diff --git a/core/binary.mk b/core/binary.mk
index 116e261..c8406d8 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -201,6 +201,8 @@
b_lib :=
endif
+my_linker := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LINKER)
+
include $(BUILD_SYSTEM)/config_sanitizers.mk
ifeq ($(strip $($(LOCAL_2ND_ARCH_VAR_PREFIX)WITHOUT_$(my_prefix)CLANG)),true)
@@ -361,6 +363,7 @@
my_cxx := $(SYNTAX_TOOLS_PREFIX)/cxx-syntax $(my_syntax_arch) "$(my_cxx)"
endif
endif
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_LINKER := $(my_linker)
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CXX := $(my_cxx)
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CLANG := $(my_clang)
diff --git a/core/clang/TARGET_arm.mk b/core/clang/TARGET_arm.mk
index cd25604..5ac5477 100644
--- a/core/clang/TARGET_arm.mk
+++ b/core/clang/TARGET_arm.mk
@@ -70,3 +70,4 @@
# Address sanitizer clang config
$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_RUNTIME_LIBRARY := libclang_rt.asan-arm-android
$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_RPATH := /data/vendor/lib:/$(TARGET_COPY_OUT_VENDOR)/lib:/data/lib
+$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan
diff --git a/core/clang/TARGET_arm64.mk b/core/clang/TARGET_arm64.mk
index 0861568..5120a6d 100644
--- a/core/clang/TARGET_arm64.mk
+++ b/core/clang/TARGET_arm64.mk
@@ -68,3 +68,4 @@
# Address sanitizer clang config
ADDRESS_SANITIZER_RUNTIME_LIBRARY := libclang_rt.asan-arm64-android
ADDRESS_SANITIZER_RPATH := /data/vendor/lib64:/$(TARGET_COPY_OUT_VENDOR)/lib64:/data/lib64
+ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index b43b1cc..c28fff1 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -85,12 +85,14 @@
my_ldlibs += -lm -lpthread
my_ldflags += -Wl,--no-as-needed
else
+ my_cflags += -mllvm -asan-globals=0
# ASan runtime library must be the first in the link order.
my_shared_libraries := $($(LOCAL_2ND_ARCH_VAR_PREFIX)ADDRESS_SANITIZER_RUNTIME_LIBRARY) \
$(my_shared_libraries) \
$(ADDRESS_SANITIZER_CONFIG_EXTRA_SHARED_LIBRARIES)
my_static_libraries += $(ADDRESS_SANITIZER_CONFIG_EXTRA_STATIC_LIBRARIES)
my_ldflags += -Wl,-rpath,$($(LOCAL_2ND_ARCH_VAR_PREFIX)ADDRESS_SANITIZER_RPATH)
+ my_linker := $($(LOCAL_2ND_ARCH_VAR_PREFIX)ADDRESS_SANITIZER_LINKER)
endif
endif
diff --git a/core/definitions.mk b/core/definitions.mk
index 442501e..1988cc1 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -1510,7 +1510,7 @@
define transform-o-to-executable-inner
$(hide) $(PRIVATE_CXX) -pie \
-nostdlib -Bdynamic \
- -Wl,-dynamic-linker,$($(PRIVATE_2ND_ARCH_VAR_PREFIX)TARGET_LINKER) \
+ -Wl,-dynamic-linker,$(PRIVATE_LINKER) \
-Wl,--gc-sections \
-Wl,-z,nocopyreloc \
$(PRIVATE_TARGET_GLOBAL_LD_DIRS) \
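
Taken together, the binary.mk, config_sanitizers.mk and definitions.mk changes plumb a per-module PRIVATE_LINKER value into the final link command, so ASan-instrumented executables get /system/bin/linker_asan (or linker_asan64) as their dynamic linker instead of the default. A minimal Python sketch of the selection logic follows; the function name and the /system/bin/linker default are assumptions for illustration only (in the build system the default comes from TARGET_LINKER and the override from ADDRESS_SANITIZER_LINKER via my_linker/PRIVATE_LINKER):

    # Illustrative sketch, not part of the patch.
    def pick_dynamic_linker(arch, address_sanitizer_enabled,
                            default_linker="/system/bin/linker"):
        # Per-arch ASan linkers, as defined in clang/TARGET_arm.mk and
        # clang/TARGET_arm64.mk above.
        asan_linkers = {
            "arm": "/system/bin/linker_asan",
            "arm64": "/system/bin/linker_asan64",
        }
        if address_sanitizer_enabled and arch in asan_linkers:
            return asan_linkers[arch]
        return default_linker

    # e.g. the -Wl,-dynamic-linker argument for an ASan arm64 executable:
    print("-Wl,-dynamic-linker,%s" % pick_dynamic_linker("arm64", True))
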
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index 6ed9ca2..a007f81 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -16,6 +16,7 @@
from collections import deque, OrderedDict
from hashlib import sha1
+import common
import heapq
import itertools
import multiprocessing
@@ -460,9 +461,20 @@
if free_string:
out.append("".join(free_string))
- # sanity check: abort if we're going to need more than 512 MB if
- # stash space
- assert max_stashed_blocks * self.tgt.blocksize < (512 << 20)
+ if self.version >= 2:
+ # Sanity check: abort if we're going to need more stash space than
+ # the allowed size (cache_size * threshold). There are two reasons for
+ # having a threshold here: a) part of the cache may already be occupied
+ # by recovery logs; b) it buys us some time to deal with oversized
+ # stashes.
+ cache_size = common.OPTIONS.cache_size
+ stash_threshold = common.OPTIONS.stash_threshold
+ max_allowed = cache_size * stash_threshold
+ assert max_stashed_blocks * self.tgt.blocksize < max_allowed, \
+ 'Stash size %d (%d * %d) exceeds the limit %d (%d * %.2f)' % (
+ max_stashed_blocks * self.tgt.blocksize, max_stashed_blocks,
+ self.tgt.blocksize, max_allowed, cache_size,
+ stash_threshold)
# Zero out extended blocks as a workaround for bug 20881595.
if self.tgt.extended:
@@ -489,8 +501,11 @@
f.write(i)
if self.version >= 2:
- print("max stashed blocks: %d (%d bytes)\n" % (
- max_stashed_blocks, max_stashed_blocks * self.tgt.blocksize))
+ max_stashed_size = max_stashed_blocks * self.tgt.blocksize
+ max_allowed = common.OPTIONS.cache_size * common.OPTIONS.stash_threshold
+ print("max stashed blocks: %d (%d bytes), limit: %d bytes (%.2f%%)\n" % (
+ max_stashed_blocks, max_stashed_size, max_allowed,
+ max_stashed_size * 100.0 / max_allowed))
def ComputePatches(self, prefix):
print("Reticulating splines...")
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 8fe4864..531b432 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -84,6 +84,9 @@
Specifies the number of worker-threads that will be used when
generating patches for incremental updates (defaults to 3).
+ --stash_threshold <float>
+ Specifies the threshold that will be used to compute the maximum
+ allowed stash size (defaults to 0.8).
"""
import sys
@@ -122,6 +125,10 @@
OPTIONS.oem_source = None
OPTIONS.fallback_to_full = True
OPTIONS.full_radio = False
+# Stash size cannot exceed cache_size * threshold.
+OPTIONS.cache_size = None
+OPTIONS.stash_threshold = 0.8
+
def MostPopularKey(d, default):
"""Given a dict, return the key corresponding to the largest
@@ -1504,6 +1511,12 @@
OPTIONS.updater_binary = a
elif o in ("--no_fallback_to_full",):
OPTIONS.fallback_to_full = False
+ elif o == "--stash_threshold":
+ try:
+ OPTIONS.stash_threshold = float(a)
+ except ValueError:
+ raise ValueError("Cannot parse value %r for option %r - expecting "
+ "a float" % (a, o))
else:
return False
return True
@@ -1527,6 +1540,7 @@
"oem_settings=",
"verify",
"no_fallback_to_full",
+ "stash_threshold=",
], extra_option_handler=option_handler)
if len(args) != 2:
@@ -1576,6 +1590,11 @@
output_zip = zipfile.ZipFile(temp_zip_file, "w",
compression=zipfile.ZIP_DEFLATED)
+ cache_size = OPTIONS.info_dict.get("cache_size", None)
+ if cache_size is None:
+ raise RuntimeError("can't determine the cache partition size")
+ OPTIONS.cache_size = cache_size
+
if OPTIONS.incremental_source is None:
WriteFullOTAPackage(input_zip, output_zip)
if OPTIONS.package_key is None:
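
On the ota_from_target_files.py side, the two inputs to that limit are wired up: --stash_threshold sets OPTIONS.stash_threshold (default 0.8), and OPTIONS.cache_size is taken from the target-files' cache_size property, with package generation aborting if the property is missing. A small sketch of that flow, assuming a dict-like info_dict and an options object with the same attribute names; the class and helper names are illustrative only:

    class _Options(object):
        # Mirrors the two OPTIONS fields added by the patch.
        cache_size = None
        stash_threshold = 0.8

    def parse_stash_threshold(a, o="--stash_threshold"):
        try:
            return float(a)
        except ValueError:
            raise ValueError("Cannot parse value %r for option %r - expecting "
                             "a float" % (a, o))

    def stash_limit(options, info_dict, stash_threshold_arg=None):
        if stash_threshold_arg is not None:
            options.stash_threshold = parse_stash_threshold(stash_threshold_arg)
        cache_size = info_dict.get("cache_size", None)
        if cache_size is None:
            raise RuntimeError("can't determine the cache partition size")
        options.cache_size = cache_size
        # The value blockimgdiff.py compares max_stashed_size against.
        return options.cache_size * options.stash_threshold

    # Example: a 256 MiB cache partition with the default 0.8 threshold.
    print("stash limit: %d bytes" % stash_limit(_Options(), {"cache_size": 256 << 20}))
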