Merge "Fix reference to uncleared variable in dex_preopt_odex_install.mk"
diff --git a/core/Makefile b/core/Makefile
index 38302a7..b70948b 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -5956,7 +5956,7 @@
FUZZ_SHARED_DEPS := $(call copy-many-files,$(strip $(FUZZ_TARGET_SHARED_DEPS_INSTALL_PAIRS)))
# -----------------------------------------------------------------
-# The rule to build all fuzz targets, and package them.
+# The rule to build all fuzz targets for C++ and Rust, and package them.
# Note: The packages are created in Soong, and in a perfect world,
# we'd be able to create the phony rule there. But, if we want to
# have dist goals for the fuzz target, we need to have the PHONY
@@ -5968,3 +5968,7 @@
.PHONY: haiku
haiku: $(SOONG_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_FUZZ_TARGETS)
$(call dist-for-goals,haiku,$(SOONG_FUZZ_PACKAGING_ARCH_MODULES))
+
+.PHONY: haiku-rust
+haiku-rust: $(SOONG_RUST_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_RUST_FUZZ_TARGETS)
+$(call dist-for-goals,haiku-rust,$(SOONG_RUST_FUZZ_PACKAGING_ARCH_MODULES))
diff --git a/core/dynamic_binary.mk b/core/dynamic_binary.mk
index 5120e7e..a9b3720 100644
--- a/core/dynamic_binary.mk
+++ b/core/dynamic_binary.mk
@@ -132,12 +132,16 @@
ifneq (,$(my_strip_module))
$(strip_output): PRIVATE_STRIP_ARGS := $(my_strip_args)
$(strip_output): PRIVATE_TOOLS_PREFIX := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)TOOLS_PREFIX)
- $(strip_output): $(strip_input) $(SOONG_STRIP_PATH)
+ $(strip_output): $(strip_input) $(SOONG_STRIP_PATH) $(XZ)
@echo "$($(PRIVATE_PREFIX)DISPLAY) Strip: $(PRIVATE_MODULE) ($@)"
CLANG_BIN=$(LLVM_PREBUILTS_PATH) \
CROSS_COMPILE=$(PRIVATE_TOOLS_PREFIX) \
XZ=$(XZ) \
+ CREATE_MINIDEBUGINFO=${CREATE_MINIDEBUGINFO} \
$(SOONG_STRIP_PATH) -i $< -o $@ -d $@.strip.d $(PRIVATE_STRIP_ARGS)
+ ifneq ($(HOST_OS),darwin)
+ $(strip_output): $(CREATE_MINIDEBUGINFO)
+ endif
$(call include-depfile,$(strip_output).strip.d,$(strip_output))
else
# Don't strip the binary, just copy it. We can't skip this step
diff --git a/envsetup.sh b/envsetup.sh
index e08fb03..8a995c7 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -1458,13 +1458,17 @@
# Verifies that module-info.txt exists, creating it if it doesn't.
function verifymodinfo() {
if [ ! "$ANDROID_PRODUCT_OUT" ]; then
- echo "No ANDROID_PRODUCT_OUT. Try running 'lunch' first." >&2
+ if [ "$QUIET_VERIFYMODINFO" != "true" ] ; then
+ echo "No ANDROID_PRODUCT_OUT. Try running 'lunch' first." >&2
+ fi
return 1
fi
if [ ! -f "$ANDROID_PRODUCT_OUT/module-info.json" ]; then
- echo "Could not find module-info.json. It will only be built once, and it can be updated with 'refreshmod'" >&2
- refreshmod || return 1
+ if [ "$QUIET_VERIFYMODINFO" != "true" ] ; then
+ echo "Could not find module-info.json. It will only be built once, and it can be updated with 'refreshmod'" >&2
+ fi
+ return 1
fi
}
@@ -1602,7 +1606,7 @@
function _complete_android_module_names() {
local word=${COMP_WORDS[COMP_CWORD]}
- COMPREPLY=( $(allmod | grep -E "^$word") )
+ COMPREPLY=( $(QUIET_VERIFYMODINFO=true allmod | grep -E "^$word") )
}
# Print colored exit condition
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index a2150ad..c577870 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -41,6 +41,10 @@
BOARD_AVB_SYSTEM_ALGORITHM := SHA256_RSA2048
BOARD_AVB_SYSTEM_ROLLBACK_INDEX := $(PLATFORM_SECURITY_PATCH_TIMESTAMP)
BOARD_AVB_SYSTEM_ROLLBACK_INDEX_LOCATION := 1
+
+# Using sha256 for dm-verity partitions. b/156162446
+BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
+
ifdef BUILDING_GSI
# super.img spec for GSI targets
BOARD_SUPER_PARTITION_SIZE := 3229614080
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 25fa68b..82af45f 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -42,6 +42,9 @@
# Enable dynamic partition size
PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
+# Enable various debugfs restrictions
+PRODUCT_SET_DEBUGFS_RESTRICTIONS := true
+
# GSI targets should install "unflattened" APEXes in /system
TARGET_FLATTEN_APEX := false
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 301d0da..2492da9 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -386,13 +386,14 @@
in_dir, du_str,
int(prop_dict.get("partition_reserved_size", 0)),
int(prop_dict.get("partition_reserved_size", 0)) // BYTES_IN_MB))
- print(
- "The max image size for filesystem files is {} bytes ({} MB), out of a "
- "total partition size of {} bytes ({} MB).".format(
- int(prop_dict["image_size"]),
- int(prop_dict["image_size"]) // BYTES_IN_MB,
- int(prop_dict["partition_size"]),
- int(prop_dict["partition_size"]) // BYTES_IN_MB))
+ if ("image_size" in prop_dict and "partition_size" in prop_dict):
+ print(
+ "The max image size for filesystem files is {} bytes ({} MB), "
+ "out of a total partition size of {} bytes ({} MB).".format(
+ int(prop_dict["image_size"]),
+ int(prop_dict["image_size"]) // BYTES_IN_MB,
+ int(prop_dict["partition_size"]),
+ int(prop_dict["partition_size"]) // BYTES_IN_MB))
raise
if run_e2fsck and prop_dict.get("skip_fsck") != "true":
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index abbcfa0..61c8212 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -215,6 +215,12 @@
--disable_vabc
Disable Virtual A/B Compression, for builds that have compression enabled
by default.
+
+ --vabc_downgrade
+ Don't disable Virtual A/B Compression for downgrading OTAs.
+ For VABC downgrades, we must finish merging before doing data wipe, and
+ since data wipe is required for downgrading OTA, this might cause a long
+ wait time in recovery.
"""
from __future__ import print_function
@@ -278,6 +284,7 @@
OPTIONS.custom_images = {}
OPTIONS.disable_vabc = False
OPTIONS.spl_downgrade = False
+OPTIONS.vabc_downgrade = False
POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
@@ -1284,6 +1291,8 @@
elif o == "--spl_downgrade":
OPTIONS.spl_downgrade = True
OPTIONS.wipe_user_data = True
+ elif o == "--vabc_downgrade":
+ OPTIONS.vabc_downgrade = True
else:
return False
return True
@@ -1326,7 +1335,8 @@
"partial=",
"custom_image=",
"disable_vabc",
- "spl_downgrade"
+ "spl_downgrade",
+ "vabc_downgrade",
], extra_option_handler=option_handler)
if len(args) != 2:
@@ -1347,7 +1357,14 @@
else:
OPTIONS.info_dict = ParseInfoDict(args[0])
- if OPTIONS.downgrade:
+ if OPTIONS.wipe_user_data:
+ if not OPTIONS.vabc_downgrade:
+ logger.info("Detected downgrade/datawipe OTA. "
+ "When wiping userdata, VABC OTA makes the user "
+ "wait in recovery mode for merge to finish. Disable VABC by "
+ "default. If you really want to do VABC downgrade, pass "
+ "--vabc_downgrade")
+ OPTIONS.disable_vabc = True
# We should only allow downgrading incrementals (as opposed to full).
# Otherwise the device may go back from arbitrary build with this full
# OTA package.
diff --git a/tools/warn/.pylintrc b/tools/warn/.pylintrc
new file mode 100644
index 0000000..6aeaed6
--- /dev/null
+++ b/tools/warn/.pylintrc
@@ -0,0 +1,4 @@
+[FORMAT]
+
+# Two spaces for each indentation level.
+indent-string='  '
diff --git a/tools/warn/html_writer.py b/tools/warn/html_writer.py
index bed25ed..ac5d4b7 100644
--- a/tools/warn/html_writer.py
+++ b/tools/warn/html_writer.py
@@ -144,12 +144,11 @@
2D warnings array where warnings[p][s] is # of warnings in project name p of
severity level s
"""
- # pylint:disable=invalid-name
warnings = {p: {s.value: 0 for s in Severity.levels} for p in project_names}
- for i in warn_patterns:
- s = i['severity'].value
- for p in i['projects']:
- warnings[p][s] += i['projects'][p]
+ for pattern in warn_patterns:
+ value = pattern['severity'].value
+ for project in pattern['projects']:
+ warnings[project][value] += pattern['projects'][project]
return warnings
@@ -173,11 +172,11 @@
"""Returns list of HTML-formatted content for severity stats."""
stats_header = ['Project']
- for s in Severity.levels:
- if total_by_severity[s.value]:
+ for severity in Severity.levels:
+ if total_by_severity[severity.value]:
stats_header.append(
'<span style=\'background-color:{}\'>{}</span>'.format(
- s.color, s.column_header))
+ severity.color, severity.column_header))
stats_header.append('TOTAL')
return stats_header
@@ -200,15 +199,15 @@
total_all_projects = 0
stats_rows = []
- for p in project_names:
- if total_by_project[p]:
- one_row = [p]
- for s in Severity.levels:
- if total_by_severity[s.value]:
- one_row.append(warnings[p][s.value])
- one_row.append(total_by_project[p])
+ for p_name in project_names:
+ if total_by_project[p_name]:
+ one_row = [p_name]
+ for severity in Severity.levels:
+ if total_by_severity[severity.value]:
+ one_row.append(warnings[p_name][severity.value])
+ one_row.append(total_by_project[p_name])
stats_rows.append(one_row)
- total_all_projects += total_by_project[p]
+ total_all_projects += total_by_project[p_name]
return total_all_projects, stats_rows
@@ -226,10 +225,10 @@
total_all_severities = 0
one_row = ['<b>TOTAL</b>']
- for s in Severity.levels:
- if total_by_severity[s.value]:
- one_row.append(total_by_severity[s.value])
- total_all_severities += total_by_severity[s.value]
+ for severity in Severity.levels:
+ if total_by_severity[severity.value]:
+ one_row.append(total_by_severity[severity.value])
+ total_all_severities += total_by_severity[severity.value]
one_row.append(total_all_projects)
stats_rows.append(one_row)
writer('<script>')
@@ -328,8 +327,8 @@
for text in fixed_patterns:
cur_row_class = 1 - cur_row_class
# remove last '\n'
- t = text[:-1] if text[-1] == '\n' else text
- writer('<tr><td class="c' + str(cur_row_class) + '">' + t + '</td></tr>')
+ out_text = text[:-1] if text[-1] == '\n' else text
+ writer('<tr><td class="c' + str(cur_row_class) + '">' + out_text + '</td></tr>')
writer('</table></div>')
writer('</blockquote>')
@@ -339,10 +338,10 @@
total = 0
for pattern in warn_patterns:
if pattern['severity'] == sev and pattern['members']:
- n = len(pattern['members'])
- total += n
+ num_members = len(pattern['members'])
+ total += num_members
warning = kind + ': ' + (pattern['description'] or '?')
- csvwriter.writerow([n, '', warning])
+ csvwriter.writerow([num_members, '', warning])
# print number of warnings for each project, ordered by project name
projects = sorted(pattern['projects'].keys())
for project in projects:
@@ -355,8 +354,8 @@
"""Dump number of warnings in CSV format to writer."""
sort_warnings(warn_patterns)
total = 0
- for s in Severity.levels:
- total += write_severity(csvwriter, s, s.column_header, warn_patterns)
+ for severity in Severity.levels:
+ total += write_severity(csvwriter, severity, severity.column_header, warn_patterns)
csvwriter.writerow([total, '', 'All warnings'])
@@ -379,35 +378,35 @@
csvwriter.writerow(output)
-# Return s with escaped backslash and quotation characters.
-def escape_string(s):
- return s.replace('\\', '\\\\').replace('"', '\\"')
+# Return line with escaped backslash and quotation characters.
+def escape_string(line):
+ return line.replace('\\', '\\\\').replace('"', '\\"')
-# Return s without trailing '\n' and escape the quotation characters.
-def strip_escape_string(s):
- if not s:
- return s
- s = s[:-1] if s[-1] == '\n' else s
- return escape_string(s)
+# Return line without trailing '\n' and escape the quotation characters.
+def strip_escape_string(line):
+ if not line:
+ return line
+ line = line[:-1] if line[-1] == '\n' else line
+ return escape_string(line)
def emit_warning_array(name, writer, warn_patterns):
writer('var warning_{} = ['.format(name))
- for w in warn_patterns:
+ for pattern in warn_patterns:
if name == 'severity':
- writer('{},'.format(w[name].value))
+ writer('{},'.format(pattern[name].value))
else:
- writer('{},'.format(w[name]))
+ writer('{},'.format(pattern[name]))
writer('];')
def emit_warning_arrays(writer, warn_patterns):
emit_warning_array('severity', writer, warn_patterns)
writer('var warning_description = [')
- for w in warn_patterns:
- if w['members']:
- writer('"{}",'.format(escape_string(w['description'])))
+ for pattern in warn_patterns:
+ if pattern['members']:
+ writer('"{}",'.format(escape_string(pattern['description'])))
else:
writer('"",') # no such warning
writer('];')
@@ -566,32 +565,32 @@
# Emit a JavaScript const integer array.
def emit_const_int_array(name, array, writer):
writer('const ' + name + ' = [')
- for n in array:
- writer(str(n) + ',')
+ for item in array:
+ writer(str(item) + ',')
writer('];')
# Emit a JavaScript const string array.
def emit_const_string_array(name, array, writer):
writer('const ' + name + ' = [')
- for s in array:
- writer('"' + strip_escape_string(s) + '",')
+ for item in array:
+ writer('"' + strip_escape_string(item) + '",')
writer('];')
# Emit a JavaScript const string array for HTML.
def emit_const_html_string_array(name, array, writer):
writer('const ' + name + ' = [')
- for s in array:
- writer('"' + html.escape(strip_escape_string(s)) + '",')
+ for item in array:
+ writer('"' + html.escape(strip_escape_string(item)) + '",')
writer('];')
# Emit a JavaScript const object array.
def emit_const_object_array(name, array, writer):
writer('const ' + name + ' = [')
- for x in array:
- writer(str(x) + ',')
+ for item in array:
+ writer(str(item) + ',')
writer('];')
@@ -671,8 +670,8 @@
warning_links, warning_records, header_str):
"""Write warnings html file."""
if html_path:
- with open(html_path, 'w') as f:
- dump_html(flags, f, warning_messages, warning_links, warning_records,
+ with open(html_path, 'w') as outf:
+ dump_html(flags, outf, warning_messages, warning_links, warning_records,
header_str, warn_patterns, project_names)
@@ -680,12 +679,12 @@
warning_records, header_str, project_names):
"""Write warnings csv file."""
if flags.csvpath:
- with open(flags.csvpath, 'w') as f:
- dump_csv(csv.writer(f, lineterminator='\n'), warn_patterns)
+ with open(flags.csvpath, 'w') as outf:
+ dump_csv(csv.writer(outf, lineterminator='\n'), warn_patterns)
if flags.csvwithdescription:
- with open(flags.csvwithdescription, 'w') as f:
- dump_csv_with_description(csv.writer(f, lineterminator='\n'),
+ with open(flags.csvwithdescription, 'w') as outf:
+ dump_csv_with_description(csv.writer(outf, lineterminator='\n'),
warning_records, warning_messages,
warn_patterns, project_names)
diff --git a/tools/warn/warn_common.py b/tools/warn/warn_common.py
index d69050f..844f629 100755
--- a/tools/warn/warn_common.py
+++ b/tools/warn/warn_common.py
@@ -116,22 +116,20 @@
def find_project_index(line, project_patterns):
"""Return the index to the project pattern array."""
- # pylint:disable=invalid-name
- for i, p in enumerate(project_patterns):
- if p.match(line):
- return i
+ for idx, pattern in enumerate(project_patterns):
+ if pattern.match(line):
+ return idx
return -1
def classify_one_warning(warning, link, results, project_patterns,
warn_patterns):
"""Classify one warning line."""
- # pylint:disable=invalid-name
- for i, w in enumerate(warn_patterns):
- for cpat in w['compiled_patterns']:
+ for idx, pattern in enumerate(warn_patterns):
+ for cpat in pattern['compiled_patterns']:
if cpat.match(warning):
- p = find_project_index(warning, project_patterns)
- results.append([warning, link, i, p])
+ project_idx = find_project_index(warning, project_patterns)
+ results.append([warning, link, idx, project_idx])
return
# If we end up here, there was a problem parsing the log
# probably caused by 'make -j' mixing the output from
@@ -310,7 +308,6 @@
# Remove the duplicated warnings save ~8% of time when parsing
# one typical build log than before
unique_warnings = dict()
- # pylint:disable=invalid-name
for line in infile:
if warning_pattern.match(line):
normalized_line = normalize_warning_line(line, flags)
@@ -318,17 +315,17 @@
unique_warnings[normalized_line] = generate_cs_link(line, flags)
elif (platform_version == 'unknown' or board_name == 'unknown' or
architecture == 'unknown'):
- m = re.match(r'.+Package:.+chromeos-base/chromeos-chrome-', line)
- if m is not None:
+ result = re.match(r'.+Package:.+chromeos-base/chromeos-chrome-', line)
+ if result is not None:
platform_version = 'R' + line.split('chrome-')[1].split('_')[0]
continue
- m = re.match(r'.+Source\sunpacked\sin\s(.+)', line)
- if m is not None:
- board_name = m.group(1).split('/')[2]
+ result = re.match(r'.+Source\sunpacked\sin\s(.+)', line)
+ if result is not None:
+ board_name = result.group(1).split('/')[2]
continue
- m = re.match(r'.+USE:\s*([^\s]*).*', line)
- if m is not None:
- architecture = m.group(1)
+ result = re.match(r'.+USE:\s*([^\s]*).*', line)
+ if result is not None:
+ architecture = result.group(1)
continue
header_str = '%s - %s - %s' % (platform_version, board_name, architecture)
@@ -396,22 +393,21 @@
line, flags, android_root, unique_warnings)
continue
- # pylint:disable=invalid-name
if line_counter < 100:
# save a little bit of time by only doing this for the first few lines
line_counter += 1
- m = re.search('(?<=^PLATFORM_VERSION=).*', line)
- if m is not None:
- platform_version = m.group(0)
- m = re.search('(?<=^TARGET_PRODUCT=).*', line)
- if m is not None:
- target_product = m.group(0)
- m = re.search('(?<=^TARGET_BUILD_VARIANT=).*', line)
- if m is not None:
- target_variant = m.group(0)
- m = re.search('(?<=^TOP=).*', line)
- if m is not None:
- android_root = m.group(1)
+ result = re.search('(?<=^PLATFORM_VERSION=).*', line)
+ if result is not None:
+ platform_version = result.group(0)
+ result = re.search('(?<=^TARGET_PRODUCT=).*', line)
+ if result is not None:
+ target_product = result.group(0)
+ result = re.search('(?<=^TARGET_BUILD_VARIANT=).*', line)
+ if result is not None:
+ target_variant = result.group(0)
+ result = re.search('(?<=^TOP=).*', line)
+ if result is not None:
+ android_root = result.group(1)
if android_root:
new_unique_warnings = dict()
@@ -458,12 +454,11 @@
other_patterns.warn_patterns)
else:
raise Exception('platform name %s is not valid' % platform)
- # pylint:disable=invalid-name
- for w in warn_patterns:
- w['members'] = []
+ for pattern in warn_patterns:
+ pattern['members'] = []
# Each warning pattern has a 'projects' dictionary, that
# maps a project name to number of warnings in that project.
- w['projects'] = {}
+ pattern['projects'] = {}
return warn_patterns