Merge "Introduce EROFS compress hints to the Android build system"
diff --git a/orchestrator/core/api_assembly.py b/orchestrator/core/api_assembly.py
index d87a83d..bd1c440 100644
--- a/orchestrator/core/api_assembly.py
+++ b/orchestrator/core/api_assembly.py
@@ -14,11 +14,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import collections
import json
import os
+import sys
-def assemble_apis(inner_trees):
+import api_assembly_cc
+import ninja_tools
+
+ContributionData = collections.namedtuple("ContributionData", ("inner_tree", "json_data"))
+
+def assemble_apis(context, inner_trees):
# Find all of the contributions from the inner tree
contribution_files_dict = inner_trees.for_each_tree(api_contribution_files_for_inner_tree)
@@ -27,25 +34,33 @@
contributions = []
for tree_key, filenames in contribution_files_dict.items():
for filename in filenames:
- contribution_data = load_contribution_file(filename)
- if not contribution_data:
+ json_data = load_contribution_file(filename)
+ if not json_data:
continue
# TODO: Validate the configs, especially that the domains match what we asked for
# from the lunch config.
- contributions.append(contribution_data)
+ contributions.append(ContributionData(inner_trees.get(tree_key), json_data))
# Group contributions by language and API surface
stub_libraries = collate_contributions(contributions)
- # Iterate through all of the stub libraries and generate rules to assemble them
- # and Android.bp/BUILD files to make those available to inner trees.
- # TODO: Parallelize? Skip unnecessary work?
- ninja_file = NinjaFile() # TODO: parameters?
- build_file = BuildFile() # TODO: parameters?
- for stub_library in stub_libraries:
- STUB_LANGUAGE_HANDLERS[stub_library.language](ninja_file, build_file, stub_library)
+ # Initialize the ninja file writer
+ with open(context.out.api_ninja_file(), "w") as ninja_file:
+ ninja = ninja_tools.Ninja(context, ninja_file)
- # TODO: Handle host_executables separately or as a StubLibrary language?
+        # Initialize the build file writer
+        build_file = BuildFile() # TODO: parameters?
+
+        # Iterate through all of the stub libraries and generate rules to assemble them
+        # and Android.bp/BUILD files to make those available to inner trees.
+        # TODO: Parallelize? Skip unnecessary work?
+        for stub_library in stub_libraries:
+            STUB_LANGUAGE_HANDLERS[stub_library.language](context, ninja, build_file, stub_library)
+
+        # TODO: Handle host_executables separately or as a StubLibrary language?
+
+        # Finish writing the ninja file
+        ninja.write()
def api_contribution_files_for_inner_tree(tree_key, inner_tree, cookie):
@@ -72,7 +87,8 @@
class StubLibraryContribution(object):
- def __init__(self, api_domain, library_contribution):
+ def __init__(self, inner_tree, api_domain, library_contribution):
+ self.inner_tree = inner_tree
self.api_domain = api_domain
self.library_contribution = library_contribution
@@ -96,54 +112,42 @@
grouped = {}
for contribution in contributions:
for language in STUB_LANGUAGE_HANDLERS.keys():
- for library in contribution.get(language, []):
- key = (language, contribution["name"], contribution["version"], library["name"])
+ for library in contribution.json_data.get(language, []):
+ key = (language, contribution.json_data["name"],
+ contribution.json_data["version"], library["name"])
stub_library = grouped.get(key)
if not stub_library:
- stub_library = StubLibrary(language, contribution["name"],
- contribution["version"], library["name"])
+ stub_library = StubLibrary(language, contribution.json_data["name"],
+ contribution.json_data["version"], library["name"])
grouped[key] = stub_library
- stub_library.add_contribution(StubLibraryContribution(
- contribution["api_domain"], library))
+ stub_library.add_contribution(StubLibraryContribution(contribution.inner_tree,
+ contribution.json_data["api_domain"], library))
return list(grouped.values())
-def assemble_cc_api_library(ninja_file, build_file, stub_library):
- print("assembling cc_api_library %s-%s %s from:" % (stub_library.api_surface, stub_library.api_surface_version,
- stub_library.name))
+def assemble_java_api_library(context, ninja, build_file, stub_library):
+ print("assembling java_api_library %s-%s %s from:" % (stub_library.api_surface,
+ stub_library.api_surface_version, stub_library.name))
for contrib in stub_library.contributions:
print(" %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
# TODO: Implement me
-def assemble_java_api_library(ninja_file, build_file, stub_library):
- print("assembling java_api_library %s-%s %s from:" % (stub_library.api_surface, stub_library.api_surface_version,
- stub_library.name))
- for contrib in stub_library.contributions:
- print(" %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
- # TODO: Implement me
-
-
-def assemble_resource_api_library(ninja_file, build_file, stub_library):
- print("assembling resource_api_library %s-%s %s from:" % (stub_library.api_surface, stub_library.api_surface_version,
- stub_library.name))
+def assemble_resource_api_library(context, ninja, build_file, stub_library):
+ print("assembling resource_api_library %s-%s %s from:" % (stub_library.api_surface,
+ stub_library.api_surface_version, stub_library.name))
for contrib in stub_library.contributions:
print(" %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
# TODO: Implement me
STUB_LANGUAGE_HANDLERS = {
- "cc_libraries": assemble_cc_api_library,
+ "cc_libraries": api_assembly_cc.assemble_cc_api_library,
"java_libraries": assemble_java_api_library,
"resource_libraries": assemble_resource_api_library,
}
-class NinjaFile(object):
- "Generator for build actions and dependencies."
- pass
-
-
class BuildFile(object):
"Abstract generator for Android.bp files and BUILD files."
pass
diff --git a/orchestrator/core/api_assembly_cc.py b/orchestrator/core/api_assembly_cc.py
new file mode 100644
index 0000000..15bc98a
--- /dev/null
+++ b/orchestrator/core/api_assembly_cc.py
@@ -0,0 +1,55 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+def assemble_cc_api_library(context, ninja, build_file, stub_library):
+ print("\nassembling cc_api_library %s-%s %s from:" % (stub_library.api_surface,
+ stub_library.api_surface_version, stub_library.name))
+ for contrib in stub_library.contributions:
+ print(" %s %s" % (contrib.api_domain, contrib.library_contribution))
+
+ staging_dir = context.out.api_library_dir(stub_library.api_surface,
+ stub_library.api_surface_version, stub_library.name)
+ work_dir = context.out.api_library_work_dir(stub_library.api_surface,
+ stub_library.api_surface_version, stub_library.name)
+ print("staging_dir=%s" % (staging_dir))
+ print("work_dir=%s" % (work_dir))
+
+ # Generate rules to copy headers
+ includes = []
+ include_dir = os.path.join(staging_dir, "include")
+ for contrib in stub_library.contributions:
+ for headers in contrib.library_contribution["headers"]:
+ root = headers["root"]
+ for file in headers["files"]:
+ # TODO: Deal with collisions of the same name from multiple contributions
+ include = os.path.join(include_dir, file)
+ ninja.add_copy_file(include, os.path.join(contrib.inner_tree.root, root, file))
+ includes.append(include)
+
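+    # For illustration (hypothetical paths): a contribution from an inner tree rooted at
+    # frameworks/libfoo, with headers root "include" and file "foo/foo.h", is staged as
+    # <staging_dir>/include/foo/foo.h by the copy_file rule above.
+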
+ # Generate rule to run ndkstubgen
+
+
+ # Generate rule to compile stubs to library
+
+ # Generate phony rule to build the library
+    # TODO: This name probably conflicts with something
+ ninja.add_phony("-".join((stub_library.api_surface, str(stub_library.api_surface_version),
+ stub_library.name)), includes)
+
+ # Generate build files
+
diff --git a/orchestrator/core/final_packaging.py b/orchestrator/core/final_packaging.py
new file mode 100644
index 0000000..693a716
--- /dev/null
+++ b/orchestrator/core/final_packaging.py
@@ -0,0 +1,29 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ninja_tools
+import ninja_syntax # Has to be after ninja_tools because of the path hack
+
+def final_packaging(context):
+ """Pull together all of the previously defined rules into the final build stems."""
+
+ with open(context.out.outer_ninja_file(), "w") as ninja_file:
+ ninja = ninja_tools.Ninja(context, ninja_file)
+
+ # Add the api surfaces file
+ ninja.add_subninja(ninja_syntax.Subninja(context.out.api_ninja_file(), chDir=None))
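+        # With the default OUT_DIR, the line emitted here looks roughly like
+        # "subninja out/api_surfaces.ninja" inside out/multitree.ninja.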
+
+ # Finish writing the ninja file
+ ninja.write()
diff --git a/orchestrator/core/inner_tree.py b/orchestrator/core/inner_tree.py
index cdb0d85..4383dd8 100644
--- a/orchestrator/core/inner_tree.py
+++ b/orchestrator/core/inner_tree.py
@@ -56,13 +56,13 @@
class InnerTree(object):
- def __init__(self, root, product):
+ def __init__(self, context, root, product):
"""Initialize with the inner tree root (relative to the workspace root)"""
self.root = root
self.product = product
self.domains = {}
# TODO: Base directory on OUT_DIR
- self.out = OutDirLayout(os.path.join("out", "trees", root))
+ self.out = OutDirLayout(context.out.inner_tree_dir(root))
def __str__(self):
return "InnerTree(root=%s product=%s domains=[%s])" % (enquote(self.root),
@@ -134,7 +134,14 @@
return result
+ def get(self, tree_key):
+ """Get an inner tree for tree_key"""
+ return self.trees.get(tree_key)
+
class OutDirLayout(object):
+ """Encapsulates the logic about the layout of the inner tree out directories.
+ See also context.OutDir for outer tree out dir contents."""
+
def __init__(self, root):
"Initialize with the root of the OUT_DIR for the inner tree."
self._root = root
diff --git a/orchestrator/core/ninja_runner.py b/orchestrator/core/ninja_runner.py
new file mode 100644
index 0000000..906f1ae
--- /dev/null
+++ b/orchestrator/core/ninja_runner.py
@@ -0,0 +1,36 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+import sys
+
+def run_ninja(context, targets):
+ """Run ninja.
+ """
+
+ # Construct the command
+ cmd = [
+ context.tools.ninja(),
+ "-f",
+ context.out.outer_ninja_file(),
+ ] + targets
+
+ # Run the command
+ process = subprocess.run(cmd, shell=False)
+
+ # TODO: Probably want better handling of inner tree failures
+ if process.returncode:
+        sys.stderr.write("Build error in outer tree.\nStopping multitree build.\n")
+ sys.exit(1)
+
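+# Illustrative invocation (hypothetical targets): with the default OUT_DIR this runs
+# roughly build/prebuilts/build-tools/linux-x86/bin/ninja -f out/multitree.ninja <targets...>
+# and exits the orchestrator if ninja reports a failure.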
diff --git a/orchestrator/core/ninja_tools.py b/orchestrator/core/ninja_tools.py
new file mode 100644
index 0000000..c676907
--- /dev/null
+++ b/orchestrator/core/ninja_tools.py
@@ -0,0 +1,46 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+
+# Workaround for python include path
+_ninja_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), "..", "ninja"))
+if _ninja_dir not in sys.path:
+ sys.path.append(_ninja_dir)
+import ninja_writer
+from ninja_syntax import Variable, BuildAction, Rule, Pool, Subninja, Line
+
+
+class Ninja(ninja_writer.Writer):
+ """Some higher level constructs on top of raw ninja writing.
+ TODO: Not sure where these should be."""
+ def __init__(self, context, file):
+ super(Ninja, self).__init__(file)
+ self._context = context
+ self._did_copy_file = False
+
+ def add_copy_file(self, copy_to, copy_from):
+ if not self._did_copy_file:
+ self._did_copy_file = True
+ rule = Rule("copy_file")
+ rule.add_variable("command", "mkdir -p ${out_dir} && " + self._context.tools.acp()
+ + " -f ${in} ${out}")
+ self.add_rule(rule)
+ build_action = BuildAction(copy_to, "copy_file", inputs=[copy_from,],
+ implicits=[self._context.tools.acp()])
+ build_action.add_variable("out_dir", os.path.dirname(copy_to))
+ self.add_build_action(build_action)
+
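+    # Illustrative output (hypothetical paths): add_copy_file("out/x/foo.h", "tree/foo.h")
+    # emits the copy_file rule once, then a build statement roughly like:
+    #   build out/x/foo.h: copy_file tree/foo.h | build/prebuilts/build-tools/linux-x86/bin/acp
+    #     out_dir = out/x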
+
diff --git a/orchestrator/core/orchestrator.py b/orchestrator/core/orchestrator.py
index e99c956..bb0885d 100755
--- a/orchestrator/core/orchestrator.py
+++ b/orchestrator/core/orchestrator.py
@@ -22,9 +22,12 @@
import api_assembly
import api_domain
import api_export
+import final_packaging
import inner_tree
import interrogate
import lunch
+import ninja_runner
+import utils
EXIT_STATUS_OK = 0
EXIT_STATUS_ERROR = 1
@@ -33,14 +36,14 @@
API_DOMAIN_VENDOR = "vendor"
API_DOMAIN_MODULE = "module"
-def process_config(lunch_config):
+def process_config(context, lunch_config):
"""Returns a InnerTrees object based on the configuration requested in the lunch config."""
def add(domain_name, tree_root, product):
tree_key = inner_tree.InnerTreeKey(tree_root, product)
if tree_key in trees:
tree = trees[tree_key]
else:
- tree = inner_tree.InnerTree(tree_root, product)
+ tree = inner_tree.InnerTree(context, tree_root, product)
trees[tree_key] = tree
domain = api_domain.ApiDomain(domain_name, tree, product)
domains[domain_name] = domain
@@ -68,6 +71,9 @@
# Load lunch combo
#
+ # Choose the out directory, set up error handling, etc.
+ context = utils.Context(utils.choose_out_dir(), utils.Errors(sys.stderr))
+
# Read the config file
try:
config_file, config, variant = lunch.load_current_config()
@@ -77,7 +83,7 @@
sys.stdout.write(lunch.make_config_header(config_file, config, variant))
# Construct the trees and domains dicts
- inner_trees = process_config(config)
+ inner_trees = process_config(context, config)
#
# 1. Interrogate the trees
@@ -93,7 +99,7 @@
#
# 2b. API Surface Assembly
#
- api_assembly.assemble_apis(inner_trees)
+ api_assembly.assemble_apis(context, inner_trees)
#
# 3a. API Domain Analysis
@@ -102,11 +108,17 @@
#
# 3b. Final Packaging Rules
#
+ final_packaging.final_packaging(context)
#
# 4. Build Execution
#
-
+ # TODO: Decide what we want the UX for selecting targets to be across
+ # branches... since there are very likely to be conflicting soong short
+ # names.
+ print("Running ninja...")
+ targets = ["public_api-1-libhwui", "public_api-1-libc"]
+ ninja_runner.run_ninja(context, targets)
#
# Success!
diff --git a/orchestrator/core/utils.py b/orchestrator/core/utils.py
new file mode 100644
index 0000000..bb7f8ad
--- /dev/null
+++ b/orchestrator/core/utils.py
@@ -0,0 +1,120 @@
+#!/usr/bin/python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import platform
+
+class Context(object):
+ """Mockable container for global state."""
+ def __init__(self, out_root, errors):
+ self.out = OutDir(out_root)
+ self.errors = errors
+ self.tools = HostTools()
+
+class TestContext(Context):
+ "Context for testing. The real Context is manually constructed in orchestrator.py."
+
+ def __init__(self, test_work_dir, test_name):
+        super(TestContext, self).__init__(os.path.join(test_work_dir, test_name),
+ Errors(None))
+
+
+class OutDir(object):
+ """Encapsulates the logic about the out directory at the outer-tree level.
+ See also inner_tree.OutDirLayout for inner tree out dir contents."""
+
+ def __init__(self, root):
+ "Initialize with the root of the OUT_DIR for the outer tree."
+ self._root = root
+ self._intermediates = "intermediates"
+
+ def root(self):
+ return self._root
+
+ def inner_tree_dir(self, tree_root):
+ """Root directory for inner tree inside the out dir."""
+ return os.path.join(self._root, "trees", tree_root)
+
+ def api_ninja_file(self):
+ """The ninja file that assembles API surfaces."""
+ return os.path.join(self._root, "api_surfaces.ninja")
+
+ def api_library_dir(self, surface, version, library):
+ """Directory for all the contents of a library inside an API surface, including
+ the build files. Any intermediates should go in api_library_work_dir."""
+ return os.path.join(self._root, "api_surfaces", surface, str(version), library)
+
+ def api_library_work_dir(self, surface, version, library):
+ """Intermediates / scratch directory for library inside an API surface."""
+ return os.path.join(self._root, self._intermediates, "api_surfaces", surface, str(version),
+ library)
+
+ def outer_ninja_file(self):
+ return os.path.join(self._root, "multitree.ninja")
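+
+    # Illustrative layout under the default out root "out":
+    #   out/trees/<tree_root>/...                    inner tree out dirs
+    #   out/api_surfaces.ninja                       API surface assembly rules
+    #   out/api_surfaces/<surface>/<version>/<lib>/  staged API library contents
+    #   out/intermediates/api_surfaces/...           API library scratch dirs
+    #   out/multitree.ninja                          top-level ninja file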
+
+
+class Errors(object):
+ """Class for reporting and tracking errors."""
+ def __init__(self, stream):
+ """Initialize Error reporter with a file-like object."""
+ self._stream = stream
+ self._all = []
+
+ def error(self, message):
+ """Record the error message."""
+        s = str(message)
+        if not s.endswith("\n"):
+            s += "\n"
+ self._all.append(s)
+ if self._stream:
+ self._stream.write(s)
+
+ def had_error(self):
+ """Return if there were any errors reported."""
+ return len(self._all)
+
+ def get_errors(self):
+ """Get all errors that were reported."""
+ return self._all
+
+
+class HostTools(object):
+ def __init__(self):
+ if platform.system() == "Linux":
+ self._arch = "linux-x86"
+ else:
+ raise Exception("Orchestrator running on an unknown system: %s" % platform.system())
+
+ # Some of these are called a lot, so pre-compute the strings to save memory
+ self._prebuilts = os.path.join("build", "prebuilts", "build-tools", self._arch, "bin")
+ self._acp = os.path.join(self._prebuilts, "acp")
+ self._ninja = os.path.join(self._prebuilts, "ninja")
+
+ def acp(self):
+ return self._acp
+
+ def ninja(self):
+ return self._ninja
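+
+    # On Linux, acp() and ninja() resolve to
+    # build/prebuilts/build-tools/linux-x86/bin/{acp,ninja} relative to the workspace root.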
+
+
+def choose_out_dir():
+ """Get the root of the out dir, either from the environment or by picking
+ a default."""
+ result = os.environ.get("OUT_DIR")
+ if result:
+ return result
+ else:
+ return "out"
diff --git a/orchestrator/ninja/ninja_syntax.py b/orchestrator/ninja/ninja_syntax.py
index 328c99c..df97b68 100644
--- a/orchestrator/ninja/ninja_syntax.py
+++ b/orchestrator/ninja/ninja_syntax.py
@@ -159,7 +159,7 @@
self.chDir = chDir
# TODO(spandandas): Update the syntax when aosp/2064612 lands
- def stream() -> Iterator[str]:
+ def stream(self) -> Iterator[str]:
yield f"subninja {self.subninja}"
class Line(Node):
diff --git a/orchestrator/ninja/ninja_writer.py b/orchestrator/ninja/ninja_writer.py
index e3070bb..9e80b4b 100644
--- a/orchestrator/ninja/ninja_writer.py
+++ b/orchestrator/ninja/ninja_writer.py
@@ -49,6 +49,10 @@
def add_subninja(self, subninja: Subninja):
self.nodes.append(subninja)
+ def add_phony(self, name, deps):
+ build_action = BuildAction(name, "phony", inputs=deps)
+ self.add_build_action(build_action)
+
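+    # For example (hypothetical target name), add_phony("public_api-1-libfoo", ["a.h", "b.h"])
+    # emits: build public_api-1-libfoo: phony a.h b.h
+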
def write(self):
for node in self.nodes:
for line in node.stream():
diff --git a/tools/warn/html_writer.py b/tools/warn/html_writer.py
index 3fa822a..09ebf30 100644
--- a/tools/warn/html_writer.py
+++ b/tools/warn/html_writer.py
@@ -662,15 +662,26 @@
var warningsOfFiles = {};
var warningsOfDirs = {};
var subDirs = {};
- function addOneWarning(map, key) {
- map[key] = 1 + ((key in map) ? map[key] : 0);
+ function addOneWarning(map, key, type, unique) {
+ function increaseCounter(idx) {
+ map[idx] = 1 + ((idx in map) ? map[idx] : 0);
+ }
+      increaseCounter(key);
+      if (type != "") {
+        increaseCounter(type + " " + key);
+        if (unique) {
+          increaseCounter(type + " *");
+ }
+ }
}
for (var i = 0; i < numWarnings; i++) {
- var file = WarningMessages[i].replace(/:.*/, "");
- addOneWarning(warningsOfFiles, file);
+      var message = WarningMessages[i];
+ var file = message.replace(/:.*/, "");
+ var warningType = message.endsWith("]") ? message.replace(/.*\[/, "[") : "";
+ addOneWarning(warningsOfFiles, file, warningType, true);
var dirs = file.split("/");
var dir = dirs[0];
- addOneWarning(warningsOfDirs, dir);
+ addOneWarning(warningsOfDirs, dir, warningType, true);
for (var d = 1; d < dirs.length - 1; d++) {
var subDir = dir + "/" + dirs[d];
if (!(dir in subDirs)) {
@@ -678,7 +689,7 @@
}
subDirs[dir][subDir] = 1;
dir = subDir;
- addOneWarning(warningsOfDirs, dir);
+ addOneWarning(warningsOfDirs, dir, warningType, false);
}
}
var minDirWarnings = numWarnings*(LimitPercentWarnings/100);
@@ -725,27 +736,33 @@
document.getElementById(divName));
table.draw(view, {allowHtml: true, alternatingRowStyle: true});
}
- addTable("Directory", "top_dirs_table", TopDirs, "selectDir");
- addTable("File", "top_files_table", TopFiles, "selectFile");
+ addTable("[Warning Type] Directory", "top_dirs_table", TopDirs, "selectDir");
+ addTable("[Warning Type] File", "top_files_table", TopFiles, "selectFile");
}
function selectDirFile(idx, rows, dirFile) {
if (rows.length <= idx) {
return;
}
var name = rows[idx][2];
+ var type = "";
+ if (name.startsWith("[")) {
+ type = " " + name.replace(/ .*/, "");
+ name = name.replace(/.* /, "");
+ }
var spanName = "selected_" + dirFile + "_name";
- document.getElementById(spanName).innerHTML = name;
+ document.getElementById(spanName).innerHTML = name + type;
var divName = "selected_" + dirFile + "_warnings";
var numWarnings = rows[idx][1].v;
var prefix = name.replace(/\\.\\.\\.$/, "");
var data = new google.visualization.DataTable();
- data.addColumn('string', numWarnings + ' warnings in ' + name);
+ data.addColumn('string', numWarnings + type + ' warnings in ' + name);
var getWarningMessage = (FlagPlatform == "chrome")
? ((x) => addURLToLine(WarningMessages[Warnings[x][2]],
WarningLinks[Warnings[x][3]]))
: ((x) => addURL(WarningMessages[Warnings[x][2]]));
for (var i = 0; i < Warnings.length; i++) {
- if (WarningMessages[Warnings[i][2]].startsWith(prefix)) {
+ if ((prefix.startsWith("*") || WarningMessages[Warnings[i][2]].startsWith(prefix)) &&
+ (type == "" || WarningMessages[Warnings[i][2]].endsWith(type))) {
data.addRow([getWarningMessage(i)]);
}
}
@@ -827,14 +844,14 @@
def section2():
dump_dir_file_section(
writer, 'directory', 'top_dirs_table',
- 'Directories with at least ' +
- str(LIMIT_PERCENT_WARNINGS) + '% warnings')
+ 'Directories/Warnings with at least ' +
+ str(LIMIT_PERCENT_WARNINGS) + '% of all cases')
def section3():
dump_dir_file_section(
writer, 'file', 'top_files_table',
- 'Files with at least ' +
- str(LIMIT_PERCENT_WARNINGS) + '% or ' +
- str(LIMIT_WARNINGS_PER_FILE) + ' warnings')
+ 'Files/Warnings with at least ' +
+ str(LIMIT_PERCENT_WARNINGS) + '% of all or ' +
+ str(LIMIT_WARNINGS_PER_FILE) + ' cases')
def section4():
writer('<script>')
emit_js_data(writer, flags, warning_messages, warning_links,
diff --git a/tools/warn/warn_common.py b/tools/warn/warn_common.py
index 61c8676..f24cfb7 100755
--- a/tools/warn/warn_common.py
+++ b/tools/warn/warn_common.py
@@ -228,6 +228,14 @@
return ''
+def find_android_root_top_dirs(root_dir):
+ """Return a list of directories under the root_dir, if it exists."""
+ if not os.path.isdir(root_dir):
+ return None
+ return list(filter(lambda d: os.path.isdir(root_dir + '/' + d),
+ os.listdir(root_dir)))
+
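+# For example (hypothetical tree), find_android_root_top_dirs('/src/aosp') might return
+# ['art', 'bionic', 'build', ...], or None if '/src/aosp' is not a directory.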
+
def find_android_root(buildlog):
"""Guess android source root from common prefix of file paths."""
# Use the longest common prefix of the absolute file paths
@@ -239,8 +247,8 @@
# We want to find android_root of a local build machine.
# Do not use RBE warning lines, which has '/b/f/w/' path prefix.
# Do not use /tmp/ file warnings.
- if warning_pattern.match(line) and (
- '/b/f/w' not in line and not line.startswith('/tmp/')):
+ if ('/b/f/w' not in line and not line.startswith('/tmp/') and
+ warning_pattern.match(line)):
warning_lines.append(line)
count += 1
if count > 9999:
@@ -251,15 +259,23 @@
path = os.path.normpath(re.sub(':.*$', '', line))
android_root = find_warn_py_and_android_root(path)
if android_root:
- return android_root
+ return android_root, find_android_root_top_dirs(android_root)
# Do not use common prefix of a small number of paths.
+ android_root = ''
if count > 10:
# pytype: disable=wrong-arg-types
root_path = os.path.commonprefix(warning_lines)
# pytype: enable=wrong-arg-types
if len(root_path) > 2 and root_path[len(root_path) - 1] == '/':
- return root_path[:-1]
- return ''
+ android_root = root_path[:-1]
+ if android_root and os.path.isdir(android_root):
+ return android_root, find_android_root_top_dirs(android_root)
+ # When the build.log file is moved to a different machine where
+ # android_root is not found, use the location of this script
+ # to find the android source tree root and its sub directories.
+ # This __file__ is /..../build/make/tools/warn/warn_common.py
+ script_root = __file__.replace('/build/make/tools/warn/warn_common.py', '')
+ return android_root, find_android_root_top_dirs(script_root)
def remove_android_root_prefix(path, android_root):
@@ -310,8 +326,6 @@
warning_pattern = re.compile(chrome_warning_pattern)
# Collect all unique warning lines
- # Remove the duplicated warnings save ~8% of time when parsing
- # one typical build log than before
unique_warnings = dict()
for line in infile:
if warning_pattern.match(line):
@@ -354,7 +368,7 @@
target_variant = 'unknown'
build_id = 'unknown'
use_rbe = False
- android_root = find_android_root(infile)
+ android_root, root_top_dirs = find_android_root(infile)
infile.seek(0)
# rustc warning messages have two lines that should be combined:
@@ -367,24 +381,39 @@
# C/C++ compiler warning messages have line and column numbers:
# some/path/file.c:line_number:column_number: warning: description
warning_pattern = re.compile('(^[^ ]*/[^ ]*: warning: .*)|(^warning: .*)')
- warning_without_file = re.compile('^warning: .*')
rustc_file_position = re.compile('^[ ]+--> [^ ]*/[^ ]*:[0-9]+:[0-9]+')
- # If RBE was used, try to reclaim some warning lines mixed with some
- # leading chars from other concurrent job's stderr output .
+ # If RBE was used, try to reclaim some warning lines (from stdout)
+ # that contain leading characters from stderr.
# The leading characters can be any character, including digits and spaces.
- # It's impossible to correctly identify the starting point of the source
- # file path without the file directory name knowledge.
- # Here we can only be sure to recover lines containing "/b/f/w/".
- rbe_warning_pattern = re.compile('.*/b/f/w/[^ ]*: warning: .*')
- # Collect all unique warning lines
- # Remove the duplicated warnings save ~8% of time when parsing
- # one typical build log than before
+ # If a warning line's source file path contains the special RBE prefix
+ # /b/f/w/, we can remove all leading chars up to and including the "/b/f/w/".
+ bfw_warning_pattern = re.compile('.*/b/f/w/([^ ]*: warning: .*)')
+
+ # When android_root is known and available, we find its top directories
+ # and remove all leading chars before a top directory name.
+ # We assume that the leading chars from stderr do not contain "/".
+ # For example,
+ # 10external/...
+ # 12 warningsexternal/...
+ # 413 warningexternal/...
+ # 5 warnings generatedexternal/...
+ # Suppressed 1000 warnings (packages/modules/...
+ if root_top_dirs:
+ extra_warning_pattern = re.compile(
+ '^.[^/]*((' + '|'.join(root_top_dirs) +
+ ')/[^ ]*: warning: .*)')
+ else:
+ extra_warning_pattern = re.compile('^[^/]* ([^ /]*/[^ ]*: warning: .*)')
+
+ # Collect all unique warning lines
unique_warnings = dict()
+ checked_warning_lines = dict()
line_counter = 0
prev_warning = ''
for line in infile:
+ line_counter += 1
if prev_warning:
if rustc_file_position.match(line):
# must be a rustc warning, combine 2 lines into one warning
@@ -399,14 +428,32 @@
prev_warning, flags, android_root, unique_warnings)
prev_warning = ''
- if use_rbe and rbe_warning_pattern.match(line):
- cleaned_up_line = re.sub('.*/b/f/w/', '', line)
- unique_warnings = add_normalized_line_to_warnings(
- cleaned_up_line, flags, android_root, unique_warnings)
+ # re.match is slow, with several warning line patterns and
+ # long input lines like "TIMEOUT: ...".
+ # We save significant time by skipping non-warning lines.
+ # But do not skip the first 100 lines, because we want to
+ # catch build variables.
+ if line_counter > 100 and line.find('warning: ') < 0:
continue
+            # A large clean build output can consist of up to 90% duplicated
+            # "warning:" lines. Skipping them quickly speeds up this
+            # for-loop by 3X to 5X.
+ if line in checked_warning_lines:
+ continue
+ checked_warning_lines[line] = True
+
+ # Clean up extra prefix if RBE is used.
+ if use_rbe:
+ if '/b/f/w/' in line:
+ result = bfw_warning_pattern.search(line)
+ else:
+ result = extra_warning_pattern.search(line)
+ if result is not None:
+ line = result.group(1)
+
if warning_pattern.match(line):
- if warning_without_file.match(line):
+ if line.startswith('warning: '):
# save this line and combine it with the next line
prev_warning = line
else:
@@ -416,7 +463,6 @@
if line_counter < 100:
# save a little bit of time by only doing this for the first few lines
- line_counter += 1
result = re.search('(?<=^PLATFORM_VERSION=).*', line)
if result is not None:
platform_version = result.group(0)