#!/usr/bin/env python3
# Copyright (C) 2023 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
15
import sys
# When invoked as a script (not imported), keep Python from littering the
# source tree with .pyc files / __pycache__ directories.
if __name__ == "__main__":
    sys.dont_write_bytecode = True
19
20import argparse
21import dataclasses
22import datetime
23import json
24import os
25import pathlib
26import statistics
27import zoneinfo
28
29import pretty
30import utils
31
# TODO:
# - Flag if the last postroll build was more than 15 seconds or something. That's
#   an indicator that something is amiss.
# - Add a mode to print all of the values for multi-iteration runs
# - Add a flag to reorder the tags
# - Add a flag to reorder the headers in order to show grouping more clearly.
38
39
def FindSummaries(args):
    """Resolve command-line arguments to a sorted list of summary.json paths.

    With no arguments, searches the default report directory relative to the
    tree root. Each argument may be a summary.json file (used as-is) or a
    directory (searched recursively). Exits with status 1 on anything else.
    """
    def glob_summaries(directory):
        return [str(path.resolve())
                for path in pathlib.Path(directory).glob("**/summary.json")]

    if not args:
        # No arguments given: fall back to the default report directory.
        root = utils.get_root()
        if not root:
            return []
        return glob_summaries(root.joinpath("..", utils.DEFAULT_REPORT_DIR))

    found = []
    for arg in args:
        if os.path.isfile(arg):
            # A file argument is taken verbatim.
            found.append(arg)
        elif os.path.isdir(arg):
            # A directory argument is searched recursively.
            found.extend(glob_summaries(arg))
        else:
            sys.stderr.write(f"Invalid summary argument: {arg}\n")
            sys.exit(1)
    return sorted(found)
61
62
def LoadSummary(filename):
    """Parse the given summary.json file and return the decoded object."""
    with open(filename) as fp:
        return json.load(fp)
66
# Table layout:
# Columns:
#   Date
#   Branch
#   Tag
#   --
#   Lunch
# Rows:
#   Benchmark
75
@dataclasses.dataclass(frozen=True)
class Key:
    """Empty, hashable placeholder key (frozen dataclass with no fields).

    NOTE(review): currently unreferenced in this file — presumably reserved
    for future table keying; confirm before removing.
    """
79
class Column:
    """Placeholder column type.

    NOTE(review): currently unreferenced in this file — presumably reserved
    for future use; confirm before removing.
    """
    def __init__(self):
        pass
83
def lunch_str(d):
    """Render a lunch dict as 'product-release-variant'."""
    product = d['TARGET_PRODUCT']
    release = d['TARGET_RELEASE']
    variant = d['TARGET_BUILD_VARIANT']
    return "-".join((product, release, variant))
87
def group_by(l, key):
    """Group the items of l by key(item).

    Returns a list of (key, items) tuples. Groups appear in first-seen order
    (NOT sorted — the previous docstring claimed "sorted by key", which the
    insertion-ordered dict implementation never did); items within each group
    keep their original relative order.
    """
    groups = {}
    for item in l:
        groups.setdefault(key(item), []).append(item)
    return list(groups.items())
94
95
class Table:
    """Accumulates cell values keyed by (column, row) and pretty-prints them."""

    def __init__(self):
        self._data = {}   # (column_key, row_key) -> cell text
        self._rows = []   # row keys, in insertion order
        self._cols = []   # column keys (tuples), in insertion order

    def Set(self, column_key, row_key, data):
        """Record a cell value, registering new column/row keys as they appear."""
        self._data[(column_key, row_key)] = data
        if column_key not in self._cols:
            self._cols.append(column_key)
        if row_key not in self._rows:
            self._rows.append(row_key)

    def Write(self, out):
        """Format the accumulated table and write it to out."""
        rendered = []
        # Header rows: transpose the column-key tuples so each component
        # (date, branch, tag, ...) becomes one header line; drop lines where
        # every column shows the same value, since they distinguish nothing.
        for header in zip(*self._cols):
            if header.count(header[0]) != len(header):
                rendered.append([""] + list(header))
        if rendered:
            rendered.append(pretty.SEPARATOR)
        # Body: one line per row key, one cell per column key (blank if unset).
        for row_key in self._rows:
            cells = [str(self._data.get((col, row_key), "")) for col in self._cols]
            rendered.append([str(row_key)] + cells)
        out.write(pretty.FormatTable(rendered))
122
123
def format_duration_sec(ns):
    """Format a duration given in nanoseconds as e.g. ' 1h  2m  3s'.

    Rounds to whole seconds; the hours and minutes fields are omitted when
    they (and everything above them) are zero.
    """
    total = round(ns / 1000000000)
    hours, rest = divmod(total, 60 * 60)
    minutes, seconds = divmod(rest, 60)
    parts = []
    if hours > 0:
        parts.append(f"{hours:2d}h ")
    if hours > 0 or minutes > 0:
        parts.append(f"{minutes:2d}m ")
    parts.append(f"{seconds:2d}s")
    return "".join(parts)
135
def main(argv):
    """Load benchmark summaries, pivot them into a table, and print it.

    Columns are (date, branch, tag, lunch); rows are benchmark titles; cells
    are the median duration across iterations, formatted to the second.
    """
    parser = argparse.ArgumentParser(
        prog="format_benchmarks",
        allow_abbrev=False, # Don't let people write unsupportable scripts.
        description="Print analysis tables for benchmarks")

    parser.add_argument("summaries", nargs="*",
                        help="A summary.json file or a directory in which to look for summaries.")

    args = parser.parse_args()

    # Load the summaries
    summaries = [(s, LoadSummary(s)) for s in FindSummaries(args.summaries)]

    # Convert start times to MTV time and derive the calendar date from it.
    for filename, s in summaries:
        dt = datetime.datetime.fromisoformat(s["start_time"])
        dt = dt.astimezone(zoneinfo.ZoneInfo("America/Los_Angeles"))
        s["datetime"] = dt
        s["date"] = datetime.date(dt.year, dt.month, dt.day)

    # Sort the summaries by date, then branch, then tag.
    summaries.sort(key=lambda s: (s[1]["date"], s[1]["branch"], s[1]["tag"]))

    # Group the benchmarks: first by column key (lunch target), then by
    # benchmark id so multi-iteration runs end up in the same cell.
    def bm_key(b):
        return (
            lunch_str(b["lunch"]),
        )
    for filename, summary in summaries:
        summary["columns"] = [(key, group_by(bms, lambda b: b["id"])) for key, bms
                              in group_by(summary["benchmarks"], bm_key)]

    # Build the table
    table = Table()
    for filename, summary in summaries:
        for key, column in summary["columns"]:
            for bm_id, cell in column:  # renamed from `id`, which shadowed the builtin
                # Median duration across all iterations sharing this benchmark id.
                duration_ns = statistics.median([b["duration_ns"] for b in cell])
                # BUG FIX: strftime("YYYY-MM-DD") emitted that literal string
                # (strftime takes %-directives), so every column showed the
                # same bogus date. "%Y-%m-%d" yields the intended ISO date.
                table.Set(tuple([summary["date"].strftime("%Y-%m-%d"),
                                 summary["branch"],
                                 summary["tag"]]
                                + list(key)),
                          cell[0]["title"], format_duration_sec(duration_ns))

    table.Write(sys.stdout)
182
# Script entry point; argv is currently unused by main() beyond being passed in.
if __name__ == "__main__":
    main(sys.argv)
185