Add a stretchy benchmark

Also rework the macrobench runner: the repeat loop moves from main()
into run() so that each repetition is reported as its own
BenchmarkReporter::Run (with repetitions and repetition_index set),
the frame and repeat counts move into TestScene::Options as frameCount
and repeatCount, and a --skip-leak-check flag is added to skip the
leak check on exit.

Test: this
Bug: 187718492
Change-Id: Idaf3a90567a4f90c7089159b496ddf4aac24bf05
diff --git a/libs/hwui/tests/macrobench/TestSceneRunner.cpp b/libs/hwui/tests/macrobench/TestSceneRunner.cpp
index 13ac367..cf9b0c5 100644
--- a/libs/hwui/tests/macrobench/TestSceneRunner.cpp
+++ b/libs/hwui/tests/macrobench/TestSceneRunner.cpp
@@ -62,53 +62,23 @@
     T mAverage;
 };
 
+using BenchmarkResults = std::vector<benchmark::BenchmarkReporter::Run>;
+
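+// Converts the timing of a single repetition into a BenchmarkReporter::Run
+// and appends it to the caller's results list.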
 void outputBenchmarkReport(const TestScene::Info& info, const TestScene::Options& opts,
-                           benchmark::BenchmarkReporter* reporter, RenderProxy* proxy,
-                           double durationInS) {
-    using namespace benchmark;
-
-    struct ReportInfo {
-        int percentile;
-        const char* suffix;
-    };
-
-    static std::array<ReportInfo, 4> REPORTS = {
-            ReportInfo{50, "_50th"}, ReportInfo{90, "_90th"}, ReportInfo{95, "_95th"},
-            ReportInfo{99, "_99th"},
-    };
-
-    // Although a vector is used, it must stay with only a single element
-    // otherwise the BenchmarkReporter will automatically compute
-    // mean and stddev which doesn't make sense for our usage
-    std::vector<BenchmarkReporter::Run> reports;
-    BenchmarkReporter::Run report;
+                           double durationInS, int repetitionIndex, BenchmarkResults* reports) {
+    benchmark::BenchmarkReporter::Run report;
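+    // Record how many repetitions run in total and which one this is, so the
+    // individual runs can be told apart in the report.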
+    report.repetitions = opts.repeatCount;
+    report.repetition_index = repetitionIndex;
     report.run_name.function_name = info.name;
-    report.iterations = static_cast<int64_t>(opts.count);
+    report.iterations = static_cast<int64_t>(opts.frameCount);
     report.real_accumulated_time = durationInS;
     report.cpu_accumulated_time = durationInS;
-    report.counters["items_per_second"] = opts.count / durationInS;
-    reports.push_back(report);
-    reporter->ReportRuns(reports);
-
-    // Pretend the percentiles are single-iteration runs of the test
-    // If rendering offscreen skip this as it's fps that's more interesting
-    // in that test case than percentiles.
-    if (!opts.renderOffscreen) {
-        for (auto& ri : REPORTS) {
-            reports[0].run_name.function_name = info.name;
-            reports[0].run_name.function_name += ri.suffix;
-            durationInS = proxy->frameTimePercentile(ri.percentile) / 1000.0;
-            reports[0].real_accumulated_time = durationInS;
-            reports[0].cpu_accumulated_time = durationInS;
-            reports[0].iterations = 1;
-            reports[0].counters["items_per_second"] = 0;
-            reporter->ReportRuns(reports);
-        }
-    }
+    report.counters["items_per_second"] = opts.frameCount / durationInS;
+    reports->push_back(report);
 }
 
-void run(const TestScene::Info& info, const TestScene::Options& opts,
-         benchmark::BenchmarkReporter* reporter) {
+static void doRun(const TestScene::Info& info, const TestScene::Options& opts, int repetitionIndex,
+                  BenchmarkResults* reports) {
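+    // One repetition: render opts.frameCount frames of the scene and either
+    // append a benchmark result (reports != nullptr) or dump jank stats.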
     Properties::forceDrawFrame = true;
     TestContext testContext;
     testContext.setRenderOffscreen(opts.renderOffscreen);
@@ -158,7 +128,7 @@
     ModifiedMovingAverage<double> avgMs(opts.reportFrametimeWeight);
 
     nsecs_t start = systemTime(SYSTEM_TIME_MONOTONIC);
-    for (int i = 0; i < opts.count; i++) {
+    for (int i = 0; i < opts.frameCount; i++) {
         testContext.waitForVsync();
         nsecs_t vsync = systemTime(SYSTEM_TIME_MONOTONIC);
         {
@@ -182,9 +152,24 @@
     proxy->fence();
     nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
 
-    if (reporter) {
-        outputBenchmarkReport(info, opts, reporter, proxy.get(), (end - start) / (double)s2ns(1));
+    if (reports) {
+        outputBenchmarkReport(info, opts, (end - start) / (double)s2ns(1), repetitionIndex,
+                              reports);
     } else {
         proxy->dumpProfileInfo(STDOUT_FILENO, DumpFlags::JankStats);
     }
 }
+
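+// Runs the scene opts.repeatCount times and, when a reporter is supplied,
+// reports all of the collected per-repetition results in a single batch.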
+void run(const TestScene::Info& info, const TestScene::Options& opts,
+         benchmark::BenchmarkReporter* reporter) {
+    BenchmarkResults results;
+    for (int i = 0; i < opts.repeatCount; i++) {
+        doRun(info, opts, i, reporter ? &results : nullptr);
+    }
+    if (reporter) {
+        reporter->ReportRuns(results);
+        if (results.size() > 1) {
+            // TODO: Report summary
+        }
+    }
+}
diff --git a/libs/hwui/tests/macrobench/main.cpp b/libs/hwui/tests/macrobench/main.cpp
index 174a140..acbbb95 100644
--- a/libs/hwui/tests/macrobench/main.cpp
+++ b/libs/hwui/tests/macrobench/main.cpp
@@ -40,9 +40,9 @@
 using namespace android::uirenderer;
 using namespace android::uirenderer::test;
 
-static int gRepeatCount = 1;
 static std::vector<TestScene::Info> gRunTests;
 static TestScene::Options gOpts;
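+// Whether to run the leak checker on exit; cleared by --skip-leak-check.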
+static bool gRunLeakCheck = true;
 std::unique_ptr<benchmark::BenchmarkReporter> gBenchmarkReporter;
 
 void run(const TestScene::Info& info, const TestScene::Options& opts,
@@ -69,6 +69,7 @@
                        are offscreen rendered
   --benchmark_format   Set output format. Possible values are tabular, json, csv
   --renderer=TYPE      Sets the render pipeline to use. May be skiagl or skiavk
+  --skip-leak-check    Skips the memory leak check
 )");
 }
 
@@ -170,6 +171,7 @@
     Onscreen,
     Offscreen,
     Renderer,
+    SkipLeakCheck,
 };
 }
 
@@ -185,6 +187,7 @@
         {"onscreen", no_argument, nullptr, LongOpts::Onscreen},
         {"offscreen", no_argument, nullptr, LongOpts::Offscreen},
         {"renderer", required_argument, nullptr, LongOpts::Renderer},
+        {"skip-leak-check", no_argument, nullptr, LongOpts::SkipLeakCheck},
         {0, 0, 0, 0}};
 
 static const char* SHORT_OPTIONS = "c:r:h";
@@ -214,20 +217,20 @@
                 break;
 
             case 'c':
-                gOpts.count = atoi(optarg);
-                if (!gOpts.count) {
+                gOpts.frameCount = atoi(optarg);
+                if (!gOpts.frameCount) {
                     fprintf(stderr, "Invalid frames argument '%s'\n", optarg);
                     error = true;
                 }
                 break;
 
             case 'r':
-                gRepeatCount = atoi(optarg);
-                if (!gRepeatCount) {
+                gOpts.repeatCount = atoi(optarg);
+                if (!gOpts.repeatCount) {
                     fprintf(stderr, "Invalid repeat argument '%s'\n", optarg);
                     error = true;
                 } else {
-                    gRepeatCount = (gRepeatCount > 0 ? gRepeatCount : INT_MAX);
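+                    // A negative count means repeat (effectively) forever.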
+                    gOpts.repeatCount = (gOpts.repeatCount > 0 ? gOpts.repeatCount : INT_MAX);
                 }
                 break;
 
@@ -283,6 +286,10 @@
                 gOpts.renderOffscreen = true;
                 break;
 
+            case LongOpts::SkipLeakCheck:
+                gRunLeakCheck = false;
+                break;
+
             case 'h':
                 printHelp();
                 exit(EXIT_SUCCESS);
@@ -322,9 +329,6 @@
 }
 
 int main(int argc, char* argv[]) {
-    // set defaults
-    gOpts.count = 150;
-
     Typeface::setRobotoTypefaceForTest();
 
     parseOptions(argc, argv);
@@ -345,10 +349,8 @@
         gBenchmarkReporter->ReportContext(context);
     }
 
-    for (int i = 0; i < gRepeatCount; i++) {
-        for (auto&& test : gRunTests) {
-            run(test, gOpts, gBenchmarkReporter.get());
-        }
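+    // Repetition is now handled inside run(), so each test is invoked once.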
+    for (auto&& test : gRunTests) {
+        run(test, gOpts, gBenchmarkReporter.get());
     }
 
     if (gBenchmarkReporter) {
@@ -358,6 +360,8 @@
     renderthread::RenderProxy::trimMemory(100);
     HardwareBitmapUploader::terminate();
 
-    LeakChecker::checkForLeaks();
+    if (gRunLeakCheck) {
+        LeakChecker::checkForLeaks();
+    }
     return 0;
 }