Remove multiproduct_kati and build_test.bash
This hasn't been running in CI for over a year, and when it was,
it was too slow and nearly impossible to keep green. It contained
some good optimizations for checking multiple configs, but with
incremental analysis we have an opportunity to rethink how to
accomplish the same goals with fewer resources.
Test: treehugger
Bug: 374816306
Change-Id: Id19d9c117d7474c3a5ef6ba011ada0c6b4d1dadc
diff --git a/build_test.bash b/build_test.bash
deleted file mode 100755
index defdd82..0000000
--- a/build_test.bash
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/bin/bash -eu
-#
-# Copyright 2017 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-#
-# This file is used in our continous build infrastructure to run a variety of
-# tests related to the build system.
-#
-# Currently, it's used to build and run multiproduct_kati, so it'll attempt
-# to build ninja files for every product in the tree. I expect this to
-# evolve as we find interesting things to test or track performance for.
-#
-
-# Products that are broken or otherwise don't work with multiproduct_kati
-SKIPPED_PRODUCTS=(
- # These products are for soong-only builds, and will fail the kati stage.
- linux_bionic
- mainline_sdk
- ndk
-
- # New architecture bringup, fails without ALLOW_MISSING_DEPENDENCIES=true
- aosp_riscv64
-)
-
-# To track how long we took to startup.
-case $(uname -s) in
- Darwin)
- export TRACE_BEGIN_SOONG=`$T/prebuilts/build-tools/path/darwin-x86/date +%s%3N`
- ;;
- *)
- export TRACE_BEGIN_SOONG=$(date +%s%N)
- ;;
-esac
-
-# Remove BUILD_NUMBER so that incremental builds on build servers don't
-# re-read makefiles every time.
-unset BUILD_NUMBER
-
-export TOP=$(cd $(dirname ${BASH_SOURCE[0]})/../..; PWD= /bin/pwd)
-cd "${TOP}"
-source "${TOP}/build/soong/scripts/microfactory.bash"
-
-case $(uname) in
- Linux)
- if [[ -f /lib/x86_64-linux-gnu/libSegFault.so ]]; then
- export LD_PRELOAD=/lib/x86_64-linux-gnu/libSegFault.so
- export SEGFAULT_USE_ALTSTACK=1
- fi
- ulimit -a
- ;;
-esac
-
-echo
-echo "Free disk space:"
-# Ignore df errors because it errors out on gvfsd file systems
-# but still displays most of the useful info we need
-df -h || true
-
-echo
-echo "Running Bazel smoke test..."
-STANDALONE_BAZEL=true "${TOP}/build/bazel/bin/bazel" --batch --max_idle_secs=1 help
-
-echo
-echo "Running Soong test..."
-soong_build_go multiproduct_kati android/soong/cmd/multiproduct_kati
-exec "$(getoutdir)/multiproduct_kati" --skip-products "$(echo "${SKIPPED_PRODUCTS[@]-}" | tr ' ' ',')" "$@"
diff --git a/cmd/multiproduct_kati/Android.bp b/cmd/multiproduct_kati/Android.bp
deleted file mode 100644
index 20ca2a3..0000000
--- a/cmd/multiproduct_kati/Android.bp
+++ /dev/null
@@ -1,44 +0,0 @@
-// Copyright 2017 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package {
- default_applicable_licenses: ["Android-Apache-2.0"],
-}
-
-blueprint_go_binary {
- name: "multiproduct_kati",
- deps: [
- "soong-ui-logger",
- "soong-ui-signal",
- "soong-ui-terminal",
- "soong-ui-tracer",
- "soong-zip",
- ],
- srcs: [
- "main.go",
- ],
- testSrcs: [
- "main_test.go",
- ],
- linux: {
- srcs: [
- "main_linux.go",
- ],
- },
- darwin: {
- srcs: [
- "main_darwin.go",
- ],
- },
-}
diff --git a/cmd/multiproduct_kati/main.go b/cmd/multiproduct_kati/main.go
deleted file mode 100644
index c3b0381..0000000
--- a/cmd/multiproduct_kati/main.go
+++ /dev/null
@@ -1,598 +0,0 @@
-// Copyright 2017 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
- "bufio"
- "context"
- "flag"
- "fmt"
- "io"
- "io/ioutil"
- "log"
- "os"
- "os/exec"
- "path/filepath"
- "regexp"
- "runtime"
- "strings"
- "sync"
- "syscall"
- "time"
-
- "android/soong/ui/logger"
- "android/soong/ui/signal"
- "android/soong/ui/status"
- "android/soong/ui/terminal"
- "android/soong/ui/tracer"
- "android/soong/zip"
-)
-
-var numJobs = flag.Int("j", 0, "number of parallel jobs [0=autodetect]")
-
-var keepArtifacts = flag.Bool("keep", false, "keep archives of artifacts")
-var incremental = flag.Bool("incremental", false, "run in incremental mode (saving intermediates)")
-
-var outDir = flag.String("out", "", "path to store output directories (defaults to tmpdir under $OUT when empty)")
-var alternateResultDir = flag.Bool("dist", false, "write select results to $DIST_DIR (or <out>/dist when empty)")
-
-var bazelMode = flag.Bool("bazel-mode", false, "use bazel for analysis of certain modules")
-var bazelModeStaging = flag.Bool("bazel-mode-staging", false, "use bazel for analysis of certain near-ready modules")
-
-var onlyConfig = flag.Bool("only-config", false, "Only run product config (not Soong or Kati)")
-var onlySoong = flag.Bool("only-soong", false, "Only run product config and Soong (not Kati)")
-
-var buildVariant = flag.String("variant", "eng", "build variant to use")
-
-var shardCount = flag.Int("shard-count", 1, "split the products into multiple shards (to spread the build onto multiple machines, etc)")
-var shard = flag.Int("shard", 1, "1-indexed shard to execute")
-
-var skipProducts multipleStringArg
-var includeProducts multipleStringArg
-
-func init() {
- flag.Var(&skipProducts, "skip-products", "comma-separated list of products to skip (known failures, etc)")
- flag.Var(&includeProducts, "products", "comma-separated list of products to build")
-}
-
-// multipleStringArg is a flag.Value that takes comma separated lists and converts them to a
-// []string. The argument can be passed multiple times to append more values.
-type multipleStringArg []string
-
-func (m *multipleStringArg) String() string {
- return strings.Join(*m, `, `)
-}
-
-func (m *multipleStringArg) Set(s string) error {
- *m = append(*m, strings.Split(s, ",")...)
- return nil
-}
-
-const errorLeadingLines = 20
-const errorTrailingLines = 20
-
-func errMsgFromLog(filename string) string {
- if filename == "" {
- return ""
- }
-
- data, err := ioutil.ReadFile(filename)
- if err != nil {
- return ""
- }
-
- lines := strings.Split(strings.TrimSpace(string(data)), "\n")
- if len(lines) > errorLeadingLines+errorTrailingLines+1 {
- lines[errorLeadingLines] = fmt.Sprintf("... skipping %d lines ...",
- len(lines)-errorLeadingLines-errorTrailingLines)
-
- lines = append(lines[:errorLeadingLines+1],
- lines[len(lines)-errorTrailingLines:]...)
- }
- var buf strings.Builder
- for _, line := range lines {
- buf.WriteString("> ")
- buf.WriteString(line)
- buf.WriteString("\n")
- }
- return buf.String()
-}
-
-// TODO(b/70370883): This tool uses a lot of open files -- over the default
-// soft limit of 1024 on some systems. So bump up to the hard limit until I fix
-// the algorithm.
-func setMaxFiles(log logger.Logger) {
- var limits syscall.Rlimit
-
- err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &limits)
- if err != nil {
- log.Println("Failed to get file limit:", err)
- return
- }
-
- log.Verbosef("Current file limits: %d soft, %d hard", limits.Cur, limits.Max)
- if limits.Cur == limits.Max {
- return
- }
-
- limits.Cur = limits.Max
- err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &limits)
- if err != nil {
- log.Println("Failed to increase file limit:", err)
- }
-}
-
-func inList(str string, list []string) bool {
- for _, other := range list {
- if str == other {
- return true
- }
- }
- return false
-}
-
-func copyFile(from, to string) error {
- fromFile, err := os.Open(from)
- if err != nil {
- return err
- }
- defer fromFile.Close()
-
- toFile, err := os.Create(to)
- if err != nil {
- return err
- }
- defer toFile.Close()
-
- _, err = io.Copy(toFile, fromFile)
- return err
-}
-
-type mpContext struct {
- Logger logger.Logger
- Status status.ToolStatus
-
- SoongUi string
- MainOutDir string
- MainLogsDir string
-}
-
-func findNamedProducts(soongUi string, log logger.Logger) []string {
- cmd := exec.Command(soongUi, "--dumpvars-mode", "--vars=all_named_products")
- output, err := cmd.Output()
- if err != nil {
- log.Fatalf("Cannot determine named products: %v", err)
- }
-
- rx := regexp.MustCompile(`^all_named_products='(.*)'$`)
- match := rx.FindStringSubmatch(strings.TrimSpace(string(output)))
- return strings.Fields(match[1])
-}
-
-// ensureEmptyFileExists ensures that the containing directory exists, and the
-// specified file exists. If it doesn't exist, it will write an empty file.
-func ensureEmptyFileExists(file string, log logger.Logger) {
- if _, err := os.Stat(file); os.IsNotExist(err) {
- f, err := os.Create(file)
- if err != nil {
- log.Fatalf("Error creating %s: %q\n", file, err)
- }
- f.Close()
- } else if err != nil {
- log.Fatalf("Error checking %s: %q\n", file, err)
- }
-}
-
-func outDirBase() string {
- outDirBase := os.Getenv("OUT_DIR")
- if outDirBase == "" {
- return "out"
- } else {
- return outDirBase
- }
-}
-
-func distDir(outDir string) string {
- if distDir := os.Getenv("DIST_DIR"); distDir != "" {
- return filepath.Clean(distDir)
- } else {
- return filepath.Join(outDir, "dist")
- }
-}
-
-func forceAnsiOutput() bool {
- value := os.Getenv("SOONG_UI_ANSI_OUTPUT")
- return value == "1" || value == "y" || value == "yes" || value == "on" || value == "true"
-}
-
-func getBazelArg() string {
- count := 0
- str := ""
- if *bazelMode {
- count++
- str = "--bazel-mode"
- }
- if *bazelModeStaging {
- count++
- str = "--bazel-mode-staging"
- }
-
- if count > 1 {
- // Can't set more than one
- fmt.Errorf("Only one bazel mode is permitted to be set.")
- os.Exit(1)
- }
-
- return str
-}
-
-func main() {
- stdio := terminal.StdioImpl{}
-
- output := terminal.NewStatusOutput(stdio.Stdout(), "", false, false,
- forceAnsiOutput())
- log := logger.New(output)
- defer log.Cleanup()
-
- for _, v := range os.Environ() {
- log.Println("Environment: " + v)
- }
-
- log.Printf("Argv: %v\n", os.Args)
-
- flag.Parse()
-
- _, cancel := context.WithCancel(context.Background())
- defer cancel()
-
- trace := tracer.New(log)
- defer trace.Close()
-
- stat := &status.Status{}
- defer stat.Finish()
- stat.AddOutput(output)
-
- var failures failureCount
- stat.AddOutput(&failures)
-
- signal.SetupSignals(log, cancel, func() {
- trace.Close()
- log.Cleanup()
- stat.Finish()
- })
-
- soongUi := "build/soong/soong_ui.bash"
-
- var outputDir string
- if *outDir != "" {
- outputDir = *outDir
- } else {
- name := "multiproduct"
- if !*incremental {
- name += "-" + time.Now().Format("20060102150405")
- }
- outputDir = filepath.Join(outDirBase(), name)
- }
-
- log.Println("Output directory:", outputDir)
-
- // The ninja_build file is used by our buildbots to understand that the output
- // can be parsed as ninja output.
- if err := os.MkdirAll(outputDir, 0777); err != nil {
- log.Fatalf("Failed to create output directory: %v", err)
- }
- ensureEmptyFileExists(filepath.Join(outputDir, "ninja_build"), log)
-
- logsDir := filepath.Join(outputDir, "logs")
- os.MkdirAll(logsDir, 0777)
-
- var configLogsDir string
- if *alternateResultDir {
- configLogsDir = filepath.Join(distDir(outDirBase()), "logs")
- } else {
- configLogsDir = outputDir
- }
-
- log.Println("Logs dir: " + configLogsDir)
-
- os.MkdirAll(configLogsDir, 0777)
- log.SetOutput(filepath.Join(configLogsDir, "soong.log"))
- trace.SetOutput(filepath.Join(configLogsDir, "build.trace"))
-
- var jobs = *numJobs
- if jobs < 1 {
- jobs = runtime.NumCPU() / 4
-
- ramGb := int(detectTotalRAM() / (1024 * 1024 * 1024))
- if ramJobs := ramGb / 40; ramGb > 0 && jobs > ramJobs {
- jobs = ramJobs
- }
-
- if jobs < 1 {
- jobs = 1
- }
- }
- log.Verbosef("Using %d parallel jobs", jobs)
-
- setMaxFiles(log)
-
- allProducts := findNamedProducts(soongUi, log)
- var productsList []string
-
- if len(includeProducts) > 0 {
- var missingProducts []string
- for _, product := range includeProducts {
- if inList(product, allProducts) {
- productsList = append(productsList, product)
- } else {
- missingProducts = append(missingProducts, product)
- }
- }
- if len(missingProducts) > 0 {
- log.Fatalf("Products don't exist: %s\n", missingProducts)
- }
- } else {
- productsList = allProducts
- }
-
- finalProductsList := make([]string, 0, len(productsList))
- skipProduct := func(p string) bool {
- for _, s := range skipProducts {
- if p == s {
- return true
- }
- }
- return false
- }
- for _, product := range productsList {
- if !skipProduct(product) {
- finalProductsList = append(finalProductsList, product)
- } else {
- log.Verbose("Skipping: ", product)
- }
- }
-
- if *shard < 1 {
- log.Fatalf("--shard value must be >= 1, not %d\n", *shard)
- } else if *shardCount < 1 {
- log.Fatalf("--shard-count value must be >= 1, not %d\n", *shardCount)
- } else if *shard > *shardCount {
- log.Fatalf("--shard (%d) must not be greater than --shard-count (%d)\n", *shard,
- *shardCount)
- } else if *shardCount > 1 {
- finalProductsList = splitList(finalProductsList, *shardCount)[*shard-1]
- }
-
- log.Verbose("Got product list: ", finalProductsList)
-
- s := stat.StartTool()
- s.SetTotalActions(len(finalProductsList))
-
- mpCtx := &mpContext{
- Logger: log,
- Status: s,
- SoongUi: soongUi,
- MainOutDir: outputDir,
- MainLogsDir: logsDir,
- }
-
- products := make(chan string, len(productsList))
- go func() {
- defer close(products)
- for _, product := range finalProductsList {
- products <- product
- }
- }()
-
- var wg sync.WaitGroup
- for i := 0; i < jobs; i++ {
- wg.Add(1)
- // To smooth out the spikes in memory usage, skew the
- // initial starting time of the jobs by a small amount.
- time.Sleep(15 * time.Second)
- go func() {
- defer wg.Done()
- for {
- select {
- case product := <-products:
- if product == "" {
- return
- }
- runSoongUiForProduct(mpCtx, product)
- }
- }
- }()
- }
- wg.Wait()
-
- if *alternateResultDir {
- args := zip.ZipArgs{
- FileArgs: []zip.FileArg{
- {GlobDir: logsDir, SourcePrefixToStrip: logsDir},
- },
- OutputFilePath: filepath.Join(distDir(outDirBase()), "logs.zip"),
- NumParallelJobs: runtime.NumCPU(),
- CompressionLevel: 5,
- }
- log.Printf("Logs zip: %v\n", args.OutputFilePath)
- if err := zip.Zip(args); err != nil {
- log.Fatalf("Error zipping logs: %v", err)
- }
- }
-
- s.Finish()
-
- if failures.count == 1 {
- log.Fatal("1 failure")
- } else if failures.count > 1 {
- log.Fatalf("%d failures %q", failures.count, failures.fails)
- } else {
- fmt.Fprintln(output, "Success")
- }
-}
-
-func cleanupAfterProduct(outDir, productZip string) {
- if *keepArtifacts {
- args := zip.ZipArgs{
- FileArgs: []zip.FileArg{
- {
- GlobDir: outDir,
- SourcePrefixToStrip: outDir,
- },
- },
- OutputFilePath: productZip,
- NumParallelJobs: runtime.NumCPU(),
- CompressionLevel: 5,
- }
- if err := zip.Zip(args); err != nil {
- log.Fatalf("Error zipping artifacts: %v", err)
- }
- }
- if !*incremental {
- os.RemoveAll(outDir)
- }
-}
-
-func runSoongUiForProduct(mpctx *mpContext, product string) {
- outDir := filepath.Join(mpctx.MainOutDir, product)
- logsDir := filepath.Join(mpctx.MainLogsDir, product)
- productZip := filepath.Join(mpctx.MainOutDir, product+".zip")
- consoleLogPath := filepath.Join(logsDir, "std.log")
-
- if err := os.MkdirAll(outDir, 0777); err != nil {
- mpctx.Logger.Fatalf("Error creating out directory: %v", err)
- }
- if err := os.MkdirAll(logsDir, 0777); err != nil {
- mpctx.Logger.Fatalf("Error creating log directory: %v", err)
- }
-
- consoleLogFile, err := os.Create(consoleLogPath)
- if err != nil {
- mpctx.Logger.Fatalf("Error creating console log file: %v", err)
- }
- defer consoleLogFile.Close()
-
- consoleLogWriter := bufio.NewWriter(consoleLogFile)
- defer consoleLogWriter.Flush()
-
- args := []string{"--make-mode", "--skip-soong-tests", "--skip-ninja"}
-
- if !*keepArtifacts {
- args = append(args, "--empty-ninja-file")
- }
-
- if *onlyConfig {
- args = append(args, "--config-only")
- } else if *onlySoong {
- args = append(args, "--soong-only")
- }
-
- bazelStr := getBazelArg()
- if bazelStr != "" {
- args = append(args, bazelStr)
- }
-
- cmd := exec.Command(mpctx.SoongUi, args...)
- cmd.Stdout = consoleLogWriter
- cmd.Stderr = consoleLogWriter
- cmd.Env = append(os.Environ(),
- "OUT_DIR="+outDir,
- "TARGET_PRODUCT="+product,
- "TARGET_BUILD_VARIANT="+*buildVariant,
- "TARGET_BUILD_TYPE=release",
- "TARGET_BUILD_APPS=",
- "TARGET_BUILD_UNBUNDLED=",
- "USE_RBE=false") // Disabling RBE saves ~10 secs per product
-
- if *alternateResultDir {
- cmd.Env = append(cmd.Env,
- "DIST_DIR="+filepath.Join(distDir(outDirBase()), "products/"+product))
- }
-
- action := &status.Action{
- Description: product,
- Outputs: []string{product},
- }
-
- mpctx.Status.StartAction(action)
- defer cleanupAfterProduct(outDir, productZip)
-
- before := time.Now()
- err = cmd.Run()
-
- if !*onlyConfig && !*onlySoong {
- katiBuildNinjaFile := filepath.Join(outDir, "build-"+product+".ninja")
- if after, err := os.Stat(katiBuildNinjaFile); err == nil && after.ModTime().After(before) {
- err := copyFile(consoleLogPath, filepath.Join(filepath.Dir(consoleLogPath), "std_full.log"))
- if err != nil {
- log.Fatalf("Error copying log file: %s", err)
- }
- }
- }
- var errOutput string
- if err == nil {
- errOutput = ""
- } else {
- errOutput = errMsgFromLog(consoleLogPath)
- }
-
- mpctx.Status.FinishAction(status.ActionResult{
- Action: action,
- Error: err,
- Output: errOutput,
- })
-}
-
-type failureCount struct {
- count int
- fails []string
-}
-
-func (f *failureCount) StartAction(action *status.Action, counts status.Counts) {}
-
-func (f *failureCount) FinishAction(result status.ActionResult, counts status.Counts) {
- if result.Error != nil {
- f.count += 1
- f.fails = append(f.fails, result.Action.Description)
- }
-}
-
-func (f *failureCount) Message(level status.MsgLevel, message string) {
- if level >= status.ErrorLvl {
- f.count += 1
- }
-}
-
-func (f *failureCount) Flush() {}
-
-func (f *failureCount) Write(p []byte) (int, error) {
- // discard writes
- return len(p), nil
-}
-
-func splitList(list []string, shardCount int) (ret [][]string) {
- each := len(list) / shardCount
- extra := len(list) % shardCount
- for i := 0; i < shardCount; i++ {
- count := each
- if extra > 0 {
- count += 1
- extra -= 1
- }
- ret = append(ret, list[:count])
- list = list[count:]
- }
- return
-}
diff --git a/cmd/multiproduct_kati/main_darwin.go b/cmd/multiproduct_kati/main_darwin.go
deleted file mode 100644
index 3d1b12a..0000000
--- a/cmd/multiproduct_kati/main_darwin.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// Copyright 2017 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-func detectTotalRAM() uint64 {
- // unimplemented stub on darwin
- return 0
-}
diff --git a/cmd/multiproduct_kati/main_linux.go b/cmd/multiproduct_kati/main_linux.go
deleted file mode 100644
index db74496..0000000
--- a/cmd/multiproduct_kati/main_linux.go
+++ /dev/null
@@ -1,28 +0,0 @@
-// Copyright 2017 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
- "syscall"
-)
-
-func detectTotalRAM() uint64 {
- var info syscall.Sysinfo_t
- err := syscall.Sysinfo(&info)
- if err != nil {
- panic(err)
- }
- return info.Totalram * uint64(info.Unit)
-}
diff --git a/cmd/multiproduct_kati/main_test.go b/cmd/multiproduct_kati/main_test.go
deleted file mode 100644
index 263a124..0000000
--- a/cmd/multiproduct_kati/main_test.go
+++ /dev/null
@@ -1,93 +0,0 @@
-// Copyright 2019 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
- "fmt"
- "reflect"
- "testing"
-)
-
-func TestSplitList(t *testing.T) {
- testcases := []struct {
- inputCount int
- shardCount int
- want [][]string
- }{
- {
- inputCount: 1,
- shardCount: 1,
- want: [][]string{{"1"}},
- },
- {
- inputCount: 1,
- shardCount: 2,
- want: [][]string{{"1"}, {}},
- },
- {
- inputCount: 4,
- shardCount: 2,
- want: [][]string{{"1", "2"}, {"3", "4"}},
- },
- {
- inputCount: 19,
- shardCount: 10,
- want: [][]string{
- {"1", "2"},
- {"3", "4"},
- {"5", "6"},
- {"7", "8"},
- {"9", "10"},
- {"11", "12"},
- {"13", "14"},
- {"15", "16"},
- {"17", "18"},
- {"19"},
- },
- },
- {
- inputCount: 15,
- shardCount: 10,
- want: [][]string{
- {"1", "2"},
- {"3", "4"},
- {"5", "6"},
- {"7", "8"},
- {"9", "10"},
- {"11"},
- {"12"},
- {"13"},
- {"14"},
- {"15"},
- },
- },
- }
-
- for _, tc := range testcases {
- t.Run(fmt.Sprintf("%d/%d", tc.inputCount, tc.shardCount), func(t *testing.T) {
- input := []string{}
- for i := 1; i <= tc.inputCount; i++ {
- input = append(input, fmt.Sprintf("%d", i))
- }
-
- got := splitList(input, tc.shardCount)
-
- if !reflect.DeepEqual(got, tc.want) {
- t.Errorf("unexpected result for splitList([]string{...%d...}, %d):\nwant: %v\n got: %v\n",
- tc.inputCount, tc.shardCount, tc.want, got)
- }
- })
- }
-}
diff --git a/scripts/diff_build_graphs.sh b/scripts/diff_build_graphs.sh
deleted file mode 100755
index 8d01124..0000000
--- a/scripts/diff_build_graphs.sh
+++ /dev/null
@@ -1,170 +0,0 @@
-#!/bin/bash -eu
-#
-# Copyright 2017 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -e
-
-# This file makes it easy to confirm that a set of changes in source code don't result in any
-# changes to the generated ninja files. This is to reduce the effort required to be confident
-# in the correctness of refactorings
-
-function die() {
- echo "$@" >&2
- exit 1
-}
-
-function usage() {
- violation="$1"
- die "$violation
-
- Usage: diff_build_graphs.sh [--products=product1,product2...] <OLD_VERSIONS> <NEW_VERSIONS>
-
- This file builds and parses the build files (Android.mk, Android.bp, etc) for each requested
- product and for both sets of versions, and checks whether the ninja files (which implement
- the build graph) changed between the two versions.
-
- Example: diff_build_graphs.sh 'build/soong:work^ build/blueprint:work^' 'build/soong:work build/blueprint:work'
-
- Options:
- --products=PRODUCTS comma-separated list of products to check"
-}
-
-PRODUCTS_ARG=""
-OLD_VERSIONS=""
-NEW_VERSIONS=""
-function parse_args() {
- # parse optional arguments
- while true; do
- arg="${1-}"
- case "$arg" in
- --products=*) PRODUCTS_ARG="$arg";;
- *) break;;
- esac
- shift
- done
- # parse required arguments
- if [ "$#" != "2" ]; then
- usage ""
- fi
- #argument validation
- OLD_VERSIONS="$1"
- NEW_VERSIONS="$2"
-
-}
-parse_args "$@"
-
-
-# find some file paths
-cd "$(dirname $0)"
-SCRIPT_DIR="$PWD"
-cd ../../..
-CHECKOUT_ROOT="$PWD"
-OUT_DIR="${OUT_DIR-}"
-if [ -z "$OUT_DIR" ]; then
- OUT_DIR=out
-fi
-WORK_DIR="$OUT_DIR/diff"
-OUT_DIR_OLD="$WORK_DIR/out_old"
-OUT_DIR_NEW="$WORK_DIR/out_new"
-OUT_DIR_TEMP="$WORK_DIR/out_temp"
-
-
-function checkout() {
- versionSpecs="$1"
- for versionSpec in $versionSpecs; do
- project="$(echo $versionSpec | sed 's|\([^:]*\):\([^:]*\)|\1|')"
- ref="$(echo $versionSpec | sed 's|\([^:]*\):\([^:]*\)|\2|')"
- echo "checking out ref $ref in project $project"
- git -C "$project" checkout "$ref"
- done
-}
-
-function run_build() {
- echo
- echo "Starting build"
- # rebuild multiproduct_kati, in case it was missing before,
- # or in case it is affected by some of the changes we're testing
- make blueprint_tools
- # find multiproduct_kati and have it build the ninja files for each product
- builder="$(echo $OUT_DIR/host/*/bin/multiproduct_kati)"
- BUILD_NUMBER=sample "$builder" $PRODUCTS_ARG --keep --out "$OUT_DIR_TEMP" || true
- echo
-}
-
-function diffProduct() {
- product="$1"
-
- zip1="$OUT_DIR_OLD/${product}.zip"
- unzipped1="$OUT_DIR_OLD/$product"
-
- zip2="$OUT_DIR_NEW/${product}.zip"
- unzipped2="$OUT_DIR_NEW/$product"
-
- unzip -qq "$zip1" -d "$unzipped1"
- unzip -qq "$zip2" -d "$unzipped2"
-
- #do a diff of the ninja files
- diffFile="$WORK_DIR/diff.txt"
- diff -r "$unzipped1" "$unzipped2" -x build_date.txt -x build_number.txt -x '\.*' -x '*.log' -x build_fingerprint.txt -x build.ninja.d -x '*.zip' > $diffFile || true
- if [[ -s "$diffFile" ]]; then
- # outputs are different, so remove the unzipped versions but keep the zipped versions
- echo "First few differences (total diff linecount=$(wc -l $diffFile)) for product $product:"
- cat "$diffFile" | head -n 10
- echo "End of differences for product $product"
- rm -rf "$unzipped1" "$unzipped2"
- else
- # outputs are the same, so remove all of the outputs
- rm -rf "$zip1" "$unzipped1" "$zip2" "$unzipped2"
- fi
-}
-
-function do_builds() {
- #reset work dir
- rm -rf "$WORK_DIR"
- mkdir "$WORK_DIR"
-
- #build new code
- checkout "$NEW_VERSIONS"
- run_build
- mv "$OUT_DIR_TEMP" "$OUT_DIR_NEW"
-
- #build old code
- #TODO do we want to cache old results? Maybe by the time we care to cache old results this will
- #be running on a remote server somewhere and be completely different
- checkout "$OLD_VERSIONS"
- run_build
- mv "$OUT_DIR_TEMP" "$OUT_DIR_OLD"
-
- #cleanup
- echo created "$OUT_DIR_OLD" and "$OUT_DIR_NEW"
-}
-
-function main() {
- do_builds
- checkout "$NEW_VERSIONS"
-
- #find all products
- productsFile="$WORK_DIR/all_products.txt"
- find $OUT_DIR_OLD $OUT_DIR_NEW -mindepth 1 -maxdepth 1 -name "*.zip" | sed "s|^$OUT_DIR_OLD/||" | sed "s|^$OUT_DIR_NEW/||" | sed "s|\.zip$||" | sort | uniq > "$productsFile"
- echo Diffing products
- for product in $(cat $productsFile); do
- diffProduct "$product"
- done
- echo Done diffing products
- echo "Any differing outputs can be seen at $OUT_DIR_OLD/*.zip and $OUT_DIR_NEW/*.zip"
- echo "See $WORK_DIR/diff.txt for the full list of differences for the latest product checked"
-}
-
-main