Merge remote-tracking branch 'goog/upstream-master'.

Bug: 30989362
Test: update_engine_unittests
Change-Id: I04417ac21508f4da3ead29b1dea34886f2fc84af
Merged-In: I04417ac21508f4da3ead29b1dea34886f2fc84af
diff --git a/common/utils.h b/common/utils.h
index e4ffcf8..c102a16 100644
--- a/common/utils.h
+++ b/common/utils.h
@@ -228,20 +228,6 @@
   HexDumpArray(vect.data(), vect.size());
 }
 
-template<typename KeyType, typename ValueType>
-bool MapContainsKey(const std::map<KeyType, ValueType>& m, const KeyType& k) {
-  return m.find(k) != m.end();
-}
-template<typename KeyType>
-bool SetContainsKey(const std::set<KeyType>& s, const KeyType& k) {
-  return s.find(k) != s.end();
-}
-
-template<typename T>
-bool VectorContainsValue(const std::vector<T>& vect, const T& value) {
-  return std::find(vect.begin(), vect.end(), value) != vect.end();
-}
-
 template<typename T>
 bool VectorIndexOf(const std::vector<T>& vect, const T& value,
                    typename std::vector<T>::size_type* out_index) {
diff --git a/connection_manager.cc b/connection_manager.cc
index d15faf0..be707bc 100644
--- a/connection_manager.cc
+++ b/connection_manager.cc
@@ -16,6 +16,7 @@
 
 #include "update_engine/connection_manager.h"
 
+#include <memory>
 #include <set>
 #include <string>
 
diff --git a/payload_consumer/delta_performer.cc b/payload_consumer/delta_performer.cc
index c2c43db..a619d1d 100644
--- a/payload_consumer/delta_performer.cc
+++ b/payload_consumer/delta_performer.cc
@@ -65,7 +65,7 @@
 namespace chromeos_update_engine {
 
 const uint64_t DeltaPerformer::kSupportedMajorPayloadVersion = 2;
-const uint32_t DeltaPerformer::kSupportedMinorPayloadVersion = 4;
+const uint32_t DeltaPerformer::kSupportedMinorPayloadVersion = 5;
 
 const unsigned DeltaPerformer::kProgressLogMaxChunks = 10;
 const unsigned DeltaPerformer::kProgressLogTimeoutSeconds = 30;
diff --git a/payload_consumer/delta_performer_unittest.cc b/payload_consumer/delta_performer_unittest.cc
index 88df98a..5335b5e 100644
--- a/payload_consumer/delta_performer_unittest.cc
+++ b/payload_consumer/delta_performer_unittest.cc
@@ -18,6 +18,7 @@
 
 #include <endian.h>
 #include <inttypes.h>
+#include <time.h>
 
 #include <string>
 #include <vector>
@@ -736,16 +737,17 @@
 }
 
 TEST_F(DeltaPerformerTest, BrilloMetadataSignatureSizeTest) {
+  unsigned int seed = time(nullptr);
   EXPECT_TRUE(performer_.Write(kDeltaMagic, sizeof(kDeltaMagic)));
 
   uint64_t major_version = htobe64(kBrilloMajorPayloadVersion);
   EXPECT_TRUE(performer_.Write(&major_version, 8));
 
-  uint64_t manifest_size = 222;
+  uint64_t manifest_size = rand_r(&seed) % 256;
   uint64_t manifest_size_be = htobe64(manifest_size);
   EXPECT_TRUE(performer_.Write(&manifest_size_be, 8));
 
-  uint32_t metadata_signature_size = 111;
+  uint32_t metadata_signature_size = rand_r(&seed) % 256;
   uint32_t metadata_signature_size_be = htobe32(metadata_signature_size);
   EXPECT_TRUE(performer_.Write(&metadata_signature_size_be, 4));
 
@@ -847,8 +849,10 @@
   // Non-official build, non-existing public-key, key in response -> true
   fake_hardware_.SetIsOfficialBuild(false);
   performer_.public_key_path_ = non_existing_file;
-  // result of 'echo "Test" | base64'
-  install_plan_.public_key_rsa = "VGVzdAo=";
+  // This is the result of 'echo "Test" | base64' and is not meant to be a
+  // valid public key, but it is valid base-64.
+  constexpr char kBase64TestKey[] = "VGVzdAo=";
+  install_plan_.public_key_rsa = kBase64TestKey;
   EXPECT_TRUE(performer_.GetPublicKeyFromResponse(&key_path));
   EXPECT_FALSE(key_path.empty());
   EXPECT_EQ(unlink(key_path.value().c_str()), 0);
@@ -859,8 +863,7 @@
   // Non-official build, existing public-key, key in response -> false
   fake_hardware_.SetIsOfficialBuild(false);
   performer_.public_key_path_ = existing_file;
-  // result of 'echo "Test" | base64'
-  install_plan_.public_key_rsa = "VGVzdAo=";
+  install_plan_.public_key_rsa = kBase64TestKey;
   EXPECT_FALSE(performer_.GetPublicKeyFromResponse(&key_path));
   // Same with official build -> false
   fake_hardware_.SetIsOfficialBuild(true);
diff --git a/payload_consumer/payload_constants.cc b/payload_consumer/payload_constants.cc
index 9338d29..e679316 100644
--- a/payload_consumer/payload_constants.cc
+++ b/payload_consumer/payload_constants.cc
@@ -25,7 +25,8 @@
 const uint32_t kInPlaceMinorPayloadVersion = 1;
 const uint32_t kSourceMinorPayloadVersion = 2;
 const uint32_t kOpSrcHashMinorPayloadVersion = 3;
-const uint32_t kPuffdiffMinorPayloadVersion = 4;
+const uint32_t kBrotliBsdiffMinorPayloadVersion = 4;
+const uint32_t kPuffdiffMinorPayloadVersion = 5;
 
 const uint64_t kMaxPayloadHeaderSize = 24;
 
diff --git a/payload_consumer/payload_constants.h b/payload_consumer/payload_constants.h
index b3bd5e7..ac3e882 100644
--- a/payload_consumer/payload_constants.h
+++ b/payload_consumer/payload_constants.h
@@ -43,6 +43,9 @@
 // The minor version that allows per-operation source hash.
 extern const uint32_t kOpSrcHashMinorPayloadVersion;
 
+// The minor version that allows BROTLI_BSDIFF, ZERO and DISCARD operations.
+extern const uint32_t kBrotliBsdiffMinorPayloadVersion;
+
 // The minor version that allows PUFFDIFF operation.
 extern const uint32_t kPuffdiffMinorPayloadVersion;
 
diff --git a/payload_generator/cycle_breaker.cc b/payload_generator/cycle_breaker.cc
index 52a6f60..a8a04ab 100644
--- a/payload_generator/cycle_breaker.cc
+++ b/payload_generator/cycle_breaker.cc
@@ -18,14 +18,15 @@
 
 #include <inttypes.h>
 
+#include <limits>
 #include <set>
 #include <string>
 #include <utility>
 
+#include <base/stl_util.h>
 #include <base/strings/string_util.h>
 #include <base/strings/stringprintf.h>
 
-#include "update_engine/common/utils.h"
 #include "update_engine/payload_generator/graph_utils.h"
 #include "update_engine/payload_generator/tarjan.h"
 
@@ -83,7 +84,7 @@
            jt != component_indexes.end(); ++jt) {
         // If there's a link from *it -> *jt in the graph,
         // add a subgraph_ edge
-        if (utils::MapContainsKey(subgraph_[*it].out_edges, *jt))
+        if (base::ContainsKey(subgraph_[*it].out_edges, *jt))
           subgraph_[*it].subgraph_edges.insert(*jt);
       }
     }
@@ -146,7 +147,7 @@
   for (vector<Vertex::Index>::const_iterator it = ++stack_.begin(),
            e = stack_.end(); it != e; ++it) {
     Edge edge = make_pair(*(it - 1), *it);
-    if (utils::SetContainsKey(cut_edges_, edge)) {
+    if (base::ContainsKey(cut_edges_, edge)) {
       return true;
     }
   }
diff --git a/payload_generator/cycle_breaker_unittest.cc b/payload_generator/cycle_breaker_unittest.cc
index e92bc30..7554dbb 100644
--- a/payload_generator/cycle_breaker_unittest.cc
+++ b/payload_generator/cycle_breaker_unittest.cc
@@ -22,9 +22,9 @@
 #include <vector>
 
 #include <base/logging.h>
+#include <base/stl_util.h>
 #include <gtest/gtest.h>
 
-#include "update_engine/common/utils.h"
 #include "update_engine/payload_generator/graph_types.h"
 
 using std::make_pair;
@@ -83,14 +83,14 @@
   // C->D->E
   // G->H
 
-  EXPECT_TRUE(utils::SetContainsKey(broken_edges, make_pair(n_a, n_e)) ||
-              utils::SetContainsKey(broken_edges, make_pair(n_e, n_b)) ||
-              utils::SetContainsKey(broken_edges, make_pair(n_b, n_a)));
-  EXPECT_TRUE(utils::SetContainsKey(broken_edges, make_pair(n_c, n_d)) ||
-              utils::SetContainsKey(broken_edges, make_pair(n_d, n_e)) ||
-              utils::SetContainsKey(broken_edges, make_pair(n_e, n_c)));
-  EXPECT_TRUE(utils::SetContainsKey(broken_edges, make_pair(n_g, n_h)) ||
-              utils::SetContainsKey(broken_edges, make_pair(n_h, n_g)));
+  EXPECT_TRUE(base::ContainsKey(broken_edges, make_pair(n_a, n_e)) ||
+              base::ContainsKey(broken_edges, make_pair(n_e, n_b)) ||
+              base::ContainsKey(broken_edges, make_pair(n_b, n_a)));
+  EXPECT_TRUE(base::ContainsKey(broken_edges, make_pair(n_c, n_d)) ||
+              base::ContainsKey(broken_edges, make_pair(n_d, n_e)) ||
+              base::ContainsKey(broken_edges, make_pair(n_e, n_c)));
+  EXPECT_TRUE(base::ContainsKey(broken_edges, make_pair(n_g, n_h)) ||
+              base::ContainsKey(broken_edges, make_pair(n_h, n_g)));
   EXPECT_EQ(3U, broken_edges.size());
 }
 
@@ -217,11 +217,11 @@
   breaker.BreakCycles(graph, &broken_edges);
 
   // These are required to be broken:
-  EXPECT_TRUE(utils::SetContainsKey(broken_edges, make_pair(n_b, n_a)));
-  EXPECT_TRUE(utils::SetContainsKey(broken_edges, make_pair(n_b, n_c)));
-  EXPECT_TRUE(utils::SetContainsKey(broken_edges, make_pair(n_d, n_e)));
-  EXPECT_TRUE(utils::SetContainsKey(broken_edges, make_pair(n_f, n_g)));
-  EXPECT_TRUE(utils::SetContainsKey(broken_edges, make_pair(n_h, n_i)));
+  EXPECT_TRUE(base::ContainsKey(broken_edges, make_pair(n_b, n_a)));
+  EXPECT_TRUE(base::ContainsKey(broken_edges, make_pair(n_b, n_c)));
+  EXPECT_TRUE(base::ContainsKey(broken_edges, make_pair(n_d, n_e)));
+  EXPECT_TRUE(base::ContainsKey(broken_edges, make_pair(n_f, n_g)));
+  EXPECT_TRUE(base::ContainsKey(broken_edges, make_pair(n_h, n_i)));
 }
 
 TEST(CycleBreakerTest, UnblockGraphTest) {
@@ -248,8 +248,8 @@
   breaker.BreakCycles(graph, &broken_edges);
 
   // These are required to be broken:
-  EXPECT_TRUE(utils::SetContainsKey(broken_edges, make_pair(n_a, n_b)));
-  EXPECT_TRUE(utils::SetContainsKey(broken_edges, make_pair(n_a, n_c)));
+  EXPECT_TRUE(base::ContainsKey(broken_edges, make_pair(n_a, n_b)));
+  EXPECT_TRUE(base::ContainsKey(broken_edges, make_pair(n_a, n_c)));
 }
 
 TEST(CycleBreakerTest, SkipOpsTest) {
diff --git a/payload_generator/inplace_generator.cc b/payload_generator/inplace_generator.cc
index b858c2b..febdcce 100644
--- a/payload_generator/inplace_generator.cc
+++ b/payload_generator/inplace_generator.cc
@@ -23,6 +23,8 @@
 #include <utility>
 #include <vector>
 
+#include <base/stl_util.h>
+
 #include "update_engine/common/utils.h"
 #include "update_engine/payload_consumer/payload_constants.h"
 #include "update_engine/payload_generator/cycle_breaker.h"
@@ -341,7 +343,7 @@
   vector<Vertex::Index> new_op_indexes;
   new_op_indexes.reserve(op_indexes->size());
   for (Vertex::Index vertex_index : *op_indexes) {
-    if (utils::SetContainsKey(deleted_nodes, vertex_index))
+    if (base::ContainsKey(deleted_nodes, vertex_index))
       continue;
     new_op_indexes.push_back(vertex_index);
   }
diff --git a/payload_generator/payload_generation_config.cc b/payload_generator/payload_generation_config.cc
index 2c5d6d6..15d4ab5 100644
--- a/payload_generator/payload_generation_config.cc
+++ b/payload_generator/payload_generation_config.cc
@@ -128,6 +128,7 @@
                         minor == kInPlaceMinorPayloadVersion ||
                         minor == kSourceMinorPayloadVersion ||
                         minor == kOpSrcHashMinorPayloadVersion ||
+                        minor == kBrotliBsdiffMinorPayloadVersion ||
                         minor == kPuffdiffMinorPayloadVersion);
   return true;
 }
@@ -151,7 +152,7 @@
       // The implementation of these operations had a bug in earlier versions
       // that prevents them from being used in any payload. We will enable
       // them for delta payloads for now.
-      return minor >= kPuffdiffMinorPayloadVersion;
+      return minor >= kBrotliBsdiffMinorPayloadVersion;
 
     // Delta operations:
     case InstallOperation::MOVE:
@@ -166,10 +167,10 @@
       return minor >= kSourceMinorPayloadVersion;
 
     case InstallOperation::BROTLI_BSDIFF:
-      return minor >= kPuffdiffMinorPayloadVersion;
-    // TODO(*) Revert the disablement of puffdiff after we fix b/72815313.
+      return minor >= kBrotliBsdiffMinorPayloadVersion;
+
     case InstallOperation::PUFFDIFF:
-      return false;
+      return minor >= kPuffdiffMinorPayloadVersion;
   }
   return false;
 }
diff --git a/payload_generator/tarjan.cc b/payload_generator/tarjan.cc
index 98e29f9..d99ae12 100644
--- a/payload_generator/tarjan.cc
+++ b/payload_generator/tarjan.cc
@@ -19,8 +19,7 @@
 #include <vector>
 
 #include <base/logging.h>
-
-#include "update_engine/common/utils.h"
+#include <base/stl_util.h>
 
 using std::min;
 using std::vector;
@@ -59,7 +58,7 @@
       Tarjan(vertex_next, graph);
       (*graph)[vertex].lowlink = min((*graph)[vertex].lowlink,
                                      (*graph)[vertex_next].lowlink);
-    } else if (utils::VectorContainsValue(stack_, vertex_next)) {
+    } else if (base::ContainsValue(stack_, vertex_next)) {
       (*graph)[vertex].lowlink = min((*graph)[vertex].lowlink,
                                      (*graph)[vertex_next].index);
     }
@@ -73,7 +72,7 @@
       component.push_back(other_vertex);
     } while (other_vertex != vertex && !stack_.empty());
 
-    if (utils::VectorContainsValue(component, required_vertex_)) {
+    if (base::ContainsValue(component, required_vertex_)) {
       components_.resize(components_.size() + 1);
       component.swap(components_.back());
     }
diff --git a/payload_generator/tarjan_unittest.cc b/payload_generator/tarjan_unittest.cc
index c29cbdc..b271227 100644
--- a/payload_generator/tarjan_unittest.cc
+++ b/payload_generator/tarjan_unittest.cc
@@ -20,9 +20,9 @@
 #include <utility>
 
 #include <base/logging.h>
+#include <base/stl_util.h>
 #include <gtest/gtest.h>
 
-#include "update_engine/common/utils.h"
 #include "update_engine/payload_generator/graph_types.h"
 
 using std::make_pair;
@@ -66,11 +66,11 @@
     tarjan.Execute(i, &graph, &vertex_indexes);
 
     EXPECT_EQ(5U, vertex_indexes.size());
-    EXPECT_TRUE(utils::VectorContainsValue(vertex_indexes, n_a));
-    EXPECT_TRUE(utils::VectorContainsValue(vertex_indexes, n_b));
-    EXPECT_TRUE(utils::VectorContainsValue(vertex_indexes, n_c));
-    EXPECT_TRUE(utils::VectorContainsValue(vertex_indexes, n_d));
-    EXPECT_TRUE(utils::VectorContainsValue(vertex_indexes, n_e));
+    EXPECT_TRUE(base::ContainsValue(vertex_indexes, n_a));
+    EXPECT_TRUE(base::ContainsValue(vertex_indexes, n_b));
+    EXPECT_TRUE(base::ContainsValue(vertex_indexes, n_c));
+    EXPECT_TRUE(base::ContainsValue(vertex_indexes, n_d));
+    EXPECT_TRUE(base::ContainsValue(vertex_indexes, n_e));
   }
 
   {
@@ -78,7 +78,7 @@
     tarjan.Execute(n_f, &graph, &vertex_indexes);
 
     EXPECT_EQ(1U, vertex_indexes.size());
-    EXPECT_TRUE(utils::VectorContainsValue(vertex_indexes, n_f));
+    EXPECT_TRUE(base::ContainsValue(vertex_indexes, n_f));
   }
 
   for (Vertex::Index i = n_g; i <= n_h; i++) {
@@ -86,8 +86,8 @@
     tarjan.Execute(i, &graph, &vertex_indexes);
 
     EXPECT_EQ(2U, vertex_indexes.size());
-    EXPECT_TRUE(utils::VectorContainsValue(vertex_indexes, n_g));
-    EXPECT_TRUE(utils::VectorContainsValue(vertex_indexes, n_h));
+    EXPECT_TRUE(base::ContainsValue(vertex_indexes, n_g));
+    EXPECT_TRUE(base::ContainsValue(vertex_indexes, n_h));
   }
 }
 
diff --git a/payload_generator/xz_chromeos.cc b/payload_generator/xz_chromeos.cc
index a8cda4e..2ff9458 100644
--- a/payload_generator/xz_chromeos.cc
+++ b/payload_generator/xz_chromeos.cc
@@ -16,13 +16,39 @@
 
 #include "update_engine/payload_generator/xz.h"
 
+#include <base/logging.h>
+#include <lzma.h>
+
 namespace chromeos_update_engine {
 
 void XzCompressInit() {}
 
 bool XzCompress(const brillo::Blob& in, brillo::Blob* out) {
-  // No Xz compressor implementation in Chrome OS delta_generator builds.
-  return false;
+  out->clear();
+  if (in.empty())
+    return true;
+
+  // Resize the output buffer to get enough memory for writing the compressed
+  // data.
+  out->resize(lzma_stream_buffer_bound(in.size()));
+
+  const uint32_t kLzmaPreset = 6;
+  size_t out_pos = 0;
+  int rc = lzma_easy_buffer_encode(kLzmaPreset,
+                                   LZMA_CHECK_NONE,  // We do not need CRC.
+                                   nullptr,
+                                   in.data(),
+                                   in.size(),
+                                   out->data(),
+                                   &out_pos,
+                                   out->size());
+  if (rc != LZMA_OK) {
+    LOG(ERROR) << "Failed to compress data to LZMA stream with return code: "
+               << rc;
+    return false;
+  }
+  out->resize(out_pos);
+  return true;
 }
 
 }  // namespace chromeos_update_engine
diff --git a/payload_generator/zip_unittest.cc b/payload_generator/zip_unittest.cc
index c750eb7..5b0d5da 100644
--- a/payload_generator/zip_unittest.cc
+++ b/payload_generator/zip_unittest.cc
@@ -115,12 +115,7 @@
   }
 };
 
-#ifdef __ANDROID__
 typedef ::testing::Types<BzipTest, XzTest> ZipTestTypes;
-#else
-// Chrome OS implementation of Xz compressor just returns false.
-typedef ::testing::Types<BzipTest> ZipTestTypes;
-#endif  // __ANDROID__
 
 TYPED_TEST_CASE(ZipTest, ZipTestTypes);
 
diff --git a/scripts/blockdiff.py b/scripts/blockdiff.py
index 1dc60a6..5793def 100755
--- a/scripts/blockdiff.py
+++ b/scripts/blockdiff.py
@@ -1,14 +1,26 @@
 #!/usr/bin/python2
 #
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 """Block diff utility."""
 
 from __future__ import print_function
 
-import optparse
+# pylint: disable=import-error
+import argparse
 import sys
 
 
@@ -71,28 +83,25 @@
 
 def main(argv):
   # Parse command-line arguments.
-  parser = optparse.OptionParser(
-      usage='Usage: %prog FILE1 FILE2',
-      description='Compare FILE1 and FILE2 by blocks.')
+  parser = argparse.ArgumentParser(
+      description='Compare FILE1 and FILE2 by blocks.',
+      formatter_class=argparse.ArgumentDefaultsHelpFormatter)
 
-  parser.add_option('-b', '--block-size', metavar='NUM', type=int, default=4096,
-                    help='the block size to use (default: %default)')
-  parser.add_option('-m', '--max-length', metavar='NUM', type=int, default=-1,
-                    help='maximum number of bytes to compared')
+  parser.add_argument('-b', '--block-size', metavar='NUM', type=int,
+                      default=4096, help='the block size to use')
+  parser.add_argument('-m', '--max-length', metavar='NUM', type=int, default=-1,
+                      help='maximum number of bytes to compare')
+  parser.add_argument('file1', metavar='FILE1')
+  parser.add_argument('file2', metavar='FILE2')
 
-  opts, args = parser.parse_args(argv[1:])
-
-  try:
-    name1, name2 = args
-  except ValueError:
-    parser.error('unexpected number of arguments')
+  args = parser.parse_args(argv[1:])
 
   # Perform the block diff.
   try:
-    with open(name1) as file1:
-      with open(name2) as file2:
-        diff_list = BlockDiff(opts.block_size, file1, file2, name1, name2,
-                              opts.max_length)
+    with open(args.file1) as file1:
+      with open(args.file2) as file2:
+        diff_list = BlockDiff(args.block_size, file1, file2,
+                              args.file1, args.file2, args.max_length)
   except BlockDiffError as e:
     print('Error: ' % e, file=sys.stderr)
     return 2
diff --git a/scripts/paycheck.py b/scripts/paycheck.py
index 8df1bf0..96b1032 100755
--- a/scripts/paycheck.py
+++ b/scripts/paycheck.py
@@ -1,16 +1,32 @@
 #!/usr/bin/python2
 #
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 """Command-line tool for checking and applying Chrome OS update payloads."""
 
 from __future__ import print_function
 
-import optparse
+# pylint: disable=import-error
+import argparse
+import filecmp
 import os
 import sys
+import tempfile
+
+from update_payload import error
 
 lib_dir = os.path.join(os.path.dirname(__file__), 'lib')
 if os.path.exists(lib_dir) and os.path.isdir(lib_dir):
@@ -29,17 +45,12 @@
     argv: command-line arguments to parse (excluding the program name)
 
   Returns:
-    A tuple (opts, payload, extra_args), where `opts' are the options
-    returned by the parser, `payload' is the name of the payload file
-    (mandatory argument) and `extra_args' are any additional command-line
-    arguments.
+    Returns the arguments returned by the argument parser.
   """
-  parser = optparse.OptionParser(
-      usage=('Usage: %prog [OPTION...] PAYLOAD [DST_KERN DST_ROOT '
-             '[SRC_KERN SRC_ROOT]]'),
-      description=('Applies a Chrome OS update PAYLOAD to SRC_KERN and '
-                   'SRC_ROOT emitting DST_KERN and DST_ROOT, respectively. '
-                   'SRC_KERN and SRC_ROOT are only needed for delta payloads. '
+  parser = argparse.ArgumentParser(
+      description=('Applies a Chrome OS update PAYLOAD to src_kern and '
+                   'src_root emitting dst_kern and dst_root, respectively. '
+                   'src_kern and src_root are only needed for delta payloads. '
                    'When no partitions are provided, verifies the payload '
                    'integrity.'),
       epilog=('Note: a payload may verify correctly but fail to apply, and '
@@ -47,186 +58,205 @@
               'vs dynamic correctness. A payload that both verifies and '
               'applies correctly should be safe for use by the Chrome OS '
               'Update Engine. Use --check to verify a payload prior to '
-              'applying it.'))
+              'applying it.'),
+      formatter_class=argparse.RawDescriptionHelpFormatter
+  )
 
-  check_opts = optparse.OptionGroup(parser, 'Checking payload integrity')
-  check_opts.add_option('-c', '--check', action='store_true', default=False,
-                        help=('force payload integrity check (e.g. before '
-                              'applying)'))
-  check_opts.add_option('-D', '--describe', action='store_true', default=False,
-                        help='Print a friendly description of the payload.')
-  check_opts.add_option('-r', '--report', metavar='FILE',
-                        help="dump payload report (`-' for stdout)")
-  check_opts.add_option('-t', '--type', metavar='TYPE', dest='assert_type',
-                        help=("assert that payload is either `%s' or `%s'" %
-                              (_TYPE_FULL, _TYPE_DELTA)))
-  check_opts.add_option('-z', '--block-size', metavar='NUM', default=0,
-                        type='int',
-                        help='assert a non-default (4096) payload block size')
-  check_opts.add_option('-u', '--allow-unhashed', action='store_true',
-                        default=False, help='allow unhashed operations')
-  check_opts.add_option('-d', '--disabled_tests', metavar='TESTLIST',
-                        default=(),
-                        help=('comma-separated list of tests to disable; '
-                              'available values: ' +
-                              ', '.join(update_payload.CHECKS_TO_DISABLE)))
-  check_opts.add_option('-k', '--key', metavar='FILE',
-                        help=('Override standard key used for signature '
-                              'validation'))
-  check_opts.add_option('-m', '--meta-sig', metavar='FILE',
-                        help='verify metadata against its signature')
-  check_opts.add_option('-p', '--root-part-size', metavar='NUM',
-                        default=0, type='int',
-                        help=('override rootfs partition size auto-inference'))
-  check_opts.add_option('-P', '--kern-part-size', metavar='NUM',
-                        default=0, type='int',
-                        help=('override kernel partition size auto-inference'))
-  parser.add_option_group(check_opts)
+  check_args = parser.add_argument_group('Checking payload integrity')
+  check_args.add_argument('-c', '--check', action='store_true', default=False,
+                          help=('force payload integrity check (e.g. before '
+                                'applying)'))
+  check_args.add_argument('-D', '--describe', action='store_true',
+                          default=False,
+                          help='Print a friendly description of the payload.')
+  check_args.add_argument('-r', '--report', metavar='FILE',
+                          help="dump payload report (`-' for stdout)")
+  check_args.add_argument('-t', '--type', dest='assert_type',
+                          help='assert the payload type',
+                          choices=[_TYPE_FULL, _TYPE_DELTA])
+  check_args.add_argument('-z', '--block-size', metavar='NUM', default=0,
+                          type=int,
+                          help='assert a non-default (4096) payload block size')
+  check_args.add_argument('-u', '--allow-unhashed', action='store_true',
+                          default=False, help='allow unhashed operations')
+  check_args.add_argument('-d', '--disabled_tests', default=(), metavar='',
+                          help=('space separated list of tests to disable. '
+                                'allowed options include: ' +
+                                ', '.join(update_payload.CHECKS_TO_DISABLE)),
+                          choices=update_payload.CHECKS_TO_DISABLE)
+  check_args.add_argument('-k', '--key', metavar='FILE',
+                          help=('override standard key used for signature '
+                                'validation'))
+  check_args.add_argument('-m', '--meta-sig', metavar='FILE',
+                          help='verify metadata against its signature')
+  check_args.add_argument('-p', '--root-part-size', metavar='NUM',
+                          default=0, type=int,
+                          help='override rootfs partition size auto-inference')
+  check_args.add_argument('-P', '--kern-part-size', metavar='NUM',
+                          default=0, type=int,
+                          help='override kernel partition size auto-inference')
 
-  trace_opts = optparse.OptionGroup(parser, 'Applying payload')
-  trace_opts.add_option('-x', '--extract-bsdiff', action='store_true',
-                        default=False,
-                        help=('use temp input/output files with BSDIFF '
-                              'operations (not in-place)'))
-  trace_opts.add_option('--bspatch-path', metavar='FILE',
-                        help=('use the specified bspatch binary'))
-  trace_opts.add_option('--puffpatch-path', metavar='FILE',
-                        help=('use the specified puffpatch binary'))
-  parser.add_option_group(trace_opts)
+  apply_args = parser.add_argument_group('Applying payload')
+  # TODO(ahassani): Extend extract-bsdiff to puffdiff too.
+  apply_args.add_argument('-x', '--extract-bsdiff', action='store_true',
+                          default=False,
+                          help=('use temp input/output files with BSDIFF '
+                                'operations (not in-place)'))
+  apply_args.add_argument('--bspatch-path', metavar='FILE',
+                          help='use the specified bspatch binary')
+  apply_args.add_argument('--puffpatch-path', metavar='FILE',
+                          help='use the specified puffpatch binary')
+  apply_args.add_argument('--dst_kern', metavar='FILE',
+                          help='destination kernel partition file')
+  apply_args.add_argument('--dst_root', metavar='FILE',
+                          help='destination root partition file')
+  apply_args.add_argument('--src_kern', metavar='FILE',
+                          help='source kernel partition file')
+  apply_args.add_argument('--src_root', metavar='FILE',
+                          help='source root partition file')
+  apply_args.add_argument('--out_dst_kern', metavar='FILE',
+                          help='created destination kernel partition file')
+  apply_args.add_argument('--out_dst_root', metavar='FILE',
+                          help='created destination root partition file')
 
-  trace_opts = optparse.OptionGroup(parser, 'Block tracing')
-  trace_opts.add_option('-b', '--root-block', metavar='BLOCK', type='int',
-                        help='trace the origin for a rootfs block')
-  trace_opts.add_option('-B', '--kern-block', metavar='BLOCK', type='int',
-                        help='trace the origin for a kernel block')
-  trace_opts.add_option('-s', '--skip', metavar='NUM', default='0', type='int',
-                        help='skip first NUM occurrences of traced block')
-  parser.add_option_group(trace_opts)
+  parser.add_argument('payload', metavar='PAYLOAD', help='the payload file')
 
   # Parse command-line arguments.
-  opts, args = parser.parse_args(argv)
-
-  # Validate a value given to --type, if any.
-  if opts.assert_type not in (None, _TYPE_FULL, _TYPE_DELTA):
-    parser.error('invalid argument to --type: %s' % opts.assert_type)
-
-  # Convert and validate --disabled_tests value list, if provided.
-  if opts.disabled_tests:
-    opts.disabled_tests = opts.disabled_tests.split(',')
-    for test in opts.disabled_tests:
-      if test not in update_payload.CHECKS_TO_DISABLE:
-        parser.error('invalid argument to --disabled_tests: %s' % test)
-
-  # Ensure consistent use of block tracing options.
-  do_block_trace = not (opts.root_block is None and opts.kern_block is None)
-  if opts.skip and not do_block_trace:
-    parser.error('--skip must be used with either --root-block or --kern-block')
+  args = parser.parse_args(argv)
 
   # There are several options that imply --check.
-  opts.check = (opts.check or opts.report or opts.assert_type or
-                opts.block_size or opts.allow_unhashed or
-                opts.disabled_tests or opts.meta_sig or opts.key or
-                opts.root_part_size or opts.kern_part_size)
+  args.check = (args.check or args.report or args.assert_type or
+                args.block_size or args.allow_unhashed or
+                args.disabled_tests or args.meta_sig or args.key or
+                args.root_part_size or args.kern_part_size)
 
-  # Check number of arguments, enforce payload type accordingly.
-  if len(args) == 3:
-    if opts.assert_type == _TYPE_DELTA:
-      parser.error('%s payload requires source partition arguments' %
-                   _TYPE_DELTA)
-    opts.assert_type = _TYPE_FULL
-  elif len(args) == 5:
-    if opts.assert_type == _TYPE_FULL:
-      parser.error('%s payload does not accept source partition arguments' %
-                   _TYPE_FULL)
-    opts.assert_type = _TYPE_DELTA
-  elif len(args) == 1:
-    # Not applying payload; if block tracing not requested either, do an
-    # integrity check.
-    if not do_block_trace:
-      opts.check = True
-    if opts.extract_bsdiff:
-      parser.error('--extract-bsdiff can only be used when applying payloads')
-    if opts.bspatch_path:
-      parser.error('--bspatch-path can only be used when applying payloads')
-    if opts.puffpatch_path:
-      parser.error('--puffpatch-path can only be used when applying payloads')
+  # Check the arguments, enforce payload type accordingly.
+  if (args.src_kern is None) != (args.src_root is None):
+    parser.error('--src_kern and --src_root should be given together')
+  if (args.dst_kern is None) != (args.dst_root is None):
+    parser.error('--dst_kern and --dst_root should be given together')
+  if (args.out_dst_kern is None) != (args.out_dst_root is None):
+    parser.error('--out_dst_kern and --out_dst_root should be given together')
+
+  if (args.dst_kern and args.dst_root) or \
+     (args.out_dst_kern and args.out_dst_root):
+    if args.src_kern and args.src_root:
+      if args.assert_type == _TYPE_FULL:
+        parser.error('%s payload does not accept source partition arguments'
+                     % _TYPE_FULL)
+      else:
+        args.assert_type = _TYPE_DELTA
+    else:
+      if args.assert_type == _TYPE_DELTA:
+        parser.error('%s payload requires source partitions arguments'
+                     % _TYPE_DELTA)
+      else:
+        args.assert_type = _TYPE_FULL
   else:
-    parser.error('unexpected number of arguments')
+    # Not applying payload.
+    if args.extract_bsdiff:
+      parser.error('--extract-bsdiff can only be used when applying payloads')
+    if args.bspatch_path:
+      parser.error('--bspatch-path can only be used when applying payloads')
+    if args.puffpatch_path:
+      parser.error('--puffpatch-path can only be used when applying payloads')
 
   # By default, look for a metadata-signature file with a name based on the name
   # of the payload we are checking. We only do it if check was triggered.
-  if opts.check and not opts.meta_sig:
-    default_meta_sig = args[0] + '.metadata-signature'
+  if args.check and not args.meta_sig:
+    default_meta_sig = args.payload + '.metadata-signature'
     if os.path.isfile(default_meta_sig):
-      opts.meta_sig = default_meta_sig
-      print('Using default metadata signature', opts.meta_sig, file=sys.stderr)
+      args.meta_sig = default_meta_sig
+      print('Using default metadata signature', args.meta_sig, file=sys.stderr)
 
-  return opts, args[0], args[1:]
+  return args
 
 
 def main(argv):
   # Parse and validate arguments.
-  options, payload_file_name, extra_args = ParseArguments(argv[1:])
+  args = ParseArguments(argv[1:])
 
-  with open(payload_file_name) as payload_file:
+  with open(args.payload) as payload_file:
     payload = update_payload.Payload(payload_file)
     try:
       # Initialize payload.
       payload.Init()
 
-      if options.describe:
+      if args.describe:
         payload.Describe()
 
       # Perform payload integrity checks.
-      if options.check:
+      if args.check:
         report_file = None
         do_close_report_file = False
         metadata_sig_file = None
         try:
-          if options.report:
-            if options.report == '-':
+          if args.report:
+            if args.report == '-':
               report_file = sys.stdout
             else:
-              report_file = open(options.report, 'w')
+              report_file = open(args.report, 'w')
               do_close_report_file = True
 
-          metadata_sig_file = options.meta_sig and open(options.meta_sig)
+          metadata_sig_file = args.meta_sig and open(args.meta_sig)
           payload.Check(
-              pubkey_file_name=options.key,
+              pubkey_file_name=args.key,
               metadata_sig_file=metadata_sig_file,
               report_out_file=report_file,
-              assert_type=options.assert_type,
-              block_size=int(options.block_size),
-              rootfs_part_size=options.root_part_size,
-              kernel_part_size=options.kern_part_size,
-              allow_unhashed=options.allow_unhashed,
-              disabled_tests=options.disabled_tests)
+              assert_type=args.assert_type,
+              block_size=int(args.block_size),
+              rootfs_part_size=args.root_part_size,
+              kernel_part_size=args.kern_part_size,
+              allow_unhashed=args.allow_unhashed,
+              disabled_tests=args.disabled_tests)
         finally:
           if metadata_sig_file:
             metadata_sig_file.close()
           if do_close_report_file:
             report_file.close()
 
-      # Trace blocks.
-      if options.root_block is not None:
-        payload.TraceBlock(options.root_block, options.skip, sys.stdout, False)
-      if options.kern_block is not None:
-        payload.TraceBlock(options.kern_block, options.skip, sys.stdout, True)
-
       # Apply payload.
-      if extra_args:
-        dargs = {'bsdiff_in_place': not options.extract_bsdiff}
-        if options.bspatch_path:
-          dargs['bspatch_path'] = options.bspatch_path
-        if options.puffpatch_path:
-          dargs['puffpatch_path'] = options.puffpatch_path
-        if options.assert_type == _TYPE_DELTA:
-          dargs['old_kernel_part'] = extra_args[2]
-          dargs['old_rootfs_part'] = extra_args[3]
+      if (args.dst_root and args.dst_kern) or \
+         (args.out_dst_root and args.out_dst_kern):
+        dargs = {'bsdiff_in_place': not args.extract_bsdiff}
+        if args.bspatch_path:
+          dargs['bspatch_path'] = args.bspatch_path
+        if args.puffpatch_path:
+          dargs['puffpatch_path'] = args.puffpatch_path
+        if args.assert_type == _TYPE_DELTA:
+          dargs['old_kernel_part'] = args.src_kern
+          dargs['old_rootfs_part'] = args.src_root
 
-        payload.Apply(extra_args[0], extra_args[1], **dargs)
+        if args.out_dst_kern and args.out_dst_root:
+          out_dst_kern = open(args.out_dst_kern, 'w+')
+          out_dst_root = open(args.out_dst_root, 'w+')
+        else:
+          out_dst_kern = tempfile.NamedTemporaryFile()
+          out_dst_root = tempfile.NamedTemporaryFile()
 
-    except update_payload.PayloadError, e:
+        payload.Apply(out_dst_kern.name, out_dst_root.name, **dargs)
+
+        # If destination kernel and rootfs partitions are not given, then this
+        # just becomes an apply operation with no check.
+        if args.dst_kern and args.dst_root:
+          # Prior to comparing, add the unused space past the filesystem
+          # boundary in the new target partitions to become the same size as
+          # the given partitions. This will truncate to larger size.
+          out_dst_kern.truncate(os.path.getsize(args.dst_kern))
+          out_dst_root.truncate(os.path.getsize(args.dst_root))
+
+          # Compare resulting partitions with the ones from the target image.
+          if not filecmp.cmp(out_dst_kern.name, args.dst_kern):
+            raise error.PayloadError('Resulting kernel partition corrupted.')
+          if not filecmp.cmp(out_dst_root.name, args.dst_root):
+            raise error.PayloadError('Resulting rootfs partition corrupted.')
+
+        # Close the output files. If args.out_dst_* was not given, then these
+        # files are created as temp files and will be deleted upon close().
+        out_dst_kern.close()
+        out_dst_root.close()
+
+    except error.PayloadError, e:
       sys.stderr.write('Error: %s\n' % e)
       return 1
 
diff --git a/scripts/test_paycheck.sh b/scripts/test_paycheck.sh
index c395db4..e578f85 100755
--- a/scripts/test_paycheck.sh
+++ b/scripts/test_paycheck.sh
@@ -1,8 +1,19 @@
 #!/bin/bash
 #
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 # A test script for paycheck.py and the update_payload.py library.
 #
@@ -21,9 +32,6 @@
 #   payload type. Another artifact is a human-readable payload report, which
 #   is output to stdout to be inspected by the user.
 #
-# - It performs a random block trace on the delta payload (both kernel and
-#   rootfs blocks), dumping the traces to stdout for the user to inspect.
-#
 # - It applies old_full_payload to yield old kernel (old_kern.part) and rootfs
 #   (old_root.part) partitions.
 #
@@ -37,11 +45,9 @@
 #   ensure that they are binary identical.
 #
 # If all steps have completed successfully we know with high certainty that
-# paycheck.py (and hence update_payload.py) correctly parses both full and
-# delta payloads, and applies them to yield the expected result. We also know
-# that tracing works, to the extent it does not crash. Manual inspection of
-# payload reports and block traces will improve this our confidence and are
-# strongly encouraged. Finally, each paycheck.py execution is timed.
+# paycheck.py (and hence update_payload.py) correctly parses both full and delta
+# payloads, and applies them to yield the expected result. Finally, each
+# paycheck.py execution is timed.
 
 
 # Stop on errors, unset variables.
@@ -80,35 +86,28 @@
   time ${paycheck} -t ${payload_type} ${payload_file}
 }
 
-trace_kern_block() {
-  payload_file=$1
-  block=$2
-  time ${paycheck} -B ${block} ${payload_file}
-}
-
-trace_root_block() {
-  payload_file=$1
-  block=$2
-  time ${paycheck} -b ${block} ${payload_file}
-}
-
 apply_full_payload() {
   payload_file=$1
-  dst_kern_part="$2/$3"
-  dst_root_part="$2/$4"
+  out_dst_kern_part="$2/$3"
+  out_dst_root_part="$2/$4"
 
-  time ${paycheck} ${payload_file} ${dst_kern_part} ${dst_root_part}
+  time ${paycheck} ${payload_file} \
+    --out_dst_kern ${out_dst_kern_part} --out_dst_root ${out_dst_root_part}
 }
 
 apply_delta_payload() {
   payload_file=$1
-  dst_kern_part="$2/$3"
-  dst_root_part="$2/$4"
-  src_kern_part="$2/$5"
-  src_root_part="$2/$6"
+  out_dst_kern_part="$2/$3"
+  out_dst_root_part="$2/$4"
+  dst_kern_part="$2/$5"
+  dst_root_part="$2/$6"
+  src_kern_part="$2/$7"
+  src_root_part="$2/$8"
 
-  time ${paycheck} ${payload_file} ${dst_kern_part} ${dst_root_part} \
-    ${src_kern_part} ${src_root_part}
+  time ${paycheck} ${payload_file} \
+    --out_dst_kern ${out_dst_kern_part} --out_dst_root ${out_dst_root_part} \
+    --dst_kern ${dst_kern_part} --dst_root ${dst_root_part} \
+    --src_kern ${src_kern_part} --src_root ${src_root_part}
 }
 
 main() {
@@ -135,15 +134,6 @@
   check_payload "${delta_payload}" delta
   log "Done"
 
-  # Trace a random block between 0-1024 on all payloads.
-  block=$((RANDOM * 1024 / 32767))
-  log "Tracing a random block (${block}) in full/delta payloads..."
-  trace_kern_block "${new_full_payload}" ${block}
-  trace_root_block "${new_full_payload}" ${block}
-  trace_kern_block "${delta_payload}" ${block}
-  trace_root_block "${delta_payload}" ${block}
-  log "Done"
-
   # Apply full/delta payloads and verify results are identical.
   tmpdir="$(mktemp -d --tmpdir test_paycheck.XXXXXXXX)"
   log "Initiating application of payloads at $tmpdir"
@@ -153,16 +143,17 @@
     "${OLD_ROOT_PART}"
   log "Done"
 
-  log "Applying delta payload to old partitions..."
-  apply_delta_payload "${delta_payload}" "${tmpdir}" "${NEW_DELTA_KERN_PART}" \
-    "${NEW_DELTA_ROOT_PART}" "${OLD_KERN_PART}" "${OLD_ROOT_PART}"
-  log "Done"
-
   log "Applying new full payload..."
   apply_full_payload "${new_full_payload}" "${tmpdir}" "${NEW_FULL_KERN_PART}" \
     "${NEW_FULL_ROOT_PART}"
   log "Done"
 
+  log "Applying delta payload to old partitions..."
+  apply_delta_payload "${delta_payload}" "${tmpdir}" "${NEW_DELTA_KERN_PART}" \
+    "${NEW_DELTA_ROOT_PART}" "${NEW_FULL_KERN_PART}" \
+    "${NEW_FULL_ROOT_PART}" "${OLD_KERN_PART}" "${OLD_ROOT_PART}"
+  log "Done"
+
   log "Comparing results of delta and new full updates..."
   diff "${tmpdir}/${NEW_FULL_KERN_PART}" "${tmpdir}/${NEW_DELTA_KERN_PART}"
   diff "${tmpdir}/${NEW_FULL_ROOT_PART}" "${tmpdir}/${NEW_DELTA_ROOT_PART}"
diff --git a/scripts/update_payload/__init__.py b/scripts/update_payload/__init__.py
index e4a5588..8ee95e2 100644
--- a/scripts/update_payload/__init__.py
+++ b/scripts/update_payload/__init__.py
@@ -1,6 +1,18 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+#
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 """Library for processing, verifying and applying Chrome OS update payloads."""
 
diff --git a/scripts/update_payload/applier.py b/scripts/update_payload/applier.py
index e470ac4..9582b3d 100644
--- a/scripts/update_payload/applier.py
+++ b/scripts/update_payload/applier.py
@@ -1,6 +1,18 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+#
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 """Applying a Chrome OS update payload.
 
@@ -18,6 +30,20 @@
 import bz2
 import hashlib
 import itertools
+# Not everywhere we can have the lzma library so we ignore it if we don't have
+# it because it is not going to be used. For example, 'cros flash' uses
+# devserver code which eventually loads this file, but the lzma library is not
+# included in the client test devices, and it is not necessary to do so. But
+# lzma is not used in 'cros flash' so it should be fine. Python 3.x includes
+# lzma, but for backward compatibility with Python 2.7, backports-lzma is
+# needed.
+try:
+  import lzma
+except ImportError:
+  try:
+    from backports import lzma
+  except ImportError:
+    pass
 import os
 import shutil
 import subprocess
@@ -216,7 +242,7 @@
     self.truncate_to_expected_size = truncate_to_expected_size
 
   def _ApplyReplaceOperation(self, op, op_name, out_data, part_file, part_size):
-    """Applies a REPLACE{,_BZ} operation.
+    """Applies a REPLACE{,_BZ,_XZ} operation.
 
     Args:
       op: the operation object
@@ -235,6 +261,10 @@
     if op.type == common.OpType.REPLACE_BZ:
       out_data = bz2.decompress(out_data)
       data_length = len(out_data)
+    elif op.type == common.OpType.REPLACE_XZ:
+      # pylint: disable=no-member
+      out_data = lzma.decompress(out_data)
+      data_length = len(out_data)
 
     # Write data to blocks specified in dst extents.
     data_start = 0
@@ -508,7 +538,8 @@
       # Read data blob.
       data = self.payload.ReadDataBlob(op.data_offset, op.data_length)
 
-      if op.type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ):
+      if op.type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ,
+                     common.OpType.REPLACE_XZ):
         self._ApplyReplaceOperation(op, op_name, data, new_part_file, part_size)
       elif op.type == common.OpType.MOVE:
         self._ApplyMoveOperation(op, op_name, new_part_file)
@@ -557,6 +588,7 @@
         shutil.copyfile(old_part_file_name, new_part_file_name)
       elif (self.minor_version == common.SOURCE_MINOR_PAYLOAD_VERSION or
             self.minor_version == common.OPSRCHASH_MINOR_PAYLOAD_VERSION or
+            self.minor_version == common.BROTLI_BSDIFF_MINOR_PAYLOAD_VERSION or
             self.minor_version == common.PUFFDIFF_MINOR_PAYLOAD_VERSION):
         # In minor version >= 2, we don't want to copy the partitions, so
         # instead just make the new partition file.
diff --git a/scripts/update_payload/block_tracer.py b/scripts/update_payload/block_tracer.py
deleted file mode 100644
index 5caf7e3..0000000
--- a/scripts/update_payload/block_tracer.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tracing block data source through a Chrome OS update payload.
-
-This module is used internally by the main Payload class for tracing block
-content through an update payload. This is a useful feature in debugging
-payload applying functionality in this package. The interface for invoking the
-tracer is as follows:
-
-  tracer = PayloadBlockTracer(payload)
-  tracer.Run(...)
-
-"""
-
-from __future__ import print_function
-
-from update_payload import common
-
-
-#
-# Payload block tracing.
-#
-class PayloadBlockTracer(object):
-  """Tracing the origin of block data through update instructions.
-
-  This is a short-lived object whose purpose is to isolate the logic used for
-  tracing the origin of destination partition blocks.
-
-  """
-
-  def __init__(self, payload):
-    assert payload.is_init, 'uninitialized update payload'
-    self.payload = payload
-
-  @staticmethod
-  def _TraceBlock(block, skip, trace_out_file, operations, base_name):
-    """Trace the origin of a given block through a sequence of operations.
-
-    This method tries to map the given dest block to the corresponding source
-    block from which its content originates in the course of an update. It
-    further tries to trace transitive origins through MOVE operations. It is
-    rather efficient, doing the actual tracing by means of a single reverse
-    sweep through the operation sequence. It dumps a log of operations and
-    source blocks responsible for the data in the given dest block to the
-    provided output file.
-
-    Args:
-      block: the block number to trace
-      skip: number of initial transitive origins to ignore
-      trace_out_file: a file object to dump the trace to
-      operations: the sequence of operations
-      base_name: name of the operation sequence
-    """
-    # Traverse operations backwards.
-    for op, op_name in common.OperationIter(operations, base_name,
-                                            reverse=True):
-      total_block_offset = 0
-      found = False
-
-      # Is the traced block mentioned in the dest extents?
-      for dst_ex, dst_ex_name in common.ExtentIter(op.dst_extents,
-                                                   op_name + '.dst_extents'):
-        if (block >= dst_ex.start_block
-            and block < dst_ex.start_block + dst_ex.num_blocks):
-          if skip:
-            skip -= 1
-          else:
-            total_block_offset += block - dst_ex.start_block
-            trace_out_file.write(
-                '%d: %s: found %s (total block offset: %d)\n' %
-                (block, dst_ex_name, common.FormatExtent(dst_ex),
-                 total_block_offset))
-            found = True
-            break
-
-        total_block_offset += dst_ex.num_blocks
-
-      if found:
-        # Don't trace further, unless it's a MOVE.
-        if op.type != common.OpType.MOVE:
-          break
-
-        # For MOVE, find corresponding source block and keep tracing.
-        for src_ex, src_ex_name in common.ExtentIter(op.src_extents,
-                                                     op_name + '.src_extents'):
-          if total_block_offset < src_ex.num_blocks:
-            block = src_ex.start_block + total_block_offset
-            trace_out_file.write(
-                '%s:  mapped to %s (%d)\n' %
-                (src_ex_name, common.FormatExtent(src_ex), block))
-            break
-
-          total_block_offset -= src_ex.num_blocks
-
-  def Run(self, block, skip, trace_out_file, is_kernel):
-    """Block tracer entry point, invoking the actual search.
-
-    Args:
-      block: the block number whose origin to trace
-      skip: the number of first origin mappings to skip
-      trace_out_file: file object to dump the trace to
-      is_kernel: trace through kernel (True) or rootfs (False) operations
-    """
-    if is_kernel:
-      operations = self.payload.manifest.kernel_install_operations
-      base_name = 'kernel_install_operations'
-    else:
-      operations = self.payload.manifest.install_operations
-      base_name = 'install_operations'
-
-    self._TraceBlock(block, skip, trace_out_file, operations, base_name)
diff --git a/scripts/update_payload/checker.py b/scripts/update_payload/checker.py
index e4cb845..e241b0b 100644
--- a/scripts/update_payload/checker.py
+++ b/scripts/update_payload/checker.py
@@ -1,6 +1,18 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+#
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 """Verifying the integrity of a Chrome OS update payload.
 
@@ -57,6 +69,7 @@
     2: (_TYPE_DELTA,),
     3: (_TYPE_DELTA,),
     4: (_TYPE_DELTA,),
+    5: (_TYPE_DELTA,),
 }
 
 _OLD_DELTA_USABLE_PART_SIZE = 2 * 1024 * 1024 * 1024
@@ -322,6 +335,10 @@
     self.new_rootfs_fs_size = 0
     self.new_kernel_fs_size = 0
     self.minor_version = None
+    # TODO(*): When fixing crbug.com/794404, the major version should be
+    # correctly handled in update_payload scripts. So stop forcing
+    # major_version=1 here and set it to the correct value.
+    self.major_version = 1
 
   @staticmethod
   def _CheckElem(msg, name, report, is_mandatory, is_submsg, convert=str,
@@ -701,7 +718,7 @@
     return total_num_blocks
 
   def _CheckReplaceOperation(self, op, data_length, total_dst_blocks, op_name):
-    """Specific checks for REPLACE/REPLACE_BZ operations.
+    """Specific checks for REPLACE/REPLACE_BZ/REPLACE_XZ operations.
 
     Args:
       op: The operation object from the manifest.
@@ -996,6 +1013,9 @@
     # Type-specific checks.
     if op.type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ):
       self._CheckReplaceOperation(op, data_length, total_dst_blocks, op_name)
+    elif op.type == common.OpType.REPLACE_XZ and (self.minor_version >= 3 or
+                                                  self.major_version >= 2):
+      self._CheckReplaceOperation(op, data_length, total_dst_blocks, op_name)
     elif op.type == common.OpType.MOVE and self.minor_version == 1:
       self._CheckMoveOperation(op, data_offset, total_src_blocks,
                                total_dst_blocks, op_name)
@@ -1010,8 +1030,10 @@
     elif op.type == common.OpType.SOURCE_BSDIFF and self.minor_version >= 2:
       self._CheckAnyDiffOperation(op, data_length, total_dst_blocks, op_name)
       self._CheckAnySourceOperation(op, total_src_blocks, op_name)
-    elif (op.type in (common.OpType.PUFFDIFF, common.OpType.BROTLI_BSDIFF) and
-          self.minor_version >= 4):
+    elif op.type == common.OpType.BROTLI_BSDIFF and self.minor_version >= 4:
+      self._CheckAnyDiffOperation(op, data_length, total_dst_blocks, op_name)
+      self._CheckAnySourceOperation(op, total_src_blocks, op_name)
+    elif op.type == common.OpType.PUFFDIFF and self.minor_version >= 5:
       self._CheckAnyDiffOperation(op, data_length, total_dst_blocks, op_name)
       self._CheckAnySourceOperation(op, total_src_blocks, op_name)
     else:
@@ -1068,6 +1090,7 @@
     op_counts = {
         common.OpType.REPLACE: 0,
         common.OpType.REPLACE_BZ: 0,
+        common.OpType.REPLACE_XZ: 0,
         common.OpType.MOVE: 0,
         common.OpType.ZERO: 0,
         common.OpType.BSDIFF: 0,
@@ -1080,6 +1103,7 @@
     op_blob_totals = {
         common.OpType.REPLACE: 0,
         common.OpType.REPLACE_BZ: 0,
+        common.OpType.REPLACE_XZ: 0,
         # MOVE operations don't have blobs.
         common.OpType.BSDIFF: 0,
         # SOURCE_COPY operations don't have blobs.
diff --git a/scripts/update_payload/checker_unittest.py b/scripts/update_payload/checker_unittest.py
index 974519d..f718234 100755
--- a/scripts/update_payload/checker_unittest.py
+++ b/scripts/update_payload/checker_unittest.py
@@ -1,8 +1,19 @@
 #!/usr/bin/python2
 #
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 """Unit testing checker.py."""
 
@@ -620,6 +631,41 @@
         PayloadError, payload_checker._CheckReplaceOperation,
         op, data_length, (data_length + block_size - 1) / block_size, 'foo')
 
+  def testCheckReplaceXzOperation(self):
+    """Tests _CheckReplaceOperation() where op.type == REPLACE_XZ."""
+    payload_checker = checker.PayloadChecker(self.MockPayload())
+    block_size = payload_checker.block_size
+    data_length = block_size * 3
+
+    op = self.mox.CreateMock(
+        update_metadata_pb2.InstallOperation)
+    op.type = common.OpType.REPLACE_XZ
+
+    # Pass.
+    op.src_extents = []
+    self.assertIsNone(
+        payload_checker._CheckReplaceOperation(
+            op, data_length, (data_length + block_size - 1) / block_size + 5,
+            'foo'))
+
+    # Fail, src extents found.
+    op.src_extents = ['bar']
+    self.assertRaises(
+        PayloadError, payload_checker._CheckReplaceOperation,
+        op, data_length, (data_length + block_size - 1) / block_size + 5, 'foo')
+
+    # Fail, missing data.
+    op.src_extents = []
+    self.assertRaises(
+        PayloadError, payload_checker._CheckReplaceOperation,
+        op, None, (data_length + block_size - 1) / block_size, 'foo')
+
+    # Fail, too few blocks to justify XZ.
+    op.src_extents = []
+    self.assertRaises(
+        PayloadError, payload_checker._CheckReplaceOperation,
+        op, data_length, (data_length + block_size - 1) / block_size, 'foo')
+
   def testCheckMoveOperation_Pass(self):
     """Tests _CheckMoveOperation(); pass case."""
     payload_checker = checker.PayloadChecker(self.MockPayload())
@@ -792,8 +838,8 @@
     """Parametric testing of _CheckOperation().
 
     Args:
-      op_type_name: 'REPLACE', 'REPLACE_BZ', 'MOVE', 'BSDIFF', 'SOURCE_COPY',
-        'SOURCE_BSDIFF', BROTLI_BSDIFF or 'PUFFDIFF'.
+      op_type_name: 'REPLACE', 'REPLACE_BZ', 'REPLACE_XZ', 'MOVE', 'BSDIFF',
+        'SOURCE_COPY', 'SOURCE_BSDIFF', BROTLI_BSDIFF or 'PUFFDIFF'.
       is_last: Whether we're testing the last operation in a sequence.
       allow_signature: Whether we're testing a signature-capable operation.
       allow_unhashed: Whether we're allowing to not hash the data.
@@ -848,9 +894,13 @@
       payload_checker.minor_version = 2 if fail_bad_minor_version else 1
     elif op_type in (common.OpType.SOURCE_COPY, common.OpType.SOURCE_BSDIFF):
       payload_checker.minor_version = 1 if fail_bad_minor_version else 2
+    if op_type == common.OpType.REPLACE_XZ:
+      payload_checker.minor_version = 2 if fail_bad_minor_version else 3
     elif op_type in (common.OpType.ZERO, common.OpType.DISCARD,
-                     common.OpType.PUFFDIFF, common.OpType.BROTLI_BSDIFF):
+                     common.OpType.BROTLI_BSDIFF):
       payload_checker.minor_version = 3 if fail_bad_minor_version else 4
+    elif op_type == common.OpType.PUFFDIFF:
+      payload_checker.minor_version = 4 if fail_bad_minor_version else 5
 
     if op_type not in (common.OpType.MOVE, common.OpType.SOURCE_COPY):
       if not fail_mismatched_data_offset_length:
@@ -1065,7 +1115,8 @@
         (minor_version == 1 and payload_type == checker._TYPE_DELTA) or
         (minor_version == 2 and payload_type == checker._TYPE_DELTA) or
         (minor_version == 3 and payload_type == checker._TYPE_DELTA) or
-        (minor_version == 4 and payload_type == checker._TYPE_DELTA))
+        (minor_version == 4 and payload_type == checker._TYPE_DELTA) or
+        (minor_version == 5 and payload_type == checker._TYPE_DELTA))
     args = (report,)
 
     if should_succeed:
@@ -1167,10 +1218,13 @@
   """Returns True iff the combination of arguments represents a valid test."""
   op_type = _OpTypeByName(op_type_name)
 
-  # REPLACE/REPLACE_BZ operations don't read data from src partition. They are
-  # compatible with all valid minor versions, so we don't need to check that.
-  if (op_type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ) and (
-      fail_src_extents or fail_src_length or fail_bad_minor_version)):
+  # REPLACE/REPLACE_BZ/REPLACE_XZ operations don't read data from src
+  # partition. They are compatible with all valid minor versions, so we don't
+  # need to check that.
+  if (op_type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ,
+                  common.OpType.REPLACE_XZ) and (fail_src_extents or
+                                                 fail_src_length or
+                                                 fail_bad_minor_version)):
     return False
 
   # MOVE and SOURCE_COPY operations don't carry data.
@@ -1256,8 +1310,8 @@
 
   # Add all _CheckOperation() test cases.
   AddParametricTests('CheckOperation',
-                     {'op_type_name': ('REPLACE', 'REPLACE_BZ', 'MOVE',
-                                       'BSDIFF', 'SOURCE_COPY',
+                     {'op_type_name': ('REPLACE', 'REPLACE_BZ', 'REPLACE_XZ',
+                                       'MOVE', 'BSDIFF', 'SOURCE_COPY',
                                        'SOURCE_BSDIFF', 'PUFFDIFF',
                                        'BROTLI_BSDIFF'),
                       'is_last': (True, False),
@@ -1289,7 +1343,7 @@
 
   # Add all _CheckManifestMinorVersion() test cases.
   AddParametricTests('CheckManifestMinorVersion',
-                     {'minor_version': (None, 0, 1, 2, 3, 4, 555),
+                     {'minor_version': (None, 0, 1, 2, 3, 4, 5, 555),
                       'payload_type': (checker._TYPE_FULL,
                                        checker._TYPE_DELTA)})
 
diff --git a/scripts/update_payload/common.py b/scripts/update_payload/common.py
index 231c504..4e7b2e3 100644
--- a/scripts/update_payload/common.py
+++ b/scripts/update_payload/common.py
@@ -1,6 +1,18 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+#
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 """Utilities for update payload processing."""
 
@@ -27,7 +39,8 @@
 INPLACE_MINOR_PAYLOAD_VERSION = 1
 SOURCE_MINOR_PAYLOAD_VERSION = 2
 OPSRCHASH_MINOR_PAYLOAD_VERSION = 3
-PUFFDIFF_MINOR_PAYLOAD_VERSION = 4
+BROTLI_BSDIFF_MINOR_PAYLOAD_VERSION = 4
+PUFFDIFF_MINOR_PAYLOAD_VERSION = 5
 
 #
 # Payload operation types.
diff --git a/scripts/update_payload/error.py b/scripts/update_payload/error.py
index 8b9cadd..6f95433 100644
--- a/scripts/update_payload/error.py
+++ b/scripts/update_payload/error.py
@@ -1,6 +1,18 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+#
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 """Payload handling errors."""
 
diff --git a/scripts/update_payload/format_utils.py b/scripts/update_payload/format_utils.py
index 2c3775c..6248ba9 100644
--- a/scripts/update_payload/format_utils.py
+++ b/scripts/update_payload/format_utils.py
@@ -1,6 +1,18 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+#
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 """Various formatting functions."""
 
diff --git a/scripts/update_payload/format_utils_unittest.py b/scripts/update_payload/format_utils_unittest.py
index 7153f9e..42ea621 100755
--- a/scripts/update_payload/format_utils_unittest.py
+++ b/scripts/update_payload/format_utils_unittest.py
@@ -1,8 +1,19 @@
 #!/usr/bin/python2
 #
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 """Unit tests for format_utils.py."""
 
diff --git a/scripts/update_payload/histogram.py b/scripts/update_payload/histogram.py
index f72db61..1ac2ab5 100644
--- a/scripts/update_payload/histogram.py
+++ b/scripts/update_payload/histogram.py
@@ -1,6 +1,18 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+#
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 """Histogram generation tools."""
 
diff --git a/scripts/update_payload/histogram_unittest.py b/scripts/update_payload/histogram_unittest.py
index 643bb32..e757dd0 100755
--- a/scripts/update_payload/histogram_unittest.py
+++ b/scripts/update_payload/histogram_unittest.py
@@ -1,8 +1,19 @@
 #!/usr/bin/python2
 #
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 """Unit tests for histogram.py."""
 
diff --git a/scripts/update_payload/payload.py b/scripts/update_payload/payload.py
index 8d9a20e..380d6d0 100644
--- a/scripts/update_payload/payload.py
+++ b/scripts/update_payload/payload.py
@@ -1,6 +1,18 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+#
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 """Tools for reading, verifying and applying Chrome OS update payloads."""
 
@@ -10,7 +22,6 @@
 import struct
 
 from update_payload import applier
-from update_payload import block_tracer
 from update_payload import checker
 from update_payload import common
 from update_payload import update_metadata_pb2
@@ -323,23 +334,3 @@
     helper.Run(new_kernel_part, new_rootfs_part,
                old_kernel_part=old_kernel_part,
                old_rootfs_part=old_rootfs_part)
-
-  def TraceBlock(self, block, skip, trace_out_file, is_kernel):
-    """Traces the origin(s) of a given dest partition block.
-
-    The tracing tries to find origins transitively, when possible (it currently
-    only works for move operations, where the mapping of src/dst is
-    one-to-one). It will dump a list of operations and source blocks
-    responsible for the data in the given dest block.
-
-    Args:
-      block: the block number whose origin to trace
-      skip: the number of first origin mappings to skip
-      trace_out_file: file object to dump the trace to
-      is_kernel: trace through kernel (True) or rootfs (False) operations
-    """
-    self._AssertInit()
-
-    # Create a short-lived payload block tracer object and run it.
-    helper = block_tracer.PayloadBlockTracer(self)
-    helper.Run(block, skip, trace_out_file, is_kernel)
diff --git a/scripts/update_payload/test_utils.py b/scripts/update_payload/test_utils.py
index 38712fb..1e2259d 100644
--- a/scripts/update_payload/test_utils.py
+++ b/scripts/update_payload/test_utils.py
@@ -1,6 +1,18 @@
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+#
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 
 """Utilities for unit testing."""
 
@@ -276,7 +288,7 @@
 
     Args:
       is_kernel: whether this is a kernel (True) or rootfs (False) operation
-      op_type: one of REPLACE, REPLACE_BZ, MOVE or BSDIFF
+      op_type: one of REPLACE, REPLACE_BZ, REPLACE_XZ, MOVE or BSDIFF
       src_extents: list of (start, length) pairs indicating src block ranges
       src_length: size of the src data in bytes (needed for BSDIFF)
       dst_extents: list of (start, length) pairs indicating dst block ranges
diff --git a/test_http_server.cc b/test_http_server.cc
index 93aa11c..cf15672 100644
--- a/test_http_server.cc
+++ b/test_http_server.cc
@@ -496,7 +496,7 @@
     CHECK_EQ(terms.size(), num_terms);
   }
 
-  inline string Get(const off_t index) const {
+  inline const string& Get(const off_t index) const {
     return terms[index];
   }
   inline const char *GetCStr(const off_t index) const {
diff --git a/update_engine.conf b/update_engine.conf
index e3f246f..3358411 100644
--- a/update_engine.conf
+++ b/update_engine.conf
@@ -1,2 +1,2 @@
 PAYLOAD_MAJOR_VERSION=2
-PAYLOAD_MINOR_VERSION=4
+PAYLOAD_MINOR_VERSION=5
diff --git a/update_engine.gyp b/update_engine.gyp
index 1ff4d7f..ba46266 100644
--- a/update_engine.gyp
+++ b/update_engine.gyp
@@ -124,7 +124,7 @@
       'dependencies': [
         'update_metadata-protos',
       ],
-      #TODO(deymo): Remove unused dependencies once we stop including files
+      # TODO(deymo): Remove unused dependencies once we stop including files
       # from the root directory.
       'variables': {
         'exported_deps': [
@@ -364,6 +364,7 @@
         'exported_deps': [
           'ext2fs',
           'libpuffdiff',
+          'liblzma',
         ],
         'deps': ['<@(exported_deps)'],
       },
diff --git a/update_metadata.proto b/update_metadata.proto
index b5d6c59..99b7422 100644
--- a/update_metadata.proto
+++ b/update_metadata.proto
@@ -169,8 +169,10 @@
     // On minor version 4 or newer, these operations are supported:
     ZERO = 6;  // Write zeros in the destination.
     DISCARD = 7;  // Discard the destination blocks, reading as undefined.
-    PUFFDIFF = 9;  // The data is in puffdiff format.
     BROTLI_BSDIFF = 10;  // Like SOURCE_BSDIFF, but compressed with brotli.
+
+    // On minor version 5 or newer, these operations are supported:
+    PUFFDIFF = 9;  // The data is in puffdiff format.
   }
   required Type type = 1;
   // The offset into the delta file (after the protobuf)