Fix errors from validate_target_files

We should skip lines that don't contain '=', e.g. empty lines.
Also, log a warning instead of raising an error if a prop is defined
multiple times with the same value.
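
For example, a minimal sketch of the new behavior (hypothetical prop
values; assumes validate_target_files is importable):

  from validate_target_files import CheckDataInconsistency

  # Same value repeated: only a warning is logged, nothing is returned.
  assert CheckDataInconsistency(
      ["# comment\n", "\n", "ro.example=1\n", "ro.example=1\n"]) is None

  # Conflicting values for the same key: the offending key is returned,
  # and CheckBuildPropDuplicity turns that into a ValueError.
  assert CheckDataInconsistency(
      ["ro.example=1\n", "ro.example=2\n"]) == "ro.example"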

Bug: 177240467
Test: unittest, run validate_target_files
Change-Id: Ifc9eadb91e5dda7170a19d875016e5a47e8fc592
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index d2178b2..401857f 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -236,6 +236,7 @@
 
   logging.info('Done checking %s', script_path)
 
+
 # Symlink files in `src` to `dst`, if the files do not
 # already exists in `dst` directory.
 def symlinkIfNotExists(src, dst):
@@ -246,6 +247,7 @@
       continue
     os.symlink(os.path.join(src, filename), os.path.join(dst, filename))
 
+
 def ValidateVerifiedBootImages(input_tmp, info_dict, options):
   """Validates the Verified Boot related images.
 
@@ -423,16 +425,25 @@
           'Verified %s with avbtool (key: %s):\n%s', image, key,
           stdoutdata.rstrip())
 
-def CheckDataDuplicity(lines):
+
+def CheckDataInconsistency(lines):
     build_prop = {}
     for line in lines:
       if line.startswith("import") or line.startswith("#"):
         continue
-      key, value = line.split("=", 1)
+      if "=" not in line:
+        continue
+
+      key, value = line.rstrip().split("=", 1)
       if key in build_prop:
-        return key
+        logging.warning("Duplicate key {} found".format(key))
+        if value != build_prop[key]:
+          logging.error("Key {} is defined twice with different values {} vs {}"
+                        .format(key, value, build_prop[key]))
+          return key
       build_prop[key] = value
 
+
 def CheckBuildPropDuplicity(input_tmp):
   """Check all buld.prop files inside directory input_tmp, raise error
   if they contain duplicates"""
@@ -448,9 +459,11 @@
         continue
       logging.info("Checking {}".format(path))
       with open(path, 'r') as fp:
-        dupKey = CheckDataDuplicity(fp.readlines())
+        dupKey = CheckDataInconsistency(fp.readlines())
         if dupKey:
-          raise ValueError("{} contains duplicate keys for {}", path, dupKey)
+          raise ValueError("{} contains conflicting values for {}".format(
+              path, dupKey))
+
 
 def main():
   parser = argparse.ArgumentParser(