Bug 1263073 - partner repacks should not block other uploads r=nthomas a=release DONTBUILD draft
authorRail Aliiev <rail@mozilla.com>
Mon, 11 Apr 2016 21:37:59 -0400
changeset 349668 90cfc160c1c4559051305db35b3de904d5ee9f64
parent 349667 9c35ce0d49ada38c197f0ffaca026b3f66382c95
child 518161 6d485bac28ad8ad7882236a0e33cdaa2d50b9abf
push id15152
push userbmo:rail@mozilla.com
push dateTue, 12 Apr 2016 01:41:14 +0000
reviewersnthomas, release
bugs1263073
milestone48.0a1
Bug 1263073 - partner repacks should not block other uploads r=nthomas a=release DONTBUILD MozReview-Commit-ID: 69DSyJrB6hI
testing/mozharness/scripts/release/push-candidate-to-releases.py
--- a/testing/mozharness/scripts/release/push-candidate-to-releases.py
+++ b/testing/mozharness/scripts/release/push-candidate-to-releases.py
@@ -144,28 +144,38 @@ class ReleasePusher(BaseScript, Virtuale
                             aws_secret_access_key=self.aws_secret_key)
         self.info("Getting bucket {}".format(self.config["bucket_name"]))
         bucket = conn.get_bucket(self.config["bucket_name"])
 
         # ensure the destination is empty
         self.info("Checking destination {} is empty".format(self._get_releases_prefix()))
         keys = [k for k in bucket.list(prefix=self._get_releases_prefix())]
         if keys:
-            self.fatal("Destination already exists with %s keys, aborting" %
-                       len(keys))
+            self.warning("Destination already exists with %s keys" % len(keys))
 
         def worker(item):
             source, destination = item
 
-            self.info("Copying {} to {}".format(source, destination))
-            return retry(bucket.copy_key,
-                         args=(destination,
-                               self.config["bucket_name"],
-                               source),
-                         sleeptime=5, max_sleeptime=60,
+            def copy_key():
+                dest_key = bucket.get_key(destination)
+                source_key = bucket.get_key(source)
+                if not dest_key:
+                    self.info("Copying {} to {}".format(source, destination))
+                    bucket.copy_key(destination, self.config["bucket_name"],
+                                    source)
+                elif source_key.etag == dest_key.etag:
+                    self.warning(
+                        "{} already exists with the same content ({}), skipping copy".format(
+                            destination, dest_key.etag))
+                else:
+                    self.fatal(
+                        "{} already exists with different content (src: {}, dest: {}), aborting".format(
+                            destination, source_key.etag, dest_key.etag))
+
+            return retry(copy_key, sleeptime=5, max_sleeptime=60,
                          retry_exceptions=(S3CopyError, S3ResponseError))
 
         def find_release_files():
             candidates_prefix = self._get_candidates_prefix()
             release_prefix = self._get_releases_prefix()
             self.info("Getting key names from candidates")
             for key in bucket.list(prefix=candidates_prefix):
                 keyname = key.name