Bug 1263073 - partner repacks should not block other uploads r=nthomas a=release DONTBUILD
MozReview-Commit-ID: 69DSyJrB6hI
--- a/testing/mozharness/scripts/release/push-candidate-to-releases.py
+++ b/testing/mozharness/scripts/release/push-candidate-to-releases.py
@@ -144,28 +144,38 @@ class ReleasePusher(BaseScript, Virtuale
aws_secret_access_key=self.aws_secret_key)
self.info("Getting bucket {}".format(self.config["bucket_name"]))
bucket = conn.get_bucket(self.config["bucket_name"])
# ensure the destination is empty
self.info("Checking destination {} is empty".format(self._get_releases_prefix()))
keys = [k for k in bucket.list(prefix=self._get_releases_prefix())]
if keys:
- self.fatal("Destination already exists with %s keys, aborting" %
- len(keys))
+ self.warning("Destination already exists with %s keys" % len(keys))
def worker(item):
source, destination = item
- self.info("Copying {} to {}".format(source, destination))
- return retry(bucket.copy_key,
- args=(destination,
- self.config["bucket_name"],
- source),
- sleeptime=5, max_sleeptime=60,
+ def copy_key():
+ dest_key = bucket.get_key(destination)
+ source_key = bucket.get_key(source)
+ if not dest_key:
+ self.info("Copying {} to {}".format(source, destination))
+ bucket.copy_key(destination, self.config["bucket_name"],
+ source)
+ elif source_key.etag == dest_key.etag:
+ self.warning(
+ "{} already exists with the same content ({}), skipping copy".format(
+ destination, dest_key.etag))
+ else:
+ self.fatal(
+                "{} already exists with different content (src: {}, dest: {}), aborting".format(
+ destination, source_key.etag, dest_key.etag))
+
+ return retry(copy_key, sleeptime=5, max_sleeptime=60,
retry_exceptions=(S3CopyError, S3ResponseError))
def find_release_files():
candidates_prefix = self._get_candidates_prefix()
release_prefix = self._get_releases_prefix()
self.info("Getting key names from candidates")
for key in bucket.list(prefix=candidates_prefix):
keyname = key.name