in taskcat/_s3_sync.py [0:0]
def _s3_upload_file(self, paths, prefix, s3_client, acl):
    """Upload one local file to S3, retrying with linear backoff.

    Args:
        paths: 3-tuple of (local_filename, bucket, s3_path).
        prefix: key prefix prepended to s3_path to form the object key.
        s3_client: boto3 S3 client used to perform the upload.
        acl: canned ACL string applied to the uploaded object.

    Raises:
        TaskCatException: when all retries are exhausted, or immediately on a
            non-retryable failure (AccessDenied).
    """
    local_filename, bucket, s3_path = paths
    key = prefix + s3_path  # hoisted: object key is loop-invariant
    max_retries = 5  # single source of truth for the retry budget
    retry = 0
    # backoff and retry
    while retry < max_retries:
        if self.dry_run:
            LOG.info(
                f"[DRY_RUN] s3://{bucket}/{key}",
                extra={"nametag": PrintMsg.S3},
            )
            break
        LOG.info(f"s3://{bucket}/{key}", extra={"nametag": PrintMsg.S3})
        try:
            s3_client.upload_file(
                local_filename,
                bucket,
                key,
                ExtraArgs={"ACL": acl},
                # single-threaded transfer; presumably this runs inside a
                # worker pool already — TODO confirm against caller
                Config=TransferConfig(use_threads=False),
            )
            break
        except Exception as e:  # pylint: disable=broad-except
            retry += 1
            # lazy %-args: let the logging handler do the formatting
            LOG.error("S3 upload error: %s", e)
            # give up if we've exhausted retries, or if the error is
            # not-retryable ie. AccessDenied
            if retry == max_retries or (
                isinstance(e, S3UploadFailedError) and "(AccessDenied)" in str(e)
            ):
                # chain the cause instead of suppressing raise-missing-from
                raise TaskCatException("Failed to upload to S3") from e
            time.sleep(retry * 2)  # linear backoff: 2s, 4s, 6s, 8s