in scripts/provisioning/command_line.py [0:0]
import datetime
import logging

import googleapiclient.discovery

# Module-level logger; assumed to be configured elsewhere in this script.
LOGGER = logging.getLogger(__name__)


def _create_sts_jobs_for_bucket(project_id, start_date, source_bucket,
                                sink_bucket, job_type):
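    """Create a pool of 25 disabled STS transfer jobs for one bucket pair.

    Jobs 0-23 are scheduled hourly (00:00:00 through 23:00:00 UTC); the 25th
    job is tagged 'default' and scheduled at 23:59:59 UTC.
    """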
    storagetransfer = googleapiclient.discovery.build(
        'storagetransfer', 'v1', cache_discovery=False)
    sts_jobs = []
    number_of_jobs = 25
    i = 0
    while i < number_of_jobs:
        description = 'Pooled STS Job ' + str(i) + ' for bucket ' + source_bucket
        if i == 24:
            # The last job in the pool (number 25) is the default job,
            # scheduled just before midnight UTC.
            job_type = 'default'
            start_time_string = '23:59:59'
        else:
            start_time_string = '{:02d}:00:00'.format(i)
        start_time = datetime.datetime.strptime(start_time_string, '%H:%M:%S')
        # Transfer time is in UTC (24hr) HH:MM:SS.
        transfer_job = {
            'description': description,
            'status': 'DISABLED',
            'projectId': project_id,
            'schedule': {
                'scheduleStartDate': {
                    'day': start_date.day,
                    'month': start_date.month,
                    'year': start_date.year
                },
                'startTimeOfDay': {
                    'hours': start_time.hour,
                    'minutes': start_time.minute,
                    'seconds': start_time.second
                }
            },
            'transferSpec': {
                'gcsDataSource': {
                    'bucketName': source_bucket
                },
                'gcsDataSink': {
                    'bucketName': sink_bucket
                }
            }
        }
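        # Jobs are created DISABLED so nothing transfers until a pooled job
        # is later assigned and enabled (an assumption based on the pooling
        # scheme; this file does not show where jobs are enabled).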
        try:
            result = storagetransfer.transferJobs().create(body=transfer_job).execute()
            pooled_sts_job = {
                'name': result.get('name'),
                'status': result.get('status'),
                'type': job_type,
                'projectId': result.get('projectId'),
                'sourceBucket': result.get('transferSpec').get('gcsDataSource').get('bucketName'),
                'sourceProject': project_id,
                'targetBucket': result.get('transferSpec').get('gcsDataSink').get('bucketName'),
                'schedule': start_time_string
            }
            sts_jobs.append(pooled_sts_job)
            i += 1
        except Exception as e:
            # On any API failure, clean up the jobs created so far and exit.
            # _exit_creation_with_cleanup is assumed to terminate the process;
            # if it returned, this loop would retry the same job indefinitely
            # because i is only incremented on success.
            LOGGER.error('Exception, rolling back and exiting program: %s', e)
            _exit_creation_with_cleanup(sts_jobs)
print("Successfully created STS job pool in the cloud, standby")
return sts_jobs
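
A minimal usage sketch, assuming the surrounding script supplies credentials
via Application Default Credentials and that _exit_creation_with_cleanup
terminates the process on failure; the project and bucket names below are
hypothetical placeholders, not values from this file:

if __name__ == '__main__':
    # Schedule the pool to start today; only day/month/year are read
    # from start_date, so a plain datetime.date works.
    jobs = _create_sts_jobs_for_bucket(
        project_id='my-project',            # hypothetical project ID
        start_date=datetime.date.today(),
        source_bucket='my-source-bucket',   # hypothetical bucket names
        sink_bucket='my-sink-bucket',
        job_type='pooled')                  # hypothetical pooled job type
    for job in jobs:
        print(job['name'], job['schedule'], job['type'])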