in perfmetrics/scripts/hns_rename_folders_metrics/generate_folders_and_files.py [0:0]
def _generate_files_and_upload_to_gcs_bucket(destination_blob_name, num_of_files,
                                             file_size_unit, file_size,
                                             filename_prefix) -> int:
  """Creates files locally in batches and uploads each batch to a GCS bucket.

  Files are created as sparse files (via truncate) in TEMPORARY_DIRECTORY,
  uploaded with `gcloud storage cp`, then deleted locally before the next
  batch of BATCH_SIZE files is generated.

  Args:
    destination_blob_name: GCS destination path the files are copied to.
    num_of_files: Total number of files to create and upload.
    file_size_unit: Size unit — 'gb', 'mb', 'kb' or 'b' (case-insensitive).
    file_size: Size of each file, expressed in file_size_unit units.
    filename_prefix: Prefix used to build each generated file's name.

  Returns:
    0 on success, 1 on failure (local creation or upload failed).
  """
  # Bytes-per-unit lookup, hoisted out of the loop. An unrecognised unit
  # leaves the file empty (open('wb') already truncates to 0 bytes), which
  # preserves the original behaviour.
  size_multipliers = {'gb': 1024 ** 3, 'mb': 1024 ** 2, 'kb': 1024, 'b': 1}
  multiplier = size_multipliers.get(file_size_unit.lower())

  # Creating folders locally in temp directory and copying to gcs bucket.
  for batch_start in range(1, num_of_files + 1, BATCH_SIZE):
    for file_num in range(batch_start, batch_start + BATCH_SIZE):
      if file_num > num_of_files:
        break
      file_name = '{}_{}'.format(filename_prefix, file_num)
      temp_file = '{}/{}.txt'.format(TEMPORARY_DIRECTORY, file_name)
      # truncate() allocates the requested size without writing data,
      # which is fast even for GB-sized files.
      with open(temp_file, 'wb') as out:
        if multiplier is not None:
          out.truncate(multiplier * int(file_size))

    created_files = os.listdir(TEMPORARY_DIRECTORY)
    if not created_files:
      _logmessage("Files were not created locally", LOG_ERROR)
      return 1

    # Starting upload to the gcs bucket. shell=True is required for the
    # '*' wildcard expansion in the source path.
    # BUG FIX: the original wrapped Popen(...).communicate() in
    # `except subprocess.CalledProcessError`, but communicate() never raises
    # that exception (only check_call/check_output/run(check=True) do), so
    # upload failures were silently ignored and the function returned 0.
    # Check the process return code explicitly instead.
    process = subprocess.Popen(
        'gcloud storage cp --recursive {}/* {}'.format(TEMPORARY_DIRECTORY,
                                                       destination_blob_name),
        shell=True)
    process.communicate()
    if process.returncode != 0:
      _logmessage("Issue while uploading files to GCS bucket.Aborting...", LOG_ERROR)
      return 1

    # Delete local files from temporary directory so the next batch starts
    # from an empty directory.
    subprocess.call('rm -rf {}/*'.format(TEMPORARY_DIRECTORY), shell=True)

    # Writing number of files uploaded to output file after every batch uploads.
    _logmessage('{}/{} files uploaded to {}\n'.format(len(created_files),
                                                      num_of_files,
                                                      destination_blob_name),
                LOG_INFO)
  return 0