in perfmetrics/scripts/generate_files.py
def generate_files_and_upload_to_gcs_bucket(destination_blob_name, num_of_files,
                                            file_size_unit, file_size,
                                            filename_prefix,
                                            local_destination_folder,
                                            upload_to_gcs_bucket):
  # Generate files in batches of BATCH_SIZE so the temporary folder never
  # holds more than one batch at a time.
  for batch_start in range(1, num_of_files + 1, BATCH_SIZE):
    for file_num in range(batch_start, batch_start + BATCH_SIZE):
      if file_num > num_of_files:
        break
      file_name = '{}_{}'.format(filename_prefix, file_num)
      temp_file = '{}/{}.txt'.format(TEMPORARY_DIRECTORY, file_name)

      # Create a file of the requested size in the temporary folder by
      # truncating it to the corresponding number of bytes.
      with open(temp_file, 'wb') as out:
        if file_size_unit.lower() == 'gb':
          out.truncate(1024 * 1024 * 1024 * int(file_size))
        elif file_size_unit.lower() == 'mb':
          out.truncate(1024 * 1024 * int(file_size))
        elif file_size_unit.lower() == 'kb':
          out.truncate(1024 * int(file_size))
        elif file_size_unit.lower() == 'b':
          out.truncate(int(file_size))

    # List of files created for this batch; stop early if nothing was created.
    batch_files = os.listdir(TEMPORARY_DIRECTORY)
    if not batch_files:
      return 0

    # Upload the current batch of files to the GCS bucket.
    if upload_to_gcs_bucket:
      process = Popen(
          'gsutil -m cp -r {}/* {}'.format(TEMPORARY_DIRECTORY,
                                           destination_blob_name),
          shell=True)
      # communicate() blocks until the gsutil process finishes.
      process.communicate()
      exit_code = process.returncode
      if exit_code != 0:
        return exit_code

    # Copy the batch of files from the temporary folder to the local
    # destination folder.
    subprocess.call(
        'cp -r {}/* {}'.format(TEMPORARY_DIRECTORY, local_destination_folder),
        shell=True)

    # Delete the batch of files from the temporary folder.
    subprocess.call('rm -rf {}/*'.format(TEMPORARY_DIRECTORY), shell=True)

    # Log the number of files uploaded after each batch. In the final partial
    # batch file_num can overshoot num_of_files by one, so clamp it.
    logmessage('{}/{} files uploaded to {}\n'.format(
        min(file_num, num_of_files), num_of_files, destination_blob_name))
  return 0
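
A minimal invocation sketch, assuming BATCH_SIZE, TEMPORARY_DIRECTORY, and logmessage are defined at module level in this file; the bucket path, file count, and sizes below are illustrative placeholders rather than values taken from the script:

# Hypothetical example: generate 10 files of 1 MB each, keep local copies in
# /tmp/generated_files, and mirror them to an assumed gs:// destination.
exit_code = generate_files_and_upload_to_gcs_bucket(
    destination_blob_name='gs://example-bucket/generated-files/',  # assumed path
    num_of_files=10,
    file_size_unit='MB',
    file_size=1,
    filename_prefix='file',
    local_destination_folder='/tmp/generated_files',
    upload_to_gcs_bucket=True)
if exit_code != 0:
  logmessage('Upload failed with exit code {}\n'.format(exit_code))

Passing upload_to_gcs_bucket=False skips the gsutil step entirely and only populates local_destination_folder.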