in awsbatch-cli/src/awsbatch/awsbsub.py [0:0]
def _upload_and_get_command(boto3_factory, args, job_s3_folder, job_name, config, log):
"""
Get command by parsing args and config.
The function will also perform an s3 upload, if needed.
:param boto3_factory: initialized Boto3ClientFactory object
:param args: input arguments
:param job_s3_folder: S3 folder for the job files
:param job_name: job name
:param config: config object
:param log: log
:return: command to submit
"""
    # initialize the uploader targeting the job's S3 folder
s3_uploader = S3Uploader(boto3_factory, config.s3_bucket, job_s3_folder)
    # upload input files, if any; each is stored under its basename in the job's S3 folder
if args.input_file:
for file in args.input_file:
s3_uploader.put_file(file, os.path.basename(file))
# upload command, if needed
if args.command_file or not sys.stdin.isatty() or args.env:
# define job script name
job_script = job_name + ".sh"
log.info("Using command-file option or stdin. Job script name: %s" % job_script)
env_file = None
if args.env:
env_file = job_name + ".env.sh"
# get environment variables and upload file used to extend the submission environment
env_blacklist = args.env_blacklist if args.env_blacklist else config.env_blacklist
_get_env_and_upload(s3_uploader, args.env, env_blacklist, env_file, log)
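            # NOTE: the env file is presumably sourced by the composed bash
            # command at runtime, re-creating the submission environment
            # minus the blacklisted variables.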
# upload job script
if args.command_file:
# existing script file
try:
s3_uploader.put_file(args.command, job_script)
except Exception as e:
fail("Error creating job script. Failed with exception: %s" % e)
elif not sys.stdin.isatty():
            # read the job script from stdin and upload it
_get_stdin_and_upload(s3_uploader, job_script)
# define command to execute
bash_command = _compose_bash_command(args, config.s3_bucket, config.region, job_s3_folder, job_script, env_file)
command = ["/bin/bash", "-c", bash_command]
elif isinstance(args.command, str):
log.info("Using command parameter")
command = [args.command] + args.arguments
else:
fail("Unexpected error. Command cannot be empty.")
log.info("Command: %s" % shell_join(command)) # pylint: disable=E0606
return command
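

# Illustration only, not part of awsbsub.py: a minimal sketch of how the
# returned command could be handed to AWS Batch. boto3's batch.submit_job
# accepts a command override via containerOverrides; the queue and job
# definition names below are hypothetical, and get_client() is assumed to be
# the Boto3ClientFactory accessor used elsewhere in this package.
def _submit_sketch(boto3_factory, job_name, command):
    batch_client = boto3_factory.get_client("batch")
    # override the container command with the one built by _upload_and_get_command
    return batch_client.submit_job(
        jobName=job_name,
        jobQueue="my-job-queue",  # hypothetical queue name
        jobDefinition="my-job-definition",  # hypothetical job definition name
        containerOverrides={"command": command},
    )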