in awsbatch-cli/src/awsbatch/awsbsub.py [0:0]
def main():
    """Command entrypoint."""
    try:
        # parse input parameters and config file
        args = _get_parser().parse_args()
        _validate_parameters(args)
        log = config_logger(args.log_level)
        log.info("Input parameters: %s", args)
        config = AWSBatchCliConfig(log=log, cluster=args.cluster)
        boto3_factory = Boto3ClientFactory(region=config.region, proxy=config.proxy)

        # define the job name
        if args.job_name:
            job_name = args.job_name
        else:
            # set a default job name if not specified
            if not sys.stdin.isatty():
                # the command is read from standard input
                job_name = "STDIN"
            else:
                # derive the name from the command, replacing invalid characters
                job_name = re.sub(r"\W+", "_", os.path.basename(args.command))
            log.info("Job name not specified, setting it to (%s)", job_name)

        # generate an internal unique job key, used to namespace the job artifacts in S3
        job_key = _generate_unique_job_key(job_name)
        job_s3_folder = "{prefix}/batch/{job_key}/".format(prefix=config.artifact_directory, job_key=job_key)

        # upload the script, if needed, and get the related command
        command = _upload_and_get_command(boto3_factory, args, job_s3_folder, job_name, config, log)

        # parse and validate the depends_on parameter
        depends_on = _get_depends_on(args)

        # select the submission mode: standard vs Multi-Node Parallel (MNP)
        if args.nodes and args.nodes > 1:
            if not hasattr(config, "job_definition_mnp"):
                fail("The current cluster does not support MNP job submission")
            job_definition = config.job_definition_mnp
            nodes = args.nodes
        else:
            job_definition = config.job_definition
            nodes = None

        AWSBsubCommand(log, boto3_factory).run(
            job_definition=job_definition,
            job_name=job_name,
            job_queue=config.job_queue,
            command=command,
            nodes=nodes,
            vcpus=args.vcpus,
            memory=args.memory,
            array_size=args.array_size,
            dependencies=depends_on,
            retry_attempts=args.retry_attempts,
            timeout=args.timeout,
            env=[("PCLUSTER_JOB_S3_URL", f"s3://{config.s3_bucket}/{job_s3_folder}")],
        )
    except KeyboardInterrupt:
        print("Exiting...")
        sys.exit(0)
    except Exception as e:
        fail("Unexpected error. Command failed with exception: %s" % e)