def main()

in util/upload-cookbook.py [0:0]


def main():
    args = _parse_args()

    # Check if archive exists
    if not os.path.exists(args.cookbook_archive_path):
        print("Cookbook archive {0} not found".format(args.cookbook_archive_path))
        sys.exit(1)

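    # Derive the archive's base name and write its MD5 digest to <base name>.md5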
    base_name = os.path.splitext(os.path.basename(args.cookbook_archive_path))[0]
    _md5sum(args.cookbook_archive_path, "{0}.md5".format(base_name))

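    # First pass: check each regional bucket for an existing archive with the same key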
    for region in args.regions:
        s3 = _create_s3_client(region)
        bucket_name = _get_bucket_name(args, region)

        s3_key = _COOKBOOKS_DIR + "/" + base_name + ".tgz"
        print("Listing cookbook for region: {0}, bucket: {1}, key: {2}".format(region, bucket_name, s3_key))
        _aws_s3_ls(s3, region, bucket_name, s3_key)

    if _ls_error_array and not args.override:
        print("The cookbook archives are already there; only the .date files would need to be uploaded in this round!")
        print("Failed to push cookbook, already present for regions: {0}".format(" ".join(_ls_error_array)))
        sys.exit(1)
    elif _ls_error_array and args.override:
        print("Some or all of the cookbook archives are already there, but OVERRIDE=true")

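    # Second pass: optionally back up the existing objects, then upload the new archive and its side-car files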
    for region in args.regions:
        s3 = _create_s3_client(region)
        bucket_name = _get_bucket_name(args, region)

        if args.override:
            print("Backup cookbook for region: {0}".format(region))
            _aws_s3_bck(s3, args, region, bucket_name, base_name + ".tgz")
            _aws_s3_bck(s3, args, region, bucket_name, base_name + ".md5")
            _aws_s3_bck(s3, args, region, bucket_name, base_name + ".tgz.date")

        print("Pushing cookbook for region: {0}".format(region))
        _aws_s3_cp(s3, args, region, bucket_name, _COOKBOOKS_DIR, args.cookbook_archive_path)
        _aws_s3_cp(s3, args, region, bucket_name, _COOKBOOKS_DIR, base_name + ".md5")

        if not args.dryrun:
            # Store the archive's LastModified timestamp into a .tgz.date file and upload it to the bucket
            response = s3.head_object(Bucket=bucket_name, Key=_COOKBOOKS_DIR + "/" + base_name + ".tgz")
            with open(base_name + ".tgz.date", "w") as f:
                f.write(response.get("LastModified").strftime("%Y-%m-%d_%H-%M-%S"))

            _aws_s3_cp(s3, args, region, bucket_name, _COOKBOOKS_DIR, base_name + ".tgz.date")
        else:
            print("File {0}.tgz.date not stored to bucket {1} due to dryrun mode".format(base_name, bucket_name))

    # Backup failures are reported but do not abort the run; push failures are fatal
    if _bck_error_array:
        print("Failed to backup cookbook for regions: {0}".format(" ".join(_bck_error_array)))

    if _cp_error_array:
        print("Failed to push cookbook for regions: {0}".format(" ".join(_cp_error_array)))
        sys.exit(1)
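
The helper functions called above (_md5sum, _aws_s3_ls, _aws_s3_bck, _aws_s3_cp) are defined elsewhere in util/upload-cookbook.py and are not part of this excerpt. As a rough guide to their contracts, here is a minimal sketch of how they could be implemented with boto3 and hashlib. The bodies, the backup/ prefix, and the value of _COOKBOOKS_DIR are assumptions inferred from how main() uses these names, not the script's actual code.

import hashlib
import os

import boto3
from botocore.exceptions import ClientError

_COOKBOOKS_DIR = "cookbooks"  # assumed value of the module-level constant
_ls_error_array, _bck_error_array, _cp_error_array = [], [], []


def _create_s3_client(region):
    # One client per region, matching the per-region loops in main()
    return boto3.client("s3", region_name=region)


def _md5sum(archive_path, md5_path):
    # Hash the archive in chunks and write the hex digest to the .md5 side-car file
    digest = hashlib.md5()
    with open(archive_path, "rb") as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b""):
            digest.update(chunk)
    with open(md5_path, "w") as f:
        f.write(digest.hexdigest())


def _aws_s3_ls(s3, region, bucket_name, key):
    # Record the region as a conflict when the key already exists in the bucket
    try:
        s3.head_object(Bucket=bucket_name, Key=key)
        _ls_error_array.append(region)
    except ClientError:
        pass  # key not found (or not readable): nothing would be overwritten


def _aws_s3_bck(s3, args, region, bucket_name, file_name):
    # Copy the current object aside (here under a hypothetical backup/ prefix) before it is replaced
    source = {"Bucket": bucket_name, "Key": _COOKBOOKS_DIR + "/" + file_name}
    try:
        if not args.dryrun:
            s3.copy_object(Bucket=bucket_name, CopySource=source, Key=_COOKBOOKS_DIR + "/backup/" + file_name)
    except ClientError:
        _bck_error_array.append(region)


def _aws_s3_cp(s3, args, region, bucket_name, s3_dir, file_path):
    # Upload a local file under the given prefix, honoring dryrun mode
    key = s3_dir + "/" + os.path.basename(file_path)
    try:
        if args.dryrun:
            print("Dryrun: would upload {0} to s3://{1}/{2}".format(file_path, bucket_name, key))
        else:
            s3.upload_file(file_path, bucket_name, key)
    except Exception:
        # upload_file raises S3UploadFailedError; treat any error as a push failure for this region
        _cp_error_array.append(region)

Under these assumptions the helpers never raise back into main(); failures accumulate in the three module-level arrays that main() inspects after each pass.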