def csv_generation_parser()

in fenix-retrieval/generate_applink_data.py


import argparse

# NOTE: DEFAULT_TEST_NAME is defined elsewhere in the original module; "view" is
# assumed here from the help text below so the snippet stands on its own.
DEFAULT_TEST_NAME = "view"


def csv_generation_parser():
    """Build and return the argument parser for the CSV generation script."""
    parser = argparse.ArgumentParser(
        description="Run this tool to build CSVs containing Fenix data from some tasks "
        + "running with the multi-commit paradigm in mozilla-central "
        + "(must have perfherder data)."
    )
    parser.add_argument(
        "-t",
        "--test-name",
        type=str,
        default=DEFAULT_TEST_NAME,
        help="The name of the test to get data from (must exist in the task name). "
        + "Defaults to `view`. To get view data before Jul. 31, 2020, use `applink`.",
    )
    parser.add_argument(
        "-d",
        "--device",
        type=str,
        choices=["p2", "g5"],
        default="p2",
        help="Device to get data from.",
    )
    parser.add_argument(
        "-c",
        "--cache-path",
        type=str,
        default=None,
        help="Path to a cache for perfherder artifacts (so you don't re-download them). "
        + "Disabled by default.",
    )
    parser.add_argument(
        "-r",
        "--fenix-repo",
        type=str,
        required=True,
        help="Path to a local Fenix github repo.",
    )
    parser.add_argument(
        "-o",
        "--output",
        type=str,
        default=None,
        help="Path to the output directory. Defaults to current working directory.",
    )
    parser.add_argument(
        "--try",
        action="store_true",
        dest="try_data",
        default=False,
        help="Include data from the try server.",
    )
    parser.add_argument(
        "--replicates",
        action="store_true",
        default=False,
        help="Gather the replicates instead of the medians.",
    )
    parser.add_argument(
        "--median-per-day",
        action="store_true",
        default=False,
        help="Returns a single result per day - the median - instead of per commit runs",
    )
    return parser
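
A minimal usage sketch, assuming the parser is consumed by the script's own entry point; the repository path and flag values below are placeholders, not real data.

# Minimal usage sketch: build the parser and parse an example command line.
# "/path/to/fenix" is a placeholder, not a real checkout.
parser = csv_generation_parser()
args = parser.parse_args(
    ["--fenix-repo", "/path/to/fenix", "--device", "g5", "--try", "--median-per-day"]
)
print(args.test_name, args.device, args.try_data, args.median_per_day)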