in competitions/evaluate.py [0:0]
# Hub download helpers; EntryNotFoundError is raised when a file is absent from a repo.
from huggingface_hub import hf_hub_download, snapshot_download
from huggingface_hub.utils import EntryNotFoundError

# `logger`, `utils`, `EvalParams`, `SubmissionStatus`, `generate_submission_file`,
# and `compute_metrics` are defined elsewhere in the `competitions` package.


def run(params):
    logger.info(params)
    # Workers may pass a plain dict; coerce it into an EvalParams object.
    if isinstance(params, dict):
        params = EvalParams(**params)

    # Mark the submission as in progress before any heavy work starts.
    utils.update_submission_status(params, SubmissionStatus.PROCESSING.value)

    if params.competition_type == "script":
        # Script competitions may ship a requirements.txt in the competition
        # dataset repo; a missing file is expected and simply skipped.
        try:
            requirements_fname = hf_hub_download(
                repo_id=params.competition_id,
                filename="requirements.txt",
                token=params.token,
                repo_type="dataset",
            )
        except EntryNotFoundError:
            requirements_fname = None

        if requirements_fname:
            logger.info("Installing requirements")
            utils.uninstall_requirements(requirements_fname)
            utils.install_requirements(requirements_fname)

        # Fetch the competition dataset locally if one is configured.
        if len(str(params.dataset).strip()) > 0:
            # Older clone via the deprecated Repository class, kept for reference:
            # _ = Repository(local_dir="/tmp/data", clone_from=params.dataset, token=params.token)
            _ = snapshot_download(
                repo_id=params.dataset,
                local_dir="/tmp/data",
                token=params.token,
                repo_type="dataset",
            )

        # Run the participant's script to produce the submission file.
        generate_submission_file(params)

    # Score the submission; compute_metrics returns both leaderboard scores.
    evaluation = compute_metrics(params)

    utils.update_submission_score(params, evaluation["public_score"], evaluation["private_score"])
    utils.update_submission_status(params, SubmissionStatus.SUCCESS.value)
    # Clean up the Space that ran this evaluation once scoring succeeds.
    utils.delete_space(params)
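
# --- Usage sketch (not part of the original file) ---
# A minimal example of how this entry point might be invoked by a worker,
# assuming EvalParams accepts the fields read above. The repo ids, token,
# and submission_id below are hypothetical placeholders, not values from
# the source; run() coerces the dict into an EvalParams internally.
if __name__ == "__main__":
    run(
        {
            "competition_id": "my-org/my-competition",        # hypothetical
            "competition_type": "script",
            "dataset": "my-org/my-competition-private-data",  # hypothetical
            "submission_id": "abc123",                        # hypothetical
            "token": "hf_xxx",                                # Hub access token
        }
    )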