in competitions/info.py
def __post_init__(self):
    # Required files: the competition config plus the competition and dataset descriptions.
    config_fname = hf_hub_download(
        repo_id=self.competition_id,
        filename="conf.json",
        use_auth_token=self.autotrain_token,
        repo_type="dataset",
    )
    competition_desc = hf_hub_download(
        repo_id=self.competition_id,
        filename="COMPETITION_DESC.md",
        use_auth_token=self.autotrain_token,
        repo_type="dataset",
    )
    dataset_desc = hf_hub_download(
        repo_id=self.competition_id,
        filename="DATASET_DESC.md",
        use_auth_token=self.autotrain_token,
        repo_type="dataset",
    )
    self.config = self.load_config(config_fname)
    self.competition_desc = self.load_md(competition_desc)
    self.dataset_desc = self.load_md(dataset_desc)
    # Optional files: fall back to None if they are missing from the competition repo.
    try:
        submission_desc = hf_hub_download(
            repo_id=self.competition_id,
            filename="SUBMISSION_DESC.md",
            use_auth_token=self.autotrain_token,
            repo_type="dataset",
        )
        self.submission_desc = self.load_md(submission_desc)
    except Exception:
        self.submission_desc = None
    try:
        rules_md = hf_hub_download(
            repo_id=self.competition_id,
            filename="RULES.md",
            use_auth_token=self.autotrain_token,
            repo_type="dataset",
        )
        self.rules_md = self.load_md(rules_md)
    except Exception:
        self.rules_md = None
    # Custom metrics must name the single metric used to score submissions.
    if self.config["EVAL_METRIC"] == "custom":
        if "SCORING_METRIC" not in self.config:
            raise ValueError(
                "For custom metrics, please provide a single SCORING_METRIC name in the competition config file: conf.json"
            )
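
For reference, a minimal sketch of what the load_config and load_md helpers called above might look like, assuming conf.json is plain JSON and the markdown files are read as UTF-8 text. The method names match the calls in the snippet, but the bodies here are illustrative, not the library's actual implementation:

    import json

    def load_config(self, fname):
        # Parse the downloaded conf.json into a plain dict.
        with open(fname, "r", encoding="utf-8") as f:
            return json.load(f)

    def load_md(self, fname):
        # Return the raw markdown text of a downloaded description file.
        with open(fname, "r", encoding="utf-8") as f:
            return f.read()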