in sync/tc.py [0:0]
def get_task_artifacts(destination: str,
task: Task,
file_names: list[str],
session: requests.Session | None,
retry: int
):
status = task.get("status", {})
if not status.get("runs"):
logger.debug("No runs for task %s" % status["taskId"])
return
artifacts_base_url = QUEUE_BASE + "task/%s/artifacts" % status["taskId"]
if session is None:
session = requests.Session()
try:
artifacts = fetch_json(artifacts_base_url, session=session)
except requests.HTTPError as e:
logger.warning(str(e))
artifact_urls = ["{}/{}".format(artifacts_base_url, item["name"])
for item in artifacts["artifacts"]
if any(item["name"].endswith("/" + file_name)
for file_name in file_names)]
run = status["runs"][-1]
if "_log_paths" not in run:
run["_log_paths"] = {}
for url in artifact_urls:
params = {
"task": status["taskId"],
"file_name": url.rsplit("/", 1)[1]
}
log_name = "{task}_{file_name}".format(**params)
success = False
logger.debug(f"Trying to download {url}")
log_path = os.path.abspath(os.path.join(destination, log_name))
if not os.path.exists(log_path):
success = download(url, log_path, retry, session=session)
else:
success = True
if not success:
logger.warning(f"Failed to download log from {url}")
run["_log_paths"][params["file_name"]] = log_path