in submitit/core/core.py [0:0]
def _get_outcome_and_result(self) -> tp.Tuple[str, tp.Any]:
    """Getter for the output of the submitted function.

    Returns
    -------
    outcome: str
        the outcome of the job: either "error" or "success"
    result: Any
        the output of the submitted function

    Raises
    ------
    UncompletedJobError
        if the job is not finished yet, or if it failed outside of the submitted
        function itself (e.g. at the Slurm level)
    """
    assert not self._sub_jobs, "This should not be called for a meta-job"
    p = self.paths.folder
    timeout = self._results_timeout_s
    try:
        # Trigger a cache update: https://stackoverflow.com/questions/3112546/os-path-exists-lies/3112717
        p.chmod(p.stat().st_mode)
    except PermissionError:
        # chmod requires file ownership and might fail.
        # Increase the timeout since we can't force a cache refresh.
        timeout *= 2
    # If the filesystem is slow, we need to wait a bit for result_pickle to appear.
    start_wait = _time.time()
    while not self.paths.result_pickle.exists() and _time.time() - start_wait < timeout:
        _time.sleep(1)
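    # Still no result file after waiting: the job most likely died before writing it,
    # so build an informative error message.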
    if not self.paths.result_pickle.exists():
        message = [
            f"Job {self.job_id} (task: {self.task_id}) with path {self.paths.result_pickle}",
            f"has not produced any output (state: {self.state})",
        ]
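        # Attach whatever log output is available to make the failure easier to diagnose.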
        log = self.stderr()
        if log:
            message.extend(["Error stream produced:", "-" * 40, log])
        elif self.paths.stdout.exists():
            log = subprocess.check_output(["tail", "-40", str(self.paths.stdout)], encoding="utf-8")
            message.extend(
                [f"No error stream produced. Look at stdout: {self.paths.stdout}", "-" * 40, log]
            )
        else:
            message.append(f"No output/error stream produced! Check: {self.paths.stdout}")
        raise utils.UncompletedJobError("\n".join(message))
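    # The result file exists: load the (outcome, result) tuple pickled by the job process.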
    try:
        output: tp.Tuple[str, tp.Any] = utils.pickle_load(self.paths.result_pickle)
    except EOFError:
        # The file exists but may still be in the middle of being written or synced:
        # retry once after a short delay.
        warnings.warn(f"EOFError on file {self.paths.result_pickle}, trying again in 2s")
        _time.sleep(2)
        output = utils.pickle_load(self.paths.result_pickle)
    return output
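
# --- Illustrative sketch (not part of submitit) ------------------------------
# A minimal example of how the (outcome, result) tuple returned above could be
# consumed, assuming `job` is a finished Job instance from this module. The
# "error"/"success" strings come from the docstring above; the helper name and
# the RuntimeError are hypothetical glue code, not submitit API.
def _example_consume_outcome(job) -> tp.Any:
    outcome, result = job._get_outcome_and_result()
    if outcome == "error":
        # On an "error" outcome, surface the failure to the caller (what exactly
        # `result` contains in that case is not specified in this excerpt).
        raise RuntimeError(f"Job {job.job_id} ended with outcome {outcome!r}: {result}")
    # On "success", `result` is the output of the submitted function.
    return result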