minihack/agent/polybeast/core/file_writer.py
def __init__(self, xp_args=None, rootdir="~/palaas"):
    if rootdir == "~/palaas":
        # make unique id in case someone uses the default rootdir
        xpid = "{proc}_{unixtime}".format(
            proc=os.getpid(), unixtime=int(time.time())
        )
        rootdir = os.path.join(rootdir, xpid)
    self.basepath = os.path.expandvars(os.path.expanduser(rootdir))
    self._tick = 0
    # metadata gathering
    if xp_args is None:
        xp_args = {}
    self.metadata = gather_metadata()
    # we need to copy the args, otherwise when we close the file writer
    # (and rewrite the args) we might have non-serializable objects (or
    # other nasty stuff).
    self.metadata["args"] = copy.deepcopy(xp_args)
    formatter = logging.Formatter("%(message)s")
    self._logger = logging.getLogger("palaas/out")
    # to stdout handler
    shandle = logging.StreamHandler()
    shandle.setFormatter(formatter)
    self._logger.addHandler(shandle)
    self._logger.setLevel(logging.INFO)
    # to file handler
    if not os.path.exists(self.basepath):
        self._logger.info("Creating log directory: %s", self.basepath)
        os.makedirs(self.basepath, exist_ok=True)
    else:
        self._logger.info("Found log directory: %s", self.basepath)
    self.paths = dict(
        msg="{base}/out.log".format(base=self.basepath),
        logs="{base}/logs.csv".format(base=self.basepath),
        fields="{base}/fields.csv".format(base=self.basepath),
        meta="{base}/meta.json".format(base=self.basepath),
    )
    self._logger.info("Saving arguments to %s", self.paths["meta"])
    if os.path.exists(self.paths["meta"]):
        self._logger.warning(
            "Path to meta file already exists. Not overriding meta."
        )
    else:
        self.save_metadata()
    self._logger.info("Saving messages to %s", self.paths["msg"])
    if os.path.exists(self.paths["msg"]):
        self._logger.warning(
            "Path to message file already exists. New data will be appended."
        )
    fhandle = logging.FileHandler(self.paths["msg"])
    fhandle.setFormatter(formatter)
    self._logger.addHandler(fhandle)
    self._logger.info("Saving logs data to %s", self.paths["logs"])
    self._logger.info("Saving logs' fields to %s", self.paths["fields"])
    self.fieldnames = ["_tick", "_time"]
    if os.path.exists(self.paths["logs"]):
        self._logger.warning(
            "Path to log file already exists. New data will be appended."
        )
        # Override default fieldnames.
        with open(self.paths["fields"], "r") as csvfile:
            reader = csv.reader(csvfile)
            lines = list(reader)
            if len(lines) > 0:
                self.fieldnames = lines[-1]
        # Override default tick: use the last tick from the logs file plus 1.
        with open(self.paths["logs"], "r") as csvfile:
            reader = csv.reader(csvfile)
            lines = list(reader)
            # Need at least two lines in order to read the last tick:
            # the first is the csv header and the second is the first line
            # of data.
            if len(lines) > 1:
                self._tick = int(lines[-1][0]) + 1
    self._fieldfile = open(self.paths["fields"], "a")
    self._fieldwriter = csv.writer(self._fieldfile)
    self._fieldfile.flush()
    self._logfile = open(self.paths["logs"], "a")
    self._logwriter = csv.DictWriter(
        self._logfile, fieldnames=self.fieldnames
    )
    # Auto-close (and save) on destruction.
    weakref.finalize(
        self, _save_metadata, self.paths["meta"], self.metadata
    )
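
# Usage sketch (illustrative, not part of the original file). The method
# above is assumed to belong to a `FileWriter` class in this module, with
# `gather_metadata()` and `_save_metadata()` defined as module-level helpers
# and os, time, copy, csv, logging and weakref imported at the top of the
# file; none of that is shown in the excerpt. The `log()` call below is
# likewise an assumption about the class's public interface.

flags = {"learning_rate": 0.001, "total_steps": 100000}
# Construct a writer; with the default rootdir, a unique "<pid>_<unixtime>"
# subdirectory would be created under ~/palaas instead.
writer = FileWriter(xp_args=flags, rootdir="~/palaas/demo_experiment")
# Each logged dict is expected to land as one row in logs.csv, with
# fields.csv tracking the current column set.
writer.log({"episode_return": 12.0, "episode_step": 340})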