in src/utils.py [0:0]
def init_logger(args):
    """Configure root logging to a file (``output_dir/run.log``) and stdout.

    Log level is INFO on the main distributed rank and WARN on all other
    ranks (via ``src.dist_utils.is_main()``), so workers stay quiet.

    Args:
        args: namespace with an ``output_dir`` attribute; ``run.log`` is
            created inside that directory.

    Returns:
        logging.Logger: a logger scoped to this module.
    """
    if torch.distributed.is_initialized():
        # Sync all ranks before opening the log file — presumably so that
        # output_dir (created by the main rank) exists everywhere; confirm
        # against the caller.
        torch.distributed.barrier()
    file_handler = logging.FileHandler(filename=os.path.join(args.output_dir, 'run.log'))
    stdout_handler = logging.StreamHandler(sys.stdout)
    handlers = [file_handler, stdout_handler]
    logging.basicConfig(
        datefmt="%m/%d/%Y %H:%M:%S",
        level=logging.INFO if src.dist_utils.is_main() else logging.WARN,
        format="[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s",
        handlers=handlers,
    )
    # Fix: `logger` was returned without ever being defined in this function
    # (a NameError unless a module-level `logger` happened to exist). Bind it
    # explicitly so the function is self-contained.
    logger = logging.getLogger(__name__)
    return logger