in hostfactory/host_provider/src/util.py [0:0]
def init_logging(loglevel=logging.INFO, logfile=None):
    global _logging_init
    if logfile is None:
        logfile = "azurecc_prov.log"
    logfile_path = os.path.join(os.getenv("PRO_LOG_DIR", "."), logfile)
    LOGGING = {
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'default': {
                'format': '%(operation_id)s - %(asctime)s %(levelname)-8s %(message)s'
            },
        },
        'handlers': {
            'file': {
                # The values below are popped from this dictionary and
                # used to create the handler, set the handler's level and
                # its formatter.
                '()': ConcurrentRotatingFileHandler,
                'level': logging.INFO,
                'formatter': 'default',
                # The remaining values are passed to the handler creator
                # callable as keyword arguments.
                # 'owner': ['root', 'cyclecloud'],
                'filename': logfile_path,
            },
        },
        'root': {
            'handlers': ['file'],
            'level': logging.INFO,
        },
    }
    logging.config.dictConfig(LOGGING)

    try:
        # Quiet the root handler installed by dictConfig above; the dedicated
        # "cyclecloud" logger configured below honors the requested level instead.
        for handler in logging.getLogger().handlers:
            handler.setLevel(logging.ERROR)
    except Exception:
        pass
    # The urllib3 connection-pool logger bundled with requests is really
    # chatty, so only let WARNING and above through.
    requests_logger = logging.getLogger("requests.packages.urllib3.connectionpool")
    requests_logger.setLevel(logging.WARNING)
logger = logging.getLogger("cyclecloud")
if _logging_init:
return logger
logger.setLevel(logging.DEBUG)
tenMB = 10 * 1024 * 1024
logfile_handler = ConcurrentRotatingFileHandler(logfile_path, mode='a',maxBytes=tenMB, backupCount=5)
logfile_handler.setLevel(loglevel)
logfile_handler.setFormatter(CustomFormatter('%(operation_id)s - %(asctime)s - %(name)s - %(levelname)s - %(message)s'))
logger.addHandler(logfile_handler)
stderr_handler = logging.StreamHandler(stream=sys.stderr)
stderr_handler.setLevel(logging.DEBUG)
stderr_handler.setFormatter(CustomFormatter('%(operation_id)s - %(levelname)s - %(message)s'))
logger.addHandler(stderr_handler)
_logging_init = True
return logger
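
Both format strings above reference an %(operation_id)s field that standard log records do not carry; CustomFormatter, defined elsewhere in util.py, presumably supplies it. Below is a minimal sketch of that idea, assuming a formatter that simply falls back to a placeholder when the field is missing. The class name DefaultOperationIdFormatter, the "-" fallback, and the usage lines are illustrative assumptions, not the provider's actual implementation.

import logging

class DefaultOperationIdFormatter(logging.Formatter):
    """Illustrative stand-in for CustomFormatter: guarantees %(operation_id)s resolves."""

    def format(self, record):
        # Fall back to a placeholder so records logged without an
        # operation_id still format cleanly.
        if not hasattr(record, "operation_id"):
            record.operation_id = "-"
        return super().format(record)

# Hypothetical usage: attach the formatter and pass an operation_id per call via `extra`.
handler = logging.StreamHandler()
handler.setFormatter(DefaultOperationIdFormatter("%(operation_id)s - %(levelname)s - %(message)s"))
demo_logger = logging.getLogger("cyclecloud.demo")
demo_logger.setLevel(logging.DEBUG)
demo_logger.addHandler(handler)

demo_logger.info("no operation id attached")                                    # "- - INFO - ..."
demo_logger.info("machine request accepted", extra={"operation_id": "op-42"})   # "op-42 - INFO - ..."

Because _logging_init is a module-level flag, a second call to init_logging() returns the same "cyclecloud" logger without attaching duplicate handlers.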