- def logger_profile(callable):
- def wrapper(*args, **kwds):
- start = time.time()
- result = callable(*args, **kwds)
- end = time.time()
- args = map(str, args)
- args += ["%s = %s" % (name, str(value)) for (name, value) in kwds.items()]
- # should probably use debug, but then debug is not always enabled
- logger.info("PROFILED %s (%s): %.02f s" % (callable.__name__, ", ".join(args), end - start))
- return result
- return wrapper
- return logger_profile
-
-
-if __name__ == '__main__':
- print 'testing sfalogging into logger.log'
- logger=_SfaLogger('logger.log')
- logger.critical("logger.critical")
- logger.error("logger.error")
- logger.warning("logger.warning")
- logger.info("logger.info")
- logger.debug("logger.debug")
- logger.setLevel(logging.DEBUG)
- logger.debug("logger.debug again")
-
- @profile(console_logger)
- def sleep(seconds = 1):
- time.sleep(seconds)
-
-
- console_logger.info('console.info')
- sleep(0.5)
- console_logger.setLevel(logging.DEBUG)
- sleep(0.25)
+
+ # shorthand to avoid having to import logging all over the place
+ def setLevelDebug(self):
+ self.setLevel(DEBUG)
+
+ def debugEnabled(self):
+ return self.getEffectiveLevel() == logging.DEBUG
+
+ # define a verbose option with s/t like
+ # parser.add_option("-v", "--verbose", action="count",
+ # dest="verbose", default=0)
+ # and pass the corresponding options.verbose to this method to adjust level
+ def setLevelFromOptVerbose(self, verbose):
+ if verbose == 0:
+ self.setLevel(logging.WARNING)
+ elif verbose == 1:
+ self.setLevel(logging.INFO)
+ elif verbose >= 2:
+ self.setLevel(logging.DEBUG)
+
+ # in case some other code needs a boolean
+ @staticmethod
+ def getBoolVerboseFromOpt(verbose):
+ return verbose >= 1
+
+ @staticmethod
+ def getBoolDebugFromOpt(verbose):
+ return verbose >= 2
+
+ def log_exc(self, message, limit=100):
+ """
+ standard logger has an exception() method but this will
+ dump the stack only between the frames
+ (1) that does `raise` and (2) the one that does `except`
+
+ log_exc() has a limit argument that allows you to see deeper than that
+
+ use limit=None to get the same behaviour as exception()
+ """
+ self.error("%s BEG TRACEBACK" % message + "\n" +
+ traceback.format_exc(limit=limit).strip("\n"))
+ self.error("%s END TRACEBACK" % message)
+
+ # for investigation purposes, can be placed anywhere
+ def log_stack(self, message, limit=100):
+ to_log = "".join(traceback.format_stack(limit=limit))
+ self.info("%s BEG STACK" % message + "\n" + to_log)
+ self.info("%s END STACK" % message)
+
+ def enable_console(self):
+ formatter = logging.Formatter("%(message)s")
+ handler = logging.StreamHandler(sys.stdout)
+ handler.setFormatter(formatter)
+ self.addHandler(handler)
+
+
+# install our class as the default
+logging.setLoggerClass(SfaLogger)
+
+
+# configure
+# this is *NOT* passed to dictConfig as-is
+# instead we filter 'handlers' and 'loggers'
+# to contain just one entry
+# so make sure that 'handlers' and 'loggers'
+# have the same set of keys
+def logging_config(context):
+ if context == 'server':
+ handlername = 'file'
+ filename = '/var/log/sfa.log'
+ level = 'INFO'
+ elif context == 'import':
+ handlername = 'file'
+ filename = '/var/log/sfa-import.log'
+ level = 'INFO'
+ elif context == 'cli':
+ handlername = 'file'
+ filename = os.path.expanduser("~/.sfi.log")
+ level = 'DEBUG'
+ elif context == 'console':
+ handlername = 'stdout'
+ filename = 'ignored'
+ level = 'INFO'
+ else:
+ print("Cannot configure logging - exiting")
+ exit(1)
+
+ config = {
+ 'version': 1,
+ # IMPORTANT: we may be imported by something else, so:
+ 'disable_existing_loggers': False,
+ 'formatters': {
+ 'standard': {
+ 'datefmt': '%m-%d %H:%M:%S',
+ 'format': ('%(asctime)s %(levelname)s '
+ '%(filename)s:%(lineno)d %(message)s'),
+ },
+ },
+ # fill in later with just the one needed
+ # otherwise a dummy 'ignored' file gets created
+ 'handlers': {
+ },
+ 'loggers': {
+ 'sfa': {
+ 'handlers': [handlername],
+ 'level': level,
+ 'propagate': False,
+ },
+ },
+ }
+ if handlername == 'stdout':
+ config['handlers']['stdout'] = {
+ 'level': level,
+ 'formatter': 'standard',
+ 'class': 'logging.StreamHandler',
+ }
+ else:
+ config['handlers']['file'] = {
+ 'filename': filename,
+ 'level': level,
+ 'formatter': 'standard',
+ 'class': 'logging.handlers.TimedRotatingFileHandler',
+ # rotate every Monday; keep 12 weekly backups (about 3 months)
+ 'when': 'w0',
+ 'interval': 1,
+ 'backupCount': 12,
+ }
+ return config
+
+
+logger = logging.getLogger('sfa')
+
+
+def init_logger(context):
+ logging.config.dictConfig(logging_config(context))
+
+
+# if the user process does not do anything
+ # like for the miscellaneous testers and other certificate
+# probing/dumping utilities
+init_logger('console')