Improve logger (aws#106)
* making a quick variance fix (aws#99)

* Fix logger
rahul003 authored Aug 18, 2019
1 parent d24fd31 commit dd57c78
Showing 2 changed files with 93 additions and 91 deletions.
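
For orientation, here is a minimal usage sketch of the logger configuration this commit moves to: TORNASOLE_LOG_LEVEL, TORNASOLE_LOG_ALL_TO_STDOUT, TORNASOLE_LOG_PATH and TORNASOLE_LOG_CONTEXT are read from the environment, as the diffs below show. The import path and the example values are assumptions for illustration, not part of this commit.

# Hypothetical usage sketch; get_logger is assumed importable from tornasole.core.utils as in this diff.
import os

# Configure before the first get_logger() call, since handlers are only set up once.
os.environ['TORNASOLE_LOG_LEVEL'] = 'DEBUG'           # one of off/critical/error/warning/info/debug
os.environ['TORNASOLE_LOG_ALL_TO_STDOUT'] = 'FALSE'   # FALSE routes ERROR and above to stderr
os.environ['TORNASOLE_LOG_PATH'] = 'tornasole.log'    # optional extra file handler
os.environ['TORNASOLE_LOG_CONTEXT'] = 'my_job'        # defaults to hostname:pid when unset

from tornasole.core.utils import get_logger

logger = get_logger()
logger.info('written to stdout')
logger.error('written to stderr when TORNASOLE_LOG_ALL_TO_STDOUT is FALSE')
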
22 changes: 15 additions & 7 deletions tests/analysis/integration_testing_rules.py
@@ -1,8 +1,9 @@
 import os
 import sys
 # set environment variable values for tornasole logger
-os.environ['TORNASOLE_LOG_LEVEL'] = 'debug'
-os.environ['TORNASOLE_LOG_FILTER'] = 'error'
+os.environ['TORNASOLE_LOG_LEVEL'] = 'DEBUG'
+os.environ['TORNASOLE_LOG_ALL_TO_STDOUT'] = 'FALSE'
+os.environ['TORNASOLE_LOG_PATH'] = 'tornasole.log'
 # if true, block stdout prints on console, if false, show stdout prints on console
 stdout = os.environ.get('BLOCK_STDOUT', default='FALSE') == 'TRUE'
 # if true, block stderr prints on console, if false, show stderr prints on console
@@ -48,6 +49,7 @@


 logger = get_logger()
+
 # store path to config file and test mode for testing rule script with training script
 class TestRules():
     def __init__(self, framework, path_to_config=None, env_dict={}, test_case_list=[], test_case_regex=None):
@@ -180,20 +182,26 @@ def run_one_script(self, path_to_script, script_args, trial_dir, job_name, mode,
         # use subprocess to execute cmd line prompt
         command_list = commands.split(' ')
         # create a subprocess using Popen
-        p = Popen(command_list, stdout=PIPE if self.stdout_mode else None, stderr=PIPE if self.stderr_mode else None,
+        p = Popen(command_list,
+                  stdout=PIPE if self.stdout_mode else None,
+                  stderr=PIPE if self.stderr_mode else None,
                   env=dict(os.environ,
                            TORNASOLE_LOG_CONTEXT='{}_{}'.format(path_to_script, trial_dir),
-                           TORNASOLE_LOG_PATH='./integration_test_log/{}_{}_{}_{}_{}.log'.format(
-                               job_name.replace('/', '_'), local_or_s3, mode, time_stamp, train_test_str),
+                           TORNASOLE_LOG_PATH='./integration_test_log/{}_{}_{}_{}_{}.log'.
+                           format(job_name.replace('/', '_'), local_or_s3, mode,
+                                  time_stamp, train_test_str),
                            TORNASOLE_LOG_LEVEL='debug',
-                           TORNASOLE_LOG_FILTER='error'))
+                           TORNASOLE_LOG_ALL_TO_STDOUT='FALSE'))

         out, error = p.communicate()
         if p.returncode == 0:
             logger.info("script {} of job {} in {} in {} ran {} successfully".format(path_to_script, job_name, local_or_s3, mode, train_test_str))

         else:
-            logger.error("script {} of job {} in {} in {} {} failed with error {} , output is:{}".format(path_to_script, job_name, local_or_s3, mode, train_test_str, error, out))
+            logger.error("script {} of job {} in {} in {} {} failed with error {} , "
+                         "output is:{}".format(path_to_script, job_name,
+                                               local_or_s3, mode, train_test_str,
+                                               error, out))
         ## returning exit code
         exit(p.returncode)
     # run 'job's provided by user. a 'job' is a training/test scripts combination
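
The harness above now passes the logging configuration to each launched script through the child process environment instead of relying on TORNASOLE_LOG_FILTER. A rough standalone sketch of that pattern follows; the child command, context string, and log path are placeholders, not names from this repository.

# Sketch of launching a child process with per-job Tornasole logging settings via env vars.
import os
import sys
from subprocess import Popen, PIPE

child_env = dict(os.environ,
                 TORNASOLE_LOG_CONTEXT='example_script_example_trial',          # placeholder context
                 TORNASOLE_LOG_PATH='./integration_test_log/example_job.log',   # placeholder path
                 TORNASOLE_LOG_LEVEL='debug',
                 TORNASOLE_LOG_ALL_TO_STDOUT='FALSE')

# Any command works here; a trivial child keeps the sketch runnable.
p = Popen([sys.executable, '-c', "print('child ran')"],
          stdout=PIPE, stderr=PIPE, env=child_env)
out, err = p.communicate()
print('exit code:', p.returncode)
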
162 changes: 78 additions & 84 deletions tornasole/core/utils.py
@@ -5,6 +5,8 @@
from botocore.exceptions import ClientError
import uuid
import sys
import socket


def flatten(lis):
"""Given a list, possibly nested to any level, return it flattened."""
@@ -16,100 +18,92 @@ def flatten(lis):
new_lis.append(item)
return new_lis

_logger_level_set = False
_logger_filter_set = False

guid = str(uuid.uuid4())
log_context = os.environ.get('TORNASOLE_LOG_CONTEXT', default=guid)
_logger_initialized = False


class SingleLevelFilter(logging.Filter):
def __init__(self, passlevel, reject):
self.passlevel = passlevel
self.reject = reject
class MaxLevelFilter(logging.Filter):
'''Filters (lets through) all messages with level < LEVEL'''
def __init__(self, level):
super().__init__()
self.level = level

def filter(self, record):
if self.reject:
return (record.levelno != self.passlevel)
else:
return (record.levelno == self.passlevel)

def get_logger():
global _logger_level_set
global _logger_filter_set
name = 'tornasole'
if not _logger_level_set:
log_level = os.environ.get('TORNASOLE_LOG_LEVEL', default='info')
# TORNASOLE_LOG_PATH is the full path to log file, by default, log is generated in current dir
fh = logging.FileHandler(os.environ.get('TORNASOLE_LOG_PATH', default='./tornasole.log'))
log_level = log_level.lower()
if log_level not in ['info', 'debug', 'warning', 'error', 'critical', 'off']:
print('Invalid log level for TORNASOLE_LOG_LEVEL')
log_level = 'info'
# "<" instead of "<=": since logger.setLevel is inclusive, this should be exclusive
return record.levelno < self.level


def _get_log_level():
default = 'info'
log_level = os.environ.get('TORNASOLE_LOG_LEVEL', default=default)
log_level = log_level.lower()
allowed_levels = ['info', 'debug', 'warning', 'error', 'critical', 'off']
if log_level not in allowed_levels:
log_level = default

level = None
if log_level is None or log_level == 'off':
level = None
if log_level == 'off':
print('logging is off')
else:
if log_level == 'critical':
level = logging.CRITICAL
elif log_level == 'error':
level = logging.ERROR
elif log_level == 'warning':
level = logging.WARNING
elif log_level == 'info':
level = logging.INFO
elif log_level == 'debug':
level = logging.DEBUG
_logger_level_set = True
if not _logger_filter_set:
log_filter = os.environ.get('TORNASOLE_LOG_FILTER', default=None)
if log_filter is not None:
log_filter = log_filter.lower()
if log_filter not in ['info', 'debug', 'warning', 'error', 'critical', None]:
print('Invalid log filter for TORNASOLE_LOG_FILTER')
log_filter = None
filter = None
# by default, no filter
if log_filter is None:
print('logging filter is off')
else:
if log_filter == 'critical':
filter = logging.CRITICAL
elif log_filter == 'error':
filter = logging.ERROR
elif log_filter == 'warning':
filter = logging.WARNING
elif log_filter == 'info':
filter = logging.INFO
elif log_filter == 'debug':
filter = logging.DEBUG
_logger_filter_set = True
# create a logger
logger = logging.getLogger()
if not len(logger.handlers):
log_formatter = logging.Formatter('%(asctime)s ' + log_context + ' %(levelname)s %(filename)s(%(lineno)d) %(message)s')
stdout_handler = logging.StreamHandler(sys.stdout)
stderr_handler = logging.StreamHandler(sys.stderr)
# adding filter to StreamHandler
if log_filter is not None:
f1 = SingleLevelFilter(filter, False)
f2 = SingleLevelFilter(filter, True)
# print level == log_filter messages to stderr
stderr_handler.addFilter(f1)
# print level != log_filter messages to stdout
stdout_handler.addFilter(f2)
# adding filter to FileHandler
# fh.addFilter(f2)
else:
if log_level == 'critical':
level = logging.CRITICAL
elif log_level == 'error':
level = logging.ERROR
elif log_level == 'warning':
level = logging.WARNING
elif log_level == 'info':
level = logging.INFO
elif log_level == 'debug':
level = logging.DEBUG
return level


def get_logger(name='tornasole'):
global _logger_initialized
if not _logger_initialized:
worker_pid = socket.gethostname() + ':' + str(os.getpid())
log_context = os.environ.get('TORNASOLE_LOG_CONTEXT', default=worker_pid)
level = _get_log_level()
logger = logging.getLogger(name)

logger.handlers = []
log_formatter = logging.Formatter(fmt='[%(asctime)s.%(msecs)03d ' + log_context +
' %(levelname)s %(filename)s:%(lineno)d] %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')

stdout_handler = logging.StreamHandler(sys.stdout)
stdout_handler.setFormatter(log_formatter)
stderr_handler.setFormatter(log_formatter)
fh.setFormatter(log_formatter)

if os.environ.get('TORNASOLE_LOG_ALL_TO_STDOUT', default='TRUE').lower() == 'false':
stderr_handler = logging.StreamHandler(sys.stderr)
min_level = logging.DEBUG
# lets through all levels less than ERROR
stdout_handler.addFilter(MaxLevelFilter(logging.ERROR))
stdout_handler.setLevel(min_level)

stderr_handler.setLevel(max(min_level, logging.ERROR))
stderr_handler.setFormatter(log_formatter)
logger.addHandler(stderr_handler)

logger.addHandler(stdout_handler)

# TORNASOLE_LOG_PATH is the full path to log file
# by default, log is only written to stdout&stderr
# if this is set, it is written to file
path = os.environ.get('TORNASOLE_LOG_PATH', default=None)
if path is not None:
fh = logging.FileHandler(path)
fh.setFormatter(log_formatter)
logger.addHandler(fh)

if level:
logger.setLevel(level)
else:
logger.disabled = True
logger.addHandler(fh)
logger.addHandler(stdout_handler)
logger.addHandler(stderr_handler)
return logger
logger.propagate = False
_logger_initialized = True
return logging.getLogger(name)


def get_immediate_subdirectories(a_dir):
return [name for name in os.listdir(a_dir)
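
The net effect of the new MaxLevelFilter plus the handler levels in get_logger is a stdout/stderr split: records below ERROR go to stdout, and ERROR and above go to stderr. Below is a self-contained sketch of that routing using only the standard logging module; the 'demo' logger name is chosen for illustration.

import logging
import sys

class MaxLevelFilter(logging.Filter):
    '''Lets through all records with a level strictly below the given level.'''
    def __init__(self, level):
        super().__init__()
        self.level = level

    def filter(self, record):
        return record.levelno < self.level

logger = logging.getLogger('demo')
logger.setLevel(logging.DEBUG)

stdout_handler = logging.StreamHandler(sys.stdout)
stdout_handler.addFilter(MaxLevelFilter(logging.ERROR))  # DEBUG through WARNING only
stdout_handler.setLevel(logging.DEBUG)

stderr_handler = logging.StreamHandler(sys.stderr)
stderr_handler.setLevel(logging.ERROR)                   # ERROR and CRITICAL only

logger.addHandler(stdout_handler)
logger.addHandler(stderr_handler)

logger.info('goes to stdout')
logger.error('goes to stderr')
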
