diff --git a/apputils/execution_env.py b/apputils/execution_env.py
index b4d1bbeedeeaf2f5eeb5565c6994d66733c791bf..8867b9ce7964b5d1d0cfd71dba3ea89e0aa88500 100755
--- a/apputils/execution_env.py
+++ b/apputils/execution_env.py
@@ -14,7 +14,7 @@
 # limitations under the License.
 #
 
-"""Log information regarfing the execution environment.
+"""Log information regarding the execution environment.
 
 This is helpful if you want to recreate an experiment at a later time, or if
 you want to understand the environment in which you execute the training.
@@ -22,8 +22,10 @@ you want to understand the environment in which you execute the training.
 
 import sys
 import os
+import time
 import platform
 import logging
+import logging.config
 import numpy as np
 import torch
 from git import Repo
@@ -76,3 +78,26 @@ def log_execution_env_state(app_args, gitroot='.'):
     logger.debug("Numpy: %s", np.__version__)
     log_git_state()
     logger.debug("App args: %s", app_args)
+
+
+def config_pylogger(log_cfg_file, experiment_name):
+    """Configure the Python logger.
+
+    For each execution of the application, we'd like to create a unique log directory.
+    By default this directory is named using the date and time of day, so that directories
+    can be sorted by recency. You can also name your experiments and prefix the log
+    directory with this name. This can be useful when accessing experiment data from
+    TensorBoard, for example.
+    """
+    timestr = time.strftime("%Y.%m.%d-%H%M%S")
+    filename = timestr if experiment_name is None else experiment_name + '___' + timestr
+    logdir = './logs' + '/' + filename
+    if not os.path.exists(logdir):
+        os.makedirs(logdir)
+    log_filename = os.path.join(logdir, filename + '.log')
+    logging.config.fileConfig(os.path.join(os.getcwd(), log_cfg_file), defaults={'logfilename': log_filename})
+    msglogger = logging.getLogger()
+    msglogger.logdir = logdir
+    msglogger.log_filename = log_filename
+    msglogger.info('Log file for this run: ' + os.path.realpath(log_filename))
+    return msglogger
diff --git a/examples/classifier_compression/compress_classifier.py b/examples/classifier_compression/compress_classifier.py
index 9b45391d860bef3c85061a9d690691e62205503a..b302522851ca1ab483d9419a41d7c1b63a6a2cff 100755
--- a/examples/classifier_compression/compress_classifier.py
+++ b/examples/classifier_compression/compress_classifier.py
@@ -130,21 +130,6 @@ parser.add_argument('--gpus', metavar='DEV_ID', default=None,
 parser.add_argument('--name', '-n', metavar='NAME', default=None,
                     help='Experiment name')
 
-def config_logger(experiment_name):
-    # The Distiller library writes logs to the Python logger, so we configure it.
-    timestr = time.strftime("%Y.%m.%d-%H%M%S")
-    filename = timestr if experiment_name is None else experiment_name + '___' + timestr
-    logdir = './logs' + '/' + filename
-    if not os.path.exists(logdir):
-        os.makedirs(logdir)
-    log_filename = os.path.join(logdir, filename + '.log')
-    logging.config.fileConfig(os.path.join(script_dir, 'logging.conf'), defaults={'logfilename': log_filename})
-    msglogger = logging.getLogger()
-    msglogger.logdir = logdir
-    msglogger.log_filename = log_filename
-    msglogger.info('Log file for this run: ' + os.path.realpath(log_filename))
-    return msglogger
-
 def check_pytorch_version():
     if torch.__version__ < '0.4.0':
         print("\nNOTICE:")
@@ -161,7 +146,7 @@ def main():
     global msglogger
     check_pytorch_version()
     args = parser.parse_args()
-    msglogger = config_logger(args.name)
+    msglogger = apputils.config_pylogger('logging.conf', args.name)
 
     # Log various details about the execution environment. It is sometimes useful
     # to refer to past experiment executions and this information may be useful.
@@ -514,7 +499,7 @@ if __name__ == '__main__':
     except Exception as e:
         if msglogger is not None:
             msglogger.error(traceback.format_exc())
-        exit(1)
+        raise e
     finally:
         if msglogger is not None:
             msglogger.info('')
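For reference, a minimal usage sketch of the relocated helper from a caller's point of view. It assumes the apputils package re-exports config_pylogger from execution_env.py (that __init__.py change is not shown in this diff), and that a logging.conf file defining a logfilename default is present in the current working directory, since the helper resolves the config path via os.getcwd():

    # Hypothetical standalone script demonstrating the new helper (not part of this patch).
    import apputils

    # Creates ./logs/<experiment>___<timestamp>/, configures the root logger from
    # logging.conf, and attaches .logdir / .log_filename to the returned logger.
    msglogger = apputils.config_pylogger('logging.conf', 'my_experiment')
    msglogger.info('Logs for this run are written under %s', msglogger.logdir)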