diff --git a/distiller/__init__.py b/distiller/__init__.py
index f3367b1aa002503656fc9afea937649e383624f3..fff740e0bca689ee3ba363ab32732a55e10bcaa1 100755
--- a/distiller/__init__.py
+++ b/distiller/__init__.py
@@ -26,6 +26,8 @@
 from .thinning import *
 from .knowledge_distillation import KnowledgeDistillationPolicy, DistillationLossWeights
 from .summary_graph import SummaryGraph, onnx_name_2_pytorch_name
+import logging
+logging.captureWarnings(True)
 
 del dict_config
 del thinning
diff --git a/distiller/apputils/execution_env.py b/distiller/apputils/execution_env.py
index ee8a73f34e65994f1061e28d483dd018b6d46159..fe7d59428233de2b185736cbf10771527266eeb9 100755
--- a/distiller/apputils/execution_env.py
+++ b/distiller/apputils/execution_env.py
@@ -117,7 +117,7 @@ def log_execution_env_state(config_paths=None, logdir=None, gitroot='.'):
             logger.debug('Failed to copy of config file: {}'.format(str(e)))
 
 
-def config_pylogger(log_cfg_file, experiment_name, output_dir='logs'):
+def config_pylogger(log_cfg_file, experiment_name, output_dir='logs', verbose=False):
     """Configure the Python logger.
 
     For each execution of the application, we'd like to create a unique log directory.
@@ -137,6 +137,8 @@ def config_pylogger(log_cfg_file, experiment_name, output_dir='logs'):
     msglogger = logging.getLogger()
     msglogger.logdir = logdir
     msglogger.log_filename = log_filename
+    if verbose:
+        msglogger.setLevel(logging.DEBUG)
     msglogger.info('Log file for this run: ' + os.path.realpath(log_filename))
 
     # Create a symbollic link to the last log file created (for easier access)
diff --git a/examples/classifier_compression/compress_classifier.py b/examples/classifier_compression/compress_classifier.py
index 0f5b78c370928b617c3e7595d95c6e76360d2ed4..0b13698507cb1efe38f1b800335c57530ca57e68 100755
--- a/examples/classifier_compression/compress_classifier.py
+++ b/examples/classifier_compression/compress_classifier.py
@@ -93,7 +93,8 @@ def main():
     if not os.path.exists(args.output_dir):
         os.makedirs(args.output_dir)
 
-    msglogger = apputils.config_pylogger(os.path.join(script_dir, 'logging.conf'), args.name, args.output_dir)
+    msglogger = apputils.config_pylogger(os.path.join(script_dir, 'logging.conf'), args.name, args.output_dir,
+                                         args.verbose)
 
     # Log various details about the execution environment. It is sometimes useful
     # to refer to past experiment executions and this information may be useful.
diff --git a/examples/classifier_compression/parser.py b/examples/classifier_compression/parser.py
index f19abac907cc6b3fac2d73d19119a55304bfb449..38df6e88dbfd85e88350edefd464eff80409b741 100755
--- a/examples/classifier_compression/parser.py
+++ b/examples/classifier_compression/parser.py
@@ -51,6 +51,7 @@ def get_parser():
 
     parser.add_argument('--print-freq', '-p', default=10, type=int,
                         metavar='N', help='print frequency (default: 10)')
+    parser.add_argument('--verbose', '-v', action='store_true', help='Emit debug log messages')
 
     load_checkpoint_group = parser.add_argument_group('Resuming arguments')
     load_checkpoint_group_exc = load_checkpoint_group.add_mutually_exclusive_group()
@@ -81,18 +82,20 @@ def get_parser():
     parser.add_argument('--masks-sparsity', dest='masks_sparsity', action='store_true', default=False,
                         help='print masks sparsity table at end of each epoch')
     parser.add_argument('--param-hist', dest='log_params_histograms', action='store_true', default=False,
-                        help='log the parameter tensors histograms to file (WARNING: this can use significant disk space)')
+                        help='log the parameter tensors histograms to file '
+                             '(WARNING: this can use significant disk space)')
     parser.add_argument('--summary', type=lambda s: s.lower(), choices=SUMMARY_CHOICES, action='append',
                         help='print a summary of the model, and exit - options: | '.join(SUMMARY_CHOICES))
     parser.add_argument('--export-onnx', action='store', nargs='?', type=str, const='model.onnx', default=None,
                         help='export model to ONNX format')
     parser.add_argument('--compress', dest='compress', type=str, nargs='?', action='store',
                         help='configuration file for pruning the model (default is to use hard-coded schedule)')
-    parser.add_argument('--sense', dest='sensitivity', choices=['element', 'filter', 'channel'], type=lambda s: s.lower(),
-                        help='test the sensitivity of layers to pruning')
+    parser.add_argument('--sense', dest='sensitivity', choices=['element', 'filter', 'channel'],
+                        type=lambda s: s.lower(), help='test the sensitivity of layers to pruning')
     parser.add_argument('--sense-range', dest='sensitivity_range', type=float, nargs=3, default=[0.0, 0.95, 0.05],
-                        help='an optional parameter for sensitivity testing providing the range of sparsities to test.\n'
-                             'This is equivalent to creating sensitivities = np.arange(start, stop, step)')
+                        help='an optional parameter for sensitivity testing '
+                             'providing the range of sparsities to test.\n'
+                             'This is equivalent to creating sensitivities = np.arange(start, stop, step)')
     parser.add_argument('--extras', default=None, type=str,
                         help='file with extra configuration information')
     parser.add_argument('--deterministic', '--det', action='store_true',
@@ -100,7 +103,8 @@ def get_parser():
     parser.add_argument('--seed', type=int, default=None,
                         help='seed the PRNG for CPU, CUDA, numpy, and Python')
     parser.add_argument('--gpus', metavar='DEV_ID', default=None,
-                        help='Comma-separated list of GPU device IDs to be used (default is to use all available devices)')
+                        help='Comma-separated list of GPU device IDs to be used '
+                             '(default is to use all available devices)')
     parser.add_argument('--cpu', action='store_true', default=False,
                         help='Use CPU only. \n'
                         'Flag not set => uses GPUs according to the --gpus flag value.'