From 6567ecec9d95f6af0a4a2a64fc28d8d3e599cf80 Mon Sep 17 00:00:00 2001
From: Neta Zmora <neta.zmora@intel.com>
Date: Sun, 3 Mar 2019 14:38:10 +0200
Subject: [PATCH] compress_classifier.py: fix PNG and ONNX exports broken in
 new release

Release 0.3 broke the exports to PNG and ONNX; this is the fix.
---
 examples/classifier_compression/compress_classifier.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/examples/classifier_compression/compress_classifier.py b/examples/classifier_compression/compress_classifier.py
index d0092e1..7d49d2e 100755
--- a/examples/classifier_compression/compress_classifier.py
+++ b/examples/classifier_compression/compress_classifier.py
@@ -69,6 +69,7 @@ import torch.utils.data
 import torchnet.meter as tnt
 import distiller
 import distiller.apputils as apputils
+import distiller.model_summaries as model_summaries
 from distiller.data_loggers import *
 import distiller.quantization as quantization
 import examples.automated_deep_compression as adc
@@ -615,9 +616,9 @@ def evaluate_model(model, criterion, test_loader, loggers, activations_collector
 
 def summarize_model(model, dataset, which_summary):
     if which_summary.startswith('png'):
-        apputils.draw_img_classifier_to_file(model, 'model.png', dataset, which_summary == 'png_w_params')
+        model_summaries.draw_img_classifier_to_file(model, 'model.png', dataset, which_summary == 'png_w_params')
     elif which_summary == 'onnx':
-        apputils.export_img_classifier_to_onnx(model, 'model.onnx', dataset)
+        model_summaries.export_img_classifier_to_onnx(model, 'model.onnx', dataset)
     else:
         distiller.model_summary(model, which_summary, dataset)
 
--
GitLab
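
For reference, a minimal sketch of how the relocated helpers are called once
this patch is applied. Only the two model_summaries calls are taken from the
patch itself; building the model via distiller.models.create_model(), and the
'resnet20_cifar' / 'cifar10' arch and dataset strings, are assumptions chosen
for illustration.

    # Minimal usage sketch, assuming a Distiller checkout with this patch
    # applied. The arch/dataset strings are illustrative, not mandated here.
    import distiller.model_summaries as model_summaries
    from distiller.models import create_model

    dataset = 'cifar10'
    model = create_model(pretrained=True, dataset=dataset, arch='resnet20_cifar')

    # Same call as summarize_model()'s 'png' branch; passing True for the
    # last argument gives the 'png_w_params' behavior (parameter nodes drawn).
    model_summaries.draw_img_classifier_to_file(model, 'model.png', dataset, False)

    # Same call as summarize_model()'s 'onnx' branch.
    model_summaries.export_img_classifier_to_onnx(model, 'model.onnx', dataset)

The same code paths can be exercised through the sample script's --summary
option (e.g. --summary=png, --summary=png_w_params, --summary=onnx), matching
the which_summary values handled in summarize_model() above.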