From 09d2eea38fd13f6c0af9b04429e645bf611b6cfc Mon Sep 17 00:00:00 2001
From: Neta Zmora <neta.zmora@intel.com>
Date: Sun, 5 May 2019 14:29:42 +0300
Subject: [PATCH] Allow loading checkpoints not containing an optimizer

Support loading a model from a checkpoint file that does not
contain an Optimizer instance.
Before this change, loading such a model required using
```load_lean_checkpoint``` (or --exp-load-weights-from from
the compress_classifier.py command-line), so this change
is for convenience only.
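
As an illustration only (not part of the patch), here is a minimal sketch of
loading such a checkpoint after this change. It assumes the 4-tuple return
value ```(model, compression_scheduler, optimizer, start_epoch)``` implied by
the surrounding code, and the file name and resnet18 architecture are
hypothetical placeholders:

```python
import torchvision.models as models
import distiller.apputils as apputils

# The architecture must match the one stored in the checkpoint (assumed here).
model = models.resnet18()

# With this patch, a checkpoint saved without optimizer state no longer raises;
# the returned optimizer is simply None, and load_lean_checkpoint() is not needed.
model, compression_scheduler, optimizer, start_epoch = apputils.load_checkpoint(
    model, 'checkpoint_without_optimizer.pth.tar')

print(optimizer)  # None when the checkpoint contains no optimizer state
```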
---
 distiller/apputils/checkpoint.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/distiller/apputils/checkpoint.py b/distiller/apputils/checkpoint.py
index acfa3a3..8ba1558 100755
--- a/distiller/apputils/checkpoint.py
+++ b/distiller/apputils/checkpoint.py
@@ -185,10 +185,9 @@ def load_checkpoint(model, chkpt_file, optimizer=None, model_device=None, *, lea
         optimizer = _load_optimizer(checkpoint['optimizer_type'],
             checkpoint['optimizer_state_dict'], model)
     except KeyError:
-        if 'optimizer' not in checkpoint:
-            raise
-        # older checkpoints didn't support this feature
-        # they had the 'optimizer' field instead
+        # Older checkpoints do not support optimizer loading: they either had an
+        # 'optimizer' field (a different name) that was not used during loading,
+        # or they did not checkpoint the optimizer at all.
         optimizer = None
 
     if optimizer is not None:
-- 
GitLab