From 148b7474c08de574e56edce7dd6d8efd53aa8d78 Mon Sep 17 00:00:00 2001
From: Neta Zmora <31280975+nzmora@users.noreply.github.com>
Date: Mon, 1 Apr 2019 15:51:16 +0300
Subject: [PATCH] Thinning: fix param_name_2_layer_name

This fix does not change behavior.
The previous code happened to work correctly only because 'weights'
and '.weight' have the same length, so slicing off len('weights')
characters removed exactly the trailing '.weight' suffix.
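
For illustration only (not part of the patch), a minimal sketch of why
the old slice happened to work, using a hypothetical parameter name
'conv1.weight':

    param_name = 'conv1.weight'
    # Both suffixes are 7 characters long, so both slices are identical:
    param_name[:-len('weights')]   # 'conv1' -- accidentally correct
    param_name[:-len('.weight')]   # 'conv1' -- intentionally correct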
---
 distiller/thinning.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/distiller/thinning.py b/distiller/thinning.py
index c92f880..9475836 100755
--- a/distiller/thinning.py
+++ b/distiller/thinning.py
@@ -84,7 +84,14 @@ def get_normalized_recipe(recipe):
 
 
 def param_name_2_layer_name(param_name):
-    return param_name[:-len('weights')]
+    """Convert a weights tensor's name to the name of the layer using the tensor.
+    
+    By convention, PyTorch modules name their weights parameters as self.weight
+    (see for example: torch.nn.modules.conv) which means that their fully-qualified 
+    name when enumerating a model's parameters is the modules name followed by '.weight'.
+    We exploit this convention to convert a weights tensor name to the fully-qualified 
+    module name."""
+    return param_name[:-len('.weight')]
 
 
 def directives_equal(d1, d2):
-- 
GitLab