Commit 2e0d147f authored by Guy Jacob

Quantizer: Re-move model to device at the end of prepare_model

parent 475b6a67
@@ -220,6 +220,8 @@ class Quantizer(object):
         summary_graph = distiller.SummaryGraph(self.model, dummy_input)
         self.adjacency_map = summary_graph.adjacency_map(dedicated_modules_only=False)
 
+        model_device = distiller.model_device(self.model)
+
         self._pre_prepare_model(dummy_input)
         self._pre_process_container(self.model)
@@ -247,6 +249,9 @@ class Quantizer(object):
         self._post_prepare_model()
 
+        # Re-transfer model to the device it was on, in case the quantizer created new parameters/buffers
+        self.model.to(model_device)
+
         msglogger.info('Quantized model:\n\n{0}\n'.format(self.model))
 
     def _pre_prepare_model(self, dummy_input):
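
As context for the change above, the general PyTorch pattern is: record the model's device before the quantizer's module-replacement pass runs (that pass may create new parameters/buffers, which default to the CPU), then move the whole model back with model.to(...) once preparation is done. The sketch below is a minimal, self-contained illustration of that pattern, not Distiller's actual Quantizer code: _replace_with_quantized_modules and the toy nn.Sequential model are hypothetical, and next(model.parameters()).device stands in for distiller.model_device().

    import torch
    import torch.nn as nn


    def _replace_with_quantized_modules(model: nn.Module) -> None:
        # Hypothetical stand-in for the Quantizer's module-replacement pass.
        # It registers a new buffer, which is created on the CPU regardless
        # of where the rest of the model lives.
        for module in model.modules():
            if isinstance(module, nn.Linear):
                module.register_buffer('scale', torch.ones(1))


    def prepare_model(model: nn.Module) -> nn.Module:
        # Remember the device the model was on before any transformations
        # (stand-in for distiller.model_device(self.model)).
        model_device = next(model.parameters()).device

        _replace_with_quantized_modules(model)

        # Re-transfer the model to the device it was on, in case the pass
        # above created new parameters/buffers (they would otherwise remain
        # on the CPU).
        model.to(model_device)
        return model


    if __name__ == '__main__':
        net = nn.Sequential(nn.Linear(8, 4), nn.ReLU(), nn.Linear(4, 2))
        if torch.cuda.is_available():
            net = net.cuda()
        prepare_model(net)
        # All parameters and buffers now share the original device.
        print({name: t.device for name, t in net.state_dict().items()})
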