From 52c4d0b0ec380df65ec13b5c77eda49175228e65 Mon Sep 17 00:00:00 2001
From: Lev Zlotnik <46742999+levzlotnik@users.noreply.github.com>
Date: Tue, 16 Apr 2019 17:20:10 +0300
Subject: [PATCH] ActivationStatsCollectors: Handle non-contiguous tensors (#228)

---
 distiller/data_loggers/collector.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/distiller/data_loggers/collector.py b/distiller/data_loggers/collector.py
index 1e3a7b9..9ceed23 100755
--- a/distiller/data_loggers/collector.py
+++ b/distiller/data_loggers/collector.py
@@ -233,6 +233,8 @@ class RecordsActivationStatsCollector(ActivationStatsCollector):
                 return stats.detach().cpu().numpy()
 
         # We get a batch of activations, from which we collect statistics
+        if not output.is_contiguous():
+            output = output.contiguous()
         act = output.view(output.size(0), -1)
         batch_min_list = to_np(torch.min(act, dim=1)).tolist()
         batch_max_list = to_np(torch.max(act, dim=1)).tolist()
@@ -372,6 +374,8 @@ class QuantCalibrationStatsCollector(ActivationStatsCollector):
             return sqrt((M / (total_values_so_far + numel - 1)).item())
 
         def update_record(record, tensor):
+            if not tensor.is_contiguous():
+                tensor = tensor.contiguous()
             act = tensor.view(tensor.size(0), -1)
            min_per_sample = act.min(dim=1)[0]
             max_per_sample = act.max(dim=1)[0]
-- 
GitLab
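
For context, a minimal standalone sketch (not part of the patch) of the failure mode both hunks guard against: a tensor whose dimensions were permuted is non-contiguous, and calling .view() on it raises a RuntimeError; calling .contiguous() first, as the patch does, produces a copy that .view() accepts. The transpose() call and the tensor shape below are illustrative assumptions only, standing in for whatever layer produced the non-contiguous activation reported in #228.

import torch

# Hypothetical demo, not part of the patch: transpose() stands in for whatever
# produced a non-contiguous activation tensor.
output = torch.randn(4, 8, 16).transpose(1, 2)
print(output.is_contiguous())                    # False

try:
    act = output.view(output.size(0), -1)        # .view() needs compatible (here: contiguous) strides
except RuntimeError as err:
    print("view() failed:", err)

# The guard added by the patch: copy into contiguous memory, then flatten.
if not output.is_contiguous():
    output = output.contiguous()
act = output.view(output.size(0), -1)            # shape: (4, 128)
print(act.shape)

An alternative would be tensor.reshape(tensor.size(0), -1), which copies only when a view is impossible; the patch instead keeps the existing .view() calls and makes the contiguity check and copy explicit.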