From 44144f7c7a61c2f6057053b63f7866b0d151924a Mon Sep 17 00:00:00 2001
From: Guy Jacob <guy.jacob@intel.com>
Date: Sun, 2 Dec 2018 15:13:26 +0200
Subject: [PATCH] A couple of clarifications and typo fixes from the last commit

---
 examples/quantization/preact_resnet_cifar_base_fp32.yaml | 8 ++++----
 examples/quantization/preact_resnet_cifar_dorefa.yaml    | 8 ++++----
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/examples/quantization/preact_resnet_cifar_base_fp32.yaml b/examples/quantization/preact_resnet_cifar_base_fp32.yaml
index 3b5b41d..40e7636 100644
--- a/examples/quantization/preact_resnet_cifar_base_fp32.yaml
+++ b/examples/quantization/preact_resnet_cifar_base_fp32.yaml
@@ -5,8 +5,8 @@
 # python compress_classifier.py -a preact_resnet20_cifar --lr 0.1 -p 50 -b 128 <path_to_cifar10_dataset> -j 1 --epochs 200 --compress=../quantization/preact_resnet_cifar_base_fp32.yaml --wd=0.0002 --vs=0 --gpus 0
 #
 # Notes:
-#  * Replace '-a preact_resnet20_cifar' with the required depth
-#  * '--wd-0.0002': Weight decay of 0.0002 is used
+#  * In '-a preact_resnet20_cifar', replace '20' with the required depth
+#  * '--wd=0.0002': Weight decay of 0.0002 is used
 #  * '--vs=0': We train on the entire training dataset, and validate using the test set
 #
 # Knowledge Distillation:
@@ -38,8 +38,8 @@ policies:
       ending_epoch: 200
       frequency: 1
 
-# The results listed here are based on 4 runs in each configuration:
-
+# The results listed here are based on 4 runs in each configuration. All results are Top-1:
+#
 # +-------+--------------+-------------------------+
 # |       |              |           FP32          |
 # +-------+--------------+-------------------------+
diff --git a/examples/quantization/preact_resnet_cifar_dorefa.yaml b/examples/quantization/preact_resnet_cifar_dorefa.yaml
index 7ae1faa..cec1f54 100644
--- a/examples/quantization/preact_resnet_cifar_dorefa.yaml
+++ b/examples/quantization/preact_resnet_cifar_dorefa.yaml
@@ -9,8 +9,8 @@
 # python compress_classifier.py -a preact_resnet20_cifar --lr 0.1 -p 50 -b 128 <path_to_cifar10_dataset> -j 1 --epochs 200 --compress=../quantization/preact_resnet_cifar_dorefa.yaml --wd=0.0002 --vs=0 --gpus 0
 #
 # Notes:
-#  * Replace '-a preact_resnet20_cifar' with the required depth
-#  * '--wd-0.0002': Weight decay of 0.0002 is used
+#  * In '-a preact_resnet20_cifar', replace '20' with the required depth
+#  * '--wd=0.0002': Weight decay of 0.0002 is used
 #  * '--vs=0': We train on the entire training dataset, and validate using the test set
 #
 # Knowledge Distillation:
@@ -68,8 +68,8 @@ policies:
       ending_epoch: 161
       frequency: 1
 
-# The results listed here are based on 4 runs in each configuration:
-
+# The results listed here are based on 4 runs in each configuration. All results are Top-1:
+#
 # +-------+--------------+-------------------------+-------------------------+
 # |       |              |           FP32          |       DoReFa w3-a8      |
 # +-------+--------------+-------------------------+-------------------------+
-- 
GitLab
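
A quick illustration of the corrected note, as a hedged sketch rather than part of the patch itself: assuming the repository layout referenced by these YAML files, and assuming the 56-layer variant is registered in Distiller under the name preact_resnet56_cifar (an assumption; only the 20-layer name appears above), training a deeper model changes nothing but the depth inside the '-a' argument:

    # Hypothetical: assumes preact_resnet56_cifar is a registered model name
    python compress_classifier.py -a preact_resnet56_cifar --lr 0.1 -p 50 -b 128 \
        <path_to_cifar10_dataset> -j 1 --epochs 200 \
        --compress=../quantization/preact_resnet_cifar_base_fp32.yaml \
        --wd=0.0002 --vs=0 --gpus 0

As in the fixed notes, '--wd=0.0002' applies a weight decay of 0.0002, and '--vs=0' disables the held-out validation split, so training uses the entire training set and the test set serves for validation.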