Skip to content
Snippets Groups Projects
Unverified Commit 0caca675 authored by Lev Zlotnik's avatar Lev Zlotnik Committed by GitHub
Browse files

Bug fix in PTQ yaml

parent d6e5c9eb
No related tags found
No related merge requests found
......@@ -67,16 +67,18 @@ quantizers:
conv1:
# The input to the first layer in the model will never have quantization metadata
input_overrides:
# Shorthand to take the quantization settings of the output (ignores any other settings)
from_outputs: True
0:
# Shorthand to take the quantization settings of the output (ignores any other settings)
from_output: True
fc:
# In ResNet, the FC layer has a view op before, which kills the quantization metadata. So we have to override
# (or enable auto_fallback).
input_overrides:
# Example of setting the actual value. Applicable only if 'from_outputs' isn't set.
# The following keys are supported: 'bits_activations', 'mode', 'clip_acts', 'clip_n_stds'
# Any key not explicitly set will default to the output setting
bits_activations: 6
0:
# Example of setting the actual value. Applicable only if 'from_output' isn't set.
# The following keys are supported: 'bits_activations', 'mode', 'clip_acts', 'clip_n_stds'
# Any key not explicitly set will default to the output setting
bits_activations: 6
# Overrides section for run 3
# overrides:
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment