[GNA] Fixed legacy tests
elilobanova committed Sep 7, 2021
1 parent 2dcb217 commit 007ff88
Showing 1 changed file with 3 additions and 6 deletions.
@@ -479,7 +479,8 @@ class ScaleFactorPerLayer<InferenceEngine::CNNLayer *> {
 
     if ((!fakeQuantize && quantSibling->_dst_quant.IsScaleSet()) ||
         (fakeQuantize && quantSibling->_dst_quant.IsScaleSet() && !fp32eq(quantSibling->_dst_quant.GetScale(), 1.0) &&
-        quantSibling->_dst_quant.GetScale() < inputQuant->_dst_quant.GetScale()) || infiniteLoopCount > 0) {
+        quantSibling->_dst_quant.GetScale() < inputQuant->_dst_quant.GetScale()) ||
+        quantSibling->_dst_quant.IsScaleSet() && infiniteLoopCount > 0) {
         // means we already restarted propagation input memory layer
         // need to search for requantiseable layer prior memory output layer
         InferenceEngine::CNNLayerPtr restartedLayer;
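Taken on its own, the new restart condition in the hunk above packs three cases into one boolean expression. A minimal standalone sketch, using simplified stand-in types rather than the plugin's actual QuantizedLayerParams and fp32eq helpers, might express the same intent as:

#include <algorithm>
#include <cmath>

// Hypothetical stand-ins; the real code reads these values from
// QuantizedLayerParams and uses the plugin's own fp32eq() helper.
struct DstQuant {
    bool  scaleSet = false;
    float scale    = 1.0f;
};

static bool fp32eq(float a, float b) {
    return std::fabs(a - b) <= 0.00001f * std::min(std::fabs(a), std::fabs(b));
}

// Mirrors the updated condition: propagation is restarted only when the
// sibling already has a scale assigned, and either the non-FakeQuantize
// path applies, the FakeQuantize path sees a smaller non-unit scale, or
// we are retrying after infinite-loop detection.
bool shouldRestartPropagation(bool fakeQuantize, const DstQuant& sibling,
                              const DstQuant& input, int infiniteLoopCount) {
    return (!fakeQuantize && sibling.scaleSet) ||
           (fakeQuantize && sibling.scaleSet && !fp32eq(sibling.scale, 1.0f) &&
            sibling.scale < input.scale) ||
           (sibling.scaleSet && infiniteLoopCount > 0);
}

The change from the old code is the last clause: an infinite-loop retry alone no longer triggers a restart unless the sibling's scale has actually been set.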
@@ -658,7 +659,7 @@ class ScaleFactorPerLayer<InferenceEngine::EltwiseLayer*> {
             auto quantParamsOpposite =
                 InferenceEngine::getInjectedData<QuantizedLayerParams>(InferenceEngine::CNNNetPrevLayer(eltwiseLayer, !inputIx));
 
-            while (in) {
+            while (in && !LayerInfo(in).isInput() && !LayerInfo(in).isMemory() && !LayerInfo(in).isCopy()) {
                 auto info = LayerInfo(in);
                 if (info.isActivation() || info.isConst()) {
                     auto quantDataForInputLayer = InferenceEngine::getInjectedData<QuantizedLayerParams>(*in);
@@ -681,10 +682,6 @@ class ScaleFactorPerLayer<InferenceEngine::EltwiseLayer*> {
                         return true;
                     }
 
-                if (info.has8BOr16BOutput()) {
-                    return false;
-                }
-
                 if (fakeQuantize && info.isWeightableIdentity()) {
                     auto quantDataForInputLayer = InferenceEngine::getInjectedData<QuantizedLayerParams>(*in);
                     if (!fp32eq(quantDataForInputLayer->_weights_quant.GetScale(), 1.0f)) {
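The two hunks above tighten where the backwards search over previous layers stops: instead of relying on the removed has8BOr16BOutput() early return, the loop guard itself now ends the walk at input, memory, or copy layers. A minimal standalone sketch of that traversal pattern, using a hypothetical Layer struct and Kind enum rather than the plugin's CNNLayerPtr/LayerInfo types, might look like:

#include <iostream>
#include <memory>

// Hypothetical stand-ins for the plugin's CNNLayer / LayerInfo machinery.
enum class Kind { Input, Memory, Copy, Activation, Const, Other };

struct Layer {
    Kind kind;
    std::shared_ptr<Layer> prev;  // single-input chain for simplicity
};

// Walk backwards from 'in' and report whether a requantisable layer
// (activation or const in this sketch) is reached before the chain ends
// at an input, memory, or copy layer -- mirroring the new loop guard.
bool findRequantisableSource(std::shared_ptr<Layer> in) {
    while (in && in->kind != Kind::Input && in->kind != Kind::Memory && in->kind != Kind::Copy) {
        if (in->kind == Kind::Activation || in->kind == Kind::Const) {
            return true;  // the real code adjusts this layer's scale factor here
        }
        in = in->prev;
    }
    return false;  // stopped at a boundary layer or ran out of layers
}

int main() {
    auto input = std::make_shared<Layer>(Layer{Kind::Input, nullptr});
    auto act   = std::make_shared<Layer>(Layer{Kind::Activation, input});
    auto other = std::make_shared<Layer>(Layer{Kind::Other, act});
    std::cout << std::boolalpha << findRequantisableSource(other) << "\n";  // prints: true
}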
