Commit 1637a37: Gna fix sca issues (openvinotoolkit#5992)
* [GNA] Fix static code analysis issue 22401
* [GNA] Fix static code analysis issue 22404
* [GNA] Fix static code analysis issue 22297
* [GNA] Fix static code analysis issue 22525
* [GNA] Replace return with assert
Authored by sirzabek, committed by rnugmanx on Aug 26, 2021 (1 parent: e8ad1e2).
Showing 3 changed files with 5 additions and 4 deletions.
inference-engine/src/gna_plugin/backend/dnn_types.h (1 addition, 1 deletion)
@@ -71,7 +71,7 @@ struct DnnActivation {
         return type;
     }
     static DnnActivation fromType(DnnActivationType type) {
-        DnnActivation activation;
+        DnnActivation activation{};
         activation.type = type;
         activation.args = {};
         return activation;
@@ -26,7 +26,7 @@ class GNAFakeQuantizeLayer {
      * @brief convert FQ layer directly to gna-pwl activation layer
      */
     DnnActivation parseAsActivation() const {
-        DnnActivation fqActivation;
+        DnnActivation fqActivation{};
 
         fqActivation.fqParams.levels = fqLayer->GetParamAsSizeT("levels");
         auto inputShape = getShapeForRange(fqLayer, 1);
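The two `{}` hunks above fix the same class of SCA finding: DnnActivation has no default member initializers, so plain `DnnActivation x;` default-initializes the object and leaves its members indeterminate, while `DnnActivation x{};` value-initializes every member to zero. A minimal sketch of the difference, using a simplified stand-in struct rather than the real DnnActivation:

#include <cstdio>

struct Activation {       // simplified stand-in for DnnActivation
    int type;
    float args[2];        // no default member initializers
};

static Activation fromType(int type) {
    Activation a{};       // value-initialization: type and args are zeroed;
    a.type = type;        // with plain `Activation a;` the members would be
    return a;             // indeterminate until explicitly assigned
}

int main() {
    const Activation a = fromType(3);
    std::printf("%d %g %g\n", a.type, a.args[0], a.args[1]);   // prints: 3 0 0
    return 0;
}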
@@ -2091,6 +2091,7 @@ void MoveFakeQuantizeLayerIntoQuantParamsPass :: run() {
     };
 
     auto quantParams = InferenceEngine::getInjectedData<QuantizedLayerParams>(layer);
+    IE_ASSERT(quantParams != nullptr);
 
     // Find all output layers connected to FQ
     auto nextLayers = CNNNetGetAllNextLayersSkipCertain(layer.get(), -1, donotSkip);
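This hunk is the commit's "Replace return with assert" item: a null quantParams at this point is a programming error rather than a runtime condition to tolerate, so the pass now fails fast instead of continuing (or silently bailing out). A standalone sketch of the same pattern using the standard assert; the types and helper below are illustrative stand-ins, not the plugin's real API:

#include <cassert>
#include <cstdio>

struct QuantizedLayerParams { int levels; };   // illustrative stand-in

// Stand-in for InferenceEngine::getInjectedData<T>(layer): returns the
// data attached to a layer, or nullptr when nothing was injected.
static QuantizedLayerParams* getInjectedData(bool attached) {
    static QuantizedLayerParams params{16};
    return attached ? &params : nullptr;
}

static void runPassOnLayer(bool attached) {
    QuantizedLayerParams* quantParams = getInjectedData(attached);
    // Every layer reaching this pass must carry quantization params;
    // a missing attachment is a bug, so assert instead of returning early.
    assert(quantParams != nullptr);
    std::printf("levels = %d\n", quantParams->levels);
}

int main() {
    runPassOnLayer(true);   // prints: levels = 16
    return 0;
}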
@@ -2304,7 +2305,7 @@ void TransposeWeightsFromNCHWToNHWCPass::run() {
             }
         }
         // Find a convolution in next layers to rotate weights columns
-        if (!l->outData.empty() && !getInputTo(l->outData[0]).empty() && !l->outData.empty() && !getInputTo(l->outData[0]).empty()) {
+        if (!l->outData.empty() && !getInputTo(l->outData[0]).empty()) {
             std::vector<TranspositionInfo> transpositionInfo;
             auto nextLayer = getInputTo(l->outData[0]).begin()->second;
             transpositionInfo = FindTranspositionInfoFromNextLayers(nextLayer);
@@ -2345,7 +2346,7 @@ void TransposeWeightsFromNCHWToNHWCPass::run() {
         }
         // Find a convolution in previous or next layers
         auto transpositionInfo = FindTranspositionInfoFromPrevLayers(firstInput);
-        if (!FoundPartToTranspose(transpositionInfo)) {
+        if (!FoundPartToTranspose(transpositionInfo) && !l->outData.empty() && !getInputTo(l->outData[0]).empty()) {
             transpositionInfo = FindTranspositionInfoFromNextLayers(getInputTo(l->outData[0]).begin()->second);
         }
         if (FoundPartToTranspose(transpositionInfo)) {
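The last two hunks harden the same access pattern: `getInputTo(l->outData[0]).begin()->second` is only safe when the layer has at least one output and that output has at least one consumer. The first hunk deletes an accidentally duplicated copy of that guard; the second adds the guard where it was missing before dereferencing begin(). A compact sketch of the pattern with illustrative types (the real code uses the legacy IE CNNLayer/DataPtr graph API):

#include <map>
#include <string>
#include <vector>

struct Layer;                                       // illustrative stand-ins
using ConsumerMap = std::map<std::string, Layer*>;  // what getInputTo returns

struct Layer {
    std::vector<ConsumerMap> outData;  // consumers per output; may be empty
};

// Returns the first consumer of the layer's first output, or nullptr.
static Layer* firstConsumer(const Layer& l) {
    // Both checks are required: indexing outData[0] on an output-less layer,
    // or calling begin()->second on an empty map, is undefined behavior.
    if (!l.outData.empty() && !l.outData[0].empty()) {
        return l.outData[0].begin()->second;
    }
    return nullptr;
}

int main() {
    Layer l;                                           // no outputs at all:
    return firstConsumer(l) == nullptr ? 0 : 1;        // guard returns nullptr
}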
