[GNA] Fix KW issues (openvinotoolkit#3)
dorloff committed Jan 22, 2020
1 parent 1adce8c commit d44d651
Showing 3 changed files with 9 additions and 2 deletions.
1 change: 1 addition & 0 deletions inference-engine/src/gna_plugin/gna_graph_compiler.cpp
@@ -1306,6 +1306,7 @@ void GNAGraphCompiler::PermutePrimitive(InferenceEngine::CNNLayerPtr layer) {
     }
 
     if (layerOrder == vector<int>({1, 0, 2})) {
+        IE_ASSERT(!layer->insData.empty());
         auto inputs = layer->insData.begin()->lock();
         auto inputs_size = inputs->getTensorDesc().getDims().size();
         if (inputs_size != layerOrder.size()) {
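
The new assert guards the layer->insData.begin()->lock() call on the next line: calling begin() and dereferencing it on an empty vector is undefined behavior, which is the kind of defect Klocwork (KW) flags. A minimal self-contained sketch of the pattern, using plain assert in place of InferenceEngine's IE_ASSERT macro and hypothetical Layer/Data stand-ins for the real CNNLayer types:

#include <cassert>
#include <memory>
#include <vector>

// Hypothetical stand-ins for the InferenceEngine layer/data types.
struct Data { std::vector<size_t> dims; };
struct Layer { std::vector<std::weak_ptr<Data>> insData; };

size_t firstInputRank(const Layer &layer) {
    assert(!layer.insData.empty());               // the fix: never call begin() on an empty vector
    auto inputs = layer.insData.begin()->lock();  // the weak_ptr itself may also have expired
    assert(inputs != nullptr);
    return inputs->dims.size();
}

int main() {
    Layer layer;
    auto data = std::make_shared<Data>(Data{{1, 8, 16}});
    layer.insData.push_back(data);
    return firstInputRank(layer) == 3 ? 0 : 1;
}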
1 change: 1 addition & 0 deletions inference-engine/src/gna_plugin/gna_plugin.cpp
@@ -1014,6 +1014,7 @@ void GNAPlugin::Infer(const InferenceEngine::Blob &input, InferenceEngine::Blob
 
     IE_ASSERT(!inputsDataMap.empty());
     bmInput[inputsDataMap.begin()->first] = std::shared_ptr<Blob>(const_cast<Blob*>(&input), [](Blob*){});
+    IE_ASSERT(!outputsDataMap.empty());
     bmOutput[outputsDataMap.begin()->first] = std::shared_ptr<Blob>(&output, [](Blob*){});
     Infer(bmInput, bmOutput);
 }
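
Here the same guard is applied before taking begin()->first on outputsDataMap; the matching assert for inputsDataMap was already in place one line above. Worth noting in the surrounding code: the blobs are wrapped in non-owning shared_ptrs whose no-op deleters keep the reference counting from freeing objects the caller owns. A short sketch under the assumption of a simplified Blob type:

#include <cassert>
#include <map>
#include <memory>
#include <string>

struct Blob {};  // simplified stand-in for InferenceEngine::Blob

int main() {
    std::map<std::string, Blob*> outputsDataMap = {{"out0", nullptr}};
    std::map<std::string, std::shared_ptr<Blob>> bmOutput;
    Blob output;  // owned by the caller, lives on the stack

    // begin()->first on an empty map dereferences end(): assert first.
    assert(!outputsDataMap.empty());

    // Non-owning shared_ptr: the empty lambda deleter guarantees the
    // stack object is never freed when the last reference drops.
    bmOutput[outputsDataMap.begin()->first] = std::shared_ptr<Blob>(&output, [](Blob*) {});
    return bmOutput.count("out0") == 1 ? 0 : 1;
}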
@@ -62,6 +62,7 @@ static void insertDiagonalLayerBetween(InferenceEngine::CNNLayerPtr prevLayer,
     gnalog() << "Inserted Diagonal Layer " << diagName <<" between: " << prevLayer->name << " and " << nextLayer->name << "\n" << std::flush;
 
     auto diagLayer = std::make_shared<ScaleShiftLayer>(LayerParams({diagName, "ScaleShift", Precision::FP32}));
+    IE_ASSERT(diagLayer != nullptr);
 
     // TODO: diagonal size
     auto dimsIndex = nextLayer->outData[0]->getTensorDesc().getDims().size() - 1;
@@ -330,11 +331,15 @@ void SubstitutePReluPass::run() {
 
         auto inData_0 = sum->insData[0].lock();
         IE_ASSERT(inData_0 != nullptr);
+        auto creatorLayer_0 = inData_0->getCreatorLayer().lock();
+        IE_ASSERT(creatorLayer_0 != nullptr);
         auto inData_1 = sum->insData[1].lock();
         IE_ASSERT(inData_1 != nullptr);
+        auto creatorLayer_1 = inData_1->getCreatorLayer().lock();
+        IE_ASSERT(creatorLayer_1 != nullptr);
 
-        auto s1 = inData_0->getCreatorLayer().lock().get();
-        auto s2 = inData_1->getCreatorLayer().lock().get();
+        auto s1 = creatorLayer_0.get();
+        auto s2 = creatorLayer_1.get();
 
         if (s1 != static_cast<InferenceEngine::CNNLayer *>(first) &&
             s2 != static_cast<InferenceEngine::CNNLayer *>(first)) {
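
The PRelu change does more than add asserts: the inline getCreatorLayer().lock().get() calls, which silently yield a null raw pointer when the weak reference has expired, are replaced by named shared_ptrs that are locked once, asserted, and only then converted to raw pointers. Holding the named shared_ptr also keeps the creator layer alive for as long as s1/s2 are in use. A self-contained sketch of this lock-then-assert refactor, with hypothetical CNNLayer/Data stand-ins:

#include <cassert>
#include <memory>

// Hypothetical stand-ins for the InferenceEngine types.
struct CNNLayer {};
struct Data {
    std::weak_ptr<CNNLayer> creatorLayer;
    std::weak_ptr<CNNLayer> getCreatorLayer() const { return creatorLayer; }
};

int main() {
    auto layer = std::make_shared<CNNLayer>();
    Data data;
    data.creatorLayer = layer;

    // Before: a temporary shared_ptr is created and discarded, and .get()
    // can hand back nullptr without any diagnostic.
    //   auto s1 = data.getCreatorLayer().lock().get();

    // After: lock once into a named shared_ptr, assert, then take the pointer.
    auto creatorLayer_0 = data.getCreatorLayer().lock();
    assert(creatorLayer_0 != nullptr);  // guard before any dereference
    auto s1 = creatorLayer_0.get();

    return s1 == layer.get() ? 0 : 1;
}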
