diff --git a/inference-engine/src/mkldnn_plugin/cpu_blocked_memory_desc.h b/inference-engine/src/mkldnn_plugin/cpu_blocked_memory_desc.h
index d9430a7cf807b1..91e50d1d14b3fc 100644
--- a/inference-engine/src/mkldnn_plugin/cpu_blocked_memory_desc.h
+++ b/inference-engine/src/mkldnn_plugin/cpu_blocked_memory_desc.h
@@ -16,7 +16,7 @@ class BlockedMemoryDesc : public MemoryDesc {
                       const std::vector& strides = {});
     MemoryDescPtr clone() const override {
-        return make_unique(*this);
+        return MKLDNNPlugin::make_unique(*this);
     }
     bool isDefined() const override;
diff --git a/inference-engine/src/mkldnn_plugin/cpu_memory_desc_utils.cpp b/inference-engine/src/mkldnn_plugin/cpu_memory_desc_utils.cpp
index d1947d9c17977d..96a454dd9c373e 100644
--- a/inference-engine/src/mkldnn_plugin/cpu_memory_desc_utils.cpp
+++ b/inference-engine/src/mkldnn_plugin/cpu_memory_desc_utils.cpp
@@ -134,6 +134,21 @@ MKLDNNMemoryDesc MemoryDescUtils::convertToMKLDNNMemoryDesc(const MemoryDesc& de
 MKLDNNMemoryDesc MemoryDescUtils::convertToMKLDNNMemoryDesc(const BlockedMemoryDesc& desc) {
     dnnl_memory_desc_t mkldnnDesc;
+
+    // scalar case
+    if (desc.getShape().getRank() == 0) {
+        mkldnn::memory::desc convertedDesc;
+        convertedDesc.data.format_kind = dnnl_blocked;
+        convertedDesc.data.data_type = memory::convert_to_c(MKLDNNMemory::convertToDataType(desc.getPrecision()));
+        convertedDesc.data.ndims = 1;
+        convertedDesc.data.dims[0] = 1;
+        convertedDesc.data.padded_dims[0] = 1;
+        convertedDesc.data.format_desc.blocking.strides[0] = 1;
+        convertedDesc.data.padded_offsets[0] = 0;
+        convertedDesc.data.offset0 = desc.getOffsetPadding();
+        return MKLDNNMemoryDesc(convertedDesc);
+    }
+
     auto dims = desc.getShape().getStaticDims();
     auto ie_blkdDims = desc.getBlockDims();
@@ -332,11 +347,11 @@ BlockedMemoryDesc MemoryDescUtils::convertToBlockedDescriptor(const MemoryDesc &
 MemoryDescPtr MemoryDescUtils::applyUndefinedOffset(const MKLDNNMemoryDesc& desc) {
     if (desc.getFormatKind() != dnnl_format_kind_t::dnnl_blocked)
-        return make_unique(desc);
+        return MKLDNNPlugin::make_unique(desc);
     mkldnn::memory::desc retDesc = desc;
     retDesc.data.offset0 = Shape::UNDEFINED_DIM;
-    return make_unique(retDesc);
+    return MKLDNNPlugin::make_unique(retDesc);
 }
 MemoryDescPtr MemoryDescUtils::applyUndefinedOffset(const BlockedMemoryDesc &desc) {
@@ -347,20 +362,20 @@ MemoryDescPtr MemoryDescUtils::applyUndefinedOffset(const BlockedMemoryDesc &des
     offsetPaddingToData.resize(desc.getBlockDims().size(), 0);
     size_t offsetPadding = Shape::UNDEFINED_DIM;
-    return make_unique(desc.getPrecision(), desc.getShape().getDims(), desc.getBlockDims(),
+    return MKLDNNPlugin::make_unique(desc.getPrecision(), desc.getShape().getDims(), desc.getBlockDims(),
                        desc.getOrder(), offsetPadding, offsetPaddingToData, strides);
 }
 MemoryDescPtr MemoryDescUtils::resetOffset(const MemoryDesc* desc) {
     if (MemoryDescType::Blocked == desc->getType()) {
         auto blockedDesc = desc->as();
-        return make_unique(blockedDesc->getPrecision(), blockedDesc->getShape().getDims(),
+        return MKLDNNPlugin::make_unique(blockedDesc->getPrecision(), blockedDesc->getShape().getDims(),
                            blockedDesc->getBlockDims(), blockedDesc->getOrder());
     } else if (MemoryDescType::Mkldnn == desc->getType()) {
         auto mkldnnDesc = desc->as();
         mkldnn::memory::desc retDesc = *mkldnnDesc;
         retDesc.data.offset0 = 0;
-        return make_unique(retDesc);
+        return MKLDNNPlugin::make_unique(retDesc);
     }
     return desc->clone();
 }
diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_graph_optimizer.cpp
b/inference-engine/src/mkldnn_plugin/mkldnn_graph_optimizer.cpp index d51b7df1901409..e8e834edc19def 100644 --- a/inference-engine/src/mkldnn_plugin/mkldnn_graph_optimizer.cpp +++ b/inference-engine/src/mkldnn_plugin/mkldnn_graph_optimizer.cpp @@ -1689,7 +1689,7 @@ void MKLDNNGraphOptimizer::MergeTransposeAndReorder(MKLDNNGraph &graph) { } auto& transposeOrder = transposeNode->getOrder(); - auto &layoutOrder = MemoryDescUtils::convertToBlockedDescriptor( + auto layoutOrder = MemoryDescUtils::convertToBlockedDescriptor( *transposeNode->getSelectedPrimitiveDescriptor()->getConfig().outConfs[0].desc).getOrder(); auto inBlockedDesc = MemoryDescUtils::convertToBlockedDescriptor(*reorderNode->getSelectedPrimitiveDescriptor()->getConfig().inConfs[0].desc); diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_memory.h b/inference-engine/src/mkldnn_plugin/mkldnn_memory.h index ef9aae9212e6fe..517318704a4d7b 100644 --- a/inference-engine/src/mkldnn_plugin/mkldnn_memory.h +++ b/inference-engine/src/mkldnn_plugin/mkldnn_memory.h @@ -87,7 +87,7 @@ class MKLDNNMemoryDesc : public MemoryDesc { } std::unique_ptr clone() const override { - return make_unique(*this); + return MKLDNNPlugin::make_unique(*this); } bool checkGeneralLayout(GeneralLayout layoutType) const override; diff --git a/inference-engine/src/mkldnn_plugin/mkldnn_node.cpp b/inference-engine/src/mkldnn_plugin/mkldnn_node.cpp index 6498c3291d4ed3..41e0efcc5d978c 100644 --- a/inference-engine/src/mkldnn_plugin/mkldnn_node.cpp +++ b/inference-engine/src/mkldnn_plugin/mkldnn_node.cpp @@ -1041,7 +1041,7 @@ bool MKLDNNNode::isConfigDefined(const NodeConfig &config) const { } std::unique_ptr MKLDNNNode::getSrcMemDesc(mkldnn::primitive_desc_iterator &primitive_desc_it, size_t idx) { - return make_unique(primitive_desc_it.src_desc(idx)); + return MKLDNNPlugin::make_unique(primitive_desc_it.src_desc(idx)); // TODO [DS]: uncomment or remove // InferenceEngine::TensorDesc desc = MKLDNNMemoryDesc(primitive_desc_it.src_desc(idx)); // if (desc.getLayout() == InferenceEngine::Layout::ANY) @@ -1055,7 +1055,7 @@ std::unique_ptr MKLDNNNode::getSrcMemDesc(mkldnn::primitive_de } std::unique_ptr MKLDNNNode::getDstMemDesc(mkldnn::primitive_desc_iterator &primitive_desc_it, size_t idx) { - return make_unique(primitive_desc_it.dst_desc(idx)); + return MKLDNNPlugin::make_unique(primitive_desc_it.dst_desc(idx)); // TODO [DS]: uncomment or remove // InferenceEngine::TensorDesc desc = MKLDNNMemoryDesc(primitive_desc_it.dst_desc(idx)); // if (desc.getLayout() == InferenceEngine::Layout::ANY) diff --git a/inference-engine/src/mkldnn_plugin/nodes/common/blocked_desc_creator.h b/inference-engine/src/mkldnn_plugin/nodes/common/blocked_desc_creator.h index c8403bd38c7304..6358942f90c96d 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/common/blocked_desc_creator.h +++ b/inference-engine/src/mkldnn_plugin/nodes/common/blocked_desc_creator.h @@ -30,7 +30,7 @@ class BlockedDescCreator { makeFilteredRange(const CreatorsMap& map, Predicate predicate); virtual BlockedMemoryDesc createDesc(const InferenceEngine::Precision& precision, const InferenceEngine::SizeVector& srcDims) const = 0; std::unique_ptr createUniqueDesc(const InferenceEngine::Precision& precision, const InferenceEngine::SizeVector& srcDims) const { - return make_unique(createDesc(precision, srcDims)); + return MKLDNNPlugin::make_unique(createDesc(precision, srcDims)); } virtual size_t getMinimalRank() const = 0; virtual ~BlockedDescCreator() = default; diff --git 
a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_bin_conv_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_bin_conv_node.cpp index bd565936e2e8b7..6ad9ac766fd9d9 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_bin_conv_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_bin_conv_node.cpp @@ -988,7 +988,7 @@ void MKLDNNBinaryConvolutionNode::initSupportedPrimitiveDescriptors() { weiDims[2], weiDims[3], weiFirstDimBlockSize, 32}; std::vector weiOrder = {0, 1, 2, 3, 0, 1}; - config.inConfs[1].desc = make_unique(Precision::BIN, weiDims, weiBlockDims, weiOrder); + config.inConfs[1].desc = MKLDNNPlugin::make_unique(Precision::BIN, weiDims, weiBlockDims, weiOrder); //result auto outputPrecision = withBinarization ? Precision::BIN : Precision::FP32; diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_concat_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_concat_node.cpp index 479212bb50d8d1..6f80f9b347230c 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_concat_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_concat_node.cpp @@ -178,14 +178,14 @@ void MKLDNNConcatNode::initSupportedPrimitiveDescriptors() { } } - config.outConfs[0].desc = make_unique(outputPrecision, dstDims, blkDims, order, offset, offsets, strides); + config.outConfs[0].desc = MKLDNNPlugin::make_unique(outputPrecision, dstDims, blkDims, order, offset, offsets, strides); for (size_t i = 0; i < getParentEdges().size(); i++) { const auto& srcBlkDims = refConfig.inConfs[i].desc->as()->getBlockDims(); const auto& dims = refConfig.inConfs[i].desc->getShape().getStaticDims(); config.inConfs[i].inPlace = 0; - config.inConfs[i].desc = make_unique(inputPrecision, dims, srcBlkDims, order, offset, offsets, strides); + config.inConfs[i].desc = MKLDNNPlugin::make_unique(inputPrecision, dims, srcBlkDims, order, offset, offsets, strides); } supportedPrimitiveDescriptors.emplace_back(config, impl_desc_type::unknown); } @@ -432,7 +432,7 @@ void MKLDNNConcatNode::initOptimalPrimitiveDescriptor() { size_t offset = 0; for (size_t i = 0; i < config.inConfs.size(); i++) { auto inpBlockingDesc = MemoryDescUtils::convertToBlockedDescriptor(*config.inConfs[i].desc); - config.inConfs[i].desc = make_unique(inpBlockingDesc.getPrecision(), + config.inConfs[i].desc = MKLDNNPlugin::make_unique(inpBlockingDesc.getPrecision(), inpBlockingDesc.getShape().getStaticDims(), inpBlockingDesc.getBlockDims(), inpBlockingDesc.getOrder(), diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_conv_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_conv_node.cpp index ba5ef1310f2de0..9c53056982503b 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_conv_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_conv_node.cpp @@ -243,9 +243,9 @@ void MKLDNNConvolutionNode::getSupportedDescriptors() { outputDataType = memory::data_type::f32; if (eltwisePrecision == Precision::BF16) eltwisePrecision = Precision::FP32; - in_candidate = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), + in_candidate = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, ndims == 5 ? memory::format_tag::ndhwc : memory::format_tag::nhwc); - out_candidate = make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), + out_candidate = MKLDNNPlugin::make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType, ndims == 5 ? 
memory::format_tag::ndhwc : memory::format_tag::nhwc); createDescriptor({ in_candidate.get() }, { out_candidate.get() }); } else { @@ -288,31 +288,31 @@ void MKLDNNConvolutionNode::getSupportedDescriptors() { auto outputDims = getChildEdgeAt(0)->getShape().getStaticMklDims(); if (IC == 1 && groupOC == 1) { - in_candidate = make_unique(inputDims, inputDataType, ncsp); - out_candidate = make_unique(outputDims, outputDataType, ncsp); + in_candidate = MKLDNNPlugin::make_unique(inputDims, inputDataType, ncsp); + out_candidate = MKLDNNPlugin::make_unique(outputDims, outputDataType, ncsp); createDescriptor({ in_candidate.get() }, { out_candidate.get() }); } else if (IC < 4) { - in_candidate = make_unique(inputDims, inputDataType, ncsp); - out_candidate = make_unique(outputDims, outputDataType, nCsp16c); + in_candidate = MKLDNNPlugin::make_unique(inputDims, inputDataType, ncsp); + out_candidate = MKLDNNPlugin::make_unique(outputDims, outputDataType, nCsp16c); createDescriptor({ in_candidate.get() }, { out_candidate.get() }); - out_candidate = make_unique(outputDims, outputDataType, nCsp8c); + out_candidate = MKLDNNPlugin::make_unique(outputDims, outputDataType, nCsp8c); createDescriptor({ in_candidate.get() }, { out_candidate.get() }); } else { - in_candidate = make_unique(inputDims, inputDataType, nCsp16c); - out_candidate = make_unique(outputDims, outputDataType, nCsp16c); + in_candidate = MKLDNNPlugin::make_unique(inputDims, inputDataType, nCsp16c); + out_candidate = MKLDNNPlugin::make_unique(outputDims, outputDataType, nCsp16c); createDescriptor({ in_candidate.get() }, { out_candidate.get() }); - in_candidate = make_unique(inputDims, inputDataType, nCsp8c); - out_candidate = make_unique(outputDims, outputDataType, nCsp8c); + in_candidate = MKLDNNPlugin::make_unique(inputDims, inputDataType, nCsp8c); + out_candidate = MKLDNNPlugin::make_unique(outputDims, outputDataType, nCsp8c); createDescriptor({ in_candidate.get() }, { out_candidate.get() }); } - in_candidate = make_unique(inputDims, inputDataType, ncsp); - out_candidate = make_unique(outputDims, outputDataType, ncsp); + in_candidate = MKLDNNPlugin::make_unique(inputDims, inputDataType, ncsp); + out_candidate = MKLDNNPlugin::make_unique(outputDims, outputDataType, ncsp); createDescriptor({ in_candidate.get() }, { out_candidate.get() }); if (inputDataType != memory::data_type::bf16 && isNspcAvailable()) { - in_candidate = make_unique(inputDims, inputDataType, nspc); - out_candidate = make_unique(outputDims, outputDataType, nspc); + in_candidate = MKLDNNPlugin::make_unique(inputDims, inputDataType, nspc); + out_candidate = MKLDNNPlugin::make_unique(outputDims, outputDataType, nspc); createDescriptor({ in_candidate.get() }, { out_candidate.get() }); } } @@ -412,10 +412,10 @@ void MKLDNNConvolutionNode::initSupportedPrimitiveDescriptors() { PortConfig dataConfig; dataConfig.inPlace = -1; dataConfig.constant = false; - dataConfig.desc = make_unique(dwWeightsDims, weightsPrc, memory::format_tag::Goihw8g); + dataConfig.desc = MKLDNNPlugin::make_unique(dwWeightsDims, weightsPrc, memory::format_tag::Goihw8g); config.inConfs.push_back(dataConfig); - dataConfig.desc = make_unique(dwBiasesDims, biasPrc, memory::format_tag::x); + dataConfig.desc = MKLDNNPlugin::make_unique(dwBiasesDims, biasPrc, memory::format_tag::x); config.inConfs.push_back(dataConfig); } @@ -597,10 +597,10 @@ void MKLDNNConvolutionNode::initDescriptor(const NodeConfig& config) { PortConfig dataConfig; dataConfig.inPlace = -1; dataConfig.constant = false; - dataConfig.desc = 
make_unique(dwWeightsDims, weightsPrc, memory::format_tag::Goihw8g); + dataConfig.desc = MKLDNNPlugin::make_unique(dwWeightsDims, weightsPrc, memory::format_tag::Goihw8g); cfg.inConfs.push_back(dataConfig); - dataConfig.desc = make_unique(dwBiasesDims, biasPrc, memory::format_tag::x); + dataConfig.desc = MKLDNNPlugin::make_unique(dwBiasesDims, biasPrc, memory::format_tag::x); cfg.inConfs.push_back(dataConfig); } @@ -717,9 +717,9 @@ std::unique_ptr MKLDNNConvolutionNode::getSrcMemDesc(mkldnn::p new_dims.push_back(old_dims[i]); } - return make_unique(MKLDNNDims(new_dims), desc.getDataType(), desc.getFormat()); + return MKLDNNPlugin::make_unique(MKLDNNDims(new_dims), desc.getDataType(), desc.getFormat()); } else { - return make_unique(std::move(desc)); + return MKLDNNPlugin::make_unique(std::move(desc)); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_convert_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_convert_node.cpp index 71cf39d85b4dfa..da7e07cdcbd6f2 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_convert_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_convert_node.cpp @@ -92,8 +92,8 @@ void MKLDNNConvertNode::initSupportedPrimitiveDescriptors() { auto range = BlockedDescCreator::makeFilteredRange(creators, insShape.getRank()); for (auto itr = range.first; itr != range.second; ++itr) { - config.inConfs[0].desc = make_unique(itr->second->createDesc(insPrecision, insShape.getDims())); - config.outConfs[0].desc = make_unique(itr->second->createDesc(outPrecision, outputShape.getDims())); + config.inConfs[0].desc = MKLDNNPlugin::make_unique(itr->second->createDesc(insPrecision, insShape.getDims())); + config.outConfs[0].desc = MKLDNNPlugin::make_unique(itr->second->createDesc(outPrecision, outputShape.getDims())); supportedPrimitiveDescriptors.emplace_back(config, impl_desc_type::unknown); } diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_deconv_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_deconv_node.cpp index ac63e29ccf84ca..0c7bb777302b95 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_deconv_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_deconv_node.cpp @@ -403,7 +403,7 @@ void MKLDNNDeconvolutionNode::createDescriptor(const std::vector MKLDNNDeconvolutionNode::getSrcMemDesc(mkldnn::primitive_desc_iterator &primitive_desc_it, size_t idx) { if (idx == 2) { auto dataType = MKLDNNExtensionUtils::IEPrecisionToDataType(getOriginalInputPrecisionAtPort(2)); - return make_unique(getParentEdgeAt(2)->getShape().getStaticMklDims(), dataType, + return MKLDNNPlugin::make_unique(getParentEdgeAt(2)->getShape().getStaticMklDims(), dataType, MKLDNNMemory::GetPlainFormatByRank(getParentEdgeAt(2)->getShape().getRank())); } @@ -411,16 +411,16 @@ std::unique_ptr MKLDNNDeconvolutionNode::getSrcMemDesc(mkldnn: : isInt8 ? MKLDNNMemoryDesc(primitive_desc_it.src_desc(idx)) : MKLDNNMemoryDesc(primitive_desc_it.diff_dst_desc(idx)); if (getParentEdgeAt(idx)->getShape().getRank() != desc.getShape().getRank()) { - return make_unique(MKLDNNDims(weightDims), desc.getDataType(), desc.getFormat()); + return MKLDNNPlugin::make_unique(MKLDNNDims(weightDims), desc.getDataType(), desc.getFormat()); } else { - return make_unique(std::move(desc)); + return MKLDNNPlugin::make_unique(std::move(desc)); } } std::unique_ptr MKLDNNDeconvolutionNode::getDstMemDesc(mkldnn::primitive_desc_iterator &primitive_desc_it, size_t idx) { MKLDNNMemoryDesc desc = isInt8 ? 
MKLDNNMemoryDesc(primitive_desc_it.dst_desc(idx)) : MKLDNNMemoryDesc(primitive_desc_it.diff_src_desc(idx)); - return make_unique(getChildEdgeAt(idx)->getShape().getStaticMklDims(), desc.getDataType(), desc.getFormat()); + return MKLDNNPlugin::make_unique(getChildEdgeAt(idx)->getShape().getStaticMklDims(), desc.getDataType(), desc.getFormat()); } InferenceEngine::Precision MKLDNNDeconvolutionNode::getRuntimePrecision() const { diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_def_conv_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_def_conv_node.cpp index c0f3a2a484f26f..0ae9b92a9fa7a7 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_def_conv_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_def_conv_node.cpp @@ -838,20 +838,24 @@ void MKLDNNDeformableConvolutionNode::initSupportedPrimitiveDescriptors() { auto weiFormat = group > 1 ? mayiuse(avx512_common) ? memory::format_tag::gOIhw16i16o : memory::format_tag::gOIhw8i8o : mayiuse(avx512_common) ? memory::format_tag::OIhw16i16o : memory::format_tag::OIhw8i8o; - config.inConfs[0].desc = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), memory::data_type::f32, dataFormat); - config.inConfs[1].desc = make_unique(getParentEdgeAt(1)->getShape().getStaticMklDims(), memory::data_type::f32, offFormat); - config.inConfs[2].desc = make_unique(getParentEdgeAt(2)->getShape().getStaticMklDims(), memory::data_type::f32, weiFormat); - config.outConfs[0].desc = make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), memory::data_type::f32, dataFormat); + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), + memory::data_type::f32, dataFormat); + config.inConfs[1].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(1)->getShape().getStaticMklDims(), + memory::data_type::f32, offFormat); + config.inConfs[2].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(2)->getShape().getStaticMklDims(), + memory::data_type::f32, weiFormat); + config.outConfs[0].desc = MKLDNNPlugin::make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), + memory::data_type::f32, dataFormat); supportedPrimitiveDescriptors.push_back({config, impl_type}); } else { // reference implementation - config.inConfs[0].desc = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), memory::data_type::f32, + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), memory::data_type::f32, memory::format_tag::nchw); - config.inConfs[1].desc = make_unique(getParentEdgeAt(1)->getShape().getStaticMklDims(), memory::data_type::f32, + config.inConfs[1].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(1)->getShape().getStaticMklDims(), memory::data_type::f32, memory::format_tag::nchw); - config.inConfs[2].desc = make_unique(getParentEdgeAt(2)->getShape().getStaticMklDims(), memory::data_type::f32, + config.inConfs[2].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(2)->getShape().getStaticMklDims(), memory::data_type::f32, memory::format_tag::oihw); - config.outConfs[0].desc = make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), memory::data_type::f32, + config.outConfs[0].desc = MKLDNNPlugin::make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), memory::data_type::f32, memory::format_tag::nchw); supportedPrimitiveDescriptors.push_back({config, impl_type}); } diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_eltwise_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_eltwise_node.cpp index 
a03cbe2d0ba2ae..17385dc21ff719 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_eltwise_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_eltwise_node.cpp @@ -1097,7 +1097,7 @@ void MKLDNNEltwiseNode::initSupportedPrimitiveDescriptors() { blocks[i] = dims[order[i]]; } - return make_unique(prc, edge->getShape().getStaticDims(), blocks, order, offset); + return MKLDNNPlugin::make_unique(prc, edge->getShape().getStaticDims(), blocks, order, offset); } else if (lt == Blocked && edge->getShape().getRank() != 1 && edge->getShape().getStaticDims()[1] != 1) { size_t blockSize = mayiuse(x64::avx512_common) ? 16 : 8; @@ -1109,13 +1109,13 @@ void MKLDNNEltwiseNode::initSupportedPrimitiveDescriptors() { blocks.push_back(blockSize); order.push_back(1); - return make_unique(prc, edge->getShape().getStaticDims(), blocks, order, offset); + return MKLDNNPlugin::make_unique(prc, edge->getShape().getStaticDims(), blocks, order, offset); } else { std::vector blocks = edge->getShape().getStaticDims(); std::vector order(blocks.size()); std::iota(order.begin(), order.end(), 0); - return make_unique(prc, edge->getShape().getStaticDims(), blocks, order, offset); + return MKLDNNPlugin::make_unique(prc, edge->getShape().getStaticDims(), blocks, order, offset); } }; diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_fake_quantize_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_fake_quantize_node.cpp index 79a9dee8d0f9f6..3f9a2418a6379b 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_fake_quantize_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_fake_quantize_node.cpp @@ -1462,8 +1462,9 @@ void MKLDNNFakeQuantizeNode::executeQuantization() { auto& srcDesc = srcMemory->GetDesc(); auto srcDims = srcDesc.getShape().getStaticDims(); - bool is_blk_format = !srcDesc.checkGeneralLayout(GeneralLayout::nspc); - int blk_size = (srcDesc.checkGeneralLayout(GeneralLayout::ncsp)) ? 1 : mayiuse(cpu::x64::avx512_common) ? 16 : 8; + bool is_blk_format = !srcDesc.checkGeneralLayout(GeneralLayout::nspc) && one_of(srcDesc.getShape().getRank(), 4, 5); + int blk_size = (srcDesc.checkGeneralLayout(GeneralLayout::ncsp) && one_of(srcDesc.getShape().getRank(), 3, 4, 5)) + ? 1 : mayiuse(cpu::x64::avx512_common) ? 
16 : 8; auto src_type_size = jqp.src_prc.size(); auto dst_type_size = jqp.dst_prc.size(); diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_fullyconnected_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_fullyconnected_node.cpp index 977e1ac4a22031..4d9f1e406e4305 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_fullyconnected_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_fullyconnected_node.cpp @@ -294,7 +294,7 @@ std::unique_ptr MKLDNNFullyConnectedNode::getSrcMemDesc(mkldnn desc = MKLDNNMemoryDesc(getParentEdgeAt(idx)->getShape().getStaticMklDims(), MKLDNNExtensionUtils::IEPrecisionToDataType(desc.getPrecision()), MKLDNNMemory::GetPlainFormatByRank(getParentEdgeAt(idx)->getShape().getRank())); } - return make_unique(std::move(desc)); + return MKLDNNPlugin::make_unique(std::move(desc)); } std::unique_ptr MKLDNNFullyConnectedNode::getDstMemDesc(mkldnn::primitive_desc_iterator &primitive_desc_it, size_t idx) { @@ -303,7 +303,7 @@ std::unique_ptr MKLDNNFullyConnectedNode::getDstMemDesc(mkldnn desc = MKLDNNMemoryDesc(getChildEdgeAt(idx)->getShape().getStaticMklDims(), MKLDNNExtensionUtils::IEPrecisionToDataType(desc.getPrecision()), MKLDNNMemory::GetPlainFormatByRank(getChildEdgeAt(idx)->getShape().getRank())); } - return make_unique(std::move(desc)); + return MKLDNNPlugin::make_unique(std::move(desc)); } InferenceEngine::Precision MKLDNNFullyConnectedNode::getRuntimePrecision() const { diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_interpolate_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_interpolate_node.cpp index 2202993002dba9..6690b0347b75e4 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_interpolate_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_interpolate_node.cpp @@ -1916,15 +1916,16 @@ void MKLDNNInterpolateNode::initSupportedPrimitiveDescriptors() { auto axesType = MKLDNNExtensionUtils::IEPrecisionToDataType(Precision::I32); auto pushDesc = [&](memory::format_tag dataFormat, impl_desc_type implDetail) { - config.inConfs[DATA_ID].desc = make_unique(getParentEdgeAt(DATA_ID)->getShape().getStaticMklDims(), inputDataType, dataFormat); - config.inConfs[TARGET_SHAPE_ID].desc = make_unique(getParentEdgeAt(TARGET_SHAPE_ID)->getShape().getStaticMklDims(), + config.inConfs[DATA_ID].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(DATA_ID)->getShape().getStaticMklDims(), + inputDataType, dataFormat); + config.inConfs[TARGET_SHAPE_ID].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(TARGET_SHAPE_ID)->getShape().getStaticMklDims(), targetShapeType, memory::format_tag::x); - config.inConfs[SCALES_ID].desc = make_unique(getParentEdgeAt(SCALES_ID)->getShape().getStaticMklDims(), scalesType, + config.inConfs[SCALES_ID].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(SCALES_ID)->getShape().getStaticMklDims(), scalesType, memory::format_tag::x); if (isAxesSpecified) - config.inConfs[AXES_ID].desc = make_unique(getParentEdgeAt(AXES_ID)->getShape().getStaticMklDims(), axesType, + config.inConfs[AXES_ID].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(AXES_ID)->getShape().getStaticMklDims(), axesType, memory::format_tag::x); - config.outConfs[0].desc = make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType, dataFormat); + config.outConfs[0].desc = MKLDNNPlugin::make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType, dataFormat); supportedPrimitiveDescriptors.push_back({config, implDetail}); }; diff --git 
a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_lrn_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_lrn_node.cpp index c46d195c0e9c0a..93988f8b2d5611 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_lrn_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_lrn_node.cpp @@ -93,14 +93,14 @@ void MKLDNNLrnNode::getSupportedDescriptors() { const auto parentStaticDims = parentShape.getStaticMklDims(); for (auto format : getAvailableFormatsForDims(parentShape)) { - auto in_candidate = make_unique(parentStaticDims, inputDataType, format); + auto in_candidate = MKLDNNPlugin::make_unique(parentStaticDims, inputDataType, format); createDescriptor({in_candidate.get()}, {}); } } std::unique_ptr MKLDNNLrnNode::getSrcMemDesc(mkldnn::primitive_desc_iterator &primitive_desc_it, size_t idx) { if (idx > 0) { - return make_unique(getParentEdgeAt(idx)->getShape().getStaticMklDims(), + return MKLDNNPlugin::make_unique(getParentEdgeAt(idx)->getShape().getStaticMklDims(), MKLDNNExtensionUtils::IEPrecisionToDataType(getOriginalInputPrecisions()[idx]), MKLDNNMemory::GetPlainFormatByRank(getParentEdgeAt(idx)->getShape().getRank())); } else { diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_matmul_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_matmul_node.cpp index 986708bf5e64f7..7cda16e44747cf 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_matmul_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_matmul_node.cpp @@ -142,7 +142,7 @@ void MKLDNNMatMulNode::initSupportedPrimitiveDescriptors() { PortConfig dataConfig; dataConfig.inPlace = -1; dataConfig.constant = false; - dataConfig.desc = make_unique(dims, dataType, MKLDNNMemory::GetPlainFormatByRank(dims.size())); + dataConfig.desc = MKLDNNPlugin::make_unique(dims, dataType, MKLDNNMemory::GetPlainFormatByRank(dims.size())); return dataConfig; }; diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_memory_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_memory_node.cpp index e80ba86d935559..5e6728628a45e2 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_memory_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_memory_node.cpp @@ -65,7 +65,7 @@ void MKLDNNMemoryOutputNode::initSupportedPrimitiveDescriptors() { config.inConfs.resize(1); config.inConfs[0].inPlace = -1; config.inConfs[0].constant = false; - config.inConfs[0].desc = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, MKLDNNMemory::GetPlainFormatByRank(getParentEdgeAt(0)->getShape().getRank())); supportedPrimitiveDescriptors.emplace_back(config, impl_desc_type::unknown); } diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_mvn_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_mvn_node.cpp index c256ffe802d601..87af89ef67a673 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_mvn_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_mvn_node.cpp @@ -742,14 +742,14 @@ void MKLDNNMVNNode::initSupportedPrimitiveDescriptors() { config.inConfs[0].inPlace = -1; config.outConfs[0].inPlace = canBeInplace ? 
0 : -1; if (inputsNum == 2) { - config.inConfs[1].desc = make_unique(getParentEdgeAt(1)->getShape().getStaticMklDims(), memory::data_type::s32, + config.inConfs[1].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(1)->getShape().getStaticMklDims(), memory::data_type::s32, MKLDNNMemory::GetPlainFormatByRank(getParentEdgeAt(1)->getShape().getRank())); config.inConfs[1].constant = true; } auto pushDesc = [&](memory::format_tag format, impl_desc_type impl_type) { - config.inConfs[0].desc = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, format); - config.outConfs[0].desc = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), outputDataType, format); + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, format); + config.outConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), outputDataType, format); supportedPrimitiveDescriptors.push_back({config, impl_type}); }; diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_normalize_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_normalize_node.cpp index c5f965556efe0f..315f211d86dbe5 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_normalize_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_normalize_node.cpp @@ -764,10 +764,10 @@ void MKLDNNNormalizeL2Node::initSupportedPrimitiveDescriptors() { config.outConfs[0].inPlace = canBeInplace ? 0 : -1; auto pushDesc = [&](memory::format_tag format) { - config.inConfs[0].desc = make_unique(getParentEdgeAt(DATA)->getShape().getStaticMklDims(), inputDataType, format); - config.inConfs[1].desc = make_unique(getParentEdgeAt(AXES)->getShape().getStaticMklDims(), memory::data_type::s32, + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(DATA)->getShape().getStaticMklDims(), inputDataType, format); + config.inConfs[1].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(AXES)->getShape().getStaticMklDims(), memory::data_type::s32, memory::format_tag::x); - config.outConfs[0].desc = make_unique(getParentEdgeAt(DATA)->getShape().getStaticMklDims(), outputDataType, format); + config.outConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(DATA)->getShape().getStaticMklDims(), outputDataType, format); supportedPrimitiveDescriptors.push_back({config, impl_desc_type::unknown}); }; diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_pad_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_pad_node.cpp index d8a9134f03df6c..7f02b156dccd68 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_pad_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_pad_node.cpp @@ -131,15 +131,16 @@ void MKLDNNPadNode::initSupportedPrimitiveDescriptors() { config.outConfs.resize(1); auto pushSupportedPrimitiveDescriptor = [&](memory::format_tag memoryFormat) { - config.inConfs[0].desc = make_unique(getParentEdgeAt(DATA_ID)->getShape().getStaticMklDims(), dataType, memoryFormat); - config.inConfs[1].desc = make_unique(getParentEdgeAt(PADS_BEGIN_ID)->getShape().getStaticMklDims(), memory::data_type::s32, - memory::format_tag::x); - config.inConfs[2].desc = make_unique(getParentEdgeAt(PADS_END_ID)->getShape().getStaticMklDims(), memory::data_type::s32, - memory::format_tag::x); + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(DATA_ID)->getShape().getStaticMklDims(), dataType, + memoryFormat); + config.inConfs[1].desc = 
MKLDNNPlugin::make_unique(getParentEdgeAt(PADS_BEGIN_ID)->getShape().getStaticMklDims(), + memory::data_type::s32, memory::format_tag::x); + config.inConfs[2].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(PADS_END_ID)->getShape().getStaticMklDims(), + memory::data_type::s32, memory::format_tag::x); if (isPadValueSpecified) - config.inConfs[3].desc = make_unique(getParentEdgeAt(PAD_VALUE_ID)->getShape().getStaticMklDims(), memory::data_type::f32, - memory::format_tag::x); - config.outConfs[0].desc = make_unique(getChildEdgeAt(DATA_ID)->getShape().getStaticMklDims(), dataType, memoryFormat); + config.inConfs[3].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(PAD_VALUE_ID)->getShape().getStaticMklDims(), + memory::data_type::f32, memory::format_tag::x); + config.outConfs[0].desc = MKLDNNPlugin::make_unique(getChildEdgeAt(DATA_ID)->getShape().getStaticMklDims(), dataType, memoryFormat); supportedPrimitiveDescriptors.push_back({config, impl_desc_type::ref}); }; diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_pooling_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_pooling_node.cpp index 228d108dfcdfcc..d137379114f877 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_pooling_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_pooling_node.cpp @@ -133,16 +133,16 @@ void MKLDNNPoolingNode::getSupportedDescriptors() { if (outputDataType == memory::data_type::bf16) outputDataType = memory::data_type::f32; // i8 layers supports only ndhwc and nhwc layouts - const auto in_candidate = make_unique(parentDims, inputDataType, inputRank == 5 ? + const auto in_candidate = MKLDNNPlugin::make_unique(parentDims, inputDataType, inputRank == 5 ? memory::format_tag::ndhwc : memory::format_tag::nhwc); - const auto out_candidate = make_unique(childDims, outputDataType, inputRank == 5 ? + const auto out_candidate = MKLDNNPlugin::make_unique(childDims, outputDataType, inputRank == 5 ? memory::format_tag::ndhwc : memory::format_tag::nhwc); createDescriptor({ in_candidate.get() }, { out_candidate.get() }); } else if ((inputRank == 4 || inputRank == 5) && parentDims[1] == 1) { // WA. We should force planar layout since it provides better performance - const auto in_candidate = make_unique(parentDims, inputDataType, inputRank == 5 ? + const auto in_candidate = MKLDNNPlugin::make_unique(parentDims, inputDataType, inputRank == 5 ? memory::format_tag::ncdhw : memory::format_tag::nchw); - const auto out_candidate = make_unique(childDims, outputDataType, inputRank == 5 ? + const auto out_candidate = MKLDNNPlugin::make_unique(childDims, outputDataType, inputRank == 5 ? 
memory::format_tag::ncdhw : memory::format_tag::nchw);
         createDescriptor({ in_candidate.get() }, { out_candidate.get() });
     } else {
@@ -152,8 +152,8 @@ void MKLDNNPoolingNode::getSupportedDescriptors() {
     }
     // It doesn't support any format
     for (auto format : getAvailableFormatsForDims(getParentEdgeAt(0)->getShape())) {
-        const auto in_candidate = make_unique(parentDims, inputDataType, format);
-        const auto out_candidate = make_unique(childDims, outputDataType, format);
+        const auto in_candidate = MKLDNNPlugin::make_unique(parentDims, inputDataType, format);
+        const auto out_candidate = MKLDNNPlugin::make_unique(childDims, outputDataType, format);
         createDescriptor({in_candidate.get()}, {out_candidate.get()});
     }
 }
diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_reduce_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_reduce_node.cpp
index 4c459bd185637c..83362ed08a6ab3 100644
--- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_reduce_node.cpp
+++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_reduce_node.cpp
@@ -1474,10 +1474,11 @@ void MKLDNNReduceNode::initSupportedPrimitiveDescriptors() {
     auto pushDesc = [&](memory::format_tag inFormat, memory::format_tag outFormat, memory::data_type inDataType, memory::data_type outDataType, impl_desc_type impl_type) {
-        config.inConfs[REDUCE_DATA].desc = make_unique(getParentEdgeAt(REDUCE_DATA)->getShape().getStaticMklDims(), inDataType, inFormat);
-        config.inConfs[REDUCE_INDEXES].desc = make_unique(getParentEdgeAt(REDUCE_INDEXES)->getShape().getStaticMklDims(),
+        config.inConfs[REDUCE_DATA].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(REDUCE_DATA)->getShape().getStaticMklDims(),
+                                                                     inDataType, inFormat);
+        config.inConfs[REDUCE_INDEXES].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(REDUCE_INDEXES)->getShape().getStaticMklDims(),
                                                                         memory::data_type::s32, memory::format_tag::x);
-        config.outConfs[0].desc = make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outDataType, outFormat);
+        config.outConfs[0].desc = MKLDNNPlugin::make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outDataType, outFormat);
         supportedPrimitiveDescriptors.push_back({config, impl_type});
     };
diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_reshape_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_reshape_node.cpp
index 8a934467ab0df8..d54dbf96c9b120 100644
--- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_reshape_node.cpp
+++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_reshape_node.cpp
@@ -41,12 +41,12 @@ void MKLDNNReshapeNode::initSupportedPrimitiveDescriptors() {
     for (size_t i = 0; i < getParentEdges().size(); i++) {
-        config.inConfs[i].desc = make_unique(getParentEdgeAt(i)->getShape().getStaticMklDims(), inputDataType);
+        config.inConfs[i].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(i)->getShape().getStaticMklDims(), inputDataType);
     }
     config.outConfs.resize(1);
     config.outConfs[0].inPlace = 0;
     config.outConfs[0].constant = false;
-    config.outConfs[0].desc = make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType);
+    config.outConfs[0].desc = MKLDNNPlugin::make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType);
     supportedPrimitiveDescriptors.emplace_back(config, impl_desc_type::unknown);
 }
diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_rnn.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_rnn.cpp
index 3e56171135deb2..586aa007c7fe7f 100644
--- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_rnn.cpp
+++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_rnn.cpp
@@ -299,11 +299,11 @@ void
MKLDNNRNN::fillCellDesc() { out_data_d.emplace_back(S_4D_shape, memory::data_type::f32, memory::format_tag::ldnc); } - w_data_d = make_unique(MKLDNNDims{L, D, DC, G, SC}, dataType, memory::format_tag::ldigo); - w_state_d = make_unique(MKLDNNDims{L, D, SC, G, SC}, dataType, memory::format_tag::ldigo); + w_data_d = MKLDNNPlugin::make_unique(MKLDNNDims{L, D, DC, G, SC}, dataType, memory::format_tag::ldigo); + w_state_d = MKLDNNPlugin::make_unique(MKLDNNDims{L, D, SC, G, SC}, dataType, memory::format_tag::ldigo); // Add 5th input - w_bias_d = make_unique(MKLDNNDims{L, D, Gb, SC}, memory::data_type::f32, memory::format_tag::ldgo); + w_bias_d = MKLDNNPlugin::make_unique(MKLDNNDims{L, D, Gb, SC}, memory::data_type::f32, memory::format_tag::ldgo); copyWeightsData(); @@ -400,10 +400,10 @@ void MKLDNNRNN::fillSeqDesc() { out_data_d.emplace_back(MKLDNNDims{S_4D_shape}, memory::data_type::f32, memory::format_tag::ldnc); } - w_data_d = make_unique(MKLDNNDims{L, D, DC, G, SC}, dataType, memory::format_tag::ldigo); - w_state_d = make_unique(MKLDNNDims{L, D, SC, G, SC}, dataType, memory::format_tag::ldigo); + w_data_d = MKLDNNPlugin::make_unique(MKLDNNDims{L, D, DC, G, SC}, dataType, memory::format_tag::ldigo); + w_state_d = MKLDNNPlugin::make_unique(MKLDNNDims{L, D, SC, G, SC}, dataType, memory::format_tag::ldigo); - w_bias_d = make_unique(MKLDNNDims{L, D, Gb, SC}, memory::data_type::f32, memory::format_tag::ldgo); + w_bias_d = MKLDNNPlugin::make_unique(MKLDNNDims{L, D, Gb, SC}, memory::data_type::f32, memory::format_tag::ldgo); copyWeightsData(); diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_roi_align_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_roi_align_node.cpp index 4fbe6894805db0..620ae5d000368a 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_roi_align_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_roi_align_node.cpp @@ -129,12 +129,12 @@ void MKLDNNROIAlignNode::initSupportedPrimitiveDescriptors() { }; for (auto fmts : supportedFormats) { - config.inConfs[0].desc = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, fmts.first); - config.inConfs[1].desc = make_unique(getParentEdgeAt(1)->getShape().getStaticMklDims(), memory::data_type::f32, + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, fmts.first); + config.inConfs[1].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(1)->getShape().getStaticMklDims(), memory::data_type::f32, memory::format_tag::nc); - config.inConfs[2].desc = make_unique(getParentEdgeAt(2)->getShape().getStaticMklDims(), memory::data_type::s32, + config.inConfs[2].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(2)->getShape().getStaticMklDims(), memory::data_type::s32, memory::format_tag::x); - config.outConfs[0].desc = make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType, fmts.second); + config.outConfs[0].desc = MKLDNNPlugin::make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType, fmts.second); supportedPrimitiveDescriptors.push_back({config, impl_desc_type::unknown}); } } diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_roi_pooling_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_roi_pooling_node.cpp index 5e4e79b03020ea..9e35897a004a66 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_roi_pooling_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_roi_pooling_node.cpp @@ -413,9 +413,9 @@ void 
MKLDNNROIPoolingNode::initSupportedPrimitiveDescriptors() { impl_type = impl_desc_type::ref; } - config.inConfs[0].desc = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), dataType, format); - config.inConfs[1].desc = make_unique(getParentEdgeAt(1)->getShape().getStaticMklDims(), dataType, memory::format_tag::nc); - config.outConfs[0].desc = make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), dataType, format); + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), dataType, format); + config.inConfs[1].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(1)->getShape().getStaticMklDims(), dataType, memory::format_tag::nc); + config.outConfs[0].desc = MKLDNNPlugin::make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), dataType, format); supportedPrimitiveDescriptors.push_back({config, impl_type}); } diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_roll_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_roll_node.cpp index 8cfc1d3de2a9b6..ba614a39d08474 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_roll_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_roll_node.cpp @@ -101,7 +101,7 @@ void MKLDNNRollNode::initSupportedPrimitiveDescriptors() { PortConfig dataConfig; dataConfig.inPlace = -1; dataConfig.constant = false; - dataConfig.desc = make_unique(dims.getStaticMklDims(), dataType, MKLDNNMemory::GetPlainFormatByRank(dims.getRank())); + dataConfig.desc = MKLDNNPlugin::make_unique(dims.getStaticMklDims(), dataType, MKLDNNMemory::GetPlainFormatByRank(dims.getRank())); return dataConfig; }; diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_scatter_update_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_scatter_update_node.cpp index 7bee9036bfae57..44a396f1ea33d5 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_scatter_update_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_scatter_update_node.cpp @@ -201,13 +201,15 @@ void MKLDNNScatterUpdateNode::initSupportedPrimitiveDescriptors() { } auto pushDesc = [&](memory::format_tag inFormat, memory::format_tag idxFormat, memory::format_tag updateFormat, memory::format_tag outFormat) { - config.inConfs[DATA_ID].desc = make_unique(getParentEdgeAt(DATA_ID)->getShape().getStaticMklDims(), dataType, inFormat); - config.inConfs[INDICES_ID].desc = make_unique(getParentEdgeAt(INDICES_ID)->getShape().getStaticMklDims(), indicesType, idxFormat); - config.inConfs[UPDATE_ID].desc = make_unique(getParentEdgeAt(UPDATE_ID)->getShape().getStaticMklDims(), dataType, updateFormat); + config.inConfs[DATA_ID].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(DATA_ID)->getShape().getStaticMklDims(), dataType, inFormat); + config.inConfs[INDICES_ID].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(INDICES_ID)->getShape().getStaticMklDims(), indicesType, + idxFormat); + config.inConfs[UPDATE_ID].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(UPDATE_ID)->getShape().getStaticMklDims(), dataType, + updateFormat); if (axisRelaxed) - config.inConfs[AXIS_ID].desc = make_unique(getParentEdgeAt(AXIS_ID)->getShape().getStaticMklDims(), + config.inConfs[AXIS_ID].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(AXIS_ID)->getShape().getStaticMklDims(), MKLDNNExtensionUtils::IEPrecisionToDataType(axisPrec), memory::format_tag::x); - config.outConfs[0].desc = make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), dataType, outFormat); + config.outConfs[0].desc = 
MKLDNNPlugin::make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), dataType, outFormat); supportedPrimitiveDescriptors.push_back({config, impl_desc_type::unknown}); }; diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_softmax_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_softmax_node.cpp index de6dc13b14ee63..e3c7787c6f64de 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_softmax_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_softmax_node.cpp @@ -39,7 +39,8 @@ void MKLDNNSoftMaxNode::getSupportedDescriptors() { IE_THROW() << "Incorrect number of output edges for layer " << getName(); if (getParentEdgeAt(0)->getShape().getRank() == 3) { - MemoryDescPtr in_candidate = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, memory::format_tag::abc); + MemoryDescPtr in_candidate = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, + memory::format_tag::abc); createDescriptor({in_candidate.get()}, {}); } @@ -48,7 +49,7 @@ void MKLDNNSoftMaxNode::getSupportedDescriptors() { if (MKLDNNMemoryDesc(dims, inputDataType, format).blocksExtended()) continue; - MemoryDescPtr in_candidate = make_unique(dims, inputDataType, format); + MemoryDescPtr in_candidate = MKLDNNPlugin::make_unique(dims, inputDataType, format); createDescriptor({in_candidate.get()}, {}); } diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_split_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_split_node.cpp index 04d6a8a9b4b351..4a12ec4b81c205 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_split_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_split_node.cpp @@ -139,12 +139,12 @@ void MKLDNNSplitNode::initSupportedPrimitiveDescriptors() { config.inConfs.resize(INPUTS_NUM); config.inConfs[0].inPlace = -1; config.inConfs[0].constant = false; - config.inConfs[0].desc = make_unique(itr->second->createDesc(inpPrecision, srcShape.getStaticDims())); + config.inConfs[0].desc = MKLDNNPlugin::make_unique(itr->second->createDesc(inpPrecision, srcShape.getStaticDims())); config.inConfs[1].inPlace = -1; config.inConfs[1].constant = true; - config.inConfs[1].desc = make_unique(axisPrecision, SizeVector{1}); + config.inConfs[1].desc = MKLDNNPlugin::make_unique(axisPrecision, SizeVector{1}); if (INPUTS_NUM == 3) { - config.inConfs[2].desc = make_unique(axisPrecision, SizeVector{outputShapes.size()}); + config.inConfs[2].desc = MKLDNNPlugin::make_unique(axisPrecision, SizeVector{outputShapes.size()}); config.inConfs[2].constant = true; } @@ -153,7 +153,7 @@ void MKLDNNSplitNode::initSupportedPrimitiveDescriptors() { for (size_t i = 0; i < outputShapes.size(); i++) { config.outConfs[i].inPlace = -1; config.outConfs[i].constant = false; - config.outConfs[i].desc = make_unique(itr->second->createDesc(inpPrecision, outputShapes[i].getStaticDims())); + config.outConfs[i].desc = MKLDNNPlugin::make_unique(itr->second->createDesc(inpPrecision, outputShapes[i].getStaticDims())); } supportedPrimitiveDescriptors.emplace_back(config, impl_desc_type::ref); @@ -189,7 +189,7 @@ void MKLDNNSplitNode::initSupportedPrimitiveDescriptors() { } } - config.inConfs[0].desc = make_unique(inpPrecision, srcShape.getStaticDims(), blkDims, order, offset, offsets, strides); + config.inConfs[0].desc = MKLDNNPlugin::make_unique(inpPrecision, srcShape.getStaticDims(), blkDims, order, offset, offsets, strides); for (size_t i = 0; i < outputShapes.size(); i++) { auto outBlockingDesc = 
MemoryDescUtils::convertToBlockedDescriptor(*refConfig.outConfs[i].desc); @@ -197,7 +197,7 @@ void MKLDNNSplitNode::initSupportedPrimitiveDescriptors() { const auto& dims = outBlockingDesc.getShape().getStaticDims(); config.outConfs[i].inPlace = 0; - config.outConfs[i].desc = make_unique(outPrecision, dims, outBlkDims, order, offset, offsets, strides); + config.outConfs[i].desc = MKLDNNPlugin::make_unique(outPrecision, dims, outBlkDims, order, offset, offsets, strides); } supportedPrimitiveDescriptors.emplace_back(config, impl_desc_type::unknown); } @@ -210,13 +210,13 @@ void MKLDNNSplitNode::initSupportedPrimitiveDescriptors() { config.inConfs.resize(INPUTS_NUM); config.inConfs[0].inPlace = -1; config.inConfs[0].constant = false; - config.inConfs[0].desc = make_unique( + config.inConfs[0].desc = MKLDNNPlugin::make_unique( creatorsMap.at(GeneralLayout::nspc)->createDesc(inpPrecision, srcShape.getStaticDims())); config.inConfs[1].inPlace = -1; config.inConfs[1].constant = true; - config.inConfs[1].desc = make_unique(axisPrecision, SizeVector{1}); + config.inConfs[1].desc = MKLDNNPlugin::make_unique(axisPrecision, SizeVector{1}); if (INPUTS_NUM == 3) { - config.inConfs[2].desc = make_unique(axisPrecision, SizeVector{outputShapes.size()}); + config.inConfs[2].desc = MKLDNNPlugin::make_unique(axisPrecision, SizeVector{outputShapes.size()}); config.inConfs[2].constant = true; } config.outConfs.resize(outputShapes.size()); @@ -224,7 +224,7 @@ void MKLDNNSplitNode::initSupportedPrimitiveDescriptors() { for (size_t i = 0; i < outputShapes.size(); i++) { config.outConfs[i].inPlace = -1; config.outConfs[i].constant = false; - config.outConfs[i].desc = make_unique(creatorsMap.at(GeneralLayout::ncsp)->createDesc(inpPrecision, + config.outConfs[i].desc = MKLDNNPlugin::make_unique(creatorsMap.at(GeneralLayout::ncsp)->createDesc(inpPrecision, outputShapes[i].getStaticDims())); } supportedPrimitiveDescriptors.emplace_back(config, impl_desc_type::ref); @@ -368,7 +368,7 @@ void MKLDNNSplitNode::initOptimalPrimitiveDescriptor() { size_t offset = 0; for (size_t i = 0; i < outputShapes.size(); i++) { auto outBlockingDesc = MemoryDescUtils::convertToBlockedDescriptor(*config.outConfs[i].desc); - config.outConfs[i].desc = make_unique(outBlockingDesc.getPrecision(), + config.outConfs[i].desc = MKLDNNPlugin::make_unique(outBlockingDesc.getPrecision(), outBlockingDesc.getShape().getStaticDims(), outBlockingDesc.getBlockDims(), outBlockingDesc.getOrder(), diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_strided_slice_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_strided_slice_node.cpp index b788bec61f9aef..8a5832210e0906 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_strided_slice_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_strided_slice_node.cpp @@ -243,12 +243,12 @@ void MKLDNNStridedSliceNode::initSupportedPrimitiveDescriptors() { for (auto itr = range.first; itr != range.second; ++itr) { config.inConfs[0].desc = itr->second->createUniqueDesc(dataPrecision, getParentEdgeAt(DATA_ID)->getShape().getStaticDims()); - config.inConfs[BEGIN_ID].desc = make_unique(getParentEdgeAt(BEGIN_ID)->getShape().getStaticMklDims(), beginDataType, + config.inConfs[BEGIN_ID].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(BEGIN_ID)->getShape().getStaticMklDims(), beginDataType, mkldnn::memory::format_tag::x); - config.inConfs[END_ID].desc = make_unique(getParentEdgeAt(END_ID)->getShape().getStaticMklDims(), endDataType, + config.inConfs[END_ID].desc = 
MKLDNNPlugin::make_unique(getParentEdgeAt(END_ID)->getShape().getStaticMklDims(), endDataType, mkldnn::memory::format_tag::x); if (hasStrides) - config.inConfs[STRIDE_ID].desc = make_unique(getParentEdgeAt(STRIDE_ID)->getShape().getStaticMklDims(), + config.inConfs[STRIDE_ID].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(STRIDE_ID)->getShape().getStaticMklDims(), MKLDNNExtensionUtils::IEPrecisionToDataType(stridePrecision), mkldnn::memory::format_tag::x); diff --git a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_transpose_node.cpp b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_transpose_node.cpp index 7ce6106249b0bd..8b3b0de8cb2d69 100644 --- a/inference-engine/src/mkldnn_plugin/nodes/mkldnn_transpose_node.cpp +++ b/inference-engine/src/mkldnn_plugin/nodes/mkldnn_transpose_node.cpp @@ -74,63 +74,66 @@ void MKLDNNTransposeNode::initSupportedPrimitiveDescriptors() { config.inConfs[0].constant = false; config.outConfs[0].inPlace = -1; config.outConfs[0].constant = false; - config.inConfs[1].desc = make_unique(getParentEdgeAt(1)->getShape().getStaticMklDims(), inputOrderDataType, memory::format_tag::x); + config.inConfs[1].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(1)->getShape().getStaticMklDims(), inputOrderDataType, + memory::format_tag::x); if (getParentEdgeAt(0)->getShape().getRank() == 4) { - config.inConfs[0].desc = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, memory::format_tag::nchw); - config.outConfs[0].desc = make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType, memory::format_tag::nchw); + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, + memory::format_tag::nchw); + config.outConfs[0].desc = MKLDNNPlugin::make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType, + memory::format_tag::nchw); supportedPrimitiveDescriptors.push_back({config, impl_desc_type::unknown}); auto srcDims = getParentEdgeAt(0)->getShape().getStaticMklDims(); if (srcDims[1] % 8 == 0) { - config.inConfs[0].desc = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, memory::format_tag::nChw8c); supportedPrimitiveDescriptors.push_back({config, impl_desc_type::unknown}); } if (srcDims[1] % 16 == 0) { - config.inConfs[0].desc = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, memory::format_tag::nChw16c); supportedPrimitiveDescriptors.push_back({config, impl_desc_type::unknown}); } if (prec == Precision::FP32 || prec == Precision::I8 || prec == Precision::U8) { - config.inConfs[0].desc = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, memory::format_tag::nhwc); - config.outConfs[0].desc = make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType, + config.outConfs[0].desc = MKLDNNPlugin::make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType, memory::format_tag::nhwc); supportedPrimitiveDescriptors.push_back({config, impl_desc_type::unknown}); } } else if (getParentEdgeAt(0)->getShape().getRank() == 5) { - config.inConfs[0].desc = 
make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, memory::format_tag::ncdhw); - config.outConfs[0].desc = make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType, + config.outConfs[0].desc = MKLDNNPlugin::make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType, memory::format_tag::ncdhw); supportedPrimitiveDescriptors.push_back({config, impl_desc_type::unknown}); auto srcDims = getParentEdgeAt(0)->getShape().getStaticMklDims(); if (srcDims[1] % 8 == 0) { - config.inConfs[0].desc = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, memory::format_tag::nCdhw8c); supportedPrimitiveDescriptors.push_back({config, impl_desc_type::unknown}); } if (srcDims[1] % 16 == 0) { - config.inConfs[0].desc = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, memory::format_tag::nCdhw16c); supportedPrimitiveDescriptors.push_back({config, impl_desc_type::unknown}); } if (prec == Precision::FP32 || prec == Precision::I8 || prec == Precision::U8) { - config.inConfs[0].desc = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType, memory::format_tag::ndhwc); - config.outConfs[0].desc = make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType, + config.outConfs[0].desc = MKLDNNPlugin::make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType, memory::format_tag::ndhwc); supportedPrimitiveDescriptors.push_back({config, impl_desc_type::unknown}); } } else { // general plain case - config.inConfs[0].desc = make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType); - config.outConfs[0].desc = make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType); + config.inConfs[0].desc = MKLDNNPlugin::make_unique(getParentEdgeAt(0)->getShape().getStaticMklDims(), inputDataType); + config.outConfs[0].desc = MKLDNNPlugin::make_unique(getChildEdgeAt(0)->getShape().getStaticMklDims(), outputDataType); supportedPrimitiveDescriptors.push_back({config, impl_desc_type::unknown}); } } diff --git a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/skip_tests_config.cpp b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/skip_tests_config.cpp index 2cbe8b70185844..24948ed99d605b 100644 --- a/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/skip_tests_config.cpp +++ b/inference-engine/tests/functional/plugin/cpu/shared_tests_instances/skip_tests_config.cpp @@ -77,24 +77,14 @@ std::vector disabledTestPatterns() { // INVESTIGATE - // R"(.*FakeQuantizeLayerTest.*)", - // R"(.*StaticShapeLoopTest.*)", - // R"(.*TrivialLoopTest.*)", - // R"(.*TransposeLayerTest.*)", - // R"(.*TransposeLayerCPUTest.*)", - // R"(.*FuseTransposeAndReorderTest.*)", - // - // R"(.*TensorIteratorTest.*)", + - // R"(.*FuseScaleShiftAndFakeQuantizeTest.*)", - // R"(.*OnnxModelWithCustomAbs.*)", - // R"(.*XmlModelWithCustomAbs.*)", - // R"(.*Gather_x2_add_mul_relu_concat_matmul.*)", + - // R"(.*SetMean.*)", - // 
R"(.*SetScale.*)", - // R"(.*smoke_LPT.*)", - // R"(.*GRUSequenceCPUTest.*)", // reorder BF16 - // R"(.*LSTMSequenceCPUTest.*)", // reorder BF16 - // R"(.*RNNSequenceCPUTest.*)" // reorder BF16 + R"(.*OnnxModelWithCustomAbs.*)", + R"(.*XmlModelWithCustomAbs.*)", + R"(.*SetMean.*)", + R"(.*SetScale.*)", + R"(.*GRUSequenceCPUTest.*)", // reorder BF16 + R"(.*LSTMSequenceCPUTest.*)", // reorder BF16 + R"(.*RNNSequenceCPUTest.*)", // reorder BF16 + R"(.*ConvConcatSubgraphTest.*)" // inPlace }; #ifdef __APPLE__ // TODO: Issue 55717 diff --git a/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/conv_concat.cpp b/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/conv_concat.cpp index c3312de90b4da9..dd31d2ed847a5c 100644 --- a/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/conv_concat.cpp +++ b/inference-engine/tests/functional/plugin/cpu/subgraph_tests/src/conv_concat.cpp @@ -115,7 +115,6 @@ TEST_P(ConvConcatSubgraphTest, CompareWithRefs) { SKIP_IF_CURRENT_TEST_IS_DISABLED() Run(); - executableNetwork.GetExecGraphInfo().serialize("graph.xml"); CheckPluginRelatedResults(executableNetwork, pluginTypeNode); };