Commit

[CPU] NormalizeL2 node migration on nGraph (#13)
Maxim Andronov committed Apr 21, 2021
1 parent 3b06850 commit ebdc143
Showing 19 changed files with 514 additions and 497 deletions.
2 changes: 1 addition & 1 deletion inference-engine/src/mkldnn_plugin/CMakeLists.txt
@@ -42,7 +42,7 @@ set(LAYERS
 #        ${CMAKE_CURRENT_SOURCE_DIR}/nodes/mkldnn_tensoriterator_node.cpp
 #        ${CMAKE_CURRENT_SOURCE_DIR}/nodes/mkldnn_tile_node.cpp
         ${CMAKE_CURRENT_SOURCE_DIR}/nodes/mkldnn_mvn_node.cpp
-#        ${CMAKE_CURRENT_SOURCE_DIR}/nodes/mkldnn_normalize_node.cpp
+        ${CMAKE_CURRENT_SOURCE_DIR}/nodes/mkldnn_normalize_node.cpp
 #        ${CMAKE_CURRENT_SOURCE_DIR}/nodes/mkldnn_scatter_update_node.cpp
         ${CMAKE_CURRENT_SOURCE_DIR}/nodes/mkldnn_interpolate_node.cpp
 #        ${CMAKE_CURRENT_SOURCE_DIR}/nodes/mkldnn_reduce_node.cpp
16 changes: 16 additions & 0 deletions inference-engine/src/mkldnn_plugin/mkldnn_extension_utils.cpp
@@ -209,3 +209,19 @@ InferenceEngine::Precision MKLDNNExtensionUtils::getMaxPrecision(std::vector<Inf

     return InferenceEngine::Precision::UNSPECIFIED;
 }
+
+bool MKLDNNExtensionUtils::isPerTensorOrPerChannelBroadcastable(const InferenceEngine::SizeVector &firstInputDims,
+                                                                InferenceEngine::SizeVector secondInputDims) {
+    if (secondInputDims.size() > firstInputDims.size())
+        return false;
+    if (std::accumulate(secondInputDims.begin(), secondInputDims.end(), 1, std::multiplies<size_t>()) == 1)
+        return true;
+    for (size_t i = 0; i < (firstInputDims.size() - secondInputDims.size()); i++) {
+        secondInputDims.insert(secondInputDims.begin(), 1);
+    }
+    for (size_t i = 0; i < secondInputDims.size(); i++) {
+        if ((i == 1 && secondInputDims[i] != firstInputDims[1]) || (i != 1 && secondInputDims[i] != 1))
+            return false;
+    }
+    return true;
+}
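For context, a minimal standalone sketch of what the helper above accepts, using hypothetical NCHW shapes (axis 1 treated as the channel axis). All names below are illustrative stand-ins, not plugin API. One deliberate difference: the padding is done in a single insert() call, because the committed loop recomputes its bound after every insertion and so under-pads when the rank difference is two or more; the fusion paths in this commit do not hit that case, since a scalar second input returns early through the per-tensor check.

#include <cstddef>
#include <cstdio>
#include <functional>
#include <numeric>
#include <vector>

using SizeVector = std::vector<size_t>;  // stand-in for InferenceEngine::SizeVector

// True if `second` broadcasts against `first` per-tensor (one element overall)
// or per-channel (only axis 1 differs from 1, and it matches first[1]).
static bool isPerTensorOrPerChannelBroadcastable(const SizeVector &first, SizeVector second) {
    if (second.size() > first.size())
        return false;
    if (std::accumulate(second.begin(), second.end(), size_t{1}, std::multiplies<size_t>()) == 1)
        return true;  // per-tensor: scalar second input
    // Left-pad `second` with 1s up to the rank of `first` in a single call.
    second.insert(second.begin(), first.size() - second.size(), 1);
    for (size_t i = 0; i < second.size(); i++) {
        if ((i == 1 && second[i] != first[1]) || (i != 1 && second[i] != 1))
            return false;
    }
    return true;  // per-channel along axis 1
}

int main() {
    const SizeVector data = {1, 16, 32, 32};  // NCHW activation shape
    std::printf("%d\n", isPerTensorOrPerChannelBroadcastable(data, {1}));              // 1: per-tensor
    std::printf("%d\n", isPerTensorOrPerChannelBroadcastable(data, {1, 16, 1, 1}));    // 1: per-channel
    std::printf("%d\n", isPerTensorOrPerChannelBroadcastable(data, {1, 16, 32, 32})); // 0: neither
    return 0;
}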
1 change: 1 addition & 0 deletions inference-engine/src/mkldnn_plugin/mkldnn_extension_utils.h
@@ -79,6 +79,7 @@ class MKLDNNExtensionUtils {
     static bool initTensorsAreEqual(const InferenceEngine::TensorDesc &desc1, const InferenceEngine::TensorDesc &desc2);
     static std::string getReorderArgs(const InferenceEngine::TensorDesc &parentDesc, const InferenceEngine::TensorDesc &childDesc);
     static InferenceEngine::Precision getMaxPrecision(std::vector<InferenceEngine::Precision> precisions);
+    static bool isPerTensorOrPerChannelBroadcastable(const InferenceEngine::SizeVector &firstInputDims, InferenceEngine::SizeVector secondInputDims);
 };
 
 } // namespace MKLDNNPlugin
109 changes: 42 additions & 67 deletions inference-engine/src/mkldnn_plugin/mkldnn_graph_optimizer.cpp
@@ -130,9 +130,8 @@ void MKLDNNGraphOptimizer::ApplyCommonGraphOptimizations(MKLDNNGraph &graph) {
     FuseInterpolateAndSimpleOperation(graph);
     graph.RemoveDroppedNodes();
 
-    // TODO [NM]: transformation should be implemented w/o using of CNNLayer
-    // FuseNormalizeAndSimpleOperation(graph);
-    // graph.RemoveDroppedNodes();
+    FuseNormalizeL2AndSimpleOperation(graph);
+    graph.RemoveDroppedNodes();
 
     FuseEltwiseAndSimple(graph);
     graph.RemoveDroppedNodes();
@@ -1428,70 +1427,46 @@ void MKLDNNGraphOptimizer::FuseInterpolateAndSimpleOperation(MKLDNNGraph &graph)
     }
 }
 
-void MKLDNNGraphOptimizer::FuseNormalizeAndSimpleOperation(MKLDNNGraph &graph) {
-//    auto& graphNodes = graph.GetNodes();
-//
-//    auto isSutableParentNode = [](MKLDNNNodePtr node) {
-//        bool isSutableNormalize = node->getType() == Normalize;
-//
-//        if (isSutableNormalize) {
-//            return node->getChildEdges().size() == 1;
-//        } else {
-//            return false;
-//        }
-//    };
-//
-//    auto isSutableChildNode = [&](MKLDNNNodePtr node) {
-//        if (!node->getCnnLayer())
-//            return false;
-//
-//        if (node->getType() == Quantize) {
-//            auto* quantizeNode = dynamic_cast<MKLDNNQuantizeNode*>(node.get());
-//            if (quantizeNode == nullptr)
-//                IE_THROW() << "Cannot get quantize layer " << node->getName();
-//            return !quantizeNode->isBinarization();
-//        } else if (node->getType() == Eltwise) {
-//            auto *eltwiseNode = dynamic_cast<MKLDNNEltwiseNode *>(node.get());
-//            if (eltwiseNode == nullptr)
-//                IE_THROW() << "Cannot get Eltwise node " << node->getName();
-//            return IsOneOf(eltwiseNode->getOpType(), {Relu, Gelu, Elu, Logistic, BoundedRelu, Clamp, Tanh, Swish,
-//                                                      Hswish, Mish, Hsigmoid, Round, Linear, Abs, Square, Sqrt}) ||
-//                    ((eltwiseNode->getOpType() == MulAdd && eltwiseNode->getCnnLayer()->blobs.size() == 2) ||
-//                     (eltwiseNode->getOpType() == Prelu));
-//        }
-//
-//        return false;
-//    };
-//
-//    auto parent = graphNodes.begin();
-//    while (parent != graphNodes.end()) {
-//        auto parentNode = *parent;
-//        if (!isSutableParentNode(parentNode)) {
-//            parent++;
-//            continue;
-//        }
-//
-//        auto childNode = parentNode->getChildEdgeAt(0)->getChild();
-//        if (!isSutableChildNode(childNode)) {
-//            parent++;
-//            continue;
-//        }
-//
-//        parentNode->fuseWith(childNode);
-//
-//        if (childNode->getType() == Quantize || childNode->getType() == Eltwise) {
-//            auto parentEdges = childNode->parentEdges;
-//            for (auto &parentEdge : parentEdges) {
-//                auto p_edge = parentEdge.lock();
-//                if (p_edge->getParent()->getType() == Normalize)
-//                    continue;
-//
-//                removeEdge(graph, p_edge);
-//            }
-//        }
-//
-//        graph.DropNode(childNode);
-//    }
+void MKLDNNGraphOptimizer::FuseNormalizeL2AndSimpleOperation(MKLDNNGraph &graph) {
+    auto& graphNodes = graph.GetNodes();
+
+    auto isSutableParentNode = [](MKLDNNNodePtr node) {
+        if (node->getType() == NormalizeL2) {
+            return node->getChildEdges().size() == 1;
+        } else {
+            return false;
+        }
+    };
+
+    auto parent = graphNodes.begin();
+    while (parent != graphNodes.end()) {
+        auto parentNode = *parent;
+        if (!isSutableParentNode(parentNode)) {
+            parent++;
+            continue;
+        }
+
+        auto childNode = parentNode->getChildEdgeAt(0)->getChild();
+        if (!parentNode->canFuse(childNode)) {
+            parent++;
+            continue;
+        }
+
+        parentNode->fuseWith(childNode);
+
+        if (childNode->getType() == Quantize || childNode->getType() == Eltwise) {
+            auto parentEdges = childNode->parentEdges;
+            for (auto &parentEdge : parentEdges) {
+                auto p_edge = parentEdge.lock();
+                if (p_edge->getParent()->getType() == NormalizeL2)
+                    continue;
+
+                removeEdge(graph, p_edge);
+            }
+        }
+
+        graph.DropNode(childNode);
+    }
 }

 void MKLDNNGraphOptimizer::FuseEltwiseAndSimple(MKLDNNGraph &graph) {
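To illustrate the pass above (an illustrative sketch, not plugin code): a NormalizeL2 node whose single consumer passes canFuse() absorbs that child, e.g.

// Before fusion:                              After FuseNormalizeL2AndSimpleOperation:
//
//   Input -> NormalizeL2 -> ReLU -> ...       Input -> NormalizeL2 (ReLU fused) -> ...
//
// For Eltwise/Quantize children, any remaining parent edges that do not come
// from the NormalizeL2 node (e.g. scale/shift constant inputs) are detached
// with removeEdge() before DropNode() removes the child from the graph.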
2 changes: 1 addition & 1 deletion inference-engine/src/mkldnn_plugin/mkldnn_graph_optimizer.h
@@ -33,7 +33,7 @@ class MKLDNNGraphOptimizer {
     void FuseConvolutionSumAndConvolutionSumActivation(MKLDNNGraph &graph);
     void FuseMVNAndSimpleOperation(MKLDNNGraph &graph);
     void FuseInterpolateAndSimpleOperation(MKLDNNGraph &graph);
-    void FuseNormalizeAndSimpleOperation(MKLDNNGraph &graph);
+    void FuseNormalizeL2AndSimpleOperation(MKLDNNGraph &graph);
 
     void DropDoubleReorders(MKLDNNGraph& graph);
     void DropConvertReorder(MKLDNNGraph& graph);
2 changes: 1 addition & 1 deletion inference-engine/src/mkldnn_plugin/mkldnn_node.cpp
@@ -187,7 +187,7 @@ static const InferenceEngine::details::caseless_unordered_map<std::string, Type>
// { "Memory", MemoryOutput }, // for construction from layer ctor
// { "Convert", Convert },
{ "MVN", MVN},
// { "Normalize", Normalize},
{ "NormalizeL2", NormalizeL2},
// { "ScatterUpdate", ScatterUpdate},
// { "ScatterElementsUpdate", ScatterElementsUpdate},
// { "ScatterNDUpdate", ScatterNDUpdate},
10 changes: 7 additions & 3 deletions inference-engine/src/mkldnn_plugin/mkldnn_node.h
@@ -75,7 +75,7 @@ enum Type {
     TensorIterator,
     Convert,
     MVN,
-    Normalize,
+    NormalizeL2,
     ScatterUpdate,
     ScatterElementsUpdate,
     ScatterNDUpdate,
@@ -238,8 +238,8 @@ static std::string NameFromType(Type type) {
return "TensorIterator";
case Convert:
return "Convert";
case Normalize:
return "Normalize";
case NormalizeL2:
return "NormalizeL2";
case ScatterUpdate:
return "ScatterUpdate";
case ScatterElementsUpdate:
@@ -620,6 +620,10 @@ class MKLDNNNode : public InferenceEngine::details::no_copy {
         return algorithm;
     }
 
+    virtual bool canFuse(const MKLDNNNodePtr& node) const {
+        return false;
+    }
+
 protected:
     void setType(Type type) {
         this->type = type;
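The new canFuse() virtual defaults to false, so fusion is strictly opt-in per node type. A hypothetical override for the migrated node might gate fusion on the broadcast helper added above (a sketch only; the real MKLDNNNormalizeL2Node implementation is in one of the changed files not rendered on this page, and the edge/dims accessors used here are assumptions):

// Hypothetical sketch -- not the committed implementation.
bool MKLDNNNormalizeL2Node::canFuse(const MKLDNNNodePtr& node) const {
    // Only simple Eltwise/Quantize consumers are candidates.
    if (node->getType() != Eltwise && node->getType() != Quantize)
        return false;
    // Require the child's second input to be per-tensor or per-channel
    // broadcastable against this node's output dims.
    return MKLDNNExtensionUtils::isPerTensorOrPerChannelBroadcastable(
            getChildEdgeAt(0)->getDims().ToSizeVector(),
            node->getParentEdgeAt(1)->getDims().ToSizeVector());
}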
2 changes: 1 addition & 1 deletion inference-engine/src/mkldnn_plugin/nodes/mkldnn_bin_conv_node.h
@@ -86,7 +86,7 @@ class MKLDNNBinaryConvolutionNode : public MKLDNNNode {
         return false;
     }
     void setPostOps(mkldnn::primitive_attr &attr);
-    bool canFuse(const MKLDNNNodePtr& node) const;
+    bool canFuse(const MKLDNNNodePtr& node) const override;
 
 private:
     bool withSum = false;