Statically analyzed issues. (#2261)
nshchego authored Sep 16, 2020
1 parent f5bd169 commit e7e82b9
Showing 12 changed files with 34 additions and 8 deletions.
@@ -669,6 +669,8 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
     [](const std::shared_ptr<::ngraph::Node>& node, const std::map<std::string, std::string>& params) -> CNNLayerPtr {
         LayerParams attrs = {node->get_friendly_name(), node->description(), details::convertPrecision(node->get_output_element_type(0))};
         auto reduce_node = std::dynamic_pointer_cast<ngraph::op::util::ArithmeticReductionKeepDims>(node);
+        if (reduce_node == nullptr)
+            THROW_IE_EXCEPTION << "Node '" << node->get_name() << "' is not an instance of ArithmeticReductionKeepDims.";
         auto res = std::make_shared<InferenceEngine::ReduceLayer>(attrs);
         res->params = params;
         res->params["keep_dims"] = reduce_node->get_keep_dims() ? "True" : "False";
@@ -678,6 +680,8 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
addSpecificCreator({"ReduceLogicalAnd"}, [](const std::shared_ptr<::ngraph::Node>& node, const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "ReduceAnd", details::convertPrecision(node->get_output_element_type(0))};
auto reduce_node = std::dynamic_pointer_cast<ngraph::op::util::LogicalReductionKeepDims>(node);
if (reduce_node == nullptr)
THROW_IE_EXCEPTION << "Node '" << node->get_name() << "' is not an instance of LogicalReductionKeepDims.";
auto res = std::make_shared<InferenceEngine::ReduceLayer>(attrs);
res->params = params;
res->params["keep_dims"] = reduce_node->get_keep_dims() ? "True" : "False";
@@ -687,6 +691,8 @@ InferenceEngine::details::CNNLayerCreator::CNNLayerCreator(const std::shared_ptr
addSpecificCreator({"ReduceLogicalOr"}, [](const std::shared_ptr<::ngraph::Node>& node, const std::map<std::string, std::string>& params) -> CNNLayerPtr {
LayerParams attrs = {node->get_friendly_name(), "ReduceOr", details::convertPrecision(node->get_output_element_type(0))};
auto reduce_node = std::dynamic_pointer_cast<ngraph::op::util::LogicalReductionKeepDims>(node);
if (reduce_node == nullptr)
THROW_IE_EXCEPTION << "Node '" << node->get_name() << "' is not an instance of LogicalReductionKeepDims.";
auto res = std::make_shared<InferenceEngine::ReduceLayer>(attrs);
res->params = params;
res->params["keep_dims"] = reduce_node->get_keep_dims() ? "True" : "False";
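The three hunks above all harden the same pattern: std::dynamic_pointer_cast yields nullptr when the node is not of the expected type, and the old code dereferenced the result unconditionally. A minimal self-contained sketch of the guarded pattern, using illustrative stand-in types rather than the real nGraph classes:

    #include <iostream>
    #include <memory>
    #include <stdexcept>
    #include <string>

    struct Node { virtual ~Node() = default; };
    struct ReduceNode : Node { bool keep_dims = true; };  // stand-in for ArithmeticReductionKeepDims

    // Hypothetical creator body: cast, verify, then use the derived pointer.
    std::string makeKeepDimsParam(const std::shared_ptr<Node>& node) {
        auto reduce_node = std::dynamic_pointer_cast<ReduceNode>(node);
        if (reduce_node == nullptr)  // without this check, the dereference below is UB
            throw std::runtime_error("Node is not a ReduceNode.");
        return reduce_node->keep_dims ? "True" : "False";
    }

    int main() {
        std::shared_ptr<Node> n = std::make_shared<ReduceNode>();
        std::cout << makeKeepDimsParam(n) << '\n';  // prints "True"
    }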
4 changes: 3 additions & 1 deletion inference-engine/src/legacy_api/src/net_pass.cpp
@@ -398,8 +398,10 @@ bool convertToRNNSeq(CNNLayerPtr cur, const N& net) {
     IE_ASSERT(cell->insData.size() == NS + 1);  // {data, state1, [state2]}
     IE_ASSERT(cell->outData.size() == NS);      // {state1, [state2]}
 
+    auto outData0InputsTo = getInputTo(cell->outData[0]);
     if (getCreatorLayer(cell->insData[0].lock()).lock() != rsp1 ||
-        getInputTo(cell->outData[0]).begin()->second != rsp2)
+        outData0InputsTo.empty() ||
+        outData0InputsTo.begin()->second != rsp2)
         return false;
 
     // Check port mapping
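The map returned by getInputTo can be empty, in which case begin() equals end() and dereferencing it is undefined behavior; caching the result also avoids computing it twice. A self-contained illustration of the guard, with simplified stand-in types (getConsumers plays the role of getInputTo):

    #include <map>
    #include <memory>
    #include <string>

    struct Layer {};
    using LayerPtr = std::shared_ptr<Layer>;

    // Hypothetical stand-in for getInputTo(): consumers of an output, keyed by name.
    std::map<std::string, LayerPtr> getConsumers(const LayerPtr&) { return {}; }

    bool firstConsumerIs(const LayerPtr& data, const LayerPtr& expected) {
        auto consumers = getConsumers(data);  // fetch once, reuse below
        if (consumers.empty())                // guard before begin()->second
            return false;
        return consumers.begin()->second == expected;
    }

    int main() {
        auto data = std::make_shared<Layer>();
        return firstConsumerIs(data, data) ? 1 : 0;  // empty map: safely false
    }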
@@ -269,7 +269,7 @@ void CropValidator::checkShapes(const CNNLayer* layer, const vector<SizeVector>&
         }
     } else if (!casted->dim.empty()) {
         int dim = casted->dim[i];
-        if (firstShape[axis] < static_cast<size_t>(offset + dim)) {
+        if (firstShape[axis] < (static_cast<size_t>(offset) + dim)) {
             THROW_IE_EXCEPTION << "Incorrect crop data! Offset(" << offset << ") + result size of output(" << dim
                                << ") should be less then input size(" << firstShape[axis] << ") for axis(" << axis
                                << ")";
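In the old form the addition offset + dim is performed in int and can overflow before the widening cast; casting offset to size_t first makes the sum be computed in the wider type. A small demonstration of the difference (assumes 32-bit int, 64-bit size_t, and non-negative dim):

    #include <cstddef>
    #include <iostream>
    #include <limits>

    int main() {
        int offset = std::numeric_limits<int>::max();
        int dim = 1;
        // Old form: the int addition overflows (undefined behavior) before the cast:
        //   static_cast<std::size_t>(offset + dim)
        // New form: offset is widened first, so the sum is computed in size_t.
        std::size_t total = static_cast<std::size_t>(offset) + dim;
        std::cout << total << '\n';  // 2147483648 on LP64 platforms
    }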
@@ -26,6 +26,8 @@ ngraph::pass::FullyConnectedBiasFusion::FullyConnectedBiasFusion() {

     if (m_fc == nullptr) {
         m_fc = std::dynamic_pointer_cast<op::FullyConnected>(add_input_1);
+        if (m_fc == nullptr)
+            return false;
         m_bias = add_input_0;
     }

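Here the callback can simply return false when neither Add input casts to op::FullyConnected, which aborts the fusion and leaves the graph unchanged. A compact sketch of that control flow with generic stand-in types:

    #include <memory>

    struct Node { virtual ~Node() = default; };
    struct FullyConnected : Node {};  // stand-in for op::FullyConnected

    // Hypothetical matcher-callback body: try both Add inputs, bail out if neither casts.
    bool tryFuseBias(const std::shared_ptr<Node>& add_input_0,
                     const std::shared_ptr<Node>& add_input_1) {
        auto m_fc = std::dynamic_pointer_cast<FullyConnected>(add_input_0);
        if (m_fc == nullptr) {
            m_fc = std::dynamic_pointer_cast<FullyConnected>(add_input_1);
            if (m_fc == nullptr)
                return false;  // neither input is a FullyConnected: nothing to fuse
        }
        // ... perform the fusion using m_fc ...
        return true;
    }

    int main() {
        auto fc = std::make_shared<FullyConnected>();
        auto other = std::make_shared<Node>();
        return tryFuseBias(other, fc) ? 0 : 1;  // second input casts: fusion proceeds
    }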
@@ -18,7 +18,7 @@ ngraph::pass::ReduceL1Decomposition::ReduceL1Decomposition() {
auto &pattern_to_output = m.get_pattern_value_map();
auto reduce_l1_node = std::dynamic_pointer_cast<ngraph::opset4::ReduceL1>(pattern_to_output.at(reduce_l1).get_node_shared_ptr());

if (m_transformation_callback(reduce_l1_node)) {
if (reduce_l1_node == nullptr || m_transformation_callback(reduce_l1_node)) {
return false;
}

@@ -18,7 +18,7 @@ ngraph::pass::ReduceL2Decomposition::ReduceL2Decomposition() {
auto &pattern_to_output = m.get_pattern_value_map();
auto reduce_l2_node = std::dynamic_pointer_cast<ngraph::opset4::ReduceL2>(pattern_to_output.at(reduce_l2).get_node_shared_ptr());

if (m_transformation_callback(reduce_l2_node)) {
if (reduce_l2_node == nullptr || m_transformation_callback(reduce_l2_node)) {
return false;
}

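Both decomposition fixes rely on short-circuit evaluation of ||: when the cast fails, the left operand is true and the callback is never invoked with a null node. A tiny sketch of that ordering (illustrative names only):

    #include <iostream>
    #include <memory>

    struct Node {};

    // Stand-in for m_transformation_callback.
    bool callback(const std::shared_ptr<Node>&) { return false; }

    int main() {
        std::shared_ptr<Node> reduce_node;  // simulate a failed dynamic_pointer_cast
        // The null check must come first; '||' guarantees callback() is skipped here.
        if (reduce_node == nullptr || callback(reduce_node)) {
            std::cout << "rewrite skipped\n";
        }
    }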
@@ -23,6 +23,9 @@ void dynamicToStaticShapeReshape(std::shared_ptr<ngraph::Node> target) {
         target->get_friendly_name(), target->get_type_info(), ngraph::vpu::op::DynamicShapeResolver::type_info, 0);
 
     const auto reshape = std::dynamic_pointer_cast<ngraph::opset3::Reshape>(target);
+    VPU_THROW_UNLESS(reshape != nullptr,
+        "DynamicToStaticShape transformation for '{}' expects Reshape node",
+        target->get_friendly_name());
     const auto outShapeDescriptor = reshape->input_value(1).get_node_shared_ptr();
 
     const auto replacement = ngraph::as_type_ptr<ngraph::opset3::Constant>(outShapeDescriptor)
@@ -28,6 +28,9 @@ void dynamicToStaticShapeSqueeze(std::shared_ptr<ngraph::Node> target) {
         target->get_friendly_name(), target->get_type_info(), ngraph::op::Constant::type_info, 1);
 
     const auto squeeze = std::dynamic_pointer_cast<ngraph::opset3::Squeeze>(target);
+    VPU_THROW_UNLESS(squeeze != nullptr,
+        "DynamicToStaticShape transformation for '{}' expects Squeeze node",
+        target->get_friendly_name());
     const auto copied = squeeze->clone_with_new_inputs(target->input_values());
     const auto shape = dsr->input(1).get_source_output();

@@ -26,6 +26,9 @@ void dynamicToStaticShapeTranspose(std::shared_ptr<ngraph::Node> target) {
         target->get_friendly_name(), target->get_type_info(), ngraph::opset3::Constant::type_info, 1);
 
     const auto transpose = std::dynamic_pointer_cast<ngraph::opset3::Transpose>(target);
+    VPU_THROW_UNLESS(transpose != nullptr,
+        "DynamicToStaticShape transformation for '{}' expects Transpose node",
+        target->get_friendly_name());
     const auto copied = transpose->clone_with_new_inputs(target->input_values());
     const auto shape = dsr->input(1).get_source_output();

@@ -29,6 +29,9 @@ void dynamicToStaticShapeUnsqueeze(std::shared_ptr<ngraph::Node> target) {
         target->get_friendly_name(), target->get_type_info(), ngraph::op::Constant::type_info, 1);
 
     const auto unsqueeze = std::dynamic_pointer_cast<ngraph::opset3::Unsqueeze>(target);
+    VPU_THROW_UNLESS(unsqueeze != nullptr,
+        "DynamicToStaticShape transformation for '{}' expects Unsqueeze node",
+        target->get_friendly_name());
     const auto copied = unsqueeze->clone_with_new_inputs(target->input_values());
     const auto shape = dsr->input(1).get_source_output();

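The four VPU transformations above (Reshape, Squeeze, Transpose, Unsqueeze) all add the same VPU_THROW_UNLESS guard right after the cast. A standalone approximation of the idea; the macro below is a simplified stand-in for the real VPU_THROW_UNLESS, which also supports formatted messages as seen above:

    #include <memory>
    #include <stdexcept>

    // Simplified stand-in: throw with a message when the condition does not hold.
    #define THROW_UNLESS_SKETCH(condition, message)   \
        do {                                          \
            if (!(condition))                         \
                throw std::runtime_error(message);    \
        } while (false)

    struct Node { virtual ~Node() = default; };
    struct Transpose : Node {};  // stand-in for ngraph::opset3::Transpose

    void dynamicToStaticShapeSketch(const std::shared_ptr<Node>& target) {
        const auto transpose = std::dynamic_pointer_cast<Transpose>(target);
        THROW_UNLESS_SKETCH(transpose != nullptr,
            "DynamicToStaticShape transformation expects a Transpose node");
        // ... clone the node and rewire its shape input ...
    }

    int main() {
        dynamicToStaticShapeSketch(std::make_shared<Transpose>());  // passes the guard
    }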
@@ -65,10 +65,12 @@ uint32_t md_parser_t::evaluate_expr(const md_expr_t *expression,
         case md_type_op_xor:
         case md_type_op_shl:
         case md_type_op_lshr:
-            uint32_t rhs = values.rbegin()[0];
-            uint32_t lhs = values.rbegin()[1];
-            values.pop_back();
-            values.back() = md_eval_expression_type_op_2(v.type, lhs, rhs);
+            if (!values.empty()) {
+                uint32_t rhs = values.rbegin()[0];
+                uint32_t lhs = values.rbegin()[1];
+                values.pop_back();
+                values.back() = md_eval_expression_type_op_2(v.type, lhs, rhs);
+            }
         }
         break;
     case md_type_global_size:
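The binary-operator cases reduce the top two entries of the value stack into one; the added !values.empty() guard keeps the reduction from touching an empty stack. A compact sketch of that reduction step (simplified evaluator; the sketch tightens the guard to require two operands, since both rbegin()[0] and rbegin()[1] are read):

    #include <cstdint>
    #include <iostream>
    #include <vector>

    // Hypothetical two-operand reduction: fold the top two stack values into one.
    void applyBinaryOp(std::vector<uint32_t>& values) {
        if (values.size() >= 2) {  // guard before reading rbegin()[0] and rbegin()[1]
            uint32_t rhs = values.rbegin()[0];
            uint32_t lhs = values.rbegin()[1];
            values.pop_back();
            values.back() = lhs ^ rhs;  // '^' stands in for the dispatched operator
        }
    }

    int main() {
        std::vector<uint32_t> values{3, 5};
        applyBinaryOp(values);
        std::cout << values.back() << '\n';  // prints 6
    }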
@@ -184,6 +184,8 @@ CustomLayer::CustomLayer(std::string configDir, const pugi::xml_node& customLaye
         stageOrder.emplace(stageNum, CustomKernel{kernel, _configDir});
     }
 
+    VPU_THROW_UNLESS(!stageOrder.empty(),
+        "Error while binding %s custom layer: No stages.", _layerName);
     VPU_THROW_UNLESS(stageOrder.begin()->first == 0,
         "Error while binding %s custom layer: Stage 0 is not found.", _layerName);
     VPU_THROW_UNLESS(stageOrder.rbegin()->first == stageOrder.size() - 1,
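The two existing assertions dereference begin() and rbegin(), which is only valid on a non-empty map, so the new emptiness check has to run first. A minimal illustration with std::map and plain exceptions in place of VPU_THROW_UNLESS (the third message is illustrative; the original is truncated above):

    #include <map>
    #include <stdexcept>
    #include <string>

    void validateStageOrder(const std::map<int, std::string>& stageOrder) {
        if (stageOrder.empty())  // must come first: begin()/rbegin() need elements
            throw std::runtime_error("No stages.");
        if (stageOrder.begin()->first != 0)
            throw std::runtime_error("Stage 0 is not found.");
        if (stageOrder.rbegin()->first != static_cast<int>(stageOrder.size()) - 1)
            throw std::runtime_error("Stage numbering is not contiguous.");
    }

    int main() {
        validateStageOrder({{0, "stage0"}, {1, "stage1"}});  // valid: no throw
    }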
