[LPT] Empty shape on weights handling: cherry-pick to master (#6170)
* [LPT] empty shape on weights fix

* [LPT] SplitTransformation naming fix

* [LPT] tests

Co-authored-by: Vladislav Golubev <[email protected]>
eshoguli and v-Golubev authored Jun 17, 2021
1 parent f977125 commit 8390f40
Showing 7 changed files with 81 additions and 5 deletions.
@@ -111,13 +111,13 @@ void SplitTransformation::updateOutputs(
         updateOutput(context, lastNodes[0], originalNode);
     } else {
         const std::string originalName = originalNode->get_friendly_name();
-        for (auto& lastNode : lastNodes) {
+        for (size_t outIdx = 0; outIdx < lastNodes.size(); ++outIdx) {
             for (size_t i = 0; i < outputSize; ++i) {
                 std::shared_ptr<ngraph::Node> result = context.function->get_output_op(i);
                 std::shared_ptr<ngraph::Node> outputNode = result->get_input_node_shared_ptr(0);
-                if (outputNode.get() == lastNode.get()) {
+                if (outputNode.get() == lastNodes[outIdx].get()) {
                     originalNode->set_friendly_name(originalName + LayerTransformation::originalLayerPostfix);
-                    lastNode->set_friendly_name(originalName + "." + std::to_string(i));
+                    lastNodes[outIdx]->set_friendly_name(originalName + "." + std::to_string(outIdx));
                     break;
                 }
             }
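Why the index swap above matters: the inner loop variable i walks the function's Result ops, while the node being renamed is lastNodes[outIdx]. The old code built the name suffix from i, so whenever the Results are not stored in the same order as the split outputs, an output got the suffix of its Result's position instead of its own. A self-contained sketch (plain strings standing in for the ngraph nodes) of the difference:

    #include <iostream>
    #include <string>
    #include <vector>

    int main() {
        // Hypothetical setup: the function's outputs list the second split output first.
        const std::vector<std::string> functionOutputs = {"Split.1", "Split.0"};
        const std::vector<std::string> lastNodes = {"Split.0", "Split.1"};

        for (size_t outIdx = 0; outIdx < lastNodes.size(); ++outIdx) {
            for (size_t i = 0; i < functionOutputs.size(); ++i) {
                if (functionOutputs[i] == lastNodes[outIdx]) {
                    // Old: suffix from i -> "Split.0" would be renamed "name.1",
                    // because its Result happens to sit at position 1.
                    // New: suffix from outIdx -> "Split.0" is renamed "name.0".
                    std::cout << "name." << outIdx << '\n';
                    break;
                }
            }
        }
    }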
@@ -157,10 +157,15 @@ bool WeightableLayerTransformation::canBeTransformed(const TransformationContext
         }

         const size_t outChannelsShapeIndex = is_type<opset1::ConvolutionBackpropData>(layer) ? 1ul : 0ul;
-        if ( // Check if all dimensions of scale except the output channels are all ones
+        if (
+            // expected, it's ok: return true
+            (shape_size(constOutputShape) != 1ul) &&
+            // not expected, something wrong: return false
+            ((constOutputShape.size() <= outChannelsShapeIndex) ||
+            // Check if all dimensions of scale except the output channels are all ones
             (shape_size(constOutputShape) != constOutputShape[outChannelsShapeIndex]) ||
             ((constOutputShape[outChannelsShapeIndex] != 1ul) &&
-            (fqFromWeights->get_output_shape(0)[outChannelsShapeIndex] != constOutputShape[outChannelsShapeIndex]))) {
+            (fqFromWeights->get_output_shape(0)[outChannelsShapeIndex] != constOutputShape[outChannelsShapeIndex])))) {
             return false;
         }
     } else {
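The new leading condition is what makes an empty shape on weights acceptable: shape_size is the product of the dimensions, and the empty product is 1, so a scalar constant with Shape{} short-circuits the whole check, while the added rank test (constOutputShape.size() <= outChannelsShapeIndex) keeps the later indexing from running past the end of a low-rank shape. A simplified, self-contained sketch of the guard (the fqFromWeights clause is omitted here):

    #include <cstddef>
    #include <functional>
    #include <iostream>
    #include <numeric>
    #include <vector>

    using Shape = std::vector<std::size_t>;

    // Same semantics as ngraph::shape_size: product of dimensions, 1 for Shape{}.
    std::size_t shape_size(const Shape& s) {
        return std::accumulate(s.begin(), s.end(), std::size_t{1}, std::multiplies<std::size_t>());
    }

    bool rejected(const Shape& constOutputShape, std::size_t outChannelsShapeIndex) {
        return
            // a scalar (per-tensor) constant is always fine
            (shape_size(constOutputShape) != 1ul) &&
            // otherwise the shape must reach the output-channel axis ...
            ((constOutputShape.size() <= outChannelsShapeIndex) ||
            // ... and every dimension except output channels must be 1
            (shape_size(constOutputShape) != constOutputShape[outChannelsShapeIndex]));
    }

    int main() {
        std::cout << rejected(Shape{}, 1) << '\n';            // 0: empty shape passes (the fix)
        std::cout << rejected(Shape{8, 1, 1, 1}, 0) << '\n';  // 0: per-output-channel scales
        std::cout << rejected(Shape{1, 8, 1, 1}, 0) << '\n';  // 1: scaled along the wrong axis
    }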
@@ -116,6 +116,7 @@ class ConvolutionBackpropDataTransformation : public LayerTransformation, public
         SimpleLowPrecisionTransformer transform;
         transform.add<ngraph::pass::low_precision::ConvolutionBackpropDataTransformation, ngraph::opset1::Convolution>(testValues.params);
         transform.transform(actualFunction);
+
         std::shared_ptr<Node> refWeights = pass::low_precision::fold<opset1::Broadcast>(
             testValues.expected.weights,
             opset1::Constant::create(
@@ -202,6 +203,26 @@ const std::vector<ConvolutionBackpropDataTransformationTestValues> testValues =
             true
         }
     },
+    // with zero point
+    {
+        LayerTransformation::createParamsU8I8(),
+        // ActualValues
+        {
+            ngraph::element::u8,
+            {{ngraph::element::f32}, { 128.f }, { 0.02f }},
+            { 255ul, Shape({}), { 0.f }, { 254.f }, { -1.27f }, { 1.27f } },
+            op::Constant::create(ngraph::element::i8, ngraph::Shape{}, std::vector<float>{ 2.f })
+        },
+        // ExpectedValues
+        {
+            ngraph::element::u8,
+            {{}, { { 128.f }, ngraph::element::f32, {}, false }, {}},
+            {},
+            {{}, {}, {{ 0.0002f }, ngraph::element::f32, { 1 }}},
+            op::Constant::create(ngraph::element::i8, ngraph::Shape{}, std::vector<float>{ -125.f }),
+            true
+        }
+    },
     // updatePrecisions = false
     {
         LayerTransformation::createParamsU8I8().setUpdatePrecisions(false),
@@ -262,6 +283,26 @@ const std::vector<ConvolutionBackpropDataTransformationTestValues> testValues =
             true
         }
     },
+    // without zero point
+    {
+        LayerTransformation::createParamsU8I8(),
+        // ActualValues
+        {
+            ngraph::element::u8,
+            {{ngraph::element::f32}, {}, { 0.02f }},
+            { 255ul, Shape({}), { 0.f }, { 254.f }, { -1.27f }, { 1.27f } },
+            op::Constant::create(ngraph::element::i8, ngraph::Shape{}, std::vector<float>{ 2.f })
+        },
+        // ExpectedValues
+        {
+            ngraph::element::u8,
+            {},
+            {},
+            {{}, {}, {{ 0.0002f }, ngraph::element::f32, { 1 }}},
+            op::Constant::create(ngraph::element::i8, ngraph::Shape{}, std::vector<float>{ -125.f }),
+            true
+        }
+    },
     // QDq version
     {
         LayerTransformation::createParamsU8I8(),
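In the test values above, Shape({}) in the FakeQuantize descriptors is the point of the commit: the interval constants on the weights are scalars (empty shape, per-tensor quantization) rather than {1, 1, 1, 1}-shaped. A hedged sketch of how such a FakeQuantize is built, assuming the 2021-era ngraph API used elsewhere in this diff (the helper name is illustrative, not part of the test suite):

    #include <memory>
    #include <ngraph/ngraph.hpp>
    #include <ngraph/opsets/opset1.hpp>

    // A 255-level FakeQuantize on weights whose interval constants all have the
    // empty Shape{} (i.e. scalars), matching the
    // { 255ul, Shape({}), { 0.f }, { 254.f }, { -1.27f }, { 1.27f } } rows above.
    std::shared_ptr<ngraph::Node> scalarFqOnWeights(const ngraph::Output<ngraph::Node>& weights) {
        using ngraph::opset1::Constant;
        const auto inputLow   = Constant::create(ngraph::element::f32, ngraph::Shape{}, { 0.f });
        const auto inputHigh  = Constant::create(ngraph::element::f32, ngraph::Shape{}, { 254.f });
        const auto outputLow  = Constant::create(ngraph::element::f32, ngraph::Shape{}, { -1.27f });
        const auto outputHigh = Constant::create(ngraph::element::f32, ngraph::Shape{}, { 1.27f });
        return std::make_shared<ngraph::opset1::FakeQuantize>(
            weights, inputLow, inputHigh, outputLow, outputHigh, 255ul);
    }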
@@ -27,6 +27,13 @@ const std::vector<LayerTestsDefinitions::ConvolutionBackpropDataTransformationPa
         "U8"
     },
     // FQ on weights
+    {
+        {256ul, ngraph::Shape{}, { 0.f }, { 25.5f }, { 0.f }, { 25.5f }},
+        {255ul, ngraph::Shape{}, { -12.7f }, { 12.7f }, { -12.7f }, { 12.7f }},
+        "convolutionBackpropData_original",
+        "U8"
+    },
+    // FQ on weights
     // with zero point
     {
         {256ul, ngraph::Shape{1, 1, 1, 1}, { 0.f }, { 255.f }, { -12.7f }, { 12.8f }},
@@ -46,6 +46,14 @@ const std::vector<LayerTestsDefinitions::ConvolutionTransformationParam> params
         "Convolution",
         "U8"
     },
+    {
+        { 256ul, ngraph::Shape {}, { 0.f }, { 255.f }, { 0.f }, { 25.5f } },
+        false,
+        { 255ul, ngraph::Shape {}, { 0.f }, { 254.f }, { -12.7f }, { 12.7f } },
+        false,
+        "Convolution",
+        "U8"
+    },
     {
         { 16ul, ngraph::Shape { 1, 1, 1, 1 }, { 0.f }, { 255.f }, { 0.f }, { 25.5f } },
         false,
@@ -28,6 +28,13 @@ const std::vector<LayerTestsDefinitions::ConvolutionBackpropDataTransformationPa
         "U8"
     },
     // FQ on weights
+    {
+        {256ul, ngraph::Shape{}, { 0.f }, { 25.5f }, { 0.f }, { 25.5f }},
+        {255ul, ngraph::Shape{}, { -12.7f }, { 12.7f }, { -12.7f }, { 12.7f }},
+        "convolutionBackpropData_original",
+        "U8"
+    },
+    // FQ on weights
     {
         {256ul, ngraph::Shape{1, 1, 1, 1}, { -12.8f }, { 12.7f }, { -12.8f }, { 12.7f }},
         {255ul, ngraph::Shape{1, 1, 1, 1}, { -12.7f }, { 12.7f }, { -12.7f }, { 12.7f }},
@@ -46,6 +46,14 @@ const std::vector<LayerTestsDefinitions::ConvolutionTransformationParam> params
         "Convolution",
         "U8"
     },
+    {
+        { 256ul, ngraph::Shape {}, { 0.f }, { 255.f }, { 0.f }, { 25.5f } },
+        false,
+        { 255ul, ngraph::Shape {}, { 0.f }, { 254.f }, { -12.7f }, { 12.7f } },
+        false,
+        "Convolution",
+        "U8"
+    },
     {
         { 256ul, ngraph::Shape { 1, 1, 1, 1 }, { 0.f }, { 255.f }, { -12.75f }, { 6.375f } },
         true,
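For reading the rows in the two plugin test files above: each entry pairs a FakeQuantize description on the activations with one on the weights, plus two asymmetric-quantization flags and the expected layer name and runtime precision. The field layout below is reconstructed from the initializer lists in this diff, not confirmed by it; the type and member names are stand-ins for the real ngraph::builder::subgraph test types:

    #include <cstddef>
    #include <string>
    #include <vector>

    // Assumed approximation of one FakeQuantize descriptor row.
    struct FakeQuantizeDescriptor {
        std::size_t levels;                 // 256ul on data, 255ul on weights
        std::vector<std::size_t> shape;     // empty {} == scalar interval constants
        std::vector<float> inputLow, inputHigh, outputLow, outputHigh;
    };

    // Assumed approximation of ConvolutionTransformationParam.
    struct ConvolutionTransformationParam {
        FakeQuantizeDescriptor fqOnData;      // e.g. { 256ul, {}, { 0.f }, { 255.f }, { 0.f }, { 25.5f } }
        bool asymmetricQuantizationOnData;    // e.g. false
        FakeQuantizeDescriptor fqOnWeights;   // e.g. { 255ul, {}, { 0.f }, { 254.f }, { -12.7f }, { 12.7f } }
        bool asymmetricQuantizationOnWeights; // e.g. false
        std::string layerName;                // e.g. "Convolution"
        std::string expectedKernelType;       // e.g. "U8"
    };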
