Commit

Activation refs (#2)
* HardSigmoid

* Elu

* Selu

* Gelu

* Move to test runtime
iefode authored Aug 17, 2020
1 parent fa43065 commit 9cb9021
Showing 7 changed files with 214 additions and 3 deletions.
@@ -176,7 +176,7 @@ void ActivationParamLayerTest::SetUp() {
std::tie(activationType, netPrecision, shapes, targetDevice) = GetParam();
auto ngPrc = FuncTestUtils::PrecisionUtils::convertIE2nGraphPrc(netPrecision);
auto params = ngraph::builder::makeParams(ngPrc, {shapes.first});
-auto activationParams = createActivationParams(ngPrc);
+auto activationParams = createActivationParams(ngPrc, shapes.second);
params[0]->set_friendly_name("Input");
params.insert(params.end(), activationParams.begin(), activationParams.end());
auto activation = ngraph::builder::makeActivation(params, ngPrc, activationType);
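The only functional change in this hunk is the extra shapes.second argument: the helper that builds an activation's secondary inputs now receives an explicit shape instead of assuming a default one. A minimal sketch of what such a helper could look like; the signature and body below are assumptions for illustration, not the repository's actual implementation:

#include <memory>
#include <vector>
#include "ngraph/ngraph.hpp"

// Hypothetical helper (assumed signature): build the extra Parameter nodes an
// activation needs (e.g. alpha/beta inputs), shaped according to the test case.
ngraph::ParameterVector createActivationParams(const ngraph::element::Type& ngPrc,
                                               const std::vector<size_t>& shape = {}) {
    auto param = std::make_shared<ngraph::op::Parameter>(ngPrc, ngraph::Shape(shape));
    param->set_friendly_name("SecondaryInput");
    return {param};
}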
67 changes: 66 additions & 1 deletion ngraph/test/runtime/interpreter/evaluates_map.cpp
@@ -24,10 +24,16 @@
#include "ngraph/runtime/reference/mvn.hpp"
#include "ngraph/runtime/reference/lrn.hpp"
#include "ngraph/runtime/reference/avg_pool.hpp"
#include <ngraph/runtime/reference/ceiling.hpp>
#include <ngraph/runtime/reference/select.hpp>

#include "reference/detection_output.hpp"
#include "reference/scatter_nd_update.hpp"
#include "reference/scatter_update.hpp"
#include "ngraph/runtime/reference/select.hpp"
#include "reference/gelu.hpp"
#include "reference/hard_sigmoid.hpp"
#include "reference/elu.hpp"
#include "reference/selu.hpp"

using namespace ngraph;
using namespace std;
@@ -379,6 +385,65 @@ namespace {
return true;
}

template<element::Type_t ET>
bool evaluate(const shared_ptr<op::v0::HardSigmoid> &op, const HostTensorVector &outputs,
const HostTensorVector &input) {
using T = typename element_type_traits<ET>::value_type;
runtime::reference::hard_sigmoid<T>(input[0]->get_data_ptr<T>(),
input[1]->get_data_ptr<T>(),
input[2]->get_data_ptr<T>(),
outputs[0]->get_data_ptr<T>(),
shape_size(input[0]->get_shape()),
shape_size(input[1]->get_shape()),
shape_size(input[2]->get_shape()));
return true;
}

template<element::Type_t ET>
bool evaluate(const shared_ptr<op::v0::Elu> &op, const HostTensorVector &outputs,
const HostTensorVector &input) {
using T = typename element_type_traits<ET>::value_type;
runtime::reference::elu<T>(input[0]->get_data_ptr<T>(),
outputs[0]->get_data_ptr<T>(),
shape_size(input[0]->get_shape()),
op->get_alpha());
return true;
}

template<element::Type_t ET>
bool evaluate(const shared_ptr<op::v0::Selu> &op, const HostTensorVector &outputs,
const HostTensorVector &input) {
using T = typename element_type_traits<ET>::value_type;
runtime::reference::selu<T>(input[0]->get_data_ptr<T>(),
input[1]->get_data_ptr<T>(),
input[2]->get_data_ptr<T>(),
outputs[0]->get_data_ptr<T>(),
shape_size(input[0]->get_shape()),
shape_size(input[1]->get_shape()),
shape_size(input[2]->get_shape()));
return true;
}

template<element::Type_t ET>
bool evaluate(const shared_ptr<op::v0::Ceiling> &op, const HostTensorVector &outputs,
const HostTensorVector &input) {
using T = typename element_type_traits<ET>::value_type;
runtime::reference::ceiling<T>(input[0]->get_data_ptr<T>(),
outputs[0]->get_data_ptr<T>(),
shape_size(input[0]->get_shape()));
return true;
}

template<element::Type_t ET>
bool evaluate(const shared_ptr<op::v0::Gelu> &op, const HostTensorVector &outputs,
const HostTensorVector &input) {
using T = typename element_type_traits<ET>::value_type;
runtime::reference::gelu<T>(input[0]->get_data_ptr<T>(),
outputs[0]->get_data_ptr<T>(),
shape_size(input[0]->get_shape()));
return true;
}

template<typename T>
bool evaluate_node(std::shared_ptr<Node> node, const HostTensorVector &outputs, const HostTensorVector &inputs) {
switch (node->get_element_type()) {
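The diff truncates the body of evaluate_node here. For context, a rough sketch of the dispatch it performs (assumed from the surrounding pattern, not part of this commit): the node's element type selects the template instantiation, which forwards to the matching typed evaluate<ET>() overload above.

// Sketch (assumption): switch on element type, then call the typed
// evaluate<ET>() overload for this node class.
template <typename T>
bool evaluate_node(std::shared_ptr<Node> node,
                   const HostTensorVector& outputs,
                   const HostTensorVector& inputs) {
    switch (node->get_element_type()) {
    case element::Type_t::f16:
        return evaluate<element::Type_t::f16>(as_type_ptr<T>(node), outputs, inputs);
    case element::Type_t::f32:
        return evaluate<element::Type_t::f32>(as_type_ptr<T>(node), outputs, inputs);
    case element::Type_t::i32:
        return evaluate<element::Type_t::i32>(as_type_ptr<T>(node), outputs, inputs);
    default:
        return false;  // element type not handled by the interpreter backend
    }
}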
5 changes: 5 additions & 0 deletions ngraph/test/runtime/interpreter/opset_int_tbl.hpp
@@ -45,3 +45,8 @@ NGRAPH_OP(ShapeOf, op::v3)
NGRAPH_OP(NonZero, op::v3)
NGRAPH_OP(ScatterNDUpdate, op::v3)
NGRAPH_OP(ScatterUpdate, op::v3)
NGRAPH_OP(HardSigmoid, op::v0)
NGRAPH_OP(Elu, op::v0)
NGRAPH_OP(Selu, op::v0)
NGRAPH_OP(Ceiling, op::v0)
NGRAPH_OP(Gelu, op::v0)
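This table is consumed through the usual X-macro pattern: the interpreter defines NGRAPH_OP before including the header, so each entry expands into a registration of the typed evaluator for that op and opset version. A rough sketch of the idea (the exact expansion used by the repository may differ):

// Sketch (assumption): map each op's type info to its evaluate_node<> instantiation.
// (Requires <map> and <functional>.)
#define NGRAPH_OP(NAME, NAMESPACE) {NAMESPACE::NAME::type_info, evaluate_node<NAMESPACE::NAME>},
static const std::map<NodeTypeInfo,
                      std::function<bool(std::shared_ptr<Node>,
                                         const HostTensorVector&,
                                         const HostTensorVector&)>>
    evaluator_map = {
#include "opset_int_tbl.hpp"
};
#undef NGRAPH_OP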
@@ -30,9 +30,11 @@ namespace ngraph
{
for (size_t i = 0; i < count; i++)
{
-out[i] = arg[i] < 0 ? alpha * (std::exp(arg[i]) - 1.0) : arg[i];
+out[i] = arg[i] < T(0) ? T(alpha * (std::exp(arg[i]) - 1.0)) : arg[i];
}
}
}


}
}
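The ELU change above casts the comparison constant and the negative branch to T, so both arms of the ternary carry the element type instead of mixing T with double. In formula form the kernel computes

\mathrm{ELU}(x) = \begin{cases} \alpha\,(e^{x} - 1), & x < 0 \\ x, & x \ge 0 \end{cases}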
38 changes: 38 additions & 0 deletions ngraph/test/runtime/interpreter/reference/gelu.hpp
@@ -0,0 +1,38 @@
//*****************************************************************************
// Copyright 2020 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************

#pragma once

#include <cmath>
#include <cstddef>

namespace ngraph
{
namespace runtime
{
namespace reference
{
template <typename T>
void gelu(const T* arg, T* out, size_t count)
{
for (size_t i = 0; i < count; i++)
{
out[i] = 0.5 * arg[i] * (1 + erf(arg[i] / std::sqrt(2)));
}
}
}
}
}
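The new GELU kernel implements the exact, erf-based definition, 0.5 * x * (1 + erf(x / sqrt(2))), rather than the tanh approximation. A minimal usage sketch follows; the include path is an assumption that depends on how the interpreter's include directories are set up:

#include <cstdio>
#include <vector>
#include "reference/gelu.hpp"  // assumed include path

int main() {
    std::vector<float> in{-1.0f, 0.0f, 1.0f};
    std::vector<float> out(in.size());
    // Apply the reference GELU kernel element-wise.
    ngraph::runtime::reference::gelu(in.data(), out.data(), in.size());
    // Expected output, approximately: -0.158655  0.000000  0.841345
    for (float v : out)
        std::printf("%f ", v);
    return 0;
}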
54 changes: 54 additions & 0 deletions ngraph/test/runtime/interpreter/reference/hard_sigmoid.hpp
@@ -0,0 +1,54 @@
//*****************************************************************************
// Copyright 2020 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************

#pragma once

#include <cfenv>
#include <cmath>
#include <numeric>
#include <stdexcept>
#include <vector>

#include "ngraph/axis_vector.hpp"
#include "ngraph/coordinate_transform.hpp"
#include "ngraph/shape.hpp"

namespace ngraph
{
namespace runtime
{
namespace reference
{

template <typename T>
void hard_sigmoid(const T* arg,
const T* alpha,
const T* beta,
T* out,
size_t size_arg,
size_t size_alpha,
size_t size_beta)
{
int cnt = 0;
for (size_t i = 0; i < size_arg; ++i)
{
out[i] = std::max(T(0), std::min(T(1), T(alpha[cnt % size_alpha] * arg[i] + beta[cnt % size_beta])));
cnt++;
}
}
}
}
}
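The hard_sigmoid kernel clamps an affine transform of the input; the modulo indexing on alpha and beta lets those inputs be scalars (the usual case) or element-wise tensors. In formula form:

\mathrm{HardSigmoid}(x) = \max\bigl(0,\; \min\bigl(1,\; \alpha x + \beta\bigr)\bigr)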
47 changes: 47 additions & 0 deletions ngraph/test/runtime/interpreter/reference/selu.hpp
@@ -0,0 +1,47 @@
//*****************************************************************************
// Copyright 2020 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************

#pragma once

#include <cmath>
#include <cstddef>

namespace ngraph
{
namespace runtime
{
namespace reference
{
template <typename T>
void selu(const T* arg,
const T* alpha,
const T* lambda,
T* out,
size_t size_arg,
size_t size_alpha,
size_t size_lambda)
{
int cnt = 0;
for (size_t i = 0; i < size_arg; ++i)
{
out[i] = arg[i] > T(0) ? T(lambda[cnt % size_lambda] * arg[i]) :
T(alpha[cnt % size_alpha] * lambda[cnt % size_lambda] * (std::exp(arg[i]) - 1));
cnt++;
}
}
}
}
}
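The selu kernel follows the same broadcasting-by-modulo scheme for its alpha and lambda inputs and computes

\mathrm{SELU}(x) = \begin{cases} \lambda\,x, & x > 0 \\ \lambda\,\alpha\,(e^{x} - 1), & x \le 0 \end{cases}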
