Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Permalink
Formatted last commit
Browse files · Browse the repository at this point in the history
  • Loading branch information
piotrwolinski-intel committed Feb 22, 2022
1 parent 482d190 commit 0510abc
Showing 1 changed file with 20 additions and 20 deletions.
40 changes: 20 additions & 20 deletions src/operator/nn/dnnl/dnnl_batch_norm-inl.h
Original file line number Diff line number Diff line change
Expand Up @@ -146,11 +146,11 @@ static DNNLBNForward& GetBNForward(const BatchNormParam& param,

template <typename DType>
void DNNLBatchNormForwardImpl(const nnvm::NodeAttrs& attrs,
const OpContext& ctx,
const std::vector<NDArray>& inputs,
const std::vector<OpReqType>& req,
const std::vector<NDArray>& outputs,
bool fuse_relu) {
const OpContext& ctx,
const std::vector<NDArray>& inputs,
const std::vector<OpReqType>& req,
const std::vector<NDArray>& outputs,
bool fuse_relu) {
const BatchNormParam& param = nnvm::get<BatchNormParam>(attrs.parsed);
std::vector<NDArray> in_data(inputs.begin(), inputs.begin() + batchnorm::kInMovingMean);

Expand Down Expand Up @@ -263,10 +263,10 @@ void DNNLBatchNormForwardImpl(const nnvm::NodeAttrs& attrs,

// Forward entry point for the oneDNN (DNNL) batch-norm operator.
// Thin dispatch shim: lowers the compile-time `fuse_relu` template flag to a
// runtime argument and forwards everything to DNNLBatchNormForwardImpl.
// `fuse_relu` presumably selects a fused BatchNorm+ReLU primitive — confirm
// against DNNLBatchNormForwardImpl.
//
// \param attrs   operator attributes; Impl reads the parsed BatchNormParam.
// \param ctx     execution context for this invocation.
// \param inputs  input NDArrays (data, gamma, beta, moving mean/var, ...).
// \param req     write-disposition for each output.
// \param outputs output NDArrays written by the Impl.
template <typename DType, bool fuse_relu>
void DNNLBatchNormForward(const nnvm::NodeAttrs& attrs,
                          const OpContext& ctx,
                          const std::vector<NDArray>& inputs,
                          const std::vector<OpReqType>& req,
                          const std::vector<NDArray>& outputs) {
  DNNLBatchNormForwardImpl<DType>(attrs, ctx, inputs, req, outputs, fuse_relu);
}

Expand Down Expand Up @@ -327,11 +327,11 @@ static DNNLBNBackward& GetBNBackward(const BatchNormParam& param,

template <typename DType>
void DNNLBatchNormBackwardImpl(const nnvm::NodeAttrs& attrs,
const OpContext& ctx,
const std::vector<NDArray>& inputs,
const std::vector<OpReqType>& req,
const std::vector<NDArray>& outputs,
bool fuse_relu) {
const OpContext& ctx,
const std::vector<NDArray>& inputs,
const std::vector<OpReqType>& req,
const std::vector<NDArray>& outputs,
bool fuse_relu) {
if (fuse_relu) {
CHECK_EQ(inputs.size(), 9U);
} else {
Expand Down Expand Up @@ -492,12 +492,12 @@ void DNNLBatchNormBackwardImpl(const nnvm::NodeAttrs& attrs,
}

// Backward entry point for the oneDNN (DNNL) batch-norm operator.
// Mirrors DNNLBatchNormForward: converts the compile-time `fuse_relu` flag to
// a runtime argument and delegates to DNNLBatchNormBackwardImpl, which checks
// the expected input count (9 when fused, fewer otherwise) and computes the
// gradients.
//
// \param attrs   operator attributes; Impl reads the parsed BatchNormParam.
// \param ctx     execution context for this invocation.
// \param inputs  gradient and saved-state NDArrays consumed by the Impl.
// \param req     write-disposition for each output gradient.
// \param outputs gradient NDArrays written by the Impl.
template <typename DType, bool fuse_relu>
void DNNLBatchNormBackward(const nnvm::NodeAttrs& attrs,
                           const OpContext& ctx,
                           const std::vector<NDArray>& inputs,
                           const std::vector<OpReqType>& req,
                           const std::vector<NDArray>& outputs) {
  DNNLBatchNormBackwardImpl<DType>(attrs, ctx, inputs, req, outputs, fuse_relu);
}
} // namespace op
} // namespace mxnet
Expand Down

0 comments on commit 0510abc

Please sign in to comment.