
Commit

Fix indent and comments
bgawrych committed May 19, 2020
1 parent b393cc1 commit 957eaf0
Showing 2 changed files with 11 additions and 13 deletions.
16 changes: 8 additions & 8 deletions src/operator/nn/log_softmax.cc
@@ -70,10 +70,10 @@ static void LogSoftmaxGradComputeExCPU(const nnvm::NodeAttrs& attrs,
 }
 
 inline static bool LogSoftmaxStorageType(const nnvm::NodeAttrs& attrs,
-                                           const int dev_mask,
-                                           DispatchMode* dispatch_mode,
-                                           std::vector<int> *in_attrs,
-                                           std::vector<int> *out_attrs) {
+                                         const int dev_mask,
+                                         DispatchMode* dispatch_mode,
+                                         std::vector<int> *in_attrs,
+                                         std::vector<int> *out_attrs) {
   CHECK_EQ(in_attrs->size(), 1U);
   CHECK_EQ(out_attrs->size(), 1U);

@@ -82,10 +82,10 @@ inline static bool LogSoftmaxStorageType(const nnvm::NodeAttrs& attrs,
 }
 
 inline static bool LogSoftmaxGradStorageType(const nnvm::NodeAttrs& attrs,
-                                               const int dev_mask,
-                                               DispatchMode* dispatch_mode,
-                                               std::vector<int> *in_attrs,
-                                               std::vector<int> *out_attrs) {
+                                             const int dev_mask,
+                                             DispatchMode* dispatch_mode,
+                                             std::vector<int> *in_attrs,
+                                             std::vector<int> *out_attrs) {
   bool support = true;
   int num_inputs = 2U;
   if (softmax_has_dtype_override(attrs)) {
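For context (not part of the commit): storage-type functions such as the two re-indented above tell MXNet's executor whether an operator may dispatch to the MKL-DNN (kFComputeEx) path or must stay on the default FCompute path. Below is a minimal, self-contained sketch of that pattern, using stand-in types rather than MXNet's real NodeAttrs/DispatchMode; every name in it is hypothetical and for illustration only.

#include <cassert>
#include <vector>

// Simplified stand-ins for MXNet's real types; these are assumptions
// for illustration, not the actual MXNet definitions.
enum class DispatchMode { kUndefined, kFCompute, kFComputeEx };
constexpr int kDefaultStorage = 0;
constexpr int kCPU = 1;

// Sketch of the storage-type inference pattern: verify tensor counts,
// mark all tensors as dense (default) storage, then pick the dispatch
// mode. kFComputeEx is what routes execution to the MKL-DNN code path.
bool LogSoftmaxStorageTypeSketch(const int dev_mask,
                                 const bool mkldnn_enabled,
                                 DispatchMode* dispatch_mode,
                                 std::vector<int>* in_attrs,
                                 std::vector<int>* out_attrs) {
  assert(in_attrs->size() == 1U);   // log_softmax takes one input
  assert(out_attrs->size() == 1U);  // and produces one output
  for (int& attr : *in_attrs) attr = kDefaultStorage;
  for (int& attr : *out_attrs) attr = kDefaultStorage;
  // MKL-DNN kernels are CPU-only, so gate on the device mask as well.
  *dispatch_mode = (mkldnn_enabled && dev_mask == kCPU)
                       ? DispatchMode::kFComputeEx
                       : DispatchMode::kFCompute;
  return true;
}

In the real operator the heavy lifting is done by MXNet's own helpers; the sketch only mirrors the shape of the check-counts-then-choose-dispatch-mode flow that these functions implement.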
8 changes: 3 additions & 5 deletions src/operator/nn/mkldnn/mkldnn_log_softmax.cc
@@ -58,15 +58,13 @@ static mkldnn::logsoftmax_backward::primitive_desc GetLogSoftmaxBwdPd(
 bool SupportMKLDNNLogSoftmax(const SoftmaxParam &param,
                              const NDArray &data,
                              const NDArray &output) {
-  // MKLDNN does not support temperature argument in their softmax function
-  // now. Need update this once they start to support it.
   const int ndim = data.shape().ndim();
   const int in_dtype = data.dtype();
   const int out_dtype = output.dtype();
   const int axis = CheckAxis(param.axis, ndim);
-  // MKLDNN does not support temperature argument in their softmax function
+  // MKLDNN does not support temperature argument in their log_softmax function
   // now. Need update this once they start to support it.
-  // Currently, MKLDNN shows bad performance when softmax is not performed on the last dimension
+  // Currently, MKLDNN shows bad performance when log_softmax is not performed on the last dimension
   if (param.temperature.has_value() ||
       in_dtype != mshadow::kFloat32 ||
       in_dtype != out_dtype ||
@@ -133,7 +131,7 @@ void MKLDNNLogSoftmaxForward(const nnvm::NodeAttrs& attrs,
                              const OpReqType &req,
                              const NDArray &out_data) {
   if (req == kNullOp) return;
-  // same as the FCompute path, softmax only supports kWriteTo and kWriteInplace for now.
+  // same as the FCompute path, log_softmax only supports kWriteTo and kWriteInplace for now.
   CHECK_NE(req, kAddTo);
 
   const SoftmaxParam& param = nnvm::get<SoftmaxParam>(attrs.parsed);
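The corrected comments spell out when the MKL-DNN log_softmax kernel is usable at all. Below is a self-contained sketch of that gating logic, assuming (from the performance comment above) that the last-axis restriction is part of the condition; kFloat32 stands in for mshadow::kFloat32, and the signature is simplified relative to the real SupportMKLDNNLogSoftmax.

#include <optional>

// Stand-in for mshadow::kFloat32; an assumption for illustration.
constexpr int kFloat32 = 0;

// Sketch of the gating logic described by the corrected comments:
// no temperature, float32 in and out, matching dtypes, and (assumed
// from the performance comment) log_softmax taken over the last axis.
bool SupportMKLDNNLogSoftmaxSketch(const std::optional<double>& temperature,
                                   int ndim, int axis,
                                   int in_dtype, int out_dtype) {
  if (temperature.has_value() ||   // MKL-DNN log_softmax has no temperature
      in_dtype != kFloat32 ||      // only float32 is supported here
      in_dtype != out_dtype ||     // dtype must be preserved
      axis != ndim - 1) {          // last-axis restriction (assumption)
    return false;
  }
  return true;
}

When such a check returns false, execution falls back to the regular CPU implementation rather than the MKL-DNN primitive.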
