diff --git a/src/operator/nn/log_softmax.cc b/src/operator/nn/log_softmax.cc
index 89af9b3813ba..16324b51c322 100644
--- a/src/operator/nn/log_softmax.cc
+++ b/src/operator/nn/log_softmax.cc
@@ -70,10 +70,10 @@ static void LogSoftmaxGradComputeExCPU(const nnvm::NodeAttrs& attrs,
 }
 
 inline static bool LogSoftmaxStorageType(const nnvm::NodeAttrs& attrs,
-                                     const int dev_mask,
-                                     DispatchMode* dispatch_mode,
-                                     std::vector<int> *in_attrs,
-                                     std::vector<int> *out_attrs) {
+                                         const int dev_mask,
+                                         DispatchMode* dispatch_mode,
+                                         std::vector<int> *in_attrs,
+                                         std::vector<int> *out_attrs) {
   CHECK_EQ(in_attrs->size(), 1U);
   CHECK_EQ(out_attrs->size(), 1U);
 
@@ -82,10 +82,10 @@ inline static bool LogSoftmaxStorageType(const nnvm::NodeAttrs& attrs,
 }
 
 inline static bool LogSoftmaxGradStorageType(const nnvm::NodeAttrs& attrs,
-                                         const int dev_mask,
-                                         DispatchMode* dispatch_mode,
-                                         std::vector<int> *in_attrs,
-                                         std::vector<int> *out_attrs) {
+                                             const int dev_mask,
+                                             DispatchMode* dispatch_mode,
+                                             std::vector<int> *in_attrs,
+                                             std::vector<int> *out_attrs) {
   bool support = true;
   int num_inputs = 2U;
   if (softmax_has_dtype_override(attrs)) {
diff --git a/src/operator/nn/mkldnn/mkldnn_log_softmax.cc b/src/operator/nn/mkldnn/mkldnn_log_softmax.cc
index 0627c6f11044..0d992b252fa8 100644
--- a/src/operator/nn/mkldnn/mkldnn_log_softmax.cc
+++ b/src/operator/nn/mkldnn/mkldnn_log_softmax.cc
@@ -58,15 +58,13 @@ static mkldnn::logsoftmax_backward::primitive_desc GetLogSoftmaxBwdPd(
 
 bool SupportMKLDNNLogSoftmax(const SoftmaxParam &param, const NDArray &data,
                              const NDArray &output) {
-  // MKLDNN does not support temperature argument in their softmax function
-  // now. Need update this once they start to support it.
   const int ndim = data.shape().ndim();
   const int in_dtype = data.dtype();
   const int out_dtype = output.dtype();
   const int axis = CheckAxis(param.axis, ndim);
-  // MKLDNN does not support temperature argument in their softmax function
+  // MKLDNN does not support temperature argument in their log_softmax function
   // now. Need update this once they start to support it.
-  // Currently, MKLDNN shows bad performance when softmax is not performed on the last dimension
+  // Currently, MKLDNN shows bad performance when log_softmax is not performed on the last dimension
   if (param.temperature.has_value() ||
       in_dtype != mshadow::kFloat32 ||
       in_dtype != out_dtype ||
@@ -133,7 +131,7 @@ void MKLDNNLogSoftmaxForward(const nnvm::NodeAttrs& attrs,
                              const OpReqType &req,
                              const NDArray &out_data) {
   if (req == kNullOp) return;
-  // same as the FCompute path, softmax only supports kWriteTo and kWriteInplace for now.
+  // same as the FCompute path, log_softmax only supports kWriteTo and kWriteInplace for now.
   CHECK_NE(req, kAddTo);
   const SoftmaxParam& param = nnvm::get<SoftmaxParam>(attrs.parsed);