[CodeStyle][CINN] fix cpplint codestyle [readability/braces] #55049

Merged · 4 commits · Jul 2, 2023
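
All of the changes below address cpplint's [readability/braces] check: once any branch of an if/else chain is braced, every branch must be braced, and a closing brace must not be followed by a stray semicolon. A minimal sketch of the branch-brace pattern, in hypothetical code rather than anything from this diff:

int Clamp01(int x) {
  // cpplint rejects mixing braced and brace-less branches, e.g.
  //   } else if (x < 0)
  //     return 0;
  //   else
  //     return x;
  // The fixed form braces every branch:
  if (x > 1) {
    return 1;
  } else if (x < 0) {
    return 0;
  } else {
    return x;
  }
}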
5 changes: 3 additions & 2 deletions paddle/cinn/common/cas.cc
@@ -123,10 +123,11 @@ Expr ProductGetNonConstantPart(Expr u) {
   }
   if (nonconstant_operands.empty()) {
     return make_const(u->type(), 1);
-  } else if (nonconstant_operands.size() == 1)
+  } else if (nonconstant_operands.size() == 1) {
     return nonconstant_operands.front();
-  else
+  } else {
     return Product::Make(nonconstant_operands);
+  }
   }
   return u;
 }
2 changes: 1 addition & 1 deletion paddle/cinn/frontend/decomposer_registry.h
@@ -33,7 +33,7 @@ class DecomposerContext {
       NetBuilder* builder, absl::flat_hash_map<std::string, Variable>* var_map)
       : builder_(builder), var_map_(var_map) {}
 
-  NetBuilder* builder() const { return builder_; };
+  NetBuilder* builder() const { return builder_; }
 
   // Map the new var to the original var.
   void MapOutToOrigin(const Variable& new_var, const Variable& ori_var) const {
12 changes: 6 additions & 6 deletions paddle/cinn/frontend/op_mappers/paddle/elementwise.cc
@@ -56,12 +56,12 @@ EXPAND_ELTWISETYPE_STRING(kMin, " min ")
 template <EltwiseType Type>
 struct OpBuilder {};
 
-#define ELTWISE_SPEC(enum_t, function)                              \
-  template <>                                                       \
-  struct OpBuilder<enum_t> {                                        \
-    constexpr static Variable (NetBuilder::*func)(const Variable&,  \
-                                                  const Variable&,  \
-                                                  int){&function};  \
+#define ELTWISE_SPEC(enum_t, function)                                        \
+  template <>                                                                 \
+  struct OpBuilder<enum_t> {                                                  \
+    constexpr static Variable (NetBuilder::*func)(const Variable&,            \
+                                                  const Variable&,            \
+                                                  int){&function}; /*NOLINT*/ \
   }
 ELTWISE_SPEC(EltwiseType::kAdd, NetBuilder::Add);
 ELTWISE_SPEC(EltwiseType::kDiv, NetBuilder::Divide);
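
The substantive change in this hunk is only the trailing /*NOLINT*/ (the backslash column shifts to make room for it): the brace-initialized member-function pointer ends in "};", which cpplint's braces check misreads as a block followed by a redundant semicolon, so the marker suppresses the false positive. For readers unfamiliar with the idiom, ELTWISE_SPEC(EltwiseType::kAdd, NetBuilder::Add) expands to roughly the following (reconstructed by hand from the macro body above, so treat it as illustrative):

template <>
struct OpBuilder<EltwiseType::kAdd> {
  // Member-function pointer selecting the NetBuilder op for this EltwiseType.
  constexpr static Variable (NetBuilder::*func)(const Variable&,
                                                const Variable&,
                                                int){&NetBuilder::Add};
};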
2 changes: 1 addition & 1 deletion paddle/cinn/frontend/paddle/cpp/op_desc.cc
@@ -40,7 +40,7 @@ inline std::string AttrTypeToString(
     EXPAND_SWITCH_CASE(SCALAR)
     EXPAND_SWITCH_CASE(SCALARS)
 #undef EXPAND_SWITCH_CASE
-  };
+  }
   return "Invlid AttrType";
 }
26 changes: 13 additions & 13 deletions paddle/cinn/hlir/op/elementwise.cc
@@ -467,19 +467,19 @@ std::shared_ptr<OpStrategy> StrategyForAssignValue(
 
   absl::optional<ir::Tensor> out;
 #define EXPAND_VALUE_TO_TENSOR(TYPE)                                          \
-  else if (absl::get_if<TYPE>(&value)) {                                      \
+  else if (absl::get_if<TYPE>(&value)) { /*NOLINT*/                           \
     out = pe::AssignValue(                                                    \
         std::vector<TYPE>{absl::get<TYPE>(value)}, out_type[0], tensor_name); \
   }                                                                           \
-  else if (absl::get_if<std::vector<TYPE>>(&value)) {                         \
+  else if (absl::get_if<std::vector<TYPE>>(&value)) { /*NOLINT*/              \
     out = pe::AssignValue(                                                    \
         absl::get<std::vector<TYPE>>(value), out_type[0], tensor_name);       \
   }
 
   if (false) {  // NOLINT
   }
   EXPAND_ATTR_TYPE(EXPAND_VALUE_TO_TENSOR)
-  else {
+  else {  // NOLINT
     LOG(FATAL) << "Assign value not support the type " << out_type[0];
   }
 #undef EXPAND_VALUE_TO_TENSOR
@@ -510,17 +510,17 @@ std::vector<shape_t> InferShapeForAssignValue(
 
   shape_t shape;
 #define EXPAND_ATTR_TO_GET_SHAPE(TYPE)                              \
-  else if (absl::get_if<TYPE>(&value)) {                            \
+  else if (absl::get_if<TYPE>(&value)) { /*NOLINT*/                 \
     shape.emplace_back(1);                                          \
   }                                                                 \
-  else if (absl::get_if<std::vector<TYPE>>(&value)) {               \
+  else if (absl::get_if<std::vector<TYPE>>(&value)) { /*NOLINT*/    \
     shape.emplace_back(absl::get<std::vector<TYPE>>(value).size()); \
   }
 
   if (false) {  // NOLINT
   }
   EXPAND_ATTR_TYPE(EXPAND_ATTR_TO_GET_SHAPE)
-  else {
+  else {  // NOLINT
     LOG(FATAL) << "assign_value not support the type!";
   }
 #undef EXPAND_ATTR_TO_GET_SHAPE
@@ -550,18 +550,18 @@ std::vector<Type> InferDtypeForAssignValue(
       << "assign_value should set attribute [values]! Please check.";
   const auto &value = attrs.at("values");
 
-#define EXPAND_ATTR_TO_GET_DTYPE(TYPE)                           \
-  else if (absl::get_if<TYPE>(&value)) {                         \
-    out_type = common::type_of<TYPE>();                          \
-  }                                                              \
-  else if (absl::get_if<std::vector<TYPE>>(&value)) {            \
-    out_type = common::type_of<TYPE>();                          \
+#define EXPAND_ATTR_TO_GET_DTYPE(TYPE)                           \
+  else if (absl::get_if<TYPE>(&value)) { /*NOLINT*/              \
+    out_type = common::type_of<TYPE>();                          \
+  }                                                              \
+  else if (absl::get_if<std::vector<TYPE>>(&value)) { /*NOLINT*/ \
+    out_type = common::type_of<TYPE>();                          \
   }
 
   if (false) {  // NOLINT
   }
   EXPAND_ATTR_TYPE(EXPAND_ATTR_TO_GET_DTYPE)
-  else {
+  else {  // NOLINT
     LOG(FATAL) << "assign_value not support the type!";
   }
 #undef EXPAND_ATTR_TO_GET_DTYPE
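
All three hunks in this file share one idiom: each EXPAND_* macro emits a chain of else-if branches, so the call site opens the chain with a no-op if (false) {} and closes it with a final else. Expanded by hand for a single TYPE, the shape-inference dispatch looks roughly like this (illustrative, with TYPE = int):

if (false) {  // NOLINT: anchor so the macro-emitted branches can start with "else if"
}
else if (absl::get_if<int>(&value)) { /*NOLINT*/
  shape.emplace_back(1);
}
else if (absl::get_if<std::vector<int>>(&value)) { /*NOLINT*/
  shape.emplace_back(absl::get<std::vector<int>>(value).size());
}
else {  // NOLINT
  LOG(FATAL) << "assign_value not support the type!";
}

cpplint wants "} else if" on a single line, but each branch here comes from a separate macro expansion, so the else must begin its own statement; the PR therefore silences the check with NOLINT markers rather than restructuring the macros.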
4 changes: 2 additions & 2 deletions paddle/cinn/hlir/pass/dot_merger.cc
@@ -126,8 +126,8 @@ class DotBuilder {
         shape_dict_{graph_->GetMutableAttrs<shape_dict_t>("infershape")} {}
 
   framework::Graph* graph() const { return graph_; }
-  const dtype_dict_t& dtype_dict() const { return dtype_dict_; };
-  const shape_dict_t& shape_dict() const { return shape_dict_; };
+  const dtype_dict_t& dtype_dict() const { return dtype_dict_; }
+  const shape_dict_t& shape_dict() const { return shape_dict_; }
 
   // Currently the constructor of `NodeData` needs to pass in `Shared<Node>`.
   NodeData* Var(common::Shared<Node>& producer) {
2 changes: 1 addition & 1 deletion paddle/cinn/hlir/pass/fusion_merge_pass_util.h
@@ -77,7 +77,7 @@ bool is_const_group(const FusionHelperBase* helper,
                     const std::shared_ptr<Graph::Group>& group) {
   return group->CollectNodes().size() == 1 &&
          helper->IsConstOp(group->CollectNodes()[0]);
-};
+}
 
 CONDITION_FUNC(elementwise_fuse_broadcast) {
   // if producer just include const op.
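
This hunk and the dot_merger.cc one above remove the other recurring nit: a semicolon after a function body. Unlike a class definition, a function definition takes no trailing semicolon; the extra one parses as an empty declaration, and cpplint reports it under [readability/braces]. A hypothetical minimal case:

// Before: trailing ';' after the function body triggers the warning.
bool IsUnit(int n) {
  return n == 1;
};

// After: the definition ends at the closing brace.
bool IsUnit(int n) {
  return n == 1;
}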
5 changes: 3 additions & 2 deletions paddle/cinn/hlir/pe/reduction.cc
@@ -759,8 +759,9 @@ std::vector<ir::Tensor> ReduceInternal(const ir::Tensor& A,
         A, axes, keep_dim, output_name, Reduce##name, initial, reduce_type); \
     if (rs.size() == 0) {                                                    \
       return {Reduce##name(A, axes, keep_dim, output_name)};                 \
-    } else                                                                   \
+    } else {                                                                 \
       return rs;                                                             \
+    }                                                                        \
   }                                                                          \
 }
 
@@ -801,7 +802,7 @@ bool WithoutLastDimInReduce(const std::vector<ir::Expr>& inshape,
   } else {
     return false;
   }
-};
+}
 
 using BlockReduceFunc =
     std::function<std::vector<ir::Tensor>(const ir::Tensor&,
3 changes: 2 additions & 1 deletion paddle/cinn/hlir/pe/schedule.cc
@@ -2814,8 +2814,9 @@ void CudaSplitSchedule(common::CINNValuePack *arg_pack,
       stages[last_output]->Bind(0, "blockIdx.x");
       stages[last_output]->Bind(1, "threadIdx.x");
       compute_at_level++;
-    } else
+    } else {
       stages[last_output]->Bind(0, "threadIdx.x");
+    }
   }
 
   for (int i = 0; i < out_tensors.size() - 1; i++) {
3 changes: 2 additions & 1 deletion paddle/cinn/ir/ir_schedule.cc
@@ -2142,8 +2142,9 @@ void ScheduleImpl::CopyTransformAndLoopInfo(const Expr& block,
         !vars[i]->is_reduce_axis && !vars_target[i]->is_reduce_axis) {
       new_iter_values.push_back(iter_values_target[i]);
       VLOG(3) << "new_iter_values.push_back " << iter_values_target[i];
-    } else
+    } else {
       break;
+    }
   }
 
   if (new_iter_values.empty())
6 changes: 3 additions & 3 deletions paddle/cinn/runtime/cuda/cuda_util.cc
@@ -665,7 +665,7 @@ std::string debug_cudnn_tensor_format(cudnnTensorFormat_t tensor_format) {
       return "NHWC";
     default:
       LOG(FATAL) << "Only support NCHW and NHWC data layout\n";
-  };
+  }
   return "";
 }
 
@@ -681,7 +681,7 @@ std::string debug_cudnn_tensor_dtype(cudnnDataType_t tensor_dtype) {
       return "float64";
     default:
       LOG(FATAL) << "Only support float16/bfloat16/float32/float64 now!";
-  };
+  }
   return "";
 }
 
@@ -697,7 +697,7 @@ std::string debug_cudnn_pool_mode(cudnnPoolingMode_t pool_mode) {
       return "avg_exclulude_padding";
     default:
       LOG(FATAL) << "Pool only support max and avg now!";
-  };
+  }
   return "";
 }
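
The same rule fires on a semicolon after a switch block's closing brace, which is likewise a harmless empty statement. A minimal reproduction (hypothetical, not from the diff):

#include <string>

std::string PoolModeName(int mode) {
  switch (mode) {
    case 0:
      return "max";
    default:
      return "avg";
  };  // Before: cpplint flags this ';'. After the fix, the switch ends with "}" alone.
}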
2 changes: 1 addition & 1 deletion paddle/cinn/utils/event.h
@@ -69,7 +69,7 @@ class Summary {
  public:
   struct Raito {
     double value;
-    Raito(double val) : value(val){};
+    Raito(double val) : value(val) {}
     std::string ToStr() const { return std::to_string(value); }
   };
3 changes: 2 additions & 1 deletion paddle/cinn/utils/string.cc
@@ -112,8 +112,9 @@ size_t Count(std::string *s, const std::string &sub) {
            !IsSuffix(s->at(pos + sub.length())))) {
       pos += sub.length();
       times++;
-    } else
+    } else {
       pos++;
+    }
   }
   return times;
 }